├── .DS_Store
├── FaceDemo-Bridging-Header.h
├── FaceDemo.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ ├── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
│ └── xcuserdata
│ │ └── user.xcuserdatad
│ │ └── UserInterfaceState.xcuserstate
└── xcuserdata
│ ├── admin.xcuserdatad
│ └── xcschemes
│ │ └── xcschememanagement.plist
│ ├── kjh.xcuserdatad
│ └── xcschemes
│ │ └── xcschememanagement.plist
│ └── user.xcuserdatad
│ ├── xcdebugger
│ └── Breakpoints_v2.xcbkptlist
│ └── xcschemes
│ └── xcschememanagement.plist
├── FaceDemo
├── .DS_Store
├── AppDelegate.swift
├── Assets.xcassets
│ ├── .DS_Store
│ ├── AccentColor.colorset
│ │ └── Contents.json
│ ├── AppIcon.appiconset
│ │ ├── Contents.json
│ │ └── Recognito-1024x1024.png
│ ├── Contents.json
│ ├── Title.imageset
│ │ ├── Contents.json
│ │ ├── title 1.png
│ │ ├── title 2.png
│ │ └── title.png
│ ├── clr_bg.colorset
│ │ └── Contents.json
│ ├── clr_btn_text.colorset
│ │ └── Contents.json
│ ├── clr_item_bg.colorset
│ │ └── Contents.json
│ ├── clr_main_button_bg1.colorset
│ │ └── Contents.json
│ ├── clr_main_button_bg2.colorset
│ │ └── Contents.json
│ ├── clr_main_button_bg3.colorset
│ │ └── Contents.json
│ ├── clr_text.colorset
│ │ └── Contents.json
│ ├── clr_title_text.colorset
│ │ └── Contents.json
│ ├── clr_toast_bg.colorset
│ │ └── Contents.json
│ ├── img_close.imageset
│ │ ├── Contents.json
│ │ ├── ic_close 1.png
│ │ ├── ic_close 2.png
│ │ └── ic_close.png
│ ├── img_delete.imageset
│ │ ├── Contents.json
│ │ ├── ic_delete 1.png
│ │ ├── ic_delete 2.png
│ │ └── ic_delete.png
│ ├── img_edit.imageset
│ │ ├── Contents.json
│ │ ├── ic_edit 1.png
│ │ ├── ic_edit 2.png
│ │ └── ic_edit.png
│ ├── img_logo.imageset
│ │ ├── Contents.json
│ │ ├── Recognito_white_back_url 1.png
│ │ ├── Recognito_white_back_url 2.png
│ │ └── Recognito_white_back_url.png
│ ├── img_setting.imageset
│ │ ├── Contents.json
│ │ ├── ic_setting 1.png
│ │ ├── ic_setting 2.png
│ │ └── ic_setting.png
│ ├── img_switchCamera.imageset
│ │ ├── Contents.json
│ │ ├── ic_camera_flip 1.png
│ │ ├── ic_camera_flip 2.png
│ │ └── ic_camera_flip.png
│ └── txt_clr_third.colorset
│ │ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── CameraViewController.swift
├── CircularProgressView.swift
├── FaceRecognition.xcdatamodeld
│ ├── .xccurrentversion
│ └── FaceRecognition.xcdatamodel
│ │ └── contents
├── FaceView.swift
├── Info.plist
├── Model.xcdatamodeld
│ └── Model.xcdatamodel
│ │ └── contents
├── PersonViewCell.swift
├── SceneDelegate.swift
├── SettingsViewController.swift
├── ToastView.swift
├── UIImageExtension.swift
└── ViewController.swift
├── FaceDemoTests
└── FaceDemoTests.swift
├── FaceDemoUITests
├── FaceDemoUITests.swift
└── FaceDemoUITestsLaunchTests.swift
├── README.md
├── facesdk.framework
├── Headers
│ ├── facesdk.h
│ └── facesdk_api.h
├── Info.plist
├── Modules
│ └── module.modulemap
├── _CodeSignature
│ └── CodeResources
├── detection.bin
├── detection.param
├── facesdk
├── landmark.bin
├── landmark.param
├── liveness.bin
├── recognize.bin
└── recognize.param
└── license.txt

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/.DS_Store
--------------------------------------------------------------------------------
/FaceDemo-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | // 2 | // Use
this file to import your target's public headers that you would like to expose to Swift. 3 | // 4 | #import "facesdk/facesdk.h" 5 | -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 54; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 37F3DCEB2B84F230002DE7A3 /* facesdk.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 37F3DCE92B84F12A002DE7A3 /* facesdk.framework */; }; 11 | 37F3DCEC2B84F230002DE7A3 /* facesdk.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 37F3DCE92B84F12A002DE7A3 /* facesdk.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; 12 | CE08D25A29ED8D4F0071F025 /* ToastView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE08D25929ED8D4F0071F025 /* ToastView.swift */; }; 13 | CE29C50529ECD0A5008EDB5A /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C50429ECD0A5008EDB5A /* AppDelegate.swift */; }; 14 | CE29C50729ECD0A5008EDB5A /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C50629ECD0A5008EDB5A /* SceneDelegate.swift */; }; 15 | CE29C50929ECD0A5008EDB5A /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C50829ECD0A5008EDB5A /* ViewController.swift */; }; 16 | CE29C50C29ECD0A5008EDB5A /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = CE29C50A29ECD0A5008EDB5A /* Main.storyboard */; }; 17 | CE29C50F29ECD0A5008EDB5A /* FaceRecognition.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = CE29C50D29ECD0A5008EDB5A /* FaceRecognition.xcdatamodeld */; }; 18 | CE29C51129ECD0FB008EDB5A /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = CE29C51029ECD0FB008EDB5A /* Assets.xcassets */; }; 19 | CE29C51429ECD0FB008EDB5A /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = CE29C51229ECD0FB008EDB5A /* LaunchScreen.storyboard */; }; 20 | CE29C51F29ECD0FC008EDB5A /* FaceDemoTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C51E29ECD0FC008EDB5A /* FaceDemoTests.swift */; }; 21 | CE29C52929ECD0FC008EDB5A /* FaceDemoUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C52829ECD0FC008EDB5A /* FaceDemoUITests.swift */; }; 22 | CE29C52B29ECD0FC008EDB5A /* FaceDemoUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C52A29ECD0FC008EDB5A /* FaceDemoUITestsLaunchTests.swift */; }; 23 | CE29C54329ECE15C008EDB5A /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C53E29ECE159008EDB5A /* CameraViewController.swift */; }; 24 | CE29C54429ECE15C008EDB5A /* UIImageExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C53F29ECE15A008EDB5A /* UIImageExtension.swift */; }; 25 | CE29C54529ECE15C008EDB5A /* FaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C54029ECE15B008EDB5A /* FaceView.swift */; }; 26 | CE29C54829ED6A0C008EDB5A /* Model.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = CE29C54629ED6A0C008EDB5A /* Model.xcdatamodeld */; }; 27 | CE29C54A29ED7527008EDB5A /* PersonViewCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C54929ED7527008EDB5A /* PersonViewCell.swift */; }; 28 | CE40B5242BBC3E6900348038 /* license.txt in Resources */ = {isa = PBXBuildFile; fileRef = CE40B5232BBC3E6900348038 /* license.txt */; }; 29 | 
CE75EF152BBB0438007599D8 /* CircularProgressView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE75EF142BBB0438007599D8 /* CircularProgressView.swift */; }; 30 | CE75EF172BBB154D007599D8 /* SettingsViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE75EF162BBB154D007599D8 /* SettingsViewController.swift */; }; 31 | /* End PBXBuildFile section */ 32 | 33 | /* Begin PBXContainerItemProxy section */ 34 | CE29C51B29ECD0FC008EDB5A /* PBXContainerItemProxy */ = { 35 | isa = PBXContainerItemProxy; 36 | containerPortal = CE29C4F929ECD0A5008EDB5A /* Project object */; 37 | proxyType = 1; 38 | remoteGlobalIDString = CE29C50029ECD0A5008EDB5A; 39 | remoteInfo = FaceRecognition; 40 | }; 41 | CE29C52529ECD0FC008EDB5A /* PBXContainerItemProxy */ = { 42 | isa = PBXContainerItemProxy; 43 | containerPortal = CE29C4F929ECD0A5008EDB5A /* Project object */; 44 | proxyType = 1; 45 | remoteGlobalIDString = CE29C50029ECD0A5008EDB5A; 46 | remoteInfo = FaceRecognition; 47 | }; 48 | /* End PBXContainerItemProxy section */ 49 | 50 | /* Begin PBXCopyFilesBuildPhase section */ 51 | CE29C53B29ECE136008EDB5A /* Embed Frameworks */ = { 52 | isa = PBXCopyFilesBuildPhase; 53 | buildActionMask = 2147483647; 54 | dstPath = ""; 55 | dstSubfolderSpec = 10; 56 | files = ( 57 | 37F3DCEC2B84F230002DE7A3 /* facesdk.framework in Embed Frameworks */, 58 | ); 59 | name = "Embed Frameworks"; 60 | runOnlyForDeploymentPostprocessing = 0; 61 | }; 62 | /* End PBXCopyFilesBuildPhase section */ 63 | 64 | /* Begin PBXFileReference section */ 65 | 37F3DCE92B84F12A002DE7A3 /* facesdk.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = facesdk.framework; sourceTree = ""; }; 66 | CE08D25929ED8D4F0071F025 /* ToastView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ToastView.swift; sourceTree = ""; }; 67 | CE29C50129ECD0A5008EDB5A /* FaceDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FaceDemo.app; sourceTree = BUILT_PRODUCTS_DIR; }; 68 | CE29C50429ECD0A5008EDB5A /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 69 | CE29C50629ECD0A5008EDB5A /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; 70 | CE29C50829ECD0A5008EDB5A /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 71 | CE29C50B29ECD0A5008EDB5A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 72 | CE29C50E29ECD0A5008EDB5A /* FaceRecognition.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = FaceRecognition.xcdatamodel; sourceTree = ""; }; 73 | CE29C51029ECD0FB008EDB5A /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 74 | CE29C51329ECD0FB008EDB5A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 75 | CE29C51529ECD0FB008EDB5A /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 76 | CE29C51A29ECD0FC008EDB5A /* FaceDemoTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex 
= 0; path = FaceDemoTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 77 | CE29C51E29ECD0FC008EDB5A /* FaceDemoTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceDemoTests.swift; sourceTree = ""; }; 78 | CE29C52429ECD0FC008EDB5A /* FaceDemoUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceDemoUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 79 | CE29C52829ECD0FC008EDB5A /* FaceDemoUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceDemoUITests.swift; sourceTree = ""; }; 80 | CE29C52A29ECD0FC008EDB5A /* FaceDemoUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceDemoUITestsLaunchTests.swift; sourceTree = ""; }; 81 | CE29C53E29ECE159008EDB5A /* CameraViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = ""; }; 82 | CE29C53F29ECE15A008EDB5A /* UIImageExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = UIImageExtension.swift; sourceTree = ""; }; 83 | CE29C54029ECE15B008EDB5A /* FaceView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FaceView.swift; sourceTree = ""; }; 84 | CE29C54729ED6A0C008EDB5A /* Model.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = Model.xcdatamodel; sourceTree = ""; }; 85 | CE29C54929ED7527008EDB5A /* PersonViewCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PersonViewCell.swift; sourceTree = ""; }; 86 | CE40B5232BBC3E6900348038 /* license.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = license.txt; sourceTree = ""; }; 87 | CE75EF142BBB0438007599D8 /* CircularProgressView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CircularProgressView.swift; sourceTree = ""; }; 88 | CE75EF162BBB154D007599D8 /* SettingsViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SettingsViewController.swift; sourceTree = ""; }; 89 | /* End PBXFileReference section */ 90 | 91 | /* Begin PBXFrameworksBuildPhase section */ 92 | CE29C4FE29ECD0A5008EDB5A /* Frameworks */ = { 93 | isa = PBXFrameworksBuildPhase; 94 | buildActionMask = 2147483647; 95 | files = ( 96 | 37F3DCEB2B84F230002DE7A3 /* facesdk.framework in Frameworks */, 97 | ); 98 | runOnlyForDeploymentPostprocessing = 0; 99 | }; 100 | CE29C51729ECD0FC008EDB5A /* Frameworks */ = { 101 | isa = PBXFrameworksBuildPhase; 102 | buildActionMask = 2147483647; 103 | files = ( 104 | ); 105 | runOnlyForDeploymentPostprocessing = 0; 106 | }; 107 | CE29C52129ECD0FC008EDB5A /* Frameworks */ = { 108 | isa = PBXFrameworksBuildPhase; 109 | buildActionMask = 2147483647; 110 | files = ( 111 | ); 112 | runOnlyForDeploymentPostprocessing = 0; 113 | }; 114 | /* End PBXFrameworksBuildPhase section */ 115 | 116 | /* Begin PBXGroup section */ 117 | CE29C4F829ECD0A4008EDB5A = { 118 | isa = PBXGroup; 119 | children = ( 120 | CE40B5232BBC3E6900348038 /* license.txt */, 121 | CE29C50329ECD0A5008EDB5A /* FaceDemo */, 122 | CE29C51D29ECD0FC008EDB5A /* FaceDemoTests */, 123 | CE29C52729ECD0FC008EDB5A /* FaceDemoUITests */, 124 | CE29C50229ECD0A5008EDB5A /* Products */, 125 | CE29C53729ECE131008EDB5A /* Frameworks */, 126 | ); 127 | sourceTree = ""; 128 | }; 129 
| CE29C50229ECD0A5008EDB5A /* Products */ = { 130 | isa = PBXGroup; 131 | children = ( 132 | CE29C50129ECD0A5008EDB5A /* FaceDemo.app */, 133 | CE29C51A29ECD0FC008EDB5A /* FaceDemoTests.xctest */, 134 | CE29C52429ECD0FC008EDB5A /* FaceDemoUITests.xctest */, 135 | ); 136 | name = Products; 137 | sourceTree = ""; 138 | }; 139 | CE29C50329ECD0A5008EDB5A /* FaceDemo */ = { 140 | isa = PBXGroup; 141 | children = ( 142 | CE75EF162BBB154D007599D8 /* SettingsViewController.swift */, 143 | CE29C50429ECD0A5008EDB5A /* AppDelegate.swift */, 144 | CE08D25929ED8D4F0071F025 /* ToastView.swift */, 145 | CE29C50629ECD0A5008EDB5A /* SceneDelegate.swift */, 146 | CE29C50829ECD0A5008EDB5A /* ViewController.swift */, 147 | CE29C54929ED7527008EDB5A /* PersonViewCell.swift */, 148 | CE29C54629ED6A0C008EDB5A /* Model.xcdatamodeld */, 149 | CE29C53E29ECE159008EDB5A /* CameraViewController.swift */, 150 | CE29C54029ECE15B008EDB5A /* FaceView.swift */, 151 | CE29C53F29ECE15A008EDB5A /* UIImageExtension.swift */, 152 | CE29C50A29ECD0A5008EDB5A /* Main.storyboard */, 153 | CE29C51029ECD0FB008EDB5A /* Assets.xcassets */, 154 | CE29C51229ECD0FB008EDB5A /* LaunchScreen.storyboard */, 155 | CE29C51529ECD0FB008EDB5A /* Info.plist */, 156 | CE29C50D29ECD0A5008EDB5A /* FaceRecognition.xcdatamodeld */, 157 | CE75EF142BBB0438007599D8 /* CircularProgressView.swift */, 158 | ); 159 | path = FaceDemo; 160 | sourceTree = ""; 161 | }; 162 | CE29C51D29ECD0FC008EDB5A /* FaceDemoTests */ = { 163 | isa = PBXGroup; 164 | children = ( 165 | CE29C51E29ECD0FC008EDB5A /* FaceDemoTests.swift */, 166 | ); 167 | path = FaceDemoTests; 168 | sourceTree = ""; 169 | }; 170 | CE29C52729ECD0FC008EDB5A /* FaceDemoUITests */ = { 171 | isa = PBXGroup; 172 | children = ( 173 | CE29C52829ECD0FC008EDB5A /* FaceDemoUITests.swift */, 174 | CE29C52A29ECD0FC008EDB5A /* FaceDemoUITestsLaunchTests.swift */, 175 | ); 176 | path = FaceDemoUITests; 177 | sourceTree = ""; 178 | }; 179 | CE29C53729ECE131008EDB5A /* Frameworks */ = { 180 | isa = PBXGroup; 181 | children = ( 182 | 37F3DCE92B84F12A002DE7A3 /* facesdk.framework */, 183 | ); 184 | name = Frameworks; 185 | sourceTree = ""; 186 | }; 187 | /* End PBXGroup section */ 188 | 189 | /* Begin PBXNativeTarget section */ 190 | CE29C50029ECD0A5008EDB5A /* FaceDemo */ = { 191 | isa = PBXNativeTarget; 192 | buildConfigurationList = CE29C52E29ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceDemo" */; 193 | buildPhases = ( 194 | CE29C4FD29ECD0A5008EDB5A /* Sources */, 195 | CE29C4FE29ECD0A5008EDB5A /* Frameworks */, 196 | CE29C4FF29ECD0A5008EDB5A /* Resources */, 197 | CE29C53B29ECE136008EDB5A /* Embed Frameworks */, 198 | ); 199 | buildRules = ( 200 | ); 201 | dependencies = ( 202 | ); 203 | name = FaceDemo; 204 | productName = FaceRecognition; 205 | productReference = CE29C50129ECD0A5008EDB5A /* FaceDemo.app */; 206 | productType = "com.apple.product-type.application"; 207 | }; 208 | CE29C51929ECD0FC008EDB5A /* FaceDemoTests */ = { 209 | isa = PBXNativeTarget; 210 | buildConfigurationList = CE29C53129ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceDemoTests" */; 211 | buildPhases = ( 212 | CE29C51629ECD0FC008EDB5A /* Sources */, 213 | CE29C51729ECD0FC008EDB5A /* Frameworks */, 214 | CE29C51829ECD0FC008EDB5A /* Resources */, 215 | ); 216 | buildRules = ( 217 | ); 218 | dependencies = ( 219 | CE29C51C29ECD0FC008EDB5A /* PBXTargetDependency */, 220 | ); 221 | name = FaceDemoTests; 222 | productName = FaceRecognitionTests; 223 | productReference = CE29C51A29ECD0FC008EDB5A /* 
FaceDemoTests.xctest */; 224 | productType = "com.apple.product-type.bundle.unit-test"; 225 | }; 226 | CE29C52329ECD0FC008EDB5A /* FaceDemoUITests */ = { 227 | isa = PBXNativeTarget; 228 | buildConfigurationList = CE29C53429ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceDemoUITests" */; 229 | buildPhases = ( 230 | CE29C52029ECD0FC008EDB5A /* Sources */, 231 | CE29C52129ECD0FC008EDB5A /* Frameworks */, 232 | CE29C52229ECD0FC008EDB5A /* Resources */, 233 | ); 234 | buildRules = ( 235 | ); 236 | dependencies = ( 237 | CE29C52629ECD0FC008EDB5A /* PBXTargetDependency */, 238 | ); 239 | name = FaceDemoUITests; 240 | productName = FaceRecognitionUITests; 241 | productReference = CE29C52429ECD0FC008EDB5A /* FaceDemoUITests.xctest */; 242 | productType = "com.apple.product-type.bundle.ui-testing"; 243 | }; 244 | /* End PBXNativeTarget section */ 245 | 246 | /* Begin PBXProject section */ 247 | CE29C4F929ECD0A5008EDB5A /* Project object */ = { 248 | isa = PBXProject; 249 | attributes = { 250 | BuildIndependentTargetsInParallel = 1; 251 | LastSwiftUpdateCheck = 1420; 252 | LastUpgradeCheck = 1420; 253 | TargetAttributes = { 254 | CE29C50029ECD0A5008EDB5A = { 255 | CreatedOnToolsVersion = 14.2; 256 | }; 257 | CE29C51929ECD0FC008EDB5A = { 258 | CreatedOnToolsVersion = 14.2; 259 | TestTargetID = CE29C50029ECD0A5008EDB5A; 260 | }; 261 | CE29C52329ECD0FC008EDB5A = { 262 | CreatedOnToolsVersion = 14.2; 263 | TestTargetID = CE29C50029ECD0A5008EDB5A; 264 | }; 265 | }; 266 | }; 267 | buildConfigurationList = CE29C4FC29ECD0A5008EDB5A /* Build configuration list for PBXProject "FaceDemo" */; 268 | compatibilityVersion = "Xcode 12.0"; 269 | developmentRegion = en; 270 | hasScannedForEncodings = 0; 271 | knownRegions = ( 272 | en, 273 | Base, 274 | ); 275 | mainGroup = CE29C4F829ECD0A4008EDB5A; 276 | productRefGroup = CE29C50229ECD0A5008EDB5A /* Products */; 277 | projectDirPath = ""; 278 | projectRoot = ""; 279 | targets = ( 280 | CE29C50029ECD0A5008EDB5A /* FaceDemo */, 281 | CE29C51929ECD0FC008EDB5A /* FaceDemoTests */, 282 | CE29C52329ECD0FC008EDB5A /* FaceDemoUITests */, 283 | ); 284 | }; 285 | /* End PBXProject section */ 286 | 287 | /* Begin PBXResourcesBuildPhase section */ 288 | CE29C4FF29ECD0A5008EDB5A /* Resources */ = { 289 | isa = PBXResourcesBuildPhase; 290 | buildActionMask = 2147483647; 291 | files = ( 292 | CE40B5242BBC3E6900348038 /* license.txt in Resources */, 293 | CE29C51429ECD0FB008EDB5A /* LaunchScreen.storyboard in Resources */, 294 | CE29C51129ECD0FB008EDB5A /* Assets.xcassets in Resources */, 295 | CE29C50C29ECD0A5008EDB5A /* Main.storyboard in Resources */, 296 | ); 297 | runOnlyForDeploymentPostprocessing = 0; 298 | }; 299 | CE29C51829ECD0FC008EDB5A /* Resources */ = { 300 | isa = PBXResourcesBuildPhase; 301 | buildActionMask = 2147483647; 302 | files = ( 303 | ); 304 | runOnlyForDeploymentPostprocessing = 0; 305 | }; 306 | CE29C52229ECD0FC008EDB5A /* Resources */ = { 307 | isa = PBXResourcesBuildPhase; 308 | buildActionMask = 2147483647; 309 | files = ( 310 | ); 311 | runOnlyForDeploymentPostprocessing = 0; 312 | }; 313 | /* End PBXResourcesBuildPhase section */ 314 | 315 | /* Begin PBXSourcesBuildPhase section */ 316 | CE29C4FD29ECD0A5008EDB5A /* Sources */ = { 317 | isa = PBXSourcesBuildPhase; 318 | buildActionMask = 2147483647; 319 | files = ( 320 | CE29C50F29ECD0A5008EDB5A /* FaceRecognition.xcdatamodeld in Sources */, 321 | CE29C50929ECD0A5008EDB5A /* ViewController.swift in Sources */, 322 | CE29C50529ECD0A5008EDB5A /* AppDelegate.swift in Sources */, 323 | 
CE29C54429ECE15C008EDB5A /* UIImageExtension.swift in Sources */, 324 | CE08D25A29ED8D4F0071F025 /* ToastView.swift in Sources */, 325 | CE29C54329ECE15C008EDB5A /* CameraViewController.swift in Sources */, 326 | CE29C50729ECD0A5008EDB5A /* SceneDelegate.swift in Sources */, 327 | CE29C54829ED6A0C008EDB5A /* Model.xcdatamodeld in Sources */, 328 | CE29C54A29ED7527008EDB5A /* PersonViewCell.swift in Sources */, 329 | CE29C54529ECE15C008EDB5A /* FaceView.swift in Sources */, 330 | CE75EF152BBB0438007599D8 /* CircularProgressView.swift in Sources */, 331 | CE75EF172BBB154D007599D8 /* SettingsViewController.swift in Sources */, 332 | ); 333 | runOnlyForDeploymentPostprocessing = 0; 334 | }; 335 | CE29C51629ECD0FC008EDB5A /* Sources */ = { 336 | isa = PBXSourcesBuildPhase; 337 | buildActionMask = 2147483647; 338 | files = ( 339 | CE29C51F29ECD0FC008EDB5A /* FaceDemoTests.swift in Sources */, 340 | ); 341 | runOnlyForDeploymentPostprocessing = 0; 342 | }; 343 | CE29C52029ECD0FC008EDB5A /* Sources */ = { 344 | isa = PBXSourcesBuildPhase; 345 | buildActionMask = 2147483647; 346 | files = ( 347 | CE29C52929ECD0FC008EDB5A /* FaceDemoUITests.swift in Sources */, 348 | CE29C52B29ECD0FC008EDB5A /* FaceDemoUITestsLaunchTests.swift in Sources */, 349 | ); 350 | runOnlyForDeploymentPostprocessing = 0; 351 | }; 352 | /* End PBXSourcesBuildPhase section */ 353 | 354 | /* Begin PBXTargetDependency section */ 355 | CE29C51C29ECD0FC008EDB5A /* PBXTargetDependency */ = { 356 | isa = PBXTargetDependency; 357 | target = CE29C50029ECD0A5008EDB5A /* FaceDemo */; 358 | targetProxy = CE29C51B29ECD0FC008EDB5A /* PBXContainerItemProxy */; 359 | }; 360 | CE29C52629ECD0FC008EDB5A /* PBXTargetDependency */ = { 361 | isa = PBXTargetDependency; 362 | target = CE29C50029ECD0A5008EDB5A /* FaceDemo */; 363 | targetProxy = CE29C52529ECD0FC008EDB5A /* PBXContainerItemProxy */; 364 | }; 365 | /* End PBXTargetDependency section */ 366 | 367 | /* Begin PBXVariantGroup section */ 368 | CE29C50A29ECD0A5008EDB5A /* Main.storyboard */ = { 369 | isa = PBXVariantGroup; 370 | children = ( 371 | CE29C50B29ECD0A5008EDB5A /* Base */, 372 | ); 373 | name = Main.storyboard; 374 | sourceTree = ""; 375 | }; 376 | CE29C51229ECD0FB008EDB5A /* LaunchScreen.storyboard */ = { 377 | isa = PBXVariantGroup; 378 | children = ( 379 | CE29C51329ECD0FB008EDB5A /* Base */, 380 | ); 381 | name = LaunchScreen.storyboard; 382 | sourceTree = ""; 383 | }; 384 | /* End PBXVariantGroup section */ 385 | 386 | /* Begin XCBuildConfiguration section */ 387 | CE29C52C29ECD0FC008EDB5A /* Debug */ = { 388 | isa = XCBuildConfiguration; 389 | buildSettings = { 390 | ALWAYS_SEARCH_USER_PATHS = NO; 391 | CLANG_ANALYZER_NONNULL = YES; 392 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 393 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 394 | CLANG_ENABLE_MODULES = YES; 395 | CLANG_ENABLE_OBJC_ARC = YES; 396 | CLANG_ENABLE_OBJC_WEAK = YES; 397 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 398 | CLANG_WARN_BOOL_CONVERSION = YES; 399 | CLANG_WARN_COMMA = YES; 400 | CLANG_WARN_CONSTANT_CONVERSION = YES; 401 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 402 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 403 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 404 | CLANG_WARN_EMPTY_BODY = YES; 405 | CLANG_WARN_ENUM_CONVERSION = YES; 406 | CLANG_WARN_INFINITE_RECURSION = YES; 407 | CLANG_WARN_INT_CONVERSION = YES; 408 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 409 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 410 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 411 | 
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 412 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 413 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 414 | CLANG_WARN_STRICT_PROTOTYPES = YES; 415 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 416 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 417 | CLANG_WARN_UNREACHABLE_CODE = YES; 418 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 419 | COPY_PHASE_STRIP = NO; 420 | DEBUG_INFORMATION_FORMAT = dwarf; 421 | ENABLE_STRICT_OBJC_MSGSEND = YES; 422 | ENABLE_TESTABILITY = YES; 423 | GCC_C_LANGUAGE_STANDARD = gnu11; 424 | GCC_DYNAMIC_NO_PIC = NO; 425 | GCC_NO_COMMON_BLOCKS = YES; 426 | GCC_OPTIMIZATION_LEVEL = 0; 427 | GCC_PREPROCESSOR_DEFINITIONS = ( 428 | "DEBUG=1", 429 | "$(inherited)", 430 | ); 431 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 432 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 433 | GCC_WARN_UNDECLARED_SELECTOR = YES; 434 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 435 | GCC_WARN_UNUSED_FUNCTION = YES; 436 | GCC_WARN_UNUSED_VARIABLE = YES; 437 | IPHONEOS_DEPLOYMENT_TARGET = 16.2; 438 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 439 | MTL_FAST_MATH = YES; 440 | ONLY_ACTIVE_ARCH = YES; 441 | SDKROOT = iphoneos; 442 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 443 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 444 | }; 445 | name = Debug; 446 | }; 447 | CE29C52D29ECD0FC008EDB5A /* Release */ = { 448 | isa = XCBuildConfiguration; 449 | buildSettings = { 450 | ALWAYS_SEARCH_USER_PATHS = NO; 451 | CLANG_ANALYZER_NONNULL = YES; 452 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 453 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 454 | CLANG_ENABLE_MODULES = YES; 455 | CLANG_ENABLE_OBJC_ARC = YES; 456 | CLANG_ENABLE_OBJC_WEAK = YES; 457 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 458 | CLANG_WARN_BOOL_CONVERSION = YES; 459 | CLANG_WARN_COMMA = YES; 460 | CLANG_WARN_CONSTANT_CONVERSION = YES; 461 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 462 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 463 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 464 | CLANG_WARN_EMPTY_BODY = YES; 465 | CLANG_WARN_ENUM_CONVERSION = YES; 466 | CLANG_WARN_INFINITE_RECURSION = YES; 467 | CLANG_WARN_INT_CONVERSION = YES; 468 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 469 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 470 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 471 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 472 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 473 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 474 | CLANG_WARN_STRICT_PROTOTYPES = YES; 475 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 476 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 477 | CLANG_WARN_UNREACHABLE_CODE = YES; 478 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 479 | COPY_PHASE_STRIP = NO; 480 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 481 | ENABLE_NS_ASSERTIONS = NO; 482 | ENABLE_STRICT_OBJC_MSGSEND = YES; 483 | GCC_C_LANGUAGE_STANDARD = gnu11; 484 | GCC_NO_COMMON_BLOCKS = YES; 485 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 486 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 487 | GCC_WARN_UNDECLARED_SELECTOR = YES; 488 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 489 | GCC_WARN_UNUSED_FUNCTION = YES; 490 | GCC_WARN_UNUSED_VARIABLE = YES; 491 | IPHONEOS_DEPLOYMENT_TARGET = 16.2; 492 | MTL_ENABLE_DEBUG_INFO = NO; 493 | MTL_FAST_MATH = YES; 494 | SDKROOT = iphoneos; 495 | SWIFT_COMPILATION_MODE = wholemodule; 496 | SWIFT_OPTIMIZATION_LEVEL = "-O"; 497 | VALIDATE_PRODUCT = YES; 498 | }; 499 | name = Release; 500 | }; 501 | CE29C52F29ECD0FC008EDB5A /* Debug */ = { 502 | isa = 
XCBuildConfiguration; 503 | buildSettings = { 504 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 505 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 506 | CODE_SIGN_STYLE = Automatic; 507 | CURRENT_PROJECT_VERSION = 2; 508 | DEVELOPMENT_TEAM = ""; 509 | ENABLE_BITCODE = NO; 510 | FRAMEWORK_SEARCH_PATHS = ( 511 | "$(inherited)", 512 | "$(PROJECT_DIR)", 513 | ); 514 | GENERATE_INFOPLIST_FILE = YES; 515 | INFOPLIST_FILE = FaceDemo/Info.plist; 516 | INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.productivity"; 517 | INFOPLIST_KEY_NSCameraUsageDescription = "The application utilizes the camera for performing facial recognition.\n\n\n\n\nThe app utilizes the camera for performing facial recognition."; 518 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 519 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; 520 | INFOPLIST_KEY_UIMainStoryboardFile = Main; 521 | INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait; 522 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 523 | LD_RUNPATH_SEARCH_PATHS = ( 524 | "$(inherited)", 525 | "@executable_path/Frameworks", 526 | ); 527 | MARKETING_VERSION = 1.1; 528 | PRODUCT_BUNDLE_IDENTIFIER = com.bio.facedemo; 529 | PRODUCT_NAME = "$(TARGET_NAME)"; 530 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; 531 | SUPPORTS_MACCATALYST = NO; 532 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; 533 | SWIFT_EMIT_LOC_STRINGS = YES; 534 | SWIFT_OBJC_BRIDGING_HEADER = "FaceDemo-Bridging-Header.h"; 535 | SWIFT_VERSION = 5.0; 536 | TARGETED_DEVICE_FAMILY = 1; 537 | }; 538 | name = Debug; 539 | }; 540 | CE29C53029ECD0FC008EDB5A /* Release */ = { 541 | isa = XCBuildConfiguration; 542 | buildSettings = { 543 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 544 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 545 | CODE_SIGN_STYLE = Automatic; 546 | CURRENT_PROJECT_VERSION = 2; 547 | DEVELOPMENT_TEAM = ""; 548 | ENABLE_BITCODE = NO; 549 | FRAMEWORK_SEARCH_PATHS = ( 550 | "$(inherited)", 551 | "$(PROJECT_DIR)", 552 | ); 553 | GENERATE_INFOPLIST_FILE = YES; 554 | INFOPLIST_FILE = FaceDemo/Info.plist; 555 | INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.productivity"; 556 | INFOPLIST_KEY_NSCameraUsageDescription = "The application utilizes the camera for performing facial recognition.\n\n\n\n\nThe app utilizes the camera for performing facial recognition."; 557 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 558 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; 559 | INFOPLIST_KEY_UIMainStoryboardFile = Main; 560 | INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait; 561 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 562 | LD_RUNPATH_SEARCH_PATHS = ( 563 | "$(inherited)", 564 | "@executable_path/Frameworks", 565 | ); 566 | MARKETING_VERSION = 1.1; 567 | PRODUCT_BUNDLE_IDENTIFIER = com.bio.facedemo; 568 | PRODUCT_NAME = "$(TARGET_NAME)"; 569 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; 570 | SUPPORTS_MACCATALYST = NO; 571 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; 572 | SWIFT_EMIT_LOC_STRINGS = YES; 573 | SWIFT_OBJC_BRIDGING_HEADER = "FaceDemo-Bridging-Header.h"; 574 | SWIFT_VERSION = 5.0; 575 | TARGETED_DEVICE_FAMILY = 1; 576 | }; 577 | name = Release; 578 | }; 579 | CE29C53229ECD0FC008EDB5A /* Debug */ = { 580 | isa = XCBuildConfiguration; 581 | buildSettings = { 582 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 583 | BUNDLE_LOADER = "$(TEST_HOST)"; 584 | CODE_SIGN_STYLE = Automatic; 585 | CURRENT_PROJECT_VERSION = 1; 586 | 
DEVELOPMENT_TEAM = JSUUF48N9C; 587 | GENERATE_INFOPLIST_FILE = YES; 588 | IPHONEOS_DEPLOYMENT_TARGET = 16.2; 589 | MARKETING_VERSION = 1.0; 590 | PRODUCT_BUNDLE_IDENTIFIER = com.bio.facedemo; 591 | PRODUCT_NAME = "$(TARGET_NAME)"; 592 | SWIFT_EMIT_LOC_STRINGS = NO; 593 | SWIFT_VERSION = 5.0; 594 | TARGETED_DEVICE_FAMILY = "1,2"; 595 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceDemo.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/FaceDemo"; 596 | }; 597 | name = Debug; 598 | }; 599 | CE29C53329ECD0FC008EDB5A /* Release */ = { 600 | isa = XCBuildConfiguration; 601 | buildSettings = { 602 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 603 | BUNDLE_LOADER = "$(TEST_HOST)"; 604 | CODE_SIGN_STYLE = Automatic; 605 | CURRENT_PROJECT_VERSION = 1; 606 | DEVELOPMENT_TEAM = JSUUF48N9C; 607 | GENERATE_INFOPLIST_FILE = YES; 608 | IPHONEOS_DEPLOYMENT_TARGET = 16.2; 609 | MARKETING_VERSION = 1.0; 610 | PRODUCT_BUNDLE_IDENTIFIER = com.bio.facedemo; 611 | PRODUCT_NAME = "$(TARGET_NAME)"; 612 | SWIFT_EMIT_LOC_STRINGS = NO; 613 | SWIFT_VERSION = 5.0; 614 | TARGETED_DEVICE_FAMILY = "1,2"; 615 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceDemo.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/FaceDemo"; 616 | }; 617 | name = Release; 618 | }; 619 | CE29C53529ECD0FC008EDB5A /* Debug */ = { 620 | isa = XCBuildConfiguration; 621 | buildSettings = { 622 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 623 | CODE_SIGN_STYLE = Automatic; 624 | CURRENT_PROJECT_VERSION = 1; 625 | DEVELOPMENT_TEAM = JSUUF48N9C; 626 | GENERATE_INFOPLIST_FILE = YES; 627 | MARKETING_VERSION = 1.0; 628 | PRODUCT_BUNDLE_IDENTIFIER = ""; 629 | PRODUCT_NAME = "$(TARGET_NAME)"; 630 | SWIFT_EMIT_LOC_STRINGS = NO; 631 | SWIFT_VERSION = 5.0; 632 | TARGETED_DEVICE_FAMILY = "1,2"; 633 | TEST_TARGET_NAME = FaceDemo; 634 | }; 635 | name = Debug; 636 | }; 637 | CE29C53629ECD0FC008EDB5A /* Release */ = { 638 | isa = XCBuildConfiguration; 639 | buildSettings = { 640 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 641 | CODE_SIGN_STYLE = Automatic; 642 | CURRENT_PROJECT_VERSION = 1; 643 | DEVELOPMENT_TEAM = JSUUF48N9C; 644 | GENERATE_INFOPLIST_FILE = YES; 645 | MARKETING_VERSION = 1.0; 646 | PRODUCT_BUNDLE_IDENTIFIER = ""; 647 | PRODUCT_NAME = "$(TARGET_NAME)"; 648 | SWIFT_EMIT_LOC_STRINGS = NO; 649 | SWIFT_VERSION = 5.0; 650 | TARGETED_DEVICE_FAMILY = "1,2"; 651 | TEST_TARGET_NAME = FaceDemo; 652 | }; 653 | name = Release; 654 | }; 655 | /* End XCBuildConfiguration section */ 656 | 657 | /* Begin XCConfigurationList section */ 658 | CE29C4FC29ECD0A5008EDB5A /* Build configuration list for PBXProject "FaceDemo" */ = { 659 | isa = XCConfigurationList; 660 | buildConfigurations = ( 661 | CE29C52C29ECD0FC008EDB5A /* Debug */, 662 | CE29C52D29ECD0FC008EDB5A /* Release */, 663 | ); 664 | defaultConfigurationIsVisible = 0; 665 | defaultConfigurationName = Release; 666 | }; 667 | CE29C52E29ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceDemo" */ = { 668 | isa = XCConfigurationList; 669 | buildConfigurations = ( 670 | CE29C52F29ECD0FC008EDB5A /* Debug */, 671 | CE29C53029ECD0FC008EDB5A /* Release */, 672 | ); 673 | defaultConfigurationIsVisible = 0; 674 | defaultConfigurationName = Release; 675 | }; 676 | CE29C53129ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceDemoTests" */ = { 677 | isa = XCConfigurationList; 678 | buildConfigurations = ( 679 | CE29C53229ECD0FC008EDB5A /* Debug */, 680 | CE29C53329ECD0FC008EDB5A /* Release */, 681 | ); 682 | defaultConfigurationIsVisible = 0; 683 | defaultConfigurationName = Release; 684 | }; 685 | CE29C53429ECD0FC008EDB5A 
/* Build configuration list for PBXNativeTarget "FaceDemoUITests" */ = { 686 | isa = XCConfigurationList; 687 | buildConfigurations = ( 688 | CE29C53529ECD0FC008EDB5A /* Debug */, 689 | CE29C53629ECD0FC008EDB5A /* Release */, 690 | ); 691 | defaultConfigurationIsVisible = 0; 692 | defaultConfigurationName = Release; 693 | }; 694 | /* End XCConfigurationList section */ 695 | 696 | /* Begin XCVersionGroup section */ 697 | CE29C50D29ECD0A5008EDB5A /* FaceRecognition.xcdatamodeld */ = { 698 | isa = XCVersionGroup; 699 | children = ( 700 | CE29C50E29ECD0A5008EDB5A /* FaceRecognition.xcdatamodel */, 701 | ); 702 | currentVersion = CE29C50E29ECD0A5008EDB5A /* FaceRecognition.xcdatamodel */; 703 | path = FaceRecognition.xcdatamodeld; 704 | sourceTree = ""; 705 | versionGroupType = wrapper.xcdatamodel; 706 | }; 707 | CE29C54629ED6A0C008EDB5A /* Model.xcdatamodeld */ = { 708 | isa = XCVersionGroup; 709 | children = ( 710 | CE29C54729ED6A0C008EDB5A /* Model.xcdatamodel */, 711 | ); 712 | currentVersion = CE29C54729ED6A0C008EDB5A /* Model.xcdatamodel */; 713 | path = Model.xcdatamodeld; 714 | sourceTree = ""; 715 | versionGroupType = wrapper.xcdatamodel; 716 | }; 717 | /* End XCVersionGroup section */ 718 | }; 719 | rootObject = CE29C4F929ECD0A5008EDB5A /* Project object */; 720 | } 721 | -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/project.xcworkspace/xcuserdata/user.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo.xcodeproj/project.xcworkspace/xcuserdata/user.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/xcuserdata/admin.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | FaceRecognition.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/xcuserdata/kjh.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | FaceRecognition.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/xcuserdata/user.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | -------------------------------------------------------------------------------- /FaceDemo.xcodeproj/xcuserdata/user.xcuserdatad/xcschemes/xcschememanagement.plist: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | FaceDemo.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | FaceRecognition.xcscheme_^#shared#^_ 13 | 14 | orderHint 15 | 0 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /FaceDemo/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/.DS_Store -------------------------------------------------------------------------------- /FaceDemo/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | 2 | import UIKit 3 | import CoreData 4 | 5 | @main 6 | class AppDelegate: UIResponder, UIApplicationDelegate { 7 | 8 | 9 | 10 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 11 | // Override point for customization after application launch. 12 | return true 13 | } 14 | 15 | // MARK: UISceneSession Lifecycle 16 | 17 | func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration { 18 | // Called when a new scene session is being created. 19 | // Use this method to select a configuration to create the new scene with. 20 | return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role) 21 | } 22 | 23 | func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set) { 24 | // Called when the user discards a scene session. 25 | // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions. 26 | // Use this method to release any resources that were specific to the discarded scenes, as they will not return. 27 | } 28 | 29 | // MARK: - Core Data stack 30 | 31 | lazy var persistentContainer: NSPersistentContainer = { 32 | /* 33 | The persistent container for the application. This implementation 34 | creates and returns a container, having loaded the store for the 35 | application to it. This property is optional since there are legitimate 36 | error conditions that could cause the creation of the store to fail. 37 | */ 38 | let container = NSPersistentContainer(name: "FaceDemo") 39 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 40 | if let error = error as NSError? { 41 | // Replace this implementation with code to handle the error appropriately. 42 | // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development. 43 | 44 | /* 45 | Typical reasons for an error here include: 46 | * The parent directory does not exist, cannot be created, or disallows writing. 47 | * The persistent store is not accessible, due to permissions or data protection when the device is locked. 48 | * The device is out of space. 49 | * The store could not be migrated to the current model version. 50 | Check the error message to determine what the actual problem was. 
51 | */ 52 | fatalError("Unresolved error \(error), \(error.userInfo)") 53 | } 54 | }) 55 | return container 56 | }() 57 | 58 | // MARK: - Core Data Saving support 59 | 60 | func saveContext () { 61 | let context = persistentContainer.viewContext 62 | if context.hasChanges { 63 | do { 64 | try context.save() 65 | } catch { 66 | // Replace this implementation with code to handle the error appropriately. 67 | // fatalError() causes the application to generate a crash log and terminate. You should not use this function in a shipping application, although it may be useful during development. 68 | let nserror = error as NSError 69 | fatalError("Unresolved error \(nserror), \(nserror.userInfo)") 70 | } 71 | } 72 | } 73 | 74 | } 75 | 76 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/.DS_Store -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "Recognito-1024x1024.png", 5 | "idiom" : "universal", 6 | "platform" : "ios", 7 | "size" : "1024x1024" 8 | } 9 | ], 10 | "info" : { 11 | "author" : "xcode", 12 | "version" : 1 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/AppIcon.appiconset/Recognito-1024x1024.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/AppIcon.appiconset/Recognito-1024x1024.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/Title.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "title 1.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "title.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "title 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/Title.imageset/title 1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/Title.imageset/title 1.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/Title.imageset/title 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/Title.imageset/title 2.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/Title.imageset/title.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/Title.imageset/title.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_bg.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x33", 9 | "green" : "0x30", 10 | "red" : "0x30" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x33", 27 | "green" : "0x30", 28 | "red" : "0x30" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_btn_text.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0xFF", 9 | "green" : "0xFF", 10 | "red" : "0xFF" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0xFF", 27 | "green" : "0xFF", 28 | "red" : "0xFF" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_item_bg.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0xEE", 9 | "green" : "0xEE", 10 | "red" : "0xEE" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0xEE", 27 | "green" : "0xEE", 28 | "red" : "0xEE" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | 
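
Editor's note: the color-set files above (clr_bg, clr_btn_text, clr_item_bg, and the ones that follow) each define a named catalog color that UIKit resolves at runtime. The sketch below is illustrative only, not repository code; whether the demo's own views reference these colors from storyboards or from code is an assumption here, and the `catalog(_:fallback:)` helper is a hypothetical name.

```swift
import UIKit

extension UIColor {
    /// Loads a named asset-catalog color, falling back to a visible default
    /// so a typo in the color-set name does not silently produce nil.
    static func catalog(_ name: String, fallback: UIColor = .systemPink) -> UIColor {
        UIColor(named: name) ?? fallback
    }
}

// Usage sketch (names taken from the asset catalog above):
// view.backgroundColor = .catalog("clr_bg")
// cell.contentView.backgroundColor = .catalog("clr_item_bg")
```
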
-------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_main_button_bg1.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x28", 9 | "green" : "0xB5", 10 | "red" : "0xFD" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x28", 27 | "green" : "0xB5", 28 | "red" : "0xFD" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_main_button_bg2.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x07", 9 | "green" : "0xC1", 10 | "red" : "0xFF" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x07", 27 | "green" : "0xC1", 28 | "red" : "0xFF" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_main_button_bg3.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "216", 9 | "green" : "255", 10 | "red" : "255" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "216", 27 | "green" : "255", 28 | "red" : "255" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_text.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0xFF", 9 | "green" : "0xFF", 10 | "red" : "0xFF" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0xFF", 27 | "green" : "0xFF", 28 | "red" : "0xFF" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- 
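
Editor's note: the component values in these color sets are written in two notations — hex strings such as "0xFD"/"0xB5"/"0x28" (clr_main_button_bg1, clr_main_button_bg2, clr_text) and plain decimal strings such as "216"/"255" (clr_main_button_bg3, clr_toast_bg). Asset catalogs accept both; the sketch below (not repository code, and the function name is hypothetical) shows how either form maps to the same normalized channel value.

```swift
import Foundation

/// Normalizes an asset-catalog sRGB component string to the 0...1 range.
/// Handles the "0xNN" hex form and the 0–255 decimal form used in this catalog;
/// fractional values such as "0.850" (not used here) pass through unchanged.
func normalizedComponent(_ raw: String) -> Double? {
    if raw.lowercased().hasPrefix("0x"),
       let value = UInt8(raw.dropFirst(2), radix: 16) {
        return Double(value) / 255.0
    }
    if let value = Double(raw) {
        return value > 1.0 ? value / 255.0 : value
    }
    return nil
}

// "0xFD" and "253" both resolve to roughly 0.992:
// normalizedComponent("0xFD") == normalizedComponent("253")
```
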
/FaceDemo/Assets.xcassets/clr_title_text.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x28", 9 | "green" : "0xB5", 10 | "red" : "0xFD" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x28", 27 | "green" : "0xB5", 28 | "red" : "0xFD" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/clr_toast_bg.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "216", 9 | "green" : "255", 10 | "red" : "255" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "216", 27 | "green" : "255", 28 | "red" : "255" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_close.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_close.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_close 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_close 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_close.imageset/ic_close 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_close.imageset/ic_close 1.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_close.imageset/ic_close 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_close.imageset/ic_close 2.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_close.imageset/ic_close.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_close.imageset/ic_close.png 
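
Editor's note: each imageset's Contents.json (img_close above, img_delete/img_edit/img_logo/img_setting/img_switchCamera below) maps its 1x/2x/3x PNGs to a single catalog name; UIKit picks the variant matching the screen scale. The sketch below is illustrative, not repository code — which control actually uses "img_close" in the demo is an assumption.

```swift
import UIKit

func makeCloseButton(target: Any?, action: Selector) -> UIButton {
    let button = UIButton(type: .system)
    // "img_close" is the imageset name, not a file name; no extension or
    // scale suffix is needed — UIImage(named:) resolves those from the catalog.
    button.setImage(UIImage(named: "img_close"), for: .normal)
    button.addTarget(target, action: action, for: .touchUpInside)
    return button
}
```
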
-------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_delete.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_delete.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_delete 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_delete 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_delete.imageset/ic_delete 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_delete.imageset/ic_delete 1.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_delete.imageset/ic_delete 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_delete.imageset/ic_delete 2.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_delete.imageset/ic_delete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_delete.imageset/ic_delete.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_edit.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_edit.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_edit 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_edit 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_edit.imageset/ic_edit 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_edit.imageset/ic_edit 1.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_edit.imageset/ic_edit 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_edit.imageset/ic_edit 2.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_edit.imageset/ic_edit.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_edit.imageset/ic_edit.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_logo.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "Recognito_white_back_url.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "Recognito_white_back_url 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "Recognito_white_back_url 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_logo.imageset/Recognito_white_back_url 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_logo.imageset/Recognito_white_back_url 1.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_logo.imageset/Recognito_white_back_url 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_logo.imageset/Recognito_white_back_url 2.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_logo.imageset/Recognito_white_back_url.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_logo.imageset/Recognito_white_back_url.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_setting.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_setting.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_setting 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_setting 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_setting.imageset/ic_setting 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_setting.imageset/ic_setting 1.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_setting.imageset/ic_setting 2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_setting.imageset/ic_setting 2.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_setting.imageset/ic_setting.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_setting.imageset/ic_setting.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_switchCamera.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_camera_flip.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_camera_flip 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_camera_flip 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_switchCamera.imageset/ic_camera_flip 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_switchCamera.imageset/ic_camera_flip 1.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_switchCamera.imageset/ic_camera_flip 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_switchCamera.imageset/ic_camera_flip 2.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/img_switchCamera.imageset/ic_camera_flip.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/FaceDemo/Assets.xcassets/img_switchCamera.imageset/ic_camera_flip.png -------------------------------------------------------------------------------- /FaceDemo/Assets.xcassets/txt_clr_third.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x2B", 9 | "green" : "0x19", 10 | "red" : "0x1D" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x2B", 27 | "green" : "0x19", 28 | "red" : "0x1D" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- 
/FaceDemo/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /FaceDemo/CameraViewController.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | import CoreData 4 | 5 | class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate{ 6 | 7 | @IBOutlet weak var cameraView: UIView! 8 | @IBOutlet weak var faceView: FaceView! 9 | @IBOutlet weak var resultView: UIView! 10 | 11 | 12 | @IBOutlet weak var enrolledView: UIView! 13 | @IBOutlet weak var enrolledImage: UIImageView! 14 | @IBOutlet weak var identifiedView: UIView! 15 | @IBOutlet weak var identifiedImage: UIImageView! 16 | 17 | @IBOutlet weak var enrolledNameLbl: UILabel! 18 | @IBOutlet weak var livenessLbl: UILabel! 19 | @IBOutlet weak var yawLbl: UILabel! 20 | @IBOutlet weak var rollLbl: UILabel! 21 | @IBOutlet weak var pitchLbl: UILabel! 22 | 23 | @IBOutlet weak var similarityScoreLbl: UILabel! 24 | @IBOutlet weak var similarityView: CircularProgressView! 25 | 26 | 27 | var session = AVCaptureSession() 28 | var recognized = false 29 | 30 | var cameraLens_val:AVCaptureDevice.Position = .front 31 | var livenessThreshold = Float(0) 32 | var matchingThreshold = Float(0) 33 | 34 | lazy var persistentContainer: NSPersistentContainer = { 35 | let container = NSPersistentContainer(name: ViewController.CORE_DATA_NAME) 36 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 37 | if let error = error as NSError? { 38 | fatalError("Unresolved error \(error), \(error.userInfo)") 39 | } 40 | }) 41 | return container 42 | }() 43 | 44 | override func viewDidLoad() { 45 | super.viewDidLoad() 46 | // Do any additional setup after loading the view. 
47 | 48 | cameraView.translatesAutoresizingMaskIntoConstraints = true 49 | cameraView.frame = view.bounds 50 | 51 | faceView.translatesAutoresizingMaskIntoConstraints = true 52 | faceView.frame = view.bounds 53 | 54 | resultView.translatesAutoresizingMaskIntoConstraints = true 55 | resultView.frame = view.bounds 56 | 57 | let defaults = UserDefaults.standard 58 | cameraLens_val = .front 59 | livenessThreshold = defaults.float(forKey: "liveness_threshold") 60 | matchingThreshold = defaults.float(forKey: "matching_threshold") 61 | 62 | 63 | self.startCamera(cameraLens: AVCaptureDevice.Position.front) 64 | } 65 | 66 | func startCamera(cameraLens: AVCaptureDevice.Position) { 67 | // Create an AVCaptureDevice for the camera 68 | guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraLens) else { 69 | print("Failed to get video device for camera position: \(cameraLens)") 70 | return 71 | } 72 | 73 | do { 74 | // Create an AVCaptureDeviceInput 75 | let input = try AVCaptureDeviceInput(device: videoDevice) 76 | 77 | // Configure the session with the input 78 | session.beginConfiguration() 79 | if session.canAddInput(input) { 80 | session.addInput(input) 81 | } else { 82 | print("Failed to add input device to session") 83 | session.commitConfiguration() 84 | return 85 | } 86 | 87 | // Create an AVCaptureVideoDataOutput 88 | let videoOutput = AVCaptureVideoDataOutput() 89 | 90 | // Set the video output's delegate and queue for processing video frames 91 | videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global(qos: .default)) 92 | 93 | // Add the video output to the session 94 | session.addOutput(videoOutput) 95 | 96 | // Configure preview layer 97 | let previewLayer = AVCaptureVideoPreviewLayer(session: session) 98 | previewLayer.videoGravity = .resizeAspectFill 99 | previewLayer.frame = cameraView.bounds 100 | 101 | // Add preview layer to camera view 102 | cameraView.layer.addSublayer(previewLayer) 103 | 104 | // Start the session 105 | session.commitConfiguration() 106 | DispatchQueue.global(qos: .background).async { 107 | self.session.startRunning() 108 | } 109 | } catch { 110 | print("Error setting up camera: \(error.localizedDescription)") 111 | } 112 | } 113 | 114 | func stopCamera() { 115 | // Stop the session 116 | session.stopRunning() 117 | 118 | // Remove the preview layer from the view 119 | for layer in cameraView.layer.sublayers ?? [] { 120 | if layer is AVCaptureVideoPreviewLayer { 121 | layer.removeFromSuperlayer() 122 | } 123 | } 124 | } 125 | 126 | @IBAction func switchCamera_clicked(_ sender: Any) { 127 | guard let currentInput = session.inputs.first as? AVCaptureDeviceInput else { 128 | return 129 | } 130 | 131 | let currentDevice = currentInput.device 132 | let newCameraPosition: AVCaptureDevice.Position = (currentDevice.position == .front) ? 
.back : .front 133 | cameraLens_val = newCameraPosition 134 | 135 | do { 136 | let newVideoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: newCameraPosition) 137 | 138 | guard let newDevice = newVideoDevice else { 139 | print("Failed to get new video device for \(newCameraPosition)") 140 | return 141 | } 142 | 143 | let newInput = try AVCaptureDeviceInput(device: newDevice) 144 | 145 | session.beginConfiguration() 146 | 147 | if let currentInput = session.inputs.first { 148 | session.removeInput(currentInput) 149 | } 150 | 151 | if session.canAddInput(newInput) { 152 | session.addInput(newInput) 153 | } else { 154 | print("Failed to add new input device to session") 155 | session.commitConfiguration() 156 | return 157 | } 158 | 159 | session.commitConfiguration() 160 | 161 | print("Switched camera to \(newCameraPosition)") 162 | } catch { 163 | print("Error switching camera: \(error.localizedDescription)") 164 | } 165 | } 166 | 167 | 168 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 169 | 170 | if(recognized == true) { 171 | return 172 | } 173 | 174 | guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 175 | 176 | CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 177 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer) 178 | 179 | let context = CIContext() 180 | let cgImage = context.createCGImage(ciImage, from: ciImage.extent) 181 | let image = UIImage(cgImage: cgImage!) 182 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 183 | 184 | // Rotate and flip the image 185 | var capturedImage = image.rotate(radians: .pi/2) 186 | if(cameraLens_val == .front) { 187 | capturedImage = capturedImage.flipHorizontally() 188 | } 189 | 190 | let faceBoxes = FaceSDK.faceDetection(capturedImage) 191 | 192 | DispatchQueue.main.sync { 193 | self.faceView.setFrameSize(frameSize: capturedImage.size) 194 | self.faceView.setFaceBoxes(faceBoxes: faceBoxes) 195 | } 196 | 197 | if(faceBoxes.count > 0) { 198 | 199 | let faceBox = faceBoxes[0] as! FaceBox 200 | if(faceBox.liveness > livenessThreshold) { 201 | 202 | let templates = FaceSDK.templateExtraction(capturedImage, faceBox: faceBox) 203 | 204 | var maxSimilarity = Float(0) 205 | var maxSimilarityName = "" 206 | var maxSimilarityFace: Data? = nil 207 | 208 | let context = self.persistentContainer.viewContext 209 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 210 | 211 | do { 212 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 213 | for person in persons { 214 | 215 | let personTemplates = person.value(forKey: ViewController.ATTRIBUTE_TEMPLATES) as! Data 216 | 217 | let similarity = FaceSDK.similarityCalculation(templates, templates2: personTemplates) 218 | 219 | if(maxSimilarity < similarity) { 220 | maxSimilarity = similarity 221 | maxSimilarityName = person.value(forKey: ViewController.ATTRIBUTE_NAME) as! String 222 | maxSimilarityFace = person.value(forKey: ViewController.ATTRIBUTE_FACE) as? Data 223 | } 224 | } 225 | } catch { 226 | print("Failed fetching: \(error)") 227 | } 228 | 229 | if(maxSimilarity > matchingThreshold) { 230 | let enrolledFaceImage = UIImage(data: maxSimilarityFace!) 
231 | let identifiedFaceImage = capturedImage.cropFace(faceBox: faceBox) 232 | 233 | recognized = true 234 | 235 | DispatchQueue.main.sync { 236 | self.enrolledImage.image = enrolledFaceImage 237 | self.identifiedImage.image = identifiedFaceImage 238 | enrolledNameLbl.text = maxSimilarityName 239 | similarityScoreLbl.text = String(format: "%.01f", maxSimilarity*100) + "%" 240 | 241 | similarityView.setProgressColor = UIColor(named: "clr_main_button_bg1")! 242 | similarityView.setTrackColor = UIColor(named: "clr_main_button_bg3")! 243 | similarityView.setProgressWithAnimation(duration: 0.4, value: maxSimilarity) 244 | 245 | self.livenessLbl.text = String(format: "%.04f", faceBox.liveness) 246 | self.yawLbl.text = String(format: "%.04f", faceBox.yaw) 247 | self.rollLbl.text = String(format: "%.04f", faceBox.roll) 248 | self.pitchLbl.text = String(format: "%.04f", faceBox.pitch) 249 | 250 | enrolledView.layer.cornerRadius = enrolledView.frame.size.width/2 251 | enrolledImage.layer.cornerRadius = enrolledImage.frame.size.width/2 252 | identifiedView.layer.cornerRadius = identifiedView.frame.size.width/2 253 | identifiedImage.layer.cornerRadius = identifiedImage.frame.size.width/2 254 | self.resultView.showView(isHidden_: true) 255 | } 256 | } 257 | } 258 | } 259 | } 260 | 261 | @IBAction func done_clicked(_ sender: Any) { 262 | self.resultView.showView(isHidden_: false) 263 | recognized = false 264 | } 265 | 266 | } 267 | 268 | extension UIView { 269 | 270 | func showView(isHidden_: Bool) { 271 | 272 | if isHidden_ { 273 | UIView.animate(withDuration: 0.3, animations: { 274 | self.alpha = 1.0 275 | }, completion: {_ in 276 | self.isHidden = false 277 | }) 278 | } else { 279 | UIView.animate(withDuration: 0.3, animations: { 280 | self.alpha = 0.0 281 | }, completion: {_ in 282 | self.isHidden = true 283 | }) 284 | } 285 | } 286 | } 287 | 288 | -------------------------------------------------------------------------------- /FaceDemo/CircularProgressView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CircularProgressView.swift 3 | // 4 | // Created by user on 4/1/24. 5 | // 6 | 7 | import Foundation 8 | import UIKit 9 | import QuartzCore 10 | class CircularProgressView: UIView { 11 | 12 | private var progressLayer = CAShapeLayer() 13 | private var tracklayer = CAShapeLayer() 14 | private var frameSize = CGSize() 15 | 16 | override init(frame: CGRect) { 17 | super.init(frame: frame) 18 | self.configureProgressViewToBeCircular() 19 | } 20 | 21 | override func layoutSubviews() { 22 | super.layoutSubviews() 23 | 24 | // Recalculate the path for drawing the circular progress view based on the current frame size 25 | progressLayer.path = viewCGPath 26 | tracklayer.path = viewCGPath 27 | } 28 | 29 | required init?(coder aDecoder: NSCoder) { 30 | super.init(coder: aDecoder) 31 | self.configureProgressViewToBeCircular() 32 | } 33 | 34 | var setProgressColor: UIColor = UIColor.red { 35 | didSet { 36 | progressLayer.strokeColor = setProgressColor.cgColor 37 | } 38 | } 39 | 40 | var setTrackColor: UIColor = UIColor.white { 41 | didSet { 42 | tracklayer.strokeColor = setTrackColor.cgColor 43 | } 44 | } 45 | /** 46 | A path that consists of straight and curved line segments that you can render in your custom views. 47 | Meaning our CAShapeLayer will now be drawn on the screen with the path we have specified here 48 | */ 49 | private var viewCGPath: CGPath? 
{ 50 | return UIBezierPath(arcCenter: CGPoint(x: frame.size.width / 2.0, y: frame.size.height / 2.0), 51 | radius: (frame.size.width - 1.5)/2, 52 | startAngle: CGFloat(-0.5 * Double.pi), 53 | endAngle: CGFloat(1.5 * Double.pi), clockwise: true).cgPath 54 | } 55 | 56 | private func configureProgressViewToBeCircular() { 57 | self.drawsView(using: tracklayer, startingPoint: 10.0, ending: 1.0) 58 | self.drawsView(using: progressLayer, startingPoint: 10.0, ending: 0.0) 59 | } 60 | 61 | private func drawsView(using shape: CAShapeLayer, startingPoint: CGFloat, ending: CGFloat) { 62 | self.backgroundColor = UIColor.clear 63 | self.layer.cornerRadius = self.frame.size.width/2.0 64 | 65 | shape.path = self.viewCGPath 66 | shape.fillColor = UIColor.clear.cgColor 67 | shape.strokeColor = setProgressColor.cgColor 68 | shape.lineWidth = startingPoint 69 | shape.strokeEnd = ending 70 | 71 | self.layer.addSublayer(shape) 72 | } 73 | 74 | func setProgressWithAnimation(duration: TimeInterval, value: Float) { 75 | let animation = CABasicAnimation(keyPath: "strokeEnd") 76 | animation.duration = duration 77 | 78 | animation.fromValue = 0 //start animation at point 0 79 | animation.toValue = value //end animation at point specified 80 | animation.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.linear) 81 | progressLayer.strokeEnd = CGFloat(value) 82 | progressLayer.add(animation, forKey: "animateCircle") 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /FaceDemo/FaceRecognition.xcdatamodeld/.xccurrentversion: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | _XCCurrentVersionName 6 | FaceRecognition.xcdatamodel 7 | 8 | 9 | -------------------------------------------------------------------------------- /FaceDemo/FaceRecognition.xcdatamodeld/FaceRecognition.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /FaceDemo/FaceView.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | 3 | class FaceView: UIView { 4 | 5 | var faceBoxes: NSMutableArray? = nil 6 | var frameSize: CGSize? 
7 | 8 | public func setFaceBoxes(faceBoxes: NSMutableArray) { 9 | self.faceBoxes = faceBoxes 10 | setNeedsDisplay() 11 | } 12 | 13 | public func setFrameSize(frameSize: CGSize) { 14 | self.frameSize = frameSize 15 | } 16 | 17 | private func drawBBox(context: CGContext, rect: CGRect) { 18 | let lineLength: CGFloat = 60 // specify your line length 19 | let radius: CGFloat = 30// specify your radius 20 | 21 | // Draw left top corner 22 | context.move(to: CGPoint(x: rect.minX, y: rect.minY + lineLength)) 23 | context.addLine(to: CGPoint(x: rect.minX, y: rect.minY + radius)) 24 | context.addArc(center: CGPoint(x: rect.minX + radius, y: rect.minY + radius), radius: radius, startAngle: .pi, endAngle: -.pi / 2, clockwise: false) 25 | context.addLine(to: CGPoint(x: rect.minX + lineLength, y: rect.minY)) 26 | 27 | // Draw right top corner 28 | context.move(to: CGPoint(x: rect.maxX - lineLength, y: rect.minY)) 29 | context.addLine(to: CGPoint(x: rect.maxX - radius, y: rect.minY)) 30 | context.addArc(center: CGPoint(x: rect.maxX - radius, y: rect.minY + radius), radius: radius, startAngle: -.pi / 2, endAngle: 0, clockwise: false) 31 | context.addLine(to: CGPoint(x: rect.maxX, y: rect.minY + lineLength)) 32 | 33 | // Draw right bottom corner 34 | context.move(to: CGPoint(x: rect.maxX, y: rect.maxY - lineLength)) 35 | context.addLine(to: CGPoint(x: rect.maxX, y: rect.maxY - radius)) 36 | context.addArc(center: CGPoint(x: rect.maxX - radius, y: rect.maxY - radius), radius: radius, startAngle: 0, endAngle: .pi / 2, clockwise: false) 37 | context.addLine(to: CGPoint(x: rect.maxX - lineLength, y: rect.maxY)) 38 | 39 | // Draw left bottom corner 40 | context.move(to: CGPoint(x: rect.minX + lineLength, y: rect.maxY)) 41 | context.addLine(to: CGPoint(x: rect.minX + radius, y: rect.maxY)) 42 | context.addArc(center: CGPoint(x: rect.minX + radius, y: rect.maxY - radius), radius: radius, startAngle: .pi / 2, endAngle: .pi, clockwise: false) 43 | context.addLine(to: CGPoint(x: rect.minX, y: rect.maxY - lineLength)) 44 | } 45 | 46 | // Only override draw() if you perform custom drawing. 47 | // An empty implementation adversely affects performance during animation. 48 | override func draw(_ rect: CGRect) { 49 | 50 | guard let context = UIGraphicsGetCurrentContext() else { 51 | return 52 | } 53 | 54 | let defaults = UserDefaults.standard 55 | let livenessThreshold = defaults.float(forKey: "liveness_threshold") 56 | 57 | if(self.frameSize != nil) { 58 | context.beginPath() 59 | 60 | let x_scale = self.frameSize!.width / self.bounds.width 61 | let y_scale = self.frameSize!.height / self.bounds.height 62 | 63 | for faceBox in (faceBoxes! as NSArray as! 
[FaceBox]) { 64 | var color = UIColor(named: "clr_main_button_bg1") 65 | var string = "REAL " + String(format: "%.3f", faceBox.liveness) 66 | if(faceBox.liveness < livenessThreshold) { 67 | color = UIColor.red 68 | string = "SPOOF " + String(format: "%.3f", faceBox.liveness) 69 | } 70 | 71 | context.setStrokeColor(color!.cgColor) 72 | context.setLineWidth(2.0) 73 | 74 | let scaledRect = CGRect(x: Int(CGFloat(faceBox.x1) / x_scale), y: Int(CGFloat(faceBox.y1) / y_scale), width: Int(CGFloat(faceBox.x2 - faceBox.x1 + 1) / x_scale), height: Int(CGFloat(faceBox.y2 - faceBox.y1 + 1) / y_scale)) 75 | 76 | 77 | let attributes = [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 20), 78 | NSAttributedString.Key.foregroundColor: color] 79 | string.draw(at: CGPoint(x: CGFloat(scaledRect.minX + 5), y: CGFloat(scaledRect.minY - 25)), withAttributes: attributes) 80 | drawBBox(context: context, rect: scaledRect) // Call drawBBox method 81 | context.strokePath() 82 | } 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /FaceDemo/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | UIApplicationSceneManifest 6 | 7 | UIApplicationSupportsMultipleScenes 8 | 9 | UISceneConfigurations 10 | 11 | UIWindowSceneSessionRoleApplication 12 | 13 | 14 | UISceneConfigurationName 15 | Default Configuration 16 | UISceneDelegateClassName 17 | $(PRODUCT_MODULE_NAME).SceneDelegate 18 | UISceneStoryboardFile 19 | Main 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /FaceDemo/Model.xcdatamodeld/Model.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /FaceDemo/PersonViewCell.swift: -------------------------------------------------------------------------------- 1 | 2 | 3 | import UIKit 4 | 5 | protocol PersonViewCellDelegate: AnyObject { 6 | func didPersonDelete(_ cell: UITableViewCell) 7 | func didNameChanged(_ cell: UITableViewCell) 8 | } 9 | 10 | class PersonViewCell: UITableViewCell, UITextViewDelegate { 11 | 12 | @IBOutlet weak var faceImage: UIImageView! 13 | @IBOutlet weak var txtName: UITextView! 14 | 15 | weak var delegate: PersonViewCellDelegate? 16 | var indexPath: IndexPath? 
17 | 18 | override func awakeFromNib() { 19 | super.awakeFromNib() 20 | txtName.isEditable = false 21 | txtName.autocorrectionType = .no 22 | txtName.isSelectable = false 23 | txtName.delegate = self 24 | // Initialization code 25 | } 26 | 27 | override func setSelected(_ selected: Bool, animated: Bool) { 28 | super.setSelected(selected, animated: animated) 29 | 30 | // Configure the view for the selected state 31 | } 32 | 33 | @IBAction func delete_clicked(_ sender: Any) { 34 | delegate?.didPersonDelete(self) 35 | } 36 | 37 | @IBAction func edit_clicked(_ sender: Any) { 38 | txtName.isEditable = true 39 | txtName.becomeFirstResponder() 40 | } 41 | 42 | func textView(_ textView: UITextView, shouldChangeTextIn range: NSRange, replacementText text: String) -> Bool { 43 | if text == "\n" { 44 | textView.resignFirstResponder() 45 | delegate?.didNameChanged(self) 46 | txtName.isEditable = false 47 | txtName.isSelectable = false 48 | return false 49 | } 50 | return true 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /FaceDemo/SceneDelegate.swift: -------------------------------------------------------------------------------- 1 | 2 | 3 | import UIKit 4 | 5 | class SceneDelegate: UIResponder, UIWindowSceneDelegate { 6 | 7 | var window: UIWindow? 8 | 9 | 10 | func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { 11 | // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. 12 | // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. 13 | // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). 14 | guard let _ = (scene as? UIWindowScene) else { return } 15 | } 16 | 17 | func sceneDidDisconnect(_ scene: UIScene) { 18 | // Called as the scene is being released by the system. 19 | // This occurs shortly after the scene enters the background, or when its session is discarded. 20 | // Release any resources associated with this scene that can be re-created the next time the scene connects. 21 | // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead). 22 | } 23 | 24 | func sceneDidBecomeActive(_ scene: UIScene) { 25 | // Called when the scene has moved from an inactive state to an active state. 26 | // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. 27 | } 28 | 29 | func sceneWillResignActive(_ scene: UIScene) { 30 | // Called when the scene will move from an active state to an inactive state. 31 | // This may occur due to temporary interruptions (ex. an incoming phone call). 32 | } 33 | 34 | func sceneWillEnterForeground(_ scene: UIScene) { 35 | // Called as the scene transitions from the background to the foreground. 36 | // Use this method to undo the changes made on entering the background. 37 | } 38 | 39 | func sceneDidEnterBackground(_ scene: UIScene) { 40 | // Called as the scene transitions from the foreground to the background. 41 | // Use this method to save data, release shared resources, and store enough scene-specific state information 42 | // to restore the scene back to its current state. 43 | 44 | // Save changes in the application's managed object context when the application transitions to the background. 45 | (UIApplication.shared.delegate as? 
AppDelegate)?.saveContext() 46 | } 47 | 48 | 49 | } 50 | 51 | -------------------------------------------------------------------------------- /FaceDemo/SettingsViewController.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | import CoreData 4 | 5 | class SettingsViewController: UIViewController{ 6 | 7 | static let LIVENESS_THRESHOLD_DEFAULT = Float(0.7) 8 | static let MATCHING_THRESHOLD_DEFAULT = Float(0.8) 9 | 10 | 11 | @IBOutlet weak var livenessThresholdLbl: UILabel! 12 | @IBOutlet weak var matchingThresholdLbl: UILabel! 13 | 14 | lazy var persistentContainer: NSPersistentContainer = { 15 | let container = NSPersistentContainer(name: ViewController.CORE_DATA_NAME) 16 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 17 | if let error = error as NSError? { 18 | fatalError("Unresolved error \(error), \(error.userInfo)") 19 | } 20 | }) 21 | return container 22 | }() 23 | 24 | override func viewDidLoad() { 25 | super.viewDidLoad() 26 | // Do any additional setup after loading the view. 27 | 28 | let defaults = UserDefaults.standard 29 | 30 | let livenessThreshold = defaults.float(forKey: "liveness_threshold") 31 | livenessThresholdLbl.text = String(livenessThreshold) 32 | 33 | let matchingThreshold = defaults.float(forKey: "matching_threshold") 34 | matchingThresholdLbl.text = String(matchingThreshold) 35 | } 36 | 37 | static func setDefaultSettings() { 38 | let defaults = UserDefaults.standard 39 | let defaultChanged = defaults.bool(forKey: "default_changed") 40 | if(defaultChanged == false) { 41 | defaults.set(true, forKey: "default_changed") 42 | 43 | defaults.set(SettingsViewController.LIVENESS_THRESHOLD_DEFAULT, forKey: "liveness_threshold") 44 | defaults.set(SettingsViewController.MATCHING_THRESHOLD_DEFAULT, forKey: "matching_threshold") 45 | } 46 | } 47 | 48 | @IBAction func done_clicked(_ sender: Any) { 49 | if let vc = self.presentingViewController as? ViewController { 50 | self.dismiss(animated: true, completion: { 51 | vc.personView.reloadData() 52 | }) 53 | } 54 | } 55 | 56 | @IBAction func livenessThreshold_clicked(_ sender: Any) { 57 | 58 | let title = "Liveness threshold" 59 | let alertController = UIAlertController(title: title, message: "Please input a number between 0 and 1.", preferredStyle: .alert) 60 | 61 | let minimum = Float(0) 62 | let maximum = Float(1) 63 | alertController.addTextField { (textField) in 64 | textField.keyboardType = .decimalPad 65 | 66 | let defaults = UserDefaults.standard 67 | textField.text = String(defaults.float(forKey: "liveness_threshold")) 68 | } 69 | 70 | let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: nil) 71 | 72 | let submitAction = UIAlertAction(title: "Ok", style: .default) { (action) in 73 | 74 | var hasError = false 75 | var errorStr = "" 76 | let defaults = UserDefaults.standard 77 | 78 | if let numberString = alertController.textFields?.first?.text, let number = Float(numberString) { 79 | if(number < Float(minimum) || number > Float(maximum)) { 80 | hasError = true 81 | errorStr = "Setting failed!" 82 | } else { 83 | self.livenessThresholdLbl.text = String(number) 84 | defaults.set(number, forKey: "liveness_threshold") 85 | } 86 | } else { 87 | hasError = true 88 | errorStr = "Setting failed!" 
89 | } 90 | 91 | if(hasError) { 92 | let errorNotification = UIAlertController(title: "Error", message: errorStr, preferredStyle: .alert) 93 | let okAction = UIAlertAction(title: "OK", style: .default, handler: nil) 94 | errorNotification.addAction(okAction) 95 | self.present(errorNotification, animated: true, completion: nil) 96 | 97 | DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { 98 | errorNotification.dismiss(animated: true, completion: nil) 99 | } 100 | } 101 | } 102 | 103 | alertController.addAction(cancelAction) 104 | alertController.addAction(submitAction) 105 | 106 | present(alertController, animated: true, completion: nil) 107 | } 108 | 109 | @IBAction func matchingThreshold_clicked(_ sender: Any) { 110 | 111 | let title = "Matching threshold" 112 | let alertController = UIAlertController(title: title, message: "Please input a number between 0 and 1.", preferredStyle: .alert) 113 | 114 | let minimum = Float(0) 115 | let maximum = Float(1) 116 | alertController.addTextField { (textField) in 117 | textField.keyboardType = .decimalPad 118 | 119 | let defaults = UserDefaults.standard 120 | textField.text = String(defaults.float(forKey: "matching_threshold")) 121 | } 122 | 123 | let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: nil) 124 | 125 | let submitAction = UIAlertAction(title: "Ok", style: .default) { (action) in 126 | 127 | var hasError = false 128 | var errorStr = "" 129 | let defaults = UserDefaults.standard 130 | 131 | if let numberString = alertController.textFields?.first?.text, let number = Float(numberString) { 132 | if(number < Float(minimum) || number > Float(maximum)) { 133 | hasError = true 134 | errorStr = "Setting failed!" 135 | } else { 136 | self.matchingThresholdLbl.text = String(number) 137 | defaults.set(number, forKey: "matching_threshold") 138 | } 139 | } else { 140 | hasError = true 141 | errorStr = "Setting failed!" 
142 | } 143 | 144 | if(hasError) { 145 | let errorNotification = UIAlertController(title: "Error", message: errorStr, preferredStyle: .alert) 146 | let okAction = UIAlertAction(title: "OK", style: .default, handler: nil) 147 | errorNotification.addAction(okAction) 148 | self.present(errorNotification, animated: true, completion: nil) 149 | 150 | DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { 151 | errorNotification.dismiss(animated: true, completion: nil) 152 | } 153 | } 154 | } 155 | 156 | alertController.addAction(cancelAction) 157 | alertController.addAction(submitAction) 158 | 159 | present(alertController, animated: true, completion: nil) 160 | } 161 | 162 | 163 | @IBAction func restore_settings_clicked(_ sender: Any) { 164 | let alertController = UIAlertController(title: "Confirm", message: "Are you sure you want to reset all settings?", preferredStyle: .alert) 165 | 166 | let yesAction = UIAlertAction(title: "Yes", style: .default) { _ in 167 | // Code to execute when "Yes" is tapped 168 | let defaults = UserDefaults.standard 169 | defaults.set(false, forKey: "default_changed") 170 | 171 | SettingsViewController.setDefaultSettings() 172 | self.viewDidLoad() 173 | showToast(message: "Reset to default settings") 174 | } 175 | let noAction = UIAlertAction(title: "No", style: .cancel) { _ in 176 | // Code to execute when "No" is tapped 177 | print("User tapped No") 178 | } 179 | 180 | alertController.addAction(yesAction) 181 | alertController.addAction(noAction) 182 | 183 | present(alertController, animated: true, completion: nil) 184 | } 185 | 186 | 187 | @IBAction func clear_all_person_clicked(_ sender: Any) { 188 | 189 | let alertController = UIAlertController(title: "Confirm", message: "Are you sure you want to remove all users?", preferredStyle: .alert) 190 | 191 | let yesAction = UIAlertAction(title: "Yes", style: .default) { _ in 192 | 193 | let context = self.persistentContainer.viewContext 194 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 195 | 196 | do { 197 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 198 | for person in persons { 199 | context.delete(person) 200 | } 201 | try context.save() 202 | } catch { 203 | print("Failed fetching: \(error)") 204 | } 205 | 206 | showToast(message: "Removed all users") 207 | } 208 | let noAction = UIAlertAction(title: "No", style: .cancel) { _ in 209 | // Code to execute when "No" is tapped 210 | print("User tapped No") 211 | } 212 | 213 | alertController.addAction(yesAction) 214 | alertController.addAction(noAction) 215 | 216 | present(alertController, animated: true, completion: nil) 217 | } 218 | 219 | @IBAction func gotoSite(_ sender: Any) { 220 | let telegramURL = URL(string: "https://recognito.vision")! 
221 | UIApplication.shared.open(telegramURL, options: [:], completionHandler: nil) 222 | } 223 | } 224 | 225 | -------------------------------------------------------------------------------- /FaceDemo/ToastView.swift: -------------------------------------------------------------------------------- 1 | 2 | import UIKit 3 | 4 | class ToastView: UIView { 5 | private let messageLabel: UILabel = UILabel() 6 | 7 | init(message: String) { 8 | super.init(frame: .zero) 9 | configureUI() 10 | setMessage(message) 11 | } 12 | 13 | required init?(coder aDecoder: NSCoder) { 14 | fatalError("init(coder:) has not been implemented") 15 | } 16 | 17 | private func configureUI() { 18 | backgroundColor = UIColor(named: "clr_toast_bg") 19 | layer.cornerRadius = 25 20 | clipsToBounds = true 21 | 22 | messageLabel.textColor = UIColor(named: "txt_clr_third") 23 | messageLabel.font = UIFont.systemFont(ofSize: 14) 24 | messageLabel.numberOfLines = 0 25 | messageLabel.textAlignment = .center 26 | addSubview(messageLabel) 27 | messageLabel.translatesAutoresizingMaskIntoConstraints = false 28 | NSLayoutConstraint.activate([ 29 | messageLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 16), 30 | messageLabel.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -16), 31 | messageLabel.topAnchor.constraint(equalTo: topAnchor, constant: 16), 32 | messageLabel.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -16) 33 | ]) 34 | } 35 | 36 | private func setMessage(_ message: String) { 37 | messageLabel.text = message 38 | } 39 | } 40 | 41 | func showToast(message: String, duration: TimeInterval = 2.0) { 42 | let toastView = ToastView(message: message) 43 | if let window = UIApplication.shared.windows.first { 44 | window.addSubview(toastView) 45 | toastView.translatesAutoresizingMaskIntoConstraints = false 46 | NSLayoutConstraint.activate([ 47 | toastView.centerXAnchor.constraint(equalTo: window.centerXAnchor), 48 | toastView.bottomAnchor.constraint(equalTo: window.bottomAnchor, constant: -50), 49 | toastView.leadingAnchor.constraint(greaterThanOrEqualTo: window.leadingAnchor, constant: 32), 50 | toastView.trailingAnchor.constraint(lessThanOrEqualTo: window.trailingAnchor, constant: -32), 51 | toastView.widthAnchor.constraint(greaterThanOrEqualToConstant: 300) 52 | ]) 53 | 54 | UIView.animate(withDuration: 0.2, delay: duration, options: .curveEaseInOut) { 55 | toastView.alpha = 0 56 | } completion: { _ in 57 | toastView.removeFromSuperview() 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /FaceDemo/UIImageExtension.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | 5 | public extension UIImage { 6 | 7 | func cropFace(faceBox: FaceBox) -> UIImage? 
{ 8 | let centerX = Int((faceBox.x1 + faceBox.x2) / 2) 9 | let centerY = Int((faceBox.y1 + faceBox.y2) / 2) 10 | let cropWidth = Int(Float(faceBox.x2 - faceBox.x1) * Float(1.4)) 11 | 12 | let cropX1 = Int(Float(centerX) - Float(cropWidth / 2)) 13 | let cropX2 = Int(Float(centerY) - Float(cropWidth / 2)) 14 | let cropRect = CGRect(x: CGFloat(cropX1), y: CGFloat(cropX2), width: CGFloat(cropWidth), height: CGFloat(cropWidth)) 15 | 16 | guard let croppedImage = self.cgImage!.cropping(to: cropRect) else { return nil } 17 | 18 | let faceImage = UIImage(cgImage: croppedImage) 19 | 20 | let renderer = UIGraphicsImageRenderer(size: CGSize(width: 150, height: 150)) 21 | let newImage = renderer.image { (context) in 22 | faceImage.draw(in: CGRect(origin: .zero, size: CGSize(width: 150, height: 150))) 23 | } 24 | return newImage 25 | } 26 | 27 | /// Extension to fix orientation of an UIImage without EXIF 28 | func fixOrientation() -> UIImage { 29 | 30 | guard let cgImage = cgImage else { return self } 31 | 32 | if imageOrientation == .up { return self } 33 | 34 | var transform = CGAffineTransform.identity 35 | 36 | switch imageOrientation { 37 | 38 | case .down, .downMirrored: 39 | transform = transform.translatedBy(x: size.width, y: size.height) 40 | transform = transform.rotated(by: CGFloat(Double.pi)) 41 | 42 | case .left, .leftMirrored: 43 | transform = transform.translatedBy(x: size.width, y: 0) 44 | transform = transform.rotated(by: CGFloat(Double.pi/2)) 45 | 46 | case .right, .rightMirrored: 47 | transform = transform.translatedBy(x: 0, y : size.height) 48 | transform = transform.rotated(by: CGFloat(-Double.pi/2)) 49 | 50 | case .up, .upMirrored: 51 | break 52 | } 53 | 54 | switch imageOrientation { 55 | 56 | case .upMirrored, .downMirrored: 57 | transform.translatedBy(x: size.width, y: 0) 58 | transform.scaledBy(x: -1, y: 1) 59 | 60 | case .leftMirrored, .rightMirrored: 61 | transform.translatedBy(x: size.height, y: 0) 62 | transform.scaledBy(x: -1, y: 1) 63 | 64 | case .up, .down, .left, .right: 65 | break 66 | } 67 | 68 | if let ctx = CGContext(data: nil, width: Int(size.width), height: Int(size.height), bitsPerComponent: cgImage.bitsPerComponent, bytesPerRow: 0, space: cgImage.colorSpace!, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) { 69 | 70 | ctx.concatenate(transform) 71 | 72 | switch imageOrientation { 73 | 74 | case .left, .leftMirrored, .right, .rightMirrored: 75 | ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.height, height: size.width)) 76 | 77 | default: 78 | ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.width, height: size.height)) 79 | } 80 | 81 | if let finalImage = ctx.makeImage() { 82 | return (UIImage(cgImage: finalImage)) 83 | } 84 | } 85 | 86 | // something failed -- return original 87 | return self 88 | } 89 | 90 | func rotate(radians: CGFloat) -> UIImage { 91 | let rotatedSize = CGRect(origin: .zero, size: size) 92 | .applying(CGAffineTransform(rotationAngle: radians)) 93 | .integral.size 94 | UIGraphicsBeginImageContext(rotatedSize) 95 | if let context = UIGraphicsGetCurrentContext() { 96 | context.translateBy(x: rotatedSize.width / 2, y: rotatedSize.height / 2) 97 | context.rotate(by: radians) 98 | draw(in: CGRect(x: -size.width / 2, y: -size.height / 2, width: size.width, height: size.height)) 99 | let rotatedImage = UIGraphicsGetImageFromCurrentImageContext() 100 | UIGraphicsEndImageContext() 101 | return rotatedImage ?? 
self 102 | } 103 | return self 104 | } 105 | 106 | // Extension to flip UIImage horizontally 107 | func flipHorizontally() -> UIImage { 108 | UIGraphicsBeginImageContextWithOptions(size, false, scale) 109 | let context = UIGraphicsGetCurrentContext()! 110 | context.translateBy(x: size.width, y: 0) 111 | context.scaleBy(x: -1.0, y: 1.0) 112 | draw(in: CGRect(origin: .zero, size: size)) 113 | let flippedImage = UIGraphicsGetImageFromCurrentImageContext()! 114 | UIGraphicsEndImageContext() 115 | return flippedImage 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /FaceDemo/ViewController.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | import CoreData 4 | 5 | class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, UITableViewDataSource, UITableViewDelegate, PersonViewCellDelegate{ 6 | 7 | static let CORE_DATA_NAME = "Model" 8 | static let ENTITIES_NAME = "Person" 9 | static let ATTRIBUTE_NAME = "name" 10 | static let ATTRIBUTE_FACE = "face" 11 | static let ATTRIBUTE_TEMPLATES = "templates" 12 | 13 | @IBOutlet weak var warningLbl: UILabel! 14 | 15 | @IBOutlet weak var enrollBtnView: UIView! 16 | @IBOutlet weak var identifyBtnView: UIView! 17 | 18 | @IBOutlet weak var personView: UITableView! 19 | 20 | 21 | lazy var persistentContainer: NSPersistentContainer = { 22 | let container = NSPersistentContainer(name: ViewController.CORE_DATA_NAME) 23 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 24 | if let error = error as NSError? { 25 | fatalError("Unresolved error \(error), \(error.userInfo)") 26 | } 27 | }) 28 | return container 29 | }() 30 | 31 | 32 | override func viewDidLoad() { 33 | super.viewDidLoad() 34 | // Do any additional setup after loading the view. 35 | var ret = SDK_LICENSE_KEY_ERROR.rawValue 36 | 37 | if let filePath = Bundle.main.path(forResource: "license", ofType: "txt") { 38 | do { 39 | let license = try String(contentsOfFile: filePath, encoding: .utf8) 40 | ret = FaceSDK.setActivation(license) 41 | } catch { 42 | print("Error reading file: \(error)") 43 | } 44 | } else { 45 | print("File not found") 46 | } 47 | 48 | if(ret == SDK_SUCCESS.rawValue) { 49 | ret = FaceSDK.initSDK() 50 | } 51 | 52 | if(ret != SDK_SUCCESS.rawValue) { 53 | warningLbl.isHidden = false 54 | 55 | if(ret == SDK_LICENSE_KEY_ERROR.rawValue) { 56 | warningLbl.text = "License key error!" 57 | } else if(ret == SDK_LICENSE_APPID_ERROR.rawValue) { 58 | warningLbl.text = "App ID error!" 59 | } else if(ret == SDK_LICENSE_EXPIRED.rawValue) { 60 | warningLbl.text = "License key expired!" 61 | } else if(ret == SDK_NO_ACTIVATED.rawValue) { 62 | warningLbl.text = "Activation failed!" 63 | } else if(ret == SDK_INIT_ERROR.rawValue) { 64 | warningLbl.text = "Engine init error!" 
65 | } 66 | } 67 | 68 | SettingsViewController.setDefaultSettings() 69 | 70 | personView.delegate = self 71 | personView.dataSource = self 72 | personView.separatorStyle = .none 73 | personView.reloadData() 74 | 75 | } 76 | 77 | 78 | @IBAction func enroll_touch_down(_ sender: Any) { 79 | UIView.animate(withDuration: 0.5) { 80 | self.enrollBtnView.backgroundColor = UIColor(named: "clr_main_button_bg2") // Change to desired color 81 | } 82 | } 83 | 84 | @IBAction func enroll_touch_cancel(_ sender: Any) { 85 | UIView.animate(withDuration: 0.5) { 86 | self.enrollBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 87 | } 88 | } 89 | 90 | @IBAction func enroll_clicked(_ sender: Any) { 91 | UIView.animate(withDuration: 0.5) { 92 | self.enrollBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 93 | } 94 | 95 | let imagePicker = UIImagePickerController() 96 | imagePicker.sourceType = .photoLibrary 97 | imagePicker.delegate = self 98 | present(imagePicker, animated: true, completion: nil) 99 | } 100 | 101 | 102 | @IBAction func identify_touch_down(_ sender: Any) { 103 | UIView.animate(withDuration: 0.5) { 104 | self.identifyBtnView.backgroundColor = UIColor(named: "clr_main_button_bg2") // Change to desired color 105 | } 106 | } 107 | 108 | @IBAction func identify_touch_up(_ sender: Any) { 109 | UIView.animate(withDuration: 0.5) { 110 | self.identifyBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 111 | } 112 | } 113 | 114 | @IBAction func identify_clicked(_ sender: Any) { 115 | UIView.animate(withDuration: 0.5) { 116 | self.identifyBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 117 | } 118 | 119 | performSegue(withIdentifier: "camera", sender: self) 120 | } 121 | 122 | func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) { 123 | 124 | dismiss(animated: true, completion: nil) 125 | 126 | guard let image = info[.originalImage] as? UIImage else { 127 | return 128 | } 129 | 130 | let fixed_image = image.fixOrientation() 131 | let faceBoxes = FaceSDK.faceDetection(fixed_image) 132 | if(faceBoxes.count == 0) { 133 | showToast(message: "No face!") 134 | return 135 | } else if(faceBoxes.count > 1) { 136 | showToast(message: "Multiple faces detected!") 137 | } 138 | 139 | for faceBox in (faceBoxes as NSArray as! [FaceBox]) { 140 | 141 | let templates = FaceSDK.templateExtraction(fixed_image, faceBox: faceBox) 142 | if(templates.isEmpty) { 143 | continue 144 | } 145 | 146 | let faceImage = fixed_image.cropFace(faceBox: faceBox) 147 | 148 | let context = self.persistentContainer.viewContext 149 | let entity = NSEntityDescription.entity(forEntityName: ViewController.ENTITIES_NAME, in: context)! 
150 | let user = NSManagedObject(entity: entity, insertInto: context) 151 | 152 | 153 | let currentDate = Date() 154 | let calendar = Calendar.current 155 | let year = calendar.component(.year, from: currentDate) 156 | let month = calendar.component(.month, from: currentDate) 157 | let dayOfMonth = calendar.component(.day, from: currentDate) 158 | let hour = calendar.component(.hour, from: currentDate) 159 | let minute = calendar.component(.minute, from: currentDate) 160 | let second = calendar.component(.second, from: currentDate) 161 | 162 | let name = "User " + String(year) + String(month) + String(dayOfMonth) + String(hour) + String(minute) + String(second) 163 | let face = faceImage!.jpegData(compressionQuality: CGFloat(1.0)) 164 | 165 | user.setValue(name, forKey: ViewController.ATTRIBUTE_NAME) 166 | user.setValue(templates, forKey: ViewController.ATTRIBUTE_TEMPLATES) 167 | user.setValue(face, forKey: ViewController.ATTRIBUTE_FACE) 168 | 169 | do { 170 | try context.save() 171 | } catch let error as NSError { 172 | print("Could not save. \(error), \(error.userInfo)") 173 | } 174 | } 175 | 176 | personView.reloadData() 177 | showToast(message: "Registered user successfully") 178 | } 179 | 180 | func imagePickerControllerDidCancel(_ picker: UIImagePickerController) { 181 | dismiss(animated: true, completion: nil) 182 | } 183 | 184 | // UITableViewDataSource methods 185 | func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { 186 | // Return the number of cells in the table view 187 | 188 | let context = self.persistentContainer.viewContext 189 | let count = try! context.count(for: NSFetchRequest(entityName: ViewController.ENTITIES_NAME)) 190 | 191 | return count 192 | } 193 | 194 | func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { 195 | // Get the table view cell for the specified index path 196 | let cell = tableView.dequeueReusableCell(withIdentifier: "PersonCell", for: indexPath) as! PersonViewCell 197 | cell.delegate = self 198 | cell.indexPath = indexPath 199 | 200 | let context = self.persistentContainer.viewContext 201 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 202 | do { 203 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 204 | var rowCount = 0 205 | for person in persons { 206 | if(rowCount == indexPath.row) { 207 | cell.txtName.text = person.value(forKey: ViewController.ATTRIBUTE_NAME) as? String 208 | cell.faceImage.image = UIImage(data: person.value(forKey: ViewController.ATTRIBUTE_FACE) as! Data) 209 | 210 | break 211 | } 212 | rowCount = rowCount + 1 213 | } 214 | } catch { 215 | print("Failed fetching: \(error)") 216 | } 217 | 218 | // Customize the cell 219 | return cell 220 | } 221 | 222 | // UITableViewDelegate methods 223 | func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { 224 | // Handle cell selection 225 | tableView.deselectRow(at: indexPath, animated: true) 226 | } 227 | 228 | func didPersonDelete(_ cell: UITableViewCell) { 229 | let context = self.persistentContainer.viewContext 230 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 231 | let personCell = cell as! 
PersonViewCell 232 | 233 | let message = String(format: "Are you sure you want to remove <%@> user?", personCell.txtName.text) 234 | let alertController = UIAlertController(title: "Warning", message: message, preferredStyle: .alert) 235 | 236 | let yesAction = UIAlertAction(title: "Yes", style: .default) { _ in 237 | // Code to execute when "Yes" is tapped 238 | do { 239 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 240 | var rowCount = 0 241 | for person in persons { 242 | if(rowCount == personCell.indexPath?.row) { 243 | context.delete(person) 244 | try context.save() 245 | break 246 | } 247 | rowCount = rowCount + 1 248 | } 249 | } catch { 250 | print("Failed fetching: \(error)") 251 | } 252 | 253 | self.personView.reloadData() 254 | } 255 | let noAction = UIAlertAction(title: "No", style: .cancel) { _ in 256 | // Code to execute when "No" is tapped 257 | print("User tapped No") 258 | } 259 | 260 | alertController.addAction(yesAction) 261 | alertController.addAction(noAction) 262 | 263 | if let viewController = UIApplication.shared.keyWindow?.rootViewController { 264 | viewController.present(alertController, animated: true, completion: nil) 265 | } 266 | } 267 | 268 | func checkUserExist(name: String) -> Bool { 269 | let context = self.persistentContainer.viewContext 270 | 271 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 272 | fetchRequest.predicate = NSPredicate(format: "\(ViewController.ATTRIBUTE_NAME) == %@", name) 273 | 274 | do { 275 | let existingUsers = try context.fetch(fetchRequest) as! [NSManagedObject] 276 | if existingUsers.isEmpty { 277 | return false 278 | } else { 279 | print("Name already exists in the database") 280 | return true 281 | } 282 | } catch { 283 | print("Failed fetching: \(error)") 284 | } 285 | return false 286 | } 287 | 288 | func didNameChanged(_ cell: UITableViewCell) { 289 | let context = self.persistentContainer.viewContext 290 | guard let personCell = cell as? PersonViewCell else { 291 | return 292 | } 293 | guard let updatedName = personCell.txtName.text else { 294 | return 295 | } 296 | let isExistName = checkUserExist(name: updatedName) 297 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 298 | do { 299 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 300 | var rowCount = 0 301 | for person in persons { 302 | if(rowCount == personCell.indexPath?.row) { 303 | if (updatedName == person.value(forKey: ViewController.ATTRIBUTE_NAME) as! String) { 304 | break 305 | } 306 | 307 | if !isExistName { 308 | print("Updated user name") 309 | person.setValue(updatedName, forKey: ViewController.ATTRIBUTE_NAME) 310 | try context.save() 311 | } else { 312 | personCell.txtName.text = person.value(forKey: ViewController.ATTRIBUTE_NAME) as! String 313 | showToast(message: "Failed! New name already exists in the user list") 314 | } 315 | 316 | break 317 | } 318 | rowCount = rowCount + 1 319 | } 320 | } catch { 321 | print("Failed fetching: \(error)") 322 | } 323 | 324 | self.personView.reloadData() 325 | } 326 | } 327 | 328 | -------------------------------------------------------------------------------- /FaceDemoTests/FaceDemoTests.swift: -------------------------------------------------------------------------------- 1 | 2 | // 3 | // Created by user on 4/17/23. 4 | // 5 | 6 | import XCTest 7 | @testable import FaceDemo 8 | 9 | final class FaceDemoTests: XCTestCase { 10 | 11 | override func setUpWithError() throws { 12 | // Put setup code here. 
This method is called before the invocation of each test method in the class. 13 | } 14 | 15 | override func tearDownWithError() throws { 16 | // Put teardown code here. This method is called after the invocation of each test method in the class. 17 | } 18 | 19 | func testExample() throws { 20 | // This is an example of a functional test case. 21 | // Use XCTAssert and related functions to verify your tests produce the correct results. 22 | // Any test you write for XCTest can be annotated as throws and async. 23 | // Mark your test throws to produce an unexpected failure when your test encounters an uncaught error. 24 | // Mark your test async to allow awaiting for asynchronous code to complete. Check the results with assertions afterwards. 25 | } 26 | 27 | func testPerformanceExample() throws { 28 | // This is an example of a performance test case. 29 | self.measure { 30 | // Put the code you want to measure the time of here. 31 | } 32 | } 33 | 34 | } 35 | -------------------------------------------------------------------------------- /FaceDemoUITests/FaceDemoUITests.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Created by user on 4/17/23. 3 | // 4 | 5 | import XCTest 6 | 7 | final class FaceDemoUITests: XCTestCase { 8 | 9 | override func setUpWithError() throws { 10 | // Put setup code here. This method is called before the invocation of each test method in the class. 11 | 12 | // In UI tests it is usually best to stop immediately when a failure occurs. 13 | continueAfterFailure = false 14 | 15 | // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this. 16 | } 17 | 18 | override func tearDownWithError() throws { 19 | // Put teardown code here. This method is called after the invocation of each test method in the class. 20 | } 21 | 22 | func testExample() throws { 23 | // UI tests must launch the application that they test. 24 | let app = XCUIApplication() 25 | app.launch() 26 | 27 | // Use XCTAssert and related functions to verify your tests produce the correct results. 28 | } 29 | 30 | func testLaunchPerformance() throws { 31 | if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) { 32 | // This measures how long it takes to launch your application. 33 | measure(metrics: [XCTApplicationLaunchMetric()]) { 34 | XCUIApplication().launch() 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceDemoUITests/FaceDemoUITestsLaunchTests.swift: -------------------------------------------------------------------------------- 1 | // Created by user on 4/17/23. 
2 | // 3 | 4 | import XCTest 5 | 6 | final class FaceDemoUITestsLaunchTests: XCTestCase { 7 | 8 | override class var runsForEachTargetApplicationUIConfiguration: Bool { 9 | true 10 | } 11 | 12 | override func setUpWithError() throws { 13 | continueAfterFailure = false 14 | } 15 | 16 | func testLaunch() throws { 17 | let app = XCUIApplication() 18 | app.launch() 19 | 20 | // Insert steps here to perform after app launch but before taking a screenshot, 21 | // such as logging into a test account or navigating somewhere in the app 22 | 23 | let attachment = XCTAttachment(screenshot: app.screenshot()) 24 | attachment.name = "Launch Screen" 25 | attachment.lifetime = .keepAlways 26 | add(attachment) 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | recognito.vision 3 |
4 | 5 | # Face Recognition, Liveness Detection, Pose Estimation iOS SDK Demo 6 |

7 | 8 |

9 |

10 | 11 | Latest NIST FRVT Report 12 | 13 |

14 | 15 | ### 📰 _Recognito Developer News_ 16 | - 1:1 & 1:N [**Windows .NET Demo for Face Recognition, Liveness Detection**](https://github.com/recognito-vision/FaceRecognition-LivenessDetection-CSharp-.Net). 17 | - Global-coverage [**ID Card/Passport OCR Mobile Demo**](https://github.com/recognito-vision/Android-ID-Document-Recognition/tree/main) and [**ID Document Recognition Server Demo**](https://github.com/recognito-vision/Linux-ID-Document-Recognition). 18 | - Try **1:N Face Search** through our [**Face Identification Web Demo**](https://github.com/recognito-vision/Linux-FaceRecognition-FaceLivenessDetection/tree/main/Identification(1%3AN)-Demo). 19 | - Subscribe to our **Free APIs** for your app or website from our [**API Hub**](https://rapidapi.com/organization/recognito). 20 | 21 |
22 | 23 | This repository contains a demonstration of Recognito's face recognition SDK for iOS. 24 | The SDK includes advanced features such as face recognition, liveness detection, and pose estimation. 25 | Recognito's face recognition algorithm has been ranked as the **Top 1 in the NIST FRVT** (Face Recognition Vendor Test). 26 | 27 | See our [**Product List**](https://github.com/recognito-vision/Product-List/) for ID verification products. 28 | 29 | ## RECOGNITO Product Documentation 30 | 31 | 32 | ## Features 33 | - **Face Recognition:** Identify and verify individuals by comparing their facial features. 34 | - **Liveness Detection:** Determine whether a face is live or spoofed to prevent fraud in authentication processes. 35 | - **Pose Estimation:** Estimate the pose of a detected face, including yaw, roll, and pitch. 36 | 37 | ### Additional Features 38 | - **NIST FRVT Top 1 Algorithm:** Utilize the top-ranked face recognition algorithm from the NIST FRVT for accurate and reliable results. 39 | - **On-premise:** Operate entirely within your infrastructure, ensuring data privacy and security. 40 | - **Real-time:** Perform face recognition, liveness detection, and pose estimation with minimal latency. 41 | - **Fully offline:** Function without the need for an internet connection, ensuring reliability and data privacy. 42 | 43 | ## Demo Video 44 | [Watch the demo on YouTube](https://www.youtube.com/watch?v=9HM70PFa4lQ) 45 | 46 | Recognito YouTube Channel: [youtube.com/@recognito-vision](https://www.youtube.com/@recognito-vision) 47 |

48 | Demo screenshots: face recognition, liveness detection iOS demo (snaps 1-6) 54 |

55 | 56 | ## SDK Integration 57 | To use the Recognito SDK in your iOS project, follow these steps: 58 | #### 1. Add `facesdk.framework` into the project 59 | - Copy and add the SDK framework to your iOS project. 60 | (screenshot: facesdk.framework added to the Xcode project)

61 | (screenshot: bridging header setup) 62 | 63 | #### 2. Application License (One-Time License) 64 | - For a trial license, share your Bundle ID. 65 | 66 | (screenshot: Bundle ID) 67 | 68 |
69 | Contact channels: www.recognito.vision 73 |
74 | 75 | - Add your license to the `license.txt` file: 76 | https://github.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/blob/bef9e295a1fbff07d2a403227cacdc51ef6ff700/license.txt#L1-L5 77 | - Initialize the SDK with the license. 78 | https://github.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/blob/6e30f27487ac8eafafe122c7ab918ee6ae9eb0a7/FaceDemo/ViewController.swift#L37-L50 79 | 80 | Initialization status codes: 81 | 82 | | Code | Status | 83 | |:------:|------| 84 | |0|Activate SDK successfully| 85 | |-1|License Key Error| 86 | |-2|License AppID Error| 87 | |-3|License Expired| 88 | |-4|Activate Error| 89 | |-5|Init SDK Error| 90 | #### 3. SDK APIs 91 | ##### - Activate SDK 92 | ```objective-c 93 | +(int)setActivation:(NSString*)license; 94 | ``` 95 | Parameters 96 | - `license`: A string representing the license key required for activation. 97 | - Return Value: An integer representing the SDK activation status code. 98 |
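The integer codes in the table above correspond one-to-one to the `SDK_ERROR` enum declared in `facesdk.framework/Headers/facesdk_api.h`. A minimal Swift sketch (not part of the demo app; the type name is illustrative) that mirrors those codes on the Swift side:

```swift
// Hypothetical helper mirroring the status codes above (see SDK_ERROR in facesdk_api.h).
enum FaceSDKStatus: Int32 {
    case success           = 0   // Activate SDK successfully
    case licenseKeyError   = -1  // License Key Error
    case licenseAppIDError = -2  // License AppID Error
    case licenseExpired    = -3  // License Expired
    case activateError     = -4  // Activate Error
    case initError         = -5  // Init SDK Error
}
```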
99 | 100 | ##### - Initialize SDK 101 | ```objective-c 102 | +(int)initSDK; 103 | ``` 104 | Parameters: none. 105 | - Return Value: An integer representing the initialization status code. 106 |
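For reference, a hedged Swift sketch of the activation/initialization flow. It assumes the Objective-C methods above are exposed through the project's bridging header and import into Swift roughly as `FaceSDK.setActivation(_:)` and `FaceSDK.initSDK()` returning `Int32`; the license-loading path is illustrative only.

```swift
import UIKit

// Sketch only: the exact imported signatures depend on the bridging header setup.
func activateAndInitSDK() -> Bool {
    // Load the license text bundled with the app (bundling the file this way is an assumption).
    guard let path = Bundle.main.path(forResource: "license", ofType: "txt"),
          let license = try? String(contentsOfFile: path, encoding: .utf8) else {
        print("license.txt not found in the app bundle")
        return false
    }

    let activationCode = FaceSDK.setActivation(license)   // 0 on success, see the status table
    guard activationCode == 0 else {
        print("Activation failed with code \(activationCode)")
        return false
    }

    let initCode = FaceSDK.initSDK()                       // 0 on success
    guard initCode == 0 else {
        print("SDK initialization failed with code \(initCode)")
        return false
    }
    return true
}
```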
107 | 108 | ##### - Detect Face 109 | ```objective-c 110 | +(NSMutableArray*)faceDetection:(UIImage*)image; 111 | ``` 112 | Parameters 113 | - `image`: The `UIImage` object representing the image in which faces will be detected. 114 | - Return Value: An `NSMutableArray` of `FaceBox` objects describing the detected faces. 115 | ```objective-c 116 | @interface FaceBox : NSObject 117 | 118 | @property (nonatomic) int x1; 119 | @property (nonatomic) int y1; 120 | @property (nonatomic) int x2; 121 | @property (nonatomic) int y2; 122 | @property (nonatomic) float liveness; 123 | @property (nonatomic) float yaw; 124 | @property (nonatomic) float roll; 125 | @property (nonatomic) float pitch; 126 | @end 127 | ``` 128 |
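To make the return type concrete, here is a hedged Swift sketch that calls `faceDetection` and reads the `FaceBox` fields. It assumes the method imports as `FaceSDK.faceDetection(_:)` returning an `NSMutableArray` of `FaceBox`; the 0.5 liveness threshold is an illustrative choice, not a value prescribed by the SDK.

```swift
import UIKit

// Sketch: detect faces in a UIImage and inspect the returned FaceBox values.
func describeFaces(in image: UIImage) {
    let boxes = FaceSDK.faceDetection(image)               // NSMutableArray of FaceBox
    for case let box as FaceBox in boxes {
        let rect = CGRect(x: Int(box.x1), y: Int(box.y1),
                          width: Int(box.x2 - box.x1), height: Int(box.y2 - box.y1))
        // Liveness score and pose angles come straight from the FaceBox properties.
        print("face at \(rect): liveness \(box.liveness), yaw \(box.yaw), roll \(box.roll), pitch \(box.pitch)")
        let isLive = box.liveness > 0.5                     // assumed example threshold
        print(isLive ? "treated as live" : "treated as spoof")
    }
}
```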
129 | 130 | ##### - Extract face feature 131 | ```objective-c 132 | +(NSData*)templateExtraction:(UIImage*)image faceBox:(FaceBox*)faceBox; 133 | ``` 134 | Parameters 135 | - `image`: The `UIImage` object representing the source image from which the face template will be extracted. 136 | - `faceBox`: The `FaceBox` object representing the bounding box around the detected face. 137 | - Return Value: An `NSData` object containing the extracted face template data. 138 |
139 | 140 | ##### - Calculate similarity between two face features 141 | ```objective-c 142 | +(float)similarityCalculation:(NSData*)templates1 templates2:(NSData*)templates2; 143 | ``` 144 | Parameters 145 | - `templates1`: An `NSData` object representing the first face template. 146 | - `templates2`: An `NSData` object representing the second face template. 147 | - Return Value: A float value representing the similarity score between the two face templates. 148 |
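Putting the last two calls together, a hedged Swift sketch of a simple 1:1 comparison. It assumes the `NSData` return values bridge to `Data`, uses the first detected face in each image, and the 0.7 match threshold in the usage note is illustrative rather than a value prescribed by the SDK.

```swift
import UIKit

// Sketch: compare the first detected face in two images and return the similarity score.
func compareFaces(_ imageA: UIImage, _ imageB: UIImage) -> Float? {
    guard let boxA = FaceSDK.faceDetection(imageA).firstObject as? FaceBox,
          let boxB = FaceSDK.faceDetection(imageB).firstObject as? FaceBox else {
        return nil                                          // no face found in one of the images
    }
    let templateA = FaceSDK.templateExtraction(imageA, faceBox: boxA)
    let templateB = FaceSDK.templateExtraction(imageB, faceBox: boxB)
    return FaceSDK.similarityCalculation(templateA, templates2: templateB)
}

// Usage idea: treat the pair as the same person above an assumed threshold.
// if let score = compareFaces(selfie, enrolledPhoto), score > 0.7 { /* match */ }
```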
149 | 150 | ## Support 151 | For any questions, issues, or feature requests, please contact our support team. 152 | 153 |
154 | Contact channels: www.recognito.vision 158 |
159 |
160 |


168 | -------------------------------------------------------------------------------- /facesdk.framework/Headers/facesdk.h: -------------------------------------------------------------------------------- 1 | // 2 | // facesdk.h 3 | // facesdk 4 | // 5 | // Created by user on 4/12/23. 6 | // 7 | 8 | #import 9 | 10 | //! Project version number for facesdk. 11 | FOUNDATION_EXPORT double facesdkVersionNumber; 12 | 13 | //! Project version string for facesdk. 14 | FOUNDATION_EXPORT const unsigned char facesdkVersionString[]; 15 | 16 | // In this header, you should import all the public headers of your framework using statements like #import 17 | 18 | 19 | #include "facesdk_api.h" 20 | -------------------------------------------------------------------------------- /facesdk.framework/Headers/facesdk_api.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | NS_ASSUME_NONNULL_BEGIN 5 | 6 | enum SDK_ERROR 7 | { 8 | SDK_SUCCESS = 0, 9 | SDK_LICENSE_KEY_ERROR = -1, 10 | SDK_LICENSE_APPID_ERROR = -2, 11 | SDK_LICENSE_EXPIRED = -3, 12 | SDK_NO_ACTIVATED = -4, 13 | SDK_INIT_ERROR = -5, 14 | }; 15 | 16 | @interface FaceBox : NSObject 17 | 18 | @property (nonatomic) int x1; 19 | @property (nonatomic) int y1; 20 | @property (nonatomic) int x2; 21 | @property (nonatomic) int y2; 22 | @property (nonatomic) float liveness; 23 | @property (nonatomic) float yaw; 24 | @property (nonatomic) float roll; 25 | @property (nonatomic) float pitch; 26 | @end 27 | 28 | @interface FaceSDK : NSObject 29 | 30 | +(int) setActivation: (NSString*) license; 31 | +(int) initSDK; 32 | +(NSMutableArray*) faceDetection: (UIImage*) image; 33 | +(NSData*) templateExtraction: (UIImage*) image faceBox: (FaceBox*) faceBox; 34 | +(float) similarityCalculation: (NSData*) templates1 templates2: (NSData*) templates2; 35 | 36 | @end 37 | 38 | NS_ASSUME_NONNULL_END 39 | -------------------------------------------------------------------------------- /facesdk.framework/Info.plist: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/facesdk.framework/Info.plist -------------------------------------------------------------------------------- /facesdk.framework/Modules/module.modulemap: -------------------------------------------------------------------------------- 1 | framework module facesdk { 2 | umbrella header "facesdk.h" 3 | export * 4 | 5 | module * { export * } 6 | } 7 | -------------------------------------------------------------------------------- /facesdk.framework/_CodeSignature/CodeResources: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | files 6 | 7 | Headers/facesdk.h 8 | 9 | iXedJdnTNjJdkav0lex0htXPBa8= 10 | 11 | Headers/facesdk_api.h 12 | 13 | nvq2BAwf5zwJ4Ztl7CQydAFcrZo= 14 | 15 | Info.plist 16 | 17 | b6JJJTDOecm349xupUCc8icsvTQ= 18 | 19 | Modules/module.modulemap 20 | 21 | WrHmhQafWafNCSGU1Xr0vQaW6WY= 22 | 23 | detection.bin 24 | 25 | G52JYi47ACwak8HkEcpkm9zo1as= 26 | 27 | detection.param 28 | 29 | tHFm7ohczZG7KEtACVgRSjX3XIA= 30 | 31 | landmark.bin 32 | 33 | YwuxUSePlqnFEDLD95H35rgexRI= 34 | 35 | landmark.param 36 | 37 | HHZGt8b9GPLjgGplhK/7hcOW0Mo= 38 | 39 | liveness.bin 40 | 41 | qh1obYoGNe0LoyD5JBmUkdeYrEY= 42 | 43 | recognize.bin 44 | 45 | SViF2bgK4XlpoZCVL4WMJbPq6q8= 46 | 47 | recognize.param 48 | 49 | G2oZ25SbKg4KWk0aPL047vM/Rv0= 50 | 51 | 52 
| files2 53 | 54 | Headers/facesdk.h 55 | 56 | hash2 57 | 58 | CEHWZwXGt6HTp0cWeGAWrhBufzotOw42QLbWl2FjbpE= 59 | 60 | 61 | Headers/facesdk_api.h 62 | 63 | hash2 64 | 65 | AzDRsIwSMWBPsvoOmXuBP3EefyILyncJlo/a6keydI0= 66 | 67 | 68 | Modules/module.modulemap 69 | 70 | hash2 71 | 72 | e+AaXT/TjwyPpTaKiiPdA/TglVoEmdjtOjlJBMOX3fs= 73 | 74 | 75 | detection.bin 76 | 77 | hash2 78 | 79 | aSBPVzW2w5vueXMHRoelxdPFfzE45CIHzJJfvcmNVVc= 80 | 81 | 82 | detection.param 83 | 84 | hash2 85 | 86 | yb/QW3dSgGun1LdCqss+oipjB11F0sE0NtewKSJibFo= 87 | 88 | 89 | landmark.bin 90 | 91 | hash2 92 | 93 | q59zkpnecoyeYz1houDmoVK0+ROe2x8l/0dtpqDC9Ic= 94 | 95 | 96 | landmark.param 97 | 98 | hash2 99 | 100 | nkGOHyOPviHWfG8E67n+geJfRcw+yc7qGU4famEpZNc= 101 | 102 | 103 | liveness.bin 104 | 105 | hash2 106 | 107 | 3XjsZ9HffijptH6kD8YcgXR9DwsM2tDsYTNDvdErRkE= 108 | 109 | 110 | recognize.bin 111 | 112 | hash2 113 | 114 | ljjc/uY3hbh6WrCw7Z+d/+k+SzcY5eobU33VJDpmG4c= 115 | 116 | 117 | recognize.param 118 | 119 | hash2 120 | 121 | kDIa3Db/MNtnTOFGFGQ1ZvrQfA8iAnM1hx/ZDYyci4A= 122 | 123 | 124 | 125 | rules 126 | 127 | ^.* 128 | 129 | ^.*\.lproj/ 130 | 131 | optional 132 | 133 | weight 134 | 1000 135 | 136 | ^.*\.lproj/locversion.plist$ 137 | 138 | omit 139 | 140 | weight 141 | 1100 142 | 143 | ^Base\.lproj/ 144 | 145 | weight 146 | 1010 147 | 148 | ^version.plist$ 149 | 150 | 151 | rules2 152 | 153 | .*\.dSYM($|/) 154 | 155 | weight 156 | 11 157 | 158 | ^(.*/)?\.DS_Store$ 159 | 160 | omit 161 | 162 | weight 163 | 2000 164 | 165 | ^.* 166 | 167 | ^.*\.lproj/ 168 | 169 | optional 170 | 171 | weight 172 | 1000 173 | 174 | ^.*\.lproj/locversion.plist$ 175 | 176 | omit 177 | 178 | weight 179 | 1100 180 | 181 | ^Base\.lproj/ 182 | 183 | weight 184 | 1010 185 | 186 | ^Info\.plist$ 187 | 188 | omit 189 | 190 | weight 191 | 20 192 | 193 | ^PkgInfo$ 194 | 195 | omit 196 | 197 | weight 198 | 20 199 | 200 | ^embedded\.provisionprofile$ 201 | 202 | weight 203 | 20 204 | 205 | ^version\.plist$ 206 | 207 | weight 208 | 20 209 | 210 | 211 | 212 | 213 | -------------------------------------------------------------------------------- /facesdk.framework/detection.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/facesdk.framework/detection.bin -------------------------------------------------------------------------------- /facesdk.framework/detection.param: -------------------------------------------------------------------------------- 1 | 7767517 2 | 234 255 3 | Input data 0 1 data 0=60 1=60 2=3 4 | Split splitncnn_0 1 4 data data_splitncnn_0 data_splitncnn_1 data_splitncnn_2 data_splitncnn_3 5 | Convolution conv1 1 1 data_splitncnn_3 conv_blob1 0=8 1=3 2=1 3=2 4=1 5=0 6=216 6 | BatchNorm batch_norm1 1 1 conv_blob1 batch_norm_blob1 0=8 7 | Scale bn_scale1 1 1 batch_norm_blob1 batch_norm_blob1_bn_scale1 0=8 1=1 8 | ReLU relu1 1 1 batch_norm_blob1_bn_scale1 relu_blob1 9 | ConvolutionDepthWise conv2 1 1 relu_blob1 conv_blob2 0=8 1=3 2=1 3=1 4=1 5=0 6=72 7=8 10 | BatchNorm batch_norm2 1 1 conv_blob2 batch_norm_blob2 0=8 11 | Scale bn_scale2 1 1 batch_norm_blob2 batch_norm_blob2_bn_scale2 0=8 1=1 12 | ReLU relu2 1 1 batch_norm_blob2_bn_scale2 relu_blob2 13 | Convolution conv3 1 1 relu_blob2 conv_blob3 0=16 1=1 2=1 3=1 4=0 5=0 6=128 14 | BatchNorm batch_norm3 1 1 conv_blob3 batch_norm_blob3 0=16 15 | Scale bn_scale3 1 1 batch_norm_blob3 batch_norm_blob3_bn_scale3 0=16 1=1 16 | ReLU 
relu3 1 1 batch_norm_blob3_bn_scale3 relu_blob3 17 | ConvolutionDepthWise conv4 1 1 relu_blob3 conv_blob4 0=16 1=3 2=1 3=2 4=1 5=0 6=144 7=16 18 | BatchNorm batch_norm4 1 1 conv_blob4 batch_norm_blob4 0=16 19 | Scale bn_scale4 1 1 batch_norm_blob4 batch_norm_blob4_bn_scale4 0=16 1=1 20 | ReLU relu4 1 1 batch_norm_blob4_bn_scale4 relu_blob4 21 | Convolution conv5 1 1 relu_blob4 conv_blob5 0=32 1=1 2=1 3=1 4=0 5=0 6=512 22 | BatchNorm batch_norm5 1 1 conv_blob5 batch_norm_blob5 0=32 23 | Scale bn_scale5 1 1 batch_norm_blob5 batch_norm_blob5_bn_scale5 0=32 1=1 24 | ReLU relu5 1 1 batch_norm_blob5_bn_scale5 relu_blob5 25 | ConvolutionDepthWise conv6 1 1 relu_blob5 conv_blob6 0=32 1=3 2=1 3=1 4=1 5=0 6=288 7=32 26 | BatchNorm batch_norm6 1 1 conv_blob6 batch_norm_blob6 0=32 27 | Scale bn_scale6 1 1 batch_norm_blob6 batch_norm_blob6_bn_scale6 0=32 1=1 28 | ReLU relu6 1 1 batch_norm_blob6_bn_scale6 relu_blob6 29 | Convolution conv7 1 1 relu_blob6 conv_blob7 0=32 1=1 2=1 3=1 4=0 5=0 6=1024 30 | BatchNorm batch_norm7 1 1 conv_blob7 batch_norm_blob7 0=32 31 | Scale bn_scale7 1 1 batch_norm_blob7 batch_norm_blob7_bn_scale7 0=32 1=1 32 | ReLU relu7 1 1 batch_norm_blob7_bn_scale7 relu_blob7 33 | ConvolutionDepthWise conv8 1 1 relu_blob7 conv_blob8 0=32 1=3 2=1 3=2 4=1 5=0 6=288 7=32 34 | BatchNorm batch_norm8 1 1 conv_blob8 batch_norm_blob8 0=32 35 | Scale bn_scale8 1 1 batch_norm_blob8 batch_norm_blob8_bn_scale8 0=32 1=1 36 | ReLU relu8 1 1 batch_norm_blob8_bn_scale8 relu_blob8 37 | Convolution conv9 1 1 relu_blob8 conv_blob9 0=64 1=1 2=1 3=1 4=0 5=0 6=2048 38 | BatchNorm batch_norm9 1 1 conv_blob9 batch_norm_blob9 0=64 39 | Scale bn_scale9 1 1 batch_norm_blob9 batch_norm_blob9_bn_scale9 0=64 1=1 40 | ReLU relu9 1 1 batch_norm_blob9_bn_scale9 relu_blob9 41 | ConvolutionDepthWise conv10 1 1 relu_blob9 conv_blob10 0=64 1=3 2=1 3=1 4=1 5=0 6=576 7=64 42 | BatchNorm batch_norm10 1 1 conv_blob10 batch_norm_blob10 0=64 43 | Scale bn_scale10 1 1 batch_norm_blob10 batch_norm_blob10_bn_scale10 0=64 1=1 44 | ReLU relu10 1 1 batch_norm_blob10_bn_scale10 relu_blob10 45 | Convolution conv11 1 1 relu_blob10 conv_blob11 0=64 1=1 2=1 3=1 4=0 5=0 6=4096 46 | BatchNorm batch_norm11 1 1 conv_blob11 batch_norm_blob11 0=64 47 | Scale bn_scale11 1 1 batch_norm_blob11 batch_norm_blob11_bn_scale11 0=64 1=1 48 | ReLU relu11 1 1 batch_norm_blob11_bn_scale11 relu_blob11 49 | Split splitncnn_1 1 2 relu_blob11 relu_blob11_splitncnn_0 relu_blob11_splitncnn_1 50 | ConvolutionDepthWise conv12 1 1 relu_blob11_splitncnn_1 conv_blob12 0=64 1=3 2=1 3=2 4=1 5=0 6=576 7=64 51 | BatchNorm batch_norm12 1 1 conv_blob12 batch_norm_blob12 0=64 52 | Scale bn_scale12 1 1 batch_norm_blob12 batch_norm_blob12_bn_scale12 0=64 1=1 53 | ReLU relu12 1 1 batch_norm_blob12_bn_scale12 relu_blob12 54 | Convolution conv13 1 1 relu_blob12 conv_blob13 0=128 1=1 2=1 3=1 4=0 5=0 6=8192 55 | BatchNorm batch_norm13 1 1 conv_blob13 batch_norm_blob13 0=128 56 | Scale bn_scale13 1 1 batch_norm_blob13 batch_norm_blob13_bn_scale13 0=128 1=1 57 | ReLU relu13 1 1 batch_norm_blob13_bn_scale13 relu_blob13 58 | ConvolutionDepthWise conv14 1 1 relu_blob13 conv_blob14 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 59 | BatchNorm batch_norm14 1 1 conv_blob14 batch_norm_blob14 0=128 60 | Scale bn_scale14 1 1 batch_norm_blob14 batch_norm_blob14_bn_scale14 0=128 1=1 61 | ReLU relu14 1 1 batch_norm_blob14_bn_scale14 relu_blob14 62 | Convolution conv15 1 1 relu_blob14 conv_blob15 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 63 | BatchNorm batch_norm15 1 1 conv_blob15 batch_norm_blob15 0=128 64 | 
Scale bn_scale15 1 1 batch_norm_blob15 batch_norm_blob15_bn_scale15 0=128 1=1 65 | ReLU relu15 1 1 batch_norm_blob15_bn_scale15 relu_blob15 66 | ConvolutionDepthWise conv16 1 1 relu_blob15 conv_blob16 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 67 | BatchNorm batch_norm16 1 1 conv_blob16 batch_norm_blob16 0=128 68 | Scale bn_scale16 1 1 batch_norm_blob16 batch_norm_blob16_bn_scale16 0=128 1=1 69 | ReLU relu16 1 1 batch_norm_blob16_bn_scale16 relu_blob16 70 | Convolution conv17 1 1 relu_blob16 conv_blob17 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 71 | BatchNorm batch_norm17 1 1 conv_blob17 batch_norm_blob17 0=128 72 | Scale bn_scale17 1 1 batch_norm_blob17 batch_norm_blob17_bn_scale17 0=128 1=1 73 | ReLU relu17 1 1 batch_norm_blob17_bn_scale17 relu_blob17 74 | ConvolutionDepthWise conv18 1 1 relu_blob17 conv_blob18 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 75 | BatchNorm batch_norm18 1 1 conv_blob18 batch_norm_blob18 0=128 76 | Scale bn_scale18 1 1 batch_norm_blob18 batch_norm_blob18_bn_scale18 0=128 1=1 77 | ReLU relu18 1 1 batch_norm_blob18_bn_scale18 relu_blob18 78 | Convolution conv19 1 1 relu_blob18 conv_blob19 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 79 | BatchNorm batch_norm19 1 1 conv_blob19 batch_norm_blob19 0=128 80 | Scale bn_scale19 1 1 batch_norm_blob19 batch_norm_blob19_bn_scale19 0=128 1=1 81 | ReLU relu19 1 1 batch_norm_blob19_bn_scale19 relu_blob19 82 | ConvolutionDepthWise conv20 1 1 relu_blob19 conv_blob20 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 83 | BatchNorm batch_norm20 1 1 conv_blob20 batch_norm_blob20 0=128 84 | Scale bn_scale20 1 1 batch_norm_blob20 batch_norm_blob20_bn_scale20 0=128 1=1 85 | ReLU relu20 1 1 batch_norm_blob20_bn_scale20 relu_blob20 86 | Convolution conv21 1 1 relu_blob20 conv_blob21 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 87 | BatchNorm batch_norm21 1 1 conv_blob21 batch_norm_blob21 0=128 88 | Scale bn_scale21 1 1 batch_norm_blob21 batch_norm_blob21_bn_scale21 0=128 1=1 89 | ReLU relu21 1 1 batch_norm_blob21_bn_scale21 relu_blob21 90 | ConvolutionDepthWise conv22 1 1 relu_blob21 conv_blob22 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 91 | BatchNorm batch_norm22 1 1 conv_blob22 batch_norm_blob22 0=128 92 | Scale bn_scale22 1 1 batch_norm_blob22 batch_norm_blob22_bn_scale22 0=128 1=1 93 | ReLU relu22 1 1 batch_norm_blob22_bn_scale22 relu_blob22 94 | Convolution conv23 1 1 relu_blob22 conv_blob23 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 95 | BatchNorm batch_norm23 1 1 conv_blob23 batch_norm_blob23 0=128 96 | Scale bn_scale23 1 1 batch_norm_blob23 batch_norm_blob23_bn_scale23 0=128 1=1 97 | ReLU relu23 1 1 batch_norm_blob23_bn_scale23 relu_blob23 98 | Split splitncnn_2 1 2 relu_blob23 relu_blob23_splitncnn_0 relu_blob23_splitncnn_1 99 | ConvolutionDepthWise conv24 1 1 relu_blob23_splitncnn_1 conv_blob24 0=128 1=3 2=1 3=2 4=1 5=0 6=1152 7=128 100 | BatchNorm batch_norm24 1 1 conv_blob24 batch_norm_blob24 0=128 101 | Scale bn_scale24 1 1 batch_norm_blob24 batch_norm_blob24_bn_scale24 0=128 1=1 102 | ReLU relu24 1 1 batch_norm_blob24_bn_scale24 relu_blob24 103 | Convolution conv25 1 1 relu_blob24 conv_blob25 0=256 1=1 2=1 3=1 4=0 5=0 6=32768 104 | BatchNorm batch_norm25 1 1 conv_blob25 batch_norm_blob25 0=256 105 | Scale bn_scale25 1 1 batch_norm_blob25 batch_norm_blob25_bn_scale25 0=256 1=1 106 | ReLU relu25 1 1 batch_norm_blob25_bn_scale25 relu_blob25 107 | ConvolutionDepthWise conv26 1 1 relu_blob25 conv_blob26 0=256 1=3 2=1 3=1 4=1 5=0 6=2304 7=256 108 | BatchNorm batch_norm26 1 1 conv_blob26 batch_norm_blob26 0=256 109 | Scale bn_scale26 1 1 batch_norm_blob26 batch_norm_blob26_bn_scale26 
0=256 1=1 110 | ReLU relu26 1 1 batch_norm_blob26_bn_scale26 relu_blob26 111 | Convolution conv27 1 1 relu_blob26 conv_blob27 0=256 1=1 2=1 3=1 4=0 5=0 6=65536 112 | BatchNorm batch_norm27 1 1 conv_blob27 batch_norm_blob27 0=256 113 | Scale bn_scale27 1 1 batch_norm_blob27 batch_norm_blob27_bn_scale27 0=256 1=1 114 | ReLU relu27 1 1 batch_norm_blob27_bn_scale27 relu_blob27 115 | Convolution conv28 1 1 relu_blob11_splitncnn_0 conv_blob28 0=64 1=1 2=1 3=1 4=0 5=0 6=4096 116 | BatchNorm batch_norm28 1 1 conv_blob28 batch_norm_blob28 0=64 117 | Scale bn_scale28 1 1 batch_norm_blob28 batch_norm_blob28_bn_scale28 0=64 1=1 118 | ReLU relu28 1 1 batch_norm_blob28_bn_scale28 relu_blob28 119 | Split splitncnn_3 1 2 relu_blob28 relu_blob28_splitncnn_0 relu_blob28_splitncnn_1 120 | Convolution conv29 1 1 relu_blob23_splitncnn_0 conv_blob29 0=64 1=1 2=1 3=1 4=0 5=0 6=8192 121 | BatchNorm batch_norm29 1 1 conv_blob29 batch_norm_blob29 0=64 122 | Scale bn_scale29 1 1 batch_norm_blob29 batch_norm_blob29_bn_scale29 0=64 1=1 123 | ReLU relu29 1 1 batch_norm_blob29_bn_scale29 relu_blob29 124 | Split splitncnn_4 1 2 relu_blob29 relu_blob29_splitncnn_0 relu_blob29_splitncnn_1 125 | Convolution conv30 1 1 relu_blob27 conv_blob30 0=64 1=1 2=1 3=1 4=0 5=0 6=16384 126 | BatchNorm batch_norm30 1 1 conv_blob30 batch_norm_blob30 0=64 127 | Scale bn_scale30 1 1 batch_norm_blob30 batch_norm_blob30_bn_scale30 0=64 1=1 128 | ReLU relu30 1 1 batch_norm_blob30_bn_scale30 relu_blob30 129 | Split splitncnn_5 1 3 relu_blob30 relu_blob30_splitncnn_0 relu_blob30_splitncnn_1 relu_blob30_splitncnn_2 130 | Deconvolution conv_transpose1 1 1 relu_blob30_splitncnn_2 conv_transpose_blob1 0=64 1=2 2=1 3=2 4=0 5=1 6=16384 131 | Crop crop1 2 1 conv_transpose_blob1 relu_blob29_splitncnn_1 crop1 132 | Eltwise add1 2 1 relu_blob29_splitncnn_0 crop1 add_blob1 0=1 -23301=0 133 | Convolution conv31 1 1 add_blob1 conv_blob31 0=64 1=3 2=1 3=1 4=1 5=0 6=36864 134 | BatchNorm batch_norm31 1 1 conv_blob31 batch_norm_blob31 0=64 135 | Scale bn_scale31 1 1 batch_norm_blob31 batch_norm_blob31_bn_scale31 0=64 1=1 136 | ReLU relu31 1 1 batch_norm_blob31_bn_scale31 relu_blob31 137 | Split splitncnn_6 1 3 relu_blob31 relu_blob31_splitncnn_0 relu_blob31_splitncnn_1 relu_blob31_splitncnn_2 138 | Deconvolution conv_transpose2 1 1 relu_blob31_splitncnn_2 conv_transpose_blob2 0=64 1=2 2=1 3=2 4=0 5=1 6=16384 139 | Crop crop2 2 1 conv_transpose_blob2 relu_blob28_splitncnn_1 crop2 140 | Eltwise add2 2 1 relu_blob28_splitncnn_0 crop2 add_blob2 0=1 -23301=0 141 | Convolution conv32 1 1 add_blob2 conv_blob32 0=64 1=3 2=1 3=1 4=1 5=0 6=36864 142 | BatchNorm batch_norm32 1 1 conv_blob32 batch_norm_blob32 0=64 143 | Scale bn_scale32 1 1 batch_norm_blob32 batch_norm_blob32_bn_scale32 0=64 1=1 144 | ReLU relu32 1 1 batch_norm_blob32_bn_scale32 relu_blob32 145 | Split splitncnn_7 1 2 relu_blob32 relu_blob32_splitncnn_0 relu_blob32_splitncnn_1 146 | Convolution conv33 1 1 relu_blob32_splitncnn_1 conv_blob33 0=32 1=3 2=1 3=1 4=1 5=0 6=18432 147 | BatchNorm batch_norm33 1 1 conv_blob33 batch_norm_blob33 0=32 148 | Scale bn_scale33 1 1 batch_norm_blob33 batch_norm_blob33_bn_scale33 0=32 1=1 149 | Convolution conv34 1 1 relu_blob32_splitncnn_0 conv_blob34 0=16 1=3 2=1 3=1 4=1 5=0 6=9216 150 | BatchNorm batch_norm34 1 1 conv_blob34 batch_norm_blob34 0=16 151 | Scale bn_scale34 1 1 batch_norm_blob34 batch_norm_blob34_bn_scale34 0=16 1=1 152 | ReLU relu33 1 1 batch_norm_blob34_bn_scale34 relu_blob33 153 | Split splitncnn_8 1 2 relu_blob33 relu_blob33_splitncnn_0 
relu_blob33_splitncnn_1 154 | Convolution conv35 1 1 relu_blob33_splitncnn_1 conv_blob35 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 155 | BatchNorm batch_norm35 1 1 conv_blob35 batch_norm_blob35 0=16 156 | Scale bn_scale35 1 1 batch_norm_blob35 batch_norm_blob35_bn_scale35 0=16 1=1 157 | Convolution conv36 1 1 relu_blob33_splitncnn_0 conv_blob36 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 158 | BatchNorm batch_norm36 1 1 conv_blob36 batch_norm_blob36 0=16 159 | Scale bn_scale36 1 1 batch_norm_blob36 batch_norm_blob36_bn_scale36 0=16 1=1 160 | ReLU relu34 1 1 batch_norm_blob36_bn_scale36 relu_blob34 161 | Convolution conv37 1 1 relu_blob34 conv_blob37 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 162 | BatchNorm batch_norm37 1 1 conv_blob37 batch_norm_blob37 0=16 163 | Scale bn_scale37 1 1 batch_norm_blob37 batch_norm_blob37_bn_scale37 0=16 1=1 164 | Concat cat1 3 1 batch_norm_blob33_bn_scale33 batch_norm_blob35_bn_scale35 batch_norm_blob37_bn_scale37 cat_blob1 0=0 165 | ReLU relu35 1 1 cat_blob1 relu_blob35 166 | Split splitncnn_9 1 3 relu_blob35 relu_blob35_splitncnn_0 relu_blob35_splitncnn_1 relu_blob35_splitncnn_2 167 | Convolution conv38 1 1 relu_blob31_splitncnn_1 conv_blob38 0=32 1=3 2=1 3=1 4=1 5=0 6=18432 168 | BatchNorm batch_norm38 1 1 conv_blob38 batch_norm_blob38 0=32 169 | Scale bn_scale38 1 1 batch_norm_blob38 batch_norm_blob38_bn_scale38 0=32 1=1 170 | Convolution conv39 1 1 relu_blob31_splitncnn_0 conv_blob39 0=16 1=3 2=1 3=1 4=1 5=0 6=9216 171 | BatchNorm batch_norm39 1 1 conv_blob39 batch_norm_blob39 0=16 172 | Scale bn_scale39 1 1 batch_norm_blob39 batch_norm_blob39_bn_scale39 0=16 1=1 173 | ReLU relu36 1 1 batch_norm_blob39_bn_scale39 relu_blob36 174 | Split splitncnn_10 1 2 relu_blob36 relu_blob36_splitncnn_0 relu_blob36_splitncnn_1 175 | Convolution conv40 1 1 relu_blob36_splitncnn_1 conv_blob40 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 176 | BatchNorm batch_norm40 1 1 conv_blob40 batch_norm_blob40 0=16 177 | Scale bn_scale40 1 1 batch_norm_blob40 batch_norm_blob40_bn_scale40 0=16 1=1 178 | Convolution conv41 1 1 relu_blob36_splitncnn_0 conv_blob41 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 179 | BatchNorm batch_norm41 1 1 conv_blob41 batch_norm_blob41 0=16 180 | Scale bn_scale41 1 1 batch_norm_blob41 batch_norm_blob41_bn_scale41 0=16 1=1 181 | ReLU relu37 1 1 batch_norm_blob41_bn_scale41 relu_blob37 182 | Convolution conv42 1 1 relu_blob37 conv_blob42 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 183 | BatchNorm batch_norm42 1 1 conv_blob42 batch_norm_blob42 0=16 184 | Scale bn_scale42 1 1 batch_norm_blob42 batch_norm_blob42_bn_scale42 0=16 1=1 185 | Concat cat2 3 1 batch_norm_blob38_bn_scale38 batch_norm_blob40_bn_scale40 batch_norm_blob42_bn_scale42 cat_blob2 0=0 186 | ReLU relu38 1 1 cat_blob2 relu_blob38 187 | Split splitncnn_11 1 3 relu_blob38 relu_blob38_splitncnn_0 relu_blob38_splitncnn_1 relu_blob38_splitncnn_2 188 | Convolution conv43 1 1 relu_blob30_splitncnn_1 conv_blob43 0=32 1=3 2=1 3=1 4=1 5=0 6=18432 189 | BatchNorm batch_norm43 1 1 conv_blob43 batch_norm_blob43 0=32 190 | Scale bn_scale43 1 1 batch_norm_blob43 batch_norm_blob43_bn_scale43 0=32 1=1 191 | Convolution conv44 1 1 relu_blob30_splitncnn_0 conv_blob44 0=16 1=3 2=1 3=1 4=1 5=0 6=9216 192 | BatchNorm batch_norm44 1 1 conv_blob44 batch_norm_blob44 0=16 193 | Scale bn_scale44 1 1 batch_norm_blob44 batch_norm_blob44_bn_scale44 0=16 1=1 194 | ReLU relu39 1 1 batch_norm_blob44_bn_scale44 relu_blob39 195 | Split splitncnn_12 1 2 relu_blob39 relu_blob39_splitncnn_0 relu_blob39_splitncnn_1 196 | Convolution conv45 1 1 relu_blob39_splitncnn_1 conv_blob45 0=16 1=3 2=1 3=1 
4=1 5=0 6=2304 197 | BatchNorm batch_norm45 1 1 conv_blob45 batch_norm_blob45 0=16 198 | Scale bn_scale45 1 1 batch_norm_blob45 batch_norm_blob45_bn_scale45 0=16 1=1 199 | Convolution conv46 1 1 relu_blob39_splitncnn_0 conv_blob46 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 200 | BatchNorm batch_norm46 1 1 conv_blob46 batch_norm_blob46 0=16 201 | Scale bn_scale46 1 1 batch_norm_blob46 batch_norm_blob46_bn_scale46 0=16 1=1 202 | ReLU relu40 1 1 batch_norm_blob46_bn_scale46 relu_blob40 203 | Convolution conv47 1 1 relu_blob40 conv_blob47 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 204 | BatchNorm batch_norm47 1 1 conv_blob47 batch_norm_blob47 0=16 205 | Scale bn_scale47 1 1 batch_norm_blob47 batch_norm_blob47_bn_scale47 0=16 1=1 206 | Concat cat3 3 1 batch_norm_blob43_bn_scale43 batch_norm_blob45_bn_scale45 batch_norm_blob47_bn_scale47 cat_blob3 0=0 207 | ReLU relu41 1 1 cat_blob3 relu_blob41 208 | Split splitncnn_13 1 3 relu_blob41 relu_blob41_splitncnn_0 relu_blob41_splitncnn_1 relu_blob41_splitncnn_2 209 | Convolution conv48 1 1 relu_blob35_splitncnn_2 conv_blob48 0=8 1=1 2=1 3=1 4=0 5=1 6=512 210 | Convolution conv49 1 1 relu_blob35_splitncnn_1 conv_blob49 0=4 1=1 2=1 3=1 4=0 5=1 6=256 211 | Convolution conv50 1 1 relu_blob38_splitncnn_2 conv_blob50 0=8 1=1 2=1 3=1 4=0 5=1 6=512 212 | Convolution conv51 1 1 relu_blob38_splitncnn_1 conv_blob51 0=4 1=1 2=1 3=1 4=0 5=1 6=256 213 | Convolution conv52 1 1 relu_blob41_splitncnn_2 conv_blob52 0=8 1=1 2=1 3=1 4=0 5=1 6=512 214 | Convolution conv53 1 1 relu_blob41_splitncnn_1 conv_blob53 0=4 1=1 2=1 3=1 4=0 5=1 6=256 215 | Permute conv4_3_norm_mbox_loc_perm 1 1 conv_blob48 conv4_3_norm_mbox_loc_perm 0=3 216 | Flatten conv4_3_norm_mbox_loc_flat 1 1 conv4_3_norm_mbox_loc_perm conv4_3_norm_mbox_loc_flat 217 | Permute conv4_3_norm_mbox_conf_perm 1 1 conv_blob49 conv4_3_norm_mbox_conf_perm 0=3 218 | Flatten conv4_3_norm_mbox_conf_flat 1 1 conv4_3_norm_mbox_conf_perm conv4_3_norm_mbox_conf_flat 219 | PriorBox conv4_3_norm_mbox_priorbox 2 1 relu_blob35_splitncnn_0 data_splitncnn_2 conv4_3_norm_mbox_priorbox -23300=2,1.600000e+01,3.200000e+01 -23301=0 -23302=0 3=1.000000e-01 4=1.000000e-01 5=2.000000e-01 6=2.000000e-01 7=1 8=0 9=-233 10=-233 11=8.000000e+00 12=8.000000e+00 13=5.000000e-01 220 | Permute conv5_3_norm_mbox_loc_perm 1 1 conv_blob50 conv5_3_norm_mbox_loc_perm 0=3 221 | Flatten conv5_3_norm_mbox_loc_flat 1 1 conv5_3_norm_mbox_loc_perm conv5_3_norm_mbox_loc_flat 222 | Permute conv5_3_norm_mbox_conf_perm 1 1 conv_blob51 conv5_3_norm_mbox_conf_perm 0=3 223 | Flatten conv5_3_norm_mbox_conf_flat 1 1 conv5_3_norm_mbox_conf_perm conv5_3_norm_mbox_conf_flat 224 | PriorBox conv5_3_norm_mbox_priorbox 2 1 relu_blob38_splitncnn_0 data_splitncnn_1 conv5_3_norm_mbox_priorbox -23300=2,6.400000e+01,1.280000e+02 -23301=0 -23302=0 3=1.000000e-01 4=1.000000e-01 5=2.000000e-01 6=2.000000e-01 7=1 8=0 9=-233 10=-233 11=1.600000e+01 12=1.600000e+01 13=5.000000e-01 225 | Permute conv6_3_norm_mbox_loc_perm 1 1 conv_blob52 conv6_3_norm_mbox_loc_perm 0=3 226 | Flatten conv6_3_norm_mbox_loc_flat 1 1 conv6_3_norm_mbox_loc_perm conv6_3_norm_mbox_loc_flat 227 | Permute conv6_3_norm_mbox_conf_perm 1 1 conv_blob53 conv6_3_norm_mbox_conf_perm 0=3 228 | Flatten conv6_3_norm_mbox_conf_flat 1 1 conv6_3_norm_mbox_conf_perm conv6_3_norm_mbox_conf_flat 229 | PriorBox conv6_3_norm_mbox_priorbox 2 1 relu_blob41_splitncnn_0 data_splitncnn_0 conv6_3_norm_mbox_priorbox -23300=2,2.560000e+02,5.120000e+02 -23301=0 -23302=0 3=1.000000e-01 4=1.000000e-01 5=2.000000e-01 6=2.000000e-01 7=1 8=0 9=-233 10=-233 
11=3.200000e+01 12=3.200000e+01 13=5.000000e-01 230 | Concat mbox_loc 3 1 conv4_3_norm_mbox_loc_flat conv5_3_norm_mbox_loc_flat conv6_3_norm_mbox_loc_flat mbox_loc 0=0 231 | Concat mbox_conf 3 1 conv4_3_norm_mbox_conf_flat conv5_3_norm_mbox_conf_flat conv6_3_norm_mbox_conf_flat mbox_conf 0=0 232 | Concat mbox_priorbox 3 1 conv4_3_norm_mbox_priorbox conv5_3_norm_mbox_priorbox conv6_3_norm_mbox_priorbox mbox_priorbox 0=1 233 | Reshape mbox_conf_reshape 1 1 mbox_conf mbox_conf_reshape 0=2 1=-1 2=-233 3=0 234 | Softmax mbox_conf_softmax 1 1 mbox_conf_reshape mbox_conf_softmax 0=1 1=1 235 | Flatten mbox_conf_flatten 1 1 mbox_conf_softmax mbox_conf_flatten 236 | DetectionOutput detection_out 3 1 mbox_loc mbox_conf_flatten mbox_priorbox detection_out 0=2 1=3.000000e-01 2=400 3=200 4=1.000000e-01 237 | -------------------------------------------------------------------------------- /facesdk.framework/facesdk: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/facesdk.framework/facesdk -------------------------------------------------------------------------------- /facesdk.framework/landmark.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/facesdk.framework/landmark.bin -------------------------------------------------------------------------------- /facesdk.framework/landmark.param: -------------------------------------------------------------------------------- 1 | 7767517 2 | 15 15 3 | Input input 0 1 data 0=60 1=60 2=1 4 | Convolution Conv1 1 1 data Conv1 0=20 1=5 2=1 3=1 4=2 5=1 6=500 5 | ReLU ActivationReLU1 1 1 Conv1 ActivationReLU1 6 | Pooling Pool1 1 1 ActivationReLU1 Pool1 0=0 1=2 2=2 3=0 4=0 7 | Convolution Conv2 1 1 Pool1 Conv2 0=48 1=5 2=1 3=1 4=2 5=1 6=24000 8 | ReLU ActivationReLU2 1 1 Conv2 ActivationReLU2 9 | Pooling Pool2 1 1 ActivationReLU2 Pool2 0=0 1=2 2=2 3=0 4=0 10 | Convolution Conv3 1 1 Pool2 Conv3 0=64 1=3 2=1 3=1 4=0 5=1 6=27648 11 | ReLU ActivationReLU3 1 1 Conv3 ActivationReLU3 12 | Pooling Pool3 1 1 ActivationReLU3 Pool3 0=0 1=3 2=2 3=0 4=0 13 | Convolution Conv4 1 1 Pool3 Conv4 0=80 1=3 2=1 3=1 4=0 5=1 6=46080 14 | ReLU ActivationReLU4 1 1 Conv4 ActivationReLU4 15 | InnerProduct Dense1 1 1 ActivationReLU4 Dense1 0=512 1=1 2=655360 16 | ReLU ActivationReLU5 1 1 Dense1 ActivationReLU5 17 | InnerProduct Dense3 1 1 ActivationReLU5 Dense3 0=136 1=1 2=69632 18 | -------------------------------------------------------------------------------- /facesdk.framework/liveness.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/facesdk.framework/liveness.bin -------------------------------------------------------------------------------- /facesdk.framework/recognize.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/recognito-vision/iOS-FaceRecognition-FaceLivenessDetection/027c67096d4e96cff94d7b420c57e497446e91bf/facesdk.framework/recognize.bin -------------------------------------------------------------------------------- /facesdk.framework/recognize.param: 
-------------------------------------------------------------------------------- 1 | 7767517 2 | 160 172 3 | Input data 0 1 data 4 | BinaryOp _minusscalar0 1 1 data _minusscalar0 0=1 1=1 2=127.500000 5 | BinaryOp _mulscalar0 1 1 _minusscalar0 _mulscalar0 0=2 1=1 2=0.007812 6 | Convolution conv_1_conv2d 1 1 _mulscalar0 conv_1_conv2d 0=64 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=1728 7 | BatchNorm conv_1_batchnorm 1 1 conv_1_conv2d conv_1_batchnorm 0=64 8 | PReLU conv_1_relu 1 1 conv_1_batchnorm conv_1_relu 0=64 9 | ConvolutionDepthWise conv_2_dw_conv2d 1 1 conv_1_relu conv_2_dw_conv2d 0=64 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=576 7=64 10 | BatchNorm conv_2_dw_batchnorm 1 1 conv_2_dw_conv2d conv_2_dw_batchnorm 0=64 11 | PReLU conv_2_dw_relu 1 1 conv_2_dw_batchnorm conv_2_dw_relu 0=64 12 | Convolution dconv_23_conv_sep_conv2d 1 1 conv_2_dw_relu dconv_23_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 13 | BatchNorm dconv_23_conv_sep_batchnorm 1 1 dconv_23_conv_sep_conv2d dconv_23_conv_sep_batchnorm 0=128 14 | PReLU dconv_23_conv_sep_relu 1 1 dconv_23_conv_sep_batchnorm dconv_23_conv_sep_relu 0=128 15 | ConvolutionDepthWise dconv_23_conv_dw_conv2d 1 1 dconv_23_conv_sep_relu dconv_23_conv_dw_conv2d 0=128 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=1152 7=128 16 | BatchNorm dconv_23_conv_dw_batchnorm 1 1 dconv_23_conv_dw_conv2d dconv_23_conv_dw_batchnorm 0=128 17 | PReLU dconv_23_conv_dw_relu 1 1 dconv_23_conv_dw_batchnorm dconv_23_conv_dw_relu 0=128 18 | Convolution dconv_23_conv_proj_conv2d 1 1 dconv_23_conv_dw_relu dconv_23_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 19 | BatchNorm dconv_23_conv_proj_batchnorm 1 1 dconv_23_conv_proj_conv2d dconv_23_conv_proj_batchnorm 0=64 20 | Split splitncnn_0 1 2 dconv_23_conv_proj_batchnorm dconv_23_conv_proj_batchnorm_splitncnn_0 dconv_23_conv_proj_batchnorm_splitncnn_1 21 | Convolution res_3_block0_conv_sep_conv2d 1 1 dconv_23_conv_proj_batchnorm_splitncnn_1 res_3_block0_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 22 | BatchNorm res_3_block0_conv_sep_batchnorm 1 1 res_3_block0_conv_sep_conv2d res_3_block0_conv_sep_batchnorm 0=128 23 | PReLU res_3_block0_conv_sep_relu 1 1 res_3_block0_conv_sep_batchnorm res_3_block0_conv_sep_relu 0=128 24 | ConvolutionDepthWise res_3_block0_conv_dw_conv2d 1 1 res_3_block0_conv_sep_relu res_3_block0_conv_dw_conv2d 0=128 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128 25 | BatchNorm res_3_block0_conv_dw_batchnorm 1 1 res_3_block0_conv_dw_conv2d res_3_block0_conv_dw_batchnorm 0=128 26 | PReLU res_3_block0_conv_dw_relu 1 1 res_3_block0_conv_dw_batchnorm res_3_block0_conv_dw_relu 0=128 27 | Convolution res_3_block0_conv_proj_conv2d 1 1 res_3_block0_conv_dw_relu res_3_block0_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 28 | BatchNorm res_3_block0_conv_proj_batchnorm 1 1 res_3_block0_conv_proj_conv2d res_3_block0_conv_proj_batchnorm 0=64 29 | BinaryOp _plus0 2 1 res_3_block0_conv_proj_batchnorm dconv_23_conv_proj_batchnorm_splitncnn_0 _plus0 0=0 30 | Split splitncnn_1 1 2 _plus0 _plus0_splitncnn_0 _plus0_splitncnn_1 31 | Convolution res_3_block1_conv_sep_conv2d 1 1 _plus0_splitncnn_1 res_3_block1_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 32 | BatchNorm res_3_block1_conv_sep_batchnorm 1 1 res_3_block1_conv_sep_conv2d res_3_block1_conv_sep_batchnorm 0=128 33 | PReLU res_3_block1_conv_sep_relu 1 1 res_3_block1_conv_sep_batchnorm res_3_block1_conv_sep_relu 0=128 34 | ConvolutionDepthWise res_3_block1_conv_dw_conv2d 1 1 res_3_block1_conv_sep_relu res_3_block1_conv_dw_conv2d 0=128 1=3 
11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128 35 | BatchNorm res_3_block1_conv_dw_batchnorm 1 1 res_3_block1_conv_dw_conv2d res_3_block1_conv_dw_batchnorm 0=128 36 | PReLU res_3_block1_conv_dw_relu 1 1 res_3_block1_conv_dw_batchnorm res_3_block1_conv_dw_relu 0=128 37 | Convolution res_3_block1_conv_proj_conv2d 1 1 res_3_block1_conv_dw_relu res_3_block1_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 38 | BatchNorm res_3_block1_conv_proj_batchnorm 1 1 res_3_block1_conv_proj_conv2d res_3_block1_conv_proj_batchnorm 0=64 39 | BinaryOp _plus1 2 1 res_3_block1_conv_proj_batchnorm _plus0_splitncnn_0 _plus1 0=0 40 | Split splitncnn_2 1 2 _plus1 _plus1_splitncnn_0 _plus1_splitncnn_1 41 | Convolution res_3_block2_conv_sep_conv2d 1 1 _plus1_splitncnn_1 res_3_block2_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 42 | BatchNorm res_3_block2_conv_sep_batchnorm 1 1 res_3_block2_conv_sep_conv2d res_3_block2_conv_sep_batchnorm 0=128 43 | PReLU res_3_block2_conv_sep_relu 1 1 res_3_block2_conv_sep_batchnorm res_3_block2_conv_sep_relu 0=128 44 | ConvolutionDepthWise res_3_block2_conv_dw_conv2d 1 1 res_3_block2_conv_sep_relu res_3_block2_conv_dw_conv2d 0=128 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128 45 | BatchNorm res_3_block2_conv_dw_batchnorm 1 1 res_3_block2_conv_dw_conv2d res_3_block2_conv_dw_batchnorm 0=128 46 | PReLU res_3_block2_conv_dw_relu 1 1 res_3_block2_conv_dw_batchnorm res_3_block2_conv_dw_relu 0=128 47 | Convolution res_3_block2_conv_proj_conv2d 1 1 res_3_block2_conv_dw_relu res_3_block2_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 48 | BatchNorm res_3_block2_conv_proj_batchnorm 1 1 res_3_block2_conv_proj_conv2d res_3_block2_conv_proj_batchnorm 0=64 49 | BinaryOp _plus2 2 1 res_3_block2_conv_proj_batchnorm _plus1_splitncnn_0 _plus2 0=0 50 | Split splitncnn_3 1 2 _plus2 _plus2_splitncnn_0 _plus2_splitncnn_1 51 | Convolution res_3_block3_conv_sep_conv2d 1 1 _plus2_splitncnn_1 res_3_block3_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 52 | BatchNorm res_3_block3_conv_sep_batchnorm 1 1 res_3_block3_conv_sep_conv2d res_3_block3_conv_sep_batchnorm 0=128 53 | PReLU res_3_block3_conv_sep_relu 1 1 res_3_block3_conv_sep_batchnorm res_3_block3_conv_sep_relu 0=128 54 | ConvolutionDepthWise res_3_block3_conv_dw_conv2d 1 1 res_3_block3_conv_sep_relu res_3_block3_conv_dw_conv2d 0=128 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128 55 | BatchNorm res_3_block3_conv_dw_batchnorm 1 1 res_3_block3_conv_dw_conv2d res_3_block3_conv_dw_batchnorm 0=128 56 | PReLU res_3_block3_conv_dw_relu 1 1 res_3_block3_conv_dw_batchnorm res_3_block3_conv_dw_relu 0=128 57 | Convolution res_3_block3_conv_proj_conv2d 1 1 res_3_block3_conv_dw_relu res_3_block3_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 58 | BatchNorm res_3_block3_conv_proj_batchnorm 1 1 res_3_block3_conv_proj_conv2d res_3_block3_conv_proj_batchnorm 0=64 59 | BinaryOp _plus3 2 1 res_3_block3_conv_proj_batchnorm _plus2_splitncnn_0 _plus3 0=0 60 | Convolution dconv_34_conv_sep_conv2d 1 1 _plus3 dconv_34_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=16384 61 | BatchNorm dconv_34_conv_sep_batchnorm 1 1 dconv_34_conv_sep_conv2d dconv_34_conv_sep_batchnorm 0=256 62 | PReLU dconv_34_conv_sep_relu 1 1 dconv_34_conv_sep_batchnorm dconv_34_conv_sep_relu 0=256 63 | ConvolutionDepthWise dconv_34_conv_dw_conv2d 1 1 dconv_34_conv_sep_relu dconv_34_conv_dw_conv2d 0=256 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=2304 7=256 64 | BatchNorm dconv_34_conv_dw_batchnorm 1 1 dconv_34_conv_dw_conv2d dconv_34_conv_dw_batchnorm 0=256 
65 | PReLU dconv_34_conv_dw_relu 1 1 dconv_34_conv_dw_batchnorm dconv_34_conv_dw_relu 0=256 66 | Convolution dconv_34_conv_proj_conv2d 1 1 dconv_34_conv_dw_relu dconv_34_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 67 | BatchNorm dconv_34_conv_proj_batchnorm 1 1 dconv_34_conv_proj_conv2d dconv_34_conv_proj_batchnorm 0=128 68 | Split splitncnn_4 1 2 dconv_34_conv_proj_batchnorm dconv_34_conv_proj_batchnorm_splitncnn_0 dconv_34_conv_proj_batchnorm_splitncnn_1 69 | Convolution res_4_block0_conv_sep_conv2d 1 1 dconv_34_conv_proj_batchnorm_splitncnn_1 res_4_block0_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 70 | BatchNorm res_4_block0_conv_sep_batchnorm 1 1 res_4_block0_conv_sep_conv2d res_4_block0_conv_sep_batchnorm 0=256 71 | PReLU res_4_block0_conv_sep_relu 1 1 res_4_block0_conv_sep_batchnorm res_4_block0_conv_sep_relu 0=256 72 | ConvolutionDepthWise res_4_block0_conv_dw_conv2d 1 1 res_4_block0_conv_sep_relu res_4_block0_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 73 | BatchNorm res_4_block0_conv_dw_batchnorm 1 1 res_4_block0_conv_dw_conv2d res_4_block0_conv_dw_batchnorm 0=256 74 | PReLU res_4_block0_conv_dw_relu 1 1 res_4_block0_conv_dw_batchnorm res_4_block0_conv_dw_relu 0=256 75 | Convolution res_4_block0_conv_proj_conv2d 1 1 res_4_block0_conv_dw_relu res_4_block0_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 76 | BatchNorm res_4_block0_conv_proj_batchnorm 1 1 res_4_block0_conv_proj_conv2d res_4_block0_conv_proj_batchnorm 0=128 77 | BinaryOp _plus4 2 1 res_4_block0_conv_proj_batchnorm dconv_34_conv_proj_batchnorm_splitncnn_0 _plus4 0=0 78 | Split splitncnn_5 1 2 _plus4 _plus4_splitncnn_0 _plus4_splitncnn_1 79 | Convolution res_4_block1_conv_sep_conv2d 1 1 _plus4_splitncnn_1 res_4_block1_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 80 | BatchNorm res_4_block1_conv_sep_batchnorm 1 1 res_4_block1_conv_sep_conv2d res_4_block1_conv_sep_batchnorm 0=256 81 | PReLU res_4_block1_conv_sep_relu 1 1 res_4_block1_conv_sep_batchnorm res_4_block1_conv_sep_relu 0=256 82 | ConvolutionDepthWise res_4_block1_conv_dw_conv2d 1 1 res_4_block1_conv_sep_relu res_4_block1_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 83 | BatchNorm res_4_block1_conv_dw_batchnorm 1 1 res_4_block1_conv_dw_conv2d res_4_block1_conv_dw_batchnorm 0=256 84 | PReLU res_4_block1_conv_dw_relu 1 1 res_4_block1_conv_dw_batchnorm res_4_block1_conv_dw_relu 0=256 85 | Convolution res_4_block1_conv_proj_conv2d 1 1 res_4_block1_conv_dw_relu res_4_block1_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 86 | BatchNorm res_4_block1_conv_proj_batchnorm 1 1 res_4_block1_conv_proj_conv2d res_4_block1_conv_proj_batchnorm 0=128 87 | BinaryOp _plus5 2 1 res_4_block1_conv_proj_batchnorm _plus4_splitncnn_0 _plus5 0=0 88 | Split splitncnn_6 1 2 _plus5 _plus5_splitncnn_0 _plus5_splitncnn_1 89 | Convolution res_4_block2_conv_sep_conv2d 1 1 _plus5_splitncnn_1 res_4_block2_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 90 | BatchNorm res_4_block2_conv_sep_batchnorm 1 1 res_4_block2_conv_sep_conv2d res_4_block2_conv_sep_batchnorm 0=256 91 | PReLU res_4_block2_conv_sep_relu 1 1 res_4_block2_conv_sep_batchnorm res_4_block2_conv_sep_relu 0=256 92 | ConvolutionDepthWise res_4_block2_conv_dw_conv2d 1 1 res_4_block2_conv_sep_relu res_4_block2_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 93 | BatchNorm res_4_block2_conv_dw_batchnorm 1 1 res_4_block2_conv_dw_conv2d res_4_block2_conv_dw_batchnorm 0=256 94 | PReLU 
res_4_block2_conv_dw_relu 1 1 res_4_block2_conv_dw_batchnorm res_4_block2_conv_dw_relu 0=256 95 | Convolution res_4_block2_conv_proj_conv2d 1 1 res_4_block2_conv_dw_relu res_4_block2_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 96 | BatchNorm res_4_block2_conv_proj_batchnorm 1 1 res_4_block2_conv_proj_conv2d res_4_block2_conv_proj_batchnorm 0=128 97 | BinaryOp _plus6 2 1 res_4_block2_conv_proj_batchnorm _plus5_splitncnn_0 _plus6 0=0 98 | Split splitncnn_7 1 2 _plus6 _plus6_splitncnn_0 _plus6_splitncnn_1 99 | Convolution res_4_block3_conv_sep_conv2d 1 1 _plus6_splitncnn_1 res_4_block3_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 100 | BatchNorm res_4_block3_conv_sep_batchnorm 1 1 res_4_block3_conv_sep_conv2d res_4_block3_conv_sep_batchnorm 0=256 101 | PReLU res_4_block3_conv_sep_relu 1 1 res_4_block3_conv_sep_batchnorm res_4_block3_conv_sep_relu 0=256 102 | ConvolutionDepthWise res_4_block3_conv_dw_conv2d 1 1 res_4_block3_conv_sep_relu res_4_block3_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 103 | BatchNorm res_4_block3_conv_dw_batchnorm 1 1 res_4_block3_conv_dw_conv2d res_4_block3_conv_dw_batchnorm 0=256 104 | PReLU res_4_block3_conv_dw_relu 1 1 res_4_block3_conv_dw_batchnorm res_4_block3_conv_dw_relu 0=256 105 | Convolution res_4_block3_conv_proj_conv2d 1 1 res_4_block3_conv_dw_relu res_4_block3_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 106 | BatchNorm res_4_block3_conv_proj_batchnorm 1 1 res_4_block3_conv_proj_conv2d res_4_block3_conv_proj_batchnorm 0=128 107 | BinaryOp _plus7 2 1 res_4_block3_conv_proj_batchnorm _plus6_splitncnn_0 _plus7 0=0 108 | Split splitncnn_8 1 2 _plus7 _plus7_splitncnn_0 _plus7_splitncnn_1 109 | Convolution res_4_block4_conv_sep_conv2d 1 1 _plus7_splitncnn_1 res_4_block4_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 110 | BatchNorm res_4_block4_conv_sep_batchnorm 1 1 res_4_block4_conv_sep_conv2d res_4_block4_conv_sep_batchnorm 0=256 111 | PReLU res_4_block4_conv_sep_relu 1 1 res_4_block4_conv_sep_batchnorm res_4_block4_conv_sep_relu 0=256 112 | ConvolutionDepthWise res_4_block4_conv_dw_conv2d 1 1 res_4_block4_conv_sep_relu res_4_block4_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 113 | BatchNorm res_4_block4_conv_dw_batchnorm 1 1 res_4_block4_conv_dw_conv2d res_4_block4_conv_dw_batchnorm 0=256 114 | PReLU res_4_block4_conv_dw_relu 1 1 res_4_block4_conv_dw_batchnorm res_4_block4_conv_dw_relu 0=256 115 | Convolution res_4_block4_conv_proj_conv2d 1 1 res_4_block4_conv_dw_relu res_4_block4_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 116 | BatchNorm res_4_block4_conv_proj_batchnorm 1 1 res_4_block4_conv_proj_conv2d res_4_block4_conv_proj_batchnorm 0=128 117 | BinaryOp _plus8 2 1 res_4_block4_conv_proj_batchnorm _plus7_splitncnn_0 _plus8 0=0 118 | Split splitncnn_9 1 2 _plus8 _plus8_splitncnn_0 _plus8_splitncnn_1 119 | Convolution res_4_block5_conv_sep_conv2d 1 1 _plus8_splitncnn_1 res_4_block5_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 120 | BatchNorm res_4_block5_conv_sep_batchnorm 1 1 res_4_block5_conv_sep_conv2d res_4_block5_conv_sep_batchnorm 0=256 121 | PReLU res_4_block5_conv_sep_relu 1 1 res_4_block5_conv_sep_batchnorm res_4_block5_conv_sep_relu 0=256 122 | ConvolutionDepthWise res_4_block5_conv_dw_conv2d 1 1 res_4_block5_conv_sep_relu res_4_block5_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 123 | BatchNorm res_4_block5_conv_dw_batchnorm 1 1 res_4_block5_conv_dw_conv2d 
res_4_block5_conv_dw_batchnorm 0=256 124 | PReLU res_4_block5_conv_dw_relu 1 1 res_4_block5_conv_dw_batchnorm res_4_block5_conv_dw_relu 0=256 125 | Convolution res_4_block5_conv_proj_conv2d 1 1 res_4_block5_conv_dw_relu res_4_block5_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 126 | BatchNorm res_4_block5_conv_proj_batchnorm 1 1 res_4_block5_conv_proj_conv2d res_4_block5_conv_proj_batchnorm 0=128 127 | BinaryOp _plus9 2 1 res_4_block5_conv_proj_batchnorm _plus8_splitncnn_0 _plus9 0=0 128 | Convolution dconv_45_conv_sep_conv2d 1 1 _plus9 dconv_45_conv_sep_conv2d 0=512 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=65536 129 | BatchNorm dconv_45_conv_sep_batchnorm 1 1 dconv_45_conv_sep_conv2d dconv_45_conv_sep_batchnorm 0=512 130 | PReLU dconv_45_conv_sep_relu 1 1 dconv_45_conv_sep_batchnorm dconv_45_conv_sep_relu 0=512 131 | ConvolutionDepthWise dconv_45_conv_dw_conv2d 1 1 dconv_45_conv_sep_relu dconv_45_conv_dw_conv2d 0=512 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=4608 7=512 132 | BatchNorm dconv_45_conv_dw_batchnorm 1 1 dconv_45_conv_dw_conv2d dconv_45_conv_dw_batchnorm 0=512 133 | PReLU dconv_45_conv_dw_relu 1 1 dconv_45_conv_dw_batchnorm dconv_45_conv_dw_relu 0=512 134 | Convolution dconv_45_conv_proj_conv2d 1 1 dconv_45_conv_dw_relu dconv_45_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=65536 135 | BatchNorm dconv_45_conv_proj_batchnorm 1 1 dconv_45_conv_proj_conv2d dconv_45_conv_proj_batchnorm 0=128 136 | Split splitncnn_10 1 2 dconv_45_conv_proj_batchnorm dconv_45_conv_proj_batchnorm_splitncnn_0 dconv_45_conv_proj_batchnorm_splitncnn_1 137 | Convolution res_5_block0_conv_sep_conv2d 1 1 dconv_45_conv_proj_batchnorm_splitncnn_1 res_5_block0_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 138 | BatchNorm res_5_block0_conv_sep_batchnorm 1 1 res_5_block0_conv_sep_conv2d res_5_block0_conv_sep_batchnorm 0=256 139 | PReLU res_5_block0_conv_sep_relu 1 1 res_5_block0_conv_sep_batchnorm res_5_block0_conv_sep_relu 0=256 140 | ConvolutionDepthWise res_5_block0_conv_dw_conv2d 1 1 res_5_block0_conv_sep_relu res_5_block0_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 141 | BatchNorm res_5_block0_conv_dw_batchnorm 1 1 res_5_block0_conv_dw_conv2d res_5_block0_conv_dw_batchnorm 0=256 142 | PReLU res_5_block0_conv_dw_relu 1 1 res_5_block0_conv_dw_batchnorm res_5_block0_conv_dw_relu 0=256 143 | Convolution res_5_block0_conv_proj_conv2d 1 1 res_5_block0_conv_dw_relu res_5_block0_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 144 | BatchNorm res_5_block0_conv_proj_batchnorm 1 1 res_5_block0_conv_proj_conv2d res_5_block0_conv_proj_batchnorm 0=128 145 | BinaryOp _plus10 2 1 res_5_block0_conv_proj_batchnorm dconv_45_conv_proj_batchnorm_splitncnn_0 _plus10 0=0 146 | Split splitncnn_11 1 2 _plus10 _plus10_splitncnn_0 _plus10_splitncnn_1 147 | Convolution res_5_block1_conv_sep_conv2d 1 1 _plus10_splitncnn_1 res_5_block1_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 148 | BatchNorm res_5_block1_conv_sep_batchnorm 1 1 res_5_block1_conv_sep_conv2d res_5_block1_conv_sep_batchnorm 0=256 149 | PReLU res_5_block1_conv_sep_relu 1 1 res_5_block1_conv_sep_batchnorm res_5_block1_conv_sep_relu 0=256 150 | ConvolutionDepthWise res_5_block1_conv_dw_conv2d 1 1 res_5_block1_conv_sep_relu res_5_block1_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256 151 | BatchNorm res_5_block1_conv_dw_batchnorm 1 1 res_5_block1_conv_dw_conv2d res_5_block1_conv_dw_batchnorm 0=256 152 | PReLU res_5_block1_conv_dw_relu 1 1 res_5_block1_conv_dw_batchnorm 
res_5_block1_conv_dw_relu 0=256 153 | Convolution res_5_block1_conv_proj_conv2d 1 1 res_5_block1_conv_dw_relu res_5_block1_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768 154 | BatchNorm res_5_block1_conv_proj_batchnorm 1 1 res_5_block1_conv_proj_conv2d res_5_block1_conv_proj_batchnorm 0=128 155 | BinaryOp _plus11 2 1 res_5_block1_conv_proj_batchnorm _plus10_splitncnn_0 _plus11 0=0 156 | Convolution conv_6sep_conv2d 1 1 _plus11 conv_6sep_conv2d 0=512 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=65536 157 | BatchNorm conv_6sep_batchnorm 1 1 conv_6sep_conv2d conv_6sep_batchnorm 0=512 158 | PReLU conv_6sep_relu 1 1 conv_6sep_batchnorm conv_6sep_relu 0=512 159 | ConvolutionDepthWise conv_6dw7_7_conv2d 1 1 conv_6sep_relu conv_6dw7_7_conv2d 0=512 1=7 11=7 3=1 13=1 4=0 14=0 5=0 6=25088 7=512 160 | BatchNorm conv_6dw7_7_batchnorm 1 1 conv_6dw7_7_conv2d conv_6dw7_7_batchnorm 0=512 161 | InnerProduct pre_fc1 1 1 conv_6dw7_7_batchnorm pre_fc1 0=128 1=1 2=65536 162 | BatchNorm fc1 1 1 pre_fc1 fc1 0=128 163 | -------------------------------------------------------------------------------- /license.txt: -------------------------------------------------------------------------------- 1 | H/Fs6Zgbsi9av6VVDAi54yqpYxnq0eDV3MSZAxMnARvUVePNY85UJu3d95nM7iO2RrCm19/eq+qb 2 | gSDmhJRYVJBMEUcxG+0cPPWVAW7m46dfS1Kpn+Flqbanfbco+Hd9Uda3aAzDkklzgdfYt7TvSXRt 3 | LZ8wW7jLiPjt8Lufj1GvhRzfESARv18VrxfQV+U8x3EqqvfKTJrkkg91NuAKvUZSoao4B5pQLpRd 4 | GwQ/saP9AQSWuyU1Zw+Whw/cnmXY2xZLGx6n/ict3NW9vpttv2tBbPCe/TdofRuJbE7R1Yb60BvQ 5 | ajzoaQWx3RsRgca9ah+Pccxb15tPVzr1apTK7A== 6 | --------------------------------------------------------------------------------