├── FaceAttribute-Bridging-Header.h ├── FaceAttribute.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ ├── xcshareddata │ │ └── IDEWorkspaceChecks.plist │ └── xcuserdata │ │ └── user.xcuserdatad │ │ └── UserInterfaceState.xcuserstate └── xcuserdata │ └── user.xcuserdatad │ ├── xcdebugger │ └── Breakpoints_v2.xcbkptlist │ └── xcschemes │ └── xcschememanagement.plist ├── FaceAttribute ├── .DS_Store ├── AboutViewController.swift ├── AppDelegate.swift ├── Assets.xcassets │ ├── AccentColor.colorset │ │ └── Contents.json │ ├── AppIcon.appiconset │ │ ├── Contents.json │ │ └── appstore.png │ ├── Contents.json │ ├── clr_bg.colorset │ │ └── Contents.json │ ├── clr_main_button_bg1.colorset │ │ └── Contents.json │ ├── clr_main_button_bg2.colorset │ │ └── Contents.json │ ├── clr_roi_circle.colorset │ │ └── Contents.json │ ├── clr_roi_line.colorset │ │ └── Contents.json │ ├── clr_text.colorset │ │ └── Contents.json │ ├── clr_toast_bg.colorset │ │ └── Contents.json │ ├── ic_github.imageset │ │ ├── Contents.json │ │ ├── ic_github 1.png │ │ ├── ic_github 2.png │ │ └── ic_github.png │ ├── ic_kby.imageset │ │ ├── Contents.json │ │ ├── ic_kby 1.png │ │ ├── ic_kby 2.png │ │ └── ic_kby.png │ ├── ic_skype.imageset │ │ ├── Contents.json │ │ ├── ic_skype 1.png │ │ ├── ic_skype 2.png │ │ └── ic_skype.png │ ├── ic_telegram.imageset │ │ ├── Contents.json │ │ ├── ic_telegram 1.png │ │ ├── ic_telegram 2.png │ │ └── ic_telegram.png │ └── ic_whatsapp.imageset │ │ ├── Contents.json │ │ ├── ic_whatsapp 1.png │ │ ├── ic_whatsapp 2.png │ │ └── ic_whatsapp.png ├── AttributeViewController.swift ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── CameraViewController.swift ├── CaptureViewController.swift ├── FaceAttribute.xcdatamodeld │ ├── .xccurrentversion │ └── FaceRecognition.xcdatamodel │ │ └── contents ├── FaceView.swift ├── Info.plist ├── Model.xcdatamodeld │ └── Model.xcdatamodel │ │ └── contents ├── PersonViewCell.swift ├── SceneDelegate.swift ├── SettingsViewController.swift ├── ToastView.swift ├── UIImageExtension.swift └── ViewController.swift ├── FaceAttributeTests └── FaceAttributeTests.swift ├── FaceAttributeUITests ├── FaceAttributeUITests.swift └── FaceAttributeUITestsLaunchTests.swift ├── README.md ├── __MACOSX ├── ._FaceAttribute-Bridging-Header.h ├── ._FaceAttribute.xcodeproj ├── FaceAttribute.xcodeproj │ └── ._project.xcworkspace ├── FaceAttribute │ ├── ._.DS_Store │ ├── ._AboutViewController.swift │ ├── ._AppDelegate.swift │ ├── ._AttributeViewController.swift │ ├── ._CameraViewController.swift │ ├── ._CaptureViewController.swift │ ├── ._FaceView.swift │ ├── ._Info.plist │ ├── ._PersonViewCell.swift │ ├── ._SceneDelegate.swift │ ├── ._SettingsViewController.swift │ ├── ._ToastView.swift │ ├── ._UIImageExtension.swift │ ├── ._ViewController.swift │ ├── Assets.xcassets │ │ └── AccentColor.colorset │ │ │ └── ._Contents.json │ ├── Base.lproj │ │ ├── ._LaunchScreen.storyboard │ │ └── ._Main.storyboard │ ├── FaceAttribute.xcdatamodeld │ │ ├── ._FaceRecognition.xcdatamodel │ │ └── FaceRecognition.xcdatamodel │ │ │ └── ._contents │ └── Model.xcdatamodeld │ │ └── ._Model.xcdatamodel ├── FaceAttributeTests │ └── ._FaceAttributeTests.swift └── FaceAttributeUITests │ ├── ._FaceAttributeUITests.swift │ └── ._FaceAttributeUITestsLaunchTests.swift ├── facesdk.framework ├── .DS_Store ├── Headers │ ├── facesdk.h │ └── facesdk_api.h ├── Info.plist ├── Modules │ └── module.modulemap ├── _CodeSignature │ └── CodeResources ├── ag.bin ├── detection.bin ├── detection.param 
├── ec.bin ├── ec.param.bin ├── facesdk ├── landmark.bin ├── landmark.param ├── liveness.bin ├── occ.bin ├── recognize.bin └── recognize.param └── privacy /FaceAttribute-Bridging-Header.h: -------------------------------------------------------------------------------- 1 | // 2 | // Use this file to import your target's public headers that you would like to expose to Swift. 3 | // 4 | #import "facesdk/facesdk.h" 5 | -------------------------------------------------------------------------------- /FaceAttribute.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 56; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | CE08D25A29ED8D4F0071F025 /* ToastView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE08D25929ED8D4F0071F025 /* ToastView.swift */; }; 11 | CE1C76A72A0960F900CB9CE3 /* AttributeViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE1C76A62A0960F900CB9CE3 /* AttributeViewController.swift */; }; 12 | CE29C50529ECD0A5008EDB5A /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C50429ECD0A5008EDB5A /* AppDelegate.swift */; }; 13 | CE29C50929ECD0A5008EDB5A /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C50829ECD0A5008EDB5A /* ViewController.swift */; }; 14 | CE29C50C29ECD0A5008EDB5A /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = CE29C50A29ECD0A5008EDB5A /* Main.storyboard */; }; 15 | CE29C50F29ECD0A5008EDB5A /* FaceAttribute.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = CE29C50D29ECD0A5008EDB5A /* FaceAttribute.xcdatamodeld */; }; 16 | CE29C51129ECD0FB008EDB5A /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = CE29C51029ECD0FB008EDB5A /* Assets.xcassets */; }; 17 | CE29C51429ECD0FB008EDB5A /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = CE29C51229ECD0FB008EDB5A /* LaunchScreen.storyboard */; }; 18 | CE29C51F29ECD0FC008EDB5A /* FaceAttributeTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C51E29ECD0FC008EDB5A /* FaceAttributeTests.swift */; }; 19 | CE29C52929ECD0FC008EDB5A /* FaceAttributeUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C52829ECD0FC008EDB5A /* FaceAttributeUITests.swift */; }; 20 | CE29C52B29ECD0FC008EDB5A /* FaceAttributeUITestsLaunchTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C52A29ECD0FC008EDB5A /* FaceAttributeUITestsLaunchTests.swift */; }; 21 | CE29C53929ECE135008EDB5A /* facesdk.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = CE29C53829ECE134008EDB5A /* facesdk.framework */; }; 22 | CE29C53A29ECE135008EDB5A /* facesdk.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = CE29C53829ECE134008EDB5A /* facesdk.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; }; 23 | CE29C54129ECE15C008EDB5A /* AboutViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C53C29ECE157008EDB5A /* AboutViewController.swift */; }; 24 | CE29C54229ECE15C008EDB5A /* SettingsViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C53D29ECE158008EDB5A /* SettingsViewController.swift */; }; 25 | CE29C54329ECE15C008EDB5A /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C53E29ECE159008EDB5A /* CameraViewController.swift */; }; 26 | CE29C54429ECE15C008EDB5A /* UIImageExtension.swift in Sources */ = {isa = PBXBuildFile; 
fileRef = CE29C53F29ECE15A008EDB5A /* UIImageExtension.swift */; }; 27 | CE29C54529ECE15C008EDB5A /* FaceView.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C54029ECE15B008EDB5A /* FaceView.swift */; }; 28 | CE29C54829ED6A0C008EDB5A /* Model.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = CE29C54629ED6A0C008EDB5A /* Model.xcdatamodeld */; }; 29 | CE29C54A29ED7527008EDB5A /* PersonViewCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = CE29C54929ED7527008EDB5A /* PersonViewCell.swift */; }; 30 | CEF44D702A05404A002AD362 /* CaptureViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = CEF44D6F2A05404A002AD362 /* CaptureViewController.swift */; }; 31 | /* End PBXBuildFile section */ 32 | 33 | /* Begin PBXContainerItemProxy section */ 34 | CE29C51B29ECD0FC008EDB5A /* PBXContainerItemProxy */ = { 35 | isa = PBXContainerItemProxy; 36 | containerPortal = CE29C4F929ECD0A5008EDB5A /* Project object */; 37 | proxyType = 1; 38 | remoteGlobalIDString = CE29C50029ECD0A5008EDB5A; 39 | remoteInfo = FaceRecognition; 40 | }; 41 | CE29C52529ECD0FC008EDB5A /* PBXContainerItemProxy */ = { 42 | isa = PBXContainerItemProxy; 43 | containerPortal = CE29C4F929ECD0A5008EDB5A /* Project object */; 44 | proxyType = 1; 45 | remoteGlobalIDString = CE29C50029ECD0A5008EDB5A; 46 | remoteInfo = FaceRecognition; 47 | }; 48 | /* End PBXContainerItemProxy section */ 49 | 50 | /* Begin PBXCopyFilesBuildPhase section */ 51 | CE29C53B29ECE136008EDB5A /* Embed Frameworks */ = { 52 | isa = PBXCopyFilesBuildPhase; 53 | buildActionMask = 2147483647; 54 | dstPath = ""; 55 | dstSubfolderSpec = 10; 56 | files = ( 57 | CE29C53A29ECE135008EDB5A /* facesdk.framework in Embed Frameworks */, 58 | ); 59 | name = "Embed Frameworks"; 60 | runOnlyForDeploymentPostprocessing = 0; 61 | }; 62 | /* End PBXCopyFilesBuildPhase section */ 63 | 64 | /* Begin PBXFileReference section */ 65 | CE08D25929ED8D4F0071F025 /* ToastView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ToastView.swift; sourceTree = ""; }; 66 | CE1C76A62A0960F900CB9CE3 /* AttributeViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AttributeViewController.swift; sourceTree = ""; }; 67 | CE29C50129ECD0A5008EDB5A /* FaceAttribute.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FaceAttribute.app; sourceTree = BUILT_PRODUCTS_DIR; }; 68 | CE29C50429ECD0A5008EDB5A /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 69 | CE29C50829ECD0A5008EDB5A /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 70 | CE29C50B29ECD0A5008EDB5A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 71 | CE29C50E29ECD0A5008EDB5A /* FaceRecognition.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = FaceRecognition.xcdatamodel; sourceTree = ""; }; 72 | CE29C51029ECD0FB008EDB5A /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 73 | CE29C51329ECD0FB008EDB5A /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 74 | CE29C51529ECD0FB008EDB5A /* Info.plist 
*/ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 75 | CE29C51A29ECD0FC008EDB5A /* FaceAttributeTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceAttributeTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 76 | CE29C51E29ECD0FC008EDB5A /* FaceAttributeTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceAttributeTests.swift; sourceTree = ""; }; 77 | CE29C52429ECD0FC008EDB5A /* FaceAttributeUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceAttributeUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 78 | CE29C52829ECD0FC008EDB5A /* FaceAttributeUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceAttributeUITests.swift; sourceTree = ""; }; 79 | CE29C52A29ECD0FC008EDB5A /* FaceAttributeUITestsLaunchTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceAttributeUITestsLaunchTests.swift; sourceTree = ""; }; 80 | CE29C53829ECE134008EDB5A /* facesdk.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = facesdk.framework; sourceTree = ""; }; 81 | CE29C53C29ECE157008EDB5A /* AboutViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AboutViewController.swift; sourceTree = ""; }; 82 | CE29C53D29ECE158008EDB5A /* SettingsViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SettingsViewController.swift; sourceTree = ""; }; 83 | CE29C53E29ECE159008EDB5A /* CameraViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = ""; }; 84 | CE29C53F29ECE15A008EDB5A /* UIImageExtension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = UIImageExtension.swift; sourceTree = ""; }; 85 | CE29C54029ECE15B008EDB5A /* FaceView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FaceView.swift; sourceTree = ""; }; 86 | CE29C54729ED6A0C008EDB5A /* Model.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = Model.xcdatamodel; sourceTree = ""; }; 87 | CE29C54929ED7527008EDB5A /* PersonViewCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PersonViewCell.swift; sourceTree = ""; }; 88 | CEF44D6F2A05404A002AD362 /* CaptureViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CaptureViewController.swift; sourceTree = ""; }; 89 | /* End PBXFileReference section */ 90 | 91 | /* Begin PBXFrameworksBuildPhase section */ 92 | CE29C4FE29ECD0A5008EDB5A /* Frameworks */ = { 93 | isa = PBXFrameworksBuildPhase; 94 | buildActionMask = 2147483647; 95 | files = ( 96 | CE29C53929ECE135008EDB5A /* facesdk.framework in Frameworks */, 97 | ); 98 | runOnlyForDeploymentPostprocessing = 0; 99 | }; 100 | CE29C51729ECD0FC008EDB5A /* Frameworks */ = { 101 | isa = PBXFrameworksBuildPhase; 102 | buildActionMask = 2147483647; 103 | files = ( 104 | ); 105 | runOnlyForDeploymentPostprocessing = 0; 106 | }; 107 | CE29C52129ECD0FC008EDB5A /* Frameworks */ = { 108 | isa = PBXFrameworksBuildPhase; 109 | buildActionMask = 2147483647; 110 | files = ( 111 | ); 112 | runOnlyForDeploymentPostprocessing = 0; 
113 | }; 114 | /* End PBXFrameworksBuildPhase section */ 115 | 116 | /* Begin PBXGroup section */ 117 | CE29C4F829ECD0A4008EDB5A = { 118 | isa = PBXGroup; 119 | children = ( 120 | CE29C50329ECD0A5008EDB5A /* FaceAttribute */, 121 | CE29C51D29ECD0FC008EDB5A /* FaceAttributeTests */, 122 | CE29C52729ECD0FC008EDB5A /* FaceAttributeUITests */, 123 | CE29C50229ECD0A5008EDB5A /* Products */, 124 | CE29C53729ECE131008EDB5A /* Frameworks */, 125 | ); 126 | sourceTree = ""; 127 | }; 128 | CE29C50229ECD0A5008EDB5A /* Products */ = { 129 | isa = PBXGroup; 130 | children = ( 131 | CE29C50129ECD0A5008EDB5A /* FaceAttribute.app */, 132 | CE29C51A29ECD0FC008EDB5A /* FaceAttributeTests.xctest */, 133 | CE29C52429ECD0FC008EDB5A /* FaceAttributeUITests.xctest */, 134 | ); 135 | name = Products; 136 | sourceTree = ""; 137 | }; 138 | CE29C50329ECD0A5008EDB5A /* FaceAttribute */ = { 139 | isa = PBXGroup; 140 | children = ( 141 | CE29C50429ECD0A5008EDB5A /* AppDelegate.swift */, 142 | CE08D25929ED8D4F0071F025 /* ToastView.swift */, 143 | CEF44D6F2A05404A002AD362 /* CaptureViewController.swift */, 144 | CE29C50829ECD0A5008EDB5A /* ViewController.swift */, 145 | CE29C54929ED7527008EDB5A /* PersonViewCell.swift */, 146 | CE29C54629ED6A0C008EDB5A /* Model.xcdatamodeld */, 147 | CE29C53C29ECE157008EDB5A /* AboutViewController.swift */, 148 | CE1C76A62A0960F900CB9CE3 /* AttributeViewController.swift */, 149 | CE29C53E29ECE159008EDB5A /* CameraViewController.swift */, 150 | CE29C54029ECE15B008EDB5A /* FaceView.swift */, 151 | CE29C53D29ECE158008EDB5A /* SettingsViewController.swift */, 152 | CE29C53F29ECE15A008EDB5A /* UIImageExtension.swift */, 153 | CE29C50A29ECD0A5008EDB5A /* Main.storyboard */, 154 | CE29C51029ECD0FB008EDB5A /* Assets.xcassets */, 155 | CE29C51229ECD0FB008EDB5A /* LaunchScreen.storyboard */, 156 | CE29C51529ECD0FB008EDB5A /* Info.plist */, 157 | CE29C50D29ECD0A5008EDB5A /* FaceAttribute.xcdatamodeld */, 158 | ); 159 | path = FaceAttribute; 160 | sourceTree = ""; 161 | }; 162 | CE29C51D29ECD0FC008EDB5A /* FaceAttributeTests */ = { 163 | isa = PBXGroup; 164 | children = ( 165 | CE29C51E29ECD0FC008EDB5A /* FaceAttributeTests.swift */, 166 | ); 167 | path = FaceAttributeTests; 168 | sourceTree = ""; 169 | }; 170 | CE29C52729ECD0FC008EDB5A /* FaceAttributeUITests */ = { 171 | isa = PBXGroup; 172 | children = ( 173 | CE29C52829ECD0FC008EDB5A /* FaceAttributeUITests.swift */, 174 | CE29C52A29ECD0FC008EDB5A /* FaceAttributeUITestsLaunchTests.swift */, 175 | ); 176 | path = FaceAttributeUITests; 177 | sourceTree = ""; 178 | }; 179 | CE29C53729ECE131008EDB5A /* Frameworks */ = { 180 | isa = PBXGroup; 181 | children = ( 182 | CE29C53829ECE134008EDB5A /* facesdk.framework */, 183 | ); 184 | name = Frameworks; 185 | sourceTree = ""; 186 | }; 187 | /* End PBXGroup section */ 188 | 189 | /* Begin PBXNativeTarget section */ 190 | CE29C50029ECD0A5008EDB5A /* FaceAttribute */ = { 191 | isa = PBXNativeTarget; 192 | buildConfigurationList = CE29C52E29ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceAttribute" */; 193 | buildPhases = ( 194 | CE29C4FD29ECD0A5008EDB5A /* Sources */, 195 | CE29C4FE29ECD0A5008EDB5A /* Frameworks */, 196 | CE29C4FF29ECD0A5008EDB5A /* Resources */, 197 | CE29C53B29ECE136008EDB5A /* Embed Frameworks */, 198 | ); 199 | buildRules = ( 200 | ); 201 | dependencies = ( 202 | ); 203 | name = FaceAttribute; 204 | productName = FaceRecognition; 205 | productReference = CE29C50129ECD0A5008EDB5A /* FaceAttribute.app */; 206 | productType = "com.apple.product-type.application"; 207 | 
}; 208 | CE29C51929ECD0FC008EDB5A /* FaceAttributeTests */ = { 209 | isa = PBXNativeTarget; 210 | buildConfigurationList = CE29C53129ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceAttributeTests" */; 211 | buildPhases = ( 212 | CE29C51629ECD0FC008EDB5A /* Sources */, 213 | CE29C51729ECD0FC008EDB5A /* Frameworks */, 214 | CE29C51829ECD0FC008EDB5A /* Resources */, 215 | ); 216 | buildRules = ( 217 | ); 218 | dependencies = ( 219 | CE29C51C29ECD0FC008EDB5A /* PBXTargetDependency */, 220 | ); 221 | name = FaceAttributeTests; 222 | productName = FaceRecognitionTests; 223 | productReference = CE29C51A29ECD0FC008EDB5A /* FaceAttributeTests.xctest */; 224 | productType = "com.apple.product-type.bundle.unit-test"; 225 | }; 226 | CE29C52329ECD0FC008EDB5A /* FaceAttributeUITests */ = { 227 | isa = PBXNativeTarget; 228 | buildConfigurationList = CE29C53429ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceAttributeUITests" */; 229 | buildPhases = ( 230 | CE29C52029ECD0FC008EDB5A /* Sources */, 231 | CE29C52129ECD0FC008EDB5A /* Frameworks */, 232 | CE29C52229ECD0FC008EDB5A /* Resources */, 233 | ); 234 | buildRules = ( 235 | ); 236 | dependencies = ( 237 | CE29C52629ECD0FC008EDB5A /* PBXTargetDependency */, 238 | ); 239 | name = FaceAttributeUITests; 240 | productName = FaceRecognitionUITests; 241 | productReference = CE29C52429ECD0FC008EDB5A /* FaceAttributeUITests.xctest */; 242 | productType = "com.apple.product-type.bundle.ui-testing"; 243 | }; 244 | /* End PBXNativeTarget section */ 245 | 246 | /* Begin PBXProject section */ 247 | CE29C4F929ECD0A5008EDB5A /* Project object */ = { 248 | isa = PBXProject; 249 | attributes = { 250 | BuildIndependentTargetsInParallel = 1; 251 | LastSwiftUpdateCheck = 1420; 252 | LastUpgradeCheck = 1420; 253 | TargetAttributes = { 254 | CE29C50029ECD0A5008EDB5A = { 255 | CreatedOnToolsVersion = 14.2; 256 | }; 257 | CE29C51929ECD0FC008EDB5A = { 258 | CreatedOnToolsVersion = 14.2; 259 | TestTargetID = CE29C50029ECD0A5008EDB5A; 260 | }; 261 | CE29C52329ECD0FC008EDB5A = { 262 | CreatedOnToolsVersion = 14.2; 263 | TestTargetID = CE29C50029ECD0A5008EDB5A; 264 | }; 265 | }; 266 | }; 267 | buildConfigurationList = CE29C4FC29ECD0A5008EDB5A /* Build configuration list for PBXProject "FaceAttribute" */; 268 | compatibilityVersion = "Xcode 14.0"; 269 | developmentRegion = en; 270 | hasScannedForEncodings = 0; 271 | knownRegions = ( 272 | en, 273 | Base, 274 | ); 275 | mainGroup = CE29C4F829ECD0A4008EDB5A; 276 | productRefGroup = CE29C50229ECD0A5008EDB5A /* Products */; 277 | projectDirPath = ""; 278 | projectRoot = ""; 279 | targets = ( 280 | CE29C50029ECD0A5008EDB5A /* FaceAttribute */, 281 | CE29C51929ECD0FC008EDB5A /* FaceAttributeTests */, 282 | CE29C52329ECD0FC008EDB5A /* FaceAttributeUITests */, 283 | ); 284 | }; 285 | /* End PBXProject section */ 286 | 287 | /* Begin PBXResourcesBuildPhase section */ 288 | CE29C4FF29ECD0A5008EDB5A /* Resources */ = { 289 | isa = PBXResourcesBuildPhase; 290 | buildActionMask = 2147483647; 291 | files = ( 292 | CE29C51429ECD0FB008EDB5A /* LaunchScreen.storyboard in Resources */, 293 | CE29C51129ECD0FB008EDB5A /* Assets.xcassets in Resources */, 294 | CE29C50C29ECD0A5008EDB5A /* Main.storyboard in Resources */, 295 | ); 296 | runOnlyForDeploymentPostprocessing = 0; 297 | }; 298 | CE29C51829ECD0FC008EDB5A /* Resources */ = { 299 | isa = PBXResourcesBuildPhase; 300 | buildActionMask = 2147483647; 301 | files = ( 302 | ); 303 | runOnlyForDeploymentPostprocessing = 0; 304 | }; 305 | CE29C52229ECD0FC008EDB5A /* 
Resources */ = { 306 | isa = PBXResourcesBuildPhase; 307 | buildActionMask = 2147483647; 308 | files = ( 309 | ); 310 | runOnlyForDeploymentPostprocessing = 0; 311 | }; 312 | /* End PBXResourcesBuildPhase section */ 313 | 314 | /* Begin PBXSourcesBuildPhase section */ 315 | CE29C4FD29ECD0A5008EDB5A /* Sources */ = { 316 | isa = PBXSourcesBuildPhase; 317 | buildActionMask = 2147483647; 318 | files = ( 319 | CE29C50F29ECD0A5008EDB5A /* FaceAttribute.xcdatamodeld in Sources */, 320 | CE1C76A72A0960F900CB9CE3 /* AttributeViewController.swift in Sources */, 321 | CE29C50929ECD0A5008EDB5A /* ViewController.swift in Sources */, 322 | CE29C50529ECD0A5008EDB5A /* AppDelegate.swift in Sources */, 323 | CE29C54429ECE15C008EDB5A /* UIImageExtension.swift in Sources */, 324 | CE08D25A29ED8D4F0071F025 /* ToastView.swift in Sources */, 325 | CE29C54129ECE15C008EDB5A /* AboutViewController.swift in Sources */, 326 | CE29C54329ECE15C008EDB5A /* CameraViewController.swift in Sources */, 327 | CEF44D702A05404A002AD362 /* CaptureViewController.swift in Sources */, 328 | CE29C54829ED6A0C008EDB5A /* Model.xcdatamodeld in Sources */, 329 | CE29C54229ECE15C008EDB5A /* SettingsViewController.swift in Sources */, 330 | CE29C54A29ED7527008EDB5A /* PersonViewCell.swift in Sources */, 331 | CE29C54529ECE15C008EDB5A /* FaceView.swift in Sources */, 332 | ); 333 | runOnlyForDeploymentPostprocessing = 0; 334 | }; 335 | CE29C51629ECD0FC008EDB5A /* Sources */ = { 336 | isa = PBXSourcesBuildPhase; 337 | buildActionMask = 2147483647; 338 | files = ( 339 | CE29C51F29ECD0FC008EDB5A /* FaceAttributeTests.swift in Sources */, 340 | ); 341 | runOnlyForDeploymentPostprocessing = 0; 342 | }; 343 | CE29C52029ECD0FC008EDB5A /* Sources */ = { 344 | isa = PBXSourcesBuildPhase; 345 | buildActionMask = 2147483647; 346 | files = ( 347 | CE29C52929ECD0FC008EDB5A /* FaceAttributeUITests.swift in Sources */, 348 | CE29C52B29ECD0FC008EDB5A /* FaceAttributeUITestsLaunchTests.swift in Sources */, 349 | ); 350 | runOnlyForDeploymentPostprocessing = 0; 351 | }; 352 | /* End PBXSourcesBuildPhase section */ 353 | 354 | /* Begin PBXTargetDependency section */ 355 | CE29C51C29ECD0FC008EDB5A /* PBXTargetDependency */ = { 356 | isa = PBXTargetDependency; 357 | target = CE29C50029ECD0A5008EDB5A /* FaceAttribute */; 358 | targetProxy = CE29C51B29ECD0FC008EDB5A /* PBXContainerItemProxy */; 359 | }; 360 | CE29C52629ECD0FC008EDB5A /* PBXTargetDependency */ = { 361 | isa = PBXTargetDependency; 362 | target = CE29C50029ECD0A5008EDB5A /* FaceAttribute */; 363 | targetProxy = CE29C52529ECD0FC008EDB5A /* PBXContainerItemProxy */; 364 | }; 365 | /* End PBXTargetDependency section */ 366 | 367 | /* Begin PBXVariantGroup section */ 368 | CE29C50A29ECD0A5008EDB5A /* Main.storyboard */ = { 369 | isa = PBXVariantGroup; 370 | children = ( 371 | CE29C50B29ECD0A5008EDB5A /* Base */, 372 | ); 373 | name = Main.storyboard; 374 | sourceTree = ""; 375 | }; 376 | CE29C51229ECD0FB008EDB5A /* LaunchScreen.storyboard */ = { 377 | isa = PBXVariantGroup; 378 | children = ( 379 | CE29C51329ECD0FB008EDB5A /* Base */, 380 | ); 381 | name = LaunchScreen.storyboard; 382 | sourceTree = ""; 383 | }; 384 | /* End PBXVariantGroup section */ 385 | 386 | /* Begin XCBuildConfiguration section */ 387 | CE29C52C29ECD0FC008EDB5A /* Debug */ = { 388 | isa = XCBuildConfiguration; 389 | buildSettings = { 390 | ALWAYS_SEARCH_USER_PATHS = NO; 391 | CLANG_ANALYZER_NONNULL = YES; 392 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 393 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 394 | 
CLANG_ENABLE_MODULES = YES; 395 | CLANG_ENABLE_OBJC_ARC = YES; 396 | CLANG_ENABLE_OBJC_WEAK = YES; 397 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 398 | CLANG_WARN_BOOL_CONVERSION = YES; 399 | CLANG_WARN_COMMA = YES; 400 | CLANG_WARN_CONSTANT_CONVERSION = YES; 401 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 402 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 403 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 404 | CLANG_WARN_EMPTY_BODY = YES; 405 | CLANG_WARN_ENUM_CONVERSION = YES; 406 | CLANG_WARN_INFINITE_RECURSION = YES; 407 | CLANG_WARN_INT_CONVERSION = YES; 408 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 409 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 410 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 411 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 412 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 413 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 414 | CLANG_WARN_STRICT_PROTOTYPES = YES; 415 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 416 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 417 | CLANG_WARN_UNREACHABLE_CODE = YES; 418 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 419 | COPY_PHASE_STRIP = NO; 420 | DEBUG_INFORMATION_FORMAT = dwarf; 421 | ENABLE_STRICT_OBJC_MSGSEND = YES; 422 | ENABLE_TESTABILITY = YES; 423 | GCC_C_LANGUAGE_STANDARD = gnu11; 424 | GCC_DYNAMIC_NO_PIC = NO; 425 | GCC_NO_COMMON_BLOCKS = YES; 426 | GCC_OPTIMIZATION_LEVEL = 0; 427 | GCC_PREPROCESSOR_DEFINITIONS = ( 428 | "DEBUG=1", 429 | "$(inherited)", 430 | ); 431 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 432 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 433 | GCC_WARN_UNDECLARED_SELECTOR = YES; 434 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 435 | GCC_WARN_UNUSED_FUNCTION = YES; 436 | GCC_WARN_UNUSED_VARIABLE = YES; 437 | IPHONEOS_DEPLOYMENT_TARGET = 16.2; 438 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 439 | MTL_FAST_MATH = YES; 440 | ONLY_ACTIVE_ARCH = YES; 441 | SDKROOT = iphoneos; 442 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 443 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 444 | }; 445 | name = Debug; 446 | }; 447 | CE29C52D29ECD0FC008EDB5A /* Release */ = { 448 | isa = XCBuildConfiguration; 449 | buildSettings = { 450 | ALWAYS_SEARCH_USER_PATHS = NO; 451 | CLANG_ANALYZER_NONNULL = YES; 452 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 453 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 454 | CLANG_ENABLE_MODULES = YES; 455 | CLANG_ENABLE_OBJC_ARC = YES; 456 | CLANG_ENABLE_OBJC_WEAK = YES; 457 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 458 | CLANG_WARN_BOOL_CONVERSION = YES; 459 | CLANG_WARN_COMMA = YES; 460 | CLANG_WARN_CONSTANT_CONVERSION = YES; 461 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 462 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 463 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 464 | CLANG_WARN_EMPTY_BODY = YES; 465 | CLANG_WARN_ENUM_CONVERSION = YES; 466 | CLANG_WARN_INFINITE_RECURSION = YES; 467 | CLANG_WARN_INT_CONVERSION = YES; 468 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 469 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 470 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 471 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 472 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 473 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 474 | CLANG_WARN_STRICT_PROTOTYPES = YES; 475 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 476 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 477 | CLANG_WARN_UNREACHABLE_CODE = YES; 478 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 479 | COPY_PHASE_STRIP = NO; 480 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 481 | ENABLE_NS_ASSERTIONS = NO; 
482 | ENABLE_STRICT_OBJC_MSGSEND = YES; 483 | GCC_C_LANGUAGE_STANDARD = gnu11; 484 | GCC_NO_COMMON_BLOCKS = YES; 485 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 486 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 487 | GCC_WARN_UNDECLARED_SELECTOR = YES; 488 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 489 | GCC_WARN_UNUSED_FUNCTION = YES; 490 | GCC_WARN_UNUSED_VARIABLE = YES; 491 | IPHONEOS_DEPLOYMENT_TARGET = 16.2; 492 | MTL_ENABLE_DEBUG_INFO = NO; 493 | MTL_FAST_MATH = YES; 494 | SDKROOT = iphoneos; 495 | SWIFT_COMPILATION_MODE = wholemodule; 496 | SWIFT_OPTIMIZATION_LEVEL = "-O"; 497 | VALIDATE_PRODUCT = YES; 498 | }; 499 | name = Release; 500 | }; 501 | CE29C52F29ECD0FC008EDB5A /* Debug */ = { 502 | isa = XCBuildConfiguration; 503 | buildSettings = { 504 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 505 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 506 | CODE_SIGN_STYLE = Automatic; 507 | CURRENT_PROJECT_VERSION = 1; 508 | DEVELOPMENT_TEAM = 6G7RACBX72; 509 | FRAMEWORK_SEARCH_PATHS = ( 510 | "$(inherited)", 511 | "$(PROJECT_DIR)", 512 | ); 513 | GENERATE_INFOPLIST_FILE = YES; 514 | INFOPLIST_FILE = FaceAttribute/Info.plist; 515 | INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.productivity"; 516 | INFOPLIST_KEY_NSCameraUsageDescription = "The application utilizes the camera for performing facial recognition.\n\n\n\n\nThe app utilizes the camera for performing facial recognition."; 517 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 518 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; 519 | INFOPLIST_KEY_UIMainStoryboardFile = Main; 520 | INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait; 521 | IPHONEOS_DEPLOYMENT_TARGET = 12.0; 522 | LD_RUNPATH_SEARCH_PATHS = ( 523 | "$(inherited)", 524 | "@executable_path/Frameworks", 525 | ); 526 | MARKETING_VERSION = 1.0; 527 | PRODUCT_BUNDLE_IDENTIFIER = com.kbyai.faceattribute; 528 | PRODUCT_NAME = "$(TARGET_NAME)"; 529 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; 530 | SUPPORTS_MACCATALYST = NO; 531 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; 532 | SWIFT_EMIT_LOC_STRINGS = YES; 533 | SWIFT_OBJC_BRIDGING_HEADER = "FaceAttribute-Bridging-Header.h"; 534 | SWIFT_VERSION = 5.0; 535 | TARGETED_DEVICE_FAMILY = 1; 536 | }; 537 | name = Debug; 538 | }; 539 | CE29C53029ECD0FC008EDB5A /* Release */ = { 540 | isa = XCBuildConfiguration; 541 | buildSettings = { 542 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 543 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 544 | CODE_SIGN_STYLE = Automatic; 545 | CURRENT_PROJECT_VERSION = 1; 546 | DEVELOPMENT_TEAM = 6G7RACBX72; 547 | FRAMEWORK_SEARCH_PATHS = ( 548 | "$(inherited)", 549 | "$(PROJECT_DIR)", 550 | ); 551 | GENERATE_INFOPLIST_FILE = YES; 552 | INFOPLIST_FILE = FaceAttribute/Info.plist; 553 | INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.productivity"; 554 | INFOPLIST_KEY_NSCameraUsageDescription = "The application utilizes the camera for performing facial recognition.\n\n\n\n\nThe app utilizes the camera for performing facial recognition."; 555 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; 556 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen; 557 | INFOPLIST_KEY_UIMainStoryboardFile = Main; 558 | INFOPLIST_KEY_UISupportedInterfaceOrientations = UIInterfaceOrientationPortrait; 559 | IPHONEOS_DEPLOYMENT_TARGET = 12.0; 560 | LD_RUNPATH_SEARCH_PATHS = ( 561 | "$(inherited)", 562 | "@executable_path/Frameworks", 563 | ); 564 | MARKETING_VERSION = 1.0; 565 | 
PRODUCT_BUNDLE_IDENTIFIER = com.kbyai.faceattribute; 566 | PRODUCT_NAME = "$(TARGET_NAME)"; 567 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; 568 | SUPPORTS_MACCATALYST = NO; 569 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; 570 | SWIFT_EMIT_LOC_STRINGS = YES; 571 | SWIFT_OBJC_BRIDGING_HEADER = "FaceAttribute-Bridging-Header.h"; 572 | SWIFT_VERSION = 5.0; 573 | TARGETED_DEVICE_FAMILY = 1; 574 | }; 575 | name = Release; 576 | }; 577 | CE29C53229ECD0FC008EDB5A /* Debug */ = { 578 | isa = XCBuildConfiguration; 579 | buildSettings = { 580 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 581 | BUNDLE_LOADER = "$(TEST_HOST)"; 582 | CODE_SIGN_STYLE = Automatic; 583 | CURRENT_PROJECT_VERSION = 1; 584 | DEVELOPMENT_TEAM = JSUUF48N9C; 585 | GENERATE_INFOPLIST_FILE = YES; 586 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 587 | MARKETING_VERSION = 1.0; 588 | PRODUCT_BUNDLE_IDENTIFIER = com.kbyai.FaceAttributeTests; 589 | PRODUCT_NAME = "$(TARGET_NAME)"; 590 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; 591 | SUPPORTS_MACCATALYST = NO; 592 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; 593 | SWIFT_EMIT_LOC_STRINGS = NO; 594 | SWIFT_VERSION = 5.0; 595 | TARGETED_DEVICE_FAMILY = 1; 596 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceAttribute.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/FaceAttribute"; 597 | }; 598 | name = Debug; 599 | }; 600 | CE29C53329ECD0FC008EDB5A /* Release */ = { 601 | isa = XCBuildConfiguration; 602 | buildSettings = { 603 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 604 | BUNDLE_LOADER = "$(TEST_HOST)"; 605 | CODE_SIGN_STYLE = Automatic; 606 | CURRENT_PROJECT_VERSION = 1; 607 | DEVELOPMENT_TEAM = JSUUF48N9C; 608 | GENERATE_INFOPLIST_FILE = YES; 609 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 610 | MARKETING_VERSION = 1.0; 611 | PRODUCT_BUNDLE_IDENTIFIER = com.kbyai.FaceAttributeTests; 612 | PRODUCT_NAME = "$(TARGET_NAME)"; 613 | SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; 614 | SUPPORTS_MACCATALYST = NO; 615 | SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; 616 | SWIFT_EMIT_LOC_STRINGS = NO; 617 | SWIFT_VERSION = 5.0; 618 | TARGETED_DEVICE_FAMILY = 1; 619 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceAttribute.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/FaceAttribute"; 620 | }; 621 | name = Release; 622 | }; 623 | CE29C53529ECD0FC008EDB5A /* Debug */ = { 624 | isa = XCBuildConfiguration; 625 | buildSettings = { 626 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 627 | CODE_SIGN_STYLE = Automatic; 628 | CURRENT_PROJECT_VERSION = 1; 629 | DEVELOPMENT_TEAM = JSUUF48N9C; 630 | GENERATE_INFOPLIST_FILE = YES; 631 | MARKETING_VERSION = 1.0; 632 | PRODUCT_BUNDLE_IDENTIFIER = com.kbyai.FaceRecognitionUITests; 633 | PRODUCT_NAME = "$(TARGET_NAME)"; 634 | SWIFT_EMIT_LOC_STRINGS = NO; 635 | SWIFT_VERSION = 5.0; 636 | TARGETED_DEVICE_FAMILY = "1,2"; 637 | TEST_TARGET_NAME = FaceRecognition; 638 | }; 639 | name = Debug; 640 | }; 641 | CE29C53629ECD0FC008EDB5A /* Release */ = { 642 | isa = XCBuildConfiguration; 643 | buildSettings = { 644 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 645 | CODE_SIGN_STYLE = Automatic; 646 | CURRENT_PROJECT_VERSION = 1; 647 | DEVELOPMENT_TEAM = JSUUF48N9C; 648 | GENERATE_INFOPLIST_FILE = YES; 649 | MARKETING_VERSION = 1.0; 650 | PRODUCT_BUNDLE_IDENTIFIER = com.kbyai.FaceRecognitionUITests; 651 | PRODUCT_NAME = "$(TARGET_NAME)"; 652 | SWIFT_EMIT_LOC_STRINGS = NO; 653 | SWIFT_VERSION = 5.0; 654 | TARGETED_DEVICE_FAMILY = "1,2"; 655 | TEST_TARGET_NAME = FaceRecognition; 656 | }; 657 | name = Release; 658 | }; 659 | /* End XCBuildConfiguration section */ 660 | 661 | /* Begin 
XCConfigurationList section */ 662 | CE29C4FC29ECD0A5008EDB5A /* Build configuration list for PBXProject "FaceAttribute" */ = { 663 | isa = XCConfigurationList; 664 | buildConfigurations = ( 665 | CE29C52C29ECD0FC008EDB5A /* Debug */, 666 | CE29C52D29ECD0FC008EDB5A /* Release */, 667 | ); 668 | defaultConfigurationIsVisible = 0; 669 | defaultConfigurationName = Release; 670 | }; 671 | CE29C52E29ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceAttribute" */ = { 672 | isa = XCConfigurationList; 673 | buildConfigurations = ( 674 | CE29C52F29ECD0FC008EDB5A /* Debug */, 675 | CE29C53029ECD0FC008EDB5A /* Release */, 676 | ); 677 | defaultConfigurationIsVisible = 0; 678 | defaultConfigurationName = Release; 679 | }; 680 | CE29C53129ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceAttributeTests" */ = { 681 | isa = XCConfigurationList; 682 | buildConfigurations = ( 683 | CE29C53229ECD0FC008EDB5A /* Debug */, 684 | CE29C53329ECD0FC008EDB5A /* Release */, 685 | ); 686 | defaultConfigurationIsVisible = 0; 687 | defaultConfigurationName = Release; 688 | }; 689 | CE29C53429ECD0FC008EDB5A /* Build configuration list for PBXNativeTarget "FaceAttributeUITests" */ = { 690 | isa = XCConfigurationList; 691 | buildConfigurations = ( 692 | CE29C53529ECD0FC008EDB5A /* Debug */, 693 | CE29C53629ECD0FC008EDB5A /* Release */, 694 | ); 695 | defaultConfigurationIsVisible = 0; 696 | defaultConfigurationName = Release; 697 | }; 698 | /* End XCConfigurationList section */ 699 | 700 | /* Begin XCVersionGroup section */ 701 | CE29C50D29ECD0A5008EDB5A /* FaceAttribute.xcdatamodeld */ = { 702 | isa = XCVersionGroup; 703 | children = ( 704 | CE29C50E29ECD0A5008EDB5A /* FaceRecognition.xcdatamodel */, 705 | ); 706 | currentVersion = CE29C50E29ECD0A5008EDB5A /* FaceRecognition.xcdatamodel */; 707 | path = FaceAttribute.xcdatamodeld; 708 | sourceTree = ""; 709 | versionGroupType = wrapper.xcdatamodel; 710 | }; 711 | CE29C54629ED6A0C008EDB5A /* Model.xcdatamodeld */ = { 712 | isa = XCVersionGroup; 713 | children = ( 714 | CE29C54729ED6A0C008EDB5A /* Model.xcdatamodel */, 715 | ); 716 | currentVersion = CE29C54729ED6A0C008EDB5A /* Model.xcdatamodel */; 717 | path = Model.xcdatamodeld; 718 | sourceTree = ""; 719 | versionGroupType = wrapper.xcdatamodel; 720 | }; 721 | /* End XCVersionGroup section */ 722 | }; 723 | rootObject = CE29C4F929ECD0A5008EDB5A /* Project object */; 724 | } 725 | -------------------------------------------------------------------------------- /FaceAttribute.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /FaceAttribute.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /FaceAttribute.xcodeproj/project.xcworkspace/xcuserdata/user.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute.xcodeproj/project.xcworkspace/xcuserdata/user.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- 
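Note on how the project plumbing above fits together: facesdk.framework is linked and embedded by the Frameworks / Embed Frameworks build phases, FRAMEWORK_SEARCH_PATHS includes $(PROJECT_DIR) so the checked-in framework is found, and SWIFT_OBJC_BRIDGING_HEADER points at FaceAttribute-Bridging-Header.h, whose single #import "facesdk/facesdk.h" is what exposes the SDK's Objective-C types (such as the FaceBox type used by the view controllers below) to Swift without an explicit import. Those view controllers read their detection thresholds from UserDefaults; the sketch below shows one way the keys could be seeded at launch. The key names are the ones actually read by CameraViewController and AttributeViewController in this project, but the numeric defaults are placeholder assumptions, not values taken from the SDK.

import Foundation

// Minimal sketch (assumed values): register defaults for the settings keys that
// CameraViewController and AttributeViewController read. Call this early, e.g.
// from application(_:didFinishLaunchingWithOptions:).
func registerDefaultSettings() {
    UserDefaults.standard.register(defaults: [
        "camera_lens": 1,            // 1 = front camera, 0 = back camera (see CameraViewController)
        "liveness_threshold": 0.7,   // placeholder value
        "identify_threshold": 0.8,   // placeholder value
        "occlusion_threshold": 0.5,  // placeholder value
        "eyeclose_threshold": 0.5,   // placeholder value
        "mouthopen_threshold": 0.5   // placeholder value
    ])
}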
/FaceAttribute.xcodeproj/xcuserdata/user.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | -------------------------------------------------------------------------------- /FaceAttribute.xcodeproj/xcuserdata/user.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | FaceAttribute.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | FaceRecognition.xcscheme_^#shared#^_ 13 | 14 | orderHint 15 | 0 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /FaceAttribute/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/.DS_Store -------------------------------------------------------------------------------- /FaceAttribute/AboutViewController.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | 4 | class AboutViewController: UIViewController{ 5 | 6 | override func viewDidLoad() { 7 | super.viewDidLoad() 8 | } 9 | 10 | @IBAction func done_clicked(_ sender: Any) { 11 | if let vc = self.presentingViewController as? ViewController { 12 | self.dismiss(animated: true, completion: nil) 13 | } 14 | } 15 | 16 | 17 | @IBAction func mail_clicked(_ sender: Any) { 18 | let appURL = URL(string: "mailto:contact@kby-ai.com") // URL scheme for Mail app 19 | 20 | if let appURL = appURL, UIApplication.shared.canOpenURL(appURL) { 21 | // If Mail app is installed, open it with a pre-filled email 22 | UIApplication.shared.open(appURL, options: [:], completionHandler: nil) 23 | } else { 24 | // If Mail app is not installed, show an alert indicating that Mail app is not available 25 | let alert = UIAlertController(title: "Mail App Not Available", message: "The Mail app is not installed on this device.", preferredStyle: .alert) 26 | let okAction = UIAlertAction(title: "OK", style: .default, handler: nil) 27 | alert.addAction(okAction) 28 | UIApplication.shared.keyWindow?.rootViewController?.present(alert, animated: true, completion: nil) 29 | } 30 | } 31 | 32 | 33 | @IBAction func skype_clicked(_ sender: Any) { 34 | 35 | } 36 | 37 | @IBAction func telegram_clicked(_ sender: Any) { 38 | let appURL = URL(string: "tg://resolve?domain=kbyai") // URL scheme for Telegram app 39 | 40 | if let appURL = appURL, UIApplication.shared.canOpenURL(appURL) { 41 | // If Telegram app is installed, open it to the "Add Contact" screen 42 | UIApplication.shared.open(appURL, options: [:], completionHandler: nil) 43 | } else { 44 | let username = "kbyai" 45 | let telegramURL = URL(string: "https://t.me/\(username)")! 46 | UIApplication.shared.open(telegramURL, options: [:], completionHandler: nil) 47 | } 48 | } 49 | 50 | @IBAction func whatsapp_clicked(_ sender: Any) { 51 | let appURL = URL(string: "whatsapp://send?phone=+19092802609") // URL scheme for Telegram app 52 | 53 | if let appURL = appURL, UIApplication.shared.canOpenURL(appURL) { 54 | // If Telegram app is installed, open it to the "Add Contact" screen 55 | UIApplication.shared.open(appURL, options: [:], completionHandler: nil) 56 | } else { 57 | let username = "+19092802609" 58 | let telegramURL = URL(string: "https://wa.me/\(username)")! 
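// Note: despite the leftover variable name telegramURL (and the Telegram-related comments above),
// this is the WhatsApp fallback path: when the WhatsApp app is not installed, the wa.me web link
// for the same phone number is opened in the browser instead.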
59 | UIApplication.shared.open(telegramURL, options: [:], completionHandler: nil) 60 | } 61 | } 62 | 63 | @IBAction func github_clicked(_ sender: Any) { 64 | let telegramURL = URL(string: "https://github.com/kby-ai")! 65 | UIApplication.shared.open(telegramURL, options: [:], completionHandler: nil) 66 | } 67 | } 68 | 69 | -------------------------------------------------------------------------------- /FaceAttribute/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // TTVFaceDemo 4 | // 5 | // Created by user on 10/28/21. 6 | // 7 | 8 | import UIKit 9 | 10 | @main 11 | class AppDelegate: UIResponder, UIApplicationDelegate { 12 | 13 | var window: UIWindow? 14 | 15 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 16 | // Override point for customization after application launch. 17 | return true 18 | } 19 | } 20 | 21 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "appstore.png", 5 | "idiom" : "universal", 6 | "platform" : "ios", 7 | "size" : "1024x1024" 8 | } 9 | ], 10 | "info" : { 11 | "author" : "xcode", 12 | "version" : 1 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/AppIcon.appiconset/appstore.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/AppIcon.appiconset/appstore.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/clr_bg.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x33", 9 | "green" : "0x30", 10 | "red" : "0x30" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x33", 27 | "green" : "0x30", 28 | "red" : "0x30" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/clr_main_button_bg1.colorset/Contents.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0.545", 9 | "green" : "0.216", 10 | "red" : "0.310" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0.545", 27 | "green" : "0.216", 28 | "red" : "0.310" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/clr_main_button_bg2.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "1.000", 9 | "green" : "0.867", 10 | "red" : "0.918" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "1.000", 27 | "green" : "0.867", 28 | "red" : "0.918" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/clr_roi_circle.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x32", 9 | "green" : "0x24", 10 | "red" : "0x49" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x32", 27 | "green" : "0x24", 28 | "red" : "0x49" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/clr_roi_line.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0xFF", 9 | "green" : "0xDD", 10 | "red" : "0xEA" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0xFF", 27 | "green" : "0xDD", 28 | "red" : "0xEA" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/clr_text.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0xE5", 9 | "green" : "0xE1", 10 | "red" : "0xE6" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0xE5", 27 | "green" : "0xE1", 28 | "red" : "0xE6" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/clr_toast_bg.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "color" : { 5 | "color-space" : "srgb", 6 | "components" : { 7 | "alpha" : "1.000", 8 | "blue" : "0x32", 9 | "green" : "0x24", 10 | "red" : "0x49" 11 | } 12 | }, 13 | "idiom" : "universal" 14 | }, 15 | { 16 | "appearances" : [ 17 | { 18 | "appearance" : "luminosity", 19 | "value" : "dark" 20 | } 21 | ], 22 | "color" : { 23 | "color-space" : "srgb", 24 | "components" : { 25 | "alpha" : "1.000", 26 | "blue" : "0x32", 27 | "green" : "0x24", 28 | "red" : "0x49" 29 | } 30 | }, 31 | "idiom" : "universal" 32 | } 33 | ], 34 | "info" : { 35 | "author" : "xcode", 36 | "version" : 1 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_github.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_github.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_github 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_github 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_github.imageset/ic_github 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_github.imageset/ic_github 1.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_github.imageset/ic_github 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_github.imageset/ic_github 2.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_github.imageset/ic_github.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_github.imageset/ic_github.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_kby.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" 
: "ic_kby.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_kby 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_kby 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_kby.imageset/ic_kby 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_kby.imageset/ic_kby 1.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_kby.imageset/ic_kby 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_kby.imageset/ic_kby 2.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_kby.imageset/ic_kby.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_kby.imageset/ic_kby.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_skype.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_skype.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_skype 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_skype 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_skype.imageset/ic_skype 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_skype.imageset/ic_skype 1.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_skype.imageset/ic_skype 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_skype.imageset/ic_skype 2.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_skype.imageset/ic_skype.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_skype.imageset/ic_skype.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_telegram.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : 
"ic_telegram.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_telegram 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_telegram 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_telegram.imageset/ic_telegram 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_telegram.imageset/ic_telegram 1.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_telegram.imageset/ic_telegram 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_telegram.imageset/ic_telegram 2.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_telegram.imageset/ic_telegram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_telegram.imageset/ic_telegram.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_whatsapp.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "ic_whatsapp.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "filename" : "ic_whatsapp 1.png", 10 | "idiom" : "universal", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "filename" : "ic_whatsapp 2.png", 15 | "idiom" : "universal", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "author" : "xcode", 21 | "version" : 1 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_whatsapp.imageset/ic_whatsapp 1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_whatsapp.imageset/ic_whatsapp 1.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_whatsapp.imageset/ic_whatsapp 2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_whatsapp.imageset/ic_whatsapp 2.png -------------------------------------------------------------------------------- /FaceAttribute/Assets.xcassets/ic_whatsapp.imageset/ic_whatsapp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/FaceAttribute/Assets.xcassets/ic_whatsapp.imageset/ic_whatsapp.png -------------------------------------------------------------------------------- /FaceAttribute/AttributeViewController.swift: 
-------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | 4 | class AttributeViewController: UIViewController{ 5 | 6 | 7 | @IBOutlet weak var faceView: UIImageView! 8 | 9 | @IBOutlet weak var livenessLbl: UILabel! 10 | @IBOutlet weak var qualityLbl: UILabel! 11 | @IBOutlet weak var luminanceLbl: UILabel! 12 | @IBOutlet weak var anglesLbl: UILabel! 13 | @IBOutlet weak var occlusionLbl: UILabel! 14 | @IBOutlet weak var eyeClosedLbl: UILabel! 15 | @IBOutlet weak var mouthOpenLbl: UILabel! 16 | @IBOutlet weak var ageLbl: UILabel! 17 | @IBOutlet weak var genderLbl: UILabel! 18 | 19 | var image: UIImage? 20 | var faceBox: FaceBox? 21 | 22 | override func viewDidLoad() { 23 | super.viewDidLoad() 24 | 25 | let defaults = UserDefaults.standard 26 | let livenessThreshold = defaults.float(forKey: "liveness_threshold") 27 | let occlusionThreshold = defaults.float(forKey: "occlusion_threshold") 28 | let eyeCloseThreshold = defaults.float(forKey: "eyeclose_threshold") 29 | let mouthOpenThreshold = defaults.float(forKey: "mouthopen_threshold") 30 | 31 | print("view load", faceBox, image, faceBox!.yaw) 32 | faceView.image = image!.cropFace(faceBox: faceBox!) 33 | 34 | if(faceBox!.liveness > livenessThreshold) { 35 | let msg = String(format: "Liveness: Real, score = %.3f", faceBox!.liveness) 36 | livenessLbl.text = msg 37 | } else { 38 | let msg = String(format: "Liveness: Spoof, score = %.3f", faceBox!.liveness) 39 | livenessLbl.text = msg 40 | } 41 | 42 | if(faceBox!.face_quality < 0.5) { 43 | let msg = String(format: "Quality: Low, score = %.3f", faceBox!.face_quality) 44 | qualityLbl.text = msg 45 | } else if(faceBox!.face_quality < 0.75) { 46 | let msg = String(format: "Quality: Medium, score = %.3f", faceBox!.face_quality) 47 | qualityLbl.text = msg 48 | } else { 49 | let msg = String(format: "Quality: High, score = %.3f", faceBox!.face_quality) 50 | qualityLbl.text = msg 51 | } 52 | 53 | var msg = String(format: "Luminance: %.3f", faceBox!.face_luminance) 54 | luminanceLbl.text = msg 55 | 56 | msg = String(format: "Angles: yaw = %.03f, roll = %.03f, pitch = %.03f", faceBox!.yaw, faceBox!.roll, faceBox!.pitch) 57 | anglesLbl.text = msg 58 | 59 | if(faceBox!.face_occlusion > occlusionThreshold) { 60 | msg = String(format: "Face occluded: score = %.03f", faceBox!.face_occlusion) 61 | occlusionLbl.text = msg 62 | } else { 63 | msg = String(format: "Face not occluded: score = %.03f", faceBox!.face_occlusion) 64 | occlusionLbl.text = msg 65 | } 66 | 67 | msg = String(format: "Left eye closed: %@, %.03f, Right eye closed: %@, %.03f", (faceBox!.left_eye > eyeCloseThreshold) ? "true" : "false", 68 | faceBox!.left_eye, (faceBox!.right_eye > eyeCloseThreshold) ? "true" : "false", faceBox!.right_eye) 69 | 70 | eyeClosedLbl.text = msg 71 | 72 | msg = String(format: "Mouth opened: %@, %.03f", (faceBox!.face_mouth_opened > mouthOpenThreshold) ? "true" : "false", faceBox!.face_mouth_opened) 73 | mouthOpenLbl.text = msg 74 | 75 | msg = String(format: "Age: %d", faceBox!.age) 76 | ageLbl.text = msg 77 | 78 | if(faceBox!.gender == 0) { 79 | genderLbl.text = "Gender: Male" 80 | } else { 81 | genderLbl.text = "Gender: Female" 82 | } 83 | } 84 | 85 | @IBAction func done_clicked(_ sender: Any) { 86 | if let vc = self.presentingViewController as?
ViewController { 87 | self.dismiss(animated: true, completion: nil) 88 | } 89 | } 90 | } 91 | 92 | -------------------------------------------------------------------------------- /FaceAttribute/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /FaceAttribute/CameraViewController.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | import CoreData 4 | 5 | class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate{ 6 | 7 | @IBOutlet weak var cameraView: UIView! 8 | @IBOutlet weak var faceView: FaceView! 9 | @IBOutlet weak var resultView: UIView! 10 | 11 | let session = AVCaptureSession() 12 | 13 | @IBOutlet weak var enrolledImage: UIImageView! 14 | @IBOutlet weak var identifiedImage: UIImageView! 15 | @IBOutlet weak var identifiedLbl: UILabel! 16 | @IBOutlet weak var similarityLbl: UILabel! 17 | @IBOutlet weak var livenessLbl: UILabel! 18 | @IBOutlet weak var yawLbl: UILabel! 19 | @IBOutlet weak var rollLbl: UILabel! 20 | @IBOutlet weak var pitchLbl: UILabel! 21 | 22 | var recognized = false 23 | 24 | var cameraLens_val = 0 25 | var livenessThreshold = Float(0) 26 | var identifyThreshold = Float(0) 27 | 28 | lazy var persistentContainer: NSPersistentContainer = { 29 | let container = NSPersistentContainer(name: ViewController.CORE_DATA_NAME) 30 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 31 | if let error = error as NSError? { 32 | fatalError("Unresolved error \(error), \(error.userInfo)") 33 | } 34 | }) 35 | return container 36 | }() 37 | 38 | override func viewDidLoad() { 39 | super.viewDidLoad() 40 | // Do any additional setup after loading the view. 41 | 42 | cameraView.translatesAutoresizingMaskIntoConstraints = true 43 | cameraView.frame = view.bounds 44 | 45 | faceView.translatesAutoresizingMaskIntoConstraints = true 46 | faceView.frame = view.bounds 47 | 48 | resultView.translatesAutoresizingMaskIntoConstraints = true 49 | resultView.frame = view.bounds 50 | 51 | let defaults = UserDefaults.standard 52 | cameraLens_val = defaults.integer(forKey: "camera_lens") 53 | livenessThreshold = defaults.float(forKey: "liveness_threshold") 54 | identifyThreshold = defaults.float(forKey: "identify_threshold") 55 | 56 | startCamera() 57 | } 58 | 59 | func startCamera() { 60 | var cameraLens = AVCaptureDevice.Position.front 61 | if(cameraLens_val == 0) { 62 | cameraLens = AVCaptureDevice.Position.back 63 | } 64 | 65 | // Create an AVCaptureSession 66 | session.sessionPreset = .high 67 | 68 | // Create an AVCaptureDevice for the camera 69 | guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: cameraLens) else { return } 70 | guard let input = try? 
AVCaptureDeviceInput(device: videoDevice) else { return } 71 | if session.canAddInput(input) { 72 | session.addInput(input) 73 | } 74 | 75 | // Create an AVCaptureVideoDataOutput 76 | let videoOutput = AVCaptureVideoDataOutput() 77 | 78 | // Set the video output's delegate and queue for processing video frames 79 | videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global(qos: .default)) 80 | 81 | // Add the video output to the session 82 | session.addOutput(videoOutput) 83 | 84 | let previewLayer = AVCaptureVideoPreviewLayer(session: session) 85 | previewLayer.videoGravity = .resizeAspectFill 86 | previewLayer.frame = view.bounds 87 | cameraView.layer.addSublayer(previewLayer) 88 | 89 | // Start the session 90 | session.startRunning() 91 | } 92 | 93 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 94 | 95 | if(recognized == true) { 96 | return 97 | } 98 | 99 | guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 100 | 101 | CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 102 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer) 103 | 104 | let context = CIContext() 105 | let cgImage = context.createCGImage(ciImage, from: ciImage.extent) 106 | let image = UIImage(cgImage: cgImage!) 107 | CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags.readOnly) 108 | 109 | // Rotate and flip the image 110 | let capturedImage = image.rotate(radians: .pi/2).flipHorizontally() 111 | 112 | let param = FaceDetectionParam() 113 | param.check_liveness = true 114 | 115 | let faceBoxes = FaceSDK.faceDetection(capturedImage, param: param) 116 | for faceBox in (faceBoxes as NSArray as! [FaceBox]) { 117 | if(cameraLens_val == 0) { 118 | let tmp = faceBox.x1 119 | faceBox.x1 = Int32(capturedImage.size.width) - faceBox.x2 - 1; 120 | faceBox.x2 = Int32(capturedImage.size.width) - tmp - 1; 121 | } 122 | } 123 | 124 | DispatchQueue.main.sync { 125 | self.faceView.setFrameSize(frameSize: capturedImage.size) 126 | self.faceView.setFaceBoxes(faceBoxes: faceBoxes) 127 | } 128 | 129 | if(faceBoxes.count > 0) { 130 | 131 | let faceBox = faceBoxes[0] as! FaceBox 132 | if(faceBox.liveness > livenessThreshold) { 133 | 134 | let templates = FaceSDK.templateExtraction(capturedImage, faceBox: faceBox) 135 | 136 | var maxSimilarity = Float(0) 137 | var maxSimilarityName = "" 138 | var maxSimilarityFace: Data? = nil 139 | 140 | let context = self.persistentContainer.viewContext 141 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 142 | 143 | do { 144 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 145 | for person in persons { 146 | 147 | let personTemplates = person.value(forKey: ViewController.ATTRIBUTE_TEMPLATES) as! Data 148 | 149 | let similarity = FaceSDK.similarityCalculation(templates, templates2: personTemplates) 150 | 151 | if(maxSimilarity < similarity) { 152 | maxSimilarity = similarity 153 | maxSimilarityName = person.value(forKey: ViewController.ATTRIBUTE_NAME) as! String 154 | maxSimilarityFace = person.value(forKey: ViewController.ATTRIBUTE_FACE) as? Data 155 | } 156 | } 157 | } catch { 158 | print("Failed fetching: \(error)") 159 | } 160 | 161 | if(maxSimilarity > identifyThreshold) { 162 | let enrolledFaceImage = UIImage(data: maxSimilarityFace!) 
163 | let identifiedFaceImage = capturedImage.cropFace(faceBox: faceBox) 164 | 165 | recognized = true 166 | 167 | DispatchQueue.main.sync { 168 | self.enrolledImage.image = enrolledFaceImage 169 | self.identifiedImage.image = identifiedFaceImage 170 | self.identifiedLbl.text = "Identified: " + maxSimilarityName 171 | self.similarityLbl.text = "Similarity: " + String(format: "%.03f", maxSimilarity) 172 | self.livenessLbl.text = "Liveness score: " + String(format: "%.03f", faceBox.liveness) 173 | self.yawLbl.text = "Yaw: " + String(format: "%.03f", faceBox.yaw) 174 | self.rollLbl.text = "Roll: " + String(format: "%.03f", faceBox.roll) 175 | self.pitchLbl.text = "Pitch: " + String(format: "%.03f", faceBox.pitch) 176 | self.resultView.showView(isHidden_: true) 177 | 178 | self.session.stopRunning() 179 | } 180 | } 181 | } 182 | } 183 | } 184 | 185 | @IBAction func done_clicked(_ sender: Any) { 186 | self.resultView.showView(isHidden_: false) 187 | recognized = false 188 | 189 | session.startRunning() 190 | } 191 | 192 | } 193 | 194 | extension UIView { 195 | 196 | func showView(isHidden_: Bool) { 197 | 198 | if isHidden_ { 199 | UIView.animate(withDuration: 0.3, animations: { 200 | self.alpha = 1.0 201 | }, completion: {_ in 202 | self.isHidden = false 203 | }) 204 | } else { 205 | UIView.animate(withDuration: 0.3, animations: { 206 | self.alpha = 0.0 207 | }, completion: {_ in 208 | self.isHidden = true 209 | }) 210 | } 211 | } 212 | } 213 | -------------------------------------------------------------------------------- /FaceAttribute/FaceAttribute.xcdatamodeld/.xccurrentversion: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | _XCCurrentVersionName 6 | FaceRecognition.xcdatamodel 7 | 8 | 9 | -------------------------------------------------------------------------------- /FaceAttribute/FaceAttribute.xcdatamodeld/FaceRecognition.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /FaceAttribute/FaceView.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | 3 | class FaceView: UIView { 4 | 5 | var faceBoxes: NSMutableArray? = nil 6 | var frameSize: CGSize? 7 | 8 | public func setFaceBoxes(faceBoxes: NSMutableArray) { 9 | self.faceBoxes = faceBoxes 10 | setNeedsDisplay() 11 | } 12 | 13 | public func setFrameSize(frameSize: CGSize) { 14 | self.frameSize = frameSize 15 | } 16 | 17 | // Only override draw() if you perform custom drawing. 18 | // An empty implementation adversely affects performance during animation. 19 | override func draw(_ rect: CGRect) { 20 | 21 | guard let context = UIGraphicsGetCurrentContext() else { 22 | return 23 | } 24 | 25 | let defaults = UserDefaults.standard 26 | let livenessThreshold = defaults.float(forKey: "liveness_threshold") 27 | 28 | if(self.frameSize != nil) { 29 | context.beginPath() 30 | 31 | let x_scale = self.frameSize!.width / self.bounds.width 32 | let y_scale = self.frameSize!.height / self.bounds.height 33 | 34 | for faceBox in (faceBoxes! as NSArray as!
[FaceBox]) { 35 | var color = UIColor.green 36 | var string = "REAL " + String(format: "%.3f", faceBox.liveness) 37 | if(faceBox.liveness < livenessThreshold) { 38 | color = UIColor.red 39 | string = "SPOOF " + String(format: "%.3f", faceBox.liveness) 40 | } 41 | 42 | context.setStrokeColor(color.cgColor) 43 | context.setLineWidth(2.0) 44 | 45 | let scaledRect = CGRect(x: Int(CGFloat(faceBox.x1) / x_scale), y: Int(CGFloat(faceBox.y1) / y_scale), width: Int(CGFloat(faceBox.x2 - faceBox.x1 + 1) / x_scale), height: Int(CGFloat(faceBox.y2 - faceBox.y1 + 1) / y_scale)) 46 | context.addRect(scaledRect) 47 | 48 | let attributes = [NSAttributedString.Key.font: UIFont.systemFont(ofSize: 20), 49 | NSAttributedString.Key.foregroundColor: color] 50 | string.draw(at: CGPoint(x: CGFloat(scaledRect.minX + 5), y: CGFloat(scaledRect.minY - 25)), withAttributes: attributes) 51 | 52 | context.strokePath() 53 | } 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /FaceAttribute/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | UIMainStoryboardFile 6 | Main 7 | 8 | 9 | -------------------------------------------------------------------------------- /FaceAttribute/Model.xcdatamodeld/Model.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /FaceAttribute/PersonViewCell.swift: -------------------------------------------------------------------------------- 1 | 2 | 3 | import UIKit 4 | 5 | protocol PersonViewCellDelegate: AnyObject { 6 | func didPersonDelete(_ cell: UITableViewCell) 7 | } 8 | 9 | class PersonViewCell: UITableViewCell { 10 | 11 | @IBOutlet weak var nameLbl: UILabel! 12 | @IBOutlet weak var faceImage: UIImageView! 13 | 14 | weak var delegate: PersonViewCellDelegate? 15 | var indexPath: IndexPath? 16 | 17 | override func awakeFromNib() { 18 | super.awakeFromNib() 19 | // Initialization code 20 | } 21 | 22 | override func setSelected(_ selected: Bool, animated: Bool) { 23 | super.setSelected(selected, animated: animated) 24 | 25 | // Configure the view for the selected state 26 | } 27 | 28 | @IBAction func delete_clicked(_ sender: Any) { 29 | delegate?.didPersonDelete(self) 30 | } 31 | 32 | } 33 | -------------------------------------------------------------------------------- /FaceAttribute/SceneDelegate.swift: -------------------------------------------------------------------------------- 1 | 2 | 3 | import UIKit 4 | 5 | class SceneDelegate: UIResponder, UIWindowSceneDelegate { 6 | 7 | var window: UIWindow? 8 | 9 | 10 | func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { 11 | // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`. 12 | // If using a storyboard, the `window` property will automatically be initialized and attached to the scene. 13 | // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead). 14 | guard let _ = (scene as? UIWindowScene) else { return } 15 | } 16 | 17 | func sceneDidDisconnect(_ scene: UIScene) { 18 | // Called as the scene is being released by the system. 19 | // This occurs shortly after the scene enters the background, or when its session is discarded. 
20 | // Release any resources associated with this scene that can be re-created the next time the scene connects. 21 | // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead). 22 | } 23 | 24 | func sceneDidBecomeActive(_ scene: UIScene) { 25 | // Called when the scene has moved from an inactive state to an active state. 26 | // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive. 27 | } 28 | 29 | func sceneWillResignActive(_ scene: UIScene) { 30 | // Called when the scene will move from an active state to an inactive state. 31 | // This may occur due to temporary interruptions (ex. an incoming phone call). 32 | } 33 | 34 | func sceneWillEnterForeground(_ scene: UIScene) { 35 | // Called as the scene transitions from the background to the foreground. 36 | // Use this method to undo the changes made on entering the background. 37 | } 38 | 39 | func sceneDidEnterBackground(_ scene: UIScene) { 40 | // Called as the scene transitions from the foreground to the background. 41 | // Use this method to save data, release shared resources, and store enough scene-specific state information 42 | // to restore the scene back to its current state. 43 | 44 | // Save changes in the application's managed object context when the application transitions to the background. 45 | (UIApplication.shared.delegate as? AppDelegate)?.saveContext() 46 | } 47 | 48 | 49 | } 50 | 51 | -------------------------------------------------------------------------------- /FaceAttribute/SettingsViewController.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | import CoreData 4 | 5 | class SettingsViewController: UIViewController{ 6 | 7 | static let CAMERA_LENS_DEFAULT = 1 8 | static let LIVENESS_THRESHOLD_DEFAULT = Float(0.7) 9 | static let IDENTIFY_THRESHOLD_DEFAULT = Float(0.8) 10 | static let YAW_THRESHOLD_DEFAULT = Float(10.0) 11 | static let ROLL_THRESHOLD_DEFAULT = Float(10.0) 12 | static let PITCH_THRESHOLD_DEFAULT = Float(10.0) 13 | static let OCCLUSION_THRESHOLD_DEFAULT = Float(0.5) 14 | static let EYECLOSE_THRESHOLD_DEFAULT = Float(0.8) 15 | static let MOUTHOPEN_THRESHOLD_DEFAULT = Float(0.5) 16 | 17 | 18 | @IBOutlet weak var cameraLensSwitch: UISwitch! 19 | @IBOutlet weak var livenessThresholdLbl: UILabel! 20 | @IBOutlet weak var identifyThresholdLbl: UILabel! 21 | @IBOutlet weak var yawThresholdLbl: UILabel! 22 | @IBOutlet weak var rollThresholdLbl: UILabel! 23 | @IBOutlet weak var pitchThresholdLbl: UILabel! 24 | @IBOutlet weak var occlusionThresholdLbl: UILabel! 25 | @IBOutlet weak var eyeClosureThresholdLbl: UILabel! 26 | @IBOutlet weak var mouthOpenThresholdLbl: UILabel! 27 | 28 | @IBOutlet weak var cameraLensLbl: UILabel! 29 | 30 | 31 | lazy var persistentContainer: NSPersistentContainer = { 32 | let container = NSPersistentContainer(name: ViewController.CORE_DATA_NAME) 33 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 34 | if let error = error as NSError? { 35 | fatalError("Unresolved error \(error), \(error.userInfo)") 36 | } 37 | }) 38 | return container 39 | }() 40 | 41 | override func viewDidLoad() { 42 | super.viewDidLoad() 43 | // Do any additional setup after loading the view. 
44 | 45 | let defaults = UserDefaults.standard 46 | let cameraLens = defaults.integer(forKey: "camera_lens") 47 | 48 | if(cameraLens == 0) { 49 | cameraLensSwitch.isOn = false 50 | cameraLensLbl.text = "Back" 51 | } else { 52 | cameraLensSwitch.isOn = true 53 | cameraLensLbl.text = "Front" 54 | } 55 | 56 | let livenessThreshold = defaults.float(forKey: "liveness_threshold") 57 | livenessThresholdLbl.text = String(livenessThreshold) 58 | 59 | let identifyThreshold = defaults.float(forKey: "identify_threshold") 60 | identifyThresholdLbl.text = String(identifyThreshold) 61 | 62 | let yawThreshold = defaults.float(forKey: "yaw_threshold") 63 | yawThresholdLbl.text = String(yawThreshold) 64 | 65 | let rollThreshold = defaults.float(forKey: "roll_threshold") 66 | rollThresholdLbl.text = String(rollThreshold) 67 | 68 | let pitchThreshold = defaults.float(forKey: "pitch_threshold") 69 | pitchThresholdLbl.text = String(pitchThreshold) 70 | 71 | let eyeCloseThreshold = defaults.float(forKey: "eyeclose_threshold") 72 | eyeClosureThresholdLbl.text = String(eyeCloseThreshold) 73 | 74 | let mouthOpenThreshold = defaults.float(forKey: "mouthopen_threshold") 75 | mouthOpenThresholdLbl.text = String(mouthOpenThreshold) 76 | 77 | let occlusionThreshold = defaults.float(forKey: "occlusion_threshold") 78 | occlusionThresholdLbl.text = String(occlusionThreshold) 79 | } 80 | 81 | static func setDefaultSettings() { 82 | let defaults = UserDefaults.standard 83 | let defaultChanged = defaults.bool(forKey: "default_changed") 84 | if(defaultChanged == false) { 85 | defaults.set(true, forKey: "default_changed") 86 | 87 | defaults.set(SettingsViewController.CAMERA_LENS_DEFAULT, forKey: "camera_lens") 88 | defaults.set(SettingsViewController.LIVENESS_THRESHOLD_DEFAULT, forKey: "liveness_threshold") 89 | defaults.set(SettingsViewController.IDENTIFY_THRESHOLD_DEFAULT, forKey: "identify_threshold") 90 | defaults.set(SettingsViewController.YAW_THRESHOLD_DEFAULT, forKey: "yaw_threshold") 91 | defaults.set(SettingsViewController.ROLL_THRESHOLD_DEFAULT, forKey: "roll_threshold") 92 | defaults.set(SettingsViewController.PITCH_THRESHOLD_DEFAULT, forKey: "pitch_threshold") 93 | defaults.set(SettingsViewController.EYECLOSE_THRESHOLD_DEFAULT, forKey: "eyeclose_threshold") 94 | defaults.set(SettingsViewController.MOUTHOPEN_THRESHOLD_DEFAULT, forKey: "mouthopen_threshold") 95 | defaults.set(SettingsViewController.OCCLUSION_THRESHOLD_DEFAULT, forKey: "occlusion_threshold") 96 | } 97 | } 98 | 99 | @IBAction func done_clicked(_ sender: Any) { 100 | if let vc = self.presentingViewController as? 
ViewController { 101 | self.dismiss(animated: true, completion: { 102 | vc.personView.reloadData() 103 | }) 104 | } 105 | } 106 | 107 | @IBAction func cameraLens_switch(_ sender: Any) { 108 | let defaults = UserDefaults.standard 109 | if(cameraLensSwitch.isOn) { 110 | defaults.set(1, forKey: "camera_lens") 111 | cameraLensLbl.text = "Front" 112 | } else { 113 | defaults.set(0, forKey: "camera_lens") 114 | cameraLensLbl.text = "Back" 115 | } 116 | } 117 | 118 | func threshold_clicked(mode: Int) { 119 | var title = "Liveness threshold" 120 | if(mode == 1) { 121 | title = "Identify threshold" 122 | } else if(mode == 2) { 123 | title = "Occlusion threshold" 124 | } else if(mode == 3) { 125 | title = "Eye closure threshold" 126 | } else if(mode == 4) { 127 | title = "Mouth open threshold" 128 | } 129 | 130 | let alertController = UIAlertController(title: title, message: "Please input a number between 0 and 1.", preferredStyle: .alert) 131 | 132 | let minimum = Float(0) 133 | let maximum = Float(1) 134 | alertController.addTextField { (textField) in 135 | textField.keyboardType = .decimalPad 136 | 137 | let defaults = UserDefaults.standard 138 | 139 | if(mode == 0) { 140 | textField.text = String(defaults.float(forKey: "liveness_threshold")) 141 | } else if(mode == 1) { 142 | textField.text = String(defaults.float(forKey: "identify_threshold")) 143 | } else if(mode == 2) { 144 | textField.text = String(defaults.float(forKey: "occlusion_threshold")) 145 | } else if(mode == 3) { 146 | textField.text = String(defaults.float(forKey: "eyeclose_threshold")) 147 | } else if(mode == 4) { 148 | textField.text = String(defaults.float(forKey: "mouthopen_threshold")) 149 | } 150 | } 151 | 152 | let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: nil) 153 | 154 | let submitAction = UIAlertAction(title: "Ok", style: .default) { (action) in 155 | 156 | var hasError = false 157 | var errorStr = "" 158 | let defaults = UserDefaults.standard 159 | 160 | if let numberString = alertController.textFields?.first?.text, let number = Float(numberString) { 161 | if(number < Float(minimum) || number > Float(maximum)) { 162 | hasError = true 163 | errorStr = "Invalid value" 164 | } else { 165 | 166 | if(mode == 0) { 167 | self.livenessThresholdLbl.text = String(number) 168 | defaults.set(number, forKey: "liveness_threshold") 169 | } else if(mode == 1) { 170 | self.identifyThresholdLbl.text = String(number) 171 | defaults.set(number, forKey: "identify_threshold") 172 | } else if(mode == 2) { 173 | self.occlusionThresholdLbl.text = String(number) 174 | defaults.set(number, forKey: "occlusion_threshold") 175 | } else if(mode == 3) { 176 | self.eyeClosureThresholdLbl.text = String(number) 177 | defaults.set(number, forKey: "eyeclose_threshold") 178 | } else if(mode == 4) { 179 | self.mouthOpenThresholdLbl.text = String(number) 180 | defaults.set(number, forKey: "mouthopen_threshold") 181 | } 182 | } 183 | } else { 184 | hasError = true 185 | errorStr = "Invalid value" 186 | } 187 | 188 | if(hasError) { 189 | let errorNotification = UIAlertController(title: "Error", message: errorStr, preferredStyle: .alert) 190 | let okAction = UIAlertAction(title: "OK", style: .default, handler: nil) 191 | errorNotification.addAction(okAction) 192 | self.present(errorNotification, animated: true, completion: nil) 193 | 194 | DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { 195 | errorNotification.dismiss(animated: true, completion: nil) 196 | } 197 | } 198 | } 199 | 200 | alertController.addAction(cancelAction) 
201 | alertController.addAction(submitAction) 202 | 203 | present(alertController, animated: true, completion: nil) 204 | } 205 | 206 | @IBAction func livenessThreshold_clicked(_ sender: Any) { 207 | threshold_clicked(mode: 0) 208 | } 209 | 210 | @IBAction func identifyThreshold_clicked(_ sender: Any) { 211 | 212 | threshold_clicked(mode: 1) 213 | } 214 | 215 | func angles_clicked(mode: Int) { 216 | var title = "Yaw threshold" 217 | if(mode == 1) { 218 | title = "Roll threshold" 219 | } else if(mode == 2) { 220 | title = "Pitch threshold" 221 | } 222 | 223 | let alertController = UIAlertController(title: title, message: "Please input a number between 0 and 30.", preferredStyle: .alert) 224 | 225 | let minimum = Float(0) 226 | let maximum = Float(30) 227 | alertController.addTextField { (textField) in 228 | textField.keyboardType = .decimalPad 229 | 230 | let defaults = UserDefaults.standard 231 | 232 | if(mode == 0) { 233 | textField.text = String(defaults.float(forKey: "yaw_threshold")) 234 | } else if(mode == 1) { 235 | textField.text = String(defaults.float(forKey: "roll_threshold")) 236 | } else if(mode == 2) { 237 | textField.text = String(defaults.float(forKey: "pitch_threshold")) 238 | } 239 | } 240 | 241 | let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: nil) 242 | 243 | let submitAction = UIAlertAction(title: "Ok", style: .default) { (action) in 244 | 245 | var hasError = false 246 | var errorStr = "" 247 | let defaults = UserDefaults.standard 248 | 249 | if let numberString = alertController.textFields?.first?.text, let number = Float(numberString) { 250 | if(number < Float(minimum) || number > Float(maximum)) { 251 | hasError = true 252 | errorStr = "Invalid value" 253 | } else { 254 | 255 | if(mode == 0) { 256 | self.yawThresholdLbl.text = String(number) 257 | defaults.set(number, forKey: "yaw_threshold") 258 | } else if(mode == 1) { 259 | self.rollThresholdLbl.text = String(number) 260 | defaults.set(number, forKey: "roll_threshold") 261 | } else if(mode == 2) { 262 | self.pitchThresholdLbl.text = String(number) 263 | defaults.set(number, forKey: "pitch_threshold") 264 | } 265 | } 266 | } else { 267 | hasError = true 268 | errorStr = "Invalid value" 269 | } 270 | 271 | if(hasError) { 272 | let errorNotification = UIAlertController(title: "Error", message: errorStr, preferredStyle: .alert) 273 | let okAction = UIAlertAction(title: "OK", style: .default, handler: nil) 274 | errorNotification.addAction(okAction) 275 | self.present(errorNotification, animated: true, completion: nil) 276 | 277 | DispatchQueue.main.asyncAfter(deadline: .now() + 2.0) { 278 | errorNotification.dismiss(animated: true, completion: nil) 279 | } 280 | } 281 | } 282 | 283 | alertController.addAction(cancelAction) 284 | alertController.addAction(submitAction) 285 | 286 | present(alertController, animated: true, completion: nil) 287 | } 288 | 289 | @IBAction func yaw_clicked(_ sender: Any) { 290 | angles_clicked(mode: 0) 291 | } 292 | 293 | @IBAction func roll_clicked(_ sender: Any) { 294 | angles_clicked(mode: 1) 295 | } 296 | 297 | @IBAction func pitch_clicked(_ sender: Any) { 298 | angles_clicked(mode: 2) 299 | } 300 | 301 | @IBAction func occlusion_clicked(_ sender: Any) { 302 | threshold_clicked(mode: 2) 303 | } 304 | 305 | @IBAction func eye_close_clicked(_ sender: Any) { 306 | threshold_clicked(mode: 3) 307 | } 308 | 309 | @IBAction func mouth_open_clicked(_ sender: Any) { 310 | threshold_clicked(mode: 4) 311 | } 312 | 313 | 314 | @IBAction func restore_settings_clicked(_ 
sender: Any) { 315 | let defaults = UserDefaults.standard 316 | defaults.set(false, forKey: "default_changed") 317 | 318 | SettingsViewController.setDefaultSettings() 319 | showToast(message: "The default settings has been restored.") 320 | } 321 | 322 | 323 | @IBAction func clear_all_person_clicked(_ sender: Any) { 324 | 325 | let context = self.persistentContainer.viewContext 326 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 327 | 328 | do { 329 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 330 | for person in persons { 331 | context.delete(person) 332 | } 333 | try context.save() 334 | } catch { 335 | print("Failed fetching: \(error)") 336 | } 337 | 338 | showToast(message: "All personal data has been cleared.") 339 | } 340 | } 341 | 342 | -------------------------------------------------------------------------------- /FaceAttribute/ToastView.swift: -------------------------------------------------------------------------------- 1 | 2 | import UIKit 3 | 4 | class ToastView: UIView { 5 | private let messageLabel: UILabel = UILabel() 6 | 7 | init(message: String) { 8 | super.init(frame: .zero) 9 | configureUI() 10 | setMessage(message) 11 | } 12 | 13 | required init?(coder aDecoder: NSCoder) { 14 | fatalError("init(coder:) has not been implemented") 15 | } 16 | 17 | private func configureUI() { 18 | backgroundColor = UIColor(named: "clr_toast_bg") 19 | layer.cornerRadius = 8 20 | clipsToBounds = true 21 | 22 | messageLabel.textColor = UIColor(named: "clr_text") 23 | messageLabel.font = UIFont.systemFont(ofSize: 16) 24 | messageLabel.numberOfLines = 0 25 | messageLabel.textAlignment = .center 26 | addSubview(messageLabel) 27 | messageLabel.translatesAutoresizingMaskIntoConstraints = false 28 | NSLayoutConstraint.activate([ 29 | messageLabel.leadingAnchor.constraint(equalTo: leadingAnchor, constant: 16), 30 | messageLabel.trailingAnchor.constraint(equalTo: trailingAnchor, constant: -16), 31 | messageLabel.topAnchor.constraint(equalTo: topAnchor, constant: 16), 32 | messageLabel.bottomAnchor.constraint(equalTo: bottomAnchor, constant: -16) 33 | ]) 34 | } 35 | 36 | private func setMessage(_ message: String) { 37 | messageLabel.text = message 38 | } 39 | } 40 | 41 | func showToast(message: String, duration: TimeInterval = 2.0) { 42 | let toastView = ToastView(message: message) 43 | if let window = UIApplication.shared.windows.first { 44 | window.addSubview(toastView) 45 | toastView.translatesAutoresizingMaskIntoConstraints = false 46 | NSLayoutConstraint.activate([ 47 | toastView.centerXAnchor.constraint(equalTo: window.centerXAnchor), 48 | toastView.bottomAnchor.constraint(equalTo: window.bottomAnchor, constant: -100), 49 | toastView.leadingAnchor.constraint(greaterThanOrEqualTo: window.leadingAnchor, constant: 16), 50 | toastView.trailingAnchor.constraint(lessThanOrEqualTo: window.trailingAnchor, constant: -16), 51 | toastView.widthAnchor.constraint(greaterThanOrEqualToConstant: 300) 52 | ]) 53 | 54 | UIView.animate(withDuration: 0.2, delay: duration, options: .curveEaseInOut) { 55 | toastView.alpha = 0 56 | } completion: { _ in 57 | toastView.removeFromSuperview() 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /FaceAttribute/UIImageExtension.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | import UIKit 3 | 4 | 5 | public extension UIImage { 6 | 7 | func cropFace(faceBox: FaceBox) -> UIImage? 
{ 8 | let centerX = Int((faceBox.x1 + faceBox.x2) / 2) 9 | let centerY = Int((faceBox.y1 + faceBox.y2) / 2) 10 | let cropWidth = Int(Float(faceBox.x2 - faceBox.x1) * Float(1.4)) 11 | 12 | let cropX1 = Int(Float(centerX) - Float(cropWidth / 2)) 13 | let cropX2 = Int(Float(centerY) - Float(cropWidth / 2)) 14 | let cropRect = CGRect(x: CGFloat(cropX1), y: CGFloat(cropX2), width: CGFloat(cropWidth), height: CGFloat(cropWidth)) 15 | 16 | guard let croppedImage = self.cgImage!.cropping(to: cropRect) else { return nil } 17 | 18 | let faceImage = UIImage(cgImage: croppedImage) 19 | 20 | let renderer = UIGraphicsImageRenderer(size: CGSize(width: 150, height: 150)) 21 | let newImage = renderer.image { (context) in 22 | faceImage.draw(in: CGRect(origin: .zero, size: CGSize(width: 150, height: 150))) 23 | } 24 | return newImage 25 | } 26 | 27 | func crop(rect: CGRect) -> UIImage? { 28 | 29 | guard let croppedImage = self.cgImage!.cropping(to: rect) else { return nil } 30 | 31 | let faceImage = UIImage(cgImage: croppedImage) 32 | return faceImage 33 | } 34 | 35 | /// Extension to fix orientation of an UIImage without EXIF 36 | func fixOrientation() -> UIImage { 37 | 38 | guard let cgImage = cgImage else { return self } 39 | 40 | if imageOrientation == .up { return self } 41 | 42 | var transform = CGAffineTransform.identity 43 | 44 | switch imageOrientation { 45 | 46 | case .down, .downMirrored: 47 | transform = transform.translatedBy(x: size.width, y: size.height) 48 | transform = transform.rotated(by: CGFloat(Double.pi)) 49 | 50 | case .left, .leftMirrored: 51 | transform = transform.translatedBy(x: size.width, y: 0) 52 | transform = transform.rotated(by: CGFloat(Double.pi/2)) 53 | 54 | case .right, .rightMirrored: 55 | transform = transform.translatedBy(x: 0, y : size.height) 56 | transform = transform.rotated(by: CGFloat(-Double.pi/2)) 57 | 58 | case .up, .upMirrored: 59 | break 60 | } 61 | 62 | switch imageOrientation { 63 | 64 | case .upMirrored, .downMirrored: 65 | transform.translatedBy(x: size.width, y: 0) 66 | transform.scaledBy(x: -1, y: 1) 67 | 68 | case .leftMirrored, .rightMirrored: 69 | transform.translatedBy(x: size.height, y: 0) 70 | transform.scaledBy(x: -1, y: 1) 71 | 72 | case .up, .down, .left, .right: 73 | break 74 | } 75 | 76 | if let ctx = CGContext(data: nil, width: Int(size.width), height: Int(size.height), bitsPerComponent: cgImage.bitsPerComponent, bytesPerRow: 0, space: cgImage.colorSpace!, bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) { 77 | 78 | ctx.concatenate(transform) 79 | 80 | switch imageOrientation { 81 | 82 | case .left, .leftMirrored, .right, .rightMirrored: 83 | ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.height, height: size.width)) 84 | 85 | default: 86 | ctx.draw(cgImage, in: CGRect(x: 0, y: 0, width: size.width, height: size.height)) 87 | } 88 | 89 | if let finalImage = ctx.makeImage() { 90 | return (UIImage(cgImage: finalImage)) 91 | } 92 | } 93 | 94 | // something failed -- return original 95 | return self 96 | } 97 | 98 | func rotate(radians: CGFloat) -> UIImage { 99 | let rotatedSize = CGRect(origin: .zero, size: size) 100 | .applying(CGAffineTransform(rotationAngle: radians)) 101 | .integral.size 102 | UIGraphicsBeginImageContext(rotatedSize) 103 | if let context = UIGraphicsGetCurrentContext() { 104 | context.translateBy(x: rotatedSize.width / 2, y: rotatedSize.height / 2) 105 | context.rotate(by: radians) 106 | draw(in: CGRect(x: -size.width / 2, y: -size.height / 2, width: size.width, height: size.height)) 107 | let rotatedImage = 
UIGraphicsGetImageFromCurrentImageContext() 108 | UIGraphicsEndImageContext() 109 | return rotatedImage ?? self 110 | } 111 | return self 112 | } 113 | 114 | // Extension to flip UIImage horizontally 115 | func flipHorizontally() -> UIImage { 116 | UIGraphicsBeginImageContextWithOptions(size, false, scale) 117 | let context = UIGraphicsGetCurrentContext()! 118 | context.translateBy(x: size.width, y: 0) 119 | context.scaleBy(x: -1.0, y: 1.0) 120 | draw(in: CGRect(origin: .zero, size: size)) 121 | let flippedImage = UIGraphicsGetImageFromCurrentImageContext()! 122 | UIGraphicsEndImageContext() 123 | return flippedImage 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /FaceAttribute/ViewController.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | import AVFoundation 3 | import CoreData 4 | 5 | class ViewController: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, UITableViewDataSource, UITableViewDelegate, PersonViewCellDelegate{ 6 | 7 | static let CORE_DATA_NAME = "Model" 8 | static let ENTITIES_NAME = "Person" 9 | static let ATTRIBUTE_NAME = "name" 10 | static let ATTRIBUTE_FACE = "face" 11 | static let ATTRIBUTE_TEMPLATES = "templates" 12 | 13 | @IBOutlet weak var warningLbl: UILabel! 14 | 15 | @IBOutlet weak var enrollBtnView: UIView! 16 | @IBOutlet weak var identifyBtnView: UIView! 17 | @IBOutlet weak var captureBtnView: UIView! 18 | @IBOutlet weak var attributeBtnView: UIView! 19 | @IBOutlet weak var settingsBtnView: UIView! 20 | @IBOutlet weak var aboutBtnView: UIView! 21 | 22 | @IBOutlet weak var personView: UITableView! 23 | 24 | var attributeImage: UIImage? = nil 25 | var attributeFace: FaceBox? = nil 26 | 27 | 28 | lazy var persistentContainer: NSPersistentContainer = { 29 | let container = NSPersistentContainer(name: ViewController.CORE_DATA_NAME) 30 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 31 | if let error = error as NSError? { 32 | fatalError("Unresolved error \(error), \(error.userInfo)") 33 | } 34 | }) 35 | return container 36 | }() 37 | 38 | 39 | override func viewDidLoad() { 40 | super.viewDidLoad() 41 | // Do any additional setup after loading the view. 42 | 43 | var ret = FaceSDK.setActivation("uwWNze/jIFSKoLnyftMRuL0FCpqfU8IcdK/6694sYj/ME7OmbsouzLWzN5vU1vuOoszS3CV+BOtY" + 44 | "GOcE+Mf0DzLaw4kD8xIlDeRr3ONVU8XIeUKyzvBMrGy9oUOYvBr0yFDxM+wte1Bo28G84K3lAsj9" + 45 | "vwIRwydJoqnY5Vuhb6pwg+NNn7PLlBjmEpyYfGpGYg9RodouTEHCUyk1MP1ASVVVp3eZ9p9lGhDV" + 46 | "pKh6NyxuBsYJ8z9GGMAqQTtMTs/SMqGGKO3cZO731UeZ8NxNVnPuUpsTmWiqeJGRA3syUY8zLV3y" + 47 | "Xc/hGRClKEH1E8ZF3VR2z0aTKIY6x7PWi/QdGA==") 48 | 49 | if(ret == SDK_SUCCESS.rawValue) { 50 | ret = FaceSDK.initSDK() 51 | } 52 | 53 | if(ret != SDK_SUCCESS.rawValue) { 54 | warningLbl.isHidden = false 55 | 56 | if(ret == SDK_LICENSE_KEY_ERROR.rawValue) { 57 | warningLbl.text = "Invalid license!" 58 | } else if(ret == SDK_LICENSE_APPID_ERROR.rawValue) { 59 | warningLbl.text = "Invalid license!" 60 | } else if(ret == SDK_LICENSE_EXPIRED.rawValue) { 61 | warningLbl.text = "License expired!" 62 | } else if(ret == SDK_NO_ACTIVATED.rawValue) { 63 | warningLbl.text = "No activated!" 64 | } else if(ret == SDK_INIT_ERROR.rawValue) { 65 | warningLbl.text = "Init error!" 
66 | } 67 | } 68 | 69 | SettingsViewController.setDefaultSettings() 70 | 71 | personView.delegate = self 72 | personView.dataSource = self 73 | personView.separatorStyle = .none 74 | personView.reloadData() 75 | 76 | } 77 | 78 | @IBAction func enroll_touch_down(_ sender: Any) { 79 | UIView.animate(withDuration: 0.5) { 80 | self.enrollBtnView.backgroundColor = UIColor(named: "clr_main_button_bg2") // Change to desired color 81 | } 82 | } 83 | 84 | @IBAction func enroll_touch_cancel(_ sender: Any) { 85 | UIView.animate(withDuration: 0.5) { 86 | self.enrollBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 87 | } 88 | } 89 | 90 | @IBAction func enroll_clicked(_ sender: Any) { 91 | UIView.animate(withDuration: 0.5) { 92 | self.enrollBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 93 | } 94 | 95 | let imagePicker = UIImagePickerController() 96 | imagePicker.view.tag = 1 97 | imagePicker.sourceType = .photoLibrary 98 | imagePicker.delegate = self 99 | present(imagePicker, animated: true, completion: nil) 100 | } 101 | 102 | 103 | @IBAction func identify_touch_down(_ sender: Any) { 104 | UIView.animate(withDuration: 0.5) { 105 | self.identifyBtnView.backgroundColor = UIColor(named: "clr_main_button_bg2") // Change to desired color 106 | } 107 | } 108 | 109 | @IBAction func identify_touch_up(_ sender: Any) { 110 | UIView.animate(withDuration: 0.5) { 111 | self.identifyBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 112 | } 113 | } 114 | 115 | @IBAction func identify_clicked(_ sender: Any) { 116 | UIView.animate(withDuration: 0.5) { 117 | self.identifyBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 118 | } 119 | 120 | performSegue(withIdentifier: "camera", sender: self) 121 | } 122 | 123 | 124 | @IBAction func capture_touch_down(_ sender: Any) { 125 | UIView.animate(withDuration: 0.5) { 126 | self.captureBtnView.backgroundColor = UIColor(named: "clr_main_button_bg2") 127 | } 128 | } 129 | 130 | @IBAction func capture_touch_up(_ sender: Any) { 131 | UIView.animate(withDuration: 0.5) { 132 | self.captureBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") 133 | } 134 | } 135 | 136 | @IBAction func capture_clicked(_ sender: Any) { 137 | UIView.animate(withDuration: 0.5) { 138 | self.captureBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") 139 | } 140 | 141 | performSegue(withIdentifier: "capture", sender: self) 142 | } 143 | 144 | 145 | @IBAction func attribute_touch_down(_ sender: Any) { 146 | UIView.animate(withDuration: 0.5) { 147 | self.attributeBtnView.backgroundColor = UIColor(named: "clr_main_button_bg2") 148 | } 149 | } 150 | 151 | @IBAction func attribute_touch_up(_ sender: Any) { 152 | UIView.animate(withDuration: 0.5) { 153 | self.attributeBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") 154 | } 155 | } 156 | 157 | @IBAction func attribute_clicked(_ sender: Any) { 158 | UIView.animate(withDuration: 0.5) { 159 | self.attributeBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") 160 | } 161 | 162 | let imagePicker = UIImagePickerController() 163 | imagePicker.view.tag = 2 164 | imagePicker.sourceType = .photoLibrary 165 | imagePicker.delegate = self 166 | present(imagePicker, animated: true, completion: nil) 167 | } 168 | 169 | 170 | @IBAction func settings_touch_down(_ sender: Any) { 171 | UIView.animate(withDuration: 0.5) { 172 | self.settingsBtnView.backgroundColor = 
UIColor(named: "clr_main_button_bg2") 173 | } 174 | } 175 | 176 | 177 | @IBAction func settings_touch_up(_ sender: Any) { 178 | UIView.animate(withDuration: 0.5) { 179 | self.settingsBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") 180 | } 181 | } 182 | 183 | @IBAction func settings_clicked(_ sender: Any) { 184 | UIView.animate(withDuration: 0.5) { 185 | self.settingsBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 186 | } 187 | 188 | performSegue(withIdentifier: "settings", sender: self) 189 | } 190 | 191 | @IBAction func about_touch_down(_ sender: Any) { 192 | UIView.animate(withDuration: 0.5) { 193 | self.aboutBtnView.backgroundColor = UIColor(named: "clr_main_button_bg2") // Change to desired color 194 | } 195 | } 196 | 197 | 198 | @IBAction func about_touch_up(_ sender: Any) { 199 | UIView.animate(withDuration: 0.5) { 200 | self.aboutBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 201 | } 202 | } 203 | 204 | @IBAction func about_clicked(_ sender: Any) { 205 | UIView.animate(withDuration: 0.5) { 206 | self.aboutBtnView.backgroundColor = UIColor(named: "clr_main_button_bg1") // Change to desired color 207 | } 208 | 209 | performSegue(withIdentifier: "about", sender: self) 210 | } 211 | 212 | @IBAction func brand_clicked(_ sender: Any) { 213 | let webURL = URL(string: "https://kby-ai.com") 214 | UIApplication.shared.open(webURL!, options: [:], completionHandler: nil) 215 | } 216 | 217 | override func prepare(for segue: UIStoryboardSegue, sender: Any?) { 218 | if let destinationVC = segue.destination as? AttributeViewController { 219 | 220 | destinationVC.image = attributeImage 221 | destinationVC.faceBox = attributeFace 222 | } 223 | } 224 | 225 | func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) { 226 | 227 | dismiss(animated: true, completion: nil) 228 | 229 | guard let image = info[.originalImage] as? UIImage else { 230 | return 231 | } 232 | 233 | let fixed_image = image.fixOrientation() 234 | 235 | if(picker.view.tag == 1) { 236 | let param = FaceDetectionParam() 237 | param.check_liveness = false 238 | param.check_eye_closeness = false 239 | param.check_face_occlusion = false 240 | param.check_mouth_opened = false 241 | param.estimate_age_gender = false 242 | 243 | let faceBoxes = FaceSDK.faceDetection(fixed_image, param: param) 244 | if(faceBoxes.count == 0) { 245 | showToast(message: "No face detected!") 246 | return 247 | } else if(faceBoxes.count > 1) { 248 | // showToast(message: "Multiple face detected!") 249 | } 250 | 251 | for faceBox in (faceBoxes as NSArray as! [FaceBox]) { 252 | 253 | let templates = FaceSDK.templateExtraction(fixed_image, faceBox: faceBox) 254 | if(templates.isEmpty) { 255 | continue 256 | } 257 | 258 | let faceImage = fixed_image.cropFace(faceBox: faceBox) 259 | 260 | let context = self.persistentContainer.viewContext 261 | let entity = NSEntityDescription.entity(forEntityName: ViewController.ENTITIES_NAME, in: context)! 
262 | let user = NSManagedObject(entity: entity, insertInto: context) 263 | 264 | let name = "Person" + String(Int.random(in: 10000...20000)) 265 | let face = faceImage!.jpegData(compressionQuality: CGFloat(1.0)) 266 | 267 | user.setValue(name, forKey: ViewController.ATTRIBUTE_NAME) 268 | user.setValue(templates, forKey: ViewController.ATTRIBUTE_TEMPLATES) 269 | user.setValue(face, forKey: ViewController.ATTRIBUTE_FACE) 270 | 271 | do { 272 | try context.save() 273 | } catch let error as NSError { 274 | print("Could not save. \(error), \(error.userInfo)") 275 | } 276 | } 277 | 278 | personView.reloadData() 279 | showToast(message: "Person enrolled!") 280 | } else if(picker.view.tag == 2) { 281 | let param = FaceDetectionParam() 282 | param.check_liveness = true 283 | param.check_eye_closeness = true 284 | param.check_face_occlusion = true 285 | param.check_mouth_opened = true 286 | param.estimate_age_gender = true 287 | 288 | let faceBoxes = FaceSDK.faceDetection(fixed_image, param: param) 289 | if(faceBoxes.count == 0) { 290 | showToast(message: "No face detected!") 291 | return 292 | } else if(faceBoxes.count > 1) { 293 | showToast(message: "Multiple face detected!") 294 | return 295 | } 296 | 297 | attributeImage = fixed_image 298 | attributeFace = faceBoxes[0] as? FaceBox 299 | performSegue(withIdentifier: "attribute", sender: self) 300 | 301 | } 302 | } 303 | 304 | func imagePickerControllerDidCancel(_ picker: UIImagePickerController) { 305 | dismiss(animated: true, completion: nil) 306 | } 307 | 308 | // UITableViewDataSource methods 309 | func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { 310 | // Return the number of cells in the table view 311 | 312 | let context = self.persistentContainer.viewContext 313 | let count = try! context.count(for: NSFetchRequest(entityName: ViewController.ENTITIES_NAME)) 314 | 315 | return count 316 | } 317 | 318 | func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { 319 | // Get the table view cell for the specified index path 320 | let cell = tableView.dequeueReusableCell(withIdentifier: "PersonCell", for: indexPath) as! PersonViewCell 321 | cell.delegate = self 322 | cell.indexPath = indexPath 323 | 324 | let context = self.persistentContainer.viewContext 325 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 326 | do { 327 | let persons = try context.fetch(fetchRequest) as! [NSManagedObject] 328 | var rowCount = 0 329 | for person in persons { 330 | if(rowCount == indexPath.row) { 331 | cell.nameLbl.text = person.value(forKey: ViewController.ATTRIBUTE_NAME) as? String 332 | cell.faceImage.image = UIImage(data: person.value(forKey: ViewController.ATTRIBUTE_FACE) as! Data) 333 | 334 | break 335 | } 336 | rowCount = rowCount + 1 337 | } 338 | } catch { 339 | print("Failed fetching: \(error)") 340 | } 341 | 342 | // Customize the cell 343 | return cell 344 | } 345 | 346 | // UITableViewDelegate methods 347 | func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { 348 | // Handle cell selection 349 | tableView.deselectRow(at: indexPath, animated: true) 350 | } 351 | 352 | func didPersonDelete(_ cell: UITableViewCell) { 353 | let context = self.persistentContainer.viewContext 354 | let fetchRequest = NSFetchRequest(entityName: ViewController.ENTITIES_NAME) 355 | let personCell = cell as! PersonViewCell 356 | 357 | do { 358 | let persons = try context.fetch(fetchRequest) as! 
[NSManagedObject] 359 | var rowCount = 0 360 | for person in persons { 361 | if(rowCount == personCell.indexPath?.row) { 362 | context.delete(person) 363 | try context.save() 364 | break 365 | } 366 | rowCount = rowCount + 1 367 | } 368 | } catch { 369 | print("Failed fetching: \(error)") 370 | } 371 | 372 | self.personView.reloadData() 373 | } 374 | } 375 | 376 | -------------------------------------------------------------------------------- /FaceAttributeTests/FaceAttributeTests.swift: -------------------------------------------------------------------------------- 1 | 2 | 3 | import XCTest 4 | @testable import FaceAttribute 5 | 6 | final class FaceAttributeTests: XCTestCase { 7 | 8 | override func setUpWithError() throws { 9 | // Put setup code here. This method is called before the invocation of each test method in the class. 10 | } 11 | 12 | override func tearDownWithError() throws { 13 | // Put teardown code here. This method is called after the invocation of each test method in the class. 14 | } 15 | 16 | func testExample() throws { 17 | // This is an example of a functional test case. 18 | // Use XCTAssert and related functions to verify your tests produce the correct results. 19 | // Any test you write for XCTest can be annotated as throws and async. 20 | // Mark your test throws to produce an unexpected failure when your test encounters an uncaught error. 21 | // Mark your test async to allow awaiting for asynchronous code to complete. Check the results with assertions afterwards. 22 | } 23 | 24 | func testPerformanceExample() throws { 25 | // This is an example of a performance test case. 26 | self.measure { 27 | // Put the code you want to measure the time of here. 28 | } 29 | } 30 | 31 | } 32 | -------------------------------------------------------------------------------- /FaceAttributeUITests/FaceAttributeUITests.swift: -------------------------------------------------------------------------------- 1 | 2 | import XCTest 3 | 4 | final class FaceAttributeUITests: XCTestCase { 5 | 6 | override func setUpWithError() throws { 7 | // Put setup code here. This method is called before the invocation of each test method in the class. 8 | 9 | // In UI tests it is usually best to stop immediately when a failure occurs. 10 | continueAfterFailure = false 11 | 12 | // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this. 13 | } 14 | 15 | override func tearDownWithError() throws { 16 | // Put teardown code here. This method is called after the invocation of each test method in the class. 17 | } 18 | 19 | func testExample() throws { 20 | // UI tests must launch the application that they test. 21 | let app = XCUIApplication() 22 | app.launch() 23 | 24 | // Use XCTAssert and related functions to verify your tests produce the correct results. 25 | } 26 | 27 | func testLaunchPerformance() throws { 28 | if #available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *) { 29 | // This measures how long it takes to launch your application. 
30 | measure(metrics: [XCTApplicationLaunchMetric()]) { 31 | XCUIApplication().launch() 32 | } 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /FaceAttributeUITests/FaceAttributeUITestsLaunchTests.swift: -------------------------------------------------------------------------------- 1 | 2 | import XCTest 3 | 4 | final class FaceAttributeUITestsLaunchTests: XCTestCase { 5 | 6 | override class var runsForEachTargetApplicationUIConfiguration: Bool { 7 | true 8 | } 9 | 10 | override func setUpWithError() throws { 11 | continueAfterFailure = false 12 | } 13 | 14 | func testLaunch() throws { 15 | let app = XCUIApplication() 16 | app.launch() 17 | 18 | // Insert steps here to perform after app launch but before taking a screenshot, 19 | // such as logging into a test account or navigating somewhere in the app 20 | 21 | let attachment = XCTAttachment(screenshot: app.screenshot()) 22 | attachment.name = "Launch Screen" 23 | attachment.lifetime = .keepAlways 24 | add(attachment) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |


6 | 7 | ### Our facial recognition algorithm is globally top-ranked by NIST in the FRVT 1:1 leaderboards. 8 | [Latest NIST FRVT evaluation report 2024-12-20](https://pages.nist.gov/frvt/html/frvt11.html) 9 | 10 | ![FRVT Sheet](https://github.com/user-attachments/assets/16b4cee2-3a91-453f-94e0-9e81262393d7) 11 | 12 | #### 🆔 ID Document Liveness Detection - Linux - [Here](https://web.kby-ai.com) 13 | #### 📚 Product & Resources - [Here](https://github.com/kby-ai/Product) 14 | #### 🛟 Help Center - [Here](https://docs.kby-ai.com) 15 | #### 💼 KYC Verification Demo - [Here](https://github.com/kby-ai/KYC-Verification-Demo-Android) 16 | #### 🙋‍♀️ Docker Hub - [Here](https://hub.docker.com/u/kbyai) 17 | 18 | # FaceAttribute-iOS 19 | 20 | ## Overview 21 | 22 | This repository integrates several `facial recognition` technologies, including `3D passive face liveness detection`, `face recognition`, `automatic face capture`, and analysis of various `face attributes` such as `age`, `gender`, `face quality`, `facial occlusion`, `eye closure`, and `mouth opening`. 23 | 24 | The system utilizes `Face Liveness Detection` technology to generate a real-time liveness score based on a single image captured by the camera. 25 | 26 | Additionally, this demo offers `Face Recognition` capabilities, enabling enrollment from a gallery and real-time identification of faces captured by the camera. 27 | 28 | This repository features an `automatic Face Capture` function that verifies various `facial attributes`, such as `face quality`, `facial orientation` (yaw, roll, pitch), `facial occlusion` (e.g., mask, sunglasses, hand over face), `eye closure`, `mouth opening`, and the position of the face within the `region of interest` (`ROI`). 29 | 30 | Moreover, the repository can compute scores for different face attributes from a gallery image, including `liveness`, `face orientation` (yaw, roll, pitch), `face quality`, `luminance of the face`, `facial occlusion`, `eye closure`, `mouth opening`, `age`, and `gender`. 31 | 32 | > In this repository, we integrated `KBY-AI`'s `Premium Face Mobile SDK` into the `iOS` platform.
33 | ### ◾FaceSDK(Mobile) Details 34 | 35 | | Basic | Standard | 🔽 Premium | 36 | |------------------|------------------|------------------| 37 | | Face Detection | Face Detection | Face Detection | 38 | | Face Liveness Detection | Face Liveness Detection | Face Liveness Detection | 39 | | Pose Estimation | Pose Estimation | Pose Estimation | 40 | | | Face Recognition | Face Recognition | 41 | | | | 68 points Face Landmark Detection | 42 | | | | Face Quality Calculation | 43 | | | | Face Occlusion Detection | 44 | | | | Eye Closure Detection | 45 | | | | Age, Gender Estimation | 46 | 47 | ### ◾FaceSDK(Mobile) Product List 48 | | No. | Repository | SDK Details | 49 | |------------------|------------------|------------------| 50 | | 1 | [Face Liveness Detection - Android](https://github.com/kby-ai/FaceLivenessDetection-Android) | Basic SDK | 51 | | 2 | [Face Liveness Detection - iOS](https://github.com/kby-ai/FaceLivenessDetection-iOS) | Basic SDK | 52 | | 3 | [Face Recognition + Face Liveness Detection - Android](https://github.com/kby-ai/FaceRecognition-Android) | Standard SDK | 53 | | 4 | [Face Recognition + Face Liveness Detection - iOS](https://github.com/kby-ai/FaceRecognition-iOS) | Standard SDK | 54 | | 5 | [Face Recognition + Face Liveness Detection - Flutter](https://github.com/kby-ai/FaceRecognition-Flutter) | Standard SDK | 55 | | 6 | [Face Recognition + Face Liveness Detection - Ionic-Cordova](https://github.com/kby-ai/FaceRececogniion-Ionic-Cordova) | Standard SDK | 56 | | 7 | [Face Recognition + Face Liveness Detection - React-Native](https://github.com/kby-ai/FaceRecognition-React-Native) | Standard SDK | 57 | | 8 | [Face Attribute - Android](https://github.com/kby-ai/FaceAttribute-Android) | Premium SDK | 58 | | ➡️ | [Face Attribute - iOS](https://github.com/kby-ai/FaceAttribute-iOS) | Premium SDK | 59 | | 10 | [Face Attribute - Flutter](https://github.com/kby-ai/FaceAttribute-Flutter) | Premium SDK | 60 | 61 | > To get `Face SDK(server)`, please visit products [here](https://github.com/kby-ai/Product).
62 | 63 | ## Download on the App Store 64 | 65 | 66 | 67 | 68 | 69 | ## Performance Video 70 | 71 | You can visit our YouTube video [here](https://www.youtube.com/watch?v=-WiAethTacc) to see how well our demo app works.

72 | [![Face Recognition Android](https://img.youtube.com/vi/-WiAethTacc/0.jpg)](https://www.youtube.com/watch?v=-WiAethTacc) 73 | 74 | ## Screenshots 75 |

92 | 93 | ## SDK License 94 | 95 | The `face attribute` project relies on `KBY-AI`'s SDK, which requires a license for each `bundle ID` of the `iOS` project. 96 | 97 | - The code below shows how to use the license: https://github.com/kby-ai/FaceAttribute-iOS/blob/3e377692dcd101067ba57033db8a43a84ceced28/FaceAttribute/ViewController.swift#L42-L51 98 | 99 | - To request a license, please contact us:
100 | 🧙`Email:` contact@kby-ai.com
101 | 🧙`Telegram:` [@kbyai](https://t.me/kbyai)
102 | 🧙`WhatsApp:` [+19092802609](https://wa.me/+19092802609)
103 | 🧙`Discord:` [KBY-AI](https://discord.gg/CgHtWQ3k9T)
104 | 🧙`Teams:` [KBY-AI](https://teams.live.com/l/invite/FBAYGB1-IlXkuQM3AY)
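For orientation, here is a minimal, hypothetical sketch of how the received license might be applied, mirroring the usage linked above (the authoritative code is the linked `ViewController.swift`); the placeholder license string, the view controller name, and the `viewDidLoad` placement are assumptions, and the return codes follow the `SDK_ERROR` values declared in `facesdk.framework/Headers/facesdk_api.h`.

```swift
import UIKit

// FaceSDK is exposed through the project's bridging header (see the set-up section below).
class LicenseSetupViewController: UIViewController {  // hypothetical name

    override func viewDidLoad() {
        super.viewDidLoad()

        // Placeholder: use the license string issued by KBY-AI for your bundle ID.
        let license = "..."

        // Activate the SDK; 0 corresponds to SDK_SUCCESS in facesdk_api.h.
        let activationResult = FaceSDK.setActivation(license)
        guard activationResult == 0 else {
            print("SDK activation failed with code \(activationResult)")
            return
        }

        // Initialize the SDK only after a successful activation.
        let initResult = FaceSDK.initSDK()
        guard initResult == 0 else {
            print("SDK initialization failed with code \(initResult)")
            return
        }

        print("FaceSDK is ready")
    }
}
```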
105 | 106 | ## About SDK 107 | 108 | ### 1. Set up 109 | - Copy the `SDK` (`facesdk.framework` folder) to the `root` folder in your project. 110 | 111 | - Add the `SDK framework` to the project in `Xcode`. 112 | 113 | > Project Navigator -> General -> Frameworks, Libraries, and Embedded Content 114 | 115 | ![image](https://user-images.githubusercontent.com/125717930/231925359-ef30b3c0-d2d9-4b32-ae57-80b42b021b91.png) 116 | 117 | - Add the bridging header to your project settings 118 | 119 | > Project Navigator -> Build Settings -> Swift Compiler - General 120 | 121 | ![image](https://github.com/kby-ai/FaceAttribute-iOS/assets/125717930/5104749e-807b-47ce-b885-88fce65bfb77) 122 | 123 | ### 2. Initializing the SDK 124 | 125 | - Step One 126 | 127 | To begin, you need to activate the `SDK` using the license that you have received. 128 | ```swift 129 | FaceSDK.setActivation("...") 130 | ``` 131 | If activation is successful, the return value will be `SDK_SUCCESS`. Otherwise, an error value will be returned. 132 | 133 | - Step Two 134 | 135 | After activation, call the `SDK`'s initialization function. 136 | ```swift 137 | FaceSDK.initSDK() 138 | ``` 139 | If initialization is successful, the return value will be `SDK_SUCCESS`. Otherwise, an error value will be returned. 140 | 141 | ### 3. SDK Classes 142 | 143 | - FaceBox 144 | 145 | This class represents the output of the face detection function and can be utilized in `template` creation functions. 146 | 147 | | Feature | Type | Name | 148 | |------------------|------------------|------------------| 149 | | Face rectangle | int | x1, y1, x2, y2 | 150 | | Face angles (-45 ~ 45) | float | yaw, roll, pitch | 151 | | Liveness score (0 ~ 1) | float | liveness | 152 | | Face quality (0 ~ 1) | float | face_quality | 153 | | Face luminance (0 ~ 255) | float | face_luminance | 154 | | Face occlusion (0 ~ 1) | float | face_occlusion | 155 | | Eye closure (0 ~ 1) | float | left_eye, right_eye | 156 | | Mouth opening (0 ~ 1) | float | face_mouth_opened | 157 | | Age, gender | int | age, gender | 158 | | 68-point facial landmark | Data | landmark | 159 | 160 | > 68-point facial landmark 161 | 162 | 163 | 164 | - FaceDetectionParam 165 | 166 | This class serves as the input parameter for `face detection`, enabling various processing functionalities such as `face liveness detection`, `eye closure checking`, `facial occlusion checking`, `mouth opening checking`, and `age and gender estimation`. 167 | 168 | | Feature | Type | Name | 169 | |------------------|------------------|------------------| 170 | | Check liveness | bool | check_liveness | 171 | | Check eye closure | bool | check_eye_closeness | 172 | | Check face occlusion | bool | check_face_occlusion | 173 | | Check mouth opening | bool | check_mouth_opened | 174 | | Estimate age, gender | bool | estimate_age_gender | 175 | 176 | ### 4. SDK APIs 177 | #### - Face Detection 178 | 179 | The `Face SDK` provides a unified function for detecting faces, enabling multiple functionalities such as `liveness detection`, `face orientation` (yaw, roll, pitch), `face quality`, `facial occlusion`, `eye closure`, `mouth opening`, `age`, `gender`, and `facial landmarks`. 180 | 181 | The function can be used as follows: 182 | 183 | ```swift 184 | let faceBoxes = FaceSDK.faceDetection(image, param: param) 185 | ``` 186 | 187 | This function requires two parameters: a `UIImage` object and a `FaceDetectionParam` object that enables various processing functionalities.
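As a hedged end-to-end sketch of this call (the image source, the enabled checks, and the variable names are illustrative assumptions; the fields come from the `FaceDetectionParam` and `FaceBox` tables in section 3):

```swift
import UIKit

// Assumes the SDK has already been activated and initialized (see section 2)
// and that FaceSDK is exposed through the bridging header.
func analyzeFaces(in image: UIImage) {
    // Turn on the optional checks described by FaceDetectionParam.
    let param = FaceDetectionParam()
    param.check_liveness = true
    param.check_eye_closeness = true
    param.check_face_occlusion = true
    param.check_mouth_opened = true
    param.estimate_age_gender = true

    // Run the unified face detection function on the input image.
    let faceBoxes = FaceSDK.faceDetection(image, param: param)

    // Each element is a FaceBox carrying the scores listed in section 3.
    for case let faceBox as FaceBox in faceBoxes {
        print("rect: (\(faceBox.x1), \(faceBox.y1)) - (\(faceBox.x2), \(faceBox.y2))")
        print("liveness: \(faceBox.liveness), quality: \(faceBox.face_quality)")
        print("yaw/roll/pitch: \(faceBox.yaw)/\(faceBox.roll)/\(faceBox.pitch)")
        print("age: \(faceBox.age), gender: \(faceBox.gender)")
    }
}
```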
188 | 189 | The function returns a list of `FaceBox` objects. 190 | 191 | #### - Create Templates 192 | 193 | The `FaceSDK` provides a function that can generate a `template` from a `UIImage` image. This template can then be used to verify the identity of the individual captured in the image. 194 | 195 | ```swift 196 | let templates = FaceSDK.templateExtraction(image, faceBox: faceBox) 197 | ``` 198 | 199 | The `SDK`'s `template` extraction function takes two parameters: a `UIImage` object and a `FaceBox` object. 200 | 201 | The function returns a `Data` object, which contains the `template` that can be used for person verification. 202 | 203 | #### - Similarity Calculation 204 | 205 | The `similarityCalculation` function takes the byte arrays of two `templates` as parameters. 206 | 207 | ```swift 208 | let similarity = FaceSDK.similarityCalculation(templates, templates2: personTemplates) 209 | ``` 210 | 211 | It returns the similarity value between the two templates, which can be used to determine the level of likeness between the two individuals. 212 | 213 |
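To tie the two functions above together, here is a hedged sketch of a simple one-to-one verification flow; the function name, the input images, and the `0.7` threshold are illustrative assumptions rather than values prescribed by the SDK:

```swift
import UIKit

// Assumes the SDK is activated/initialized and that faceDetection() has already
// returned a FaceBox for each image (see the Face Detection section above).
func verify(enrollImage: UIImage, enrollFace: FaceBox,
            probeImage: UIImage, probeFace: FaceBox) -> Bool {
    // Extract a template (Data) for each detected face.
    let enrolledTemplate = FaceSDK.templateExtraction(enrollImage, faceBox: enrollFace)
    let probeTemplate = FaceSDK.templateExtraction(probeImage, faceBox: probeFace)

    // Compare the two templates; a higher score means a closer match.
    let similarity = FaceSDK.similarityCalculation(probeTemplate, templates2: enrolledTemplate)

    // 0.7 is only an illustrative decision threshold.
    return similarity >= 0.7
}
```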
-------------------------------------------------------------------------------- /facesdk.framework/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/.DS_Store -------------------------------------------------------------------------------- /facesdk.framework/Headers/facesdk.h: -------------------------------------------------------------------------------- 1 | // 2 | // facesdk.h 3 | // facesdk 4 | // 5 | // Created by user on 4/12/23. 6 | // 7 | 8 | #import <Foundation/Foundation.h> 9 | 10 | //! Project version number for facesdk. 11 | FOUNDATION_EXPORT double facesdkVersionNumber; 12 | 13 | //! Project version string for facesdk. 14 | FOUNDATION_EXPORT const unsigned char facesdkVersionString[]; 15 | 16 | // In this header, you should import all the public headers of your framework using statements like #import <facesdk/PublicHeader.h> 17 | 18 | 19 | #include "facesdk_api.h" 20 | -------------------------------------------------------------------------------- /facesdk.framework/Headers/facesdk_api.h: -------------------------------------------------------------------------------- 1 | #import <Foundation/Foundation.h> 2 | #import <UIKit/UIKit.h> 3 | 4 | NS_ASSUME_NONNULL_BEGIN 5 | 6 | enum SDK_ERROR 7 | { 8 | SDK_SUCCESS = 0, 9 | SDK_LICENSE_KEY_ERROR = -1, 10 | SDK_LICENSE_APPID_ERROR = -2, 11 | SDK_LICENSE_EXPIRED = -3, 12 | SDK_NO_ACTIVATED = -4, 13 | SDK_INIT_ERROR = -5, 14 | }; 15 | 16 | @interface FaceBox : NSObject 17 | 18 | @property (nonatomic) int x1; 19 | @property (nonatomic) int y1; 20 | @property (nonatomic) int x2; 21 | @property (nonatomic) int y2; 22 | @property (nonatomic) float liveness; 23 | @property (nonatomic) float yaw; 24 | @property (nonatomic) float roll; 25 | @property (nonatomic) float pitch; 26 | @property (nonatomic) int age; 27 | @property (nonatomic) int gender; 28 | @property (nonatomic) float left_eye; 29 | @property (nonatomic) float right_eye; 30 | @property (nonatomic) float face_occlusion; 31 | @property (nonatomic) float face_quality; 32 | @property (nonatomic) float face_luminance; 33 | @property (nonatomic) float face_mouth_opened; 34 | @property (atomic) NSData* landmark; 35 | @end 36 | 37 | @interface FaceDetectionParam: NSObject 38 | @property (nonatomic) bool check_liveness; 39 | @property (nonatomic) bool check_eye_closeness; 40 | @property (nonatomic) bool check_face_occlusion; 41 | @property (nonatomic) bool check_mouth_opened; 42 | @property (nonatomic) bool estimate_age_gender; 43 | @end 44 | 45 | 46 | @interface FaceSDK : NSObject 47 | 48 | +(int) setActivation: (NSString*) license; 49 | +(int) initSDK; 50 | +(NSMutableArray*) faceDetection: (UIImage*) image param: (FaceDetectionParam*) param; 51 | +(NSData*) templateExtraction: (UIImage*) image faceBox: (FaceBox*) faceBox; 52 | +(float) similarityCalculation: (NSData*) templates1 templates2: (NSData*) templates2; 53 | 54 | @end 55 | 56 | NS_ASSUME_NONNULL_END 57 | -------------------------------------------------------------------------------- /facesdk.framework/Info.plist: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/Info.plist
-------------------------------------------------------------------------------- /facesdk.framework/Modules/module.modulemap: -------------------------------------------------------------------------------- 1 | framework module facesdk { 2 | umbrella header "facesdk.h" 3 | export * 4 | 5 | module * { export * } 6 | } 7 | -------------------------------------------------------------------------------- /facesdk.framework/_CodeSignature/CodeResources: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | files 6 | 7 | Headers/facesdk.h 8 | 9 | iXedJdnTNjJdkav0lex0htXPBa8= 10 | 11 | Headers/facesdk_api.h 12 | 13 | Ts2qiHASCpGTl7eniw4GomKTJGE= 14 | 15 | Info.plist 16 | 17 | ff+CRLmRQe8F7JbWP/pMFR9/0/E= 18 | 19 | Modules/module.modulemap 20 | 21 | WrHmhQafWafNCSGU1Xr0vQaW6WY= 22 | 23 | ag.bin 24 | 25 | vgCA5hPDLtSxGWCo886AQF/abXo= 26 | 27 | detection.bin 28 | 29 | G52JYi47ACwak8HkEcpkm9zo1as= 30 | 31 | detection.param 32 | 33 | tHFm7ohczZG7KEtACVgRSjX3XIA= 34 | 35 | ec.bin 36 | 37 | BQzAFyQidHaCWg0Pr7UK3QmaVVI= 38 | 39 | ec.param.bin 40 | 41 | A3Fh49MUaCPrsIDYLKP9epwcFx4= 42 | 43 | landmark.bin 44 | 45 | YwuxUSePlqnFEDLD95H35rgexRI= 46 | 47 | landmark.param 48 | 49 | HHZGt8b9GPLjgGplhK/7hcOW0Mo= 50 | 51 | liveness.bin 52 | 53 | qh1obYoGNe0LoyD5JBmUkdeYrEY= 54 | 55 | occ.bin 56 | 57 | 0A+2lZ1ELppBcl6rq/KQdCS8iyU= 58 | 59 | recognize.bin 60 | 61 | SViF2bgK4XlpoZCVL4WMJbPq6q8= 62 | 63 | recognize.param 64 | 65 | G2oZ25SbKg4KWk0aPL047vM/Rv0= 66 | 67 | 68 | files2 69 | 70 | Headers/facesdk.h 71 | 72 | hash2 73 | 74 | CEHWZwXGt6HTp0cWeGAWrhBufzotOw42QLbWl2FjbpE= 75 | 76 | 77 | Headers/facesdk_api.h 78 | 79 | hash2 80 | 81 | wK2xu6IJ+KI8evbJ/6eqigcchjKuEhfvmHL2c+PjnJA= 82 | 83 | 84 | Modules/module.modulemap 85 | 86 | hash2 87 | 88 | e+AaXT/TjwyPpTaKiiPdA/TglVoEmdjtOjlJBMOX3fs= 89 | 90 | 91 | ag.bin 92 | 93 | hash2 94 | 95 | cVeq8UFUy/qXj1Sqiv745ZM+52Gg0WWrz3nNds8+35A= 96 | 97 | 98 | detection.bin 99 | 100 | hash2 101 | 102 | aSBPVzW2w5vueXMHRoelxdPFfzE45CIHzJJfvcmNVVc= 103 | 104 | 105 | detection.param 106 | 107 | hash2 108 | 109 | yb/QW3dSgGun1LdCqss+oipjB11F0sE0NtewKSJibFo= 110 | 111 | 112 | ec.bin 113 | 114 | hash2 115 | 116 | q65m/Fn/r61xbEyRmMchVjG8BBMPJU95ZBA0YwbJvbU= 117 | 118 | 119 | ec.param.bin 120 | 121 | hash2 122 | 123 | VGYoeS+Nt1c15g6vqFWdslY6TJ/ovNY0Q3elP255Ztg= 124 | 125 | 126 | landmark.bin 127 | 128 | hash2 129 | 130 | q59zkpnecoyeYz1houDmoVK0+ROe2x8l/0dtpqDC9Ic= 131 | 132 | 133 | landmark.param 134 | 135 | hash2 136 | 137 | nkGOHyOPviHWfG8E67n+geJfRcw+yc7qGU4famEpZNc= 138 | 139 | 140 | liveness.bin 141 | 142 | hash2 143 | 144 | 3XjsZ9HffijptH6kD8YcgXR9DwsM2tDsYTNDvdErRkE= 145 | 146 | 147 | occ.bin 148 | 149 | hash2 150 | 151 | uEETiCJKIQY5PymWTWt5fUq4+UuQZdAYYIMX7XzwfMg= 152 | 153 | 154 | recognize.bin 155 | 156 | hash2 157 | 158 | ljjc/uY3hbh6WrCw7Z+d/+k+SzcY5eobU33VJDpmG4c= 159 | 160 | 161 | recognize.param 162 | 163 | hash2 164 | 165 | kDIa3Db/MNtnTOFGFGQ1ZvrQfA8iAnM1hx/ZDYyci4A= 166 | 167 | 168 | 169 | rules 170 | 171 | ^.* 172 | 173 | ^.*\.lproj/ 174 | 175 | optional 176 | 177 | weight 178 | 1000 179 | 180 | ^.*\.lproj/locversion.plist$ 181 | 182 | omit 183 | 184 | weight 185 | 1100 186 | 187 | ^Base\.lproj/ 188 | 189 | weight 190 | 1010 191 | 192 | ^version.plist$ 193 | 194 | 195 | rules2 196 | 197 | .*\.dSYM($|/) 198 | 199 | weight 200 | 11 201 | 202 | ^(.*/)?\.DS_Store$ 203 | 204 | omit 205 | 206 | weight 207 | 2000 208 | 209 | ^.* 210 | 211 | ^.*\.lproj/ 212 | 213 | optional 214 | 215 | weight 216 | 1000 217 | 218 | 
^.*\.lproj/locversion.plist$ 219 | 220 | omit 221 | 222 | weight 223 | 1100 224 | 225 | ^Base\.lproj/ 226 | 227 | weight 228 | 1010 229 | 230 | ^Info\.plist$ 231 | 232 | omit 233 | 234 | weight 235 | 20 236 | 237 | ^PkgInfo$ 238 | 239 | omit 240 | 241 | weight 242 | 20 243 | 244 | ^embedded\.provisionprofile$ 245 | 246 | weight 247 | 20 248 | 249 | ^version\.plist$ 250 | 251 | weight 252 | 20 253 | 254 | 255 | 256 | 257 | -------------------------------------------------------------------------------- /facesdk.framework/ag.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/ag.bin -------------------------------------------------------------------------------- /facesdk.framework/detection.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/detection.bin -------------------------------------------------------------------------------- /facesdk.framework/detection.param: -------------------------------------------------------------------------------- 1 | 7767517 2 | 234 255 3 | Input data 0 1 data 0=60 1=60 2=3 4 | Split splitncnn_0 1 4 data data_splitncnn_0 data_splitncnn_1 data_splitncnn_2 data_splitncnn_3 5 | Convolution conv1 1 1 data_splitncnn_3 conv_blob1 0=8 1=3 2=1 3=2 4=1 5=0 6=216 6 | BatchNorm batch_norm1 1 1 conv_blob1 batch_norm_blob1 0=8 7 | Scale bn_scale1 1 1 batch_norm_blob1 batch_norm_blob1_bn_scale1 0=8 1=1 8 | ReLU relu1 1 1 batch_norm_blob1_bn_scale1 relu_blob1 9 | ConvolutionDepthWise conv2 1 1 relu_blob1 conv_blob2 0=8 1=3 2=1 3=1 4=1 5=0 6=72 7=8 10 | BatchNorm batch_norm2 1 1 conv_blob2 batch_norm_blob2 0=8 11 | Scale bn_scale2 1 1 batch_norm_blob2 batch_norm_blob2_bn_scale2 0=8 1=1 12 | ReLU relu2 1 1 batch_norm_blob2_bn_scale2 relu_blob2 13 | Convolution conv3 1 1 relu_blob2 conv_blob3 0=16 1=1 2=1 3=1 4=0 5=0 6=128 14 | BatchNorm batch_norm3 1 1 conv_blob3 batch_norm_blob3 0=16 15 | Scale bn_scale3 1 1 batch_norm_blob3 batch_norm_blob3_bn_scale3 0=16 1=1 16 | ReLU relu3 1 1 batch_norm_blob3_bn_scale3 relu_blob3 17 | ConvolutionDepthWise conv4 1 1 relu_blob3 conv_blob4 0=16 1=3 2=1 3=2 4=1 5=0 6=144 7=16 18 | BatchNorm batch_norm4 1 1 conv_blob4 batch_norm_blob4 0=16 19 | Scale bn_scale4 1 1 batch_norm_blob4 batch_norm_blob4_bn_scale4 0=16 1=1 20 | ReLU relu4 1 1 batch_norm_blob4_bn_scale4 relu_blob4 21 | Convolution conv5 1 1 relu_blob4 conv_blob5 0=32 1=1 2=1 3=1 4=0 5=0 6=512 22 | BatchNorm batch_norm5 1 1 conv_blob5 batch_norm_blob5 0=32 23 | Scale bn_scale5 1 1 batch_norm_blob5 batch_norm_blob5_bn_scale5 0=32 1=1 24 | ReLU relu5 1 1 batch_norm_blob5_bn_scale5 relu_blob5 25 | ConvolutionDepthWise conv6 1 1 relu_blob5 conv_blob6 0=32 1=3 2=1 3=1 4=1 5=0 6=288 7=32 26 | BatchNorm batch_norm6 1 1 conv_blob6 batch_norm_blob6 0=32 27 | Scale bn_scale6 1 1 batch_norm_blob6 batch_norm_blob6_bn_scale6 0=32 1=1 28 | ReLU relu6 1 1 batch_norm_blob6_bn_scale6 relu_blob6 29 | Convolution conv7 1 1 relu_blob6 conv_blob7 0=32 1=1 2=1 3=1 4=0 5=0 6=1024 30 | BatchNorm batch_norm7 1 1 conv_blob7 batch_norm_blob7 0=32 31 | Scale bn_scale7 1 1 batch_norm_blob7 batch_norm_blob7_bn_scale7 0=32 1=1 32 | ReLU relu7 1 1 batch_norm_blob7_bn_scale7 relu_blob7 33 | ConvolutionDepthWise conv8 1 1 relu_blob7 conv_blob8 0=32 1=3 2=1 3=2 4=1 5=0 6=288 7=32 34 | BatchNorm batch_norm8 1 1 
conv_blob8 batch_norm_blob8 0=32 35 | Scale bn_scale8 1 1 batch_norm_blob8 batch_norm_blob8_bn_scale8 0=32 1=1 36 | ReLU relu8 1 1 batch_norm_blob8_bn_scale8 relu_blob8 37 | Convolution conv9 1 1 relu_blob8 conv_blob9 0=64 1=1 2=1 3=1 4=0 5=0 6=2048 38 | BatchNorm batch_norm9 1 1 conv_blob9 batch_norm_blob9 0=64 39 | Scale bn_scale9 1 1 batch_norm_blob9 batch_norm_blob9_bn_scale9 0=64 1=1 40 | ReLU relu9 1 1 batch_norm_blob9_bn_scale9 relu_blob9 41 | ConvolutionDepthWise conv10 1 1 relu_blob9 conv_blob10 0=64 1=3 2=1 3=1 4=1 5=0 6=576 7=64 42 | BatchNorm batch_norm10 1 1 conv_blob10 batch_norm_blob10 0=64 43 | Scale bn_scale10 1 1 batch_norm_blob10 batch_norm_blob10_bn_scale10 0=64 1=1 44 | ReLU relu10 1 1 batch_norm_blob10_bn_scale10 relu_blob10 45 | Convolution conv11 1 1 relu_blob10 conv_blob11 0=64 1=1 2=1 3=1 4=0 5=0 6=4096 46 | BatchNorm batch_norm11 1 1 conv_blob11 batch_norm_blob11 0=64 47 | Scale bn_scale11 1 1 batch_norm_blob11 batch_norm_blob11_bn_scale11 0=64 1=1 48 | ReLU relu11 1 1 batch_norm_blob11_bn_scale11 relu_blob11 49 | Split splitncnn_1 1 2 relu_blob11 relu_blob11_splitncnn_0 relu_blob11_splitncnn_1 50 | ConvolutionDepthWise conv12 1 1 relu_blob11_splitncnn_1 conv_blob12 0=64 1=3 2=1 3=2 4=1 5=0 6=576 7=64 51 | BatchNorm batch_norm12 1 1 conv_blob12 batch_norm_blob12 0=64 52 | Scale bn_scale12 1 1 batch_norm_blob12 batch_norm_blob12_bn_scale12 0=64 1=1 53 | ReLU relu12 1 1 batch_norm_blob12_bn_scale12 relu_blob12 54 | Convolution conv13 1 1 relu_blob12 conv_blob13 0=128 1=1 2=1 3=1 4=0 5=0 6=8192 55 | BatchNorm batch_norm13 1 1 conv_blob13 batch_norm_blob13 0=128 56 | Scale bn_scale13 1 1 batch_norm_blob13 batch_norm_blob13_bn_scale13 0=128 1=1 57 | ReLU relu13 1 1 batch_norm_blob13_bn_scale13 relu_blob13 58 | ConvolutionDepthWise conv14 1 1 relu_blob13 conv_blob14 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 59 | BatchNorm batch_norm14 1 1 conv_blob14 batch_norm_blob14 0=128 60 | Scale bn_scale14 1 1 batch_norm_blob14 batch_norm_blob14_bn_scale14 0=128 1=1 61 | ReLU relu14 1 1 batch_norm_blob14_bn_scale14 relu_blob14 62 | Convolution conv15 1 1 relu_blob14 conv_blob15 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 63 | BatchNorm batch_norm15 1 1 conv_blob15 batch_norm_blob15 0=128 64 | Scale bn_scale15 1 1 batch_norm_blob15 batch_norm_blob15_bn_scale15 0=128 1=1 65 | ReLU relu15 1 1 batch_norm_blob15_bn_scale15 relu_blob15 66 | ConvolutionDepthWise conv16 1 1 relu_blob15 conv_blob16 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 67 | BatchNorm batch_norm16 1 1 conv_blob16 batch_norm_blob16 0=128 68 | Scale bn_scale16 1 1 batch_norm_blob16 batch_norm_blob16_bn_scale16 0=128 1=1 69 | ReLU relu16 1 1 batch_norm_blob16_bn_scale16 relu_blob16 70 | Convolution conv17 1 1 relu_blob16 conv_blob17 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 71 | BatchNorm batch_norm17 1 1 conv_blob17 batch_norm_blob17 0=128 72 | Scale bn_scale17 1 1 batch_norm_blob17 batch_norm_blob17_bn_scale17 0=128 1=1 73 | ReLU relu17 1 1 batch_norm_blob17_bn_scale17 relu_blob17 74 | ConvolutionDepthWise conv18 1 1 relu_blob17 conv_blob18 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 75 | BatchNorm batch_norm18 1 1 conv_blob18 batch_norm_blob18 0=128 76 | Scale bn_scale18 1 1 batch_norm_blob18 batch_norm_blob18_bn_scale18 0=128 1=1 77 | ReLU relu18 1 1 batch_norm_blob18_bn_scale18 relu_blob18 78 | Convolution conv19 1 1 relu_blob18 conv_blob19 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 79 | BatchNorm batch_norm19 1 1 conv_blob19 batch_norm_blob19 0=128 80 | Scale bn_scale19 1 1 batch_norm_blob19 batch_norm_blob19_bn_scale19 0=128 1=1 81 | ReLU relu19 1 1 
batch_norm_blob19_bn_scale19 relu_blob19 82 | ConvolutionDepthWise conv20 1 1 relu_blob19 conv_blob20 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 83 | BatchNorm batch_norm20 1 1 conv_blob20 batch_norm_blob20 0=128 84 | Scale bn_scale20 1 1 batch_norm_blob20 batch_norm_blob20_bn_scale20 0=128 1=1 85 | ReLU relu20 1 1 batch_norm_blob20_bn_scale20 relu_blob20 86 | Convolution conv21 1 1 relu_blob20 conv_blob21 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 87 | BatchNorm batch_norm21 1 1 conv_blob21 batch_norm_blob21 0=128 88 | Scale bn_scale21 1 1 batch_norm_blob21 batch_norm_blob21_bn_scale21 0=128 1=1 89 | ReLU relu21 1 1 batch_norm_blob21_bn_scale21 relu_blob21 90 | ConvolutionDepthWise conv22 1 1 relu_blob21 conv_blob22 0=128 1=3 2=1 3=1 4=1 5=0 6=1152 7=128 91 | BatchNorm batch_norm22 1 1 conv_blob22 batch_norm_blob22 0=128 92 | Scale bn_scale22 1 1 batch_norm_blob22 batch_norm_blob22_bn_scale22 0=128 1=1 93 | ReLU relu22 1 1 batch_norm_blob22_bn_scale22 relu_blob22 94 | Convolution conv23 1 1 relu_blob22 conv_blob23 0=128 1=1 2=1 3=1 4=0 5=0 6=16384 95 | BatchNorm batch_norm23 1 1 conv_blob23 batch_norm_blob23 0=128 96 | Scale bn_scale23 1 1 batch_norm_blob23 batch_norm_blob23_bn_scale23 0=128 1=1 97 | ReLU relu23 1 1 batch_norm_blob23_bn_scale23 relu_blob23 98 | Split splitncnn_2 1 2 relu_blob23 relu_blob23_splitncnn_0 relu_blob23_splitncnn_1 99 | ConvolutionDepthWise conv24 1 1 relu_blob23_splitncnn_1 conv_blob24 0=128 1=3 2=1 3=2 4=1 5=0 6=1152 7=128 100 | BatchNorm batch_norm24 1 1 conv_blob24 batch_norm_blob24 0=128 101 | Scale bn_scale24 1 1 batch_norm_blob24 batch_norm_blob24_bn_scale24 0=128 1=1 102 | ReLU relu24 1 1 batch_norm_blob24_bn_scale24 relu_blob24 103 | Convolution conv25 1 1 relu_blob24 conv_blob25 0=256 1=1 2=1 3=1 4=0 5=0 6=32768 104 | BatchNorm batch_norm25 1 1 conv_blob25 batch_norm_blob25 0=256 105 | Scale bn_scale25 1 1 batch_norm_blob25 batch_norm_blob25_bn_scale25 0=256 1=1 106 | ReLU relu25 1 1 batch_norm_blob25_bn_scale25 relu_blob25 107 | ConvolutionDepthWise conv26 1 1 relu_blob25 conv_blob26 0=256 1=3 2=1 3=1 4=1 5=0 6=2304 7=256 108 | BatchNorm batch_norm26 1 1 conv_blob26 batch_norm_blob26 0=256 109 | Scale bn_scale26 1 1 batch_norm_blob26 batch_norm_blob26_bn_scale26 0=256 1=1 110 | ReLU relu26 1 1 batch_norm_blob26_bn_scale26 relu_blob26 111 | Convolution conv27 1 1 relu_blob26 conv_blob27 0=256 1=1 2=1 3=1 4=0 5=0 6=65536 112 | BatchNorm batch_norm27 1 1 conv_blob27 batch_norm_blob27 0=256 113 | Scale bn_scale27 1 1 batch_norm_blob27 batch_norm_blob27_bn_scale27 0=256 1=1 114 | ReLU relu27 1 1 batch_norm_blob27_bn_scale27 relu_blob27 115 | Convolution conv28 1 1 relu_blob11_splitncnn_0 conv_blob28 0=64 1=1 2=1 3=1 4=0 5=0 6=4096 116 | BatchNorm batch_norm28 1 1 conv_blob28 batch_norm_blob28 0=64 117 | Scale bn_scale28 1 1 batch_norm_blob28 batch_norm_blob28_bn_scale28 0=64 1=1 118 | ReLU relu28 1 1 batch_norm_blob28_bn_scale28 relu_blob28 119 | Split splitncnn_3 1 2 relu_blob28 relu_blob28_splitncnn_0 relu_blob28_splitncnn_1 120 | Convolution conv29 1 1 relu_blob23_splitncnn_0 conv_blob29 0=64 1=1 2=1 3=1 4=0 5=0 6=8192 121 | BatchNorm batch_norm29 1 1 conv_blob29 batch_norm_blob29 0=64 122 | Scale bn_scale29 1 1 batch_norm_blob29 batch_norm_blob29_bn_scale29 0=64 1=1 123 | ReLU relu29 1 1 batch_norm_blob29_bn_scale29 relu_blob29 124 | Split splitncnn_4 1 2 relu_blob29 relu_blob29_splitncnn_0 relu_blob29_splitncnn_1 125 | Convolution conv30 1 1 relu_blob27 conv_blob30 0=64 1=1 2=1 3=1 4=0 5=0 6=16384 126 | BatchNorm batch_norm30 1 1 conv_blob30 batch_norm_blob30 0=64 
127 | Scale bn_scale30 1 1 batch_norm_blob30 batch_norm_blob30_bn_scale30 0=64 1=1 128 | ReLU relu30 1 1 batch_norm_blob30_bn_scale30 relu_blob30 129 | Split splitncnn_5 1 3 relu_blob30 relu_blob30_splitncnn_0 relu_blob30_splitncnn_1 relu_blob30_splitncnn_2 130 | Deconvolution conv_transpose1 1 1 relu_blob30_splitncnn_2 conv_transpose_blob1 0=64 1=2 2=1 3=2 4=0 5=1 6=16384 131 | Crop crop1 2 1 conv_transpose_blob1 relu_blob29_splitncnn_1 crop1 132 | Eltwise add1 2 1 relu_blob29_splitncnn_0 crop1 add_blob1 0=1 -23301=0 133 | Convolution conv31 1 1 add_blob1 conv_blob31 0=64 1=3 2=1 3=1 4=1 5=0 6=36864 134 | BatchNorm batch_norm31 1 1 conv_blob31 batch_norm_blob31 0=64 135 | Scale bn_scale31 1 1 batch_norm_blob31 batch_norm_blob31_bn_scale31 0=64 1=1 136 | ReLU relu31 1 1 batch_norm_blob31_bn_scale31 relu_blob31 137 | Split splitncnn_6 1 3 relu_blob31 relu_blob31_splitncnn_0 relu_blob31_splitncnn_1 relu_blob31_splitncnn_2 138 | Deconvolution conv_transpose2 1 1 relu_blob31_splitncnn_2 conv_transpose_blob2 0=64 1=2 2=1 3=2 4=0 5=1 6=16384 139 | Crop crop2 2 1 conv_transpose_blob2 relu_blob28_splitncnn_1 crop2 140 | Eltwise add2 2 1 relu_blob28_splitncnn_0 crop2 add_blob2 0=1 -23301=0 141 | Convolution conv32 1 1 add_blob2 conv_blob32 0=64 1=3 2=1 3=1 4=1 5=0 6=36864 142 | BatchNorm batch_norm32 1 1 conv_blob32 batch_norm_blob32 0=64 143 | Scale bn_scale32 1 1 batch_norm_blob32 batch_norm_blob32_bn_scale32 0=64 1=1 144 | ReLU relu32 1 1 batch_norm_blob32_bn_scale32 relu_blob32 145 | Split splitncnn_7 1 2 relu_blob32 relu_blob32_splitncnn_0 relu_blob32_splitncnn_1 146 | Convolution conv33 1 1 relu_blob32_splitncnn_1 conv_blob33 0=32 1=3 2=1 3=1 4=1 5=0 6=18432 147 | BatchNorm batch_norm33 1 1 conv_blob33 batch_norm_blob33 0=32 148 | Scale bn_scale33 1 1 batch_norm_blob33 batch_norm_blob33_bn_scale33 0=32 1=1 149 | Convolution conv34 1 1 relu_blob32_splitncnn_0 conv_blob34 0=16 1=3 2=1 3=1 4=1 5=0 6=9216 150 | BatchNorm batch_norm34 1 1 conv_blob34 batch_norm_blob34 0=16 151 | Scale bn_scale34 1 1 batch_norm_blob34 batch_norm_blob34_bn_scale34 0=16 1=1 152 | ReLU relu33 1 1 batch_norm_blob34_bn_scale34 relu_blob33 153 | Split splitncnn_8 1 2 relu_blob33 relu_blob33_splitncnn_0 relu_blob33_splitncnn_1 154 | Convolution conv35 1 1 relu_blob33_splitncnn_1 conv_blob35 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 155 | BatchNorm batch_norm35 1 1 conv_blob35 batch_norm_blob35 0=16 156 | Scale bn_scale35 1 1 batch_norm_blob35 batch_norm_blob35_bn_scale35 0=16 1=1 157 | Convolution conv36 1 1 relu_blob33_splitncnn_0 conv_blob36 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 158 | BatchNorm batch_norm36 1 1 conv_blob36 batch_norm_blob36 0=16 159 | Scale bn_scale36 1 1 batch_norm_blob36 batch_norm_blob36_bn_scale36 0=16 1=1 160 | ReLU relu34 1 1 batch_norm_blob36_bn_scale36 relu_blob34 161 | Convolution conv37 1 1 relu_blob34 conv_blob37 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 162 | BatchNorm batch_norm37 1 1 conv_blob37 batch_norm_blob37 0=16 163 | Scale bn_scale37 1 1 batch_norm_blob37 batch_norm_blob37_bn_scale37 0=16 1=1 164 | Concat cat1 3 1 batch_norm_blob33_bn_scale33 batch_norm_blob35_bn_scale35 batch_norm_blob37_bn_scale37 cat_blob1 0=0 165 | ReLU relu35 1 1 cat_blob1 relu_blob35 166 | Split splitncnn_9 1 3 relu_blob35 relu_blob35_splitncnn_0 relu_blob35_splitncnn_1 relu_blob35_splitncnn_2 167 | Convolution conv38 1 1 relu_blob31_splitncnn_1 conv_blob38 0=32 1=3 2=1 3=1 4=1 5=0 6=18432 168 | BatchNorm batch_norm38 1 1 conv_blob38 batch_norm_blob38 0=32 169 | Scale bn_scale38 1 1 batch_norm_blob38 batch_norm_blob38_bn_scale38 0=32 
1=1 170 | Convolution conv39 1 1 relu_blob31_splitncnn_0 conv_blob39 0=16 1=3 2=1 3=1 4=1 5=0 6=9216 171 | BatchNorm batch_norm39 1 1 conv_blob39 batch_norm_blob39 0=16 172 | Scale bn_scale39 1 1 batch_norm_blob39 batch_norm_blob39_bn_scale39 0=16 1=1 173 | ReLU relu36 1 1 batch_norm_blob39_bn_scale39 relu_blob36 174 | Split splitncnn_10 1 2 relu_blob36 relu_blob36_splitncnn_0 relu_blob36_splitncnn_1 175 | Convolution conv40 1 1 relu_blob36_splitncnn_1 conv_blob40 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 176 | BatchNorm batch_norm40 1 1 conv_blob40 batch_norm_blob40 0=16 177 | Scale bn_scale40 1 1 batch_norm_blob40 batch_norm_blob40_bn_scale40 0=16 1=1 178 | Convolution conv41 1 1 relu_blob36_splitncnn_0 conv_blob41 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 179 | BatchNorm batch_norm41 1 1 conv_blob41 batch_norm_blob41 0=16 180 | Scale bn_scale41 1 1 batch_norm_blob41 batch_norm_blob41_bn_scale41 0=16 1=1 181 | ReLU relu37 1 1 batch_norm_blob41_bn_scale41 relu_blob37 182 | Convolution conv42 1 1 relu_blob37 conv_blob42 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 183 | BatchNorm batch_norm42 1 1 conv_blob42 batch_norm_blob42 0=16 184 | Scale bn_scale42 1 1 batch_norm_blob42 batch_norm_blob42_bn_scale42 0=16 1=1 185 | Concat cat2 3 1 batch_norm_blob38_bn_scale38 batch_norm_blob40_bn_scale40 batch_norm_blob42_bn_scale42 cat_blob2 0=0 186 | ReLU relu38 1 1 cat_blob2 relu_blob38 187 | Split splitncnn_11 1 3 relu_blob38 relu_blob38_splitncnn_0 relu_blob38_splitncnn_1 relu_blob38_splitncnn_2 188 | Convolution conv43 1 1 relu_blob30_splitncnn_1 conv_blob43 0=32 1=3 2=1 3=1 4=1 5=0 6=18432 189 | BatchNorm batch_norm43 1 1 conv_blob43 batch_norm_blob43 0=32 190 | Scale bn_scale43 1 1 batch_norm_blob43 batch_norm_blob43_bn_scale43 0=32 1=1 191 | Convolution conv44 1 1 relu_blob30_splitncnn_0 conv_blob44 0=16 1=3 2=1 3=1 4=1 5=0 6=9216 192 | BatchNorm batch_norm44 1 1 conv_blob44 batch_norm_blob44 0=16 193 | Scale bn_scale44 1 1 batch_norm_blob44 batch_norm_blob44_bn_scale44 0=16 1=1 194 | ReLU relu39 1 1 batch_norm_blob44_bn_scale44 relu_blob39 195 | Split splitncnn_12 1 2 relu_blob39 relu_blob39_splitncnn_0 relu_blob39_splitncnn_1 196 | Convolution conv45 1 1 relu_blob39_splitncnn_1 conv_blob45 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 197 | BatchNorm batch_norm45 1 1 conv_blob45 batch_norm_blob45 0=16 198 | Scale bn_scale45 1 1 batch_norm_blob45 batch_norm_blob45_bn_scale45 0=16 1=1 199 | Convolution conv46 1 1 relu_blob39_splitncnn_0 conv_blob46 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 200 | BatchNorm batch_norm46 1 1 conv_blob46 batch_norm_blob46 0=16 201 | Scale bn_scale46 1 1 batch_norm_blob46 batch_norm_blob46_bn_scale46 0=16 1=1 202 | ReLU relu40 1 1 batch_norm_blob46_bn_scale46 relu_blob40 203 | Convolution conv47 1 1 relu_blob40 conv_blob47 0=16 1=3 2=1 3=1 4=1 5=0 6=2304 204 | BatchNorm batch_norm47 1 1 conv_blob47 batch_norm_blob47 0=16 205 | Scale bn_scale47 1 1 batch_norm_blob47 batch_norm_blob47_bn_scale47 0=16 1=1 206 | Concat cat3 3 1 batch_norm_blob43_bn_scale43 batch_norm_blob45_bn_scale45 batch_norm_blob47_bn_scale47 cat_blob3 0=0 207 | ReLU relu41 1 1 cat_blob3 relu_blob41 208 | Split splitncnn_13 1 3 relu_blob41 relu_blob41_splitncnn_0 relu_blob41_splitncnn_1 relu_blob41_splitncnn_2 209 | Convolution conv48 1 1 relu_blob35_splitncnn_2 conv_blob48 0=8 1=1 2=1 3=1 4=0 5=1 6=512 210 | Convolution conv49 1 1 relu_blob35_splitncnn_1 conv_blob49 0=4 1=1 2=1 3=1 4=0 5=1 6=256 211 | Convolution conv50 1 1 relu_blob38_splitncnn_2 conv_blob50 0=8 1=1 2=1 3=1 4=0 5=1 6=512 212 | Convolution conv51 1 1 relu_blob38_splitncnn_1 conv_blob51 
0=4 1=1 2=1 3=1 4=0 5=1 6=256 213 | Convolution conv52 1 1 relu_blob41_splitncnn_2 conv_blob52 0=8 1=1 2=1 3=1 4=0 5=1 6=512 214 | Convolution conv53 1 1 relu_blob41_splitncnn_1 conv_blob53 0=4 1=1 2=1 3=1 4=0 5=1 6=256 215 | Permute conv4_3_norm_mbox_loc_perm 1 1 conv_blob48 conv4_3_norm_mbox_loc_perm 0=3 216 | Flatten conv4_3_norm_mbox_loc_flat 1 1 conv4_3_norm_mbox_loc_perm conv4_3_norm_mbox_loc_flat 217 | Permute conv4_3_norm_mbox_conf_perm 1 1 conv_blob49 conv4_3_norm_mbox_conf_perm 0=3 218 | Flatten conv4_3_norm_mbox_conf_flat 1 1 conv4_3_norm_mbox_conf_perm conv4_3_norm_mbox_conf_flat 219 | PriorBox conv4_3_norm_mbox_priorbox 2 1 relu_blob35_splitncnn_0 data_splitncnn_2 conv4_3_norm_mbox_priorbox -23300=2,1.600000e+01,3.200000e+01 -23301=0 -23302=0 3=1.000000e-01 4=1.000000e-01 5=2.000000e-01 6=2.000000e-01 7=1 8=0 9=-233 10=-233 11=8.000000e+00 12=8.000000e+00 13=5.000000e-01 220 | Permute conv5_3_norm_mbox_loc_perm 1 1 conv_blob50 conv5_3_norm_mbox_loc_perm 0=3 221 | Flatten conv5_3_norm_mbox_loc_flat 1 1 conv5_3_norm_mbox_loc_perm conv5_3_norm_mbox_loc_flat 222 | Permute conv5_3_norm_mbox_conf_perm 1 1 conv_blob51 conv5_3_norm_mbox_conf_perm 0=3 223 | Flatten conv5_3_norm_mbox_conf_flat 1 1 conv5_3_norm_mbox_conf_perm conv5_3_norm_mbox_conf_flat 224 | PriorBox conv5_3_norm_mbox_priorbox 2 1 relu_blob38_splitncnn_0 data_splitncnn_1 conv5_3_norm_mbox_priorbox -23300=2,6.400000e+01,1.280000e+02 -23301=0 -23302=0 3=1.000000e-01 4=1.000000e-01 5=2.000000e-01 6=2.000000e-01 7=1 8=0 9=-233 10=-233 11=1.600000e+01 12=1.600000e+01 13=5.000000e-01 225 | Permute conv6_3_norm_mbox_loc_perm 1 1 conv_blob52 conv6_3_norm_mbox_loc_perm 0=3 226 | Flatten conv6_3_norm_mbox_loc_flat 1 1 conv6_3_norm_mbox_loc_perm conv6_3_norm_mbox_loc_flat 227 | Permute conv6_3_norm_mbox_conf_perm 1 1 conv_blob53 conv6_3_norm_mbox_conf_perm 0=3 228 | Flatten conv6_3_norm_mbox_conf_flat 1 1 conv6_3_norm_mbox_conf_perm conv6_3_norm_mbox_conf_flat 229 | PriorBox conv6_3_norm_mbox_priorbox 2 1 relu_blob41_splitncnn_0 data_splitncnn_0 conv6_3_norm_mbox_priorbox -23300=2,2.560000e+02,5.120000e+02 -23301=0 -23302=0 3=1.000000e-01 4=1.000000e-01 5=2.000000e-01 6=2.000000e-01 7=1 8=0 9=-233 10=-233 11=3.200000e+01 12=3.200000e+01 13=5.000000e-01 230 | Concat mbox_loc 3 1 conv4_3_norm_mbox_loc_flat conv5_3_norm_mbox_loc_flat conv6_3_norm_mbox_loc_flat mbox_loc 0=0 231 | Concat mbox_conf 3 1 conv4_3_norm_mbox_conf_flat conv5_3_norm_mbox_conf_flat conv6_3_norm_mbox_conf_flat mbox_conf 0=0 232 | Concat mbox_priorbox 3 1 conv4_3_norm_mbox_priorbox conv5_3_norm_mbox_priorbox conv6_3_norm_mbox_priorbox mbox_priorbox 0=1 233 | Reshape mbox_conf_reshape 1 1 mbox_conf mbox_conf_reshape 0=2 1=-1 2=-233 3=0 234 | Softmax mbox_conf_softmax 1 1 mbox_conf_reshape mbox_conf_softmax 0=1 1=1 235 | Flatten mbox_conf_flatten 1 1 mbox_conf_softmax mbox_conf_flatten 236 | DetectionOutput detection_out 3 1 mbox_loc mbox_conf_flatten mbox_priorbox detection_out 0=2 1=3.000000e-01 2=400 3=200 4=1.000000e-01 237 | -------------------------------------------------------------------------------- /facesdk.framework/ec.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/ec.bin -------------------------------------------------------------------------------- /facesdk.framework/ec.param.bin: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/ec.param.bin -------------------------------------------------------------------------------- /facesdk.framework/facesdk: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/facesdk -------------------------------------------------------------------------------- /facesdk.framework/landmark.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/landmark.bin -------------------------------------------------------------------------------- /facesdk.framework/landmark.param: -------------------------------------------------------------------------------- 1 | 7767517 2 | 15 15 3 | Input input 0 1 data 0=60 1=60 2=1 4 | Convolution Conv1 1 1 data Conv1 0=20 1=5 2=1 3=1 4=2 5=1 6=500 5 | ReLU ActivationReLU1 1 1 Conv1 ActivationReLU1 6 | Pooling Pool1 1 1 ActivationReLU1 Pool1 0=0 1=2 2=2 3=0 4=0 7 | Convolution Conv2 1 1 Pool1 Conv2 0=48 1=5 2=1 3=1 4=2 5=1 6=24000 8 | ReLU ActivationReLU2 1 1 Conv2 ActivationReLU2 9 | Pooling Pool2 1 1 ActivationReLU2 Pool2 0=0 1=2 2=2 3=0 4=0 10 | Convolution Conv3 1 1 Pool2 Conv3 0=64 1=3 2=1 3=1 4=0 5=1 6=27648 11 | ReLU ActivationReLU3 1 1 Conv3 ActivationReLU3 12 | Pooling Pool3 1 1 ActivationReLU3 Pool3 0=0 1=3 2=2 3=0 4=0 13 | Convolution Conv4 1 1 Pool3 Conv4 0=80 1=3 2=1 3=1 4=0 5=1 6=46080 14 | ReLU ActivationReLU4 1 1 Conv4 ActivationReLU4 15 | InnerProduct Dense1 1 1 ActivationReLU4 Dense1 0=512 1=1 2=655360 16 | ReLU ActivationReLU5 1 1 Dense1 ActivationReLU5 17 | InnerProduct Dense3 1 1 ActivationReLU5 Dense3 0=136 1=1 2=69632 18 | -------------------------------------------------------------------------------- /facesdk.framework/liveness.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/liveness.bin -------------------------------------------------------------------------------- /facesdk.framework/occ.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/occ.bin -------------------------------------------------------------------------------- /facesdk.framework/recognize.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/kby-ai/FaceAttribute-iOS/d5ebad6a0659fecd8c99c59e3179fabc092836a1/facesdk.framework/recognize.bin -------------------------------------------------------------------------------- /facesdk.framework/recognize.param: -------------------------------------------------------------------------------- 1 | 7767517 2 | 160 172 3 | Input data 0 1 data 4 | BinaryOp _minusscalar0 1 1 data _minusscalar0 0=1 1=1 2=127.500000 5 | BinaryOp _mulscalar0 1 1 _minusscalar0 _mulscalar0 0=2 1=1 2=0.007812 6 | Convolution conv_1_conv2d 1 1 _mulscalar0 conv_1_conv2d 0=64 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=1728 7 | BatchNorm conv_1_batchnorm 1 1 conv_1_conv2d conv_1_batchnorm 0=64 8 | PReLU conv_1_relu 1 1 conv_1_batchnorm conv_1_relu 0=64 9 | ConvolutionDepthWise conv_2_dw_conv2d 1 1 
conv_1_relu conv_2_dw_conv2d 0=64 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=576 7=64 10 | BatchNorm conv_2_dw_batchnorm 1 1 conv_2_dw_conv2d conv_2_dw_batchnorm 0=64 11 | PReLU conv_2_dw_relu 1 1 conv_2_dw_batchnorm conv_2_dw_relu 0=64 12 | Convolution dconv_23_conv_sep_conv2d 1 1 conv_2_dw_relu dconv_23_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 13 | BatchNorm dconv_23_conv_sep_batchnorm 1 1 dconv_23_conv_sep_conv2d dconv_23_conv_sep_batchnorm 0=128 14 | PReLU dconv_23_conv_sep_relu 1 1 dconv_23_conv_sep_batchnorm dconv_23_conv_sep_relu 0=128 15 | ConvolutionDepthWise dconv_23_conv_dw_conv2d 1 1 dconv_23_conv_sep_relu dconv_23_conv_dw_conv2d 0=128 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=1152 7=128 16 | BatchNorm dconv_23_conv_dw_batchnorm 1 1 dconv_23_conv_dw_conv2d dconv_23_conv_dw_batchnorm 0=128 17 | PReLU dconv_23_conv_dw_relu 1 1 dconv_23_conv_dw_batchnorm dconv_23_conv_dw_relu 0=128 18 | Convolution dconv_23_conv_proj_conv2d 1 1 dconv_23_conv_dw_relu dconv_23_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 19 | BatchNorm dconv_23_conv_proj_batchnorm 1 1 dconv_23_conv_proj_conv2d dconv_23_conv_proj_batchnorm 0=64 20 | Split splitncnn_0 1 2 dconv_23_conv_proj_batchnorm dconv_23_conv_proj_batchnorm_splitncnn_0 dconv_23_conv_proj_batchnorm_splitncnn_1 21 | Convolution res_3_block0_conv_sep_conv2d 1 1 dconv_23_conv_proj_batchnorm_splitncnn_1 res_3_block0_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 22 | BatchNorm res_3_block0_conv_sep_batchnorm 1 1 res_3_block0_conv_sep_conv2d res_3_block0_conv_sep_batchnorm 0=128 23 | PReLU res_3_block0_conv_sep_relu 1 1 res_3_block0_conv_sep_batchnorm res_3_block0_conv_sep_relu 0=128 24 | ConvolutionDepthWise res_3_block0_conv_dw_conv2d 1 1 res_3_block0_conv_sep_relu res_3_block0_conv_dw_conv2d 0=128 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128 25 | BatchNorm res_3_block0_conv_dw_batchnorm 1 1 res_3_block0_conv_dw_conv2d res_3_block0_conv_dw_batchnorm 0=128 26 | PReLU res_3_block0_conv_dw_relu 1 1 res_3_block0_conv_dw_batchnorm res_3_block0_conv_dw_relu 0=128 27 | Convolution res_3_block0_conv_proj_conv2d 1 1 res_3_block0_conv_dw_relu res_3_block0_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 28 | BatchNorm res_3_block0_conv_proj_batchnorm 1 1 res_3_block0_conv_proj_conv2d res_3_block0_conv_proj_batchnorm 0=64 29 | BinaryOp _plus0 2 1 res_3_block0_conv_proj_batchnorm dconv_23_conv_proj_batchnorm_splitncnn_0 _plus0 0=0 30 | Split splitncnn_1 1 2 _plus0 _plus0_splitncnn_0 _plus0_splitncnn_1 31 | Convolution res_3_block1_conv_sep_conv2d 1 1 _plus0_splitncnn_1 res_3_block1_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 32 | BatchNorm res_3_block1_conv_sep_batchnorm 1 1 res_3_block1_conv_sep_conv2d res_3_block1_conv_sep_batchnorm 0=128 33 | PReLU res_3_block1_conv_sep_relu 1 1 res_3_block1_conv_sep_batchnorm res_3_block1_conv_sep_relu 0=128 34 | ConvolutionDepthWise res_3_block1_conv_dw_conv2d 1 1 res_3_block1_conv_sep_relu res_3_block1_conv_dw_conv2d 0=128 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128 35 | BatchNorm res_3_block1_conv_dw_batchnorm 1 1 res_3_block1_conv_dw_conv2d res_3_block1_conv_dw_batchnorm 0=128 36 | PReLU res_3_block1_conv_dw_relu 1 1 res_3_block1_conv_dw_batchnorm res_3_block1_conv_dw_relu 0=128 37 | Convolution res_3_block1_conv_proj_conv2d 1 1 res_3_block1_conv_dw_relu res_3_block1_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192 38 | BatchNorm res_3_block1_conv_proj_batchnorm 1 1 res_3_block1_conv_proj_conv2d res_3_block1_conv_proj_batchnorm 0=64 39 | BinaryOp _plus1 2 1 
res_3_block1_conv_proj_batchnorm _plus0_splitncnn_0 _plus1 0=0
40 | Split splitncnn_2 1 2 _plus1 _plus1_splitncnn_0 _plus1_splitncnn_1
41 | Convolution res_3_block2_conv_sep_conv2d 1 1 _plus1_splitncnn_1 res_3_block2_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192
42 | BatchNorm res_3_block2_conv_sep_batchnorm 1 1 res_3_block2_conv_sep_conv2d res_3_block2_conv_sep_batchnorm 0=128
43 | PReLU res_3_block2_conv_sep_relu 1 1 res_3_block2_conv_sep_batchnorm res_3_block2_conv_sep_relu 0=128
44 | ConvolutionDepthWise res_3_block2_conv_dw_conv2d 1 1 res_3_block2_conv_sep_relu res_3_block2_conv_dw_conv2d 0=128 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128
45 | BatchNorm res_3_block2_conv_dw_batchnorm 1 1 res_3_block2_conv_dw_conv2d res_3_block2_conv_dw_batchnorm 0=128
46 | PReLU res_3_block2_conv_dw_relu 1 1 res_3_block2_conv_dw_batchnorm res_3_block2_conv_dw_relu 0=128
47 | Convolution res_3_block2_conv_proj_conv2d 1 1 res_3_block2_conv_dw_relu res_3_block2_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192
48 | BatchNorm res_3_block2_conv_proj_batchnorm 1 1 res_3_block2_conv_proj_conv2d res_3_block2_conv_proj_batchnorm 0=64
49 | BinaryOp _plus2 2 1 res_3_block2_conv_proj_batchnorm _plus1_splitncnn_0 _plus2 0=0
50 | Split splitncnn_3 1 2 _plus2 _plus2_splitncnn_0 _plus2_splitncnn_1
51 | Convolution res_3_block3_conv_sep_conv2d 1 1 _plus2_splitncnn_1 res_3_block3_conv_sep_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192
52 | BatchNorm res_3_block3_conv_sep_batchnorm 1 1 res_3_block3_conv_sep_conv2d res_3_block3_conv_sep_batchnorm 0=128
53 | PReLU res_3_block3_conv_sep_relu 1 1 res_3_block3_conv_sep_batchnorm res_3_block3_conv_sep_relu 0=128
54 | ConvolutionDepthWise res_3_block3_conv_dw_conv2d 1 1 res_3_block3_conv_sep_relu res_3_block3_conv_dw_conv2d 0=128 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=1152 7=128
55 | BatchNorm res_3_block3_conv_dw_batchnorm 1 1 res_3_block3_conv_dw_conv2d res_3_block3_conv_dw_batchnorm 0=128
56 | PReLU res_3_block3_conv_dw_relu 1 1 res_3_block3_conv_dw_batchnorm res_3_block3_conv_dw_relu 0=128
57 | Convolution res_3_block3_conv_proj_conv2d 1 1 res_3_block3_conv_dw_relu res_3_block3_conv_proj_conv2d 0=64 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=8192
58 | BatchNorm res_3_block3_conv_proj_batchnorm 1 1 res_3_block3_conv_proj_conv2d res_3_block3_conv_proj_batchnorm 0=64
59 | BinaryOp _plus3 2 1 res_3_block3_conv_proj_batchnorm _plus2_splitncnn_0 _plus3 0=0
60 | Convolution dconv_34_conv_sep_conv2d 1 1 _plus3 dconv_34_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=16384
61 | BatchNorm dconv_34_conv_sep_batchnorm 1 1 dconv_34_conv_sep_conv2d dconv_34_conv_sep_batchnorm 0=256
62 | PReLU dconv_34_conv_sep_relu 1 1 dconv_34_conv_sep_batchnorm dconv_34_conv_sep_relu 0=256
63 | ConvolutionDepthWise dconv_34_conv_dw_conv2d 1 1 dconv_34_conv_sep_relu dconv_34_conv_dw_conv2d 0=256 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=2304 7=256
64 | BatchNorm dconv_34_conv_dw_batchnorm 1 1 dconv_34_conv_dw_conv2d dconv_34_conv_dw_batchnorm 0=256
65 | PReLU dconv_34_conv_dw_relu 1 1 dconv_34_conv_dw_batchnorm dconv_34_conv_dw_relu 0=256
66 | Convolution dconv_34_conv_proj_conv2d 1 1 dconv_34_conv_dw_relu dconv_34_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
67 | BatchNorm dconv_34_conv_proj_batchnorm 1 1 dconv_34_conv_proj_conv2d dconv_34_conv_proj_batchnorm 0=128
68 | Split splitncnn_4 1 2 dconv_34_conv_proj_batchnorm dconv_34_conv_proj_batchnorm_splitncnn_0 dconv_34_conv_proj_batchnorm_splitncnn_1
69 | Convolution res_4_block0_conv_sep_conv2d 1 1 dconv_34_conv_proj_batchnorm_splitncnn_1 res_4_block0_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
70 | BatchNorm res_4_block0_conv_sep_batchnorm 1 1 res_4_block0_conv_sep_conv2d res_4_block0_conv_sep_batchnorm 0=256
71 | PReLU res_4_block0_conv_sep_relu 1 1 res_4_block0_conv_sep_batchnorm res_4_block0_conv_sep_relu 0=256
72 | ConvolutionDepthWise res_4_block0_conv_dw_conv2d 1 1 res_4_block0_conv_sep_relu res_4_block0_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
73 | BatchNorm res_4_block0_conv_dw_batchnorm 1 1 res_4_block0_conv_dw_conv2d res_4_block0_conv_dw_batchnorm 0=256
74 | PReLU res_4_block0_conv_dw_relu 1 1 res_4_block0_conv_dw_batchnorm res_4_block0_conv_dw_relu 0=256
75 | Convolution res_4_block0_conv_proj_conv2d 1 1 res_4_block0_conv_dw_relu res_4_block0_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
76 | BatchNorm res_4_block0_conv_proj_batchnorm 1 1 res_4_block0_conv_proj_conv2d res_4_block0_conv_proj_batchnorm 0=128
77 | BinaryOp _plus4 2 1 res_4_block0_conv_proj_batchnorm dconv_34_conv_proj_batchnorm_splitncnn_0 _plus4 0=0
78 | Split splitncnn_5 1 2 _plus4 _plus4_splitncnn_0 _plus4_splitncnn_1
79 | Convolution res_4_block1_conv_sep_conv2d 1 1 _plus4_splitncnn_1 res_4_block1_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
80 | BatchNorm res_4_block1_conv_sep_batchnorm 1 1 res_4_block1_conv_sep_conv2d res_4_block1_conv_sep_batchnorm 0=256
81 | PReLU res_4_block1_conv_sep_relu 1 1 res_4_block1_conv_sep_batchnorm res_4_block1_conv_sep_relu 0=256
82 | ConvolutionDepthWise res_4_block1_conv_dw_conv2d 1 1 res_4_block1_conv_sep_relu res_4_block1_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
83 | BatchNorm res_4_block1_conv_dw_batchnorm 1 1 res_4_block1_conv_dw_conv2d res_4_block1_conv_dw_batchnorm 0=256
84 | PReLU res_4_block1_conv_dw_relu 1 1 res_4_block1_conv_dw_batchnorm res_4_block1_conv_dw_relu 0=256
85 | Convolution res_4_block1_conv_proj_conv2d 1 1 res_4_block1_conv_dw_relu res_4_block1_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
86 | BatchNorm res_4_block1_conv_proj_batchnorm 1 1 res_4_block1_conv_proj_conv2d res_4_block1_conv_proj_batchnorm 0=128
87 | BinaryOp _plus5 2 1 res_4_block1_conv_proj_batchnorm _plus4_splitncnn_0 _plus5 0=0
88 | Split splitncnn_6 1 2 _plus5 _plus5_splitncnn_0 _plus5_splitncnn_1
89 | Convolution res_4_block2_conv_sep_conv2d 1 1 _plus5_splitncnn_1 res_4_block2_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
90 | BatchNorm res_4_block2_conv_sep_batchnorm 1 1 res_4_block2_conv_sep_conv2d res_4_block2_conv_sep_batchnorm 0=256
91 | PReLU res_4_block2_conv_sep_relu 1 1 res_4_block2_conv_sep_batchnorm res_4_block2_conv_sep_relu 0=256
92 | ConvolutionDepthWise res_4_block2_conv_dw_conv2d 1 1 res_4_block2_conv_sep_relu res_4_block2_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
93 | BatchNorm res_4_block2_conv_dw_batchnorm 1 1 res_4_block2_conv_dw_conv2d res_4_block2_conv_dw_batchnorm 0=256
94 | PReLU res_4_block2_conv_dw_relu 1 1 res_4_block2_conv_dw_batchnorm res_4_block2_conv_dw_relu 0=256
95 | Convolution res_4_block2_conv_proj_conv2d 1 1 res_4_block2_conv_dw_relu res_4_block2_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
96 | BatchNorm res_4_block2_conv_proj_batchnorm 1 1 res_4_block2_conv_proj_conv2d res_4_block2_conv_proj_batchnorm 0=128
97 | BinaryOp _plus6 2 1 res_4_block2_conv_proj_batchnorm _plus5_splitncnn_0 _plus6 0=0
98 | Split splitncnn_7 1 2 _plus6 _plus6_splitncnn_0 _plus6_splitncnn_1
99 | Convolution res_4_block3_conv_sep_conv2d 1 1 _plus6_splitncnn_1 res_4_block3_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
100 | BatchNorm res_4_block3_conv_sep_batchnorm 1 1 res_4_block3_conv_sep_conv2d res_4_block3_conv_sep_batchnorm 0=256
101 | PReLU res_4_block3_conv_sep_relu 1 1 res_4_block3_conv_sep_batchnorm res_4_block3_conv_sep_relu 0=256
102 | ConvolutionDepthWise res_4_block3_conv_dw_conv2d 1 1 res_4_block3_conv_sep_relu res_4_block3_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
103 | BatchNorm res_4_block3_conv_dw_batchnorm 1 1 res_4_block3_conv_dw_conv2d res_4_block3_conv_dw_batchnorm 0=256
104 | PReLU res_4_block3_conv_dw_relu 1 1 res_4_block3_conv_dw_batchnorm res_4_block3_conv_dw_relu 0=256
105 | Convolution res_4_block3_conv_proj_conv2d 1 1 res_4_block3_conv_dw_relu res_4_block3_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
106 | BatchNorm res_4_block3_conv_proj_batchnorm 1 1 res_4_block3_conv_proj_conv2d res_4_block3_conv_proj_batchnorm 0=128
107 | BinaryOp _plus7 2 1 res_4_block3_conv_proj_batchnorm _plus6_splitncnn_0 _plus7 0=0
108 | Split splitncnn_8 1 2 _plus7 _plus7_splitncnn_0 _plus7_splitncnn_1
109 | Convolution res_4_block4_conv_sep_conv2d 1 1 _plus7_splitncnn_1 res_4_block4_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
110 | BatchNorm res_4_block4_conv_sep_batchnorm 1 1 res_4_block4_conv_sep_conv2d res_4_block4_conv_sep_batchnorm 0=256
111 | PReLU res_4_block4_conv_sep_relu 1 1 res_4_block4_conv_sep_batchnorm res_4_block4_conv_sep_relu 0=256
112 | ConvolutionDepthWise res_4_block4_conv_dw_conv2d 1 1 res_4_block4_conv_sep_relu res_4_block4_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
113 | BatchNorm res_4_block4_conv_dw_batchnorm 1 1 res_4_block4_conv_dw_conv2d res_4_block4_conv_dw_batchnorm 0=256
114 | PReLU res_4_block4_conv_dw_relu 1 1 res_4_block4_conv_dw_batchnorm res_4_block4_conv_dw_relu 0=256
115 | Convolution res_4_block4_conv_proj_conv2d 1 1 res_4_block4_conv_dw_relu res_4_block4_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
116 | BatchNorm res_4_block4_conv_proj_batchnorm 1 1 res_4_block4_conv_proj_conv2d res_4_block4_conv_proj_batchnorm 0=128
117 | BinaryOp _plus8 2 1 res_4_block4_conv_proj_batchnorm _plus7_splitncnn_0 _plus8 0=0
118 | Split splitncnn_9 1 2 _plus8 _plus8_splitncnn_0 _plus8_splitncnn_1
119 | Convolution res_4_block5_conv_sep_conv2d 1 1 _plus8_splitncnn_1 res_4_block5_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
120 | BatchNorm res_4_block5_conv_sep_batchnorm 1 1 res_4_block5_conv_sep_conv2d res_4_block5_conv_sep_batchnorm 0=256
121 | PReLU res_4_block5_conv_sep_relu 1 1 res_4_block5_conv_sep_batchnorm res_4_block5_conv_sep_relu 0=256
122 | ConvolutionDepthWise res_4_block5_conv_dw_conv2d 1 1 res_4_block5_conv_sep_relu res_4_block5_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
123 | BatchNorm res_4_block5_conv_dw_batchnorm 1 1 res_4_block5_conv_dw_conv2d res_4_block5_conv_dw_batchnorm 0=256
124 | PReLU res_4_block5_conv_dw_relu 1 1 res_4_block5_conv_dw_batchnorm res_4_block5_conv_dw_relu 0=256
125 | Convolution res_4_block5_conv_proj_conv2d 1 1 res_4_block5_conv_dw_relu res_4_block5_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
126 | BatchNorm res_4_block5_conv_proj_batchnorm 1 1 res_4_block5_conv_proj_conv2d res_4_block5_conv_proj_batchnorm 0=128
127 | BinaryOp _plus9 2 1 res_4_block5_conv_proj_batchnorm _plus8_splitncnn_0 _plus9 0=0
128 | Convolution dconv_45_conv_sep_conv2d 1 1 _plus9 dconv_45_conv_sep_conv2d 0=512 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=65536
129 | BatchNorm dconv_45_conv_sep_batchnorm 1 1 dconv_45_conv_sep_conv2d dconv_45_conv_sep_batchnorm 0=512
130 | PReLU dconv_45_conv_sep_relu 1 1 dconv_45_conv_sep_batchnorm dconv_45_conv_sep_relu 0=512
131 | ConvolutionDepthWise dconv_45_conv_dw_conv2d 1 1 dconv_45_conv_sep_relu dconv_45_conv_dw_conv2d 0=512 1=3 11=3 3=2 13=2 4=1 14=1 5=0 6=4608 7=512
132 | BatchNorm dconv_45_conv_dw_batchnorm 1 1 dconv_45_conv_dw_conv2d dconv_45_conv_dw_batchnorm 0=512
133 | PReLU dconv_45_conv_dw_relu 1 1 dconv_45_conv_dw_batchnorm dconv_45_conv_dw_relu 0=512
134 | Convolution dconv_45_conv_proj_conv2d 1 1 dconv_45_conv_dw_relu dconv_45_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=65536
135 | BatchNorm dconv_45_conv_proj_batchnorm 1 1 dconv_45_conv_proj_conv2d dconv_45_conv_proj_batchnorm 0=128
136 | Split splitncnn_10 1 2 dconv_45_conv_proj_batchnorm dconv_45_conv_proj_batchnorm_splitncnn_0 dconv_45_conv_proj_batchnorm_splitncnn_1
137 | Convolution res_5_block0_conv_sep_conv2d 1 1 dconv_45_conv_proj_batchnorm_splitncnn_1 res_5_block0_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
138 | BatchNorm res_5_block0_conv_sep_batchnorm 1 1 res_5_block0_conv_sep_conv2d res_5_block0_conv_sep_batchnorm 0=256
139 | PReLU res_5_block0_conv_sep_relu 1 1 res_5_block0_conv_sep_batchnorm res_5_block0_conv_sep_relu 0=256
140 | ConvolutionDepthWise res_5_block0_conv_dw_conv2d 1 1 res_5_block0_conv_sep_relu res_5_block0_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
141 | BatchNorm res_5_block0_conv_dw_batchnorm 1 1 res_5_block0_conv_dw_conv2d res_5_block0_conv_dw_batchnorm 0=256
142 | PReLU res_5_block0_conv_dw_relu 1 1 res_5_block0_conv_dw_batchnorm res_5_block0_conv_dw_relu 0=256
143 | Convolution res_5_block0_conv_proj_conv2d 1 1 res_5_block0_conv_dw_relu res_5_block0_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
144 | BatchNorm res_5_block0_conv_proj_batchnorm 1 1 res_5_block0_conv_proj_conv2d res_5_block0_conv_proj_batchnorm 0=128
145 | BinaryOp _plus10 2 1 res_5_block0_conv_proj_batchnorm dconv_45_conv_proj_batchnorm_splitncnn_0 _plus10 0=0
146 | Split splitncnn_11 1 2 _plus10 _plus10_splitncnn_0 _plus10_splitncnn_1
147 | Convolution res_5_block1_conv_sep_conv2d 1 1 _plus10_splitncnn_1 res_5_block1_conv_sep_conv2d 0=256 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
148 | BatchNorm res_5_block1_conv_sep_batchnorm 1 1 res_5_block1_conv_sep_conv2d res_5_block1_conv_sep_batchnorm 0=256
149 | PReLU res_5_block1_conv_sep_relu 1 1 res_5_block1_conv_sep_batchnorm res_5_block1_conv_sep_relu 0=256
150 | ConvolutionDepthWise res_5_block1_conv_dw_conv2d 1 1 res_5_block1_conv_sep_relu res_5_block1_conv_dw_conv2d 0=256 1=3 11=3 3=1 13=1 4=1 14=1 5=0 6=2304 7=256
151 | BatchNorm res_5_block1_conv_dw_batchnorm 1 1 res_5_block1_conv_dw_conv2d res_5_block1_conv_dw_batchnorm 0=256
152 | PReLU res_5_block1_conv_dw_relu 1 1 res_5_block1_conv_dw_batchnorm res_5_block1_conv_dw_relu 0=256
153 | Convolution res_5_block1_conv_proj_conv2d 1 1 res_5_block1_conv_dw_relu res_5_block1_conv_proj_conv2d 0=128 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=32768
154 | BatchNorm res_5_block1_conv_proj_batchnorm 1 1 res_5_block1_conv_proj_conv2d res_5_block1_conv_proj_batchnorm 0=128
155 | BinaryOp _plus11 2 1 res_5_block1_conv_proj_batchnorm _plus10_splitncnn_0 _plus11 0=0
156 | Convolution conv_6sep_conv2d 1 1 _plus11 conv_6sep_conv2d 0=512 1=1 11=1 3=1 13=1 4=0 14=0 5=0 6=65536
157 | BatchNorm conv_6sep_batchnorm 1 1 conv_6sep_conv2d conv_6sep_batchnorm 0=512
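Note on the graph above: this .param text describes a MobileFaceNet-style embedding network in ncnn's plain-text format, built from repeated pointwise (sep) / depthwise (dw) / projection (proj) convolutions with PReLU activations and residual adds (the _plusN BinaryOp layers), closed by a 7x7 depthwise convolution and an InnerProduct + BatchNorm pair that produce a 128-dimensional feature at the fc1 blob (layers 161-162). As a hedged sketch only, using the open-source ncnn C++ runtime rather than the bundled facesdk API, the snippet below shows how such a .param/.bin pair is typically loaded and how the fc1 embedding is read back. The file names recognition.param/recognition.bin, the "data" input blob name, the 112x112 input size, and the mean/scale normalization are illustrative assumptions, not values taken from this repository.

// build: link against libncnn; the header path depends on how ncnn is installed
#include <cstdio>
#include <vector>
#include <ncnn/net.h>

int main()
{
    ncnn::Net net;

    // Hypothetical file names for illustration; the real framework ships its own .param/.bin pairs.
    if (net.load_param("recognition.param") != 0 ||
        net.load_model("recognition.bin") != 0)
    {
        fprintf(stderr, "failed to load model files\n");
        return 1;
    }

    // Assume a 112x112 BGR face crop; a real app would pass an aligned face image here.
    const int w = 112, h = 112;
    std::vector<unsigned char> bgr(static_cast<size_t>(w) * h * 3, 0);
    ncnn::Mat in = ncnn::Mat::from_pixels(bgr.data(), ncnn::Mat::PIXEL_BGR, w, h);

    // Common ArcFace-style preprocessing: (pixel - 127.5) / 128 (assumed, not confirmed by the repo).
    const float mean[3] = {127.5f, 127.5f, 127.5f};
    const float norm[3] = {1.f / 128.f, 1.f / 128.f, 1.f / 128.f};
    in.substract_mean_normalize(mean, norm);

    ncnn::Extractor ex = net.create_extractor();
    ex.input("data", in);      // "data" is the conventional input blob name (assumed)

    ncnn::Mat feat;
    ex.extract("fc1", feat);   // fc1 is the 128-D output blob defined by the graph above

    printf("embedding size: %d\n", feat.w);
    return 0;
}

Two embeddings obtained this way are usually L2-normalized and compared with cosine similarity to decide whether two face crops belong to the same person.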
158 | PReLU conv_6sep_relu 1 1 conv_6sep_batchnorm conv_6sep_relu 0=512
159 | ConvolutionDepthWise conv_6dw7_7_conv2d 1 1 conv_6sep_relu conv_6dw7_7_conv2d 0=512 1=7 11=7 3=1 13=1 4=0 14=0 5=0 6=25088 7=512
160 | BatchNorm conv_6dw7_7_batchnorm 1 1 conv_6dw7_7_conv2d conv_6dw7_7_batchnorm 0=512
161 | InnerProduct pre_fc1 1 1 conv_6dw7_7_batchnorm pre_fc1 0=128 1=1 2=65536
162 | BatchNorm fc1 1 1 pre_fc1 fc1 0=128
163 | 
--------------------------------------------------------------------------------
/privacy:
--------------------------------------------------------------------------------
1 | Face Data Collection and Use
2 | 
3 | Our app collects a liveness detection template of your face when you use certain features. This data is used to calculate a liveness score for your face, which determines whether the face is real or spoofed.
4 | 
5 | We use this data solely to provide you with a secure and accurate authentication process. We do not use it for any other purpose, and we do not share it with any third parties.
6 | 
7 | The face data is not stored or retained by our app in any way. It is held in memory only for the duration of the liveness detection process and is discarded immediately once the process is complete.
8 | 
9 | We take the security and privacy of your face data seriously and use industry-standard security measures to protect it from unauthorized access, disclosure, alteration, or destruction.
10 | 
--------------------------------------------------------------------------------