├── .gitignore ├── Face Recognize.xcodeproj └── project.pbxproj ├── Face Recognize ├── Assets.xcassets │ ├── AccentColor.colorset │ │ └── Contents.json │ ├── AppIcon.appiconset │ │ └── Contents.json │ └── Contents.json ├── Controller │ ├── CameraController.swift │ └── UserAdminController.swift ├── Face_Recognize.xcdatamodeld │ ├── .xccurrentversion │ └── Face_Recognize.xcdatamodel │ │ └── contents ├── Face_RecognizeApp.swift ├── Info.plist ├── Persistence.swift ├── Preview Content │ └── Preview Assets.xcassets │ │ └── Contents.json ├── Util.swift ├── View │ ├── CameraPreviewView.swift │ ├── FaceRecognizeView.swift │ ├── ImagePickerView.swift │ └── UserAdminView.swift └── objc │ ├── Face Recognize-Bridging-Header.h │ ├── FaceRecognizer.h │ └── FaceRecognizer.mm ├── Face RecognizeTests ├── Face_RecognizeTests.swift └── Info.plist ├── Face RecognizeUITests ├── Face_RecognizeUITests.swift └── Info.plist ├── Podfile └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | Pods/ 2 | Podfile.lock 3 | *.xcworkspace 4 | xcuserdata/ 5 | -------------------------------------------------------------------------------- /Face Recognize.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 50; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 4B20911A26D4F1BF0030DCFF /* shape_predictor_68_face_landmarks.dat in Resources */ = {isa = PBXBuildFile; fileRef = 4BFE97D926D4DA7400375577 /* shape_predictor_68_face_landmarks.dat */; }; 11 | 4B3E9DAD26CF9F58007176E4 /* Face_RecognizeApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DAC26CF9F58007176E4 /* Face_RecognizeApp.swift */; }; 12 | 4B3E9DB126CF9F59007176E4 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 4B3E9DB026CF9F59007176E4 /* Assets.xcassets */; }; 13 | 4B3E9DB426CF9F59007176E4 /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 4B3E9DB326CF9F59007176E4 /* Preview Assets.xcassets */; }; 14 | 4B3E9DB626CF9F59007176E4 /* Persistence.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DB526CF9F59007176E4 /* Persistence.swift */; }; 15 | 4B3E9DB926CF9F59007176E4 /* Face_Recognize.xcdatamodeld in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DB726CF9F59007176E4 /* Face_Recognize.xcdatamodeld */; }; 16 | 4B3E9DC426CF9F5A007176E4 /* Face_RecognizeTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DC326CF9F5A007176E4 /* Face_RecognizeTests.swift */; }; 17 | 4B3E9DCF26CF9F5A007176E4 /* Face_RecognizeUITests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DCE26CF9F5A007176E4 /* Face_RecognizeUITests.swift */; }; 18 | 4B3E9DE426CFB983007176E4 /* FaceRecognizer.mm in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DE326CFB983007176E4 /* FaceRecognizer.mm */; }; 19 | 4B3E9DE726CFD5C7007176E4 /* CameraController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DE626CFD5C7007176E4 /* CameraController.swift */; }; 20 | 4B3E9DEC26D00948007176E4 /* CameraPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DEB26D00948007176E4 /* CameraPreviewView.swift */; }; 21 | 4B3E9DEE26D00D3C007176E4 /* FaceRecognizeView.swift in 
Sources */ = {isa = PBXBuildFile; fileRef = 4B3E9DED26D00D3C007176E4 /* FaceRecognizeView.swift */; }; 22 | 4B6F596426D629A700C2F250 /* UserAdminView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B6F596326D629A700C2F250 /* UserAdminView.swift */; }; 23 | 4B6F596626D62A0300C2F250 /* UserAdminController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B6F596526D62A0300C2F250 /* UserAdminController.swift */; }; 24 | 4B6F596826D6332700C2F250 /* ImagePickerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B6F596726D6332700C2F250 /* ImagePickerView.swift */; }; 25 | 4BC4A50A26D72ABE0048AB3F /* Util.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4BC4A50926D72ABE0048AB3F /* Util.swift */; }; 26 | 4BC4A50C26D780AB0048AB3F /* dlib_face_recognition_resnet_model_v1.dat in Resources */ = {isa = PBXBuildFile; fileRef = 4BC4A50B26D780A40048AB3F /* dlib_face_recognition_resnet_model_v1.dat */; }; 27 | 4BFE97CE26D4CC8800375577 /* libdlib.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 4BFE97CA26D4C99800375577 /* libdlib.a */; }; 28 | 4BFE97CF26D4D4BB00375577 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 4BFE97CC26D4C9EA00375577 /* Accelerate.framework */; }; 29 | 4BFE97D126D4D4C500375577 /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 4BFE97D026D4D4C500375577 /* AssetsLibrary.framework */; }; 30 | AF181DD84671E63AA4533395 /* Pods_Face_Recognize_Face_RecognizeUITests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B0C7E8139B554373480D5366 /* Pods_Face_Recognize_Face_RecognizeUITests.framework */; }; 31 | D8DB96E0D57405F93DD18E4A /* Pods_Face_Recognize.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A967912AA8B620610CC81CBB /* Pods_Face_Recognize.framework */; }; 32 | F05E3F987A05467E9FCF4623 /* Pods_Face_RecognizeTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 44CAA55F587CD1FA61325887 /* Pods_Face_RecognizeTests.framework */; }; 33 | /* End PBXBuildFile 
section */ 34 | 35 | /* Begin PBXContainerItemProxy section */ 36 | 4B3E9DC026CF9F5A007176E4 /* PBXContainerItemProxy */ = { 37 | isa = PBXContainerItemProxy; 38 | containerPortal = 4B3E9DA126CF9F58007176E4 /* Project object */; 39 | proxyType = 1; 40 | remoteGlobalIDString = 4B3E9DA826CF9F58007176E4; 41 | remoteInfo = "Face Recognize"; 42 | }; 43 | 4B3E9DCB26CF9F5A007176E4 /* PBXContainerItemProxy */ = { 44 | isa = PBXContainerItemProxy; 45 | containerPortal = 4B3E9DA126CF9F58007176E4 /* Project object */; 46 | proxyType = 1; 47 | remoteGlobalIDString = 4B3E9DA826CF9F58007176E4; 48 | remoteInfo = "Face Recognize"; 49 | }; 50 | /* End PBXContainerItemProxy section */ 51 | 52 | /* Begin PBXFileReference section */ 53 | 44CAA55F587CD1FA61325887 /* Pods_Face_RecognizeTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Face_RecognizeTests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 54 | 48A2DD39E633E32D83096455 /* Pods-Face RecognizeTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Face RecognizeTests.release.xcconfig"; path = "Target Support Files/Pods-Face RecognizeTests/Pods-Face RecognizeTests.release.xcconfig"; sourceTree = ""; }; 55 | 4B3E9DA926CF9F58007176E4 /* Face Recognize.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Face Recognize.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 56 | 4B3E9DAC26CF9F58007176E4 /* Face_RecognizeApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Face_RecognizeApp.swift; sourceTree = ""; }; 57 | 4B3E9DB026CF9F59007176E4 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 58 | 4B3E9DB326CF9F59007176E4 /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; 
sourceTree = ""; }; 59 | 4B3E9DB526CF9F59007176E4 /* Persistence.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Persistence.swift; sourceTree = ""; }; 60 | 4B3E9DB826CF9F59007176E4 /* Face_Recognize.xcdatamodel */ = {isa = PBXFileReference; lastKnownFileType = wrapper.xcdatamodel; path = Face_Recognize.xcdatamodel; sourceTree = ""; }; 61 | 4B3E9DBA26CF9F59007176E4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 62 | 4B3E9DBF26CF9F5A007176E4 /* Face RecognizeTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "Face RecognizeTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; 63 | 4B3E9DC326CF9F5A007176E4 /* Face_RecognizeTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Face_RecognizeTests.swift; sourceTree = ""; }; 64 | 4B3E9DC526CF9F5A007176E4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 65 | 4B3E9DCA26CF9F5A007176E4 /* Face RecognizeUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "Face RecognizeUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; 66 | 4B3E9DCE26CF9F5A007176E4 /* Face_RecognizeUITests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Face_RecognizeUITests.swift; sourceTree = ""; }; 67 | 4B3E9DD026CF9F5A007176E4 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 68 | 4B3E9DE126CFB982007176E4 /* Face Recognize-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Face Recognize-Bridging-Header.h"; sourceTree = ""; }; 69 | 4B3E9DE226CFB983007176E4 /* FaceRecognizer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FaceRecognizer.h; sourceTree = ""; }; 70 | 
4B3E9DE326CFB983007176E4 /* FaceRecognizer.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = FaceRecognizer.mm; sourceTree = ""; }; 71 | 4B3E9DE626CFD5C7007176E4 /* CameraController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraController.swift; sourceTree = ""; }; 72 | 4B3E9DEB26D00948007176E4 /* CameraPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPreviewView.swift; sourceTree = ""; }; 73 | 4B3E9DED26D00D3C007176E4 /* FaceRecognizeView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceRecognizeView.swift; sourceTree = ""; }; 74 | 4B6F596326D629A700C2F250 /* UserAdminView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserAdminView.swift; sourceTree = ""; }; 75 | 4B6F596526D62A0300C2F250 /* UserAdminController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = UserAdminController.swift; sourceTree = ""; }; 76 | 4B6F596726D6332700C2F250 /* ImagePickerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImagePickerView.swift; sourceTree = ""; }; 77 | 4BC4A50926D72ABE0048AB3F /* Util.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Util.swift; sourceTree = ""; }; 78 | 4BC4A50B26D780A40048AB3F /* dlib_face_recognition_resnet_model_v1.dat */ = {isa = PBXFileReference; lastKnownFileType = file; path = dlib_face_recognition_resnet_model_v1.dat; sourceTree = ""; }; 79 | 4BFE97CA26D4C99800375577 /* libdlib.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libdlib.a; path = "../Framework/dlib/build/dlib/Debug-iphoneos/libdlib.a"; sourceTree = ""; }; 80 | 4BFE97CC26D4C9EA00375577 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = 
SDKROOT; }; 81 | 4BFE97D026D4D4C500375577 /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = System/Library/Frameworks/AssetsLibrary.framework; sourceTree = SDKROOT; }; 82 | 4BFE97D226D4D70E00375577 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; }; 83 | 4BFE97D426D4D71400375577 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; }; 84 | 4BFE97D926D4DA7400375577 /* shape_predictor_68_face_landmarks.dat */ = {isa = PBXFileReference; lastKnownFileType = file; path = shape_predictor_68_face_landmarks.dat; sourceTree = ""; }; 85 | 9AB5AA4B806D7398C41D7FCF /* Pods-Face Recognize.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Face Recognize.release.xcconfig"; path = "Target Support Files/Pods-Face Recognize/Pods-Face Recognize.release.xcconfig"; sourceTree = ""; }; 86 | 9FD2CDB7BD7086EBBBF60877 /* Pods-Face Recognize-Face RecognizeUITests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Face Recognize-Face RecognizeUITests.debug.xcconfig"; path = "Target Support Files/Pods-Face Recognize-Face RecognizeUITests/Pods-Face Recognize-Face RecognizeUITests.debug.xcconfig"; sourceTree = ""; }; 87 | A967912AA8B620610CC81CBB /* Pods_Face_Recognize.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_Face_Recognize.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 88 | B0C7E8139B554373480D5366 /* Pods_Face_Recognize_Face_RecognizeUITests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; 
includeInIndex = 0; path = Pods_Face_Recognize_Face_RecognizeUITests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 89 | E31E44416C9E432EF3B2501D /* Pods-Face Recognize-Face RecognizeUITests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Face Recognize-Face RecognizeUITests.release.xcconfig"; path = "Target Support Files/Pods-Face Recognize-Face RecognizeUITests/Pods-Face Recognize-Face RecognizeUITests.release.xcconfig"; sourceTree = ""; }; 90 | E6B968120B6054E732B7D95C /* Pods-Face Recognize.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Face Recognize.debug.xcconfig"; path = "Target Support Files/Pods-Face Recognize/Pods-Face Recognize.debug.xcconfig"; sourceTree = ""; }; 91 | F16A5EE6CF241E75595796AC /* Pods-Face RecognizeTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Face RecognizeTests.debug.xcconfig"; path = "Target Support Files/Pods-Face RecognizeTests/Pods-Face RecognizeTests.debug.xcconfig"; sourceTree = ""; }; 92 | /* End PBXFileReference section */ 93 | 94 | /* Begin PBXFrameworksBuildPhase section */ 95 | 4B3E9DA626CF9F58007176E4 /* Frameworks */ = { 96 | isa = PBXFrameworksBuildPhase; 97 | buildActionMask = 2147483647; 98 | files = ( 99 | 4BFE97D126D4D4C500375577 /* AssetsLibrary.framework in Frameworks */, 100 | 4BFE97CF26D4D4BB00375577 /* Accelerate.framework in Frameworks */, 101 | D8DB96E0D57405F93DD18E4A /* Pods_Face_Recognize.framework in Frameworks */, 102 | 4BFE97CE26D4CC8800375577 /* libdlib.a in Frameworks */, 103 | ); 104 | runOnlyForDeploymentPostprocessing = 0; 105 | }; 106 | 4B3E9DBC26CF9F5A007176E4 /* Frameworks */ = { 107 | isa = PBXFrameworksBuildPhase; 108 | buildActionMask = 2147483647; 109 | files = ( 110 | F05E3F987A05467E9FCF4623 /* Pods_Face_RecognizeTests.framework in Frameworks */, 111 | ); 112 | 
runOnlyForDeploymentPostprocessing = 0; 113 | }; 114 | 4B3E9DC726CF9F5A007176E4 /* Frameworks */ = { 115 | isa = PBXFrameworksBuildPhase; 116 | buildActionMask = 2147483647; 117 | files = ( 118 | AF181DD84671E63AA4533395 /* Pods_Face_Recognize_Face_RecognizeUITests.framework in Frameworks */, 119 | ); 120 | runOnlyForDeploymentPostprocessing = 0; 121 | }; 122 | /* End PBXFrameworksBuildPhase section */ 123 | 124 | /* Begin PBXGroup section */ 125 | 4B3E9DA026CF9F58007176E4 = { 126 | isa = PBXGroup; 127 | children = ( 128 | 4B3E9DAB26CF9F58007176E4 /* Face Recognize */, 129 | 4B3E9DC226CF9F5A007176E4 /* Face RecognizeTests */, 130 | 4B3E9DCD26CF9F5A007176E4 /* Face RecognizeUITests */, 131 | 4B3E9DAA26CF9F58007176E4 /* Products */, 132 | B82803C578D9D7EA83F4C35A /* Pods */, 133 | C49BC048107F55C2A0D65F7B /* Frameworks */, 134 | ); 135 | sourceTree = ""; 136 | }; 137 | 4B3E9DAA26CF9F58007176E4 /* Products */ = { 138 | isa = PBXGroup; 139 | children = ( 140 | 4B3E9DA926CF9F58007176E4 /* Face Recognize.app */, 141 | 4B3E9DBF26CF9F5A007176E4 /* Face RecognizeTests.xctest */, 142 | 4B3E9DCA26CF9F5A007176E4 /* Face RecognizeUITests.xctest */, 143 | ); 144 | name = Products; 145 | sourceTree = ""; 146 | }; 147 | 4B3E9DAB26CF9F58007176E4 /* Face Recognize */ = { 148 | isa = PBXGroup; 149 | children = ( 150 | 4B3E9DEA26D0085E007176E4 /* View */, 151 | 4B3E9DE526CFD514007176E4 /* Controller */, 152 | 4B3E9DE026CFB7EA007176E4 /* objc */, 153 | 4BC4A50926D72ABE0048AB3F /* Util.swift */, 154 | 4B3E9DAC26CF9F58007176E4 /* Face_RecognizeApp.swift */, 155 | 4B3E9DB026CF9F59007176E4 /* Assets.xcassets */, 156 | 4B3E9DB526CF9F59007176E4 /* Persistence.swift */, 157 | 4B3E9DBA26CF9F59007176E4 /* Info.plist */, 158 | 4B3E9DB726CF9F59007176E4 /* Face_Recognize.xcdatamodeld */, 159 | 4B3E9DB226CF9F59007176E4 /* Preview Content */, 160 | ); 161 | path = "Face Recognize"; 162 | sourceTree = ""; 163 | }; 164 | 4B3E9DB226CF9F59007176E4 /* Preview Content */ = { 165 | isa = PBXGroup; 166 | 
children = ( 167 | 4B3E9DB326CF9F59007176E4 /* Preview Assets.xcassets */, 168 | ); 169 | path = "Preview Content"; 170 | sourceTree = ""; 171 | }; 172 | 4B3E9DC226CF9F5A007176E4 /* Face RecognizeTests */ = { 173 | isa = PBXGroup; 174 | children = ( 175 | 4B3E9DC326CF9F5A007176E4 /* Face_RecognizeTests.swift */, 176 | 4B3E9DC526CF9F5A007176E4 /* Info.plist */, 177 | ); 178 | path = "Face RecognizeTests"; 179 | sourceTree = ""; 180 | }; 181 | 4B3E9DCD26CF9F5A007176E4 /* Face RecognizeUITests */ = { 182 | isa = PBXGroup; 183 | children = ( 184 | 4B3E9DCE26CF9F5A007176E4 /* Face_RecognizeUITests.swift */, 185 | 4B3E9DD026CF9F5A007176E4 /* Info.plist */, 186 | ); 187 | path = "Face RecognizeUITests"; 188 | sourceTree = ""; 189 | }; 190 | 4B3E9DE026CFB7EA007176E4 /* objc */ = { 191 | isa = PBXGroup; 192 | children = ( 193 | 4B3E9DE226CFB983007176E4 /* FaceRecognizer.h */, 194 | 4B3E9DE326CFB983007176E4 /* FaceRecognizer.mm */, 195 | 4B3E9DE126CFB982007176E4 /* Face Recognize-Bridging-Header.h */, 196 | 4BFE97D926D4DA7400375577 /* shape_predictor_68_face_landmarks.dat */, 197 | 4BC4A50B26D780A40048AB3F /* dlib_face_recognition_resnet_model_v1.dat */, 198 | ); 199 | path = objc; 200 | sourceTree = ""; 201 | }; 202 | 4B3E9DE526CFD514007176E4 /* Controller */ = { 203 | isa = PBXGroup; 204 | children = ( 205 | 4B3E9DE626CFD5C7007176E4 /* CameraController.swift */, 206 | 4B6F596526D62A0300C2F250 /* UserAdminController.swift */, 207 | ); 208 | path = Controller; 209 | sourceTree = ""; 210 | }; 211 | 4B3E9DEA26D0085E007176E4 /* View */ = { 212 | isa = PBXGroup; 213 | children = ( 214 | 4B3E9DEB26D00948007176E4 /* CameraPreviewView.swift */, 215 | 4B3E9DED26D00D3C007176E4 /* FaceRecognizeView.swift */, 216 | 4B6F596326D629A700C2F250 /* UserAdminView.swift */, 217 | 4B6F596726D6332700C2F250 /* ImagePickerView.swift */, 218 | ); 219 | path = View; 220 | sourceTree = ""; 221 | }; 222 | B82803C578D9D7EA83F4C35A /* Pods */ = { 223 | isa = PBXGroup; 224 | children = ( 225 | 
E6B968120B6054E732B7D95C /* Pods-Face Recognize.debug.xcconfig */, 226 | 9AB5AA4B806D7398C41D7FCF /* Pods-Face Recognize.release.xcconfig */, 227 | 9FD2CDB7BD7086EBBBF60877 /* Pods-Face Recognize-Face RecognizeUITests.debug.xcconfig */, 228 | E31E44416C9E432EF3B2501D /* Pods-Face Recognize-Face RecognizeUITests.release.xcconfig */, 229 | F16A5EE6CF241E75595796AC /* Pods-Face RecognizeTests.debug.xcconfig */, 230 | 48A2DD39E633E32D83096455 /* Pods-Face RecognizeTests.release.xcconfig */, 231 | ); 232 | path = Pods; 233 | sourceTree = ""; 234 | }; 235 | C49BC048107F55C2A0D65F7B /* Frameworks */ = { 236 | isa = PBXGroup; 237 | children = ( 238 | 4BFE97D426D4D71400375577 /* AVFoundation.framework */, 239 | 4BFE97D226D4D70E00375577 /* CoreMedia.framework */, 240 | 4BFE97D026D4D4C500375577 /* AssetsLibrary.framework */, 241 | 4BFE97CC26D4C9EA00375577 /* Accelerate.framework */, 242 | 4BFE97CA26D4C99800375577 /* libdlib.a */, 243 | A967912AA8B620610CC81CBB /* Pods_Face_Recognize.framework */, 244 | B0C7E8139B554373480D5366 /* Pods_Face_Recognize_Face_RecognizeUITests.framework */, 245 | 44CAA55F587CD1FA61325887 /* Pods_Face_RecognizeTests.framework */, 246 | ); 247 | name = Frameworks; 248 | sourceTree = ""; 249 | }; 250 | /* End PBXGroup section */ 251 | 252 | /* Begin PBXNativeTarget section */ 253 | 4B3E9DA826CF9F58007176E4 /* Face Recognize */ = { 254 | isa = PBXNativeTarget; 255 | buildConfigurationList = 4B3E9DD326CF9F5A007176E4 /* Build configuration list for PBXNativeTarget "Face Recognize" */; 256 | buildPhases = ( 257 | 185B79D48C17873C31A0728A /* [CP] Check Pods Manifest.lock */, 258 | 4B3E9DA526CF9F58007176E4 /* Sources */, 259 | 4B3E9DA626CF9F58007176E4 /* Frameworks */, 260 | 4B3E9DA726CF9F58007176E4 /* Resources */, 261 | ); 262 | buildRules = ( 263 | ); 264 | dependencies = ( 265 | ); 266 | name = "Face Recognize"; 267 | productName = "Face Recognize"; 268 | productReference = 4B3E9DA926CF9F58007176E4 /* Face Recognize.app */; 269 | productType = 
"com.apple.product-type.application"; 270 | }; 271 | 4B3E9DBE26CF9F5A007176E4 /* Face RecognizeTests */ = { 272 | isa = PBXNativeTarget; 273 | buildConfigurationList = 4B3E9DD626CF9F5A007176E4 /* Build configuration list for PBXNativeTarget "Face RecognizeTests" */; 274 | buildPhases = ( 275 | 72334CBF5A45CBBF51019A74 /* [CP] Check Pods Manifest.lock */, 276 | 4B3E9DBB26CF9F5A007176E4 /* Sources */, 277 | 4B3E9DBC26CF9F5A007176E4 /* Frameworks */, 278 | 4B3E9DBD26CF9F5A007176E4 /* Resources */, 279 | ); 280 | buildRules = ( 281 | ); 282 | dependencies = ( 283 | 4B3E9DC126CF9F5A007176E4 /* PBXTargetDependency */, 284 | ); 285 | name = "Face RecognizeTests"; 286 | productName = "Face RecognizeTests"; 287 | productReference = 4B3E9DBF26CF9F5A007176E4 /* Face RecognizeTests.xctest */; 288 | productType = "com.apple.product-type.bundle.unit-test"; 289 | }; 290 | 4B3E9DC926CF9F5A007176E4 /* Face RecognizeUITests */ = { 291 | isa = PBXNativeTarget; 292 | buildConfigurationList = 4B3E9DD926CF9F5A007176E4 /* Build configuration list for PBXNativeTarget "Face RecognizeUITests" */; 293 | buildPhases = ( 294 | A8001126B557CD39569793CE /* [CP] Check Pods Manifest.lock */, 295 | 4B3E9DC626CF9F5A007176E4 /* Sources */, 296 | 4B3E9DC726CF9F5A007176E4 /* Frameworks */, 297 | 4B3E9DC826CF9F5A007176E4 /* Resources */, 298 | ); 299 | buildRules = ( 300 | ); 301 | dependencies = ( 302 | 4B3E9DCC26CF9F5A007176E4 /* PBXTargetDependency */, 303 | ); 304 | name = "Face RecognizeUITests"; 305 | productName = "Face RecognizeUITests"; 306 | productReference = 4B3E9DCA26CF9F5A007176E4 /* Face RecognizeUITests.xctest */; 307 | productType = "com.apple.product-type.bundle.ui-testing"; 308 | }; 309 | /* End PBXNativeTarget section */ 310 | 311 | /* Begin PBXProject section */ 312 | 4B3E9DA126CF9F58007176E4 /* Project object */ = { 313 | isa = PBXProject; 314 | attributes = { 315 | LastSwiftUpdateCheck = 1250; 316 | LastUpgradeCheck = 1250; 317 | TargetAttributes = { 318 | 4B3E9DA826CF9F58007176E4 
= { 319 | CreatedOnToolsVersion = 12.5; 320 | LastSwiftMigration = 1250; 321 | }; 322 | 4B3E9DBE26CF9F5A007176E4 = { 323 | CreatedOnToolsVersion = 12.5; 324 | TestTargetID = 4B3E9DA826CF9F58007176E4; 325 | }; 326 | 4B3E9DC926CF9F5A007176E4 = { 327 | CreatedOnToolsVersion = 12.5; 328 | TestTargetID = 4B3E9DA826CF9F58007176E4; 329 | }; 330 | }; 331 | }; 332 | buildConfigurationList = 4B3E9DA426CF9F58007176E4 /* Build configuration list for PBXProject "Face Recognize" */; 333 | compatibilityVersion = "Xcode 9.3"; 334 | developmentRegion = en; 335 | hasScannedForEncodings = 0; 336 | knownRegions = ( 337 | en, 338 | Base, 339 | ); 340 | mainGroup = 4B3E9DA026CF9F58007176E4; 341 | productRefGroup = 4B3E9DAA26CF9F58007176E4 /* Products */; 342 | projectDirPath = ""; 343 | projectRoot = ""; 344 | targets = ( 345 | 4B3E9DA826CF9F58007176E4 /* Face Recognize */, 346 | 4B3E9DBE26CF9F5A007176E4 /* Face RecognizeTests */, 347 | 4B3E9DC926CF9F5A007176E4 /* Face RecognizeUITests */, 348 | ); 349 | }; 350 | /* End PBXProject section */ 351 | 352 | /* Begin PBXResourcesBuildPhase section */ 353 | 4B3E9DA726CF9F58007176E4 /* Resources */ = { 354 | isa = PBXResourcesBuildPhase; 355 | buildActionMask = 2147483647; 356 | files = ( 357 | 4BC4A50C26D780AB0048AB3F /* dlib_face_recognition_resnet_model_v1.dat in Resources */, 358 | 4B20911A26D4F1BF0030DCFF /* shape_predictor_68_face_landmarks.dat in Resources */, 359 | 4B3E9DB426CF9F59007176E4 /* Preview Assets.xcassets in Resources */, 360 | 4B3E9DB126CF9F59007176E4 /* Assets.xcassets in Resources */, 361 | ); 362 | runOnlyForDeploymentPostprocessing = 0; 363 | }; 364 | 4B3E9DBD26CF9F5A007176E4 /* Resources */ = { 365 | isa = PBXResourcesBuildPhase; 366 | buildActionMask = 2147483647; 367 | files = ( 368 | ); 369 | runOnlyForDeploymentPostprocessing = 0; 370 | }; 371 | 4B3E9DC826CF9F5A007176E4 /* Resources */ = { 372 | isa = PBXResourcesBuildPhase; 373 | buildActionMask = 2147483647; 374 | files = ( 375 | ); 376 | 
runOnlyForDeploymentPostprocessing = 0; 377 | }; 378 | /* End PBXResourcesBuildPhase section */ 379 | 380 | /* Begin PBXShellScriptBuildPhase section */ 381 | 185B79D48C17873C31A0728A /* [CP] Check Pods Manifest.lock */ = { 382 | isa = PBXShellScriptBuildPhase; 383 | buildActionMask = 2147483647; 384 | files = ( 385 | ); 386 | inputFileListPaths = ( 387 | ); 388 | inputPaths = ( 389 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock", 390 | "${PODS_ROOT}/Manifest.lock", 391 | ); 392 | name = "[CP] Check Pods Manifest.lock"; 393 | outputFileListPaths = ( 394 | ); 395 | outputPaths = ( 396 | "$(DERIVED_FILE_DIR)/Pods-Face Recognize-checkManifestLockResult.txt", 397 | ); 398 | runOnlyForDeploymentPostprocessing = 0; 399 | shellPath = /bin/sh; 400 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; 401 | showEnvVarsInLog = 0; 402 | }; 403 | 72334CBF5A45CBBF51019A74 /* [CP] Check Pods Manifest.lock */ = { 404 | isa = PBXShellScriptBuildPhase; 405 | buildActionMask = 2147483647; 406 | files = ( 407 | ); 408 | inputFileListPaths = ( 409 | ); 410 | inputPaths = ( 411 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock", 412 | "${PODS_ROOT}/Manifest.lock", 413 | ); 414 | name = "[CP] Check Pods Manifest.lock"; 415 | outputFileListPaths = ( 416 | ); 417 | outputPaths = ( 418 | "$(DERIVED_FILE_DIR)/Pods-Face RecognizeTests-checkManifestLockResult.txt", 419 | ); 420 | runOnlyForDeploymentPostprocessing = 0; 421 | shellPath = /bin/sh; 422 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? 
!= 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; 423 | showEnvVarsInLog = 0; 424 | }; 425 | A8001126B557CD39569793CE /* [CP] Check Pods Manifest.lock */ = { 426 | isa = PBXShellScriptBuildPhase; 427 | buildActionMask = 2147483647; 428 | files = ( 429 | ); 430 | inputFileListPaths = ( 431 | ); 432 | inputPaths = ( 433 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock", 434 | "${PODS_ROOT}/Manifest.lock", 435 | ); 436 | name = "[CP] Check Pods Manifest.lock"; 437 | outputFileListPaths = ( 438 | ); 439 | outputPaths = ( 440 | "$(DERIVED_FILE_DIR)/Pods-Face Recognize-Face RecognizeUITests-checkManifestLockResult.txt", 441 | ); 442 | runOnlyForDeploymentPostprocessing = 0; 443 | shellPath = /bin/sh; 444 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; 445 | showEnvVarsInLog = 0; 446 | }; 447 | /* End PBXShellScriptBuildPhase section */ 448 | 449 | /* Begin PBXSourcesBuildPhase section */ 450 | 4B3E9DA526CF9F58007176E4 /* Sources */ = { 451 | isa = PBXSourcesBuildPhase; 452 | buildActionMask = 2147483647; 453 | files = ( 454 | 4B3E9DE726CFD5C7007176E4 /* CameraController.swift in Sources */, 455 | 4B3E9DEC26D00948007176E4 /* CameraPreviewView.swift in Sources */, 456 | 4B3E9DEE26D00D3C007176E4 /* FaceRecognizeView.swift in Sources */, 457 | 4B6F596826D6332700C2F250 /* ImagePickerView.swift in Sources */, 458 | 4B3E9DB626CF9F59007176E4 /* Persistence.swift in Sources */, 459 | 4B3E9DE426CFB983007176E4 /* FaceRecognizer.mm in Sources */, 460 | 4B3E9DAD26CF9F58007176E4 /* Face_RecognizeApp.swift in Sources */, 461 | 4BC4A50A26D72ABE0048AB3F /* Util.swift in Sources */, 462 | 4B6F596626D62A0300C2F250 /* UserAdminController.swift in Sources */, 463 | 4B6F596426D629A700C2F250 /* UserAdminView.swift in Sources */, 464 | 4B3E9DB926CF9F59007176E4 /* Face_Recognize.xcdatamodeld in Sources */, 465 | ); 466 | runOnlyForDeploymentPostprocessing = 0; 467 | }; 468 | 4B3E9DBB26CF9F5A007176E4 /* Sources */ = { 469 | isa = PBXSourcesBuildPhase; 470 | buildActionMask = 2147483647; 471 | files = ( 472 | 4B3E9DC426CF9F5A007176E4 /* Face_RecognizeTests.swift in Sources */, 473 | ); 474 | runOnlyForDeploymentPostprocessing = 0; 475 | }; 476 | 4B3E9DC626CF9F5A007176E4 /* Sources */ = { 477 | isa = PBXSourcesBuildPhase; 478 | buildActionMask = 2147483647; 479 | files = ( 480 | 4B3E9DCF26CF9F5A007176E4 /* Face_RecognizeUITests.swift in Sources */, 481 | ); 482 | runOnlyForDeploymentPostprocessing = 0; 483 | }; 484 | /* End PBXSourcesBuildPhase section */ 485 | 486 | /* Begin PBXTargetDependency section */ 487 | 
4B3E9DC126CF9F5A007176E4 /* PBXTargetDependency */ = { 488 | isa = PBXTargetDependency; 489 | target = 4B3E9DA826CF9F58007176E4 /* Face Recognize */; 490 | targetProxy = 4B3E9DC026CF9F5A007176E4 /* PBXContainerItemProxy */; 491 | }; 492 | 4B3E9DCC26CF9F5A007176E4 /* PBXTargetDependency */ = { 493 | isa = PBXTargetDependency; 494 | target = 4B3E9DA826CF9F58007176E4 /* Face Recognize */; 495 | targetProxy = 4B3E9DCB26CF9F5A007176E4 /* PBXContainerItemProxy */; 496 | }; 497 | /* End PBXTargetDependency section */ 498 | 499 | /* Begin XCBuildConfiguration section */ 500 | 4B3E9DD126CF9F5A007176E4 /* Debug */ = { 501 | isa = XCBuildConfiguration; 502 | buildSettings = { 503 | ALWAYS_SEARCH_USER_PATHS = NO; 504 | CLANG_ANALYZER_NONNULL = YES; 505 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 506 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 507 | CLANG_CXX_LIBRARY = "libc++"; 508 | CLANG_ENABLE_MODULES = YES; 509 | CLANG_ENABLE_OBJC_ARC = YES; 510 | CLANG_ENABLE_OBJC_WEAK = YES; 511 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 512 | CLANG_WARN_BOOL_CONVERSION = YES; 513 | CLANG_WARN_COMMA = YES; 514 | CLANG_WARN_CONSTANT_CONVERSION = YES; 515 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 516 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 517 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 518 | CLANG_WARN_EMPTY_BODY = YES; 519 | CLANG_WARN_ENUM_CONVERSION = YES; 520 | CLANG_WARN_INFINITE_RECURSION = YES; 521 | CLANG_WARN_INT_CONVERSION = YES; 522 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 523 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 524 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 525 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 526 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 527 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 528 | CLANG_WARN_STRICT_PROTOTYPES = YES; 529 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 530 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 531 | CLANG_WARN_UNREACHABLE_CODE = YES; 532 | CLANG_WARN__DUPLICATE_METHOD_MATCH = 
YES; 533 | COPY_PHASE_STRIP = NO; 534 | DEBUG_INFORMATION_FORMAT = dwarf; 535 | ENABLE_STRICT_OBJC_MSGSEND = YES; 536 | ENABLE_TESTABILITY = YES; 537 | GCC_C_LANGUAGE_STANDARD = gnu11; 538 | GCC_DYNAMIC_NO_PIC = NO; 539 | GCC_NO_COMMON_BLOCKS = YES; 540 | GCC_OPTIMIZATION_LEVEL = 0; 541 | GCC_PREPROCESSOR_DEFINITIONS = ( 542 | DLIB_USE_BLAS, 543 | DLIB_JPEG_SUPPORT, 544 | NDEBUG, 545 | ); 546 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 547 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 548 | GCC_WARN_UNDECLARED_SELECTOR = YES; 549 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 550 | GCC_WARN_UNUSED_FUNCTION = YES; 551 | GCC_WARN_UNUSED_VARIABLE = YES; 552 | IPHONEOS_DEPLOYMENT_TARGET = 14.5; 553 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 554 | MTL_FAST_MATH = YES; 555 | ONLY_ACTIVE_ARCH = YES; 556 | SDKROOT = iphoneos; 557 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 558 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 559 | }; 560 | name = Debug; 561 | }; 562 | 4B3E9DD226CF9F5A007176E4 /* Release */ = { 563 | isa = XCBuildConfiguration; 564 | buildSettings = { 565 | ALWAYS_SEARCH_USER_PATHS = NO; 566 | CLANG_ANALYZER_NONNULL = YES; 567 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 568 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 569 | CLANG_CXX_LIBRARY = "libc++"; 570 | CLANG_ENABLE_MODULES = YES; 571 | CLANG_ENABLE_OBJC_ARC = YES; 572 | CLANG_ENABLE_OBJC_WEAK = YES; 573 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 574 | CLANG_WARN_BOOL_CONVERSION = YES; 575 | CLANG_WARN_COMMA = YES; 576 | CLANG_WARN_CONSTANT_CONVERSION = YES; 577 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 578 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 579 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 580 | CLANG_WARN_EMPTY_BODY = YES; 581 | CLANG_WARN_ENUM_CONVERSION = YES; 582 | CLANG_WARN_INFINITE_RECURSION = YES; 583 | CLANG_WARN_INT_CONVERSION = YES; 584 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 585 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 586 | 
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 587 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 588 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 589 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 590 | CLANG_WARN_STRICT_PROTOTYPES = YES; 591 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 592 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 593 | CLANG_WARN_UNREACHABLE_CODE = YES; 594 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 595 | COPY_PHASE_STRIP = NO; 596 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 597 | ENABLE_NS_ASSERTIONS = NO; 598 | ENABLE_STRICT_OBJC_MSGSEND = YES; 599 | GCC_C_LANGUAGE_STANDARD = gnu11; 600 | GCC_NO_COMMON_BLOCKS = YES; 601 | GCC_PREPROCESSOR_DEFINITIONS = ( 602 | DLIB_USE_BLAS, 603 | DLIB_JPEG_SUPPORT, 604 | NDEBUG, 605 | ); 606 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 607 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 608 | GCC_WARN_UNDECLARED_SELECTOR = YES; 609 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 610 | GCC_WARN_UNUSED_FUNCTION = YES; 611 | GCC_WARN_UNUSED_VARIABLE = YES; 612 | IPHONEOS_DEPLOYMENT_TARGET = 14.5; 613 | MTL_ENABLE_DEBUG_INFO = NO; 614 | MTL_FAST_MATH = YES; 615 | SDKROOT = iphoneos; 616 | SWIFT_COMPILATION_MODE = wholemodule; 617 | SWIFT_OPTIMIZATION_LEVEL = "-O"; 618 | VALIDATE_PRODUCT = YES; 619 | }; 620 | name = Release; 621 | }; 622 | 4B3E9DD426CF9F5A007176E4 /* Debug */ = { 623 | isa = XCBuildConfiguration; 624 | baseConfigurationReference = E6B968120B6054E732B7D95C /* Pods-Face Recognize.debug.xcconfig */; 625 | buildSettings = { 626 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 627 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 628 | CLANG_ENABLE_MODULES = YES; 629 | CODE_SIGN_STYLE = Automatic; 630 | DEVELOPMENT_ASSET_PATHS = "\"Face Recognize/Preview Content\""; 631 | DEVELOPMENT_TEAM = 2QN563PJ23; 632 | ENABLE_BITCODE = NO; 633 | ENABLE_PREVIEWS = YES; 634 | HEADER_SEARCH_PATHS = /Users/js/project/Framework/dlib; 635 | INFOPLIST_FILE = "Face Recognize/Info.plist"; 636 | 
IPHONEOS_DEPLOYMENT_TARGET = 14.0; 637 | LD_RUNPATH_SEARCH_PATHS = ( 638 | "$(inherited)", 639 | "@executable_path/Frameworks", 640 | ); 641 | LIBRARY_SEARCH_PATHS = /Users/js/project/Framework/lib; 642 | ONLY_ACTIVE_ARCH = NO; 643 | PRODUCT_BUNDLE_IDENTIFIER = "com.jsjohn.Face-Recognize"; 644 | PRODUCT_NAME = "$(TARGET_NAME)"; 645 | SWIFT_OBJC_BRIDGING_HEADER = "Face Recognize/objc/Face Recognize-Bridging-Header.h"; 646 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 647 | SWIFT_VERSION = 5.0; 648 | TARGETED_DEVICE_FAMILY = "1,2"; 649 | }; 650 | name = Debug; 651 | }; 652 | 4B3E9DD526CF9F5A007176E4 /* Release */ = { 653 | isa = XCBuildConfiguration; 654 | baseConfigurationReference = 9AB5AA4B806D7398C41D7FCF /* Pods-Face Recognize.release.xcconfig */; 655 | buildSettings = { 656 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 657 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 658 | CLANG_ENABLE_MODULES = YES; 659 | CODE_SIGN_STYLE = Automatic; 660 | DEVELOPMENT_ASSET_PATHS = "\"Face Recognize/Preview Content\""; 661 | DEVELOPMENT_TEAM = 2QN563PJ23; 662 | ENABLE_BITCODE = NO; 663 | ENABLE_PREVIEWS = YES; 664 | HEADER_SEARCH_PATHS = /Users/js/project/Framework/dlib; 665 | INFOPLIST_FILE = "Face Recognize/Info.plist"; 666 | IPHONEOS_DEPLOYMENT_TARGET = 14.0; 667 | LD_RUNPATH_SEARCH_PATHS = ( 668 | "$(inherited)", 669 | "@executable_path/Frameworks", 670 | ); 671 | LIBRARY_SEARCH_PATHS = /Users/js/project/Framework/lib; 672 | PRODUCT_BUNDLE_IDENTIFIER = "com.jsjohn.Face-Recognize"; 673 | PRODUCT_NAME = "$(TARGET_NAME)"; 674 | SWIFT_OBJC_BRIDGING_HEADER = "Face Recognize/objc/Face Recognize-Bridging-Header.h"; 675 | SWIFT_VERSION = 5.0; 676 | TARGETED_DEVICE_FAMILY = "1,2"; 677 | }; 678 | name = Release; 679 | }; 680 | 4B3E9DD726CF9F5A007176E4 /* Debug */ = { 681 | isa = XCBuildConfiguration; 682 | baseConfigurationReference = F16A5EE6CF241E75595796AC /* Pods-Face RecognizeTests.debug.xcconfig */; 683 | buildSettings = { 684 | 
ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 685 | BUNDLE_LOADER = "$(TEST_HOST)"; 686 | CODE_SIGN_STYLE = Automatic; 687 | DEVELOPMENT_TEAM = 2QN563PJ23; 688 | INFOPLIST_FILE = "Face RecognizeTests/Info.plist"; 689 | IPHONEOS_DEPLOYMENT_TARGET = 14.0; 690 | LD_RUNPATH_SEARCH_PATHS = ( 691 | "$(inherited)", 692 | "@executable_path/Frameworks", 693 | "@loader_path/Frameworks", 694 | ); 695 | PRODUCT_BUNDLE_IDENTIFIER = "com.jsjohn.Face-RecognizeTests"; 696 | PRODUCT_NAME = "$(TARGET_NAME)"; 697 | SWIFT_VERSION = 5.0; 698 | TARGETED_DEVICE_FAMILY = "1,2"; 699 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Face Recognize.app/Face Recognize"; 700 | }; 701 | name = Debug; 702 | }; 703 | 4B3E9DD826CF9F5A007176E4 /* Release */ = { 704 | isa = XCBuildConfiguration; 705 | baseConfigurationReference = 48A2DD39E633E32D83096455 /* Pods-Face RecognizeTests.release.xcconfig */; 706 | buildSettings = { 707 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 708 | BUNDLE_LOADER = "$(TEST_HOST)"; 709 | CODE_SIGN_STYLE = Automatic; 710 | DEVELOPMENT_TEAM = 2QN563PJ23; 711 | INFOPLIST_FILE = "Face RecognizeTests/Info.plist"; 712 | IPHONEOS_DEPLOYMENT_TARGET = 14.0; 713 | LD_RUNPATH_SEARCH_PATHS = ( 714 | "$(inherited)", 715 | "@executable_path/Frameworks", 716 | "@loader_path/Frameworks", 717 | ); 718 | PRODUCT_BUNDLE_IDENTIFIER = "com.jsjohn.Face-RecognizeTests"; 719 | PRODUCT_NAME = "$(TARGET_NAME)"; 720 | SWIFT_VERSION = 5.0; 721 | TARGETED_DEVICE_FAMILY = "1,2"; 722 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Face Recognize.app/Face Recognize"; 723 | }; 724 | name = Release; 725 | }; 726 | 4B3E9DDA26CF9F5A007176E4 /* Debug */ = { 727 | isa = XCBuildConfiguration; 728 | baseConfigurationReference = 9FD2CDB7BD7086EBBBF60877 /* Pods-Face Recognize-Face RecognizeUITests.debug.xcconfig */; 729 | buildSettings = { 730 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 731 | CODE_SIGN_STYLE = Automatic; 732 | DEVELOPMENT_TEAM = 2QN563PJ23; 733 | INFOPLIST_FILE = "Face RecognizeUITests/Info.plist"; 734 | 
LD_RUNPATH_SEARCH_PATHS = ( 735 | "$(inherited)", 736 | "@executable_path/Frameworks", 737 | "@loader_path/Frameworks", 738 | ); 739 | PRODUCT_BUNDLE_IDENTIFIER = "com.jsjohn.Face-RecognizeUITests"; 740 | PRODUCT_NAME = "$(TARGET_NAME)"; 741 | SWIFT_VERSION = 5.0; 742 | TARGETED_DEVICE_FAMILY = "1,2"; 743 | TEST_TARGET_NAME = "Face Recognize"; 744 | }; 745 | name = Debug; 746 | }; 747 | 4B3E9DDB26CF9F5A007176E4 /* Release */ = { 748 | isa = XCBuildConfiguration; 749 | baseConfigurationReference = E31E44416C9E432EF3B2501D /* Pods-Face Recognize-Face RecognizeUITests.release.xcconfig */; 750 | buildSettings = { 751 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; 752 | CODE_SIGN_STYLE = Automatic; 753 | DEVELOPMENT_TEAM = 2QN563PJ23; 754 | INFOPLIST_FILE = "Face RecognizeUITests/Info.plist"; 755 | LD_RUNPATH_SEARCH_PATHS = ( 756 | "$(inherited)", 757 | "@executable_path/Frameworks", 758 | "@loader_path/Frameworks", 759 | ); 760 | PRODUCT_BUNDLE_IDENTIFIER = "com.jsjohn.Face-RecognizeUITests"; 761 | PRODUCT_NAME = "$(TARGET_NAME)"; 762 | SWIFT_VERSION = 5.0; 763 | TARGETED_DEVICE_FAMILY = "1,2"; 764 | TEST_TARGET_NAME = "Face Recognize"; 765 | }; 766 | name = Release; 767 | }; 768 | /* End XCBuildConfiguration section */ 769 | 770 | /* Begin XCConfigurationList section */ 771 | 4B3E9DA426CF9F58007176E4 /* Build configuration list for PBXProject "Face Recognize" */ = { 772 | isa = XCConfigurationList; 773 | buildConfigurations = ( 774 | 4B3E9DD126CF9F5A007176E4 /* Debug */, 775 | 4B3E9DD226CF9F5A007176E4 /* Release */, 776 | ); 777 | defaultConfigurationIsVisible = 0; 778 | defaultConfigurationName = Release; 779 | }; 780 | 4B3E9DD326CF9F5A007176E4 /* Build configuration list for PBXNativeTarget "Face Recognize" */ = { 781 | isa = XCConfigurationList; 782 | buildConfigurations = ( 783 | 4B3E9DD426CF9F5A007176E4 /* Debug */, 784 | 4B3E9DD526CF9F5A007176E4 /* Release */, 785 | ); 786 | defaultConfigurationIsVisible = 0; 787 | defaultConfigurationName = Release; 788 | }; 
789 | 4B3E9DD626CF9F5A007176E4 /* Build configuration list for PBXNativeTarget "Face RecognizeTests" */ = { 790 | isa = XCConfigurationList; 791 | buildConfigurations = ( 792 | 4B3E9DD726CF9F5A007176E4 /* Debug */, 793 | 4B3E9DD826CF9F5A007176E4 /* Release */, 794 | ); 795 | defaultConfigurationIsVisible = 0; 796 | defaultConfigurationName = Release; 797 | }; 798 | 4B3E9DD926CF9F5A007176E4 /* Build configuration list for PBXNativeTarget "Face RecognizeUITests" */ = { 799 | isa = XCConfigurationList; 800 | buildConfigurations = ( 801 | 4B3E9DDA26CF9F5A007176E4 /* Debug */, 802 | 4B3E9DDB26CF9F5A007176E4 /* Release */, 803 | ); 804 | defaultConfigurationIsVisible = 0; 805 | defaultConfigurationName = Release; 806 | }; 807 | /* End XCConfigurationList section */ 808 | 809 | /* Begin XCVersionGroup section */ 810 | 4B3E9DB726CF9F59007176E4 /* Face_Recognize.xcdatamodeld */ = { 811 | isa = XCVersionGroup; 812 | children = ( 813 | 4B3E9DB826CF9F59007176E4 /* Face_Recognize.xcdatamodel */, 814 | ); 815 | currentVersion = 4B3E9DB826CF9F59007176E4 /* Face_Recognize.xcdatamodel */; 816 | path = Face_Recognize.xcdatamodeld; 817 | sourceTree = ""; 818 | versionGroupType = wrapper.xcdatamodel; 819 | }; 820 | /* End XCVersionGroup section */ 821 | }; 822 | rootObject = 4B3E9DA126CF9F58007176E4 /* Project object */; 823 | } 824 | -------------------------------------------------------------------------------- /Face Recognize/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /Face Recognize/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | 
"scale" : "2x", 6 | "size" : "20x20" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "scale" : "3x", 11 | "size" : "20x20" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "scale" : "2x", 16 | "size" : "29x29" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "scale" : "3x", 21 | "size" : "29x29" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "scale" : "2x", 26 | "size" : "40x40" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "scale" : "3x", 31 | "size" : "40x40" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "scale" : "2x", 36 | "size" : "60x60" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "scale" : "3x", 41 | "size" : "60x60" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "scale" : "1x", 46 | "size" : "20x20" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "scale" : "2x", 51 | "size" : "20x20" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "scale" : "1x", 56 | "size" : "29x29" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "scale" : "2x", 61 | "size" : "29x29" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "scale" : "1x", 66 | "size" : "40x40" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "scale" : "2x", 71 | "size" : "40x40" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "scale" : "1x", 76 | "size" : "76x76" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "scale" : "2x", 81 | "size" : "76x76" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "scale" : "2x", 86 | "size" : "83.5x83.5" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "scale" : "1x", 91 | "size" : "1024x1024" 92 | } 93 | ], 94 | "info" : { 95 | "author" : "xcode", 96 | "version" : 1 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /Face Recognize/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Face Recognize/Controller/CameraController.swift: 
-------------------------------------------------------------------------------- 1 | // 2 | // CameraController.swift 3 | // Face Recognize 4 | // 5 | // Created by John Smith on 2021/8/20. 6 | // 7 | 8 | import Foundation 9 | import AVFoundation 10 | import UIKit 11 | 12 | struct IdentifiablePoint: Identifiable { 13 | var id = UUID() 14 | var x: CGFloat 15 | var y: CGFloat 16 | } 17 | 18 | class CameraController: NSObject, ObservableObject, AVCaptureVideoDataOutputSampleBufferDelegate { 19 | @Published var session = AVCaptureSession() 20 | @Published var output = AVCaptureVideoDataOutput(); 21 | @Published var imageQualityResult = ImageQualityResult.init() 22 | @Published var faceRect = CGRect.zero; 23 | @Published var landmarks: [IdentifiablePoint] = [] 24 | @Published var features: [Double] = [] 25 | var input: AVCaptureDeviceInput! 26 | @Published var preview: AVCaptureVideoPreviewLayer! 27 | @Published var enhancedImg: UIImage? 28 | private let videoDataOutputQueue = DispatchQueue(label: "VideoDataOutput", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem) 29 | private var lock = false 30 | 31 | func check() { 32 | switch AVCaptureDevice.authorizationStatus(for: .video) { 33 | case .authorized: // 已被用户同意使用摄像头 34 | self.setup() 35 | return; 36 | case .notDetermined: // 首次请求使用摄像头 37 | AVCaptureDevice.requestAccess(for: .video) {[weak self] granted in 38 | if granted { 39 | self?.setup() 40 | } 41 | } 42 | return; 43 | case .denied: // 用户拒绝了摄像头调用申请 44 | return 45 | 46 | case .restricted: // 用户无法开启摄像头 47 | return 48 | @unknown default: 49 | return; 50 | } 51 | } 52 | 53 | func setup() { 54 | do { 55 | session.beginConfiguration() 56 | let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back); 57 | input = try AVCaptureDeviceInput(device: device!) 
58 | if session.canAddInput(input) { 59 | session.addInput(input) 60 | } 61 | if session.canAddOutput(output) { 62 | session.addOutput(output) 63 | } 64 | output.setSampleBufferDelegate(self, queue: videoDataOutputQueue) 65 | session.commitConfiguration() 66 | } catch { 67 | print(error.localizedDescription) 68 | } 69 | } 70 | 71 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { 72 | guard lock == false, 73 | let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) 74 | else { 75 | return 76 | } 77 | lock = true 78 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer).oriented(.right) 79 | let context = CIContext() 80 | guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { 81 | print("错误:无法获取图像!") 82 | lock = false; 83 | return 84 | } 85 | let image = UIImage(cgImage: cgImage) 86 | detectFaceInImage(img: image, checkQuality: true) {[weak self] result in 87 | self?.updateImageQualityResult(result: result.imgQuality) 88 | if result.enhancedImg != nil { 89 | self?.updateEnhancedImage(img: result.enhancedImg) 90 | } 91 | guard result.valid else { 92 | self?.lock = false 93 | if result.error != nil { 94 | print(result.error?.localizedDescription ?? "发生未知错误") 95 | } 96 | return 97 | } 98 | self?.updateFaceRectAndFeatures(rect: result.faceRect!, faceFeatures: result.faceFeatures!) 99 | self?.lock = false 100 | } 101 | } 102 | 103 | func updateEnhancedImage(img: UIImage?) { 104 | DispatchQueue.main.async { 105 | self.enhancedImg = img; 106 | } 107 | } 108 | 109 | func updateImageQualityResult(result: ImageQualityResult) { 110 | DispatchQueue.main.async { 111 | self.imageQualityResult = result 112 | } 113 | } 114 | 115 | func updateFaceRectAndFeatures(rect: CGRect, faceFeatures: FaceFeatures) { 116 | DispatchQueue.main.async { 117 | var landmarks:[IdentifiablePoint] = [] 118 | for value in faceFeatures.landmarks { 119 | let point = value as! 
CGPoint 120 | landmarks.append(IdentifiablePoint(x: point.x, y: point.y)) 121 | } 122 | self.landmarks = landmarks; 123 | self.features = faceFeatures.features as! [Double] 124 | self.faceRect = rect 125 | } 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /Face Recognize/Controller/UserAdminController.swift: -------------------------------------------------------------------------------- 1 | // 2 | // UserAdminController.swift 3 | // Face Recognize 4 | // 5 | // Created by John Smith on 2021/8/25. 6 | // 7 | 8 | import Foundation 9 | import CoreData 10 | class UserAdminController: ObservableObject { 11 | @Published var userList: [User] = [] 12 | let container = PersistenceController.shared.container 13 | func fetchUserList() { 14 | let context = container.viewContext 15 | let request: NSFetchRequest = User.fetchRequest() 16 | request.sortDescriptors = [NSSortDescriptor(key: "name", ascending: true)] 17 | do { 18 | let list = try context.fetch(request) 19 | DispatchQueue.main.async { 20 | self.userList = list 21 | } 22 | } catch { 23 | print("fetch user list error...") 24 | } 25 | } 26 | 27 | func insertUser(name: String, avatar: UIImage, features: [Double]) { 28 | let context = container.newBackgroundContext() 29 | guard let featureData = try? NSKeyedArchiver.archivedData(withRootObject: features, requiringSecureCoding: false), 30 | let avatarData = avatar.jpegData(compressionQuality: 0.8) 31 | else { 32 | return; 33 | } 34 | let user = User(context: context) 35 | user.avatar = avatarData 36 | user.features = featureData 37 | user.name = name 38 | do { 39 | try context.save() 40 | } catch { 41 | print("无法保存数据") 42 | } 43 | fetchUserList() 44 | } 45 | 46 | func drop() { 47 | let context = container.newBackgroundContext() 48 | let request = NSBatchDeleteRequest(fetchRequest: NSFetchRequest(entityName: User.entity().name ?? "")) 49 | _ = try? 
context.execute(request) 50 | self.fetchUserList() 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /Face Recognize/Face_Recognize.xcdatamodeld/.xccurrentversion: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | _XCCurrentVersionName 6 | Face_Recognize.xcdatamodel 7 | 8 | 9 | -------------------------------------------------------------------------------- /Face Recognize/Face_Recognize.xcdatamodeld/Face_Recognize.xcdatamodel/contents: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /Face Recognize/Face_RecognizeApp.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Face_RecognizeApp.swift 3 | // Face Recognize 4 | // 5 | // Created by John Smith on 2021/8/20. 6 | // 7 | 8 | import SwiftUI 9 | 10 | @main 11 | struct Face_RecognizeApp: App { 12 | let persistenceController = PersistenceController.shared 13 | @StateObject var userAdminController = UserAdminController.init() 14 | var body: some Scene { 15 | WindowGroup { 16 | TabView { 17 | FaceRecognizeView(userAdminController: userAdminController) 18 | .tabItem { 19 | Image(systemName: "faceid") 20 | Text("人脸识别") 21 | } 22 | UserAdminView(userAdminController: userAdminController) 23 | .tabItem { 24 | Image(systemName: "square.3.stack.3d") 25 | Text("用户库") 26 | } 27 | } 28 | .onAppear(perform: { 29 | userAdminController.fetchUserList(); 30 | }) 31 | .environment(\.managedObjectContext, persistenceController.container.viewContext) 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /Face Recognize/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | NSCameraUsageDescription 6 | 需要使用摄像头进行人脸识别。 7 | 
CFBundleDevelopmentRegion 8 | $(DEVELOPMENT_LANGUAGE) 9 | CFBundleExecutable 10 | $(EXECUTABLE_NAME) 11 | CFBundleIdentifier 12 | $(PRODUCT_BUNDLE_IDENTIFIER) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | $(PRODUCT_NAME) 17 | CFBundlePackageType 18 | $(PRODUCT_BUNDLE_PACKAGE_TYPE) 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | UIApplicationSceneManifest 26 | 27 | UIApplicationSupportsMultipleScenes 28 | 29 | 30 | UIApplicationSupportsIndirectInputEvents 31 | 32 | UILaunchScreen 33 | 34 | UIRequiredDeviceCapabilities 35 | 36 | armv7 37 | 38 | UISupportedInterfaceOrientations 39 | 40 | UIInterfaceOrientationPortrait 41 | UIInterfaceOrientationLandscapeLeft 42 | UIInterfaceOrientationLandscapeRight 43 | 44 | UISupportedInterfaceOrientations~ipad 45 | 46 | UIInterfaceOrientationPortrait 47 | UIInterfaceOrientationPortraitUpsideDown 48 | UIInterfaceOrientationLandscapeLeft 49 | UIInterfaceOrientationLandscapeRight 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /Face Recognize/Persistence.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Persistence.swift 3 | // Face Recognize 4 | // 5 | // Created by John Smith on 2021/8/20. 6 | // 7 | 8 | import CoreData 9 | 10 | struct PersistenceController { 11 | static let shared = PersistenceController() 12 | let container: NSPersistentContainer 13 | init(inMemory: Bool = false) { 14 | container = NSPersistentContainer(name: "Face_Recognize") 15 | if inMemory { 16 | container.persistentStoreDescriptions.first!.url = URL(fileURLWithPath: "/dev/null") 17 | } 18 | container.loadPersistentStores(completionHandler: { (storeDescription, error) in 19 | if let error = error as NSError? 
{ 20 | fatalError("Unresolved error \(error), \(error.userInfo)") 21 | } 22 | }) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /Face Recognize/Preview Content/Preview Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Face Recognize/Util.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Util.swift 3 | // Face Recognize 4 | // 5 | // Created by John Smith on 2021/8/26. 6 | // 7 | 8 | import Foundation 9 | import UIKit 10 | import Vision 11 | 12 | struct DetectResult { 13 | var imgQuality: ImageQualityResult 14 | var enhancedImg: UIImage? 15 | var faceFeatures: FaceFeatures? 16 | var faceRect: CGRect? 17 | var valid: Bool = false 18 | var error: Error? 19 | } 20 | 21 | func calcDistance(features1: [Double], features2: [Double]) -> Double { 22 | let faceRecognizer = FaceRecognizer.shared() 23 | return faceRecognizer.calcDistance(features1, with: features2) 24 | } 25 | 26 | func detectFaceInImage(img: UIImage, checkQuality: Bool, callback: @escaping ((_ result: DetectResult) -> Void)) { 27 | let faceRecognizer = FaceRecognizer.shared() 28 | let rotatedImg = img.fixOrientation(); 29 | var detectResult: DetectResult! 
30 | if checkQuality { 31 | let imgQuality = faceRecognizer.checkImageQuality(rotatedImg) 32 | detectResult = DetectResult.init(imgQuality: imgQuality) 33 | detectResult.imgQuality = imgQuality 34 | guard imgQuality.passed else { 35 | callback(detectResult) 36 | return 37 | } 38 | } else { 39 | detectResult = DetectResult.init(imgQuality: ImageQualityResult.init(passed: true, brightness: 0, blur: 0, minSize: 0)) 40 | } 41 | let enhancedImg = faceRecognizer.enhanceImage(rotatedImg) 42 | guard let enhancedCgImage = enhancedImg.cgImage else { 43 | callback(detectResult) 44 | return; 45 | } 46 | detectResult.enhancedImg = enhancedImg 47 | let imageRequestHandler = VNImageRequestHandler(cgImage: enhancedCgImage, options: [:]); 48 | let request = VNDetectFaceRectanglesRequest(completionHandler: {(request, error) in 49 | if error != nil { 50 | detectResult.error = error 51 | callback(detectResult) 52 | return; 53 | } 54 | guard let faceDetectionRequest = request as? VNDetectFaceRectanglesRequest, 55 | let results = faceDetectionRequest.results as? 
[VNFaceObservation], 56 | results.count > 0 57 | else { 58 | callback(detectResult) 59 | return 60 | } 61 | let face = results[0]; 62 | //检测到的 face.boundingBox 即为人脸的位置。 63 | //由于VNFaceObservation结果中的Y轴方向是反向的,且他们的值相当于图片中长高的比例,需转换后使用。 64 | let x = face.boundingBox.origin.x * enhancedImg.size.width, 65 | y = (1 - face.boundingBox.origin.y - face.boundingBox.size.height) * enhancedImg.size.height, 66 | w = face.boundingBox.size.width * enhancedImg.size.width, 67 | h = face.boundingBox.size.height * enhancedImg.size.height 68 | let faceRect = CGRect.init(x: x, y: y, width: w, height: h) 69 | detectResult.faceRect = faceRect 70 | let faceFeatures = faceRecognizer.genFeatures(enhancedImg, withFace: faceRect) 71 | if (faceFeatures.features.count == 0 || faceFeatures.landmarks.count == 0) { 72 | callback(detectResult) 73 | return 74 | } 75 | detectResult.faceFeatures = faceFeatures 76 | detectResult.valid = true 77 | callback(detectResult) 78 | }) 79 | do { 80 | try imageRequestHandler.perform([request]) 81 | } catch let error as NSError { 82 | detectResult.error = error 83 | callback(detectResult) 84 | } 85 | } 86 | 87 | 88 | extension UIImage { 89 | func fixOrientation() -> UIImage { 90 | 91 | // No-op if the orientation is already correct 92 | if ( self.imageOrientation == .up ) { 93 | return self; 94 | } 95 | 96 | // We need to calculate the proper transformation to make the image upright. 97 | // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored. 
98 | var transform: CGAffineTransform = .identity 99 | 100 | if ( self.imageOrientation == .down || self.imageOrientation == .downMirrored ) { 101 | transform = transform.translatedBy(x: self.size.width, y: self.size.height) 102 | transform = transform.rotated(by: .pi) 103 | } 104 | 105 | if ( self.imageOrientation == .left || self.imageOrientation == .leftMirrored ) { 106 | transform = transform.translatedBy(x: self.size.width, y: 0) 107 | transform = transform.rotated(by: .pi/2) 108 | } 109 | 110 | if ( self.imageOrientation == .right || self.imageOrientation == .rightMirrored ) { 111 | transform = transform.translatedBy(x: 0, y: self.size.height); 112 | transform = transform.rotated(by: -.pi/2); 113 | } 114 | 115 | if ( self.imageOrientation == .upMirrored || self.imageOrientation == .downMirrored ) { 116 | transform = transform.translatedBy(x: self.size.width, y: 0) 117 | transform = transform.scaledBy(x: -1, y: 1) 118 | } 119 | 120 | if ( self.imageOrientation == .leftMirrored || self.imageOrientation == .rightMirrored ) { 121 | transform = transform.translatedBy(x: self.size.height, y: 0); 122 | transform = transform.scaledBy(x: -1, y: 1); 123 | } 124 | 125 | // Now we draw the underlying CGImage into a new context, applying the transform 126 | // calculated above. 
127 | let ctx: CGContext = CGContext(data: nil, width: Int(self.size.width), height: Int(self.size.height), 128 | bitsPerComponent: self.cgImage!.bitsPerComponent, bytesPerRow: 0, 129 | space: self.cgImage!.colorSpace!, 130 | bitmapInfo: self.cgImage!.bitmapInfo.rawValue)!; 131 | 132 | ctx.concatenate(transform) 133 | 134 | if ( self.imageOrientation == .left || 135 | self.imageOrientation == .leftMirrored || 136 | self.imageOrientation == .right || 137 | self.imageOrientation == .rightMirrored ) { 138 | ctx.draw(self.cgImage!, in: CGRect(x: 0.0,y: 0.0,width: self.size.height,height: self.size.width)) 139 | } else { 140 | ctx.draw(self.cgImage!, in: CGRect(x: 0.0,y: 0.0,width: self.size.width,height: self.size.height)) 141 | } 142 | 143 | // And now we just create a new UIImage from the drawing context and return it 144 | return UIImage(cgImage: ctx.makeImage()!) 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /Face Recognize/View/CameraPreviewView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CameraPreviewView.swift 3 | // Face Recognize 4 | // 5 | // Created by John Smith on 2021/8/21. 
6 | // 7 | 8 | import SwiftUI 9 | import AVFoundation 10 | 11 | struct CameraPreviewView: UIViewRepresentable { 12 | @StateObject var cameraController: CameraController 13 | func makeUIView(context: Context) -> UIView { 14 | let view = UIView(frame: UIScreen.main.bounds) 15 | cameraController.preview = AVCaptureVideoPreviewLayer(session: cameraController.session) 16 | cameraController.preview.frame = view.frame 17 | cameraController.preview.videoGravity = .resizeAspectFill 18 | view.layer.addSublayer(cameraController.preview) 19 | cameraController.check(); 20 | cameraController.session.startRunning() 21 | return view; 22 | } 23 | 24 | func updateUIView(_ uiView: UIViewType, context: Context) { 25 | 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /Face Recognize/View/FaceRecognizeView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // FaceRecognizeView.swift 3 | // Face Recognize 4 | // 5 | // Created by John Smith on 2021/8/21. 
/// Live recognition screen: camera preview, image-quality overlay, optional
/// face-tracking preview, and the best database match rendered on top.
struct FaceRecognizeView: View {
    /// Maximum feature distance accepted as a match (was an inline magic 0.6).
    private static let matchThreshold = 0.6
    /// Sentinel distance meaning "no match found yet".
    private static let noMatchDistance = 100.0

    // Created here, so @StateObject ownership is correct.
    @StateObject private var cameraController = CameraController.init()
    // Injected by the parent — observe, don't own.
    @ObservedObject var userAdminController: UserAdminController
    @State private var showEnhancedImg = false
    @State private var name: String = "未识别"
    @State private var distance: Double = 100

    var body: some View {
        ZStack {
            CameraPreviewView(cameraController: cameraController)
                .ignoresSafeArea()
            ImageQualityMonitorView(cameraController: cameraController)
            VStack {
                HStack {
                    Spacer()
                    Toggle.init(isOn: $showEnhancedImg) {
                        Spacer()
                        Text("人脸追踪预览")
                    }.frame(width: 180)
                }
                Spacer()
            }.padding()
            // Bind the optional once instead of testing nil then force-unwrapping.
            if let enhanced = cameraController.enhancedImg, showEnhancedImg {
                ZStack {
                    HStack {
                        Spacer()
                        VStack {
                            DetectResultView(img: enhanced,
                                             landmarks: cameraController.landmarks,
                                             faceRect: cameraController.faceRect)
                        }
                    }
                }
            }
            VStack {
                HStack {
                    Text(name)
                        .font(.title)
                        .foregroundColor(name == "未识别" ? .red : .green)
                        .padding()
                        .background(Color.white)
                }
                Spacer()
            }
        }
        .onChange(of: cameraController.features) { liveFeatures in
            // Linear scan over the user list for the nearest stored embedding.
            var bestDistance = Self.noMatchDistance
            var bestName = "未识别"
            for user in userAdminController.userList {
                // Skip records with missing/corrupt data instead of crashing
                // (previously `f2!` and `user.name!` force-unwraps).
                // NOTE(review): unarchiveTopLevelObjectWithData is deprecated;
                // consider NSKeyedUnarchiver.unarchivedObject(ofClasses:from:).
                guard let featureData = user.features,
                      let userName = user.name,
                      let stored = try? NSKeyedUnarchiver.unarchiveTopLevelObjectWithData(featureData) as? [Double]
                else {
                    continue
                }
                let d = calcDistance(features1: liveFeatures, features2: stored)
                if d < bestDistance && d < Self.matchThreshold {
                    bestDistance = d
                    bestName = userName
                }
            }
            self.name = bestName
            self.distance = bestDistance
        }
    }
}

/// Draws the enhanced face image with the detected face rectangle (red) and
/// landmark points (green) overlaid at their pixel offsets.
struct DetectResultView: View {
    var img: UIImage
    var landmarks: [IdentifiablePoint]
    var faceRect: CGRect
    var body: some View {
        ZStack(alignment: .topLeading) {
            Image(uiImage: img)
            Rectangle()
                .fill(Color.clear)
                .border(Color.red)
                .frame(width: faceRect.size.width, height: faceRect.size.height)
                .offset(x: faceRect.origin.x, y: faceRect.origin.y)
            ForEach(landmarks) { point in
                Circle()
                    .fill(Color.green)
                    .frame(width: 5, height: 5)
                    .offset(x: point.x, y: point.y)
            }
        }
    }
}

/// Top-left banner showing the current blur/brightness measurements,
/// green when the quality gate passes and red otherwise.
struct ImageQualityMonitorView: View {
    // Injected by the parent — observe, don't own.
    @ObservedObject var cameraController: CameraController
    var body: some View {
        VStack {
            HStack {
                Text("清晰度:\(cameraController.imageQualityResult.blur) 亮度:\(cameraController.imageQualityResult.brightness)")
                    .fontWeight(.bold)
                    .foregroundColor(cameraController.imageQualityResult.passed ? .green : .red)
                    .padding()
                    .background(Color.white)
                Spacer()
            }
            Spacer()
        }
    }
}
/// Bridges `UIImagePickerController` delegate callbacks back into SwiftUI.
class Coordinator: NSObject, UINavigationControllerDelegate, UIImagePickerControllerDelegate {
    var picker: ImagePickerView

    init(picker: ImagePickerView) {
        self.picker = picker
    }

    func imagePickerController(_ picker: UIImagePickerController,
                               didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
        // Prefer the cropped/edited image, but fall back to the original so a
        // pick is never silently dropped when no edited variant is provided.
        guard let selectedImage = (info[.editedImage] ?? info[.originalImage]) as? UIImage else {
            return
        }
        self.picker.selectedImage = selectedImage
        self.picker.isPresented.wrappedValue.dismiss()
    }
}

/// SwiftUI wrapper around `UIImagePickerController` (camera or photo library).
struct ImagePickerView: UIViewControllerRepresentable {
    @Binding var selectedImage: UIImage?
    @Environment(\.presentationMode) var isPresented
    var sourceType: UIImagePickerController.SourceType

    func makeUIViewController(context: Context) -> UIImagePickerController {
        let imagePicker = UIImagePickerController()
        // Requesting .camera on hardware without one raises an exception;
        // degrade to the photo library instead of crashing.
        imagePicker.sourceType = UIImagePickerController.isSourceTypeAvailable(self.sourceType)
            ? self.sourceType
            : .photoLibrary
        imagePicker.allowsEditing = true
        imagePicker.delegate = context.coordinator
        return imagePicker
    }

    func updateUIViewController(_ uiViewController: UIImagePickerController, context: Context) {
        // Picker configuration is fixed at creation time.
    }

    /// Connects the Coordinator class with this struct.
    func makeCoordinator() -> Coordinator {
        return Coordinator(picker: self)
    }
}
/// User database screen: scrollable user list plus a toolbar menu for adding
/// users and wiping the store.
struct UserAdminView: View {
    @StateObject var userAdminController: UserAdminController
    @State private var showNewUserSheet = false
    var body: some View {
        NavigationView {
            ScrollView {
                LazyVStack(content: {
                    ForEach(userAdminController.userList) { user in
                        HStack {
                            Text(user.name ?? "未设置名称")
                                .frame(width: 200, alignment: .leading)
                                .padding()
                            // Avatar data may be absent or undecodable; fall back
                            // to the SF Symbol instead of force-unwrapping.
                            Image(uiImage: user.avatar.flatMap { UIImage(data: $0) }
                                    ?? UIImage.init(systemName: "person.crop.circle")!)
                                .resizable()
                                .aspectRatio(contentMode: .fill)
                                .frame(width: 80, height: 80)
                                .cornerRadius(10)
                                .clipped()
                                .padding()
                            Spacer()
                        }
                        .border(Color.gray)
                    }
                }).padding()
            }
            .sheet(isPresented: $showNewUserSheet, content: {
                NewUserView(userAdminController: userAdminController, showNewUserSheet: $showNewUserSheet)
            })
            .toolbar {
                ToolbarItem(placement: .primaryAction) {
                    Menu {
                        Button("新增用户") {
                            showNewUserSheet.toggle()
                        }
                        Button("清空数据库") {
                            userAdminController.drop()
                        }
                    } label: {
                        Image(systemName: "gearshape.fill")
                    }
                }
            }
            .navigationTitle("用户库")
        }
        .navigationViewStyle(StackNavigationViewStyle())
    }
}

/// Sheet for enrolling a new user: pick a photo, compute face features,
/// preview the detection result, then persist the record.
struct NewUserView: View {
    @StateObject var userAdminController: UserAdminController
    @State var genFeatureComplete: Bool = false
    @Binding var showNewUserSheet: Bool
    @State private var showImagePicker = false
    @State private var selectedImage: UIImage?
    @State private var userName: String = ""
    @State private var showSelectImageSource = false
    @State private var sourceType: UIImagePickerController.SourceType = .photoLibrary
    @State private var errMsg: String?
    @State private var showMsg = false
    @State var features: [Double]?
    @State var enhancedImg: UIImage?
    @State var faceRect: CGRect = CGRect.zero
    @State var landmarks: [IdentifiablePoint] = []
    @State var imgQuality: ImageQualityResult!
    var body: some View {
        Form {
            TextField("姓名", text: $userName)
            Button(action: {
                showSelectImageSource.toggle()
            }, label: {
                if let selected = selectedImage {
                    Image(uiImage: selected)
                        .resizable()
                        .aspectRatio(contentMode: .fill)
                        .frame(width: 100, height: 100)
                        .cornerRadius(10)
                        .clipped()
                }
                else {
                    Text("选择图片")
                }
            })
            .actionSheet(isPresented: $showSelectImageSource, content: {
                ActionSheet(title: Text("选择图片"), message: nil, buttons: [
                    .default(Text("拍照"), action: {
                        sourceType = .camera
                        showImagePicker = true
                    }),
                    .default(Text("图库"), action: {
                        sourceType = .photoLibrary
                        showImagePicker = true
                    })
                ])
            })
            Button("计算人脸特征") {
                // Button is disabled while selectedImage is nil; the guard keeps
                // us crash-free even if that invariant is ever broken.
                guard let img = selectedImage else { return }
                detectFaceInImage(img: img, checkQuality: false) { result in
                    guard result.valid,
                          let faceFeatures = result.faceFeatures,
                          let detectedRect = result.faceRect else {
                        // Build a user-facing explanation of why detection failed.
                        var msg = ""
                        if result.faceRect == nil {
                            msg += " - 所选图片中未检测到人脸"
                        }
                        else if result.faceFeatures == nil {
                            msg += " - 检测到人脸,但无法计算人脸特征"
                        }
                        if result.error != nil {
                            msg += " - " + (result.error?.localizedDescription ?? "未知错误")
                        }
                        errMsg = msg
                        showMsg.toggle()
                        return
                    }
                    faceRect = detectedRect
                    features = faceFeatures.features as? [Double]
                    enhancedImg = result.enhancedImg
                    imgQuality = result.imgQuality
                    // Safely convert landmark values; a non-point element is
                    // skipped rather than crashing (was `value as! CGPoint`).
                    self.landmarks = faceFeatures.landmarks.compactMap { value in
                        (value as? CGPoint).map { IdentifiablePoint(x: $0.x, y: $0.y) }
                    }
                    genFeatureComplete = true
                }
            }
            .disabled(selectedImage == nil)
            .alert(isPresented: $showMsg) {
                Alert(title: Text("无法计算人脸特征"), message: Text(errMsg ?? "未知错误"), dismissButton: .default(Text("好")))
            }
            // Only shown after a successful detection, and bound safely.
            if genFeatureComplete, let enhanced = enhancedImg {
                DetectResultView(img: enhanced, landmarks: landmarks, faceRect: faceRect)
            }
            Button("保存") {
                // Both values are set before genFeatureComplete enables this
                // button; guard instead of force-unwrap all the same.
                guard let avatar = selectedImage, let userFeatures = features else { return }
                userAdminController.insertUser(name: userName, avatar: avatar, features: userFeatures)
                showNewUserSheet = false
            }
            .disabled(userName == "" || !genFeatureComplete)
        }
        .sheet(isPresented: $showImagePicker) {
            ImagePickerView(selectedImage: $selectedImage, sourceType: sourceType)
                .onChange(of: selectedImage) { newValue in
                    // A new photo invalidates previously computed features.
                    genFeatureComplete = false
                }
        }
    }
}
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// Result of the pre-detection image quality gate.
/// Thresholds live in FaceRecognizer.mm (MIN_IMG_SIZE, MIN_SOBEL_VALUE,
/// MIN/MAX_BRIGHTNESS_VALUE).
struct ImageQualityResult {
    bool passed;       // true when all quality checks succeed
    double brightness; // measured brightness (compared against MIN/MAX_BRIGHTNESS_VALUE)
    double blur;       // Sobel-based sharpness metric — confirm direction vs MIN_SOBEL_VALUE
    int minSize;       // minimum acceptable image dimension (see MIN_IMG_SIZE)
};


/// Landmarks and embedding computed for a single detected face.
@interface FaceFeatures : NSObject
// `copy` guards against mutable-subclass aliasing; generics document the
// element types the Swift side relies on (`features as? [Double]`,
// landmark elements read as points).
@property (copy, nonatomic) NSArray<NSValue *> *landmarks;
@property (copy, nonatomic) NSArray<NSNumber *> *features;
@end


/// Singleton facade over the dlib-based detection/recognition pipeline.
@interface FaceRecognizer : NSObject
/// Shared singleton instance.
+ (FaceRecognizer *)shared;
/// Runs the brightness/blur/size quality gate on `img`.
- (struct ImageQualityResult)checkImageQuality:(UIImage *)img;
/// Returns an enhanced copy of `img` used for detection preview.
- (UIImage *)enhanceImage:(UIImage *)img;
/// Computes landmarks + embedding for the face inside `rect` of `img`.
- (FaceFeatures *)genFeatures:(UIImage *)img withFaceRect:(CGRect)rect;
/// Distance between two feature vectors; smaller means more similar.
- (double)calcDistance:(NSArray<NSNumber *> *)f1 with:(NSArray<NSNumber *> *)f2;
@end

NS_ASSUME_NONNULL_END