├── LICENSE
├── README.md
├── face_detection
├── face_detection.xcodeproj
│ ├── project.pbxproj
│ ├── project.xcworkspace
│ │ ├── contents.xcworkspacedata
│ │ └── xcuserdata
│ │ │ └── Soubhi.xcuserdatad
│ │ │ └── UserInterfaceState.xcuserstate
│ └── xcuserdata
│ │ └── Soubhi.xcuserdatad
│ │ └── xcschemes
│ │ └── xcschememanagement.plist
└── face_detection
│ ├── AppDelegate.swift
│ ├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
│ ├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
│ ├── Info.plist
│ ├── ViewController.swift
│ ├── classes
│ ├── FrameExtractor.swift
│ ├── OpencvWrapper.h
│ ├── OpencvWrapper.mm
│ ├── PrefixHeader.pch
│ └── face_detection-Bridging-Header.h
│ └── res
│ ├── haarcascade_eye.xml
│ └── haarcascade_frontalface_default.xml
└── screenshots
├── demo.png
└── demo2.jpg
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Soubhi Hadri
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Face-Detection-IOS
2 | iOS application for face detection using Haar feature-based cascade classifiers in OpenCV
3 |
4 | This [article](https://medium.com/@soubhimhadri/face-detection-in-ios-4330961e9865) contains explanation about the project.
5 |
6 | If you do not want to go through the explanation, you should:
7 | 1. Download the latest version of [OpenCV](http://opencv.org/releases.html). Drag & drop the opencv2.framework into the *libs* folder.
8 | 2. Check and fix the paths for the files in the *res* folder.
9 |
10 |
11 |
12 |
13 |
--------------------------------------------------------------------------------
/face_detection/face_detection.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 48;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 63F2C2AE204B807A00F9CEDF /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 63F2C2AD204B807A00F9CEDF /* AppDelegate.swift */; };
11 | 63F2C2B0204B807A00F9CEDF /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 63F2C2AF204B807A00F9CEDF /* ViewController.swift */; };
12 | 63F2C2B3204B807A00F9CEDF /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 63F2C2B1204B807A00F9CEDF /* Main.storyboard */; };
13 | 63F2C2B5204B807A00F9CEDF /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 63F2C2B4204B807A00F9CEDF /* Assets.xcassets */; };
14 | 63F2C2B8204B807A00F9CEDF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 63F2C2B6204B807A00F9CEDF /* LaunchScreen.storyboard */; };
15 | 63F2C2C4204B809500F9CEDF /* haarcascade_frontalface_default.xml in Resources */ = {isa = PBXBuildFile; fileRef = 63F2C2C2204B809400F9CEDF /* haarcascade_frontalface_default.xml */; };
16 | 63F2C2C5204B809500F9CEDF /* haarcascade_eye.xml in Resources */ = {isa = PBXBuildFile; fileRef = 63F2C2C3204B809400F9CEDF /* haarcascade_eye.xml */; };
17 | 63F2C2C7204B80A600F9CEDF /* opencv2.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 63F2C2C6204B80A600F9CEDF /* opencv2.framework */; };
18 | 63F2C2CE204B80CE00F9CEDF /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 63F2C2C9204B80CE00F9CEDF /* AssetsLibrary.framework */; };
19 | 63F2C2CF204B80CE00F9CEDF /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 63F2C2CA204B80CE00F9CEDF /* CoreFoundation.framework */; };
20 | 63F2C2D0204B80CE00F9CEDF /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 63F2C2CB204B80CE00F9CEDF /* CoreGraphics.framework */; };
21 | 63F2C2D1204B80CE00F9CEDF /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 63F2C2CC204B80CE00F9CEDF /* CoreMedia.framework */; };
22 | 63F2C2D2204B80CE00F9CEDF /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 63F2C2CD204B80CE00F9CEDF /* Accelerate.framework */; };
23 | 63F2C2D6204B80EA00F9CEDF /* OpencvWrapper.mm in Sources */ = {isa = PBXBuildFile; fileRef = 63F2C2D5204B80EA00F9CEDF /* OpencvWrapper.mm */; };
24 | 63F2C2D9204BA00900F9CEDF /* FrameExtractor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 63F2C2D8204BA00900F9CEDF /* FrameExtractor.swift */; };
25 | /* End PBXBuildFile section */
26 |
27 | /* Begin PBXFileReference section */
28 | 63F2C2AA204B807A00F9CEDF /* face_detection.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = face_detection.app; sourceTree = BUILT_PRODUCTS_DIR; };
29 | 63F2C2AD204B807A00F9CEDF /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; };
30 | 63F2C2AF204B807A00F9CEDF /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; };
31 | 63F2C2B2204B807A00F9CEDF /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
32 | 63F2C2B4204B807A00F9CEDF /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
33 | 63F2C2B7204B807A00F9CEDF /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; };
34 | 63F2C2B9204B807A00F9CEDF /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
35 | 63F2C2C2204B809400F9CEDF /* haarcascade_frontalface_default.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; name = haarcascade_frontalface_default.xml; path = haarcascade_frontalface_default.xml; sourceTree = ""; };
36 | 63F2C2C3204B809400F9CEDF /* haarcascade_eye.xml */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xml; name = haarcascade_eye.xml; path = haarcascade_eye.xml; sourceTree = ""; };
37 | 63F2C2C6204B80A600F9CEDF /* opencv2.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = opencv2.framework; sourceTree = ""; };
38 | 63F2C2C9204B80CE00F9CEDF /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = System/Library/Frameworks/AssetsLibrary.framework; sourceTree = SDKROOT; };
39 | 63F2C2CA204B80CE00F9CEDF /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = System/Library/Frameworks/CoreFoundation.framework; sourceTree = SDKROOT; };
40 | 63F2C2CB204B80CE00F9CEDF /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
41 | 63F2C2CC204B80CE00F9CEDF /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
42 | 63F2C2CD204B80CE00F9CEDF /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
43 | 63F2C2D3204B80EA00F9CEDF /* face_detection-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "face_detection-Bridging-Header.h"; sourceTree = ""; };
44 | 63F2C2D4204B80EA00F9CEDF /* OpencvWrapper.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = OpencvWrapper.h; sourceTree = ""; };
45 | 63F2C2D5204B80EA00F9CEDF /* OpencvWrapper.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = OpencvWrapper.mm; sourceTree = ""; };
46 | 63F2C2D7204B80FD00F9CEDF /* PrefixHeader.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PrefixHeader.pch; sourceTree = ""; };
47 | 63F2C2D8204BA00900F9CEDF /* FrameExtractor.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FrameExtractor.swift; sourceTree = ""; };
48 | /* End PBXFileReference section */
49 |
50 | /* Begin PBXFrameworksBuildPhase section */
51 | 63F2C2A7204B807A00F9CEDF /* Frameworks */ = {
52 | isa = PBXFrameworksBuildPhase;
53 | buildActionMask = 2147483647;
54 | files = (
55 | 63F2C2CE204B80CE00F9CEDF /* AssetsLibrary.framework in Frameworks */,
56 | 63F2C2CF204B80CE00F9CEDF /* CoreFoundation.framework in Frameworks */,
57 | 63F2C2D0204B80CE00F9CEDF /* CoreGraphics.framework in Frameworks */,
58 | 63F2C2D1204B80CE00F9CEDF /* CoreMedia.framework in Frameworks */,
59 | 63F2C2D2204B80CE00F9CEDF /* Accelerate.framework in Frameworks */,
60 | 63F2C2C7204B80A600F9CEDF /* opencv2.framework in Frameworks */,
61 | );
62 | runOnlyForDeploymentPostprocessing = 0;
63 | };
64 | /* End PBXFrameworksBuildPhase section */
65 |
66 | /* Begin PBXGroup section */
67 | 63F2C2A1204B807A00F9CEDF = {
68 | isa = PBXGroup;
69 | children = (
70 | 63F2C2AC204B807A00F9CEDF /* face_detection */,
71 | 63F2C2AB204B807A00F9CEDF /* Products */,
72 | 63F2C2C8204B80CE00F9CEDF /* Frameworks */,
73 | );
74 | sourceTree = "";
75 | };
76 | 63F2C2AB204B807A00F9CEDF /* Products */ = {
77 | isa = PBXGroup;
78 | children = (
79 | 63F2C2AA204B807A00F9CEDF /* face_detection.app */,
80 | );
81 | name = Products;
82 | sourceTree = "";
83 | };
84 | 63F2C2AC204B807A00F9CEDF /* face_detection */ = {
85 | isa = PBXGroup;
86 | children = (
87 | 63F2C2C1204B808700F9CEDF /* classes */,
88 | 63F2C2C0204B808300F9CEDF /* libs */,
89 | 63F2C2BF204B807F00F9CEDF /* res */,
90 | 63F2C2AD204B807A00F9CEDF /* AppDelegate.swift */,
91 | 63F2C2AF204B807A00F9CEDF /* ViewController.swift */,
92 | 63F2C2B1204B807A00F9CEDF /* Main.storyboard */,
93 | 63F2C2B4204B807A00F9CEDF /* Assets.xcassets */,
94 | 63F2C2B6204B807A00F9CEDF /* LaunchScreen.storyboard */,
95 | 63F2C2B9204B807A00F9CEDF /* Info.plist */,
96 | );
97 | path = face_detection;
98 | sourceTree = "";
99 | };
100 | 63F2C2BF204B807F00F9CEDF /* res */ = {
101 | isa = PBXGroup;
102 | children = (
103 | 63F2C2C3204B809400F9CEDF /* haarcascade_eye.xml */,
104 | 63F2C2C2204B809400F9CEDF /* haarcascade_frontalface_default.xml */,
105 | );
106 | path = res;
107 | sourceTree = "";
108 | };
109 | 63F2C2C0204B808300F9CEDF /* libs */ = {
110 | isa = PBXGroup;
111 | children = (
112 | 63F2C2C6204B80A600F9CEDF /* opencv2.framework */,
113 | );
114 | path = libs;
115 | sourceTree = "";
116 | };
117 | 63F2C2C1204B808700F9CEDF /* classes */ = {
118 | isa = PBXGroup;
119 | children = (
120 | 63F2C2D8204BA00900F9CEDF /* FrameExtractor.swift */,
121 | 63F2C2D4204B80EA00F9CEDF /* OpencvWrapper.h */,
122 | 63F2C2D5204B80EA00F9CEDF /* OpencvWrapper.mm */,
123 | 63F2C2D3204B80EA00F9CEDF /* face_detection-Bridging-Header.h */,
124 | 63F2C2D7204B80FD00F9CEDF /* PrefixHeader.pch */,
125 | );
126 | path = classes;
127 | sourceTree = "";
128 | };
129 | 63F2C2C8204B80CE00F9CEDF /* Frameworks */ = {
130 | isa = PBXGroup;
131 | children = (
132 | 63F2C2CD204B80CE00F9CEDF /* Accelerate.framework */,
133 | 63F2C2C9204B80CE00F9CEDF /* AssetsLibrary.framework */,
134 | 63F2C2CA204B80CE00F9CEDF /* CoreFoundation.framework */,
135 | 63F2C2CB204B80CE00F9CEDF /* CoreGraphics.framework */,
136 | 63F2C2CC204B80CE00F9CEDF /* CoreMedia.framework */,
137 | );
138 | name = Frameworks;
139 | sourceTree = "";
140 | };
141 | /* End PBXGroup section */
142 |
143 | /* Begin PBXNativeTarget section */
144 | 63F2C2A9204B807A00F9CEDF /* face_detection */ = {
145 | isa = PBXNativeTarget;
146 | buildConfigurationList = 63F2C2BC204B807A00F9CEDF /* Build configuration list for PBXNativeTarget "face_detection" */;
147 | buildPhases = (
148 | 63F2C2A6204B807A00F9CEDF /* Sources */,
149 | 63F2C2A7204B807A00F9CEDF /* Frameworks */,
150 | 63F2C2A8204B807A00F9CEDF /* Resources */,
151 | );
152 | buildRules = (
153 | );
154 | dependencies = (
155 | );
156 | name = face_detection;
157 | productName = face_detection;
158 | productReference = 63F2C2AA204B807A00F9CEDF /* face_detection.app */;
159 | productType = "com.apple.product-type.application";
160 | };
161 | /* End PBXNativeTarget section */
162 |
163 | /* Begin PBXProject section */
164 | 63F2C2A2204B807A00F9CEDF /* Project object */ = {
165 | isa = PBXProject;
166 | attributes = {
167 | LastSwiftUpdateCheck = 0920;
168 | LastUpgradeCheck = 0920;
169 | ORGANIZATIONNAME = hadri;
170 | TargetAttributes = {
171 | 63F2C2A9204B807A00F9CEDF = {
172 | CreatedOnToolsVersion = 9.2;
173 | LastSwiftMigration = 0920;
174 | ProvisioningStyle = Automatic;
175 | };
176 | };
177 | };
178 | buildConfigurationList = 63F2C2A5204B807A00F9CEDF /* Build configuration list for PBXProject "face_detection" */;
179 | compatibilityVersion = "Xcode 8.0";
180 | developmentRegion = en;
181 | hasScannedForEncodings = 0;
182 | knownRegions = (
183 | en,
184 | Base,
185 | );
186 | mainGroup = 63F2C2A1204B807A00F9CEDF;
187 | productRefGroup = 63F2C2AB204B807A00F9CEDF /* Products */;
188 | projectDirPath = "";
189 | projectRoot = "";
190 | targets = (
191 | 63F2C2A9204B807A00F9CEDF /* face_detection */,
192 | );
193 | };
194 | /* End PBXProject section */
195 |
196 | /* Begin PBXResourcesBuildPhase section */
197 | 63F2C2A8204B807A00F9CEDF /* Resources */ = {
198 | isa = PBXResourcesBuildPhase;
199 | buildActionMask = 2147483647;
200 | files = (
201 | 63F2C2B8204B807A00F9CEDF /* LaunchScreen.storyboard in Resources */,
202 | 63F2C2B5204B807A00F9CEDF /* Assets.xcassets in Resources */,
203 | 63F2C2C4204B809500F9CEDF /* haarcascade_frontalface_default.xml in Resources */,
204 | 63F2C2C5204B809500F9CEDF /* haarcascade_eye.xml in Resources */,
205 | 63F2C2B3204B807A00F9CEDF /* Main.storyboard in Resources */,
206 | );
207 | runOnlyForDeploymentPostprocessing = 0;
208 | };
209 | /* End PBXResourcesBuildPhase section */
210 |
211 | /* Begin PBXSourcesBuildPhase section */
212 | 63F2C2A6204B807A00F9CEDF /* Sources */ = {
213 | isa = PBXSourcesBuildPhase;
214 | buildActionMask = 2147483647;
215 | files = (
216 | 63F2C2D6204B80EA00F9CEDF /* OpencvWrapper.mm in Sources */,
217 | 63F2C2B0204B807A00F9CEDF /* ViewController.swift in Sources */,
218 | 63F2C2D9204BA00900F9CEDF /* FrameExtractor.swift in Sources */,
219 | 63F2C2AE204B807A00F9CEDF /* AppDelegate.swift in Sources */,
220 | );
221 | runOnlyForDeploymentPostprocessing = 0;
222 | };
223 | /* End PBXSourcesBuildPhase section */
224 |
225 | /* Begin PBXVariantGroup section */
226 | 63F2C2B1204B807A00F9CEDF /* Main.storyboard */ = {
227 | isa = PBXVariantGroup;
228 | children = (
229 | 63F2C2B2204B807A00F9CEDF /* Base */,
230 | );
231 | name = Main.storyboard;
232 | sourceTree = "";
233 | };
234 | 63F2C2B6204B807A00F9CEDF /* LaunchScreen.storyboard */ = {
235 | isa = PBXVariantGroup;
236 | children = (
237 | 63F2C2B7204B807A00F9CEDF /* Base */,
238 | );
239 | name = LaunchScreen.storyboard;
240 | sourceTree = "";
241 | };
242 | /* End PBXVariantGroup section */
243 |
244 | /* Begin XCBuildConfiguration section */
245 | 63F2C2BA204B807A00F9CEDF /* Debug */ = {
246 | isa = XCBuildConfiguration;
247 | buildSettings = {
248 | ALWAYS_SEARCH_USER_PATHS = NO;
249 | CLANG_ANALYZER_NONNULL = YES;
250 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
251 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
252 | CLANG_CXX_LIBRARY = "libc++";
253 | CLANG_ENABLE_MODULES = YES;
254 | CLANG_ENABLE_OBJC_ARC = YES;
255 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
256 | CLANG_WARN_BOOL_CONVERSION = YES;
257 | CLANG_WARN_COMMA = YES;
258 | CLANG_WARN_CONSTANT_CONVERSION = YES;
259 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
260 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
261 | CLANG_WARN_EMPTY_BODY = YES;
262 | CLANG_WARN_ENUM_CONVERSION = YES;
263 | CLANG_WARN_INFINITE_RECURSION = YES;
264 | CLANG_WARN_INT_CONVERSION = YES;
265 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
266 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
267 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
268 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
269 | CLANG_WARN_STRICT_PROTOTYPES = YES;
270 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
271 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
272 | CLANG_WARN_UNREACHABLE_CODE = YES;
273 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
274 | CODE_SIGN_IDENTITY = "iPhone Developer";
275 | COPY_PHASE_STRIP = NO;
276 | DEBUG_INFORMATION_FORMAT = dwarf;
277 | ENABLE_STRICT_OBJC_MSGSEND = YES;
278 | ENABLE_TESTABILITY = YES;
279 | GCC_C_LANGUAGE_STANDARD = gnu11;
280 | GCC_DYNAMIC_NO_PIC = NO;
281 | GCC_NO_COMMON_BLOCKS = YES;
282 | GCC_OPTIMIZATION_LEVEL = 0;
283 | GCC_PREPROCESSOR_DEFINITIONS = (
284 | "DEBUG=1",
285 | "$(inherited)",
286 | );
287 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
288 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
289 | GCC_WARN_UNDECLARED_SELECTOR = YES;
290 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
291 | GCC_WARN_UNUSED_FUNCTION = YES;
292 | GCC_WARN_UNUSED_VARIABLE = YES;
293 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
294 | MTL_ENABLE_DEBUG_INFO = YES;
295 | ONLY_ACTIVE_ARCH = YES;
296 | SDKROOT = iphoneos;
297 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
298 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
299 | };
300 | name = Debug;
301 | };
302 | 63F2C2BB204B807A00F9CEDF /* Release */ = {
303 | isa = XCBuildConfiguration;
304 | buildSettings = {
305 | ALWAYS_SEARCH_USER_PATHS = NO;
306 | CLANG_ANALYZER_NONNULL = YES;
307 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
308 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
309 | CLANG_CXX_LIBRARY = "libc++";
310 | CLANG_ENABLE_MODULES = YES;
311 | CLANG_ENABLE_OBJC_ARC = YES;
312 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
313 | CLANG_WARN_BOOL_CONVERSION = YES;
314 | CLANG_WARN_COMMA = YES;
315 | CLANG_WARN_CONSTANT_CONVERSION = YES;
316 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
317 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
318 | CLANG_WARN_EMPTY_BODY = YES;
319 | CLANG_WARN_ENUM_CONVERSION = YES;
320 | CLANG_WARN_INFINITE_RECURSION = YES;
321 | CLANG_WARN_INT_CONVERSION = YES;
322 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
323 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
324 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
325 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
326 | CLANG_WARN_STRICT_PROTOTYPES = YES;
327 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
328 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
329 | CLANG_WARN_UNREACHABLE_CODE = YES;
330 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
331 | CODE_SIGN_IDENTITY = "iPhone Developer";
332 | COPY_PHASE_STRIP = NO;
333 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
334 | ENABLE_NS_ASSERTIONS = NO;
335 | ENABLE_STRICT_OBJC_MSGSEND = YES;
336 | GCC_C_LANGUAGE_STANDARD = gnu11;
337 | GCC_NO_COMMON_BLOCKS = YES;
338 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
339 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
340 | GCC_WARN_UNDECLARED_SELECTOR = YES;
341 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
342 | GCC_WARN_UNUSED_FUNCTION = YES;
343 | GCC_WARN_UNUSED_VARIABLE = YES;
344 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
345 | MTL_ENABLE_DEBUG_INFO = NO;
346 | SDKROOT = iphoneos;
347 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
348 | VALIDATE_PRODUCT = YES;
349 | };
350 | name = Release;
351 | };
352 | 63F2C2BD204B807A00F9CEDF /* Debug */ = {
353 | isa = XCBuildConfiguration;
354 | buildSettings = {
355 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
356 | CLANG_ENABLE_MODULES = YES;
357 | CODE_SIGN_STYLE = Automatic;
358 | DEVELOPMENT_TEAM = 9FNZ88J7M8;
359 | FRAMEWORK_SEARCH_PATHS = (
360 | "$(inherited)",
361 | "$(PROJECT_DIR)/face_detection/libs",
362 | );
363 | GCC_PREFIX_HEADER = "$(SRCROOT)/face_detection/classes/PrefixHeader.pch";
364 | INFOPLIST_FILE = face_detection/Info.plist;
365 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
366 | PRODUCT_BUNDLE_IDENTIFIER = "net.hadri.face-detection";
367 | PRODUCT_NAME = "$(TARGET_NAME)";
368 | SWIFT_OBJC_BRIDGING_HEADER = "face_detection/classes/face_detection-Bridging-Header.h";
369 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
370 | SWIFT_VERSION = 3.0;
371 | TARGETED_DEVICE_FAMILY = "1,2";
372 | };
373 | name = Debug;
374 | };
375 | 63F2C2BE204B807A00F9CEDF /* Release */ = {
376 | isa = XCBuildConfiguration;
377 | buildSettings = {
378 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
379 | CLANG_ENABLE_MODULES = YES;
380 | CODE_SIGN_STYLE = Automatic;
381 | DEVELOPMENT_TEAM = 9FNZ88J7M8;
382 | FRAMEWORK_SEARCH_PATHS = (
383 | "$(inherited)",
384 | "$(PROJECT_DIR)/face_detection/libs",
385 | );
386 | GCC_PREFIX_HEADER = "$(SRCROOT)/face_detection/classes/PrefixHeader.pch";
387 | INFOPLIST_FILE = face_detection/Info.plist;
388 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
389 | PRODUCT_BUNDLE_IDENTIFIER = "net.hadri.face-detection";
390 | PRODUCT_NAME = "$(TARGET_NAME)";
391 | SWIFT_OBJC_BRIDGING_HEADER = "face_detection/classes/face_detection-Bridging-Header.h";
392 | SWIFT_VERSION = 3.0;
393 | TARGETED_DEVICE_FAMILY = "1,2";
394 | };
395 | name = Release;
396 | };
397 | /* End XCBuildConfiguration section */
398 |
399 | /* Begin XCConfigurationList section */
400 | 63F2C2A5204B807A00F9CEDF /* Build configuration list for PBXProject "face_detection" */ = {
401 | isa = XCConfigurationList;
402 | buildConfigurations = (
403 | 63F2C2BA204B807A00F9CEDF /* Debug */,
404 | 63F2C2BB204B807A00F9CEDF /* Release */,
405 | );
406 | defaultConfigurationIsVisible = 0;
407 | defaultConfigurationName = Release;
408 | };
409 | 63F2C2BC204B807A00F9CEDF /* Build configuration list for PBXNativeTarget "face_detection" */ = {
410 | isa = XCConfigurationList;
411 | buildConfigurations = (
412 | 63F2C2BD204B807A00F9CEDF /* Debug */,
413 | 63F2C2BE204B807A00F9CEDF /* Release */,
414 | );
415 | defaultConfigurationIsVisible = 0;
416 | defaultConfigurationName = Release;
417 | };
418 | /* End XCConfigurationList section */
419 | };
420 | rootObject = 63F2C2A2204B807A00F9CEDF /* Project object */;
421 | }
422 |
--------------------------------------------------------------------------------
/face_detection/face_detection.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/face_detection/face_detection.xcodeproj/project.xcworkspace/xcuserdata/Soubhi.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SubhiH/Face-Detection-IOS/04f42e82da2236f7b91858944f979221c5922f0d/face_detection/face_detection.xcodeproj/project.xcworkspace/xcuserdata/Soubhi.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/face_detection/face_detection.xcodeproj/xcuserdata/Soubhi.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | SchemeUserState
6 |
7 | face_detection.xcscheme
8 |
9 | orderHint
10 | 0
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/face_detection/face_detection/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // face_detection
4 | //
5 | // Created by Soubhi Hadri on 3/3/18.
6 | // Copyright © 2018 hadri. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | @UIApplicationMain
12 | class AppDelegate: UIResponder, UIApplicationDelegate {
13 |
14 | var window: UIWindow?
15 |
16 |
17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
18 | // Override point for customization after application launch.
19 | return true
20 | }
21 |
22 | func applicationWillResignActive(_ application: UIApplication) {
23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
24 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
25 | }
26 |
27 | func applicationDidEnterBackground(_ application: UIApplication) {
28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
30 | }
31 |
32 | func applicationWillEnterForeground(_ application: UIApplication) {
33 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
34 | }
35 |
36 | func applicationDidBecomeActive(_ application: UIApplication) {
37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
38 | }
39 |
40 | func applicationWillTerminate(_ application: UIApplication) {
41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
42 | }
43 |
44 |
45 | }
46 |
47 |
--------------------------------------------------------------------------------
/face_detection/face_detection/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | }
88 | ],
89 | "info" : {
90 | "version" : 1,
91 | "author" : "xcode"
92 | }
93 | }
--------------------------------------------------------------------------------
/face_detection/face_detection/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/face_detection/face_detection/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
--------------------------------------------------------------------------------
/face_detection/face_detection/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | $(DEVELOPMENT_LANGUAGE)
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.1
19 | CFBundleVersion
20 | 1.1
21 | LSRequiresIPhoneOS
22 |
23 | NSCameraUsageDescription
24 | for face detection
25 | UILaunchStoryboardName
26 | LaunchScreen
27 | UIMainStoryboardFile
28 | Main
29 | UIRequiredDeviceCapabilities
30 |
31 | armv7
32 |
33 | UISupportedInterfaceOrientations
34 |
35 | UIInterfaceOrientationPortrait
36 | UIInterfaceOrientationLandscapeLeft
37 | UIInterfaceOrientationLandscapeRight
38 |
39 | UISupportedInterfaceOrientations~ipad
40 |
41 | UIInterfaceOrientationPortrait
42 | UIInterfaceOrientationPortraitUpsideDown
43 | UIInterfaceOrientationLandscapeLeft
44 | UIInterfaceOrientationLandscapeRight
45 |
46 |
47 |
48 |
--------------------------------------------------------------------------------
/face_detection/face_detection/ViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.swift
3 | // face_detection
4 | //
5 | // Created by Soubhi Hadri on 3/3/18.
6 | // Copyright © 2018 hadri. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | class ViewController: UIViewController, FrameExtractorDelegate {
12 | @IBOutlet var imageview: UIImageView!
13 | var frameExtractor: FrameExtractor!
14 |
15 |
16 | func captured(image: UIImage) {
17 | imageview.image = OpencvWrapper.detect(image);
18 | }
19 |
20 |
21 | override func viewDidLoad() {
22 | super.viewDidLoad()
23 | frameExtractor = FrameExtractor()
24 | frameExtractor.delegate = self
25 |
26 | }
27 |
28 | override func didReceiveMemoryWarning() {
29 | super.didReceiveMemoryWarning()
30 | // Dispose of any resources that can be recreated.
31 | }
32 |
33 | @IBAction func flip_camera(_ sender: UIButton) {
34 | frameExtractor.flipCamera()
35 | }
36 |
37 | }
38 |
39 |
--------------------------------------------------------------------------------
/face_detection/face_detection/classes/FrameExtractor.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FrameExtractor.swift
3 | // Created by Bobo on 29/12/2016.
4 | //
5 |
6 | import UIKit
7 | import AVFoundation
8 |
/// Receives each captured camera frame, delivered on the main queue as a UIImage.
protocol FrameExtractorDelegate: class {
    func captured(image: UIImage)
}
12 |
/// Captures frames from the device camera via AVFoundation and delivers each
/// one to `delegate` as a UIImage on the main queue.
///
/// All capture-session mutation happens on the private serial `sessionQueue`.
class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    
    private var position = AVCaptureDevicePosition.back
    private let quality = AVCaptureSessionPreset352x288
    
    private var permissionGranted = false
    // Serial queue that owns all configuration of the capture session.
    private let sessionQueue = DispatchQueue(label: "session queue")
    private let captureSession = AVCaptureSession()
    // Reused for every frame; creating a CIContext per frame is expensive.
    private let context = CIContext()
    
    weak var delegate: FrameExtractorDelegate?
    
    override init() {
        super.init()
        checkPermission()
        // FIX: was `[unowned self]` — if the extractor is deallocated before the
        // queued work runs, unowned crashes. `[weak self]` makes teardown safe.
        sessionQueue.async { [weak self] in
            guard let strongSelf = self else { return }
            strongSelf.configureSession()
            strongSelf.captureSession.startRunning()
        }
    }
    
    deinit {
        // FIX: the session was never stopped anywhere; stop the camera when the
        // extractor goes away so the device isn't held open.
        captureSession.stopRunning()
    }
    
    /// Switches between the front and back cameras, rebuilding the session I/O.
    public func flipCamera() {
        sessionQueue.async { [weak self] in
            guard let strongSelf = self else { return }
            strongSelf.captureSession.beginConfiguration()
            // FIX: the original could early-return between beginConfiguration()
            // and commitConfiguration(), leaving the session permanently in a
            // configuration transaction. `defer` guarantees the pair is balanced.
            defer { strongSelf.captureSession.commitConfiguration() }
            guard let currentCaptureInput = strongSelf.captureSession.inputs.first as? AVCaptureInput else { return }
            strongSelf.captureSession.removeInput(currentCaptureInput)
            guard let currentCaptureOutput = strongSelf.captureSession.outputs.first as? AVCaptureOutput else { return }
            strongSelf.captureSession.removeOutput(currentCaptureOutput)
            strongSelf.position = strongSelf.position == .front ? .back : .front
            strongSelf.configureSession()
        }
    }
    
    // MARK: AVSession configuration
    
    /// Reads the current camera authorization and, if undetermined, asks for it.
    private func checkPermission() {
        switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
        case .authorized:
            permissionGranted = true
        case .notDetermined:
            requestPermission()
        default:
            permissionGranted = false
        }
    }
    
    /// Suspends the session queue until the user answers the permission prompt,
    /// so configureSession() cannot run before `permissionGranted` is known.
    private func requestPermission() {
        sessionQueue.suspend()
        // FIX: was `[unowned self]` inside an escaping system callback — crash
        // risk if the extractor is gone when the user responds.
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { [weak self] granted in
            self?.permissionGranted = granted
            self?.sessionQueue.resume()
        }
    }
    
    /// Builds the session: selects the camera for `position`, wires input and
    /// video-data output, and fixes orientation/mirroring. Must run on sessionQueue.
    private func configureSession() {
        guard permissionGranted else { return }
        captureSession.sessionPreset = quality
        guard let captureDevice = selectCaptureDevice() else { return }
        guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        guard captureSession.canAddInput(captureDeviceInput) else { return }
        captureSession.addInput(captureDeviceInput)
        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "sample buffer"))
        guard captureSession.canAddOutput(videoOutput) else { return }
        captureSession.addOutput(videoOutput)
        guard let connection = videoOutput.connection(withMediaType: AVFoundation.AVMediaTypeVideo) else { return }
        guard connection.isVideoOrientationSupported else { return }
        guard connection.isVideoMirroringSupported else { return }
        connection.videoOrientation = .portrait
        // Mirror the preview only for the front camera, matching user expectation.
        connection.isVideoMirrored = position == .front
    }
    
    /// Returns the first video-capable device on the wanted side, or nil.
    private func selectCaptureDevice() -> AVCaptureDevice? {
        return AVCaptureDevice.devices().filter {
            ($0 as AnyObject).hasMediaType(AVMediaTypeVideo) &&
            ($0 as AnyObject).position == position
        }.first as? AVCaptureDevice
    }
    
    // MARK: Sample buffer to UIImage conversion
    
    /// Converts a raw sample buffer into a UIImage via CoreImage.
    /// Returns nil if the buffer carries no image data.
    private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let ciImage = CIImage(cvPixelBuffer: imageBuffer)
        guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return nil }
        return UIImage(cgImage: cgImage)
    }
    
    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate
    
    /// Called on the "sample buffer" queue for every captured frame; hops to the
    /// main queue before touching the delegate (which updates UI).
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        guard let uiImage = imageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
        // FIX: was `[unowned self]` — frames can still be in flight on the main
        // queue after the extractor is released.
        DispatchQueue.main.async { [weak self] in
            self?.delegate?.captured(image: uiImage)
        }
    }
}
109 |
110 |
--------------------------------------------------------------------------------
/face_detection/face_detection/classes/OpencvWrapper.h:
--------------------------------------------------------------------------------
1 | //
2 | // OpencvWrapper.h
3 | // face_detection
4 | //
5 | // Created by Soubhi Hadri on 3/3/18.
6 | // Copyright © 2018 hadri. All rights reserved.
7 | //
8 |
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
11 |
12 |
/// Thin Objective-C wrapper exposing OpenCV Haar-cascade face/eye detection
/// to Swift (via the bridging header).
@interface OpencvWrapper : NSObject
/// Runs face and eye detection on `source`, draws the detections, and returns
/// the annotated image. Returns `source` unchanged if the cascades fail to load.
+ (UIImage *)detect:(UIImage *)source;
@end
16 |
--------------------------------------------------------------------------------
/face_detection/face_detection/classes/OpencvWrapper.mm:
--------------------------------------------------------------------------------
1 | //
2 | // OpencvWrapper.m
3 | // face_detection
4 | //
5 | // Created by Soubhi Hadri on 3/3/18.
6 | // Copyright © 2018 hadri. All rights reserved.
7 | //
8 |
9 | #import "OpencvWrapper.h"
// NOTE(review): the bracketed header names were lost in extraction;
// reconstructed from usage (cv::CascadeClassifier, cvtColor, Haar flags) — verify against the repo.
#import <opencv2/opencv.hpp>
#import <opencv2/imgproc/imgproc.hpp>
#import <opencv2/objdetect/objdetect.hpp>
13 |
@implementation OpencvWrapper

// Cascade classifiers are loaded once and reused across calls.
// FIX: made `static` — they were project-wide globals with external linkage.
static cv::CascadeClassifier face_cascade;
static cv::CascadeClassifier eyes_cascade;
static bool cascade_loaded = false;

/// Loads both Haar cascades from the main bundle on first use.
/// Returns true when both classifiers are ready.
static bool LoadCascadesIfNeeded() {
    if (cascade_loaded) {
        return true;
    }
    std::cout<<"loading ..";
    NSString *eyes_cascade_name = [[NSBundle mainBundle] pathForResource:@"haarcascade_eye" ofType:@"xml"];
    NSString *face_cascade_name = [[NSBundle mainBundle] pathForResource:@"haarcascade_frontalface_default" ofType:@"xml"];
    // FIX: the original passed a possibly-nil path's UTF8String to std::string,
    // which is undefined behavior when a resource is missing from the bundle.
    if (eyes_cascade_name == nil || face_cascade_name == nil) { printf("--(!)Error loading\n"); return false; }
    if( !eyes_cascade.load( std::string([eyes_cascade_name UTF8String]) ) ){ printf("--(!)Error loading\n"); return false; }
    if( !face_cascade.load( std::string([face_cascade_name UTF8String]) ) ){ printf("--(!)Error loading\n"); return false; }
    cascade_loaded = true;
    return true;
}

/// Runs Haar-cascade face detection (and eye detection inside each face) on
/// `source`, draws an ellipse per face and a circle per eye, and returns the
/// annotated image. Returns `source` unchanged on any failure.
+ (UIImage *)detect:(UIImage *)source {
    ///1. Convert input UIImage to Mat
    CGImageRef sourceImage = CGImageCreateCopy(source.CGImage);
    if (sourceImage == NULL) {
        // No backing CGImage (e.g. a CIImage-based UIImage) — nothing to process.
        return source;
    }
    CGFloat cols = CGImageGetWidth(sourceImage);
    CGFloat rows = CGImageGetHeight(sourceImage);
    cv::Mat frame(rows, cols, CV_8UC4);

    // Draw the CGImage into the Mat's buffer as 8-bit/channel, alpha skipped
    // in the last byte — i.e. the Mat holds RGBA-ordered pixels.
    CGBitmapInfo bitmapFlags = kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault;
    size_t bitsPerComponent = 8;
    size_t bytesPerRow = frame.step[0];
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(sourceImage);  // borrowed ("Get") — must not be released

    CGContextRef context = CGBitmapContextCreate(frame.data, cols, rows, bitsPerComponent, bytesPerRow, colorSpace, bitmapFlags);
    if (context == NULL) {
        CGImageRelease(sourceImage);
        return source;
    }
    CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, cols, rows), sourceImage);
    CGContextRelease(context);
    // FIX: the original leaked this copy — it reused the same variable for the
    // output CGImage and only released that one.
    CGImageRelease(sourceImage);

    cv::Mat frame_gray;
    // FIX: the buffer was filled as RGBA above, but the original converted with
    // CV_BGR2GRAY, applying the red/blue luma weights to the wrong channels.
    cvtColor( frame, frame_gray, CV_RGBA2GRAY );
    equalizeHist( frame_gray, frame_gray );

    ///2. detection
    if (!LoadCascadesIfNeeded()) {
        return source;
    }
    std::vector<cv::Rect> faces;
    face_cascade.detectMultiScale(frame_gray, faces, 1.3, 5);
    for( size_t i = 0; i < faces.size(); i++ )
    {
        cv::Point center( faces[i].x + faces[i].width*0.5, faces[i].y + faces[i].height*0.5 );
        ellipse( frame, center, cv::Size( faces[i].width*0.5, faces[i].height*0.5), 0, 0, 360, cv::Scalar( 0, 100, 255 ), 4, 8, 0 );

        //-- In each face, detect eyes
        cv::Mat faceROI = frame_gray( faces[i] );
        std::vector<cv::Rect> eyes;
        eyes_cascade.detectMultiScale( faceROI, eyes, 1.1, 2, 0 |CV_HAAR_SCALE_IMAGE, cv::Size(30, 30) );

        for( size_t j = 0; j < eyes.size(); j++ )
        {
            // Eye rects are relative to the face ROI; offset back to frame coordinates.
            cv::Point eyeCenter( faces[i].x + eyes[j].x + eyes[j].width*0.5, faces[i].y + eyes[j].y + eyes[j].height*0.5 );
            int radius = cvRound( (eyes[j].width + eyes[j].height)*0.25 );
            circle( frame, eyeCenter, radius, cv::Scalar( 5, 255, 0 ), 2, 8, 0 );
        }
    }

    ///3. Convert Mat back to UIImage
    NSData *data = [NSData dataWithBytes:frame.data length:frame.elemSize() * frame.total()];
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // NOTE(review): alpha-none with a 4-channel mat matches OpenCV's own
    // MatToUIImage; kept as the original had it — confirm rendering on device.
    bitmapFlags = kCGImageAlphaNone | kCGBitmapByteOrderDefault;
    bitsPerComponent = 8;
    bytesPerRow = frame.step[0];
    // Created ("Create") — owned here and released below.
    CGColorSpaceRef outColorSpace = (frame.elemSize() == 1 ? CGColorSpaceCreateDeviceGray() : CGColorSpaceCreateDeviceRGB());

    CGImageRef outputImage = CGImageCreate(frame.cols, frame.rows, bitsPerComponent, bitsPerComponent * frame.elemSize(), bytesPerRow, outColorSpace, bitmapFlags, provider, NULL, false, kCGRenderingIntentDefault);
    UIImage *result = [UIImage imageWithCGImage:outputImage];

    CGImageRelease(outputImage);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(outColorSpace);

    return result;
}


@end
91 |
--------------------------------------------------------------------------------
/face_detection/face_detection/classes/PrefixHeader.pch:
--------------------------------------------------------------------------------
1 | //
2 | // PrefixHeader.pch
3 | // face_detection
4 | //
5 | // Created by Soubhi Hadri on 3/3/18.
6 | // Copyright © 2018 hadri. All rights reserved.
7 | //
8 |
9 | #ifndef PrefixHeader_pch
10 | #define PrefixHeader_pch
11 |
12 | // Include any system framework and library headers here that should be included in all compilation units.
13 | // You will also need to set the Prefix Header build setting of one or more of your targets to reference this file.
14 | #ifdef __cplusplus
#include <opencv2/opencv.hpp>
16 | #endif
17 | #endif /* PrefixHeader_pch */
18 |
--------------------------------------------------------------------------------
/face_detection/face_detection/classes/face_detection-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // Use this file to import your target's public headers that you would like to expose to Swift.
3 | //
4 |
5 |
6 | #import "OpencvWrapper.h"
7 |
--------------------------------------------------------------------------------
/screenshots/demo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SubhiH/Face-Detection-IOS/04f42e82da2236f7b91858944f979221c5922f0d/screenshots/demo.png
--------------------------------------------------------------------------------
/screenshots/demo2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SubhiH/Face-Detection-IOS/04f42e82da2236f7b91858944f979221c5922f0d/screenshots/demo2.jpg
--------------------------------------------------------------------------------