├── FaceRecognition
│   ├── Resources
│   │   ├── Images
│   │   │   └── Assets.xcassets
│   │   │       ├── Contents.json
│   │   │       ├── Close.imageset
│   │   │       │   ├── Contents.json
│   │   │       │   └── Close.pdf
│   │   │       └── AppIcon.appiconset
│   │   │           └── Contents.json
│   │   ├── Model
│   │   │   └── ImageClassifier.mlmodel
│   │   └── LaunchScreen
│   │       └── Base.lproj
│   │           └── LaunchScreen.storyboard
│   ├── Design
│   │   ├── Extensions
│   │   │   ├── CGFloat+Math.swift
│   │   │   ├── Error+FaceRecognition.swift
│   │   │   ├── UIViewController+Alert.swift
│   │   │   └── CIImage+Vision.swift
│   │   ├── Manager
│   │   │   ├── ModelManager.swift
│   │   │   └── PhotoManager.swift
│   │   └── View
│   │       └── CameraViewController.swift
│   ├── AppDelegate
│   │   └── AppDelegate.swift
│   ├── Supporting Files
│   │   └── Info.plist
│   └── Modules
│       ├── Home
│       │   └── HomeViewController.swift
│       ├── TakePictures
│       │   └── TakePicturesViewController.swift
│       └── FaceTracking
│           └── FaceTrackingController.swift
├── Gemfile
├── FaceRecognition.xcodeproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcshareddata
│   │       └── IDEWorkspaceChecks.plist
│   └── project.pbxproj
├── FaceRecognition.xcworkspace
│   ├── contents.xcworkspacedata
│   └── xcshareddata
│       └── IDEWorkspaceChecks.plist
├── Podfile.lock
├── Podfile
├── FaceRecognitionTests
│   └── Supporting Files
│       └── Info.plist
├── FaceRecognitionUITests
│   └── Supporting FIles
│       └── Info.plist
├── README.md
├── LICENSE
├── Gemfile.lock
└── .gitignore
/FaceRecognition/Resources/Images/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/FaceRecognition/Resources/Model/ImageClassifier.mlmodel:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nimblehq/face-recognition-ios/HEAD/FaceRecognition/Resources/Model/ImageClassifier.mlmodel
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
1 | # frozen_string_literal: true
2 |
3 | source "https://rubygems.org"
4 |
5 | git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }
6 |
7 | # gem "rails"
8 | gem "cocoapods"
--------------------------------------------------------------------------------
/FaceRecognition.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 | version = "1.0">
4 | <FileRef
5 | location = "self:">
6 | </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/FaceRecognition/Resources/Images/Assets.xcassets/Close.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "Close.pdf"
6 | }
7 | ],
8 | "info" : {
9 | "version" : 1,
10 | "author" : "xcode"
11 | }
12 | }
--------------------------------------------------------------------------------
/FaceRecognition.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 | version = "1.0">
4 | <FileRef
5 | location = "group:FaceRecognition.xcodeproj">
6 | </FileRef>
7 | <FileRef
8 | location = "group:Pods/Pods.xcodeproj">
9 | </FileRef>
10 | </Workspace>
11 |
--------------------------------------------------------------------------------
/FaceRecognition.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDEDidComputeMac32BitWarning</key>
6 | <true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/FaceRecognition.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>IDEDidComputeMac32BitWarning</key>
6 | <true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/Podfile.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - SnapKit (5.0.1)
3 |
4 | DEPENDENCIES:
5 | - SnapKit
6 |
7 | SPEC REPOS:
8 | https://github.com/cocoapods/specs.git:
9 | - SnapKit
10 |
11 | SPEC CHECKSUMS:
12 | SnapKit: 97b92857e3df3a0c71833cce143274bf6ef8e5eb
13 |
14 | PODFILE CHECKSUM: 5f4a8856c8187ee6127b01bebc163cffd7089071
15 |
16 | COCOAPODS: 1.7.5
17 |
--------------------------------------------------------------------------------
/FaceRecognition/Design/Extensions/CGFloat+Math.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CGFloat+Math.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 22/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | extension CGFloat {
12 | var radians: CGFloat {
13 | return CGFloat(Double(self) * Double.pi / 180.0)
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/FaceRecognition/Design/Extensions/Error+FaceRecognition.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Error+FaceRecognition.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 22/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import Foundation
10 |
11 | enum AppError: Error {
12 | case setUpSession
13 | case frontCamera
14 | case photoManager
15 | case takePicture
16 | }
17 |
--------------------------------------------------------------------------------
/Podfile:
--------------------------------------------------------------------------------
1 | # Uncomment the next line to define a global platform for your project
2 | # platform :ios, '9.0'
3 |
4 | target 'FaceRecognition' do
5 | # Comment the next line if you don't want to use dynamic frameworks
6 | use_frameworks!
7 |
8 | # Pods for FaceRecognition
9 | pod 'SnapKit'
10 |
11 | target 'FaceRecognitionTests' do
12 | inherit! :search_paths
13 | # Pods for testing
14 | end
15 |
16 | target 'FaceRecognitionUITests' do
17 | inherit! :search_paths
18 | # Pods for testing
19 | end
20 |
21 | end
22 |
--------------------------------------------------------------------------------
/FaceRecognition/Design/Extensions/UIViewController+Alert.swift:
--------------------------------------------------------------------------------
1 | //
2 | // UIViewController+Alert.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 22/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | extension UIViewController {
12 | func showError(_ error: Error) {
13 | let alert = UIAlertController(title: "face-recognition", message: error.localizedDescription, preferredStyle: .alert)
14 | alert.addAction(UIAlertAction(title: "OK", style: .cancel, handler: nil))
15 | present(alert, animated: true, completion: nil)
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/FaceRecognition/AppDelegate/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 21/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import SnapKit
11 |
12 | @UIApplicationMain
13 | class AppDelegate: UIResponder, UIApplicationDelegate {
14 |
15 | var window: UIWindow?
16 |
17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
18 | let window = UIWindow(frame: UIScreen.main.bounds)
19 | window.makeKeyAndVisible()
20 | window.rootViewController = UINavigationController(rootViewController: HomeViewController())
21 | self.window = window
22 | return true
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/FaceRecognitionTests/Supporting Files/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>$(DEVELOPMENT_LANGUAGE)</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>BNDL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleVersion</key>
20 | <string>1</string>
21 | </dict>
22 | </plist>
23 |
--------------------------------------------------------------------------------
/FaceRecognitionUITests/Supporting FIles/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>$(DEVELOPMENT_LANGUAGE)</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>BNDL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleVersion</key>
20 | <string>1</string>
21 | </dict>
22 | </plist>
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Face Recognition App iOS
2 |
3 | Create a Face Recognition iOS app that can detect a person's face and show their name on the screen. The goal of this project is to learn how to implement a machine learning app using Apple's frameworks.
4 |
5 | ## Technologies
6 |
7 | - Create ML
8 | - Core ML
9 | - Vision
10 |
11 | ## License
12 |
13 | This project is Copyright (c) 2014-2019 Nimble. It is free software,
14 | and may be redistributed under the terms specified in the [LICENSE] file.
15 |
16 | [LICENSE]: /LICENSE
17 |
18 | ## About
19 |
20 | 
21 |
22 | This project is maintained and funded by Nimble.
23 |
24 | We love open source and do our part in sharing our work with the community!
25 | See [our other projects][community] or [hire our team][hire] to help build your product.
26 |
27 | [community]: https://github.com/nimblehq
28 | [hire]: https://nimblehq.co/
29 |
--------------------------------------------------------------------------------
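
The technologies listed in the README above map onto a small pipeline: Create ML produces `ImageClassifier.mlmodel` offline, Core ML hosts it on device, and Vision feeds it images. A minimal sketch of that flow, assuming the auto-generated `ImageClassifier` class from the bundled model; `classify(_:completion:)` is illustrative and not part of the repo:

```swift
import UIKit
import Vision

// Sketch: run the bundled Core ML classifier over a single image and
// report the top label. Error handling is reduced to returning nil.
func classify(_ image: UIImage, completion: @escaping (String?) -> Void) {
    guard let ciImage = CIImage(image: image),
        let model = try? VNCoreMLModel(for: ImageClassifier().model) else {
            completion(nil)
            return
    }
    let request = VNCoreMLRequest(model: model) { request, _ in
        // An image classifier yields VNClassificationObservation results,
        // sorted by confidence; take the best one.
        let top = (request.results as? [VNClassificationObservation])?.first
        completion(top.map { "\($0.identifier) (\($0.confidence))" })
    }
    request.imageCropAndScaleOption = .scaleFit
    // In real code, perform the request off the main thread.
    try? VNImageRequestHandler(ciImage: ciImage, options: [:]).perform([request])
}
```
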
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Nimble
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/FaceRecognition/Design/Extensions/CIImage+Vision.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CIImage+Vision.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 22/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import CoreImage
11 | import Vision
12 |
13 | extension CIImage {
14 | func toUIImage() -> UIImage? {
15 | let context = CIContext(options: nil)
16 | guard let cgImage: CGImage = context.createCGImage(self, from: self.extent) else { return nil }
17 | let image: UIImage = UIImage(cgImage: cgImage)
18 | return image
19 | }
20 |
21 | /// Crops to a face's bounding box. Vision reports the box in normalized
22 | /// coordinates, so it is scaled back to this image's pixel extent; the height
23 | /// is stretched by a third to keep some context around the face.
24 | func cropped(toFace face: VNFaceObservation) -> CIImage {
25 | let width = face.boundingBox.width * extent.size.width
26 | let height = face.boundingBox.height * extent.size.height * 4 / 3
27 | let x = face.boundingBox.origin.x * extent.size.width
28 | let y = face.boundingBox.origin.y * extent.size.height
29 | return cropped(to: CGRect(x: x, y: y, width: width, height: height))
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/FaceRecognition/Design/Manager/ModelManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelManager.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 23/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import Foundation
10 | import CoreML
11 | import Vision
12 |
13 | final class ModelManager {
14 |
15 | static let shared = ModelManager()
16 |
17 | let model: VNCoreMLModel? = try? VNCoreMLModel(for: ImageClassifier().model)
18 |
19 | /// Builds a Vision request that invokes `completion` with the top classification
20 | /// whenever its confidence reaches 0.7; lower-confidence results are only logged.
21 | func request(completion: @escaping (String) -> Void) -> VNCoreMLRequest {
22 | guard let model = self.model else { fatalError("Unable to create VNCoreMLModel for ImageClassifier") }
23 | return VNCoreMLRequest(model: model, completionHandler: { (request, error) in
24 | if let error = error {
25 | print(error.localizedDescription)
26 | return
27 | }
28 | guard let classifications = request.results as? [VNClassificationObservation],
29 | let observation = classifications.first else {
30 | return
31 | }
32 | if observation.confidence >= 0.7 {
33 | completion("\(observation.identifier) - confidence: \(observation.confidence)")
34 | } else {
35 | print("\(observation.identifier) - confidence: \(observation.confidence)")
36 | }
37 | })
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
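
ModelManager only builds the `VNCoreMLRequest`; a caller still has to execute it through a Vision request handler, as `FaceTrackingController` does for each cropped face. A hypothetical standalone usage sketch (`recognize(in:)` is not part of the repo):

```swift
import Vision

// Sketch: classify one CGImage with the shared request. The completion
// only fires when ModelManager's confidence threshold (0.7) is met.
func recognize(in cgImage: CGImage) {
    let request = ModelManager.shared.request { label in
        print("Recognized: \(label)")
    }
    request.imageCropAndScaleOption = .scaleFit
    do {
        try VNImageRequestHandler(cgImage: cgImage, options: [:]).perform([request])
    } catch {
        print("Classification failed: \(error.localizedDescription)")
    }
}
```
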
/FaceRecognition/Resources/LaunchScreen/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
[storyboard XML stripped during extraction — the file defines the app's single-scene launch screen]
--------------------------------------------------------------------------------
/FaceRecognition/Supporting Files/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>$(DEVELOPMENT_LANGUAGE)</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>APPL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleVersion</key>
20 | <string>1</string>
21 | <key>LSRequiresIPhoneOS</key>
22 | <true/>
23 | <key>NSCameraUsageDescription</key>
24 | <string>This app requires a feed from the selfie cam, so it can track your face.</string>
25 | <key>NSPhotoLibraryAddUsageDescription</key>
26 | <string>This app requires a feed from the selfie cam, so it can track your face.</string>
27 | <key>NSPhotoLibraryUsageDescription</key>
28 | <string>This app requires a feed from the selfie cam, so it can track your face.</string>
29 | <key>UILaunchStoryboardName</key>
30 | <string>LaunchScreen</string>
31 | <key>UIRequiredDeviceCapabilities</key>
32 | <array>
33 | <string>armv7</string>
34 | </array>
35 | <key>UISupportedInterfaceOrientations</key>
36 | <array>
37 | <string>UIInterfaceOrientationPortrait</string>
38 | </array>
39 | <key>UISupportedInterfaceOrientations~ipad</key>
40 | <array>
41 | <string>UIInterfaceOrientationPortrait</string>
42 | </array>
43 | <key>UIRequiresFullScreen</key>
44 | <true/>
45 | <key>UIRequiresFullScreen~ipad</key>
46 | <true/>
47 | </dict>
48 | </plist>
49 |
--------------------------------------------------------------------------------
/FaceRecognition/Resources/Images/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/Gemfile.lock:
--------------------------------------------------------------------------------
1 | GEM
2 | remote: https://rubygems.org/
3 | specs:
4 | CFPropertyList (3.0.0)
5 | activesupport (4.2.11.1)
6 | i18n (~> 0.7)
7 | minitest (~> 5.1)
8 | thread_safe (~> 0.3, >= 0.3.4)
9 | tzinfo (~> 1.1)
10 | atomos (0.1.3)
11 | claide (1.0.3)
12 | cocoapods (1.7.5)
13 | activesupport (>= 4.0.2, < 5)
14 | claide (>= 1.0.2, < 2.0)
15 | cocoapods-core (= 1.7.5)
16 | cocoapods-deintegrate (>= 1.0.3, < 2.0)
17 | cocoapods-downloader (>= 1.2.2, < 2.0)
18 | cocoapods-plugins (>= 1.0.0, < 2.0)
19 | cocoapods-search (>= 1.0.0, < 2.0)
20 | cocoapods-stats (>= 1.0.0, < 2.0)
21 | cocoapods-trunk (>= 1.3.1, < 2.0)
22 | cocoapods-try (>= 1.1.0, < 2.0)
23 | colored2 (~> 3.1)
24 | escape (~> 0.0.4)
25 | fourflusher (>= 2.3.0, < 3.0)
26 | gh_inspector (~> 1.0)
27 | molinillo (~> 0.6.6)
28 | nap (~> 1.0)
29 | ruby-macho (~> 1.4)
30 | xcodeproj (>= 1.10.0, < 2.0)
31 | cocoapods-core (1.7.5)
32 | activesupport (>= 4.0.2, < 6)
33 | fuzzy_match (~> 2.0.4)
34 | nap (~> 1.0)
35 | cocoapods-deintegrate (1.0.4)
36 | cocoapods-downloader (1.2.2)
37 | cocoapods-plugins (1.0.0)
38 | nap
39 | cocoapods-search (1.0.0)
40 | cocoapods-stats (1.1.0)
41 | cocoapods-trunk (1.3.1)
42 | nap (>= 0.8, < 2.0)
43 | netrc (~> 0.11)
44 | cocoapods-try (1.1.0)
45 | colored2 (3.1.2)
46 | concurrent-ruby (1.1.5)
47 | escape (0.0.4)
48 | fourflusher (2.3.1)
49 | fuzzy_match (2.0.4)
50 | gh_inspector (1.1.3)
51 | i18n (0.9.5)
52 | concurrent-ruby (~> 1.0)
53 | minitest (5.11.3)
54 | molinillo (0.6.6)
55 | nanaimo (0.2.6)
56 | nap (1.1.0)
57 | netrc (0.11.0)
58 | ruby-macho (1.4.0)
59 | thread_safe (0.3.6)
60 | tzinfo (1.2.5)
61 | thread_safe (~> 0.1)
62 | xcodeproj (1.12.0)
63 | CFPropertyList (>= 2.3.3, < 4.0)
64 | atomos (~> 0.1.3)
65 | claide (>= 1.0.2, < 2.0)
66 | colored2 (~> 3.1)
67 | nanaimo (~> 0.2.6)
68 |
69 | PLATFORMS
70 | ruby
71 |
72 | DEPENDENCIES
73 | cocoapods
74 |
75 | BUNDLED WITH
76 | 2.0.2
77 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by https://www.gitignore.io/api/swift
2 | # Edit at https://www.gitignore.io/?templates=swift
3 |
4 | ### Swift ###
5 | # Xcode
6 | #
7 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
8 |
9 | ## Build generated
10 | build/
11 | DerivedData/
12 |
13 | ## Various settings
14 | *.pbxuser
15 | !default.pbxuser
16 | *.mode1v3
17 | !default.mode1v3
18 | *.mode2v3
19 | !default.mode2v3
20 | *.perspectivev3
21 | !default.perspectivev3
22 | xcuserdata/
23 |
24 | ## Other
25 | *.moved-aside
26 | *.xccheckout
27 | *.xcscmblueprint
28 |
29 | ## Obj-C/Swift specific
30 | *.hmap
31 | *.ipa
32 | *.dSYM.zip
33 | *.dSYM
34 |
35 | ## Playgrounds
36 | timeline.xctimeline
37 | playground.xcworkspace
38 |
39 | # Swift Package Manager
40 | # Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
41 | # Packages/
42 | # Package.pins
43 | # Package.resolved
44 | .build/
45 |
46 | # CocoaPods
47 | # We recommend against adding the Pods directory to your .gitignore. However
48 | # you should judge for yourself, the pros and cons are mentioned at:
49 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
50 | Pods/
51 |
52 | # Add this line if you want to avoid checking in source code from the Xcode workspace
53 | # *.xcworkspace
54 |
55 | # Carthage
56 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
57 | # Carthage/Checkouts
58 |
59 | Carthage/Build
60 |
61 | # Accio dependency management
62 | Dependencies/
63 | .accio/
64 |
65 | # fastlane
66 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
67 | # screenshots whenever they are needed.
68 | # For more information about the recommended setup visit:
69 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
70 |
71 | fastlane/report.xml
72 | fastlane/Preview.html
73 | fastlane/screenshots/**/*.png
74 | fastlane/test_output
75 |
76 | # Code Injection
77 | # After new code Injection tools there's a generated folder /iOSInjectionProject
78 | # https://github.com/johnno1962/injectionforxcode
79 |
80 | iOSInjectionProject/
81 |
82 | # End of https://www.gitignore.io/api/swift
--------------------------------------------------------------------------------
/FaceRecognition/Resources/Images/Assets.xcassets/Close.imageset/Close.pdf:
--------------------------------------------------------------------------------
1 | %PDF-1.7
2 |
3 | 1 0 obj
4 | << >>
5 | endobj
6 |
7 | 2 0 obj
8 | << /Length 3 0 R >>
9 | stream
10 | /DeviceRGB CS
11 | /DeviceRGB cs
12 | q
13 | 1.000000 0.000000 -0.000000 1.000000 7.000000 3.804688 cm
14 | 0.960000 0.960000 0.960000 scn
15 | 14.292893 1.488206 m
16 | 14.683417 1.097681 15.316583 1.097681 15.707107 1.488206 c
17 | 16.097631 1.878730 16.097631 2.511895 15.707107 2.902419 c
18 | 14.292893 1.488206 l
19 | h
20 | 15.707107 16.488205 m
21 | 16.097631 16.878731 16.097631 17.511894 15.707107 17.902420 c
22 | 15.316583 18.292944 14.683417 18.292944 14.292893 17.902420 c
23 | 15.707107 16.488205 l
24 | h
25 | -0.707107 2.902419 m
26 | -1.097631 2.511895 -1.097631 1.878730 -0.707107 1.488206 c
27 | -0.316582 1.097681 0.316583 1.097681 0.707107 1.488206 c
28 | -0.707107 2.902419 l
29 | h
30 | 0.707107 17.902420 m
31 | 0.316582 18.292944 -0.316583 18.292944 -0.707107 17.902420 c
32 | -1.097631 17.511894 -1.097631 16.878731 -0.707107 16.488205 c
33 | 0.707107 17.902420 l
34 | h
35 | 15.707107 2.902419 m
36 | 8.207107 10.402421 l
37 | 6.792893 8.988208 l
38 | 14.292893 1.488206 l
39 | 15.707107 2.902419 l
40 | h
41 | 14.292893 17.902420 m
42 | 6.792893 10.402421 l
43 | 8.207107 8.988208 l
44 | 15.707107 16.488205 l
45 | 14.292893 17.902420 l
46 | h
47 | 0.707107 1.488206 m
48 | 8.207107 8.988208 l
49 | 6.792893 10.402421 l
50 | -0.707107 2.902419 l
51 | 0.707107 1.488206 l
52 | h
53 | -0.707107 16.488205 m
54 | 6.792893 8.988208 l
55 | 8.207107 10.402421 l
56 | 0.707107 17.902420 l
57 | -0.707107 16.488205 l
58 | h
59 | f
60 | n
61 | Q
62 |
63 | endstream
64 | endobj
65 |
66 | 3 0 obj
67 | 1211
68 | endobj
69 |
70 | 4 0 obj
71 | << /MediaBox [ 0.000000 0.000000 28.000000 28.000000 ]
72 | /Resources 1 0 R
73 | /Contents 2 0 R
74 | /Parent 5 0 R
75 | /Type /Page
76 | >>
77 | endobj
78 |
79 | 5 0 obj
80 | << /Kids [ 4 0 R ]
81 | /Count 1
82 | /Type /Pages
83 | >>
84 | endobj
85 |
86 | 6 0 obj
87 | << /Type /Catalog
88 | /Pages 5 0 R
89 | >>
90 | endobj
91 |
92 | xref
93 | 0 7
94 | 0000000000 65535 f
95 | 0000000010 00000 n
96 | 0000000034 00000 n
97 | 0000001301 00000 n
98 | 0000001324 00000 n
99 | 0000001481 00000 n
100 | 0000001555 00000 n
101 | trailer
102 | << /ID [ (some) (id) ]
103 | /Root 6 0 R
104 | /Size 7
105 | >>
106 | startxref
107 | 1614
108 | %%EOF
--------------------------------------------------------------------------------
/FaceRecognition/Modules/Home/HomeViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HomeViewController.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 21/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | final class HomeViewController: UIViewController {
12 |
13 | let tableView = UITableView()
14 |
15 | override func viewDidLoad() {
16 | super.viewDidLoad()
17 | setUpLayouts()
18 | setUpViews()
19 | }
20 | }
21 |
22 | // MARK: - HomeViewInput
23 | extension HomeViewController {
24 | private func setUpLayouts() {
25 | view.addSubview(tableView)
26 | tableView.snp.makeConstraints { $0.edges.equalToSuperview() }
27 | }
28 |
29 | private func setUpViews() {
30 | navigationItem.title = "Face Recognition"
31 | view.backgroundColor = .white
32 | setUpTableView()
33 | }
34 |
35 | private func setUpTableView() {
36 | tableView.register(UITableViewCell.self, forCellReuseIdentifier: String(describing: UITableViewCell.self))
37 | tableView.delegate = self
38 | tableView.dataSource = self
39 | tableView.tableFooterView = UIView()
40 | }
41 | }
42 |
43 | // MARK: - Config
44 | extension HomeViewController {
45 | enum Row: Int, CaseIterable {
46 | case camera
47 | case recognize
48 |
49 | var title: String {
50 | switch self {
51 | case .camera:
52 | return "Take pictures for training model"
53 | case .recognize:
54 | return "Face Recognition"
55 | }
56 | }
57 | }
58 | }
59 |
60 | // MARK: - UITableViewDataSource
61 | extension HomeViewController: UITableViewDataSource {
62 | func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
63 | return Row.allCases.count
64 | }
65 |
66 | func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
67 | let cell = tableView.dequeueReusableCell(withIdentifier: String(describing: UITableViewCell.self), for: indexPath)
68 | let item = Row(rawValue: indexPath.row)
69 | cell.textLabel?.text = item?.title
70 | cell.accessoryType = .disclosureIndicator
71 | cell.selectionStyle = .none
72 | return cell
73 | }
74 | }
75 |
76 | // MARK: - UITableViewDelegate
77 | extension HomeViewController: UITableViewDelegate {
78 | func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
79 | guard let item = Row(rawValue: indexPath.row) else { return }
80 | switch item {
81 | case .camera:
82 | navigationController?.pushViewController(TakePicturesViewController(), animated: true)
83 | case .recognize:
84 | navigationController?.pushViewController(FaceTrackingController(), animated: true)
85 | }
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/FaceRecognition/Design/Manager/PhotoManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PhotoManager.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 22/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import Photos
10 |
11 | final class PhotoManager {
12 | static let albumName = "co.nimblehq.growth.FaceRecognition.album"
13 | static let shared = PhotoManager()
14 |
15 | // Number of images saved in this session; save(image:) caps it at 10.
16 | private var count = 0
17 |
18 | private var assetCollection: PHAssetCollection?
19 |
20 | init() {
21 | assetCollection = fetchAssetCollectionForAlbum()
22 | }
23 |
24 | private func authorizationWithHandler(completion: @escaping ((Bool) -> Void)) {
25 | switch PHPhotoLibrary.authorizationStatus() {
26 | case .notDetermined:
27 | PHPhotoLibrary.requestAuthorization { (_) in
28 | self.authorizationWithHandler(completion: completion)
29 | }
30 | case .authorized:
31 | createAlbumIfNeeded(completion: completion)
32 | default:
33 | completion(false)
34 | }
35 | }
36 |
37 | private func createAlbumIfNeeded(completion: @escaping ((Bool) -> Void)) {
38 | if let collection = fetchAssetCollectionForAlbum() {
39 | assetCollection = collection
40 | completion(true)
41 | } else {
42 | PHPhotoLibrary.shared().performChanges({
43 | PHAssetCollectionChangeRequest.creationRequestForAssetCollection(withTitle: PhotoManager.albumName)
44 | }, completionHandler: { (success, error) in
45 | if success {
46 | self.assetCollection = self.fetchAssetCollectionForAlbum()
47 | } else {
48 | fatalError("Unable to create album")
49 | }
50 | completion(success)
51 | })
52 | }
53 | }
54 |
55 | private func fetchAssetCollectionForAlbum() -> PHAssetCollection? {
56 | let options = PHFetchOptions()
57 | options.predicate = NSPredicate(format: "title = %@", PhotoManager.albumName)
58 | let collection = PHAssetCollection.fetchAssetCollections(with: .album, subtype: .any, options: options)
59 | return collection.firstObject
60 | }
61 |
62 | func save(image: UIImage) {
63 | if count >= 10 { return }
64 | authorizationWithHandler { (success) in
65 | guard success, let collection = self.assetCollection else {
66 | fatalError("Unable to save image: permission denied or album missing")
67 | }
68 |
69 | PHPhotoLibrary.shared().performChanges({
70 | let assetChangeRequest = PHAssetChangeRequest.creationRequestForAsset(from: image)
71 | guard let assetPlaceHolder = assetChangeRequest.placeholderForCreatedAsset,
72 | let albumChangeRequest = PHAssetCollectionChangeRequest(for: collection) else {
73 | fatalError("PHAsset Error")
74 | }
75 | let enumeration: NSArray = [assetPlaceHolder]
76 | albumChangeRequest.addAssets(enumeration)
77 | }, completionHandler: { (success, error) in
78 | if success {
79 | print("Saved Image")
80 | self.count += 1
81 | } else {
82 | fatalError("Unable to save image")
83 | }
84 | })
85 | }
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
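
PhotoManager funnels every saved face into a dedicated album, prompting for photo-library access on first use (backed by the `NSPhotoLibrary*` strings in Info.plist) and stopping after 10 images per launch. A hypothetical caller sketch (`storeTrainingSample(_:)` is not part of the repo):

```swift
import UIKit

// Sketch: persist a cropped face for later Create ML training. Note that
// save(image:) returns silently once 10 images have been saved, and calls
// fatalError if library access is denied — fine for a demo, not production.
func storeTrainingSample(_ face: UIImage) {
    PhotoManager.shared.save(image: face)
}
```
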
/FaceRecognition/Modules/TakePictures/TakePicturesViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TakePicturesViewController.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 22/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVKit
11 | import Vision
12 |
13 | final class TakePicturesViewController: CameraViewController {
14 |
15 | private var detectFaceRequests: [VNDetectFaceRectanglesRequest]?
16 | private lazy var sequenceRequestHandler = VNSequenceRequestHandler()
17 | private lazy var cacheRequests: [VNTrackObjectRequest] = []
18 |
19 | var detectionOverlayLayer: CALayer?
20 | var currentImage: CIImage?
21 |
22 | override func setupBeforeSessionRunning() {
23 | super.setupBeforeSessionRunning()
24 | prepareVisionRequest()
25 | }
26 | }
27 |
28 | // MARK: - Private Functions
29 | extension TakePicturesViewController {
30 | private func prepareVisionRequest() {
31 | let detectFaceRequest: VNDetectFaceRectanglesRequest = VNDetectFaceRectanglesRequest { (request, error) in
32 | if error != nil {
33 | fatalError("FaceDetection error: \(error.debugDescription)")
34 | }
35 | guard let request = request as? VNDetectFaceRectanglesRequest,
36 | let results = request.results as? [VNFaceObservation] else {
37 | return
38 | }
39 | DispatchQueue.main.async {
40 | self.updateRequests(with: results)
41 | self.detectionOverlayLayer?.sublayers = nil
42 | results.forEach({ self.drawFace(observation: $0) })
43 | }
44 | }
45 | detectFaceRequests = [detectFaceRequest]
46 | setupVisionDrawingLayers()
47 | }
48 |
49 | private func updateRequests(with observations: [VNFaceObservation]) {
50 | cacheRequests = observations.map({ VNTrackObjectRequest(detectedObjectObservation: $0) })
51 | }
52 |
53 | private func drawFace(observation: VNFaceObservation) {
54 | CATransaction.begin()
55 | CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
56 | let faceLayer = faceRectangleLayer()
57 | let faceRectanglePath = CGMutablePath()
58 | let displaySize = captureDeviceResolution
59 | let faceBounds = VNImageRectForNormalizedRect(observation.boundingBox, Int(displaySize.width), Int(displaySize.height))
60 | faceRectanglePath.addRect(faceBounds)
61 | faceLayer.path = faceRectanglePath
62 | detectionOverlayLayer?.addSublayer(faceLayer)
63 | if let image = currentImage?
64 | .oriented(exifOrientation)
65 | .cropped(toFace: observation)
66 | .toUIImage() {
67 | PhotoManager.shared.save(image: image)
68 | }
69 | updateLayerGeometry()
70 | CATransaction.commit()
71 | }
72 |
73 | private func setupVisionDrawingLayers() {
74 | let resolution = captureDeviceResolution
75 | let captureDeviceBounds = CGRect(x: 0,
76 | y: 0,
77 | width: resolution.width,
78 | height: resolution.height)
79 | let normalizedCenterPoint = CGPoint(x: 0.5, y: 0.5)
80 | guard let rootLayer = self.rootLayer else {
81 | fatalError(AppError.takePicture.localizedDescription)
82 | }
83 | let overlayLayer = CALayer()
84 | overlayLayer.name = "DetectionOverlay"
85 | overlayLayer.masksToBounds = true
86 | overlayLayer.anchorPoint = normalizedCenterPoint
87 | overlayLayer.bounds = captureDeviceBounds
88 | overlayLayer.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY)
89 |
90 | rootLayer.addSublayer(overlayLayer)
91 | detectionOverlayLayer = overlayLayer
92 |
93 | updateLayerGeometry()
94 | }
95 |
96 | private func faceRectangleLayer() -> CAShapeLayer {
97 | let resolution = captureDeviceResolution
98 | let captureDeviceBounds = CGRect(x: 0,
99 | y: 0,
100 | width: resolution.width,
101 | height: resolution.height)
102 | let deviceCenterPoint = CGPoint(x: captureDeviceBounds.midX,
103 | y: captureDeviceBounds.midY)
104 | let normalizedCenterPoint = CGPoint(x: 0.5, y: 0.5)
105 | let faceRectangleShapeLayer = CAShapeLayer()
106 | faceRectangleShapeLayer.name = "FaceRectangleLayer"
107 | faceRectangleShapeLayer.bounds = captureDeviceBounds
108 | faceRectangleShapeLayer.anchorPoint = normalizedCenterPoint
109 | faceRectangleShapeLayer.position = deviceCenterPoint
110 | faceRectangleShapeLayer.fillColor = nil
111 | faceRectangleShapeLayer.strokeColor = UIColor.yellow.withAlphaComponent(0.7).cgColor
112 | faceRectangleShapeLayer.lineWidth = 5
113 | faceRectangleShapeLayer.shadowOpacity = 0.7
114 | faceRectangleShapeLayer.shadowRadius = 5
115 | return faceRectangleShapeLayer
116 | }
117 |
118 | private func updateLayerGeometry() {
119 | guard let overlayLayer = self.detectionOverlayLayer,
120 | let rootLayer = self.rootLayer,
121 | let previewLayer = self.previewLayer
122 | else {
123 | return
124 | }
125 | CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
126 | let videoPreviewRect = previewLayer.layerRectConverted(fromMetadataOutputRect: CGRect(x: 0, y: 0, width: 1, height: 1))
127 | var rotation: CGFloat
128 | var scaleX: CGFloat
129 | var scaleY: CGFloat
130 | switch UIDevice.current.orientation {
131 | case .portraitUpsideDown:
132 | rotation = 180
133 | scaleX = videoPreviewRect.width / captureDeviceResolution.width
134 | scaleY = videoPreviewRect.height / captureDeviceResolution.height
135 |
136 | case .landscapeLeft:
137 | rotation = 90
138 | scaleX = videoPreviewRect.height / captureDeviceResolution.width
139 | scaleY = scaleX
140 |
141 | case .landscapeRight:
142 | rotation = -90
143 | scaleX = videoPreviewRect.height / captureDeviceResolution.width
144 | scaleY = scaleX
145 | default:
146 | rotation = 0
147 | scaleX = videoPreviewRect.width / captureDeviceResolution.width
148 | scaleY = videoPreviewRect.height / captureDeviceResolution.height
149 | }
150 | var scaleXForPosition: CGFloat
151 | if position == .back {
152 | scaleXForPosition = -scaleX
153 | } else {
154 | scaleXForPosition = scaleX
155 | }
156 | let affineTransform = CGAffineTransform(rotationAngle: rotation.radians)
157 | .scaledBy(x: scaleXForPosition, y: -scaleY)
158 | overlayLayer.setAffineTransform(affineTransform)
159 | let rootLayerBounds = rootLayer.bounds
160 | overlayLayer.position = CGPoint(x: rootLayerBounds.midX, y: rootLayerBounds.midY)
161 | }
162 | }
163 |
164 | extension TakePicturesViewController {
165 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
166 | var options: [VNImageOption: Any] = [:]
167 | let cameraIntrinsicData = CMGetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, attachmentModeOut: nil)
168 | if cameraIntrinsicData != nil {
169 | options[VNImageOption.cameraIntrinsics] = cameraIntrinsicData
170 | }
171 | guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
172 | fatalError("Failed to obtain a CVPixelBuffer for the current output frame.")
173 | }
174 | let eOrientation = exifOrientation
175 | if cacheRequests.isEmpty {
176 | let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: eOrientation, options: options)
177 | currentImage = CIImage(cvPixelBuffer: pixelBuffer)
178 | do {
179 | guard let detectRequest = detectFaceRequests else { return }
180 | try imageRequestHandler.perform(detectRequest)
181 | return
182 | } catch {
183 | fatalError(error.localizedDescription)
184 | }
185 | }
186 | do {
187 | try sequenceRequestHandler.perform(cacheRequests, on: pixelBuffer, orientation: eOrientation)
188 | } catch {
189 | fatalError(error.localizedDescription)
190 | }
191 |
192 | var newCacheRequests: [VNTrackObjectRequest] = []
193 | for request in cacheRequests {
194 | guard let results = request.results as? [VNDetectedObjectObservation], let observation = results.first else {
195 | return
196 | }
197 | if !request.isLastFrame {
198 | if observation.confidence > 0.5 {
199 | request.inputObservation = observation
200 | } else {
201 | request.isLastFrame = true
202 | }
203 | newCacheRequests.append(request)
204 | }
205 | request.isLastFrame = true
206 | }
207 | cacheRequests = newCacheRequests
208 | }
209 | }
210 |
--------------------------------------------------------------------------------
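
The `captureOutput` override above interleaves two Vision code paths: a comparatively expensive `VNDetectFaceRectanglesRequest` when nothing is being tracked, and cheap `VNTrackObjectRequest`s on subsequent frames. A condensed, AVFoundation-free sketch of that handoff; `FaceStream` is hypothetical and simplifies the cache bookkeeping used in the controller:

```swift
import Vision

// Sketch: feed one CVPixelBuffer per camera frame; detection seeds the
// trackers, tracking carries them forward while confidence stays high.
final class FaceStream {
    private let sequenceHandler = VNSequenceRequestHandler()
    private var trackers: [VNTrackObjectRequest] = []

    func process(_ pixelBuffer: CVPixelBuffer, orientation: CGImagePropertyOrientation) throws {
        if trackers.isEmpty {
            // No faces yet: run full face detection on this frame.
            let detect = VNDetectFaceRectanglesRequest()
            try VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: orientation, options: [:])
                .perform([detect])
            let faces = detect.results as? [VNFaceObservation] ?? []
            trackers = faces.map { VNTrackObjectRequest(detectedObjectObservation: $0) }
            return
        }
        // Faces known: track them, dropping any whose confidence decays.
        try sequenceHandler.perform(trackers, on: pixelBuffer, orientation: orientation)
        trackers = trackers.filter { request in
            guard let observation = request.results?.first as? VNDetectedObjectObservation,
                observation.confidence > 0.5 else { return false }
            request.inputObservation = observation // seed the next frame
            return true
        }
    }
}
```
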
/FaceRecognition/Design/View/CameraViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraViewController.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 22/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVKit
11 |
12 | class CameraViewController: UIViewController {
13 |
14 | let backButton = UIButton(type: .system)
15 | let switchButton = UIButton(type: .system)
16 |
17 | // View for showing camera content
18 | let previewView = UIView(frame: UIScreen.main.bounds)
19 | var rootLayer: CALayer?
20 |
21 | // AVCapture variables
22 | var session: AVCaptureSession?
23 | var previewLayer: AVCaptureVideoPreviewLayer?
24 | var position: AVCaptureDevice.Position = .front
25 |
26 | var videoDataOutput: AVCaptureVideoDataOutput?
27 | var videoDataOutputQueue: DispatchQueue?
28 |
29 | var captureDevice: AVCaptureDevice?
30 | var captureDeviceResolution: CGSize = CGSize()
31 |
32 | var exifOrientation: CGImagePropertyOrientation {
33 | switch UIDevice.current.orientation {
34 | case .portraitUpsideDown:
35 | return .rightMirrored
36 | case .landscapeLeft:
37 | return .downMirrored
38 | case .landscapeRight:
39 | return .upMirrored
40 | default:
41 | return .leftMirrored
42 | }
43 | }
44 |
45 | override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
46 | return .portrait
47 | }
48 |
49 | override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
50 | return .portrait
51 | }
52 |
53 | override func loadView() {
54 | super.loadView()
55 | setUpPreviewView()
56 | previewView.layer.frame = previewView.bounds
57 | }
58 |
59 | override func viewWillAppear(_ animated: Bool) {
60 | super.viewWillAppear(animated)
61 | navigationController?.isNavigationBarHidden = true
62 | }
63 |
64 | override func viewWillDisappear(_ animated: Bool) {
65 | super.viewWillDisappear(animated)
66 | navigationController?.isNavigationBarHidden = false
67 | }
68 |
69 | override func viewDidLoad() {
70 | super.viewDidLoad()
71 | session = setUpAVCaptureSession()
72 | setupBeforeSessionRunning()
73 | session?.startRunning()
74 | }
75 |
76 | func setupBeforeSessionRunning() {}
77 | }
78 |
79 | // MARK: - SetUp
80 | extension CameraViewController {
81 | private func setUpPreviewView() {
82 | previewView.backgroundColor = .white
83 | view.addSubview(previewView)
84 |
85 | view.addSubview(backButton)
86 | backButton.setImage(UIImage(named: "Close"), for: .normal)
87 | backButton.addTarget(self, action: #selector(back), for: .touchUpInside)
88 |
89 | backButton.snp.makeConstraints { make in
90 | make.size.equalTo(50)
91 | make.left.top.equalToSuperview().inset(16)
92 | }
93 |
94 | view.addSubview(switchButton)
95 | switchButton.setTitle("Switch Camera", for: .normal)
96 | switchButton.addTarget(self, action: #selector(switchCamera), for: .touchUpInside)
97 |
98 | switchButton.snp.makeConstraints { make in
99 | make.centerY.equalTo(backButton.snp.centerY)
100 | make.left.equalTo(backButton.snp.right).inset(-16)
101 | }
102 | }
103 |
104 | @objc private func back() {
105 | navigationController?.popViewController(animated: true)
106 | }
107 |
108 | @objc private func switchCamera() {
109 | switch position {
110 | case .front:
111 | position = .back
112 | case .back:
113 | position = .front
114 | default: return
115 | }
116 | if let session = self.session {
117 | _ = try? setUpCamera(for: session, position: position)
118 | }
119 | }
120 | }
121 |
122 | // MARK: - SetUpAVCaptureSession
123 | extension CameraViewController {
124 | private func setUpAVCaptureSession() -> AVCaptureSession? {
125 | let session = AVCaptureSession()
126 | do {
127 | let inputDevice = try setUpCamera(for: session, position: position)
128 | setUpVideoDataOutput(for: session)
129 | captureDevice = inputDevice.device
130 | captureDeviceResolution = inputDevice.resolution
131 | designatePreviewLayer(for: session)
132 | return session
133 | } catch {
134 | teardownAVCapture()
135 | }
136 | return nil
137 | }
138 |
139 | private func setUpCamera(for session: AVCaptureSession, position: AVCaptureDevice.Position) throws -> (device: AVCaptureDevice, resolution: CGSize) {
140 | let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
141 | mediaType: .video,
142 | position: position)
143 | guard let device = deviceDiscoverySession.devices.first, let deviceInput = try? AVCaptureDeviceInput(device: device) else {
144 | throw AppError.frontCamera
145 | }
146 | if let input = session.inputs.first {
147 | session.removeInput(input)
148 | }
149 | if session.canAddInput(deviceInput) {
150 | session.addInput(deviceInput)
151 | }
152 | if let highestResolution = highestResolution420Format(for: device) {
153 | try device.lockForConfiguration()
154 | device.activeFormat = highestResolution.format
155 | device.unlockForConfiguration()
156 | return (device, highestResolution.resolution)
157 | }
158 | throw AppError.frontCamera
159 | }
160 |
161 | private func setUpFrontCamera(for session: AVCaptureSession) throws -> (device: AVCaptureDevice, resolution: CGSize) {
162 | let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
163 | mediaType: .video,
164 | position: .front)
165 | guard let device = deviceDiscoverySession.devices.first, let deviceInput = try? AVCaptureDeviceInput(device: device) else {
166 | throw AppError.frontCamera
167 | }
168 | if session.canAddInput(deviceInput) {
169 | session.addInput(deviceInput)
170 | }
171 | if let highestResolution = highestResolution420Format(for: device) {
172 | try device.lockForConfiguration()
173 | device.activeFormat = highestResolution.format
174 | device.unlockForConfiguration()
175 | return (device, highestResolution.resolution)
176 | }
177 | throw AppError.frontCamera
178 | }
179 |
180 | private func highestResolution420Format(for device: AVCaptureDevice) -> (format: AVCaptureDevice.Format, resolution: CGSize)? {
181 | var highestResolutionFormat: AVCaptureDevice.Format? = nil
182 | var highestResolutionDimensions = CMVideoDimensions(width: 0, height: 0)
183 | for format in device.formats {
184 | let deviceFormat = format as AVCaptureDevice.Format
185 | let deviceFormatDescription = deviceFormat.formatDescription
186 | if CMFormatDescriptionGetMediaSubType(deviceFormatDescription) == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange {
187 | let candidateDimensions = CMVideoFormatDescriptionGetDimensions(deviceFormatDescription)
188 | if (highestResolutionFormat == nil) || (candidateDimensions.width > highestResolutionDimensions.width) {
189 | highestResolutionFormat = deviceFormat
190 | highestResolutionDimensions = candidateDimensions
191 | }
192 | }
193 | }
194 | guard highestResolutionFormat != nil else {
195 | return nil
196 | }
197 | let resolution = CGSize(width: CGFloat(highestResolutionDimensions.width), height: CGFloat(highestResolutionDimensions.height))
198 | return (highestResolutionFormat!, resolution)
199 | }
200 |
201 | private func setUpVideoDataOutput(for captureSession: AVCaptureSession) {
202 | let output = AVCaptureVideoDataOutput()
203 | output.alwaysDiscardsLateVideoFrames = true
204 | let queue = DispatchQueue(label: "co.nimblehq.growth.FaceRecognition.queue")
205 | output.setSampleBufferDelegate(self, queue: queue)
206 | if captureSession.canAddOutput(output) {
207 | captureSession.addOutput(output)
208 | }
209 | output.connection(with: .video)?.isEnabled = true
210 | if let captureConnection = output.connection(with: AVMediaType.video) {
211 | if captureConnection.isCameraIntrinsicMatrixDeliverySupported {
212 | captureConnection.isCameraIntrinsicMatrixDeliveryEnabled = true
213 | }
214 | }
215 | videoDataOutput = output
216 | videoDataOutputQueue = queue
217 | }
218 |
219 | private func designatePreviewLayer(for session: AVCaptureSession) {
220 | let layer = AVCaptureVideoPreviewLayer(session: session)
221 | previewLayer = layer
222 | layer.name = "CameraPreview"
223 | layer.backgroundColor = UIColor.black.cgColor
224 | layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
225 | previewView.layer.masksToBounds = true
226 | layer.frame = previewView.layer.bounds
227 | previewView.layer.addSublayer(layer)
228 | rootLayer = previewView.layer
229 | }
230 |
231 | private func teardownAVCapture() {
232 | self.videoDataOutput = nil
233 | self.videoDataOutputQueue = nil
234 | if let previewLayer = self.previewLayer {
235 | previewLayer.removeFromSuperlayer()
236 | self.previewLayer = nil
237 | }
238 | }
239 | }
240 |
241 | extension CameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate {}
242 |
--------------------------------------------------------------------------------
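
CameraViewController is effectively a template-method base class: it owns the whole AVCaptureSession life cycle and exposes `setupBeforeSessionRunning()` as the hook that runs after the session is configured but before `startRunning()`, which is where both subclasses install their Vision requests and overlay layers. A minimal hypothetical subclass for illustration:

```swift
import UIKit

// Sketch: the smallest possible CameraViewController subclass — it inherits
// the preview view, the buttons, and the sample-buffer delegate wiring.
final class PreviewOnlyViewController: CameraViewController {
    override func setupBeforeSessionRunning() {
        super.setupBeforeSessionRunning()
        // Install Vision requests / drawing layers here, as
        // TakePicturesViewController and FaceTrackingController do.
    }
}
```
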
/FaceRecognition/Modules/FaceTracking/FaceTrackingController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FaceTrackingController.swift
3 | // FaceRecognition
4 | //
5 | // Created by Su Van Ho on 23/8/19.
6 | // Copyright © 2019 Nimble. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVKit
11 | import Vision
12 |
13 | final class FaceTrackingController: CameraViewController {
14 |
15 | let predictLabel = UILabel()
16 |
17 | private var detectFaceRequests: [VNDetectFaceRectanglesRequest]?
18 | private var sequenceRequestHandler = VNSequenceRequestHandler()
19 | private var cacheRequests: [VNTrackObjectRequest] = []
20 | private var faceClassificationRequest: VNCoreMLRequest {
21 | let request = ModelManager.shared.request { result in
22 | self.predictLabel.text = result
23 | }
24 | request.imageCropAndScaleOption = .scaleFit
25 | return request
26 | }
27 |
28 | var detectionOverlayLayer: CALayer?
29 | var currentImage: CIImage?
30 |
31 | override func setupBeforeSessionRunning() {
32 | super.setupBeforeSessionRunning()
33 | prepareVisionRequest()
34 | }
35 |
36 | override func loadView() {
37 | super.loadView()
38 | view.addSubview(predictLabel)
39 | predictLabel.textColor = .white
40 | predictLabel.backgroundColor = .black
41 | predictLabel.font = UIFont(name: "CourierNewPSMT", size: 30)
42 |
43 | predictLabel.snp.makeConstraints { make in
44 | make.left.bottom.right.equalToSuperview().inset(16)
45 | }
46 | }
47 | }
48 |
49 | extension FaceTrackingController {
50 | private func prepareVisionRequest() {
51 | let detectFaceRequest: VNDetectFaceRectanglesRequest = VNDetectFaceRectanglesRequest { (request, error) in
52 | if error != nil {
53 | fatalError("FaceDetection error: \(error.debugDescription)")
54 | }
55 | guard let request = request as? VNDetectFaceRectanglesRequest,
56 | let results = request.results as? [VNFaceObservation], !results.isEmpty else {
57 | return
58 | }
59 | DispatchQueue.main.async {
60 | self.predictLabel.text = ""
61 | self.updateRequests(with: results)
62 | self.detectionOverlayLayer?.sublayers = nil
63 | results.forEach({ self.drawFace(observation: $0) })
64 | }
65 | }
66 | detectFaceRequests = [detectFaceRequest]
67 | setupVisionDrawingLayers()
68 | }
69 |
70 | private func updateRequests(with observations: [VNFaceObservation]) {
71 | cacheRequests = observations.map({ VNTrackObjectRequest(detectedObjectObservation: $0) })
72 | }
73 | }
74 |
75 | extension FaceTrackingController {
76 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
77 | var options: [VNImageOption: Any] = [:]
78 | let cameraIntrinsicData = CMGetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, attachmentModeOut: nil)
79 | if cameraIntrinsicData != nil {
80 | options[VNImageOption.cameraIntrinsics] = cameraIntrinsicData
81 | }
82 | guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
83 | fatalError("Failed to obtain a CVPixelBuffer for the current output frame.")
84 | }
85 | let eOrientation = exifOrientation
86 | if cacheRequests.isEmpty {
87 | let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: eOrientation, options: options)
88 | currentImage = CIImage(cvPixelBuffer: pixelBuffer)
89 | do {
90 | guard let detectRequest = detectFaceRequests else { return }
91 | try imageRequestHandler.perform(detectRequest)
92 | return
93 | } catch {
94 | fatalError(error.localizedDescription)
95 | }
96 | }
97 | do {
98 | try sequenceRequestHandler.perform(cacheRequests, on: pixelBuffer, orientation: eOrientation)
99 | } catch {
100 | fatalError(error.localizedDescription)
101 | }
102 |
103 | var newCacheRequests: [VNTrackObjectRequest] = []
104 | for request in cacheRequests {
105 | guard let results = request.results as? [VNDetectedObjectObservation], let observation = results.first else {
106 | return
107 | }
108 | if !request.isLastFrame {
109 | if observation.confidence > 0.5 {
110 | request.inputObservation = observation
111 | } else {
112 | request.isLastFrame = true
113 | }
114 | newCacheRequests.append(request)
115 | }
116 | request.isLastFrame = true
117 | }
118 | cacheRequests = newCacheRequests
119 | }
120 | }
121 |
122 | extension FaceTrackingController {
123 | private func drawFace(observation: VNFaceObservation) {
124 | CATransaction.begin()
125 | CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
126 | let faceLayer = faceRectangleLayer()
127 | let faceRectanglePath = CGMutablePath()
128 | let displaySize = captureDeviceResolution
129 | let faceBounds = VNImageRectForNormalizedRect(observation.boundingBox, Int(displaySize.width), Int(displaySize.height))
130 | faceRectanglePath.addRect(faceBounds)
131 | faceLayer.path = faceRectanglePath
132 | detectionOverlayLayer?.addSublayer(faceLayer)
133 | if let image = currentImage?.oriented(exifOrientation).cropped(toFace: observation) {
134 | do {
135 | try VNImageRequestHandler(ciImage: image, options: [:]).perform([faceClassificationRequest])
136 | } catch {
137 | print("ML request handler error: \(error.localizedDescription)")
138 | }
139 | }
140 | updateLayerGeometry()
141 | CATransaction.commit()
142 | }
143 |
144 | private func setupVisionDrawingLayers() {
145 | let resolution = captureDeviceResolution
146 | let captureDeviceBounds = CGRect(x: 0,
147 | y: 0,
148 | width: resolution.width,
149 | height: resolution.height)
150 | let normalizedCenterPoint = CGPoint(x: 0.5, y: 0.5)
151 | guard let rootLayer = self.rootLayer else {
152 | fatalError("View was not properly initialized")
153 | }
154 | let overlayLayer = CALayer()
155 | overlayLayer.name = "DetectionOverlay"
156 | overlayLayer.masksToBounds = true
157 | overlayLayer.anchorPoint = normalizedCenterPoint
158 | overlayLayer.bounds = captureDeviceBounds
159 | overlayLayer.position = CGPoint(x: rootLayer.bounds.midX, y: rootLayer.bounds.midY)
160 | rootLayer.addSublayer(overlayLayer)
161 | detectionOverlayLayer = overlayLayer
162 | updateLayerGeometry()
163 | }
164 |
165 | private func faceRectangleLayer() -> CAShapeLayer {
166 | let resolution = captureDeviceResolution
167 | let captureDeviceBounds = CGRect(x: 0,
168 | y: 0,
169 | width: resolution.width,
170 | height: resolution.height)
171 | let deviceCenterPoint = CGPoint(x: captureDeviceBounds.midX,
172 | y: captureDeviceBounds.midY)
173 | let normalizedCenterPoint = CGPoint(x: 0.5, y: 0.5)
174 | let faceRectangleShapeLayer = CAShapeLayer()
175 | faceRectangleShapeLayer.name = "FaceRectangleLayer"
176 | faceRectangleShapeLayer.bounds = captureDeviceBounds
177 | faceRectangleShapeLayer.anchorPoint = normalizedCenterPoint
178 | faceRectangleShapeLayer.position = deviceCenterPoint
179 | faceRectangleShapeLayer.fillColor = nil
180 | faceRectangleShapeLayer.strokeColor = UIColor.yellow.withAlphaComponent(0.7).cgColor
181 | faceRectangleShapeLayer.lineWidth = 5
182 | faceRectangleShapeLayer.shadowOpacity = 0.7
183 | faceRectangleShapeLayer.shadowRadius = 5
184 | return faceRectangleShapeLayer
185 | }
186 |
187 | private func updateLayerGeometry() {
188 | guard let overlayLayer = self.detectionOverlayLayer,
189 | let rootLayer = self.rootLayer,
190 | let previewLayer = self.previewLayer
191 | else {
192 | return
193 | }
194 | CATransaction.setValue(NSNumber(value: true), forKey: kCATransactionDisableActions)
195 | let videoPreviewRect = previewLayer.layerRectConverted(fromMetadataOutputRect: CGRect(x: 0, y: 0, width: 1, height: 1))
196 | var rotation: CGFloat
197 | var scaleX: CGFloat
198 | var scaleY: CGFloat
199 | switch UIDevice.current.orientation {
200 | case .portraitUpsideDown:
201 | rotation = 180
202 | scaleX = videoPreviewRect.width / captureDeviceResolution.width
203 | scaleY = videoPreviewRect.height / captureDeviceResolution.height
204 |
205 | case .landscapeLeft:
206 | rotation = 90
207 | scaleX = videoPreviewRect.height / captureDeviceResolution.width
208 | scaleY = scaleX
209 |
210 | case .landscapeRight:
211 | rotation = -90
212 | scaleX = videoPreviewRect.height / captureDeviceResolution.width
213 | scaleY = scaleX
214 |
215 | default:
216 | rotation = 0
217 | scaleX = videoPreviewRect.width / captureDeviceResolution.width
218 | scaleY = videoPreviewRect.height / captureDeviceResolution.height
219 | }
220 | var scaleXForPosition: CGFloat
221 | if position == .back {
222 | scaleXForPosition = -scaleX
223 | } else {
224 | scaleXForPosition = scaleX
225 | }
226 | let affineTransform = CGAffineTransform(rotationAngle: rotation.radians)
227 | .scaledBy(x: scaleXForPosition, y: -scaleY)
228 | overlayLayer.setAffineTransform(affineTransform)
229 | let rootLayerBounds = rootLayer.bounds
230 | overlayLayer.position = CGPoint(x: rootLayerBounds.midX, y: rootLayerBounds.midY)
231 | }
232 | }
233 |
--------------------------------------------------------------------------------
/FaceRecognition.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 51;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 1938B7E3AC282A9E06E24827 /* Pods_FaceRecognitionUITests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BA53D631966FCF7D8CC4F056 /* Pods_FaceRecognitionUITests.framework */; };
11 | 9000E8BF230E7ADE007E271F /* CameraViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8BE230E7ADE007E271F /* CameraViewController.swift */; };
12 | 9000E8C2230E833B007E271F /* UIViewController+Alert.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8C1230E833B007E271F /* UIViewController+Alert.swift */; };
13 | 9000E8C5230E83B9007E271F /* Error+FaceRecognition.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8C4230E83B9007E271F /* Error+FaceRecognition.swift */; };
14 | 9000E8D2230E8881007E271F /* HomeViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8CC230E8881007E271F /* HomeViewController.swift */; };
15 | 9000E8DC230E88E4007E271F /* TakePicturesViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8D7230E88E4007E271F /* TakePicturesViewController.swift */; };
16 | 9000E8EC230E982D007E271F /* PhotoManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8EB230E982D007E271F /* PhotoManager.swift */; };
17 | 9000E8EE230E98B4007E271F /* CGFloat+Math.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8ED230E98B4007E271F /* CGFloat+Math.swift */; };
18 | 9000E8F0230E98CE007E271F /* CIImage+Vision.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9000E8EF230E98CE007E271F /* CIImage+Vision.swift */; };
19 | 900F6BC3230CF55C0018D22C /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 900F6BC2230CF55C0018D22C /* AppDelegate.swift */; };
20 | 900F6BCA230CF55D0018D22C /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 900F6BC9230CF55D0018D22C /* Assets.xcassets */; };
21 | 900F6BCD230CF55D0018D22C /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 900F6BCB230CF55D0018D22C /* LaunchScreen.storyboard */; };
22 | 909D30CA230F89D600816CD3 /* FaceTrackingController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 909D30C9230F89D600816CD3 /* FaceTrackingController.swift */; };
23 | 909D30CC230F8A0200816CD3 /* ModelManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 909D30CB230F8A0200816CD3 /* ModelManager.swift */; };
24 | 909D30CF230F8A2D00816CD3 /* ImageClassifier.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 909D30CE230F8A2D00816CD3 /* ImageClassifier.mlmodel */; };
25 | A075AE94CDF6E3B249F23F66 /* Pods_FaceRecognition.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 976E1064DF22785CD4F8D392 /* Pods_FaceRecognition.framework */; };
26 | B0BB017E92525A014333288D /* Pods_FaceRecognitionTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 477E265737A280E217FE5923 /* Pods_FaceRecognitionTests.framework */; };
27 | /* End PBXBuildFile section */
28 |
29 | /* Begin PBXContainerItemProxy section */
30 | 900F6BD4230CF55D0018D22C /* PBXContainerItemProxy */ = {
31 | isa = PBXContainerItemProxy;
32 | containerPortal = 900F6BB7230CF55C0018D22C /* Project object */;
33 | proxyType = 1;
34 | remoteGlobalIDString = 900F6BBE230CF55C0018D22C;
35 | remoteInfo = FaceRecognition;
36 | };
37 | 900F6BDF230CF55D0018D22C /* PBXContainerItemProxy */ = {
38 | isa = PBXContainerItemProxy;
39 | containerPortal = 900F6BB7230CF55C0018D22C /* Project object */;
40 | proxyType = 1;
41 | remoteGlobalIDString = 900F6BBE230CF55C0018D22C;
42 | remoteInfo = FaceRecognition;
43 | };
44 | /* End PBXContainerItemProxy section */
45 |
46 | /* Begin PBXFileReference section */
47 | 477E265737A280E217FE5923 /* Pods_FaceRecognitionTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_FaceRecognitionTests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
48 | 5F15704D38DC038E2FC93FF8 /* Pods-FaceRecognitionTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceRecognitionTests.debug.xcconfig"; path = "Target Support Files/Pods-FaceRecognitionTests/Pods-FaceRecognitionTests.debug.xcconfig"; sourceTree = "<group>"; };
49 | 6872641C28CE954A36F811E6 /* Pods-FaceRecognitionUITests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceRecognitionUITests.release.xcconfig"; path = "Target Support Files/Pods-FaceRecognitionUITests/Pods-FaceRecognitionUITests.release.xcconfig"; sourceTree = "<group>"; };
50 | 89FAB5FFD0499E3BA7D67DB0 /* Pods-FaceRecognition.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceRecognition.debug.xcconfig"; path = "Target Support Files/Pods-FaceRecognition/Pods-FaceRecognition.debug.xcconfig"; sourceTree = "<group>"; };
51 | 9000E8BE230E7ADE007E271F /* CameraViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraViewController.swift; sourceTree = "<group>"; };
52 | 9000E8C1230E833B007E271F /* UIViewController+Alert.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIViewController+Alert.swift"; sourceTree = "<group>"; };
53 | 9000E8C4230E83B9007E271F /* Error+FaceRecognition.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Error+FaceRecognition.swift"; sourceTree = "<group>"; };
54 | 9000E8CC230E8881007E271F /* HomeViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = HomeViewController.swift; sourceTree = "<group>"; };
55 | 9000E8D7230E88E4007E271F /* TakePicturesViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TakePicturesViewController.swift; sourceTree = "<group>"; };
56 | 9000E8EB230E982D007E271F /* PhotoManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoManager.swift; sourceTree = "<group>"; };
57 | 9000E8ED230E98B4007E271F /* CGFloat+Math.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CGFloat+Math.swift"; sourceTree = "<group>"; };
58 | 9000E8EF230E98CE007E271F /* CIImage+Vision.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CIImage+Vision.swift"; sourceTree = "<group>"; };
59 | 900F6BBF230CF55C0018D22C /* FaceRecognition.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FaceRecognition.app; sourceTree = BUILT_PRODUCTS_DIR; };
60 | 900F6BC2230CF55C0018D22C /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
61 | 900F6BC9230CF55D0018D22C /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
62 | 900F6BCC230CF55D0018D22C /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
63 | 900F6BCE230CF55D0018D22C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
64 | 900F6BD3230CF55D0018D22C /* FaceRecognitionTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceRecognitionTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
65 | 900F6BD9230CF55D0018D22C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
66 | 900F6BDE230CF55D0018D22C /* FaceRecognitionUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = FaceRecognitionUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
67 | 900F6BE4230CF55D0018D22C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
68 | 909D30C9230F89D600816CD3 /* FaceTrackingController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceTrackingController.swift; sourceTree = "<group>"; };
69 | 909D30CB230F8A0200816CD3 /* ModelManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModelManager.swift; sourceTree = "<group>"; };
70 | 909D30CE230F8A2D00816CD3 /* ImageClassifier.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = ImageClassifier.mlmodel; sourceTree = "<group>"; };
71 | 976E1064DF22785CD4F8D392 /* Pods_FaceRecognition.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_FaceRecognition.framework; sourceTree = BUILT_PRODUCTS_DIR; };
72 | BA53D631966FCF7D8CC4F056 /* Pods_FaceRecognitionUITests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_FaceRecognitionUITests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
73 | C98649B71E1D625262A16B5B /* Pods-FaceRecognitionTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceRecognitionTests.release.xcconfig"; path = "Target Support Files/Pods-FaceRecognitionTests/Pods-FaceRecognitionTests.release.xcconfig"; sourceTree = "<group>"; };
74 | CDC0EBEA2A29ECFADA36A320 /* Pods-FaceRecognitionUITests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceRecognitionUITests.debug.xcconfig"; path = "Target Support Files/Pods-FaceRecognitionUITests/Pods-FaceRecognitionUITests.debug.xcconfig"; sourceTree = "<group>"; };
75 | F5799412932ED8F64AAA039C /* Pods-FaceRecognition.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FaceRecognition.release.xcconfig"; path = "Target Support Files/Pods-FaceRecognition/Pods-FaceRecognition.release.xcconfig"; sourceTree = "<group>"; };
76 | /* End PBXFileReference section */
77 |
78 | /* Begin PBXFrameworksBuildPhase section */
79 | 900F6BBC230CF55C0018D22C /* Frameworks */ = {
80 | isa = PBXFrameworksBuildPhase;
81 | buildActionMask = 2147483647;
82 | files = (
83 | A075AE94CDF6E3B249F23F66 /* Pods_FaceRecognition.framework in Frameworks */,
84 | );
85 | runOnlyForDeploymentPostprocessing = 0;
86 | };
87 | 900F6BD0230CF55D0018D22C /* Frameworks */ = {
88 | isa = PBXFrameworksBuildPhase;
89 | buildActionMask = 2147483647;
90 | files = (
91 | B0BB017E92525A014333288D /* Pods_FaceRecognitionTests.framework in Frameworks */,
92 | );
93 | runOnlyForDeploymentPostprocessing = 0;
94 | };
95 | 900F6BDB230CF55D0018D22C /* Frameworks */ = {
96 | isa = PBXFrameworksBuildPhase;
97 | buildActionMask = 2147483647;
98 | files = (
99 | 1938B7E3AC282A9E06E24827 /* Pods_FaceRecognitionUITests.framework in Frameworks */,
100 | );
101 | runOnlyForDeploymentPostprocessing = 0;
102 | };
103 | /* End PBXFrameworksBuildPhase section */
104 |
105 | /* Begin PBXGroup section */
106 | 4538BC4725F66A468A71F9F7 /* Pods */ = {
107 | isa = PBXGroup;
108 | children = (
109 | 89FAB5FFD0499E3BA7D67DB0 /* Pods-FaceRecognition.debug.xcconfig */,
110 | F5799412932ED8F64AAA039C /* Pods-FaceRecognition.release.xcconfig */,
111 | 5F15704D38DC038E2FC93FF8 /* Pods-FaceRecognitionTests.debug.xcconfig */,
112 | C98649B71E1D625262A16B5B /* Pods-FaceRecognitionTests.release.xcconfig */,
113 | CDC0EBEA2A29ECFADA36A320 /* Pods-FaceRecognitionUITests.debug.xcconfig */,
114 | 6872641C28CE954A36F811E6 /* Pods-FaceRecognitionUITests.release.xcconfig */,
115 | );
116 | path = Pods;
117 | sourceTree = "<group>";
118 | };
119 | 660F955628F355D7608BA017 /* Frameworks */ = {
120 | isa = PBXGroup;
121 | children = (
122 | 976E1064DF22785CD4F8D392 /* Pods_FaceRecognition.framework */,
123 | 477E265737A280E217FE5923 /* Pods_FaceRecognitionTests.framework */,
124 | BA53D631966FCF7D8CC4F056 /* Pods_FaceRecognitionUITests.framework */,
125 | );
126 | name = Frameworks;
127 | sourceTree = "<group>";
128 | };
129 | 9000E8BC230E7AC5007E271F /* Design */ = {
130 | isa = PBXGroup;
131 | children = (
132 | 9000E8EA230E9812007E271F /* Manager */,
133 | 9000E8C0230E8214007E271F /* Extensions */,
134 | 9000E8BD230E7AD0007E271F /* View */,
135 | );
136 | path = Design;
137 | sourceTree = "<group>";
138 | };
139 | 9000E8BD230E7AD0007E271F /* View */ = {
140 | isa = PBXGroup;
141 | children = (
142 | 9000E8BE230E7ADE007E271F /* CameraViewController.swift */,
143 | );
144 | path = View;
145 | sourceTree = "<group>";
146 | };
147 | 9000E8C0230E8214007E271F /* Extensions */ = {
148 | isa = PBXGroup;
149 | children = (
150 | 9000E8C1230E833B007E271F /* UIViewController+Alert.swift */,
151 | 9000E8C4230E83B9007E271F /* Error+FaceRecognition.swift */,
152 | 9000E8ED230E98B4007E271F /* CGFloat+Math.swift */,
153 | 9000E8EF230E98CE007E271F /* CIImage+Vision.swift */,
154 | );
155 | path = Extensions;
156 | sourceTree = "<group>";
157 | };
158 | 9000E8D4230E88AA007E271F /* TakePictures */ = {
159 | isa = PBXGroup;
160 | children = (
161 | 9000E8D7230E88E4007E271F /* TakePicturesViewController.swift */,
162 | );
163 | path = TakePictures;
164 | sourceTree = "<group>";
165 | };
166 | 9000E8EA230E9812007E271F /* Manager */ = {
167 | isa = PBXGroup;
168 | children = (
169 | 9000E8EB230E982D007E271F /* PhotoManager.swift */,
170 | 909D30CB230F8A0200816CD3 /* ModelManager.swift */,
171 | );
172 | path = Manager;
173 | sourceTree = "<group>";
174 | };
175 | 900F6BB6230CF55C0018D22C = {
176 | isa = PBXGroup;
177 | children = (
178 | 900F6BC1230CF55C0018D22C /* FaceRecognition */,
179 | 900F6BD6230CF55D0018D22C /* FaceRecognitionTests */,
180 | 900F6BE1230CF55D0018D22C /* FaceRecognitionUITests */,
181 | 900F6BC0230CF55C0018D22C /* Products */,
182 | 4538BC4725F66A468A71F9F7 /* Pods */,
183 | 660F955628F355D7608BA017 /* Frameworks */,
184 | );
185 | sourceTree = "<group>";
186 | };
187 | 900F6BC0230CF55C0018D22C /* Products */ = {
188 | isa = PBXGroup;
189 | children = (
190 | 900F6BBF230CF55C0018D22C /* FaceRecognition.app */,
191 | 900F6BD3230CF55D0018D22C /* FaceRecognitionTests.xctest */,
192 | 900F6BDE230CF55D0018D22C /* FaceRecognitionUITests.xctest */,
193 | );
194 | name = Products;
195 | sourceTree = "<group>";
196 | };
197 | 900F6BC1230CF55C0018D22C /* FaceRecognition */ = {
198 | isa = PBXGroup;
199 | children = (
200 | 900F6BF6230CF60F0018D22C /* AppDelegate */,
201 | 9000E8BC230E7AC5007E271F /* Design */,
202 | 900F6BF7230CF6160018D22C /* Modules */,
203 | 900F6BF3230CF5EF0018D22C /* Resources */,
204 | 900F6BF2230CF5E80018D22C /* Supporting Files */,
205 | );
206 | path = FaceRecognition;
207 | sourceTree = "<group>";
208 | };
209 | 900F6BD6230CF55D0018D22C /* FaceRecognitionTests */ = {
210 | isa = PBXGroup;
211 | children = (
212 | 900F6BF0230CF5810018D22C /* Supporting Files */,
213 | );
214 | path = FaceRecognitionTests;
215 | sourceTree = "<group>";
216 | };
217 | 900F6BE1230CF55D0018D22C /* FaceRecognitionUITests */ = {
218 | isa = PBXGroup;
219 | children = (
220 | 900F6BF1230CF5890018D22C /* Supporting FIles */,
221 | );
222 | path = FaceRecognitionUITests;
223 | sourceTree = "<group>";
224 | };
225 | 900F6BF0230CF5810018D22C /* Supporting Files */ = {
226 | isa = PBXGroup;
227 | children = (
228 | 900F6BD9230CF55D0018D22C /* Info.plist */,
229 | );
230 | path = "Supporting Files";
231 | sourceTree = "<group>";
232 | };
233 | 900F6BF1230CF5890018D22C /* Supporting FIles */ = {
234 | isa = PBXGroup;
235 | children = (
236 | 900F6BE4230CF55D0018D22C /* Info.plist */,
237 | );
238 | path = "Supporting FIles";
239 | sourceTree = "<group>";
240 | };
241 | 900F6BF2230CF5E80018D22C /* Supporting Files */ = {
242 | isa = PBXGroup;
243 | children = (
244 | 900F6BCE230CF55D0018D22C /* Info.plist */,
245 | );
246 | path = "Supporting Files";
247 | sourceTree = "<group>";
248 | };
249 | 900F6BF3230CF5EF0018D22C /* Resources */ = {
250 | isa = PBXGroup;
251 | children = (
252 | 909D30CD230F8A2400816CD3 /* Model */,
253 | 900F6BF5230CF5FD0018D22C /* LaunchScreen */,
254 | 900F6BF4230CF5F60018D22C /* Images */,
255 | );
256 | path = Resources;
257 | sourceTree = "<group>";
258 | };
259 | 900F6BF4230CF5F60018D22C /* Images */ = {
260 | isa = PBXGroup;
261 | children = (
262 | 900F6BC9230CF55D0018D22C /* Assets.xcassets */,
263 | );
264 | path = Images;
265 | sourceTree = "<group>";
266 | };
267 | 900F6BF5230CF5FD0018D22C /* LaunchScreen */ = {
268 | isa = PBXGroup;
269 | children = (
270 | 900F6BCB230CF55D0018D22C /* LaunchScreen.storyboard */,
271 | );
272 | path = LaunchScreen;
273 | sourceTree = "<group>";
274 | };
275 | 900F6BF6230CF60F0018D22C /* AppDelegate */ = {
276 | isa = PBXGroup;
277 | children = (
278 | 900F6BC2230CF55C0018D22C /* AppDelegate.swift */,
279 | );
280 | path = AppDelegate;
281 | sourceTree = "<group>";
282 | };
283 | 900F6BF7230CF6160018D22C /* Modules */ = {
284 | isa = PBXGroup;
285 | children = (
286 | 900F6BF8230CF6240018D22C /* Home */,
287 | 9000E8D4230E88AA007E271F /* TakePictures */,
288 | 909D30C8230F89C000816CD3 /* FaceTracking */,
289 | );
290 | path = Modules;
291 | sourceTree = "<group>";
292 | };
293 | 900F6BF8230CF6240018D22C /* Home */ = {
294 | isa = PBXGroup;
295 | children = (
296 | 9000E8CC230E8881007E271F /* HomeViewController.swift */,
297 | );
298 | path = Home;
299 | sourceTree = "<group>";
300 | };
301 | 909D30C8230F89C000816CD3 /* FaceTracking */ = {
302 | isa = PBXGroup;
303 | children = (
304 | 909D30C9230F89D600816CD3 /* FaceTrackingController.swift */,
305 | );
306 | path = FaceTracking;
307 | sourceTree = "<group>";
308 | };
309 | 909D30CD230F8A2400816CD3 /* Model */ = {
310 | isa = PBXGroup;
311 | children = (
312 | 909D30CE230F8A2D00816CD3 /* ImageClassifier.mlmodel */,
313 | );
314 | path = Model;
315 | sourceTree = "<group>";
316 | };
317 | /* End PBXGroup section */
318 |
319 | /* Begin PBXNativeTarget section */
320 | 900F6BBE230CF55C0018D22C /* FaceRecognition */ = {
321 | isa = PBXNativeTarget;
322 | buildConfigurationList = 900F6BE7230CF55D0018D22C /* Build configuration list for PBXNativeTarget "FaceRecognition" */;
323 | buildPhases = (
324 | A6E9DCA7D3C3E8BCE3BC42FE /* [CP] Check Pods Manifest.lock */,
325 | 900F6BBB230CF55C0018D22C /* Sources */,
326 | 900F6BBC230CF55C0018D22C /* Frameworks */,
327 | 900F6BBD230CF55C0018D22C /* Resources */,
328 | 96946EF16EFC8EF0054A53B5 /* [CP] Embed Pods Frameworks */,
329 | );
330 | buildRules = (
331 | );
332 | dependencies = (
333 | );
334 | name = FaceRecognition;
335 | productName = FaceRecognition;
336 | productReference = 900F6BBF230CF55C0018D22C /* FaceRecognition.app */;
337 | productType = "com.apple.product-type.application";
338 | };
339 | 900F6BD2230CF55D0018D22C /* FaceRecognitionTests */ = {
340 | isa = PBXNativeTarget;
341 | buildConfigurationList = 900F6BEA230CF55D0018D22C /* Build configuration list for PBXNativeTarget "FaceRecognitionTests" */;
342 | buildPhases = (
343 | A6B00539C9A629155648E420 /* [CP] Check Pods Manifest.lock */,
344 | 900F6BCF230CF55D0018D22C /* Sources */,
345 | 900F6BD0230CF55D0018D22C /* Frameworks */,
346 | 900F6BD1230CF55D0018D22C /* Resources */,
347 | );
348 | buildRules = (
349 | );
350 | dependencies = (
351 | 900F6BD5230CF55D0018D22C /* PBXTargetDependency */,
352 | );
353 | name = FaceRecognitionTests;
354 | productName = FaceRecognitionTests;
355 | productReference = 900F6BD3230CF55D0018D22C /* FaceRecognitionTests.xctest */;
356 | productType = "com.apple.product-type.bundle.unit-test";
357 | };
358 | 900F6BDD230CF55D0018D22C /* FaceRecognitionUITests */ = {
359 | isa = PBXNativeTarget;
360 | buildConfigurationList = 900F6BED230CF55D0018D22C /* Build configuration list for PBXNativeTarget "FaceRecognitionUITests" */;
361 | buildPhases = (
362 | 41D9CF5D0E8B7D461729D13D /* [CP] Check Pods Manifest.lock */,
363 | 900F6BDA230CF55D0018D22C /* Sources */,
364 | 900F6BDB230CF55D0018D22C /* Frameworks */,
365 | 900F6BDC230CF55D0018D22C /* Resources */,
366 | );
367 | buildRules = (
368 | );
369 | dependencies = (
370 | 900F6BE0230CF55D0018D22C /* PBXTargetDependency */,
371 | );
372 | name = FaceRecognitionUITests;
373 | productName = FaceRecognitionUITests;
374 | productReference = 900F6BDE230CF55D0018D22C /* FaceRecognitionUITests.xctest */;
375 | productType = "com.apple.product-type.bundle.ui-testing";
376 | };
377 | /* End PBXNativeTarget section */
378 |
379 | /* Begin PBXProject section */
380 | 900F6BB7230CF55C0018D22C /* Project object */ = {
381 | isa = PBXProject;
382 | attributes = {
383 | LastSwiftUpdateCheck = 1030;
384 | LastUpgradeCheck = 1030;
385 | ORGANIZATIONNAME = Nimble;
386 | TargetAttributes = {
387 | 900F6BBE230CF55C0018D22C = {
388 | CreatedOnToolsVersion = 10.3;
389 | };
390 | 900F6BD2230CF55D0018D22C = {
391 | CreatedOnToolsVersion = 10.3;
392 | TestTargetID = 900F6BBE230CF55C0018D22C;
393 | };
394 | 900F6BDD230CF55D0018D22C = {
395 | CreatedOnToolsVersion = 10.3;
396 | TestTargetID = 900F6BBE230CF55C0018D22C;
397 | };
398 | };
399 | };
400 | buildConfigurationList = 900F6BBA230CF55C0018D22C /* Build configuration list for PBXProject "FaceRecognition" */;
401 | compatibilityVersion = "Xcode 9.3";
402 | developmentRegion = en;
403 | hasScannedForEncodings = 0;
404 | knownRegions = (
405 | en,
406 | Base,
407 | );
408 | mainGroup = 900F6BB6230CF55C0018D22C;
409 | productRefGroup = 900F6BC0230CF55C0018D22C /* Products */;
410 | projectDirPath = "";
411 | projectRoot = "";
412 | targets = (
413 | 900F6BBE230CF55C0018D22C /* FaceRecognition */,
414 | 900F6BD2230CF55D0018D22C /* FaceRecognitionTests */,
415 | 900F6BDD230CF55D0018D22C /* FaceRecognitionUITests */,
416 | );
417 | };
418 | /* End PBXProject section */
419 |
420 | /* Begin PBXResourcesBuildPhase section */
421 | 900F6BBD230CF55C0018D22C /* Resources */ = {
422 | isa = PBXResourcesBuildPhase;
423 | buildActionMask = 2147483647;
424 | files = (
425 | 900F6BCD230CF55D0018D22C /* LaunchScreen.storyboard in Resources */,
426 | 900F6BCA230CF55D0018D22C /* Assets.xcassets in Resources */,
427 | );
428 | runOnlyForDeploymentPostprocessing = 0;
429 | };
430 | 900F6BD1230CF55D0018D22C /* Resources */ = {
431 | isa = PBXResourcesBuildPhase;
432 | buildActionMask = 2147483647;
433 | files = (
434 | );
435 | runOnlyForDeploymentPostprocessing = 0;
436 | };
437 | 900F6BDC230CF55D0018D22C /* Resources */ = {
438 | isa = PBXResourcesBuildPhase;
439 | buildActionMask = 2147483647;
440 | files = (
441 | );
442 | runOnlyForDeploymentPostprocessing = 0;
443 | };
444 | /* End PBXResourcesBuildPhase section */
445 |
446 | /* Begin PBXShellScriptBuildPhase section */
447 | 41D9CF5D0E8B7D461729D13D /* [CP] Check Pods Manifest.lock */ = {
448 | isa = PBXShellScriptBuildPhase;
449 | buildActionMask = 2147483647;
450 | files = (
451 | );
452 | inputFileListPaths = (
453 | );
454 | inputPaths = (
455 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
456 | "${PODS_ROOT}/Manifest.lock",
457 | );
458 | name = "[CP] Check Pods Manifest.lock";
459 | outputFileListPaths = (
460 | );
461 | outputPaths = (
462 | "$(DERIVED_FILE_DIR)/Pods-FaceRecognitionUITests-checkManifestLockResult.txt",
463 | );
464 | runOnlyForDeploymentPostprocessing = 0;
465 | shellPath = /bin/sh;
466 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
467 | showEnvVarsInLog = 0;
468 | };
469 | 96946EF16EFC8EF0054A53B5 /* [CP] Embed Pods Frameworks */ = {
470 | isa = PBXShellScriptBuildPhase;
471 | buildActionMask = 2147483647;
472 | files = (
473 | );
474 | inputFileListPaths = (
475 | "${PODS_ROOT}/Target Support Files/Pods-FaceRecognition/Pods-FaceRecognition-frameworks-${CONFIGURATION}-input-files.xcfilelist",
476 | );
477 | name = "[CP] Embed Pods Frameworks";
478 | outputFileListPaths = (
479 | "${PODS_ROOT}/Target Support Files/Pods-FaceRecognition/Pods-FaceRecognition-frameworks-${CONFIGURATION}-output-files.xcfilelist",
480 | );
481 | runOnlyForDeploymentPostprocessing = 0;
482 | shellPath = /bin/sh;
483 | shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-FaceRecognition/Pods-FaceRecognition-frameworks.sh\"\n";
484 | showEnvVarsInLog = 0;
485 | };
486 | A6B00539C9A629155648E420 /* [CP] Check Pods Manifest.lock */ = {
487 | isa = PBXShellScriptBuildPhase;
488 | buildActionMask = 2147483647;
489 | files = (
490 | );
491 | inputFileListPaths = (
492 | );
493 | inputPaths = (
494 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
495 | "${PODS_ROOT}/Manifest.lock",
496 | );
497 | name = "[CP] Check Pods Manifest.lock";
498 | outputFileListPaths = (
499 | );
500 | outputPaths = (
501 | "$(DERIVED_FILE_DIR)/Pods-FaceRecognitionTests-checkManifestLockResult.txt",
502 | );
503 | runOnlyForDeploymentPostprocessing = 0;
504 | shellPath = /bin/sh;
505 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
506 | showEnvVarsInLog = 0;
507 | };
508 | A6E9DCA7D3C3E8BCE3BC42FE /* [CP] Check Pods Manifest.lock */ = {
509 | isa = PBXShellScriptBuildPhase;
510 | buildActionMask = 2147483647;
511 | files = (
512 | );
513 | inputFileListPaths = (
514 | );
515 | inputPaths = (
516 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
517 | "${PODS_ROOT}/Manifest.lock",
518 | );
519 | name = "[CP] Check Pods Manifest.lock";
520 | outputFileListPaths = (
521 | );
522 | outputPaths = (
523 | "$(DERIVED_FILE_DIR)/Pods-FaceRecognition-checkManifestLockResult.txt",
524 | );
525 | runOnlyForDeploymentPostprocessing = 0;
526 | shellPath = /bin/sh;
527 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
528 | showEnvVarsInLog = 0;
529 | };
530 | /* End PBXShellScriptBuildPhase section */
531 |
532 | /* Begin PBXSourcesBuildPhase section */
533 | 900F6BBB230CF55C0018D22C /* Sources */ = {
534 | isa = PBXSourcesBuildPhase;
535 | buildActionMask = 2147483647;
536 | files = (
537 | 9000E8F0230E98CE007E271F /* CIImage+Vision.swift in Sources */,
538 | 9000E8DC230E88E4007E271F /* TakePicturesViewController.swift in Sources */,
539 | 900F6BC3230CF55C0018D22C /* AppDelegate.swift in Sources */,
540 | 909D30CA230F89D600816CD3 /* FaceTrackingController.swift in Sources */,
541 | 9000E8EE230E98B4007E271F /* CGFloat+Math.swift in Sources */,
542 | 9000E8EC230E982D007E271F /* PhotoManager.swift in Sources */,
543 | 9000E8BF230E7ADE007E271F /* CameraViewController.swift in Sources */,
544 | 909D30CF230F8A2D00816CD3 /* ImageClassifier.mlmodel in Sources */,
545 | 909D30CC230F8A0200816CD3 /* ModelManager.swift in Sources */,
546 | 9000E8D2230E8881007E271F /* HomeViewController.swift in Sources */,
547 | 9000E8C5230E83B9007E271F /* Error+FaceRecognition.swift in Sources */,
548 | 9000E8C2230E833B007E271F /* UIViewController+Alert.swift in Sources */,
549 | );
550 | runOnlyForDeploymentPostprocessing = 0;
551 | };
552 | 900F6BCF230CF55D0018D22C /* Sources */ = {
553 | isa = PBXSourcesBuildPhase;
554 | buildActionMask = 2147483647;
555 | files = (
556 | );
557 | runOnlyForDeploymentPostprocessing = 0;
558 | };
559 | 900F6BDA230CF55D0018D22C /* Sources */ = {
560 | isa = PBXSourcesBuildPhase;
561 | buildActionMask = 2147483647;
562 | files = (
563 | );
564 | runOnlyForDeploymentPostprocessing = 0;
565 | };
566 | /* End PBXSourcesBuildPhase section */
567 |
568 | /* Begin PBXTargetDependency section */
569 | 900F6BD5230CF55D0018D22C /* PBXTargetDependency */ = {
570 | isa = PBXTargetDependency;
571 | target = 900F6BBE230CF55C0018D22C /* FaceRecognition */;
572 | targetProxy = 900F6BD4230CF55D0018D22C /* PBXContainerItemProxy */;
573 | };
574 | 900F6BE0230CF55D0018D22C /* PBXTargetDependency */ = {
575 | isa = PBXTargetDependency;
576 | target = 900F6BBE230CF55C0018D22C /* FaceRecognition */;
577 | targetProxy = 900F6BDF230CF55D0018D22C /* PBXContainerItemProxy */;
578 | };
579 | /* End PBXTargetDependency section */
580 |
581 | /* Begin PBXVariantGroup section */
582 | 900F6BCB230CF55D0018D22C /* LaunchScreen.storyboard */ = {
583 | isa = PBXVariantGroup;
584 | children = (
585 | 900F6BCC230CF55D0018D22C /* Base */,
586 | );
587 | name = LaunchScreen.storyboard;
588 | sourceTree = "<group>";
589 | };
590 | /* End PBXVariantGroup section */
591 |
592 | /* Begin XCBuildConfiguration section */
593 | 900F6BE5230CF55D0018D22C /* Debug */ = {
594 | isa = XCBuildConfiguration;
595 | buildSettings = {
596 | ALWAYS_SEARCH_USER_PATHS = NO;
597 | CLANG_ANALYZER_NONNULL = YES;
598 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
599 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
600 | CLANG_CXX_LIBRARY = "libc++";
601 | CLANG_ENABLE_MODULES = YES;
602 | CLANG_ENABLE_OBJC_ARC = YES;
603 | CLANG_ENABLE_OBJC_WEAK = YES;
604 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
605 | CLANG_WARN_BOOL_CONVERSION = YES;
606 | CLANG_WARN_COMMA = YES;
607 | CLANG_WARN_CONSTANT_CONVERSION = YES;
608 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
609 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
610 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
611 | CLANG_WARN_EMPTY_BODY = YES;
612 | CLANG_WARN_ENUM_CONVERSION = YES;
613 | CLANG_WARN_INFINITE_RECURSION = YES;
614 | CLANG_WARN_INT_CONVERSION = YES;
615 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
616 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
617 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
618 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
619 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
620 | CLANG_WARN_STRICT_PROTOTYPES = YES;
621 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
622 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
623 | CLANG_WARN_UNREACHABLE_CODE = YES;
624 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
625 | CODE_SIGN_IDENTITY = "iPhone Developer";
626 | COPY_PHASE_STRIP = NO;
627 | DEBUG_INFORMATION_FORMAT = dwarf;
628 | ENABLE_STRICT_OBJC_MSGSEND = YES;
629 | ENABLE_TESTABILITY = YES;
630 | GCC_C_LANGUAGE_STANDARD = gnu11;
631 | GCC_DYNAMIC_NO_PIC = NO;
632 | GCC_NO_COMMON_BLOCKS = YES;
633 | GCC_OPTIMIZATION_LEVEL = 0;
634 | GCC_PREPROCESSOR_DEFINITIONS = (
635 | "DEBUG=1",
636 | "$(inherited)",
637 | );
638 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
639 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
640 | GCC_WARN_UNDECLARED_SELECTOR = YES;
641 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
642 | GCC_WARN_UNUSED_FUNCTION = YES;
643 | GCC_WARN_UNUSED_VARIABLE = YES;
644 | IPHONEOS_DEPLOYMENT_TARGET = 12.4;
645 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
646 | MTL_FAST_MATH = YES;
647 | ONLY_ACTIVE_ARCH = YES;
648 | SDKROOT = iphoneos;
649 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
650 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
651 | };
652 | name = Debug;
653 | };
654 | 900F6BE6230CF55D0018D22C /* Release */ = {
655 | isa = XCBuildConfiguration;
656 | buildSettings = {
657 | ALWAYS_SEARCH_USER_PATHS = NO;
658 | CLANG_ANALYZER_NONNULL = YES;
659 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
660 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
661 | CLANG_CXX_LIBRARY = "libc++";
662 | CLANG_ENABLE_MODULES = YES;
663 | CLANG_ENABLE_OBJC_ARC = YES;
664 | CLANG_ENABLE_OBJC_WEAK = YES;
665 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
666 | CLANG_WARN_BOOL_CONVERSION = YES;
667 | CLANG_WARN_COMMA = YES;
668 | CLANG_WARN_CONSTANT_CONVERSION = YES;
669 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
670 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
671 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
672 | CLANG_WARN_EMPTY_BODY = YES;
673 | CLANG_WARN_ENUM_CONVERSION = YES;
674 | CLANG_WARN_INFINITE_RECURSION = YES;
675 | CLANG_WARN_INT_CONVERSION = YES;
676 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
677 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
678 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
679 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
680 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
681 | CLANG_WARN_STRICT_PROTOTYPES = YES;
682 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
683 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
684 | CLANG_WARN_UNREACHABLE_CODE = YES;
685 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
686 | CODE_SIGN_IDENTITY = "iPhone Developer";
687 | COPY_PHASE_STRIP = NO;
688 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
689 | ENABLE_NS_ASSERTIONS = NO;
690 | ENABLE_STRICT_OBJC_MSGSEND = YES;
691 | GCC_C_LANGUAGE_STANDARD = gnu11;
692 | GCC_NO_COMMON_BLOCKS = YES;
693 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
694 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
695 | GCC_WARN_UNDECLARED_SELECTOR = YES;
696 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
697 | GCC_WARN_UNUSED_FUNCTION = YES;
698 | GCC_WARN_UNUSED_VARIABLE = YES;
699 | IPHONEOS_DEPLOYMENT_TARGET = 12.4;
700 | MTL_ENABLE_DEBUG_INFO = NO;
701 | MTL_FAST_MATH = YES;
702 | SDKROOT = iphoneos;
703 | SWIFT_COMPILATION_MODE = wholemodule;
704 | SWIFT_OPTIMIZATION_LEVEL = "-O";
705 | VALIDATE_PRODUCT = YES;
706 | };
707 | name = Release;
708 | };
709 | 900F6BE8230CF55D0018D22C /* Debug */ = {
710 | isa = XCBuildConfiguration;
711 | baseConfigurationReference = 89FAB5FFD0499E3BA7D67DB0 /* Pods-FaceRecognition.debug.xcconfig */;
712 | buildSettings = {
713 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
714 | CODE_SIGN_STYLE = Automatic;
715 | DEVELOPMENT_TEAM = YY4ZX4L4MB;
716 | INFOPLIST_FILE = "$(SRCROOT)/FaceRecognition/Supporting Files/Info.plist";
717 | LD_RUNPATH_SEARCH_PATHS = (
718 | "$(inherited)",
719 | "@executable_path/Frameworks",
720 | );
721 | PRODUCT_BUNDLE_IDENTIFIER = co.nimblehq.growth.FaceRecognition;
722 | PRODUCT_NAME = "$(TARGET_NAME)";
723 | SWIFT_VERSION = 5.0;
724 | TARGETED_DEVICE_FAMILY = "1,2";
725 | };
726 | name = Debug;
727 | };
728 | 900F6BE9230CF55D0018D22C /* Release */ = {
729 | isa = XCBuildConfiguration;
730 | baseConfigurationReference = F5799412932ED8F64AAA039C /* Pods-FaceRecognition.release.xcconfig */;
731 | buildSettings = {
732 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
733 | CODE_SIGN_STYLE = Automatic;
734 | DEVELOPMENT_TEAM = YY4ZX4L4MB;
735 | INFOPLIST_FILE = "$(SRCROOT)/FaceRecognition/Supporting Files/Info.plist";
736 | LD_RUNPATH_SEARCH_PATHS = (
737 | "$(inherited)",
738 | "@executable_path/Frameworks",
739 | );
740 | PRODUCT_BUNDLE_IDENTIFIER = co.nimblehq.growth.FaceRecognition;
741 | PRODUCT_NAME = "$(TARGET_NAME)";
742 | SWIFT_VERSION = 5.0;
743 | TARGETED_DEVICE_FAMILY = "1,2";
744 | };
745 | name = Release;
746 | };
747 | 900F6BEB230CF55D0018D22C /* Debug */ = {
748 | isa = XCBuildConfiguration;
749 | baseConfigurationReference = 5F15704D38DC038E2FC93FF8 /* Pods-FaceRecognitionTests.debug.xcconfig */;
750 | buildSettings = {
751 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
752 | BUNDLE_LOADER = "$(TEST_HOST)";
753 | CODE_SIGN_STYLE = Automatic;
754 | INFOPLIST_FILE = "FaceRecognitionTests/Supporting Files/Info.plist";
755 | LD_RUNPATH_SEARCH_PATHS = (
756 | "$(inherited)",
757 | "@executable_path/Frameworks",
758 | "@loader_path/Frameworks",
759 | );
760 | PRODUCT_BUNDLE_IDENTIFIER = co.nimblehq.growth.FaceRecognitionTests;
761 | PRODUCT_NAME = "$(TARGET_NAME)";
762 | SWIFT_VERSION = 5.0;
763 | TARGETED_DEVICE_FAMILY = "1,2";
764 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceRecognition.app/FaceRecognition";
765 | };
766 | name = Debug;
767 | };
768 | 900F6BEC230CF55D0018D22C /* Release */ = {
769 | isa = XCBuildConfiguration;
770 | baseConfigurationReference = C98649B71E1D625262A16B5B /* Pods-FaceRecognitionTests.release.xcconfig */;
771 | buildSettings = {
772 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
773 | BUNDLE_LOADER = "$(TEST_HOST)";
774 | CODE_SIGN_STYLE = Automatic;
775 | INFOPLIST_FILE = "FaceRecognitionTests/Supporting Files/Info.plist";
776 | LD_RUNPATH_SEARCH_PATHS = (
777 | "$(inherited)",
778 | "@executable_path/Frameworks",
779 | "@loader_path/Frameworks",
780 | );
781 | PRODUCT_BUNDLE_IDENTIFIER = co.nimblehq.growth.FaceRecognitionTests;
782 | PRODUCT_NAME = "$(TARGET_NAME)";
783 | SWIFT_VERSION = 5.0;
784 | TARGETED_DEVICE_FAMILY = "1,2";
785 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/FaceRecognition.app/FaceRecognition";
786 | };
787 | name = Release;
788 | };
789 | 900F6BEE230CF55D0018D22C /* Debug */ = {
790 | isa = XCBuildConfiguration;
791 | baseConfigurationReference = CDC0EBEA2A29ECFADA36A320 /* Pods-FaceRecognitionUITests.debug.xcconfig */;
792 | buildSettings = {
793 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
794 | CODE_SIGN_STYLE = Automatic;
795 | INFOPLIST_FILE = "FaceRecognitionUITests/Supporting Files/Info.plist";
796 | LD_RUNPATH_SEARCH_PATHS = (
797 | "$(inherited)",
798 | "@executable_path/Frameworks",
799 | "@loader_path/Frameworks",
800 | );
801 | PRODUCT_BUNDLE_IDENTIFIER = co.nimblehq.growth.FaceRecognitionUITests;
802 | PRODUCT_NAME = "$(TARGET_NAME)";
803 | SWIFT_VERSION = 5.0;
804 | TARGETED_DEVICE_FAMILY = "1,2";
805 | TEST_TARGET_NAME = FaceRecognition;
806 | };
807 | name = Debug;
808 | };
809 | 900F6BEF230CF55D0018D22C /* Release */ = {
810 | isa = XCBuildConfiguration;
811 | baseConfigurationReference = 6872641C28CE954A36F811E6 /* Pods-FaceRecognitionUITests.release.xcconfig */;
812 | buildSettings = {
813 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
814 | CODE_SIGN_STYLE = Automatic;
815 | INFOPLIST_FILE = "FaceRecognitionUITests/Supporting Files/Info.plist";
816 | LD_RUNPATH_SEARCH_PATHS = (
817 | "$(inherited)",
818 | "@executable_path/Frameworks",
819 | "@loader_path/Frameworks",
820 | );
821 | PRODUCT_BUNDLE_IDENTIFIER = co.nimblehq.growth.FaceRecognitionUITests;
822 | PRODUCT_NAME = "$(TARGET_NAME)";
823 | SWIFT_VERSION = 5.0;
824 | TARGETED_DEVICE_FAMILY = "1,2";
825 | TEST_TARGET_NAME = FaceRecognition;
826 | };
827 | name = Release;
828 | };
829 | /* End XCBuildConfiguration section */
830 |
831 | /* Begin XCConfigurationList section */
832 | 900F6BBA230CF55C0018D22C /* Build configuration list for PBXProject "FaceRecognition" */ = {
833 | isa = XCConfigurationList;
834 | buildConfigurations = (
835 | 900F6BE5230CF55D0018D22C /* Debug */,
836 | 900F6BE6230CF55D0018D22C /* Release */,
837 | );
838 | defaultConfigurationIsVisible = 0;
839 | defaultConfigurationName = Release;
840 | };
841 | 900F6BE7230CF55D0018D22C /* Build configuration list for PBXNativeTarget "FaceRecognition" */ = {
842 | isa = XCConfigurationList;
843 | buildConfigurations = (
844 | 900F6BE8230CF55D0018D22C /* Debug */,
845 | 900F6BE9230CF55D0018D22C /* Release */,
846 | );
847 | defaultConfigurationIsVisible = 0;
848 | defaultConfigurationName = Release;
849 | };
850 | 900F6BEA230CF55D0018D22C /* Build configuration list for PBXNativeTarget "FaceRecognitionTests" */ = {
851 | isa = XCConfigurationList;
852 | buildConfigurations = (
853 | 900F6BEB230CF55D0018D22C /* Debug */,
854 | 900F6BEC230CF55D0018D22C /* Release */,
855 | );
856 | defaultConfigurationIsVisible = 0;
857 | defaultConfigurationName = Release;
858 | };
859 | 900F6BED230CF55D0018D22C /* Build configuration list for PBXNativeTarget "FaceRecognitionUITests" */ = {
860 | isa = XCConfigurationList;
861 | buildConfigurations = (
862 | 900F6BEE230CF55D0018D22C /* Debug */,
863 | 900F6BEF230CF55D0018D22C /* Release */,
864 | );
865 | defaultConfigurationIsVisible = 0;
866 | defaultConfigurationName = Release;
867 | };
868 | /* End XCConfigurationList section */
869 | };
870 | rootObject = 900F6BB7230CF55C0018D22C /* Project object */;
871 | }
872 |
--------------------------------------------------------------------------------