├── .gitignore ├── Converter ├── convert.py ├── evaluate.py └── requirements.txt ├── Example ├── Podfile ├── Podfile.lock ├── SwiftMaskDetection.xcodeproj │ ├── project.pbxproj │ ├── project.xcworkspace │ │ └── contents.xcworkspacedata │ └── xcshareddata │ │ └── xcschemes │ │ └── SwiftMaskDetection-Example.xcscheme ├── SwiftMaskDetection.xcworkspace │ ├── contents.xcworkspacedata │ └── xcshareddata │ │ └── IDEWorkspaceChecks.plist ├── SwiftMaskDetection │ ├── AppDelegate.swift │ ├── Base.lproj │ │ └── LaunchScreen.xib │ ├── BoundingBox.swift │ ├── Images.xcassets │ │ └── AppIcon.appiconset │ │ │ └── Contents.json │ ├── Info.plist │ ├── PreviewView.swift │ └── ViewController.swift └── Tests │ ├── Info.plist │ └── Tests.swift ├── LICENSE ├── README.md ├── SwiftMaskDetection.podspec ├── SwiftMaskDetection └── Classes │ ├── MaskDetectionVideoHelper.swift │ ├── MaskDetector.swift │ ├── MaskModel.mlmodel │ └── anchors.json └── _Pods.xcodeproj /.gitignore: -------------------------------------------------------------------------------- 1 | # Xcode 2 | build/ 3 | DerivedData/ 4 | *.pbxuser 5 | !default.pbxuser 6 | *.mode1v3 7 | !default.mode1v3 8 | *.mode2v3 9 | !default.mode2v3 10 | *.perspectivev3 11 | !default.perspectivev3 12 | xcuserdata/ 13 | .DS_Store 14 | *.xccheckout 15 | *.xcscmblueprint 16 | *.ipa 17 | *.dSYM.zip 18 | *.dSYM 19 | 20 | # Swift Package Manager 21 | Packages/ 22 | Package.pins 23 | .build/ 24 | 25 | # CocoaPods 26 | Pods/ 27 | 28 | # Python 29 | __pycache__/ 30 | .cache/ 31 | *.pyc 32 | -------------------------------------------------------------------------------- /Converter/convert.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import coremltools as ct 3 | from coremltools.models import MLModel 4 | import tensorflow as tf 5 | 6 | 7 | # Converts the AIZOO face mask detector (https://github.com/AIZOOTech/FaceMaskDetection) to CoreML 8 | def convert(args): 9 | print('Loading model: %s' % args.model) 10 | with open(args.model) as f: 11 | keras_model = tf.keras.models.model_from_json(f.read()) 12 | 13 | print('Loading weights: %s' % args.weights) 14 | keras_model.load_weights(args.weights) 15 | 16 | print('Converting to coreml') 17 | mlmodel = ct.convert(keras_model, 18 | inputs=[ct.ImageType(scale=1/255)], 19 | minimum_deployment_target=ct.target.iOS13) 20 | 21 | print('Renaming outputs') 22 | spec = mlmodel.get_spec() 23 | ct.models.utils.rename_feature(spec, 'Identity', 'output_scores') 24 | ct.models.utils.rename_feature(spec, 'Identity_1', 'output_bounds') 25 | 26 | out_path = args.output if args.output else '/tmp/MaskModel.mlmodel' 27 | print('Saving to: %s' % out_path) 28 | ct.models.utils.save_spec(spec, out_path) 29 | 30 | 31 | if __name__ == "__main__": 32 | parser = argparse.ArgumentParser() 33 | parser.add_argument('--model', required=True, 34 | help='Path to the Keras model file, e.g. face_mask_detection.json') 35 | parser.add_argument('--weights', required=True, 36 | help='Path to the Keras weights file, e.g. 
face_mask_detection.hdf5') 37 | parser.add_argument('--output', help='Path to write the CoreML model to') 38 | convert(parser.parse_args()) 39 | -------------------------------------------------------------------------------- /Converter/evaluate.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import coremltools as ct 3 | from coremltools.models import MLModel 4 | import json 5 | import numpy as np 6 | from pathlib import Path 7 | from PIL import Image, ImageDraw 8 | 9 | 10 | # Copied from: https://github.com/AIZOOTech/FaceMaskDetection/blob/master/utils/anchor_generator.py 11 | def generate_anchors(feature_map_sizes, anchor_sizes, anchor_ratios, offset=0.5): 12 | ''' 13 | generate anchors. 14 | :param feature_map_sizes: list of list, for example: [[40,40], [20,20]] 15 | :param anchor_sizes: list of list, for example: [[0.05, 0.075], [0.1, 0.15]] 16 | :param anchor_ratios: list of list, for example: [[1, 0.5], [1, 0.5]] 17 | :param offset: default to 0.5 18 | ''' 19 | anchor_bboxes = [] 20 | for idx, feature_size in enumerate(feature_map_sizes): 21 | cx = (np.linspace(0, feature_size[0] - 1, feature_size[0]) + 0.5) / feature_size[0] 22 | cy = (np.linspace(0, feature_size[1] - 1, feature_size[1]) + 0.5) / feature_size[1] 23 | cx_grid, cy_grid = np.meshgrid(cx, cy) 24 | cx_grid_expend = np.expand_dims(cx_grid, axis=-1) 25 | cy_grid_expend = np.expand_dims(cy_grid, axis=-1) 26 | center = np.concatenate((cx_grid_expend, cy_grid_expend), axis=-1) 27 | 28 | num_anchors = len(anchor_sizes[idx]) + len(anchor_ratios[idx]) - 1 29 | center_tiled = np.tile(center, (1, 1, 2* num_anchors)) 30 | anchor_width_heights = [] 31 | 32 | # different scales with the first aspect ratio 33 | for scale in anchor_sizes[idx]: 34 | ratio = anchor_ratios[idx][0] # select the first ratio 35 | width = scale * np.sqrt(ratio) 36 | height = scale / np.sqrt(ratio) 37 | anchor_width_heights.extend([-width / 2.0, -height / 2.0, width / 2.0, height / 2.0]) 38 | 39 | # the first scale, with different aspect ratios (except the first one) 40 | for ratio in anchor_ratios[idx][1:]: 41 | s1 = anchor_sizes[idx][0] # select the first scale 42 | width = s1 * np.sqrt(ratio) 43 | height = s1 / np.sqrt(ratio) 44 | anchor_width_heights.extend([-width / 2.0, -height / 2.0, width / 2.0, height / 2.0]) 45 | 46 | bbox_coords = center_tiled + np.array(anchor_width_heights) 47 | bbox_coords_reshape = bbox_coords.reshape((-1, 4)) 48 | anchor_bboxes.append(bbox_coords_reshape) 49 | return np.concatenate(anchor_bboxes, axis=0) 50 | 51 | 52 | # Copied from https://github.com/AIZOOTech/FaceMaskDetection/blob/master/utils/anchor_decode.py 53 | def decode_bbox(anchors, raw_outputs, variances=[0.1, 0.1, 0.2, 0.2]): 54 | ''' 55 | Decode the actual bbox according to the anchors. 
56 | The anchor value order is: [xmin, ymin, xmax, ymax]. 57 | :param anchors: numpy array with shape [batch, num_anchors, 4] 58 | :param raw_outputs: numpy array with the same shape as anchors 59 | :param variances: list of float, default=[0.1, 0.1, 0.2, 0.2] 60 | ''' 61 | anchor_centers_x = (anchors[:,0:1] + anchors[:,2:3]) / 2 62 | anchor_centers_y = (anchors[:,1:2] + anchors[:,3:]) / 2 63 | anchors_w = anchors[:,2:3] - anchors[:,0:1] 64 | anchors_h = anchors[:,3:] - anchors[:,1:2] 65 | raw_outputs_rescale = raw_outputs * np.array(variances) 66 | predict_center_x = raw_outputs_rescale[:,0:1] * anchors_w + anchor_centers_x 67 | predict_center_y = raw_outputs_rescale[:,1:2] * anchors_h + anchor_centers_y 68 | predict_w = np.exp(raw_outputs_rescale[:,2:3]) * anchors_w 69 | predict_h = np.exp(raw_outputs_rescale[:,3:]) * anchors_h 70 | predict_xmin = predict_center_x - predict_w / 2 71 | predict_ymin = predict_center_y - predict_h / 2 72 | predict_xmax = predict_center_x + predict_w / 2 73 | predict_ymax = predict_center_y + predict_h / 2 74 | return np.concatenate([predict_xmin, predict_ymin, predict_xmax, predict_ymax], axis=-1) 75 | 76 | 77 | # Copied from https://github.com/AIZOOTech/FaceMaskDetection/blob/master/utils/nms.py 78 | def single_class_non_max_suppression(bboxes, confidences, conf_thresh=0.2, iou_thresh=0.5, keep_top_k=-1): 79 | ''' 80 | Do NMS on a single class. 81 | Hint: for the specific class, given the bboxes and their confidences, 82 | 1) sort the bboxes by confidence from high to low; call this the set 83 | 2) select the bbox with the highest confidence, remove it from the set, and compute its IOU with the remaining bboxes 84 | 3) remove bboxes whose IOU is higher than iou_thresh from the set 85 | 4) repeat steps 2 and 3 until the set is empty. 86 | :param bboxes: numpy array of 2D, [num_bboxes, 4] 87 | :param confidences: numpy array of 1D.
[num_bboxes] 88 | ''' 89 | if len(bboxes) == 0: return [] 90 | 91 | conf_keep_idx = np.where(confidences > conf_thresh)[0] 92 | 93 | bboxes = bboxes[conf_keep_idx] 94 | confidences = confidences[conf_keep_idx] 95 | 96 | pick = [] 97 | xmin = bboxes[:, 0] 98 | ymin = bboxes[:, 1] 99 | xmax = bboxes[:, 2] 100 | ymax = bboxes[:, 3] 101 | 102 | area = (xmax - xmin + 1e-3) * (ymax - ymin + 1e-3) 103 | idxs = np.argsort(confidences) 104 | 105 | while len(idxs) > 0: 106 | last = len(idxs) - 1 107 | i = idxs[last] 108 | pick.append(i) 109 | 110 | # keep top k 111 | if keep_top_k != -1: 112 | if len(pick) >= keep_top_k: 113 | break 114 | 115 | overlap_xmin = np.maximum(xmin[i], xmin[idxs[:last]]) 116 | overlap_ymin = np.maximum(ymin[i], ymin[idxs[:last]]) 117 | overlap_xmax = np.minimum(xmax[i], xmax[idxs[:last]]) 118 | overlap_ymax = np.minimum(ymax[i], ymax[idxs[:last]]) 119 | overlap_w = np.maximum(0, overlap_xmax - overlap_xmin) 120 | overlap_h = np.maximum(0, overlap_ymax - overlap_ymin) 121 | overlap_area = overlap_w * overlap_h 122 | overlap_ratio = overlap_area / (area[idxs[:last]] + area[i] - overlap_area) 123 | 124 | need_to_be_deleted_idx = np.concatenate(([last], np.where(overlap_ratio > iou_thresh)[0])) 125 | idxs = np.delete(idxs, need_to_be_deleted_idx) 126 | return conf_keep_idx[pick] 127 | 128 | 129 | # Dumps anchors to JSON, which can be loaded by the Swift library (see SwiftMaskDetection/Classes/anchors.json). 130 | def dump_anchors(anchors, filename): 131 | array = [] 132 | for anchor in anchors: 133 | array.append([round(x, 5) for x in anchor]) 134 | s = json.dumps({'anchors': array}, separators=(',',':')) 135 | s = s.replace('[[', '[\n[').replace('],', '],\n') 136 | with open(filename, 'w') as out: 137 | out.write(s) 138 | print('Wrote %d anchors to: %s' % (len(array), filename)) 139 | 140 | 141 | # Anchor configuration.
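# A quick sanity check (illustrative; not part of the original script): each feature-map
# cell gets len(anchor_sizes[i]) + len(anchor_ratios[i]) - 1 = 2 + 3 - 1 = 4 anchors, so
# the configuration below yields (33*33 + 17*17 + 9*9 + 5*5 + 3*3) * 4 = 5972 anchors,
# matching the [1, 5972, 2] output shape noted in MaskDetector.swift:
#
#   assert generate_anchors(feature_map_sizes, anchor_sizes, anchor_ratios).shape == (5972, 4)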
142 | # Copied from: https://github.com/AIZOOTech/FaceMaskDetection/blob/master/keras_infer.py 143 | feature_map_sizes = [[33, 33], [17, 17], [9, 9], [5, 5], [3, 3]] 144 | anchor_sizes = [[0.04, 0.056], [0.08, 0.11], [0.16, 0.22], [0.32, 0.45], [0.64, 0.72]] 145 | anchor_ratios = [[1, 0.62, 0.42]] * 5 146 | anchors = generate_anchors(feature_map_sizes, anchor_sizes, anchor_ratios) 147 | 148 | 149 | def evaluate(args): 150 | print('Loading model: %s' % args.model) 151 | mlmodel = ct.models.MLModel(args.model) 152 | image = Image.open(args.image) 153 | result = mlmodel.predict({'data': image.resize((260, 260))}) 154 | bboxes = decode_bbox(anchors, result['output_bounds'][0]) 155 | max_labels = np.argmax(result['output_scores'][0], axis=1) 156 | max_scores = np.max(result['output_scores'][0], axis=1) 157 | keep_idxs = single_class_non_max_suppression( 158 | bboxes, max_scores, conf_thresh=args.conf_threshold, iou_thresh=args.iou_threshold) 159 | 160 | # Print the bounding boxes, labels, and scores 161 | label_names = {0: 'Mask', 1: 'No Mask'} 162 | colors = {0: 'green', 1: 'red'} 163 | for i in keep_idxs: 164 | print('%s %s %.3f' % (bboxes[i], label_names[max_labels[i]], max_scores[i])) 165 | 166 | # Draw predictions into the image 167 | draw = ImageDraw.Draw(image) 168 | for i in keep_idxs: 169 | x0, y0, x1, y1 = bboxes[i] 170 | x0 = max(0, x0 * image.width) 171 | y0 = max(0, y0 * image.height) 172 | x1 = min(image.width, x1 * image.width) 173 | y1 = min(image.height, y1 * image.height) 174 | color = 'gray' 175 | if max_scores[i] > 0.4: 176 | color = colors[max_labels[i]] 177 | draw.rectangle([x0, y0, x1, y1], outline=color, width=2) 178 | print('Writing to: /tmp/predictions.png') 179 | image.save('/tmp/predictions.png') 180 | 181 | if args.dump_anchors: 182 | dump_anchors(anchors, '/tmp/anchors.json') 183 | 184 | 185 | if __name__ == "__main__": 186 | parser = argparse.ArgumentParser() 187 | parser.add_argument('image', help='The image to evaluate') 188 | parser.add_argument('--model', required=True, help='Path to the mlmodel to evaluate') 189 | parser.add_argument('--conf_threshold', type=float, default=0.5) 190 | parser.add_argument('--iou_threshold', type=float, default=0.4) 191 | parser.add_argument('--dump_anchors', action='store_true', help='Write anchors to a JSON file') 192 | evaluate(parser.parse_args()) 193 | -------------------------------------------------------------------------------- /Converter/requirements.txt: -------------------------------------------------------------------------------- 1 | coremltools==4.0b2 2 | numpy==1.19.1 3 | Pillow==8.3.2 4 | -------------------------------------------------------------------------------- /Example/Podfile: -------------------------------------------------------------------------------- 1 | platform :ios, '13.0' 2 | use_frameworks! 
3 | 4 | target 'SwiftMaskDetection_Example' do 5 | pod 'SwiftMaskDetection', :path => '../' 6 | pod 'SnapKit' 7 | end 8 | -------------------------------------------------------------------------------- /Example/Podfile.lock: -------------------------------------------------------------------------------- 1 | PODS: 2 | - SnapKit (5.0.1) 3 | - SwiftMaskDetection (0.1.1) 4 | 5 | DEPENDENCIES: 6 | - SnapKit 7 | - SwiftMaskDetection (from `../`) 8 | 9 | SPEC REPOS: 10 | trunk: 11 | - SnapKit 12 | 13 | EXTERNAL SOURCES: 14 | SwiftMaskDetection: 15 | :path: "../" 16 | 17 | SPEC CHECKSUMS: 18 | SnapKit: 97b92857e3df3a0c71833cce143274bf6ef8e5eb 19 | SwiftMaskDetection: 6d8463e3cc32d00c2e4c403e0ee01c0cba11ecfa 20 | 21 | PODFILE CHECKSUM: 72d1c8eb42c5f3f47cc41f64736fb07d2e2b00d4 22 | 23 | COCOAPODS: 1.9.3 24 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 46; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 4034420C24E21FD900DE132F /* PreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4034420B24E21FD900DE132F /* PreviewView.swift */; }; 11 | 4034420E24E21FE500DE132F /* BoundingBox.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4034420D24E21FE500DE132F /* BoundingBox.swift */; }; 12 | 607FACD61AFB9204008FA782 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD51AFB9204008FA782 /* AppDelegate.swift */; }; 13 | 607FACD81AFB9204008FA782 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD71AFB9204008FA782 /* ViewController.swift */; }; 14 | 607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDC1AFB9204008FA782 /* Images.xcassets */; }; 15 | 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */; }; 16 | 607FACEC1AFB9204008FA782 /* Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACEB1AFB9204008FA782 /* Tests.swift */; }; 17 | F11F977A318B643BE8BD0E8C /* Pods_SwiftMaskDetection_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 636EF8584BF9F9C2F1C3B735 /* Pods_SwiftMaskDetection_Example.framework */; }; 18 | /* End PBXBuildFile section */ 19 | 20 | /* Begin PBXContainerItemProxy section */ 21 | 607FACE61AFB9204008FA782 /* PBXContainerItemProxy */ = { 22 | isa = PBXContainerItemProxy; 23 | containerPortal = 607FACC81AFB9204008FA782 /* Project object */; 24 | proxyType = 1; 25 | remoteGlobalIDString = 607FACCF1AFB9204008FA782; 26 | remoteInfo = SwiftMaskDetection; 27 | }; 28 | /* End PBXContainerItemProxy section */ 29 | 30 | /* Begin PBXFileReference section */ 31 | 0622D0E45043FC3AA72EA647 /* Pods-SwiftMaskDetection_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SwiftMaskDetection_Example.release.xcconfig"; path = "Target Support Files/Pods-SwiftMaskDetection_Example/Pods-SwiftMaskDetection_Example.release.xcconfig"; sourceTree = ""; }; 32 | 4034420B24E21FD900DE132F /* PreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreviewView.swift; sourceTree = ""; }; 33 | 4034420D24E21FE500DE132F /* BoundingBox.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
BoundingBox.swift; sourceTree = ""; }; 34 | 527C9F81626A74FDB2C3D9D4 /* Pods-SwiftMaskDetection_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SwiftMaskDetection_Example.debug.xcconfig"; path = "Target Support Files/Pods-SwiftMaskDetection_Example/Pods-SwiftMaskDetection_Example.debug.xcconfig"; sourceTree = ""; }; 35 | 607FACD01AFB9204008FA782 /* SwiftMaskDetection_Example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SwiftMaskDetection_Example.app; sourceTree = BUILT_PRODUCTS_DIR; }; 36 | 607FACD41AFB9204008FA782 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 37 | 607FACD51AFB9204008FA782 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 38 | 607FACD71AFB9204008FA782 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 39 | 607FACDC1AFB9204008FA782 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = ""; }; 40 | 607FACDF1AFB9204008FA782 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/LaunchScreen.xib; sourceTree = ""; }; 41 | 607FACE51AFB9204008FA782 /* SwiftMaskDetection_Tests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SwiftMaskDetection_Tests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 42 | 607FACEA1AFB9204008FA782 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 43 | 607FACEB1AFB9204008FA782 /* Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Tests.swift; sourceTree = ""; }; 44 | 636EF8584BF9F9C2F1C3B735 /* Pods_SwiftMaskDetection_Example.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_SwiftMaskDetection_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 45 | BF16B655544258389CE71B39 /* LICENSE */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = ""; }; 46 | CFB4890496D0BF84AD605700 /* SwiftMaskDetection.podspec */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = SwiftMaskDetection.podspec; path = ../SwiftMaskDetection.podspec; sourceTree = ""; }; 47 | E750506330C14A13760E148C /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 48 | /* End PBXFileReference section */ 49 | 50 | /* Begin PBXFrameworksBuildPhase section */ 51 | 607FACCD1AFB9204008FA782 /* Frameworks */ = { 52 | isa = PBXFrameworksBuildPhase; 53 | buildActionMask = 2147483647; 54 | files = ( 55 | F11F977A318B643BE8BD0E8C /* Pods_SwiftMaskDetection_Example.framework in Frameworks */, 56 | ); 57 | runOnlyForDeploymentPostprocessing = 0; 58 | }; 59 | 607FACE21AFB9204008FA782 /* Frameworks */ = { 60 | isa = PBXFrameworksBuildPhase; 61 | buildActionMask = 2147483647; 62 | files = ( 63 | ); 64 | runOnlyForDeploymentPostprocessing = 0; 65 | }; 66 | /* End PBXFrameworksBuildPhase section */ 67 | 68 | /* Begin PBXGroup section */ 69 | 607FACC71AFB9204008FA782 = { 70 | isa = PBXGroup; 71 | children 
= ( 72 | 607FACF51AFB993E008FA782 /* Podspec Metadata */, 73 | 607FACD21AFB9204008FA782 /* Example */, 74 | 607FACE81AFB9204008FA782 /* Tests */, 75 | 607FACD11AFB9204008FA782 /* Products */, 76 | AB5E4FC23C3D25E4FA2E2E91 /* Pods */, 77 | 7A03877A1B2441D17609B96B /* Frameworks */, 78 | ); 79 | sourceTree = ""; 80 | }; 81 | 607FACD11AFB9204008FA782 /* Products */ = { 82 | isa = PBXGroup; 83 | children = ( 84 | 607FACD01AFB9204008FA782 /* SwiftMaskDetection_Example.app */, 85 | 607FACE51AFB9204008FA782 /* SwiftMaskDetection_Tests.xctest */, 86 | ); 87 | name = Products; 88 | sourceTree = ""; 89 | }; 90 | 607FACD21AFB9204008FA782 /* Example */ = { 91 | isa = PBXGroup; 92 | children = ( 93 | 607FACD51AFB9204008FA782 /* AppDelegate.swift */, 94 | 4034420D24E21FE500DE132F /* BoundingBox.swift */, 95 | 4034420B24E21FD900DE132F /* PreviewView.swift */, 96 | 607FACD71AFB9204008FA782 /* ViewController.swift */, 97 | 607FACDC1AFB9204008FA782 /* Images.xcassets */, 98 | 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */, 99 | 607FACD31AFB9204008FA782 /* Supporting Files */, 100 | ); 101 | name = Example; 102 | path = SwiftMaskDetection; 103 | sourceTree = ""; 104 | }; 105 | 607FACD31AFB9204008FA782 /* Supporting Files */ = { 106 | isa = PBXGroup; 107 | children = ( 108 | 607FACD41AFB9204008FA782 /* Info.plist */, 109 | ); 110 | name = "Supporting Files"; 111 | sourceTree = ""; 112 | }; 113 | 607FACE81AFB9204008FA782 /* Tests */ = { 114 | isa = PBXGroup; 115 | children = ( 116 | 607FACEB1AFB9204008FA782 /* Tests.swift */, 117 | 607FACE91AFB9204008FA782 /* Supporting Files */, 118 | ); 119 | path = Tests; 120 | sourceTree = ""; 121 | }; 122 | 607FACE91AFB9204008FA782 /* Supporting Files */ = { 123 | isa = PBXGroup; 124 | children = ( 125 | 607FACEA1AFB9204008FA782 /* Info.plist */, 126 | ); 127 | name = "Supporting Files"; 128 | sourceTree = ""; 129 | }; 130 | 607FACF51AFB993E008FA782 /* Podspec Metadata */ = { 131 | isa = PBXGroup; 132 | children = ( 133 | CFB4890496D0BF84AD605700 /* SwiftMaskDetection.podspec */, 134 | E750506330C14A13760E148C /* README.md */, 135 | BF16B655544258389CE71B39 /* LICENSE */, 136 | ); 137 | name = "Podspec Metadata"; 138 | sourceTree = ""; 139 | }; 140 | 7A03877A1B2441D17609B96B /* Frameworks */ = { 141 | isa = PBXGroup; 142 | children = ( 143 | 636EF8584BF9F9C2F1C3B735 /* Pods_SwiftMaskDetection_Example.framework */, 144 | ); 145 | name = Frameworks; 146 | sourceTree = ""; 147 | }; 148 | AB5E4FC23C3D25E4FA2E2E91 /* Pods */ = { 149 | isa = PBXGroup; 150 | children = ( 151 | 527C9F81626A74FDB2C3D9D4 /* Pods-SwiftMaskDetection_Example.debug.xcconfig */, 152 | 0622D0E45043FC3AA72EA647 /* Pods-SwiftMaskDetection_Example.release.xcconfig */, 153 | ); 154 | path = Pods; 155 | sourceTree = ""; 156 | }; 157 | /* End PBXGroup section */ 158 | 159 | /* Begin PBXNativeTarget section */ 160 | 607FACCF1AFB9204008FA782 /* SwiftMaskDetection_Example */ = { 161 | isa = PBXNativeTarget; 162 | buildConfigurationList = 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SwiftMaskDetection_Example" */; 163 | buildPhases = ( 164 | 7735EB0E8A80DF9DE4EF0A62 /* [CP] Check Pods Manifest.lock */, 165 | 607FACCC1AFB9204008FA782 /* Sources */, 166 | 607FACCD1AFB9204008FA782 /* Frameworks */, 167 | 607FACCE1AFB9204008FA782 /* Resources */, 168 | 999062651A70C20643E05427 /* [CP] Embed Pods Frameworks */, 169 | ); 170 | buildRules = ( 171 | ); 172 | dependencies = ( 173 | ); 174 | name = SwiftMaskDetection_Example; 175 | productName = SwiftMaskDetection; 176 | productReference = 
607FACD01AFB9204008FA782 /* SwiftMaskDetection_Example.app */; 177 | productType = "com.apple.product-type.application"; 178 | }; 179 | 607FACE41AFB9204008FA782 /* SwiftMaskDetection_Tests */ = { 180 | isa = PBXNativeTarget; 181 | buildConfigurationList = 607FACF21AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SwiftMaskDetection_Tests" */; 182 | buildPhases = ( 183 | 607FACE11AFB9204008FA782 /* Sources */, 184 | 607FACE21AFB9204008FA782 /* Frameworks */, 185 | 607FACE31AFB9204008FA782 /* Resources */, 186 | ); 187 | buildRules = ( 188 | ); 189 | dependencies = ( 190 | 607FACE71AFB9204008FA782 /* PBXTargetDependency */, 191 | ); 192 | name = SwiftMaskDetection_Tests; 193 | productName = Tests; 194 | productReference = 607FACE51AFB9204008FA782 /* SwiftMaskDetection_Tests.xctest */; 195 | productType = "com.apple.product-type.bundle.unit-test"; 196 | }; 197 | /* End PBXNativeTarget section */ 198 | 199 | /* Begin PBXProject section */ 200 | 607FACC81AFB9204008FA782 /* Project object */ = { 201 | isa = PBXProject; 202 | attributes = { 203 | LastSwiftUpdateCheck = 0830; 204 | LastUpgradeCheck = 0830; 205 | ORGANIZATIONNAME = CocoaPods; 206 | TargetAttributes = { 207 | 607FACCF1AFB9204008FA782 = { 208 | CreatedOnToolsVersion = 6.3.1; 209 | LastSwiftMigration = 0900; 210 | }; 211 | 607FACE41AFB9204008FA782 = { 212 | CreatedOnToolsVersion = 6.3.1; 213 | LastSwiftMigration = 0900; 214 | TestTargetID = 607FACCF1AFB9204008FA782; 215 | }; 216 | }; 217 | }; 218 | buildConfigurationList = 607FACCB1AFB9204008FA782 /* Build configuration list for PBXProject "SwiftMaskDetection" */; 219 | compatibilityVersion = "Xcode 3.2"; 220 | developmentRegion = English; 221 | hasScannedForEncodings = 0; 222 | knownRegions = ( 223 | English, 224 | en, 225 | Base, 226 | ); 227 | mainGroup = 607FACC71AFB9204008FA782; 228 | productRefGroup = 607FACD11AFB9204008FA782 /* Products */; 229 | projectDirPath = ""; 230 | projectRoot = ""; 231 | targets = ( 232 | 607FACCF1AFB9204008FA782 /* SwiftMaskDetection_Example */, 233 | 607FACE41AFB9204008FA782 /* SwiftMaskDetection_Tests */, 234 | ); 235 | }; 236 | /* End PBXProject section */ 237 | 238 | /* Begin PBXResourcesBuildPhase section */ 239 | 607FACCE1AFB9204008FA782 /* Resources */ = { 240 | isa = PBXResourcesBuildPhase; 241 | buildActionMask = 2147483647; 242 | files = ( 243 | 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */, 244 | 607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */, 245 | ); 246 | runOnlyForDeploymentPostprocessing = 0; 247 | }; 248 | 607FACE31AFB9204008FA782 /* Resources */ = { 249 | isa = PBXResourcesBuildPhase; 250 | buildActionMask = 2147483647; 251 | files = ( 252 | ); 253 | runOnlyForDeploymentPostprocessing = 0; 254 | }; 255 | /* End PBXResourcesBuildPhase section */ 256 | 257 | /* Begin PBXShellScriptBuildPhase section */ 258 | 7735EB0E8A80DF9DE4EF0A62 /* [CP] Check Pods Manifest.lock */ = { 259 | isa = PBXShellScriptBuildPhase; 260 | buildActionMask = 2147483647; 261 | files = ( 262 | ); 263 | inputFileListPaths = ( 264 | ); 265 | inputPaths = ( 266 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock", 267 | "${PODS_ROOT}/Manifest.lock", 268 | ); 269 | name = "[CP] Check Pods Manifest.lock"; 270 | outputFileListPaths = ( 271 | ); 272 | outputPaths = ( 273 | "$(DERIVED_FILE_DIR)/Pods-SwiftMaskDetection_Example-checkManifestLockResult.txt", 274 | ); 275 | runOnlyForDeploymentPostprocessing = 0; 276 | shellPath = /bin/sh; 277 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > 
/dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; 278 | showEnvVarsInLog = 0; 279 | }; 280 | 999062651A70C20643E05427 /* [CP] Embed Pods Frameworks */ = { 281 | isa = PBXShellScriptBuildPhase; 282 | buildActionMask = 2147483647; 283 | files = ( 284 | ); 285 | inputPaths = ( 286 | "${PODS_ROOT}/Target Support Files/Pods-SwiftMaskDetection_Example/Pods-SwiftMaskDetection_Example-frameworks.sh", 287 | "${BUILT_PRODUCTS_DIR}/SnapKit/SnapKit.framework", 288 | "${BUILT_PRODUCTS_DIR}/SwiftMaskDetection/SwiftMaskDetection.framework", 289 | ); 290 | name = "[CP] Embed Pods Frameworks"; 291 | outputPaths = ( 292 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SnapKit.framework", 293 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/SwiftMaskDetection.framework", 294 | ); 295 | runOnlyForDeploymentPostprocessing = 0; 296 | shellPath = /bin/sh; 297 | shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-SwiftMaskDetection_Example/Pods-SwiftMaskDetection_Example-frameworks.sh\"\n"; 298 | showEnvVarsInLog = 0; 299 | }; 300 | /* End PBXShellScriptBuildPhase section */ 301 | 302 | /* Begin PBXSourcesBuildPhase section */ 303 | 607FACCC1AFB9204008FA782 /* Sources */ = { 304 | isa = PBXSourcesBuildPhase; 305 | buildActionMask = 2147483647; 306 | files = ( 307 | 607FACD81AFB9204008FA782 /* ViewController.swift in Sources */, 308 | 607FACD61AFB9204008FA782 /* AppDelegate.swift in Sources */, 309 | 4034420C24E21FD900DE132F /* PreviewView.swift in Sources */, 310 | 4034420E24E21FE500DE132F /* BoundingBox.swift in Sources */, 311 | ); 312 | runOnlyForDeploymentPostprocessing = 0; 313 | }; 314 | 607FACE11AFB9204008FA782 /* Sources */ = { 315 | isa = PBXSourcesBuildPhase; 316 | buildActionMask = 2147483647; 317 | files = ( 318 | 607FACEC1AFB9204008FA782 /* Tests.swift in Sources */, 319 | ); 320 | runOnlyForDeploymentPostprocessing = 0; 321 | }; 322 | /* End PBXSourcesBuildPhase section */ 323 | 324 | /* Begin PBXTargetDependency section */ 325 | 607FACE71AFB9204008FA782 /* PBXTargetDependency */ = { 326 | isa = PBXTargetDependency; 327 | target = 607FACCF1AFB9204008FA782 /* SwiftMaskDetection_Example */; 328 | targetProxy = 607FACE61AFB9204008FA782 /* PBXContainerItemProxy */; 329 | }; 330 | /* End PBXTargetDependency section */ 331 | 332 | /* Begin PBXVariantGroup section */ 333 | 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */ = { 334 | isa = PBXVariantGroup; 335 | children = ( 336 | 607FACDF1AFB9204008FA782 /* Base */, 337 | ); 338 | name = LaunchScreen.xib; 339 | sourceTree = ""; 340 | }; 341 | /* End PBXVariantGroup section */ 342 | 343 | /* Begin XCBuildConfiguration section */ 344 | 607FACED1AFB9204008FA782 /* Debug */ = { 345 | isa = XCBuildConfiguration; 346 | buildSettings = { 347 | ALWAYS_SEARCH_USER_PATHS = NO; 348 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 349 | CLANG_CXX_LIBRARY = "libc++"; 350 | CLANG_ENABLE_MODULES = YES; 351 | CLANG_ENABLE_OBJC_ARC = YES; 352 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 353 | CLANG_WARN_BOOL_CONVERSION = YES; 354 | CLANG_WARN_COMMA = YES; 355 | CLANG_WARN_CONSTANT_CONVERSION = YES; 356 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 357 | CLANG_WARN_EMPTY_BODY = YES; 358 | CLANG_WARN_ENUM_CONVERSION = YES; 359 | CLANG_WARN_INFINITE_RECURSION = YES; 360 | CLANG_WARN_INT_CONVERSION 
= YES; 361 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 362 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 363 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 364 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 365 | CLANG_WARN_STRICT_PROTOTYPES = YES; 366 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 367 | CLANG_WARN_UNREACHABLE_CODE = YES; 368 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 369 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 370 | COPY_PHASE_STRIP = NO; 371 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 372 | ENABLE_STRICT_OBJC_MSGSEND = YES; 373 | ENABLE_TESTABILITY = YES; 374 | GCC_C_LANGUAGE_STANDARD = gnu99; 375 | GCC_DYNAMIC_NO_PIC = NO; 376 | GCC_NO_COMMON_BLOCKS = YES; 377 | GCC_OPTIMIZATION_LEVEL = 0; 378 | GCC_PREPROCESSOR_DEFINITIONS = ( 379 | "DEBUG=1", 380 | "$(inherited)", 381 | ); 382 | GCC_SYMBOLS_PRIVATE_EXTERN = NO; 383 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 384 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 385 | GCC_WARN_UNDECLARED_SELECTOR = YES; 386 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 387 | GCC_WARN_UNUSED_FUNCTION = YES; 388 | GCC_WARN_UNUSED_VARIABLE = YES; 389 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 390 | MTL_ENABLE_DEBUG_INFO = YES; 391 | ONLY_ACTIVE_ARCH = YES; 392 | SDKROOT = iphoneos; 393 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 394 | }; 395 | name = Debug; 396 | }; 397 | 607FACEE1AFB9204008FA782 /* Release */ = { 398 | isa = XCBuildConfiguration; 399 | buildSettings = { 400 | ALWAYS_SEARCH_USER_PATHS = NO; 401 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; 402 | CLANG_CXX_LIBRARY = "libc++"; 403 | CLANG_ENABLE_MODULES = YES; 404 | CLANG_ENABLE_OBJC_ARC = YES; 405 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 406 | CLANG_WARN_BOOL_CONVERSION = YES; 407 | CLANG_WARN_COMMA = YES; 408 | CLANG_WARN_CONSTANT_CONVERSION = YES; 409 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 410 | CLANG_WARN_EMPTY_BODY = YES; 411 | CLANG_WARN_ENUM_CONVERSION = YES; 412 | CLANG_WARN_INFINITE_RECURSION = YES; 413 | CLANG_WARN_INT_CONVERSION = YES; 414 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 415 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 416 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 417 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 418 | CLANG_WARN_STRICT_PROTOTYPES = YES; 419 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 420 | CLANG_WARN_UNREACHABLE_CODE = YES; 421 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 422 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; 423 | COPY_PHASE_STRIP = NO; 424 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 425 | ENABLE_NS_ASSERTIONS = NO; 426 | ENABLE_STRICT_OBJC_MSGSEND = YES; 427 | GCC_C_LANGUAGE_STANDARD = gnu99; 428 | GCC_NO_COMMON_BLOCKS = YES; 429 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 430 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 431 | GCC_WARN_UNDECLARED_SELECTOR = YES; 432 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 433 | GCC_WARN_UNUSED_FUNCTION = YES; 434 | GCC_WARN_UNUSED_VARIABLE = YES; 435 | IPHONEOS_DEPLOYMENT_TARGET = 9.3; 436 | MTL_ENABLE_DEBUG_INFO = NO; 437 | SDKROOT = iphoneos; 438 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 439 | VALIDATE_PRODUCT = YES; 440 | }; 441 | name = Release; 442 | }; 443 | 607FACF01AFB9204008FA782 /* Debug */ = { 444 | isa = XCBuildConfiguration; 445 | baseConfigurationReference = 527C9F81626A74FDB2C3D9D4 /* Pods-SwiftMaskDetection_Example.debug.xcconfig */; 446 | buildSettings = { 447 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 448 | DEVELOPMENT_TEAM = ""; 449 | INFOPLIST_FILE = SwiftMaskDetection/Info.plist; 450 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 451 | LD_RUNPATH_SEARCH_PATHS 
= "$(inherited) @executable_path/Frameworks"; 452 | MODULE_NAME = ExampleApp; 453 | PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.SwiftMaskDetection${DEVELOPMENT_TEAM}"; 454 | PRODUCT_NAME = "$(TARGET_NAME)"; 455 | SWIFT_SWIFT3_OBJC_INFERENCE = Default; 456 | SWIFT_VERSION = 5.0; 457 | TARGETED_DEVICE_FAMILY = "1,2"; 458 | }; 459 | name = Debug; 460 | }; 461 | 607FACF11AFB9204008FA782 /* Release */ = { 462 | isa = XCBuildConfiguration; 463 | baseConfigurationReference = 0622D0E45043FC3AA72EA647 /* Pods-SwiftMaskDetection_Example.release.xcconfig */; 464 | buildSettings = { 465 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 466 | DEVELOPMENT_TEAM = ""; 467 | INFOPLIST_FILE = SwiftMaskDetection/Info.plist; 468 | IPHONEOS_DEPLOYMENT_TARGET = 13.0; 469 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 470 | MODULE_NAME = ExampleApp; 471 | PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.SwiftMaskDetection${DEVELOPMENT_TEAM}"; 472 | PRODUCT_NAME = "$(TARGET_NAME)"; 473 | SWIFT_SWIFT3_OBJC_INFERENCE = Default; 474 | SWIFT_VERSION = 5.0; 475 | TARGETED_DEVICE_FAMILY = "1,2"; 476 | }; 477 | name = Release; 478 | }; 479 | 607FACF31AFB9204008FA782 /* Debug */ = { 480 | isa = XCBuildConfiguration; 481 | buildSettings = { 482 | FRAMEWORK_SEARCH_PATHS = ( 483 | "$(PLATFORM_DIR)/Developer/Library/Frameworks", 484 | "$(inherited)", 485 | ); 486 | GCC_PREPROCESSOR_DEFINITIONS = ( 487 | "DEBUG=1", 488 | "$(inherited)", 489 | ); 490 | INFOPLIST_FILE = Tests/Info.plist; 491 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 492 | PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.$(PRODUCT_NAME:rfc1034identifier)"; 493 | PRODUCT_NAME = "$(TARGET_NAME)"; 494 | SWIFT_SWIFT3_OBJC_INFERENCE = Default; 495 | SWIFT_VERSION = 4.0; 496 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SwiftMaskDetection_Example.app/SwiftMaskDetection_Example"; 497 | }; 498 | name = Debug; 499 | }; 500 | 607FACF41AFB9204008FA782 /* Release */ = { 501 | isa = XCBuildConfiguration; 502 | buildSettings = { 503 | FRAMEWORK_SEARCH_PATHS = ( 504 | "$(PLATFORM_DIR)/Developer/Library/Frameworks", 505 | "$(inherited)", 506 | ); 507 | INFOPLIST_FILE = Tests/Info.plist; 508 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 509 | PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.$(PRODUCT_NAME:rfc1034identifier)"; 510 | PRODUCT_NAME = "$(TARGET_NAME)"; 511 | SWIFT_SWIFT3_OBJC_INFERENCE = Default; 512 | SWIFT_VERSION = 4.0; 513 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SwiftMaskDetection_Example.app/SwiftMaskDetection_Example"; 514 | }; 515 | name = Release; 516 | }; 517 | /* End XCBuildConfiguration section */ 518 | 519 | /* Begin XCConfigurationList section */ 520 | 607FACCB1AFB9204008FA782 /* Build configuration list for PBXProject "SwiftMaskDetection" */ = { 521 | isa = XCConfigurationList; 522 | buildConfigurations = ( 523 | 607FACED1AFB9204008FA782 /* Debug */, 524 | 607FACEE1AFB9204008FA782 /* Release */, 525 | ); 526 | defaultConfigurationIsVisible = 0; 527 | defaultConfigurationName = Release; 528 | }; 529 | 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "SwiftMaskDetection_Example" */ = { 530 | isa = XCConfigurationList; 531 | buildConfigurations = ( 532 | 607FACF01AFB9204008FA782 /* Debug */, 533 | 607FACF11AFB9204008FA782 /* Release */, 534 | ); 535 | defaultConfigurationIsVisible = 0; 536 | defaultConfigurationName = Release; 537 | }; 538 | 607FACF21AFB9204008FA782 /* Build configuration list for PBXNativeTarget 
"SwiftMaskDetection_Tests" */ = { 539 | isa = XCConfigurationList; 540 | buildConfigurations = ( 541 | 607FACF31AFB9204008FA782 /* Debug */, 542 | 607FACF41AFB9204008FA782 /* Release */, 543 | ); 544 | defaultConfigurationIsVisible = 0; 545 | defaultConfigurationName = Release; 546 | }; 547 | /* End XCConfigurationList section */ 548 | }; 549 | rootObject = 607FACC81AFB9204008FA782 /* Project object */; 550 | } 551 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection.xcodeproj/xcshareddata/xcschemes/SwiftMaskDetection-Example.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 29 | 35 | 36 | 37 | 38 | 39 | 45 | 46 | 48 | 54 | 55 | 56 | 57 | 58 | 64 | 65 | 66 | 67 | 68 | 69 | 80 | 82 | 88 | 89 | 90 | 91 | 92 | 93 | 99 | 101 | 107 | 108 | 109 | 110 | 112 | 113 | 116 | 117 | 118 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | 3 | @UIApplicationMain 4 | class AppDelegate: UIResponder, UIApplicationDelegate { 5 | var window: UIWindow? 6 | 7 | func application( 8 | _ app: UIApplication, 9 | didFinishLaunchingWithOptions opts: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { 10 | window = UIWindow(frame: UIScreen.main.bounds) 11 | window!.rootViewController = ViewController() 12 | window!.makeKeyAndVisible() 13 | return true 14 | } 15 | } 16 | 17 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection/Base.lproj/LaunchScreen.xib: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection/BoundingBox.swift: -------------------------------------------------------------------------------- 1 | // From https://github.com/hollance/YOLO-CoreML-MPSNNGraph 2 | // Copyright (c) 2017-2018 M.I. Hollemans. MIT License. 
3 | import Foundation 4 | import UIKit 5 | 6 | 7 | class BoundingBox { 8 | let shapeLayer: CAShapeLayer 9 | let textLayer: CATextLayer 10 | 11 | init() { 12 | shapeLayer = CAShapeLayer() 13 | shapeLayer.fillColor = UIColor.clear.cgColor 14 | shapeLayer.lineWidth = 4 15 | shapeLayer.isHidden = true 16 | 17 | textLayer = CATextLayer() 18 | textLayer.foregroundColor = UIColor.black.cgColor 19 | textLayer.isHidden = true 20 | textLayer.contentsScale = UIScreen.main.scale 21 | textLayer.fontSize = 14 22 | textLayer.font = UIFont(name: "Avenir", size: textLayer.fontSize) 23 | } 24 | 25 | func addToLayer(_ parent: CALayer) { 26 | parent.addSublayer(shapeLayer) 27 | parent.addSublayer(textLayer) 28 | } 29 | 30 | func show(frame: CGRect, label: String, color: UIColor) { 31 | CATransaction.setDisableActions(true) 32 | 33 | let path = UIBezierPath(rect: frame) 34 | shapeLayer.path = path.cgPath 35 | shapeLayer.strokeColor = color.cgColor 36 | shapeLayer.isHidden = false 37 | 38 | textLayer.string = label 39 | textLayer.backgroundColor = color.cgColor 40 | textLayer.isHidden = false 41 | 42 | let attributes = [ 43 | NSAttributedString.Key.font: textLayer.font as Any 44 | ] 45 | 46 | let textRect = label.boundingRect(with: CGSize(width: 400, height: 100), 47 | options: .truncatesLastVisibleLine, 48 | attributes: attributes, context: nil) 49 | let textSize = CGSize(width: textRect.width + 12, height: textRect.height) 50 | let textOrigin = CGPoint(x: frame.origin.x - 2, y: frame.origin.y - textSize.height) 51 | textLayer.frame = CGRect(origin: textOrigin, size: textSize) 52 | } 53 | 54 | func hide() { 55 | shapeLayer.isHidden = true 56 | textLayer.isHidden = true 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection/Images.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ios-marketing", 45 | "size" : "1024x1024", 46 | "scale" : "1x" 47 | } 48 | ], 49 | "info" : { 50 | "version" : 1, 51 | "author" : "xcode" 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | NSCameraUsageDescription 26 | To perform face mask detection on live video. 
27 | UILaunchStoryboardName 28 | LaunchScreen 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection/PreviewView.swift: -------------------------------------------------------------------------------- 1 | import AVFoundation 2 | import UIKit 3 | 4 | 5 | class PreviewView: UIView { 6 | var previewLayer: AVCaptureVideoPreviewLayer { 7 | return layer as! AVCaptureVideoPreviewLayer 8 | } 9 | 10 | init() { 11 | super.init(frame: .zero) 12 | previewLayer.videoGravity = .resizeAspectFill 13 | } 14 | 15 | required init?(coder: NSCoder) { 16 | fatalError() 17 | } 18 | 19 | override class var layerClass: AnyClass { 20 | return AVCaptureVideoPreviewLayer.self 21 | } 22 | 23 | /// Converts normalized image coordinates to coordinates in this view, taking into account that 24 | /// some of the image may be cropped due to .resizeAspectFill gravity. 25 | func toViewCoords(_ rect: CGRect, mirrored: Bool) -> CGRect { 26 | var bound = rect 27 | if mirrored { 28 | // Flip x-axis 29 | bound = bound 30 | .applying(CGAffineTransform(scaleX: -1, y: 1)) 31 | .applying(CGAffineTransform(translationX: 1, y: 0)) 32 | } 33 | 34 | // Adjust to match the aspect ratio of the preview 35 | let inputAspect: CGFloat = 9 / 16 36 | let viewAspect = bounds.width / bounds.height 37 | if inputAspect >= viewAspect { 38 | bound = bound 39 | .applying(CGAffineTransform(scaleX: inputAspect / viewAspect, y: 1)) 40 | .applying(CGAffineTransform(translationX: 0.5 * (1 - inputAspect / viewAspect), y: 0)) 41 | } else { 42 | bound = bound 43 | .applying(CGAffineTransform(scaleX: 1, y: viewAspect / inputAspect)) 44 | .applying(CGAffineTransform(translationX: 0, y: 0.5 * (1 - viewAspect / inputAspect))) 45 | } 46 | 47 | // Scale to view size 48 | return bound.applying(CGAffineTransform(scaleX: bounds.width, y: bounds.height)) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /Example/SwiftMaskDetection/ViewController.swift: -------------------------------------------------------------------------------- 1 | import AVFoundation 2 | import SnapKit 3 | import SwiftMaskDetection 4 | import UIKit 5 | 6 | 7 | class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 8 | // Change this to .back to use the back camera: 9 | let camera: AVCaptureDevice.Position = .front 10 | 11 | let maxFaces = 8 12 | let session = AVCaptureSession() 13 | let output = AVCaptureVideoDataOutput() 14 | let sessionQueue = DispatchQueue(label: "capture_session") 15 | let detectionQueue = DispatchQueue(label: "detection", qos: .userInitiated, 16 | attributes: [], autoreleaseFrequency: .workItem) 17 | let previewView = PreviewView() 18 | var boxes: [BoundingBox] = [] 19 | var detector: MaskDetectionVideoHelper! 
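// `detector` is implicitly unwrapped because it is assigned once in viewDidLoad(), before the capture session starts delivering frames to captureOutput(_:didOutput:from:).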
20 | 21 | override func viewDidLoad() { 22 | super.viewDidLoad() 23 | detector = MaskDetectionVideoHelper(maskDetector: MaskDetector(maxResults: maxFaces)) 24 | view.backgroundColor = .white 25 | configureCaptureSession() 26 | configureUI() 27 | } 28 | 29 | override func viewWillAppear(_ animated: Bool) { 30 | super.viewWillAppear(animated) 31 | startCaptureSession() 32 | } 33 | 34 | override func viewDidDisappear(_ animated: Bool) { 35 | super.viewDidDisappear(animated) 36 | stopCaptureSession() 37 | } 38 | 39 | // MARK: AVCaptureVideoDataOutputSampleBufferDelegate 40 | 41 | func captureOutput(_ output: AVCaptureOutput, 42 | didOutput buffer: CMSampleBuffer, 43 | from connection: AVCaptureConnection) { 44 | if let results = try? detector.detectInFrame(buffer) { 45 | DispatchQueue.main.async { 46 | self.showResults(results) 47 | } 48 | } 49 | } 50 | 51 | // MARK: UI 52 | 53 | private func configureUI() { 54 | view.addSubview(previewView) 55 | previewView.previewLayer.session = session 56 | for _ in 0.. [the remainder of ViewController.swift — the bounding box setup, showResults, and the capture session methods — was eaten by markup stripping in this dump, along with the first line of the next file] -------------------------------------------------------------------------------- /Example/Tests/Info.plist: -------------------------------------------------------------------------------- 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | BNDL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleSignature 20 | ???? 21 | CFBundleVersion 22 | 1 23 | 24 | 25 | -------------------------------------------------------------------------------- /Example/Tests/Tests.swift: -------------------------------------------------------------------------------- 1 | import XCTest 2 | 3 | class Tests: XCTestCase { 4 | 5 | override func setUp() { 6 | super.setUp() 7 | // Put setup code here. This method is called before the invocation of each test method in the class. 8 | } 9 | 10 | override func tearDown() { 11 | // Put teardown code here. This method is called after the invocation of each test method in the class. 12 | super.tearDown() 13 | } 14 | 15 | func testExample() { 16 | // This is an example of a functional test case. 17 | XCTAssert(true, "Pass") 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2020 Keith Ito 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE.
20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SwiftMaskDetection 2 | 3 | 6 | [![Version](https://img.shields.io/cocoapods/v/SwiftMaskDetection.svg?style=flat)](https://cocoapods.org/pods/SwiftMaskDetection) 7 | [![License](https://img.shields.io/cocoapods/l/SwiftMaskDetection.svg?style=flat)](https://cocoapods.org/pods/SwiftMaskDetection) 8 | [![Platform](https://img.shields.io/cocoapods/p/SwiftMaskDetection.svg?style=flat)](https://cocoapods.org/pods/SwiftMaskDetection) 9 | 10 | 11 | SwiftMaskDetection is a face mask detection library with a Swift interface. 12 | 13 | It is a port of [AIZOO's FaceMaskDetection model](https://github.com/AIZOOTech/FaceMaskDetection) to 14 | CoreML. The model runs at over 30fps on recent iPhones and iPads. For more information on the model and training data, 15 | please see https://github.com/AIZOOTech/FaceMaskDetection (AIZOO did all the hard work). 16 | 17 | ## Demo 18 | 19 | ![Demo video](https://data.keithito.com/maskdetection/detection1.gif) 20 | ![Demo video](https://data.keithito.com/maskdetection/detection2.gif) 21 | 22 | To run the demo: 23 | 24 | 1. Make sure you have [Xcode](https://developer.apple.com/support/xcode/) and [CocoaPods](https://cocoapods.org/). 25 | 26 | 2. Clone this repo and open the example project: 27 | ``` 28 | git clone https://github.com/keithito/SwiftMaskDetection.git 29 | cd SwiftMaskDetection/Example 30 | pod install 31 | open SwiftMaskDetection.xcworkspace 32 | ``` 33 | 34 | 3. Run the project from XCode on a device (it needs the camera) 35 | * If you see an error that signing needs a development team, open the "SwiftMaskDetection" project, click on 36 | the "Signing & Capabilities" tab, and select an option from the "Team" menu. 37 | 38 | 39 | ## Installation 40 | 41 | SwiftMaskDetection is available through [CocoaPods](https://cocoapods.org). To install it, add the following 42 | line to your Podfile: 43 | 44 | ```ruby 45 | pod 'SwiftMaskDetection' 46 | ``` 47 | 48 | 49 | If you don't use CocoaPods, you can simply copy the files in [SwiftMaskDetection/Classes](https://github.com/keithito/SwiftMaskDetection/tree/master/SwiftMaskDetection/Classes) into your Xcode project. 50 | 51 | 52 | ## Usage 53 | 54 | ### Images 55 | 56 | To recognize an image: 57 | 58 | ```swift 59 | import SwiftMaskDetection 60 | 61 | let detector = MaskDetector() 62 | let image = UIImage(named: "my_photo")! 63 | if let results = try? detector.detectMasks(cgImage: image.cgImage!) { 64 | // Do something with the results. 65 | } 66 | ``` 67 | 68 | The image **must be 260x260 pixels**. `detectMasks` supports `CGImage`, `CIImage`, and `CVPixelBuffer` inputs and 69 | returns an array of `Results`, one for each detected face: 70 | 71 | ```swift 72 | public struct Result { 73 | /// The status of the detection (.mask or .noMask) 74 | public let status: Status 75 | 76 | /// The bounding box of the face in normalized coordinates (the top-left corner of the image 77 | /// is [0, 0], and the bottom-right corner is [1, 1]). 78 | public let bound: CGRect 79 | 80 | /// Value between 0 and 1 representing the confidence in the result 81 | public let confidence: Float 82 | } 83 | ``` 84 | 85 | 86 | ### Video 87 | 88 | `MaskDetectionVideoHelper` may come in handy for running on live video. 
First, create the helper: 89 | 90 | ```swift 91 | let helper = MaskDetectionVideoHelper(maskDetector: MaskDetector()) 92 | ``` 93 | 94 | Then call `detectInFrame` on each video frame: 95 | 96 | ```swift 97 | if let results = try? helper.detectInFrame(cmSampleBuffer) { 98 | // Do something with the results. 99 | } 100 | 101 | ``` 102 | 103 | You don't need to resize the image to 260x260; the helper does that for you. See the example app's 104 | [ViewController](./Example/SwiftMaskDetection/ViewController.swift) for a complete example. 105 | 106 | 107 | 108 | ## Requirements 109 | * iOS 13 or later 110 | 111 | 112 | 113 | ## License 114 | 115 | SwiftMaskDetection is available under the MIT license. See the LICENSE file for more info. 116 | 117 | The face mask detection model is (c) 2020 AIZOOTech and is also available under the 118 | [MIT License](https://github.com/AIZOOTech/FaceMaskDetection/blob/6068769c7a6/LICENSE). 119 | -------------------------------------------------------------------------------- /SwiftMaskDetection.podspec: -------------------------------------------------------------------------------- 1 | Pod::Spec.new do |s| 2 | s.name = 'SwiftMaskDetection' 3 | s.version = '0.1.1' 4 | s.summary = 'A face mask detection library written in Swift.' 5 | s.homepage = 'https://github.com/keithito/SwiftMaskDetection' 6 | s.license = { :type => 'MIT', :file => 'LICENSE' } 7 | s.author = { 'Keith Ito' => 'kito@kito.us' } 8 | s.source = { :git => 'https://github.com/keithito/SwiftMaskDetection.git', :tag => s.version.to_s } 9 | s.source_files = 'SwiftMaskDetection/Classes/**/*.{swift,mlmodel}' 10 | s.resources = 'SwiftMaskDetection/Classes/**/*.json' 11 | s.pod_target_xcconfig = { 'COREML_CODEGEN_LANGUAGE' => 'Swift' } 12 | s.swift_version = '5.0' 13 | s.ios.deployment_target = '10.0' 14 | s.description = <<-END 15 | SwiftMaskDetection is a port of the AIZOO FaceMaskDetection model to CoreML, with a Swift interface. 16 | It is capable of running in real-time on the iPhone and iPad. 17 | END 18 | end 19 | -------------------------------------------------------------------------------- /SwiftMaskDetection/Classes/MaskDetectionVideoHelper.swift: -------------------------------------------------------------------------------- 1 | // Copyright © 2020 Keith Ito. MIT License. 2 | import AVFoundation 3 | import CoreImage 4 | 5 | 6 | /// Helper to assist with real-time detection in a video stream. You can call this from the 7 | /// captureOutput function in a AVCaptureVideoDataOutputSampleBufferDelegate to feed frames 8 | /// to the MaskDetector. See the Example for usage. 9 | @available(iOS 13.0, *) 10 | public class MaskDetectionVideoHelper { 11 | /// Controls how input images are resized to square 260x260 images for the model. 12 | public enum ResizeMode { 13 | /// Images are cropped along the longer dimension, with equal amounts removed from each side. 14 | /// This doesn't distort the image, but recognition will only happen in the center square. 15 | case centerCrop 16 | /// Images are stretched to be square. Recognition can take place in the entire image, but 17 | /// there will be distortion, which may affect model performance. 
18 | case stretch 19 | } 20 | 21 | private let resizeMode: ResizeMode 22 | private let maskDetector: MaskDetector 23 | 24 | /// - Parameters: 25 | /// - maskDetector: the MaskDetector to use for detection 26 | /// - resizeMode: controls how input images are made square if they are not already 27 | public init(maskDetector: MaskDetector, resizeMode: ResizeMode = .centerCrop) { 28 | self.maskDetector = maskDetector 29 | self.resizeMode = resizeMode 30 | } 31 | 32 | /// Runs the detector on the given CMSampleBuffer. 33 | /// This blocks while detection is being performed and should not be called on the main thread. 34 | public func detectInFrame(_ buffer: CMSampleBuffer) throws -> [MaskDetector.Result] { 35 | guard let image = CMSampleBufferGetImageBuffer(buffer) else { return [] } 36 | let width = CVPixelBufferGetWidth(image) 37 | let height = CVPixelBufferGetHeight(image) 38 | let transform: CGAffineTransform 39 | if resizeMode == .centerCrop { 40 | let scale = CGFloat(MaskDetector.InputImageSize) / CGFloat(min(width, height)) 41 | transform = CGAffineTransform(scaleX: scale, y: scale) 42 | } else { 43 | let scaleX = CGFloat(MaskDetector.InputImageSize) / CGFloat(width) 44 | let scaleY = CGFloat(MaskDetector.InputImageSize) / CGFloat(height) 45 | transform = CGAffineTransform(scaleX: scaleX, y: scaleY) 46 | } 47 | 48 | let ciImage = CIImage(cvPixelBuffer: image) 49 | .transformed(by: transform, highQualityDownsample: true) 50 | let results = try maskDetector.detectMasks(ciImage: ciImage) 51 | 52 | if resizeMode == .centerCrop { 53 | // Transform bounding box coordinates back to the input image 54 | let inputAspect = CGFloat(width) / CGFloat(height) 55 | return results.map { res in 56 | let bound = res.bound 57 | .applying(CGAffineTransform(scaleX: 1, y: inputAspect)) 58 | .applying(CGAffineTransform(translationX: 0, y: 0.5 * (1 - inputAspect))) 59 | return MaskDetector.Result(status: res.status, bound: bound, confidence: res.confidence) 60 | } 61 | } else { 62 | return results 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /SwiftMaskDetection/Classes/MaskDetector.swift: -------------------------------------------------------------------------------- 1 | // Copyright © 2020 Keith Ito. MIT License. 2 | // 3 | // Portions of this file are based on https://github.com/AIZOOTech/FaceMaskDetection 4 | // Copyright (c) 2020 AIZOOTech. MIT License. 5 | import AVFoundation 6 | import CoreImage 7 | import Vision 8 | 9 | 10 | /// Detects faces in an image and whether or not the face has a mask on it. 11 | @available(iOS 13.0, *) 12 | public class MaskDetector { 13 | public enum Status { 14 | /// The person is wearing a mask 15 | case mask 16 | /// The person is not wearing a mask 17 | case noMask 18 | } 19 | 20 | /// A face mask detection result 21 | public struct Result { 22 | /// The status of the detection (e.g. mask/noMask) 23 | public let status: Status 24 | 25 | /// The bounding box of the face in normalized coordinates (the top-left corner of the image 26 | /// is [0, 0], and the bottom-right corner is [1, 1]). 27 | public let bound: CGRect 28 | 29 | /// Value between 0 and 1 representing the confidence in the result 30 | public let confidence: Float 31 | } 32 | 33 | /// Images sent to the model must have a height and width equal to this. 
34 |   public static let InputImageSize = 260
35 | 
36 |   private let minConfidence: Float
37 |   private let iouThreshold: Float
38 |   private let maxResults: Int
39 |   // Don't return a result unless the best class confidence is a factor of this better than the
40 |   // other class confidence. TODO: Consider making this a parameter to init?
41 |   private let margin: Float = 5
42 |   private let mlModel = MaskModel()
43 |   private let model: VNCoreMLModel
44 |   private let anchors = loadAnchors()
45 | 
46 |   /// - Parameters:
47 |   ///   - minConfidence: minimum confidence for returned results
48 |   ///   - iouThreshold: intersection over union threshold for non-max suppression
49 |   ///   - maxResults: maximum number of results to return
50 |   public init(minConfidence: Float=0.8, maxResults: Int=10, iouThreshold: Float=0.2) {
51 |     self.minConfidence = minConfidence
52 |     self.maxResults = maxResults
53 |     self.iouThreshold = iouThreshold
54 |     model = try! VNCoreMLModel(for: mlModel.model)
55 |   }
56 | 
57 |   /// Detects faces with masks or not in the input image. This blocks while detection is
58 |   /// being performed and should not be called on the main thread.
59 |   /// - Parameters:
60 |   ///   - cvPixelBuffer: A 260x260 CVPixelBuffer
61 |   ///   - orientation: The orientation of the input image (default .up)
62 |   /// - Returns: An array of detection results, one for each face
63 |   public func detectMasks(cvPixelBuffer: CVPixelBuffer,
64 |                           orientation: CGImagePropertyOrientation = .up) throws -> [Result] {
65 |     return try detectMasks(handler: VNImageRequestHandler(cvPixelBuffer: cvPixelBuffer,
66 |                                                           orientation: orientation))
67 |   }
68 | 
69 |   /// Detects faces with masks or not in the input image. This blocks while detection is
70 |   /// being performed and should not be called on the main thread.
71 |   /// - Parameters:
72 |   ///   - cgImage: A 260x260 CGImage
73 |   ///   - orientation: The orientation of the input image (default .up)
74 |   /// - Returns: An array of detection results, one for each face
75 |   public func detectMasks(cgImage: CGImage,
76 |                           orientation: CGImagePropertyOrientation = .up) throws -> [Result] {
77 |     return try detectMasks(handler: VNImageRequestHandler(cgImage: cgImage,
78 |                                                           orientation: orientation))
79 |   }
80 | 
81 |   /// Detects faces with masks or not in the input image. This blocks while detection is
82 |   /// being performed and should not be called on the main thread.
83 |   /// - Parameters:
84 |   ///   - ciImage: A 260x260 CIImage
85 |   ///   - orientation: The orientation of the input image (default .up)
86 |   /// - Returns: An array of detection results, one for each face
87 |   public func detectMasks(ciImage: CIImage,
88 |                           orientation: CGImagePropertyOrientation = .up) throws -> [Result] {
89 |     return try detectMasks(handler: VNImageRequestHandler(ciImage: ciImage,
90 |                                                           orientation: orientation))
91 |   }
92 | 
93 |   private func detectMasks(handler: VNImageRequestHandler) throws -> [Result] {
94 |     let request = VNCoreMLRequest(model: model)
95 |     try handler.perform([request])
96 |     guard let results = request.results as? [VNCoreMLFeatureValueObservation],
97 |           results.count == 2,
98 |           results[0].featureName == "output_bounds",
99 |           results[1].featureName == "output_scores",
100 |           let boundOutputs = results[0].featureValue.multiArrayValue,
101 |           let confOutputs = results[1].featureValue.multiArrayValue,
102 |           confOutputs.dataType == .float32,
103 |           boundOutputs.dataType == .float32,
104 |           confOutputs.shape == [1, NSNumber(value: anchors.count), 2],
105 |           boundOutputs.shape == [1, NSNumber(value: anchors.count), 4] else {
106 |       print("Unexpected result from CoreML!")
107 |       return []
108 |     }
109 | 
110 |     // Model has 2 outputs:
111 |     //   1. Confidences [1,5972,2]: confidence for each anchor for each class (mask, no_mask)
112 |     //   2. Bounds [1,5972,4]: encoded bounding boxes for each anchor (see decodeBound)
113 |     let confPtr = UnsafeMutablePointer<Float32>(OpaquePointer(confOutputs.dataPointer))
114 |     let boundPtr = UnsafeMutablePointer<Float32>(OpaquePointer(boundOutputs.dataPointer))
115 |     let confStrides = confOutputs.strides.map { $0.intValue }
116 |     let boundStrides = boundOutputs.strides.map { $0.intValue }
117 |     var detections: [Result] = []
118 |     for i in 0..<anchors.count {
119 |       let maskConf = confPtr[i * confStrides[1]]
120 |       let noMaskConf = confPtr[i * confStrides[1] + confStrides[2]]
121 |       if max(maskConf, noMaskConf) > minConfidence {
122 |         let offset = i * boundStrides[1]
123 |         let rawBound: [Float] = [
124 |           boundPtr[offset],
125 |           boundPtr[offset + 1 * boundStrides[2]],
126 |           boundPtr[offset + 2 * boundStrides[2]],
127 |           boundPtr[offset + 3 * boundStrides[2]],
128 |         ]
129 |         let bound = decodeBound(anchor: anchors[i], rawBound: rawBound)
130 |         if maskConf > noMaskConf * margin {
131 |           detections.append(Result(status: .mask, bound: bound, confidence: maskConf))
132 |         } else if noMaskConf > maskConf * margin {
133 |           detections.append(Result(status: .noMask, bound: bound, confidence: noMaskConf))
134 |         }
135 |       }
136 |     }
137 |     return nonMaxSuppression(inputs: detections,
138 |                              iouThreshold: iouThreshold,
139 |                              maxResults: maxResults)
140 |   }
141 | }
142 | 
143 | // Anchor bounds as generated by the python code in evaluate.py. These must match the anchors that
144 | // the model was trained with. We dump them from the python code and load them here.
145 | @available(iOS 13.0, *)
146 | private func loadAnchors() -> [[Double]] {
147 |   let path = Bundle(for: MaskDetector.self).path(forResource: "anchors", ofType: "json")!
148 |   let data = try! Data(contentsOf: URL(fileURLWithPath: path))
149 |   let json = try! JSONSerialization.jsonObject(with: data, options: []) as! [String: Any]
150 |   return json["anchors"] as! [[Double]]
151 | }
152 | 
153 | // Decodes the bound output from the model based on the anchor it is for. The model output is a
154 | // 4D vector: the first 2 components are the delta from the anchor center to the bound center and
155 | // the last 2 are the log of the ratio of the bound size to the anchor size (variances 0.1, 0.2).
156 | private func decodeBound(anchor: [Double], rawBound: [Float]) -> CGRect {
157 |   let anchorW = anchor[2] - anchor[0]
158 |   let anchorH = anchor[3] - anchor[1]
159 |   let anchorCenterX = anchor[0] + 0.5 * anchorW
160 |   let anchorCenterY = anchor[1] + 0.5 * anchorH
161 |   let cx = Double(rawBound[0]) * 0.1 * anchorW + anchorCenterX
162 |   let cy = Double(rawBound[1]) * 0.1 * anchorH + anchorCenterY
163 |   let w = exp(Double(rawBound[2]) * 0.2) * anchorW
164 |   let h = exp(Double(rawBound[3]) * 0.2) * anchorH
165 |   return CGRect(x: CGFloat(cx - w / 2),
166 |                 y: CGFloat(cy - h / 2),
167 |                 width: CGFloat(w),
168 |                 height: CGFloat(h))
169 | }
170 | 
171 | 
172 | // Performs non-max suppression with a configurable overlap threshold.
173 | @available(iOS 13.0, *)
174 | private func nonMaxSuppression(inputs: [MaskDetector.Result],
175 |                                iouThreshold: Float,
176 |                                maxResults: Int) -> [MaskDetector.Result] {
177 |   var outputs: [MaskDetector.Result] = []
178 |   let inputsByConfidenceDesc = inputs.sorted { $0.confidence > $1.confidence }
179 |   for result in inputsByConfidenceDesc {
180 |     if !hasOverlap(result, with: outputs, iouThreshold: iouThreshold) {
181 |       outputs.append(result)
182 |       if outputs.count >= maxResults {
183 |         break
184 |       }
185 |     }
186 |   }
187 |   return outputs
188 | }
189 | 
190 | @available(iOS 13.0, *)
191 | private func hasOverlap(_ result: MaskDetector.Result,
192 |                         with others: [MaskDetector.Result],
193 |                         iouThreshold: Float) -> Bool {
194 |   let resultArea = result.bound.width * result.bound.height
195 |   for other in others {
196 |     let intersection = areaOfIntersection(result.bound, other.bound)
197 |     if intersection > 0 {
198 |       let union = resultArea + other.bound.width * other.bound.height - intersection
199 |       if Float(intersection / union) >= iouThreshold {
200 |         return true
201 |       }
202 |     }
203 |   }
204 |   return false
205 | }
206 | 
207 | private func areaOfIntersection(_ a: CGRect, _ b: CGRect) -> CGFloat {
208 |   let maxMinX = max(a.minX, b.minX)
209 |   let minMaxX = min(a.maxX, b.maxX)
210 |   let maxMinY = max(a.minY, b.minY)
211 |   let minMaxY = min(a.maxY, b.maxY)
212 |   return max(0, minMaxX - maxMinX) * max(0, minMaxY - maxMinY)
213 | }
214 | 
--------------------------------------------------------------------------------
/SwiftMaskDetection/Classes/MaskModel.mlmodel:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/keithito/SwiftMaskDetection/b98f2176c0eae131da12b18e835d50ea996c21e2/SwiftMaskDetection/Classes/MaskModel.mlmodel
--------------------------------------------------------------------------------
/_Pods.xcodeproj:
--------------------------------------------------------------------------------
1 | Example/Pods/Pods.xcodeproj
--------------------------------------------------------------------------------
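
To complement the README's `detectInFrame` snippet above, here is a minimal sketch of wiring `MaskDetectionVideoHelper` into an `AVCaptureSession`. It is illustrative only: the `CameraController` class, the queue label, and the camera configuration are assumptions and not part of the library; the Example app's ViewController remains the authoritative reference.

```swift
import AVFoundation
import SwiftMaskDetection

// Illustrative sketch (not the Example app's code): feed camera frames to the
// helper from the AVCaptureVideoDataOutputSampleBufferDelegate callback.
@available(iOS 13.0, *)
final class CameraController: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
  private let session = AVCaptureSession()
  private let helper = MaskDetectionVideoHelper(maskDetector: MaskDetector())
  // detectInFrame blocks, so frames must be delivered on a background queue.
  private let videoQueue = DispatchQueue(label: "com.example.video")  // hypothetical label

  func start() {
    guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera,
                                               for: .video,
                                               position: .front),
          let input = try? AVCaptureDeviceInput(device: camera),
          session.canAddInput(input) else { return }
    session.addInput(input)

    let output = AVCaptureVideoDataOutput()
    output.setSampleBufferDelegate(self, queue: videoQueue)
    guard session.canAddOutput(output) else { return }
    session.addOutput(output)
    session.startRunning()
  }

  // Called on videoQueue once per frame; safe to block here.
  func captureOutput(_ output: AVCaptureOutput,
                     didOutput sampleBuffer: CMSampleBuffer,
                     from connection: AVCaptureConnection) {
    guard let results = try? helper.detectInFrame(sampleBuffer) else { return }
    for result in results {
      // result.bound is normalized to the frame; result.status is .mask or .noMask.
      print(result.status, result.confidence, result.bound)
    }
  }
}
```

Note that with the default `.centerCrop` resize mode, `detectInFrame` maps the returned bounds back into the full frame's normalized coordinates, so they can be used directly to position overlays on a preview view.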