├── FaceDetection.gif
├── LYFaceDetection
│   ├── LYFaceDetection
│   │   ├── test.jpg
│   │   ├── OpenGL
│   │   │   ├── FaceTextureShader.fsh
│   │   │   ├── FaceTextureShader.vsh
│   │   │   ├── FaceDetectionShader.vsh
│   │   │   ├── LYShaderManager.h
│   │   │   ├── FaceDetectionShader.fsh
│   │   │   └── LYShaderManager.m
│   │   ├── ViewController.h
│   │   ├── AppDelegate.h
│   │   ├── FaceDetectionView.h
│   │   ├── main.m
│   │   ├── LYFaceDetector.h
│   │   ├── Camera
│   │   │   ├── LYCameraManager.h
│   │   │   └── LYCameraManager.m
│   │   ├── Info.plist
│   │   ├── Base.lproj
│   │   │   ├── LaunchScreen.storyboard
│   │   │   └── Main.storyboard
│   │   ├── LYFaceDetector.m
│   │   ├── Assets.xcassets
│   │   │   └── AppIcon.appiconset
│   │   │       └── Contents.json
│   │   ├── AppDelegate.m
│   │   ├── ViewController.m
│   │   └── FaceDetectionView.m
│   └── LYFaceDetection.xcodeproj
│       ├── project.xcworkspace
│       │   ├── contents.xcworkspacedata
│       │   └── xcuserdata
│       │       └── jacky.xcuserdatad
│       │           └── UserInterfaceState.xcuserstate
│       ├── xcuserdata
│       │   └── jacky.xcuserdatad
│       │       ├── xcschemes
│       │       │   └── xcschememanagement.plist
│       │       └── xcdebugger
│       │           └── Breakpoints_v2.xcbkptlist
│       └── project.pbxproj
├── README.md
└── .gitignore
/FaceDetection.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LustySwimmer/LYFaceDetection/HEAD/FaceDetection.gif
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/test.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LustySwimmer/LYFaceDetection/HEAD/LYFaceDetection/LYFaceDetection/test.jpg
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/OpenGL/FaceTextureShader.fsh:
--------------------------------------------------------------------------------
1 | varying lowp vec2 texCoordVarying;
2 |
3 | uniform sampler2D inputTexture;
4 |
5 | void main()
6 | {
7 | gl_FragColor = texture2D(inputTexture, texCoordVarying);
8 | }
9 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:LYFaceDetection.xcodeproj">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/OpenGL/FaceTextureShader.vsh:
--------------------------------------------------------------------------------
1 | attribute vec4 aPosition;
2 | attribute vec2 aTexCoordinate;
3 |
4 | varying lowp vec2 texCoordVarying;
5 |
6 | void main()
7 | {
8 | texCoordVarying = aTexCoordinate;
9 | gl_Position = aPosition;
10 | }
11 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection.xcodeproj/project.xcworkspace/xcuserdata/jacky.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LustySwimmer/LYFaceDetection/HEAD/LYFaceDetection/LYFaceDetection.xcodeproj/project.xcworkspace/xcuserdata/jacky.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/OpenGL/FaceDetectionShader.vsh:
--------------------------------------------------------------------------------
1 | attribute vec4 aPosition;
2 | attribute vec2 aTexCoordinate;
3 |
4 | varying lowp vec2 texCoordVarying;
5 |
6 | uniform mat4 rotateMatrix;
7 |
8 | void main()
9 | {
10 | texCoordVarying = aTexCoordinate;
11 | gl_Position = rotateMatrix * aPosition;
12 | }
13 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface ViewController : UIViewController
12 |
13 |
14 | @end
15 |
16 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/FaceDetectionView.h:
--------------------------------------------------------------------------------
1 | //
2 | // FaceDetectionView.h
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface FaceDetectionView : UIView
12 |
13 | - (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer;
14 |
15 | - (UIImage *)snapshot;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/LYFaceDetector.h:
--------------------------------------------------------------------------------
1 | //
2 | // LYFaceDetector.h
3 | // OpenGLESPracticeDemo
4 | //
5 | // Created by LustySwimmer on 2018/1/18.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <CoreImage/CoreImage.h>
10 |
11 | @interface LYFaceDetector : NSObject
12 |
13 | + (void)detectCVPixelBuffer:(CVPixelBufferRef)pixelBuffer completionHandler:(void(^)(CIFaceFeature *result, CIImage *ciImage))completion;
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection.xcodeproj/xcuserdata/jacky.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>SchemeUserState</key>
6 |     <dict>
7 |         <key>LYFaceDetection.xcscheme</key>
8 |         <dict>
9 |             <key>orderHint</key>
10 |             <integer>0</integer>
11 |         </dict>
12 |     </dict>
13 | </dict>
14 | </plist>
15 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/Camera/LYCameraManager.h:
--------------------------------------------------------------------------------
1 | //
2 | // LYCameraManager.h
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <AVFoundation/AVFoundation.h>
11 |
12 | @interface LYCameraManager : NSObject
13 |
14 | + (instancetype)cameraManagerWithSampleBufferDelegate:(id<AVCaptureVideoDataOutputSampleBufferDelegate>)delegate;
15 |
16 | - (void)switchCamera;
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection.xcodeproj/xcuserdata/jacky.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/OpenGL/LYShaderManager.h:
--------------------------------------------------------------------------------
1 | //
2 | // LYShaderManager.h
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <OpenGLES/ES2/gl.h>
11 | #import <OpenGLES/ES2/glext.h>
12 |
13 | @interface LYShaderManager : NSObject
14 |
15 | @property (nonatomic, assign) GLuint program;
16 |
17 | - (instancetype)initWithVertexShaderFileName:(NSString *)vertexFileName fragmentFileName:(NSString *)fragmentFileName;
18 |
19 | - (GLint)getUniformLocation:(const GLchar*) name;
20 |
21 | - (GLuint)getAttributeLocation:(const GLchar *)name;
22 |
23 | - (void)useProgram;
24 |
25 | @end
26 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/OpenGL/FaceDetectionShader.fsh:
--------------------------------------------------------------------------------
1 | varying highp vec2 texCoordVarying;
2 | precision mediump float;
3 |
4 | uniform sampler2D SamplerY;
5 | uniform sampler2D SamplerUV;
6 |
7 | void main()
8 | {
9 | mediump vec3 yuv;
10 | lowp vec3 rgb;
11 | mediump mat3 convert = mat3(1.164, 1.164, 1.164,   // BT.709 YUV -> RGB, column-major
12 | 0.0, -0.213, 2.112,
13 | 1.793, -0.533, 0.0);
14 | // Center the chroma around 0; the 16/255 luma offset is commented out for full-range video
15 | yuv.x = (texture2D(SamplerY, texCoordVarying).r);// - (16.0/255.0));
16 | yuv.yz = (texture2D(SamplerUV, texCoordVarying).rg - vec2(0.5, 0.5));
17 |
18 | rgb = convert * yuv;
19 |
20 | gl_FragColor = vec4(rgb,1);
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # LYFaceDetection
2 | A sample that recognizes faces with CoreImage and renders the camera feed with OpenGL ES. A short sketch of how the pieces fit together is at the end of this README.
3 | ![FaceDetection](FaceDetection.gif)
4 | You can get the resulting image by calling the snapshot method of FaceDetectionView and save it to the photo library:
5 | ```
6 | UIImage *image = [self.faceDetectionView snapshot];
7 | if (image) {
8 |     ALAuthorizationStatus authStatus = [ALAssetsLibrary authorizationStatus];
9 |     if (authStatus == ALAuthorizationStatusRestricted || authStatus == ALAuthorizationStatusDenied) {
10 |         // No photo-library permission
11 |         return;
12 |     }
13 |     [[[ALAssetsLibrary alloc] init] writeImageToSavedPhotosAlbum:image.CGImage metadata:nil completionBlock:^(NSURL *assetURL, NSError *error) {
14 |         if (!error) {
15 |             NSLog(@"Image saved successfully");
16 |         }
17 |     }];
18 | }
19 | ```
20 |
21 | # This is a face-detection demo built with CoreImage and OpenGL
22 | A more detailed write-up is available at the blog link below:
23 | [More details in blog](https://www.jianshu.com/p/028af518c781)
24 |
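25 | # How the pieces fit together
26 |
27 | The following is a condensed sketch of the capture-and-detect flow implemented in ViewController.m and LYFaceDetector.m (simplified here, with error handling omitted):
28 | ```
29 | // Every camera frame is delivered through the sample-buffer delegate
30 | // and handed to the OpenGL view for rendering and face detection.
31 | - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
32 |     CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
33 |     [self.faceDetectionView displayPixelBuffer:pixelBuffer];
34 | }
35 |
36 | // Inside LYFaceDetector: CoreImage face detection on the pixel buffer.
37 | CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer];
38 | CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
39 |                                           context:nil
40 |                                           options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
41 | NSArray *faces = [detector featuresInImage:ciImage options:nil];
42 | // The largest face's bounds are then flipped and scaled into view coordinates,
43 | // and FaceDetectionView draws an image texture over that rect.
44 | ```
45 |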
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 |
5 | ## Build generated
6 | build/
7 | DerivedData/
8 |
9 | ## Various settings
10 | *.pbxuser
11 | !default.pbxuser
12 | *.mode1v3
13 | !default.mode1v3
14 | *.mode2v3
15 | !default.mode2v3
16 | *.perspectivev3
17 | !default.perspectivev3
18 | xcuserdata/
19 |
20 | ## Other
21 | *.moved-aside
22 | *.xccheckout
23 | *.xcscmblueprint
24 |
25 | ## Obj-C/Swift specific
26 | *.hmap
27 | *.ipa
28 | *.dSYM.zip
29 | *.dSYM
30 |
31 | # CocoaPods
32 | #
33 | # We recommend against adding the Pods directory to your .gitignore. However
34 | # you should judge for yourself, the pros and cons are mentioned at:
35 | # https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
36 | #
37 | # Pods/
38 |
39 | # Carthage
40 | #
41 | # Add this line if you want to avoid checking in source code from Carthage dependencies.
42 | # Carthage/Checkouts
43 |
44 | Carthage/Build
45 |
46 | # fastlane
47 | #
48 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
49 | # screenshots whenever they are needed.
50 | # For more information about the recommended setup visit:
51 | # https://docs.fastlane.tools/best-practices/source-control/#source-control
52 |
53 | fastlane/report.xml
54 | fastlane/Preview.html
55 | fastlane/screenshots
56 | fastlane/test_output
57 |
58 | # Code Injection
59 | #
60 | # After new code Injection tools there's a generated folder /iOSInjectionProject
61 | # https://github.com/johnno1962/injectionforxcode
62 |
63 | iOSInjectionProject/
64 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>CFBundleDevelopmentRegion</key>
6 |     <string>$(DEVELOPMENT_LANGUAGE)</string>
7 |     <key>CFBundleDisplayName</key>
8 |     <string>LYFaceDetection</string>
9 |     <key>CFBundleExecutable</key>
10 |     <string>$(EXECUTABLE_NAME)</string>
11 |     <key>CFBundleIdentifier</key>
12 |     <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
13 |     <key>CFBundleInfoDictionaryVersion</key>
14 |     <string>6.0</string>
15 |     <key>CFBundleName</key>
16 |     <string>$(PRODUCT_NAME)</string>
17 |     <key>CFBundlePackageType</key>
18 |     <string>APPL</string>
19 |     <key>CFBundleShortVersionString</key>
20 |     <string>1.0</string>
21 |     <key>CFBundleVersion</key>
22 |     <string>1</string>
23 |     <key>LSRequiresIPhoneOS</key>
24 |     <true/>
25 |     <key>NSCameraUsageDescription</key>
26 |     <string>Please allow the app to use your camera</string>
27 |     <key>NSPhotoLibraryAddUsageDescription</key>
28 |     <string>Please allow the app to access your photo library</string>
29 |     <key>UILaunchStoryboardName</key>
30 |     <string>LaunchScreen</string>
31 |     <key>UIMainStoryboardFile</key>
32 |     <string>Main</string>
33 |     <key>UIRequiredDeviceCapabilities</key>
34 |     <array>
35 |         <string>armv7</string>
36 |     </array>
37 |     <key>UISupportedInterfaceOrientations</key>
38 |     <array>
39 |         <string>UIInterfaceOrientationPortrait</string>
40 |     </array>
41 |     <key>UISupportedInterfaceOrientations~ipad</key>
42 |     <array>
43 |         <string>UIInterfaceOrientationPortrait</string>
44 |         <string>UIInterfaceOrientationPortraitUpsideDown</string>
45 |         <string>UIInterfaceOrientationLandscapeLeft</string>
46 |         <string>UIInterfaceOrientationLandscapeRight</string>
47 |     </array>
48 | </dict>
49 | </plist>
50 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/LYFaceDetector.m:
--------------------------------------------------------------------------------
1 | //
2 | // LYFaceDetector.m
3 | // OpenGLESPracticeDemo
4 | //
5 | // Created by LustySwimmer on 2018/1/18.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import "LYFaceDetector.h"
10 |
11 | @implementation LYFaceDetector
12 |
13 | + (void)detectCVPixelBuffer:(CVPixelBufferRef)pixelBuffer completionHandler:(void (^)(CIFaceFeature *, CIImage *))completion {
14 | if (pixelBuffer) {
15 | // [[CIImage alloc] initWithImage:image];
16 | CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer];
17 | NSString *accuracy = CIDetectorAccuracyLow;
18 | NSDictionary *options = [NSDictionary dictionaryWithObject:accuracy forKey:CIDetectorAccuracy];
19 | CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:options];
20 | NSArray *featuresArray = [detector featuresInImage:ciImage options:nil];
21 | CIFaceFeature *chosenFaceFeature = [self bestFaceFeatureInFeatureArray:featuresArray];
22 | !completion ?: completion(chosenFaceFeature, ciImage);
23 | } else {
24 | !completion ?: completion(nil, nil);
25 | }
26 | }
27 |
28 | + (CIFaceFeature *)bestFaceFeatureInFeatureArray:(NSArray *)featureArray {
29 | // Pick the face feature with the largest bounding-box area
30 | CGFloat maxFaceSquare = 0.0;
31 | CIFaceFeature *chosenFaceFeature = nil;
32 | for (CIFaceFeature *faceFeature in featureArray) {
33 | CGRect bounds = faceFeature.bounds;
34 | CGFloat currentFaceSquare = CGRectGetWidth(bounds) * CGRectGetHeight(bounds);
35 | if (currentFaceSquare > maxFaceSquare) {
36 | maxFaceSquare = currentFaceSquare;
37 | chosenFaceFeature = faceFeature;
38 | }
39 | }
40 | return chosenFaceFeature;
41 | }
42 |
43 | @end
44 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | }
88 | ],
89 | "info" : {
90 | "version" : 1,
91 | "author" : "xcode"
92 | }
93 | }
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | return YES;
21 | }
22 |
23 |
24 | - (void)applicationWillResignActive:(UIApplication *)application {
25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
27 | }
28 |
29 |
30 | - (void)applicationDidEnterBackground:(UIApplication *)application {
31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
33 | }
34 |
35 |
36 | - (void)applicationWillEnterForeground:(UIApplication *)application {
37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
38 | }
39 |
40 |
41 | - (void)applicationDidBecomeActive:(UIApplication *)application {
42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
43 | }
44 |
45 |
46 | - (void)applicationWillTerminate:(UIApplication *)application {
47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
48 | }
49 |
50 |
51 | @end
52 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/ViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import <AVFoundation/AVFoundation.h>
10 | #import <AssetsLibrary/AssetsLibrary.h>
11 | #import "ViewController.h"
12 | #import "FaceDetectionView.h"
13 | #import "LYCameraManager.h"
14 |
15 | @interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
16 |
17 | @property (nonatomic, strong) LYCameraManager *cameraManager;
18 | @property (weak, nonatomic) IBOutlet FaceDetectionView *faceDetectionView;
19 |
20 | @end
21 |
22 | @implementation ViewController
23 |
24 | - (void)viewDidLoad {
25 | [super viewDidLoad];
26 | [self startCapture];
27 | }
28 |
29 | - (IBAction)startCapture {
30 | self.cameraManager = [LYCameraManager cameraManagerWithSampleBufferDelegate:self];
31 | }
32 |
33 | - (IBAction)switchCamera {
34 | if (!self.cameraManager) { return; }
35 | [self.cameraManager switchCamera];
36 | [self animationCamera];
37 | }
38 |
39 | - (IBAction)screenshot {
40 | if (!self.cameraManager) { return; }
41 | UIImage *image = [self.faceDetectionView snapshot];
42 | if (image) {
43 | ALAuthorizationStatus authStatus = [ALAssetsLibrary authorizationStatus];
44 | if (authStatus == ALAuthorizationStatusRestricted || authStatus == ALAuthorizationStatusDenied){
45 | // No photo-library permission
46 | return;
47 | }
48 | [[[ALAssetsLibrary alloc] init] writeImageToSavedPhotosAlbum:image.CGImage metadata:nil completionBlock:^(NSURL *assetURL, NSError *error) {
49 | if (!error) {
50 | NSLog(@"Image saved successfully");
51 | }
52 | }];
53 | }
54 | }
55 |
56 | - (void)animationCamera {
57 | CATransition *animation = [CATransition animation];
58 | animation.duration = .5f;
59 | animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
60 | animation.type = @"oglFlip";
61 | animation.subtype = kCATransitionFromRight;
62 | [self.faceDetectionView.layer addAnimation:animation forKey:nil];
63 | }
64 |
65 | #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
66 |
67 | - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
68 | CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
69 | [self.faceDetectionView displayPixelBuffer:pixelBuffer];
70 |
71 | }
72 |
73 | @end
74 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/Camera/LYCameraManager.m:
--------------------------------------------------------------------------------
1 | //
2 | // LYCameraManager.m
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import "LYCameraManager.h"
10 |
11 | @interface LYCameraManager() {
12 | dispatch_queue_t processQueue;
13 | }
14 |
15 | @property (nonatomic, strong) AVCaptureSession *captureSession;
16 |
17 | @property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;
18 |
19 | @property (nonatomic, strong) AVCaptureVideoDataOutput *captureDeviceOutput;
20 |
21 | @end
22 |
23 | @implementation LYCameraManager
24 |
25 | + (instancetype)cameraManagerWithSampleBufferDelegate:(id<AVCaptureVideoDataOutputSampleBufferDelegate>)delegate {
26 | return [[self alloc] initWithSampleBufferDelegate:delegate];
27 | }
28 |
29 | - (instancetype)initWithSampleBufferDelegate:(id<AVCaptureVideoDataOutputSampleBufferDelegate>)delegate {
30 | if (self = [super init]) {
31 | self.captureSession = [[AVCaptureSession alloc] init];
32 | [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
33 |
34 | AVCaptureDevice *captureDevice = nil;
35 | NSArray *captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
36 | for (AVCaptureDevice *device in captureDevices) {
37 | if (device.position == AVCaptureDevicePositionBack) {
38 | captureDevice = device;
39 | break;
40 | }
41 | }
42 | self.captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:nil];
43 |
44 | if ([self.captureSession canAddInput:self.captureDeviceInput]) {
45 | [self.captureSession addInput:self.captureDeviceInput];
46 | }
47 |
48 | self.captureDeviceOutput = [[AVCaptureVideoDataOutput alloc] init];
49 | [self.captureDeviceOutput setAlwaysDiscardsLateVideoFrames:YES];
50 |
51 | processQueue = dispatch_queue_create("LYCameraManager.videoProcessQueue", DISPATCH_QUEUE_SERIAL); // AVCaptureVideoDataOutput requires a serial queue so frames are delivered in order
52 | [self.captureDeviceOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
53 | [self.captureDeviceOutput setSampleBufferDelegate:delegate queue:processQueue];
54 |
55 | if ([self.captureSession canAddOutput:self.captureDeviceOutput]) {
56 | [self.captureSession addOutput:self.captureDeviceOutput];
57 | }
58 |
59 | AVCaptureConnection *captureConnection = [self.captureDeviceOutput connectionWithMediaType:AVMediaTypeVideo];
60 | [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
61 |
62 | [self.captureSession startRunning];
63 | }
64 | return self;
65 | }
66 |
67 | - (void)switchCamera {
68 | NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
69 | if (cameraCount > 1) {
70 | AVCaptureDevice *newCamera = nil;
71 | AVCaptureDeviceInput *newInput = nil;
72 | AVCaptureDevicePosition position = [[self.captureDeviceInput device] position];
73 | if (position == AVCaptureDevicePositionFront){
74 | newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
75 | }else {
76 | newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
77 | }
78 | newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:nil];
79 | if (newInput != nil) {
80 | [self.captureSession beginConfiguration];
81 | [self.captureSession removeInput:self.captureDeviceInput];
82 | if ([self.captureSession canAddInput:newInput]) {
83 | [self.captureSession addInput:newInput];
84 | self.captureDeviceInput = newInput;
85 | }else {
86 | [self.captureSession addInput:self.captureDeviceInput];
87 | }
88 | AVCaptureConnection *captureConnection = [self.captureDeviceOutput connectionWithMediaType:AVMediaTypeVideo];
89 | [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
90 | [self.captureSession commitConfiguration];
91 | }
92 | }
93 | }
94 |
95 | - (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{
96 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
97 | for ( AVCaptureDevice *device in devices )
98 | if ( device.position == position ) return device;
99 | return nil;
100 | }
101 |
102 | @end
103 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/OpenGL/LYShaderManager.m:
--------------------------------------------------------------------------------
1 | //
2 | // LYShaderManager.m
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 |
9 | #import "LYShaderManager.h"
10 |
11 | @implementation LYShaderManager
12 |
13 | - (instancetype)initWithVertexShaderFileName:(NSString *)vertexFileName fragmentFileName:(NSString *)fragmentFileName {
14 | if (self = [super init]) {
15 | self.program = glCreateProgram();
16 | NSURL *vertexPath = [[NSBundle mainBundle] URLForResource:vertexFileName withExtension:@"vsh"];
17 | NSURL *fragmentPath = [[NSBundle mainBundle] URLForResource:fragmentFileName withExtension:@"fsh"];
18 | [self compileShadersWithVertexFile:vertexPath fragmentFile:fragmentPath];
19 | }
20 | return self;
21 | }
22 |
23 | - (void)compileShadersWithVertexFile:(NSURL *)vertexPath fragmentFile:(NSURL *)fragmentPath {
24 | GLuint vertexShader, fragmentShader;
25 | if (![self compileShader:&vertexShader type:GL_VERTEX_SHADER URL:vertexPath] || ![self compileShader:&fragmentShader type:GL_FRAGMENT_SHADER URL:fragmentPath]) {
26 | return;
27 | }
28 | if (![self linkProgram]) {
29 | [self deleteShader:&vertexShader];
30 | [self deleteShader:&fragmentShader];
31 | return;
32 | }
33 | [self detachAndDeleteShader:&vertexShader];
34 | [self detachAndDeleteShader:&fragmentShader];
35 | [self useProgram];
36 | }
37 |
38 | - (BOOL)compileShader:(GLuint *)shader type:(GLenum)type URL:(NSURL *)URL
39 | {
40 | NSError *error;
41 | NSString *sourceString = [[NSString alloc] initWithContentsOfURL:URL encoding:NSUTF8StringEncoding error:&error];
42 | if (sourceString == nil) {
43 | NSLog(@"Failed to load shader source: %@", [error localizedDescription]);
44 | return NO;
45 | }
46 |
47 | GLint status;
48 | const GLchar *source;
49 | source = (GLchar *)[sourceString UTF8String];
50 |
51 | *shader = glCreateShader(type);
52 | glShaderSource(*shader, 1, &source, NULL);
53 | glCompileShader(*shader);
54 |
55 | #if defined(DEBUG)
56 | GLint logLength;
57 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
58 | if (logLength > 0) {
59 | GLchar *log = (GLchar *)malloc(logLength);
60 | glGetShaderInfoLog(*shader, logLength, &logLength, log);
61 | NSLog(@"Shader compile log:\n%s", log);
62 | free(log);
63 | }
64 | #endif
65 |
66 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
67 | if (status == 0) {
68 | glDeleteShader(*shader);
69 | return NO;
70 | }
71 | // After a successful compile, attach the shader to the program
72 | glAttachShader(self.program, *shader);
73 |
74 | return YES;
75 | }
76 |
77 | - (BOOL)linkProgram
78 | {
79 | GLint status;
80 | glLinkProgram(_program);
81 |
82 | #if defined(DEBUG)
83 | GLint logLength;
84 | glGetProgramiv(_program, GL_INFO_LOG_LENGTH, &logLength);
85 | if (logLength > 0) {
86 | GLchar *log = (GLchar *)malloc(logLength);
87 | glGetProgramInfoLog(_program, logLength, &logLength, log);
88 | NSLog(@"Program link log:\n%s", log);
89 | free(log);
90 | }
91 | #endif
92 |
93 | glGetProgramiv(_program, GL_LINK_STATUS, &status);
94 | if (status == 0) {
95 | return NO;
96 | }
97 |
98 | return YES;
99 | }
100 |
101 | - (BOOL)validateProgram
102 | {
103 | GLint logLength, status;
104 |
105 | glValidateProgram(_program);
106 | glGetProgramiv(_program, GL_INFO_LOG_LENGTH, &logLength);
107 | if (logLength > 0) {
108 | GLchar *log = (GLchar *)malloc(logLength);
109 | glGetProgramInfoLog(_program, logLength, &logLength, log);
110 | NSLog(@"Program validate log:\n%s", log);
111 | free(log);
112 | }
113 |
114 | glGetProgramiv(_program, GL_VALIDATE_STATUS, &status);
115 | if (status == 0) {
116 | return NO;
117 | }
118 |
119 | return YES;
120 | }
121 |
122 | - (void)bindAttribLocation:(GLuint)index andAttribName:(GLchar*)name{
123 | glBindAttribLocation(self.program, index, name);
124 |
125 | }
126 |
127 | - (void)deleteShader:(GLuint*)shader{
128 | if (*shader){
129 | glDeleteShader(*shader);
130 | *shader = 0;
131 | }
132 | }
133 |
134 | - (GLint)getUniformLocation:(const GLchar*) name{
135 | return glGetUniformLocation(self.program, name);
136 | }
137 |
138 | - (GLuint)getAttributeLocation:(const GLchar *)name {
139 | return glGetAttribLocation(self.program, name);
140 | }
141 |
142 | -(void)detachAndDeleteShader:(GLuint *)shader{
143 | if (*shader){
144 | glDetachShader(self.program, *shader);
145 | glDeleteShader(*shader);
146 | *shader = 0;
147 | }
148 | }
149 |
150 | -(void)deleteProgram{
151 | if (self.program){
152 | glDeleteProgram(self.program);
153 | self.program = 0;
154 | }
155 | }
156 |
157 | -(void)useProgram{
158 | glUseProgram(self.program);
159 | }
160 |
161 | -(void)dealloc{
162 | [self deleteProgram];
163 | }
164 |
165 | @end
166 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection/FaceDetectionView.m:
--------------------------------------------------------------------------------
1 | //
2 | // FaceDetectionView.m
3 | // LYFaceDetection
4 | //
5 | // Created by LustySwimmer on 2018/3/6.
6 | // Copyright © 2018年 LustySwimmer. All rights reserved.
7 | //
8 | #import <GLKit/GLKit.h>
9 | #import <CoreVideo/CoreVideo.h>
10 | #import "FaceDetectionView.h"
12 | #import "LYShaderManager.h"
13 | #import "LYFaceDetector.h"
14 |
15 | // Uniform index.
16 | enum
17 | {
18 | UNIFORM_Y,
19 | UNIFORM_UV,
20 | UNIFORM_ROTATE_MATRIX,
21 | UNIFORM_TEMP_INPUT_IMG_TEXTURE,
22 | NUM_UNIFORMS
23 | };
24 | static GLint glViewUniforms[NUM_UNIFORMS];
25 |
26 | // Attribute index.
27 | enum
28 | {
29 | ATTRIB_VERTEX,
30 | ATTRIB_TEXCOORD,
31 | ATTRIB_TEMP_VERTEX,
32 | ATTRIB_TEMP_TEXCOORD,
33 | NUM_ATTRIBUTES
34 | };
35 | static GLint glViewAttributes[NUM_ATTRIBUTES];
36 |
37 | @interface FaceDetectionView() {
38 | GLint _backingWidth;
39 | GLint _backingHeight;
40 |
41 | CVOpenGLESTextureRef _lumaTexture;
42 | CVOpenGLESTextureRef _chromaTexture;
43 | CVOpenGLESTextureCacheRef _videoTextureCache;
44 | dispatch_semaphore_t _lock;
45 | }
46 |
47 |
48 | @property (nonatomic, weak) CAEAGLLayer *eaglLayer;
49 |
50 | @property (nonatomic, strong) EAGLContext *context;
51 |
52 | @property (nonatomic, strong) LYShaderManager *shaderManager;
53 |
54 | @property (nonatomic, strong) LYShaderManager *textureManager;
55 |
56 | @property (nonatomic, assign) GLuint frameBuffer;
57 |
58 | @property (nonatomic, assign) GLuint renderBuffer;
59 |
60 | @property (nonatomic, assign) GLuint myTexture;
61 |
62 | @end
63 |
64 | @implementation FaceDetectionView
65 |
66 | + (Class)layerClass {
67 | return [CAEAGLLayer class];
68 | }
69 |
70 | - (instancetype)initWithCoder:(NSCoder *)coder
71 | {
72 | self = [super initWithCoder:coder];
73 | if (self) {
74 | [self _initialSetup];
75 | }
76 | return self;
77 | }
78 |
79 | - (void)_initialSetup {
80 | _lock = dispatch_semaphore_create(1);
81 | [self setupLayer];
82 | [self setupContext];
83 | [self loadShaders];
84 | [self setupRenderBuffer];
85 | [self setupFrameBuffer];
86 | _myTexture = [self setupTexture:@"test.jpg"];
87 | if (!_videoTextureCache) {
88 | CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
89 | if (err != noErr) {
90 | NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
91 | return;
92 | }
93 | }
94 | }
95 |
96 | - (void)dealloc
97 | {
98 | [self cleanUpTextures];
99 |
100 | if(_videoTextureCache) {
101 | CFRelease(_videoTextureCache);
102 | }
103 | }
104 |
105 | - (void)setupLayer {
106 | self.eaglLayer = (CAEAGLLayer *)self.layer;
107 | self.eaglLayer.drawableProperties = @{kEAGLDrawablePropertyRetainedBacking : @(NO),kEAGLDrawablePropertyColorFormat : kEAGLColorFormatRGBA8};
108 | self.eaglLayer.opaque = true;
109 | self.contentScaleFactor = [UIScreen mainScreen].scale;
110 | }
111 |
112 | - (void)setupContext {
113 | self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
114 | if (!self.context) {
115 | NSLog(@"initialize failed");
116 | exit(1);
117 | }
118 | if (![EAGLContext setCurrentContext:self.context]) {
119 | NSLog(@"failed to setCurrentContext");
120 | exit(1);
121 | }
122 | }
123 |
124 | - (void)loadShaders {
125 | // The viewport size must be set here
126 | CGFloat scale = [UIScreen mainScreen].scale;
127 | glViewport(self.frame.origin.x * scale, self.frame.origin.y * scale, self.frame.size.width * scale, self.frame.size.height * scale);
128 | self.shaderManager = [[LYShaderManager alloc] initWithVertexShaderFileName:@"FaceDetectionShader" fragmentFileName:@"FaceDetectionShader"];
129 | glViewAttributes[ATTRIB_VERTEX] = [self.shaderManager getAttributeLocation:"aPosition"];
130 | glViewAttributes[ATTRIB_TEXCOORD] = [self.shaderManager getAttributeLocation:"aTexCoordinate"];
131 | glViewUniforms[UNIFORM_Y] = [self.shaderManager getUniformLocation:"SamplerY"];
132 | glViewUniforms[UNIFORM_UV] = [self.shaderManager getUniformLocation:"SamplerUV"];
133 | glViewUniforms[UNIFORM_ROTATE_MATRIX] = [self.shaderManager getUniformLocation:"rotateMatrix"];
134 |
135 | self.textureManager = [[LYShaderManager alloc] initWithVertexShaderFileName:@"FaceTextureShader" fragmentFileName:@"FaceTextureShader"];
136 | glViewAttributes[ATTRIB_TEMP_VERTEX] = [self.textureManager getAttributeLocation:"aPosition"];
137 | glViewAttributes[ATTRIB_TEMP_TEXCOORD] = [self.textureManager getAttributeLocation:"aTexCoordinate"];
138 | glViewUniforms[UNIFORM_TEMP_INPUT_IMG_TEXTURE] = [self.textureManager getUniformLocation:"inputTexture"];
139 |
140 | }
141 |
142 | - (void)setupRenderBuffer {
143 | glGenRenderbuffers(1, &_renderBuffer);
144 | glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
145 | [self.context renderbufferStorage:GL_RENDERBUFFER fromDrawable:self.eaglLayer];
146 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_backingWidth);
147 | glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_backingHeight);
148 | }
149 |
150 | - (void)setupFrameBuffer {
151 | glGenFramebuffers(1, &_frameBuffer);
152 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
153 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderBuffer);
154 | }
155 |
156 | - (GLuint)setupTexture:(NSString *)fileName {
157 | CGImageRef spriteImage = [UIImage imageNamed:fileName].CGImage;
158 | if (!spriteImage) {
159 | NSLog(@"Failed to load image %@", fileName);
160 | exit(1);
161 | }
162 |
163 | size_t width = CGImageGetWidth(spriteImage);
164 | size_t height = CGImageGetHeight(spriteImage);
165 |
166 | GLubyte *spriteData = (GLubyte *)calloc(width * height * 4, sizeof(GLubyte));
167 |
168 | CGContextRef context = CGBitmapContextCreate(spriteData, width, height, 8, width * 4, CGImageGetColorSpace(spriteImage), kCGImageAlphaPremultipliedLast);
169 | CGContextTranslateCTM(context, 0, height);
170 | CGContextScaleCTM (context, 1.0, -1.0);
171 | CGContextDrawImage(context, CGRectMake(0, 0, width, height), spriteImage);
172 |
173 | CGContextRelease(context);
174 |
175 | GLuint texture;
176 | glActiveTexture(GL_TEXTURE2);
177 | glGenTextures(1, &texture);
178 | glBindTexture(GL_TEXTURE_2D, texture);
179 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
180 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
181 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
182 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
183 |
184 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int32_t)width, (int32_t)height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
185 | free(spriteData);
186 | return texture;
187 | }
188 |
189 | - (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer {
190 | if (pixelBuffer != NULL) {
191 |
192 | int width = (int)CVPixelBufferGetWidth(pixelBuffer);
193 | int height = (int)CVPixelBufferGetHeight(pixelBuffer);
194 |
195 | if (!_videoTextureCache) {
196 | NSLog(@"NO Video Texture Cache");
197 | return;
198 | }
199 | if ([EAGLContext currentContext] != _context) {
200 | [EAGLContext setCurrentContext:_context];
201 | }
202 |
203 | [self cleanUpTextures];
204 |
205 | glActiveTexture(GL_TEXTURE0);
206 |
207 | CVReturn err;
208 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
209 | _videoTextureCache,
210 | pixelBuffer,
211 | NULL,
212 | GL_TEXTURE_2D,
213 | GL_RED_EXT,
214 | width,
215 | height,
216 | GL_RED_EXT,
217 | GL_UNSIGNED_BYTE,
218 | 0,
219 | &_lumaTexture);
220 |
221 | if (err) {
222 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
223 | }
224 |
225 | glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
226 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
227 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
228 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
229 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
230 |
231 | // UV-plane.
232 | glActiveTexture(GL_TEXTURE1);
233 | err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
234 | _videoTextureCache,
235 | pixelBuffer,
236 | NULL,
237 | GL_TEXTURE_2D,
238 | GL_RG_EXT,
239 | width / 2,
240 | height / 2,
241 | GL_RG_EXT,
242 | GL_UNSIGNED_BYTE,
243 | 1,
244 | &_chromaTexture);
245 | if (err) {
246 | NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
247 | }
248 |
249 | glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
250 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
251 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
252 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
253 | glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
254 |
255 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
256 |
257 | glViewport(0, 0, _backingWidth, _backingHeight);
258 |
259 | }
260 |
261 | glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
262 | glEnable(GL_BLEND);
263 | glClearColor(0, 0, 0, 1.0);
264 | glClear(GL_COLOR_BUFFER_BIT);
265 |
266 | glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
267 | [self.shaderManager useProgram];
268 | glUniform1i(glViewUniforms[UNIFORM_Y], 0);
269 | glUniform1i(glViewUniforms[UNIFORM_UV], 1);
270 |
271 | glUniformMatrix4fv(glViewUniforms[UNIFORM_ROTATE_MATRIX], 1, GL_FALSE, GLKMatrix4MakeXRotation(M_PI).m);
272 |
273 | GLfloat quadVertexData[] = {
274 | -1, -1,
275 | 1, -1 ,
276 | -1, 1,
277 | 1, 1,
278 | };
279 |
280 | // Update the vertex data
281 | glVertexAttribPointer(glViewAttributes[ATTRIB_VERTEX], 2, GL_FLOAT, 0, 0, quadVertexData);
282 | glEnableVertexAttribArray(glViewAttributes[ATTRIB_VERTEX]);
283 |
284 | GLfloat quadTextureData[] = { // standard texture coordinates
285 | 0, 0,
286 | 1, 0,
287 | 0, 1,
288 | 1, 1
289 | };
290 |
291 | glVertexAttribPointer(glViewAttributes[ATTRIB_TEXCOORD], 2, GL_FLOAT, GL_FALSE, 0, quadTextureData);
292 | glEnableVertexAttribArray(glViewAttributes[ATTRIB_TEXCOORD]);
293 |
294 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
295 |
296 | [LYFaceDetector detectCVPixelBuffer:pixelBuffer completionHandler:^(CIFaceFeature *result, CIImage *ciImage) {
297 | if (result) {
298 | [self renderTempTexture:result ciImage:ciImage];
299 | }
300 | }];
301 |
302 | glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer);
303 |
304 | if ([EAGLContext currentContext] == _context) {
305 | [_context presentRenderbuffer:GL_RENDERBUFFER];
306 | }
307 | }
308 |
309 | - (void)renderTempTexture:(CIFaceFeature *)faceFeature ciImage:(CIImage *)ciImage {
310 | dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
311 | // Get the image size
312 | CGSize ciImageSize = [ciImage extent].size;
313 | // Initialize the transform (flip the Y axis: Core Image uses a bottom-left origin)
314 | CGAffineTransform transform = CGAffineTransformScale(CGAffineTransformIdentity, 1, -1);
315 | transform = CGAffineTransformTranslate(transform,0,-ciImageSize.height);
316 | // Convert from image coordinates to view coordinates
317 | CGSize viewSize =self.layer.bounds.size;
318 | CGFloat scale = MIN(viewSize.width / ciImageSize.width,viewSize.height / ciImageSize.height);
319 |
320 | CGFloat offsetX = (viewSize.width - ciImageSize.width * scale) / 2;
321 | CGFloat offsetY = (viewSize.height - ciImageSize.height * scale) / 2;
322 | // Scale
323 | CGAffineTransform scaleTransform = CGAffineTransformMakeScale(scale, scale);
324 | // Get the face frame
325 | CGRect faceViewBounds = CGRectApplyAffineTransform(faceFeature.bounds, transform);
326 | // Adjust by the scale and centering offsets
327 | faceViewBounds = CGRectApplyAffineTransform(faceViewBounds,scaleTransform);
328 | faceViewBounds.origin.x += offsetX;
329 | faceViewBounds.origin.y += offsetY;
330 |
331 |
332 | NSLog(@"face frame after:%@",NSStringFromCGRect(faceViewBounds));
333 | [self.textureManager useProgram];
334 | glBindTexture(GL_TEXTURE_2D, _myTexture);
335 | glUniform1i(glViewUniforms[UNIFORM_TEMP_INPUT_IMG_TEXTURE], 2);
336 |
337 | CGFloat midX = CGRectGetMidX(self.layer.bounds);
338 | CGFloat midY = CGRectGetMidY(self.layer.bounds);
339 |
340 | CGFloat originX = CGRectGetMinX(faceViewBounds);
341 | CGFloat originY = CGRectGetMinY(faceViewBounds);
342 | CGFloat maxX = CGRectGetMaxX(faceViewBounds);
343 | CGFloat maxY = CGRectGetMaxY(faceViewBounds);
344 |
345 | // Overlay texture vertices (normalized device coordinates)
346 | GLfloat minVertexX = (originX - midX) / midX;
347 | GLfloat minVertexY = (midY - maxY) / midY;
348 | GLfloat maxVertexX = (maxX - midX) / midX;
349 | GLfloat maxVertexY = (midY - originY) / midY;
350 | GLfloat quadData[] = {
351 | minVertexX, minVertexY,
352 | maxVertexX, minVertexY,
353 | minVertexX, maxVertexY,
354 | maxVertexX, maxVertexY,
355 | };
356 |
357 | glVertexAttribPointer(glViewAttributes[ATTRIB_TEMP_VERTEX], 2, GL_FLOAT, GL_FALSE, 0, quadData);
358 | glEnableVertexAttribArray(glViewAttributes[ATTRIB_TEMP_VERTEX]);
359 |
360 | GLfloat quadTextureData[] = { // standard texture coordinates
361 | 0, 0,
362 | 1, 0,
363 | 0, 1,
364 | 1, 1
365 | };
366 | glVertexAttribPointer(glViewAttributes[ATTRIB_TEMP_TEXCOORD], 2, GL_FLOAT, GL_FALSE, 0, quadTextureData);
367 | glEnableVertexAttribArray(glViewAttributes[ATTRIB_TEMP_TEXCOORD]);
368 |
369 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
370 | dispatch_semaphore_signal(_lock);
371 | }
372 |
373 | - (UIImage *)snapshot {
374 | UIGraphicsBeginImageContextWithOptions(self.bounds.size, NO, 0.0);
375 | [self drawViewHierarchyInRect:self.bounds afterScreenUpdates:NO];
376 | UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
377 | UIGraphicsEndImageContext();
378 | return image;
379 | }
380 |
381 | - (void)cleanUpTextures {
382 | if (_lumaTexture) {
383 | CFRelease(_lumaTexture);
384 | _lumaTexture = NULL;
385 | }
386 |
387 | if (_chromaTexture) {
388 | CFRelease(_chromaTexture);
389 | _chromaTexture = NULL;
390 | }
391 |
392 | // Periodic texture cache flush every frame
393 | CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
394 | }
395 |
396 | @end
397 |
--------------------------------------------------------------------------------
/LYFaceDetection/LYFaceDetection.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 48;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 854F6058204E4FDE0054FC3B /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 854F6057204E4FDE0054FC3B /* AppDelegate.m */; };
11 | 854F605B204E4FDE0054FC3B /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 854F605A204E4FDE0054FC3B /* ViewController.m */; };
12 | 854F605E204E4FDE0054FC3B /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 854F605C204E4FDE0054FC3B /* Main.storyboard */; };
13 | 854F6060204E4FDE0054FC3B /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 854F605F204E4FDE0054FC3B /* Assets.xcassets */; };
14 | 854F6063204E4FDE0054FC3B /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 854F6061204E4FDE0054FC3B /* LaunchScreen.storyboard */; };
15 | 854F6066204E4FDE0054FC3B /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 854F6065204E4FDE0054FC3B /* main.m */; };
16 | 854F606E204E66730054FC3B /* FaceDetectionView.m in Sources */ = {isa = PBXBuildFile; fileRef = 854F606D204E66730054FC3B /* FaceDetectionView.m */; };
17 | 854F6070204E67C10054FC3B /* FaceDetectionShader.vsh in Resources */ = {isa = PBXBuildFile; fileRef = 854F606F204E67C10054FC3B /* FaceDetectionShader.vsh */; };
18 | 854F6072204E67D30054FC3B /* FaceDetectionShader.fsh in Resources */ = {isa = PBXBuildFile; fileRef = 854F6071204E67D30054FC3B /* FaceDetectionShader.fsh */; };
19 | 854F6074204E683F0054FC3B /* FaceTextureShader.fsh in Resources */ = {isa = PBXBuildFile; fileRef = 854F6073204E683F0054FC3B /* FaceTextureShader.fsh */; };
20 | 854F6076204E68B00054FC3B /* FaceTextureShader.vsh in Resources */ = {isa = PBXBuildFile; fileRef = 854F6075204E68B00054FC3B /* FaceTextureShader.vsh */; };
21 | 854F6079204E69B90054FC3B /* LYShaderManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 854F6078204E69B90054FC3B /* LYShaderManager.m */; };
22 | 854F607C204E6C5A0054FC3B /* LYCameraManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 854F607B204E6C5A0054FC3B /* LYCameraManager.m */; };
23 | 854F607F204E71810054FC3B /* LYFaceDetector.m in Sources */ = {isa = PBXBuildFile; fileRef = 854F607E204E71810054FC3B /* LYFaceDetector.m */; };
24 | 854F6081204E74020054FC3B /* test.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 854F6080204E74020054FC3B /* test.jpg */; };
25 | /* End PBXBuildFile section */
26 |
27 | /* Begin PBXFileReference section */
28 | 854F6053204E4FDE0054FC3B /* LYFaceDetection.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = LYFaceDetection.app; sourceTree = BUILT_PRODUCTS_DIR; };
29 | 854F6056204E4FDE0054FC3B /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
30 | 854F6057204E4FDE0054FC3B /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
31 | 854F6059204E4FDE0054FC3B /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
32 | 854F605A204E4FDE0054FC3B /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = "<group>"; };
33 | 854F605D204E4FDE0054FC3B /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
34 | 854F605F204E4FDE0054FC3B /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
35 | 854F6062204E4FDE0054FC3B /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
36 | 854F6064204E4FDE0054FC3B /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
37 | 854F6065204E4FDE0054FC3B /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
38 | 854F606C204E66730054FC3B /* FaceDetectionView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FaceDetectionView.h; sourceTree = "<group>"; };
39 | 854F606D204E66730054FC3B /* FaceDetectionView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FaceDetectionView.m; sourceTree = "<group>"; };
40 | 854F606F204E67C10054FC3B /* FaceDetectionShader.vsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; path = FaceDetectionShader.vsh; sourceTree = "<group>"; };
41 | 854F6071204E67D30054FC3B /* FaceDetectionShader.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; path = FaceDetectionShader.fsh; sourceTree = "<group>"; };
42 | 854F6073204E683F0054FC3B /* FaceTextureShader.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; path = FaceTextureShader.fsh; sourceTree = "<group>"; };
43 | 854F6075204E68B00054FC3B /* FaceTextureShader.vsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; path = FaceTextureShader.vsh; sourceTree = "<group>"; };
44 | 854F6077204E69B90054FC3B /* LYShaderManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = LYShaderManager.h; sourceTree = "<group>"; };
45 | 854F6078204E69B90054FC3B /* LYShaderManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = LYShaderManager.m; sourceTree = "<group>"; };
46 | 854F607A204E6C5A0054FC3B /* LYCameraManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = LYCameraManager.h; sourceTree = "<group>"; };
47 | 854F607B204E6C5A0054FC3B /* LYCameraManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = LYCameraManager.m; sourceTree = "<group>"; };
48 | 854F607D204E71810054FC3B /* LYFaceDetector.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LYFaceDetector.h; sourceTree = "<group>"; };
49 | 854F607E204E71810054FC3B /* LYFaceDetector.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = LYFaceDetector.m; sourceTree = "<group>"; };
50 | 854F6080204E74020054FC3B /* test.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = test.jpg; sourceTree = "<group>"; };
51 | /* End PBXFileReference section */
52 |
53 | /* Begin PBXFrameworksBuildPhase section */
54 | 854F6050204E4FDE0054FC3B /* Frameworks */ = {
55 | isa = PBXFrameworksBuildPhase;
56 | buildActionMask = 2147483647;
57 | files = (
58 | );
59 | runOnlyForDeploymentPostprocessing = 0;
60 | };
61 | /* End PBXFrameworksBuildPhase section */
62 |
63 | /* Begin PBXGroup section */
64 | 854F604A204E4FDE0054FC3B = {
65 | isa = PBXGroup;
66 | children = (
67 | 854F6055204E4FDE0054FC3B /* LYFaceDetection */,
68 | 854F6054204E4FDE0054FC3B /* Products */,
69 | );
70 | sourceTree = "<group>";
71 | };
72 | 854F6054204E4FDE0054FC3B /* Products */ = {
73 | isa = PBXGroup;
74 | children = (
75 | 854F6053204E4FDE0054FC3B /* LYFaceDetection.app */,
76 | );
77 | name = Products;
78 | sourceTree = "<group>";
79 | };
80 | 854F6055204E4FDE0054FC3B /* LYFaceDetection */ = {
81 | isa = PBXGroup;
82 | children = (
83 | 854F6056204E4FDE0054FC3B /* AppDelegate.h */,
84 | 854F6057204E4FDE0054FC3B /* AppDelegate.m */,
85 | 854F6059204E4FDE0054FC3B /* ViewController.h */,
86 | 854F605A204E4FDE0054FC3B /* ViewController.m */,
87 | 854F606C204E66730054FC3B /* FaceDetectionView.h */,
88 | 854F606D204E66730054FC3B /* FaceDetectionView.m */,
89 | 854F607D204E71810054FC3B /* LYFaceDetector.h */,
90 | 854F607E204E71810054FC3B /* LYFaceDetector.m */,
91 | 854F6083204E7E7B0054FC3B /* Camera */,
92 | 854F6082204E7E520054FC3B /* OpenGL */,
93 | 854F605C204E4FDE0054FC3B /* Main.storyboard */,
94 | 854F6080204E74020054FC3B /* test.jpg */,
95 | 854F605F204E4FDE0054FC3B /* Assets.xcassets */,
96 | 854F6061204E4FDE0054FC3B /* LaunchScreen.storyboard */,
97 | 854F6064204E4FDE0054FC3B /* Info.plist */,
98 | 854F6065204E4FDE0054FC3B /* main.m */,
99 | );
100 | path = LYFaceDetection;
101 | sourceTree = "<group>";
102 | };
103 | 854F6082204E7E520054FC3B /* OpenGL */ = {
104 | isa = PBXGroup;
105 | children = (
106 | 854F6077204E69B90054FC3B /* LYShaderManager.h */,
107 | 854F6078204E69B90054FC3B /* LYShaderManager.m */,
108 | 854F606F204E67C10054FC3B /* FaceDetectionShader.vsh */,
109 | 854F6071204E67D30054FC3B /* FaceDetectionShader.fsh */,
110 | 854F6075204E68B00054FC3B /* FaceTextureShader.vsh */,
111 | 854F6073204E683F0054FC3B /* FaceTextureShader.fsh */,
112 | );
113 | path = OpenGL;
114 | sourceTree = "<group>";
115 | };
116 | 854F6083204E7E7B0054FC3B /* Camera */ = {
117 | isa = PBXGroup;
118 | children = (
119 | 854F607A204E6C5A0054FC3B /* LYCameraManager.h */,
120 | 854F607B204E6C5A0054FC3B /* LYCameraManager.m */,
121 | );
122 | path = Camera;
123 | sourceTree = "<group>";
124 | };
125 | /* End PBXGroup section */
126 |
127 | /* Begin PBXNativeTarget section */
128 | 854F6052204E4FDE0054FC3B /* LYFaceDetection */ = {
129 | isa = PBXNativeTarget;
130 | buildConfigurationList = 854F6069204E4FDE0054FC3B /* Build configuration list for PBXNativeTarget "LYFaceDetection" */;
131 | buildPhases = (
132 | 854F604F204E4FDE0054FC3B /* Sources */,
133 | 854F6050204E4FDE0054FC3B /* Frameworks */,
134 | 854F6051204E4FDE0054FC3B /* Resources */,
135 | );
136 | buildRules = (
137 | );
138 | dependencies = (
139 | );
140 | name = LYFaceDetection;
141 | productName = LYFaceDetection;
142 | productReference = 854F6053204E4FDE0054FC3B /* LYFaceDetection.app */;
143 | productType = "com.apple.product-type.application";
144 | };
145 | /* End PBXNativeTarget section */
146 |
147 | /* Begin PBXProject section */
148 | 854F604B204E4FDE0054FC3B /* Project object */ = {
149 | isa = PBXProject;
150 | attributes = {
151 | LastUpgradeCheck = 0920;
152 | ORGANIZATIONNAME = ShenZhenShangBao;
153 | TargetAttributes = {
154 | 854F6052204E4FDE0054FC3B = {
155 | CreatedOnToolsVersion = 9.2;
156 | ProvisioningStyle = Automatic;
157 | };
158 | };
159 | };
160 | buildConfigurationList = 854F604E204E4FDE0054FC3B /* Build configuration list for PBXProject "LYFaceDetection" */;
161 | compatibilityVersion = "Xcode 8.0";
162 | developmentRegion = en;
163 | hasScannedForEncodings = 0;
164 | knownRegions = (
165 | en,
166 | Base,
167 | );
168 | mainGroup = 854F604A204E4FDE0054FC3B;
169 | productRefGroup = 854F6054204E4FDE0054FC3B /* Products */;
170 | projectDirPath = "";
171 | projectRoot = "";
172 | targets = (
173 | 854F6052204E4FDE0054FC3B /* LYFaceDetection */,
174 | );
175 | };
176 | /* End PBXProject section */
177 |
178 | /* Begin PBXResourcesBuildPhase section */
179 | 854F6051204E4FDE0054FC3B /* Resources */ = {
180 | isa = PBXResourcesBuildPhase;
181 | buildActionMask = 2147483647;
182 | files = (
183 | 854F6076204E68B00054FC3B /* FaceTextureShader.vsh in Resources */,
184 | 854F6070204E67C10054FC3B /* FaceDetectionShader.vsh in Resources */,
185 | 854F6072204E67D30054FC3B /* FaceDetectionShader.fsh in Resources */,
186 | 854F6063204E4FDE0054FC3B /* LaunchScreen.storyboard in Resources */,
187 | 854F6074204E683F0054FC3B /* FaceTextureShader.fsh in Resources */,
188 | 854F6060204E4FDE0054FC3B /* Assets.xcassets in Resources */,
189 | 854F605E204E4FDE0054FC3B /* Main.storyboard in Resources */,
190 | 854F6081204E74020054FC3B /* test.jpg in Resources */,
191 | );
192 | runOnlyForDeploymentPostprocessing = 0;
193 | };
194 | /* End PBXResourcesBuildPhase section */
195 |
196 | /* Begin PBXSourcesBuildPhase section */
197 | 854F604F204E4FDE0054FC3B /* Sources */ = {
198 | isa = PBXSourcesBuildPhase;
199 | buildActionMask = 2147483647;
200 | files = (
201 | 854F6079204E69B90054FC3B /* LYShaderManager.m in Sources */,
202 | 854F605B204E4FDE0054FC3B /* ViewController.m in Sources */,
203 | 854F607F204E71810054FC3B /* LYFaceDetector.m in Sources */,
204 | 854F6066204E4FDE0054FC3B /* main.m in Sources */,
205 | 854F607C204E6C5A0054FC3B /* LYCameraManager.m in Sources */,
206 | 854F6058204E4FDE0054FC3B /* AppDelegate.m in Sources */,
207 | 854F606E204E66730054FC3B /* FaceDetectionView.m in Sources */,
208 | );
209 | runOnlyForDeploymentPostprocessing = 0;
210 | };
211 | /* End PBXSourcesBuildPhase section */
212 |
213 | /* Begin PBXVariantGroup section */
214 | 854F605C204E4FDE0054FC3B /* Main.storyboard */ = {
215 | isa = PBXVariantGroup;
216 | children = (
217 | 854F605D204E4FDE0054FC3B /* Base */,
218 | );
219 | name = Main.storyboard;
220 | 			sourceTree = "<group>";
221 | };
222 | 854F6061204E4FDE0054FC3B /* LaunchScreen.storyboard */ = {
223 | isa = PBXVariantGroup;
224 | children = (
225 | 854F6062204E4FDE0054FC3B /* Base */,
226 | );
227 | name = LaunchScreen.storyboard;
228 | 			sourceTree = "<group>";
229 | };
230 | /* End PBXVariantGroup section */
231 |
232 | /* Begin XCBuildConfiguration section */
233 | 854F6067204E4FDE0054FC3B /* Debug */ = {
234 | isa = XCBuildConfiguration;
235 | buildSettings = {
236 | ALWAYS_SEARCH_USER_PATHS = NO;
237 | CLANG_ANALYZER_NONNULL = YES;
238 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
239 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
240 | CLANG_CXX_LIBRARY = "libc++";
241 | CLANG_ENABLE_MODULES = YES;
242 | CLANG_ENABLE_OBJC_ARC = YES;
243 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
244 | CLANG_WARN_BOOL_CONVERSION = YES;
245 | CLANG_WARN_COMMA = YES;
246 | CLANG_WARN_CONSTANT_CONVERSION = YES;
247 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
248 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
249 | CLANG_WARN_EMPTY_BODY = YES;
250 | CLANG_WARN_ENUM_CONVERSION = YES;
251 | CLANG_WARN_INFINITE_RECURSION = YES;
252 | CLANG_WARN_INT_CONVERSION = YES;
253 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
254 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
255 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
256 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
257 | CLANG_WARN_STRICT_PROTOTYPES = YES;
258 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
259 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
260 | CLANG_WARN_UNREACHABLE_CODE = YES;
261 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
262 | CODE_SIGN_IDENTITY = "iPhone Developer";
263 | COPY_PHASE_STRIP = NO;
264 | DEBUG_INFORMATION_FORMAT = dwarf;
265 | ENABLE_STRICT_OBJC_MSGSEND = YES;
266 | ENABLE_TESTABILITY = YES;
267 | GCC_C_LANGUAGE_STANDARD = gnu11;
268 | GCC_DYNAMIC_NO_PIC = NO;
269 | GCC_NO_COMMON_BLOCKS = YES;
270 | GCC_OPTIMIZATION_LEVEL = 0;
271 | GCC_PREPROCESSOR_DEFINITIONS = (
272 | "DEBUG=1",
273 | "$(inherited)",
274 | );
275 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
276 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
277 | GCC_WARN_UNDECLARED_SELECTOR = YES;
278 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
279 | GCC_WARN_UNUSED_FUNCTION = YES;
280 | GCC_WARN_UNUSED_VARIABLE = YES;
281 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
282 | MTL_ENABLE_DEBUG_INFO = YES;
283 | ONLY_ACTIVE_ARCH = YES;
284 | SDKROOT = iphoneos;
285 | };
286 | name = Debug;
287 | };
288 | 854F6068204E4FDE0054FC3B /* Release */ = {
289 | isa = XCBuildConfiguration;
290 | buildSettings = {
291 | ALWAYS_SEARCH_USER_PATHS = NO;
292 | CLANG_ANALYZER_NONNULL = YES;
293 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
294 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
295 | CLANG_CXX_LIBRARY = "libc++";
296 | CLANG_ENABLE_MODULES = YES;
297 | CLANG_ENABLE_OBJC_ARC = YES;
298 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
299 | CLANG_WARN_BOOL_CONVERSION = YES;
300 | CLANG_WARN_COMMA = YES;
301 | CLANG_WARN_CONSTANT_CONVERSION = YES;
302 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
303 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
304 | CLANG_WARN_EMPTY_BODY = YES;
305 | CLANG_WARN_ENUM_CONVERSION = YES;
306 | CLANG_WARN_INFINITE_RECURSION = YES;
307 | CLANG_WARN_INT_CONVERSION = YES;
308 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
309 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
310 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
311 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
312 | CLANG_WARN_STRICT_PROTOTYPES = YES;
313 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
314 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
315 | CLANG_WARN_UNREACHABLE_CODE = YES;
316 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
317 | CODE_SIGN_IDENTITY = "iPhone Developer";
318 | COPY_PHASE_STRIP = NO;
319 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
320 | ENABLE_NS_ASSERTIONS = NO;
321 | ENABLE_STRICT_OBJC_MSGSEND = YES;
322 | GCC_C_LANGUAGE_STANDARD = gnu11;
323 | GCC_NO_COMMON_BLOCKS = YES;
324 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
325 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
326 | GCC_WARN_UNDECLARED_SELECTOR = YES;
327 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
328 | GCC_WARN_UNUSED_FUNCTION = YES;
329 | GCC_WARN_UNUSED_VARIABLE = YES;
330 | IPHONEOS_DEPLOYMENT_TARGET = 11.2;
331 | MTL_ENABLE_DEBUG_INFO = NO;
332 | SDKROOT = iphoneos;
333 | VALIDATE_PRODUCT = YES;
334 | };
335 | name = Release;
336 | };
337 | 854F606A204E4FDE0054FC3B /* Debug */ = {
338 | isa = XCBuildConfiguration;
339 | buildSettings = {
340 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
341 | CODE_SIGN_STYLE = Automatic;
342 | DEVELOPMENT_TEAM = MJJHTAMJ6J;
343 | INFOPLIST_FILE = LYFaceDetection/Info.plist;
344 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
345 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
346 | PRODUCT_BUNDLE_IDENTIFIER = com.LustySwimmer.LYFaceDetection;
347 | PRODUCT_NAME = "$(TARGET_NAME)";
348 | TARGETED_DEVICE_FAMILY = 1;
349 | };
350 | name = Debug;
351 | };
352 | 854F606B204E4FDE0054FC3B /* Release */ = {
353 | isa = XCBuildConfiguration;
354 | buildSettings = {
355 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
356 | CODE_SIGN_STYLE = Automatic;
357 | DEVELOPMENT_TEAM = MJJHTAMJ6J;
358 | INFOPLIST_FILE = LYFaceDetection/Info.plist;
359 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
360 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
361 | PRODUCT_BUNDLE_IDENTIFIER = com.LustySwimmer.LYFaceDetection;
362 | PRODUCT_NAME = "$(TARGET_NAME)";
363 | TARGETED_DEVICE_FAMILY = 1;
364 | };
365 | name = Release;
366 | };
367 | /* End XCBuildConfiguration section */
368 |
369 | /* Begin XCConfigurationList section */
370 | 854F604E204E4FDE0054FC3B /* Build configuration list for PBXProject "LYFaceDetection" */ = {
371 | isa = XCConfigurationList;
372 | buildConfigurations = (
373 | 854F6067204E4FDE0054FC3B /* Debug */,
374 | 854F6068204E4FDE0054FC3B /* Release */,
375 | );
376 | defaultConfigurationIsVisible = 0;
377 | defaultConfigurationName = Release;
378 | };
379 | 854F6069204E4FDE0054FC3B /* Build configuration list for PBXNativeTarget "LYFaceDetection" */ = {
380 | isa = XCConfigurationList;
381 | buildConfigurations = (
382 | 854F606A204E4FDE0054FC3B /* Debug */,
383 | 854F606B204E4FDE0054FC3B /* Release */,
384 | );
385 | defaultConfigurationIsVisible = 0;
386 | defaultConfigurationName = Release;
387 | };
388 | /* End XCConfigurationList section */
389 | };
390 | rootObject = 854F604B204E4FDE0054FC3B /* Project object */;
391 | }
392 |
--------------------------------------------------------------------------------