├── VisionDemo
│   ├── AppDelegate.h
│   ├── AppDelegate.m
│   ├── Assets.xcassets
│   │   ├── Contents.json
│   │   ├── AppIcon.appiconset
│   │   │   └── Contents.json
│   │   ├── eyes.imageset
│   │   │   ├── Contents.json
│   │   │   └── eyes@2x.png
│   │   ├── turn.imageset
│   │   │   ├── Contents.json
│   │   │   └── turn@2x.png
│   │   └── 返回.imageset
│   │       ├── Contents.json
│   │       └── 返回@2x.png
│   ├── Base.lproj
│   │   ├── LaunchScreen.storyboard
│   │   └── Main.storyboard
│   ├── GLDiscernPointModel.h
│   ├── GLDiscernPointModel.m
│   ├── GLTools.h
│   ├── GLTools.m
│   ├── Info.plist
│   ├── OpenCameraOrPhoto.h
│   ├── OpenCameraOrPhoto.m
│   ├── UIImage+GLProcessing.h
│   ├── UIImage+GLProcessing.m
│   ├── VisionCameraViewController.h
│   ├── VisionCameraViewController.m
│   ├── VisionFaceViewController.h
│   ├── VisionFaceViewController.m
│   ├── VisionTableViewController.h
│   ├── VisionTableViewController.m
│   └── main.m
├── VisionDemo.xcodeproj
│   ├── project.pbxproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcuserdata
│   │       └── gaolei.xcuserdatad
│   │           └── UserInterfaceState.xcuserstate
│   └── xcuserdata
│       └── gaolei.xcuserdatad
│           ├── xcdebugger
│           │   └── Breakpoints_v2.xcbkptlist
│           └── xcschemes
│               └── xcschememanagement.plist
└── README.md
/VisionDemo/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/VisionDemo/Assets.xcassets/返回.imageset/返回@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gao211326/VisionDemo/HEAD/VisionDemo/Assets.xcassets/返回.imageset/返回@2x.png
--------------------------------------------------------------------------------
/VisionDemo/Assets.xcassets/eyes.imageset/eyes@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gao211326/VisionDemo/HEAD/VisionDemo/Assets.xcassets/eyes.imageset/eyes@2x.png
--------------------------------------------------------------------------------
/VisionDemo/Assets.xcassets/turn.imageset/turn@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gao211326/VisionDemo/HEAD/VisionDemo/Assets.xcassets/turn.imageset/turn@2x.png
--------------------------------------------------------------------------------
/VisionDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version = "1.0" encoding = "UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:VisionDemo.xcodeproj">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/VisionDemo.xcodeproj/project.xcworkspace/xcuserdata/gaolei.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/gao211326/VisionDemo/HEAD/VisionDemo.xcodeproj/project.xcworkspace/xcuserdata/gaolei.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/VisionDemo/VisionTableViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // VisionTableViewController.h
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/27.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface VisionTableViewController : UITableViewController
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/VisionDemo/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/27.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/VisionDemo/VisionCameraViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // VisionCameraViewController.h
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/29.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | #import "GLTools.h"
12 |
13 | @interface VisionCameraViewController : UIViewController
14 |
15 | - (id)initWithDiscernType:(GLDiscernType)type;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VisionDemo/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/27.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/VisionDemo/Assets.xcassets/返回.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "idiom" : "universal",
9 | "filename" : "返回@2x.png",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/VisionDemo/Assets.xcassets/eyes.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "idiom" : "universal",
9 | "filename" : "eyes@2x.png",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/VisionDemo/Assets.xcassets/turn.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "scale" : "1x"
6 | },
7 | {
8 | "idiom" : "universal",
9 | "filename" : "turn@2x.png",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "version" : 1,
19 | "author" : "xcode"
20 | }
21 | }
--------------------------------------------------------------------------------
/VisionDemo/VisionFaceViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // VisionFaceViewController.h
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/27.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import <Vision/Vision.h>
11 | #import "GLTools.h"
12 |
13 | @interface VisionFaceViewController : UIViewController
14 |
15 | - (id)initWithImage:(UIImage *)image discernType:(GLDiscernType)type;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/VisionDemo.xcodeproj/xcuserdata/gaolei.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>SchemeUserState</key>
6 | <dict>
7 | <key>VisionDemo.xcscheme</key>
8 | <dict>
9 | <key>orderHint</key>
10 | <integer>0</integer>
11 | </dict>
12 | </dict>
13 | </dict>
14 | </plist>
15 | 
--------------------------------------------------------------------------------
/VisionDemo.xcodeproj/xcuserdata/gaolei.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/VisionDemo/OpenCameraOrPhoto.h:
--------------------------------------------------------------------------------
1 | //
2 | // OpenCameraOrPhoto.h
3 | // BaishitongClient
4 | //
5 | // Created by 高磊 on 15/10/28.
6 | // Copyright © 2015年 高磊. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "GLTools.h"
11 |
12 | typedef void(^OpenCameraOrPhotoBlock)(UIImage *image);
13 |
14 | @interface OpenCameraOrPhoto : NSObject
15 |
16 | SYNTHESIZE_SINGLETON_FOR_CLASS_HEADER(OpenCameraOrPhoto);
17 |
18 | + (void)showOpenCameraOrPhotoWithView:(UIView *)view withBlock:(OpenCameraOrPhotoBlock)openCameraOrPhotoBlock;
19 |
20 | @property (nonatomic,copy) OpenCameraOrPhotoBlock openCameraOrPhotoBlock;
21 |
22 | @end
23 |
--------------------------------------------------------------------------------
/VisionDemo/GLDiscernPointModel.h:
--------------------------------------------------------------------------------
1 | //
2 | // GLDiscernPointModel.h
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/10/11.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 |
11 | @interface GLDiscernPointModel : NSObject
12 |
13 | // Face-rectangle coordinates
14 | @property (nonatomic,strong) NSMutableArray *faceRectPoints;
15 | 
16 | // All facial-landmark coordinates
17 | @property (nonatomic,strong) NSMutableArray *faceLandMarkPoints;
18 | // Face contour
19 | @property (nonatomic,strong) NSMutableArray *faceContourPoints;
20 | // Left eye
21 | @property (nonatomic,strong) NSMutableArray *leftEyePoints;
22 | // Right eye
23 | @property (nonatomic,strong) NSMutableArray *rightEyePoints;
24 | // Left eyebrow
25 | @property (nonatomic,strong) NSMutableArray *leftEyebrowPoints;
26 | // Right eyebrow
27 | @property (nonatomic,strong) NSMutableArray *rightEyebrowPoints;
28 | // Nose
29 | @property (nonatomic,strong) NSMutableArray *nosePoints;
30 | // Nose crest
31 | @property (nonatomic,strong) NSMutableArray *noseCrestPoints;
32 | // Median line (vertical midline of the face)
33 | @property (nonatomic,strong) NSMutableArray *medianLinePoints;
34 | // Outer lips
35 | @property (nonatomic,strong) NSMutableArray *outerLipsPoints;
36 | // Inner lips
37 | @property (nonatomic,strong) NSMutableArray *innerLipsPoints;
38 | // Left pupil
39 | @property (nonatomic,strong) NSMutableArray *leftPupilPoints;
40 | // Right pupil
41 | @property (nonatomic,strong) NSMutableArray *rightPupilPoints;
42 |
43 |
44 | @end
45 |
--------------------------------------------------------------------------------
/VisionDemo/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/VisionDemo/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/VisionDemo/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>$(DEVELOPMENT_LANGUAGE)</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>APPL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleURLTypes</key>
20 | <array>
21 | <dict>
22 | <key>CFBundleTypeRole</key>
23 | <string>Editor</string>
24 | <key>CFBundleURLSchemes</key>
25 | <array>
26 | <string>visiondemo</string>
27 | </array>
28 | </dict>
29 | </array>
30 | <key>CFBundleVersion</key>
31 | <string>1</string>
32 | <key>LSRequiresIPhoneOS</key>
33 | <true/>
34 | <key>NSAppTransportSecurity</key>
35 | <dict>
36 | <key>NSAllowsArbitraryLoads</key>
37 | <true/>
38 | </dict>
39 | <key>NSCameraUsageDescription</key>
40 | <string>需要访问你的相机</string>
41 | <key>NSPhotoLibraryUsageDescription</key>
42 | <string>需要访问你的相册</string>
43 | <key>UILaunchStoryboardName</key>
44 | <string>LaunchScreen</string>
45 | <key>UIMainStoryboardFile</key>
46 | <string>Main</string>
47 | <key>UIRequiredDeviceCapabilities</key>
48 | <array>
49 | <string>armv7</string>
50 | </array>
51 | <key>UISupportedInterfaceOrientations</key>
52 | <array>
53 | <string>UIInterfaceOrientationPortrait</string>
54 | <string>UIInterfaceOrientationLandscapeLeft</string>
55 | <string>UIInterfaceOrientationLandscapeRight</string>
56 | </array>
57 | <key>UISupportedInterfaceOrientations~ipad</key>
58 | <array>
59 | <string>UIInterfaceOrientationPortrait</string>
60 | <string>UIInterfaceOrientationPortraitUpsideDown</string>
61 | <string>UIInterfaceOrientationLandscapeLeft</string>
62 | <string>UIInterfaceOrientationLandscapeRight</string>
63 | </array>
64 | </dict>
65 | </plist>
66 | 
--------------------------------------------------------------------------------
/VisionDemo/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/27.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | return YES;
21 | }
22 |
23 |
24 | - (void)applicationWillResignActive:(UIApplication *)application {
25 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
26 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
27 | }
28 |
29 |
30 | - (void)applicationDidEnterBackground:(UIApplication *)application {
31 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
32 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
33 | }
34 |
35 |
36 | - (void)applicationWillEnterForeground:(UIApplication *)application {
37 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
38 | }
39 |
40 |
41 | - (void)applicationDidBecomeActive:(UIApplication *)application {
42 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
43 | }
44 |
45 |
46 | - (void)applicationWillTerminate:(UIApplication *)application {
47 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
48 | }
49 |
50 |
51 | @end
52 |
--------------------------------------------------------------------------------
/VisionDemo/VisionFaceViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // VisionFaceViewController.m
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/27.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import "VisionFaceViewController.h"
10 |
11 | #import "GLTools.h"
12 | #import "GLDiscernPointModel.h"
13 | #import "UIImage+GLProcessing.h"
14 |
15 | @interface VisionFaceViewController ()
16 |
17 | @property (strong, nonatomic) UIImageView *visionImageView;
18 |
19 | @property (nonatomic,strong) UIImage *visionImage;
20 |
21 | @property (nonatomic,assign) GLDiscernType discernType;
22 | @end
23 |
24 | @implementation VisionFaceViewController
25 |
26 | - (id)initWithImage:(UIImage *)image discernType:(GLDiscernType)type{
27 | self = [super init];
28 | if (self) {
29 | self.visionImage = image;
30 |
31 | self.discernType = type;
32 | }
33 | return self;
34 | }
35 |
36 | - (void)viewDidLoad {
37 | [super viewDidLoad];
38 | self.view.backgroundColor = [UIColor whiteColor];
39 | self.visionImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, KNavagationHeight + 40, self.view.frame.size.width, self.view.frame.size.width)];
40 | self.visionImageView.contentMode = UIViewContentModeScaleAspectFit;
41 | [self.view addSubview:self.visionImageView];
42 |
43 | self.visionImageView.image = self.visionImage;
44 |
45 | [self checkImage:self.visionImage];
46 | }
47 |
48 | - (void)checkImage:(UIImage *)image
49 | {
50 | [[GLTools sharedInstance] glDiscernWithImageType:self.discernType image:image complete:^(GLDiscernPointModel * _Nonnull pointModel) {
51 | switch (self.discernType) {
52 | case GLDiscernFaceRectType:
53 | {
54 | dispatch_async(dispatch_get_main_queue(), ^{
55 | self.visionImageView.image = [UIImage gl_drawImage:image withRects:pointModel.faceRectPoints];
56 | });
57 | }
58 | break;
59 | case GLDiscernFaceLandmarkType:
60 | {
61 | dispatch_async(dispatch_get_main_queue(), ^{
62 | self.visionImageView.image = [UIImage gl_drawImage:image faceLandMarkPoints:pointModel.faceLandMarkPoints];
63 | });
64 | }
65 | break;
66 | default:
67 | break;
68 | }
69 | }];
70 | }
71 |
72 | - (void)didReceiveMemoryWarning {
73 | [super didReceiveMemoryWarning];
74 | }
75 |
76 |
77 |
78 | @end
79 |
--------------------------------------------------------------------------------
/VisionDemo/GLTools.h:
--------------------------------------------------------------------------------
1 | //
2 | // GLTools.h
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/30.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <UIKit/UIKit.h>
11 |
12 | #define KNavagationHeight (CGSizeEqualToSize([UIScreen mainScreen].bounds.size, CGSizeMake(375, 812))?83:64) // nav bar + status bar height; 83 on the iPhone X's 375x812-point screen
13 |
14 | // Convenience macros for declaring and defining a singleton
15 | #define SYNTHESIZE_SINGLETON_FOR_CLASS_HEADER( __CLASSNAME__) \
16 | \
17 | + ( __CLASSNAME__*) sharedInstance; \
18 |
19 |
20 | #define SYNTHESIZE_SINGLETON_FOR_CLASS(__CLASSNAME__) \
21 | \
22 | static __CLASSNAME__ *instance = nil; \
23 | \
24 | + (__CLASSNAME__ *)sharedInstance{ \
25 | static dispatch_once_t onceToken; \
26 | dispatch_once(&onceToken, ^{ \
27 | if (nil == instance){ \
28 | instance = [[__CLASSNAME__ alloc] init]; \
29 | } \
30 | }); \
31 | \
32 | return instance; \
33 | } \
34 |
35 | typedef NS_ENUM(NSInteger,GLDiscernType) {
36 | GLDiscernFaceRectType,        // face-rectangle detection
37 | GLDiscernFaceLandmarkType,    // facial-landmark recognition
38 | GLDiscernFaceRectDynamicType, // live face-rectangle detection
39 | GLDiscernFaceDynamicSceneType,// live face detection with a scene overlay
40 | };
41 |
42 |
43 | @class GLDiscernPointModel;
44 | /**
45 | Recognition result callback
46 | 
47 | @param pointModel model holding the result coordinates
48 | */
49 | typedef void(^discernResultBlock)(GLDiscernPointModel * _Nonnull pointModel);
50 |
51 | @interface GLTools : NSObject
52 |
53 | SYNTHESIZE_SINGLETON_FOR_CLASS_HEADER(GLTools);
54 |
55 |
56 |
57 | /**
58 | Run recognition on an image
59 | 
60 | @param type recognition type
61 | @param image the source image
62 | @param complete block invoked with the recognition result
63 | */
64 | - (void)glDiscernWithImageType:(GLDiscernType)type image:(UIImage *_Nullable)image complete:(discernResultBlock _Nullable)complete;
65 | 
66 | /**
67 | Whether camera permission has been granted
68 | 
69 | @return YES if authorized
70 | */
71 | - (BOOL)isCameraAuthorized;
72 | 
73 | 
74 | /**
75 | Whether the rear camera is available
76 | 
77 | @return YES if available
78 | */
79 | - (BOOL)isRearCameraAvailable;
80 | 
81 | 
82 | /**
83 | Whether the front camera is available
84 | 
85 | @return YES if available
86 | */
87 | - (BOOL)isFrontCameraAvailable;
88 | 
89 | 
90 | /**
91 | Whether the device has a camera
92 | 
93 | @return YES if available
94 | */
95 | - (BOOL)isCameraAvailable;
96 | /**
97 | Whether the photo library is available
98 | 
99 | @return YES if available
100 | */
101 | - (BOOL)isPhotoLibraryAvailable;
102 | 
103 | 
104 | /**
105 | Whether the camera supports taking photos
106 | 
107 | @return YES if supported
108 | */
109 | - (BOOL)doesCameraSupportTakingPhotos;
110 | 
111 | 
112 | /**
113 | Coordinate conversion
114 | 
115 | @param boundingBox normalized rect from Vision
116 | @param imageSize the image size
117 | @return the rect in UIKit image coordinates
118 | */
119 | - (CGRect)convertRect:(CGRect)boundingBox imageSize:(CGSize)imageSize;
120 | @end
121 |
--------------------------------------------------------------------------------
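
A minimal usage sketch for the GLTools API above (illustrative only, not a file from this repo; the function name is made up). It assumes a UIImage is already in hand, the way the view controllers below obtain one:

#import "GLTools.h"
#import "GLDiscernPointModel.h"

// Sketch: run face-rectangle detection on an image and log the results.
static void GLRunFaceRectSketch(UIImage *photo) {
    [[GLTools sharedInstance] glDiscernWithImageType:GLDiscernFaceRectType
                                               image:photo
                                            complete:^(GLDiscernPointModel * _Nonnull pointModel) {
        // The completion block fires on a background queue; hop to the
        // main queue before touching any UIKit object.
        dispatch_async(dispatch_get_main_queue(), ^{
            for (NSValue *value in pointModel.faceRectPoints) {
                NSLog(@"face rect: %@", NSStringFromCGRect([value CGRectValue]));
            }
        });
    }];
}
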
/VisionDemo/GLDiscernPointModel.m:
--------------------------------------------------------------------------------
1 | //
2 | // GLDiscernPointModel.m
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/10/11.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import "GLDiscernPointModel.h"
10 |
11 | @implementation GLDiscernPointModel
12 | - (NSMutableArray *)faceRectPoints
13 | {
14 | if (nil == _faceRectPoints)
15 | {
16 | _faceRectPoints = [[NSMutableArray alloc] init];
17 | }
18 | return _faceRectPoints;
19 | }
20 |
21 | - (NSMutableArray *)faceLandMarkPoints
22 | {
23 | if (nil == _faceLandMarkPoints)
24 | {
25 | _faceLandMarkPoints = [[NSMutableArray alloc] init];
26 | }
27 | return _faceLandMarkPoints;
28 | }
29 |
30 | - (NSMutableArray *)faceContourPoints
31 | {
32 | if (nil == _faceContourPoints)
33 | {
34 | _faceContourPoints = [[NSMutableArray alloc] init];
35 | }
36 | return _faceContourPoints;
37 | }
38 |
39 | - (NSMutableArray *)leftEyePoints
40 | {
41 | if (nil == _leftEyePoints)
42 | {
43 | _leftEyePoints = [[NSMutableArray alloc] init];
44 | }
45 | return _leftEyePoints;
46 | }
47 |
48 | - (NSMutableArray *)rightEyePoints
49 | {
50 | if (nil == _rightEyePoints)
51 | {
52 | _rightEyePoints = [[NSMutableArray alloc] init];
53 | }
54 | return _rightEyePoints;
55 | }
56 |
57 | - (NSMutableArray *)leftEyebrowPoints
58 | {
59 | if (nil == _leftEyebrowPoints)
60 | {
61 | _leftEyebrowPoints = [[NSMutableArray alloc] init];
62 | }
63 | return _leftEyebrowPoints;
64 | }
65 |
66 | - (NSMutableArray *)rightEyebrowPoints
67 | {
68 | if (nil == _rightEyebrowPoints)
69 | {
70 | _rightEyebrowPoints = [[NSMutableArray alloc] init];
71 | }
72 | return _rightEyebrowPoints;
73 | }
74 |
75 | - (NSMutableArray *)nosePoints
76 | {
77 | if (nil == _nosePoints)
78 | {
79 | _nosePoints = [[NSMutableArray alloc] init];
80 | }
81 | return _nosePoints;
82 | }
83 |
84 | - (NSMutableArray *)noseCrestPoints
85 | {
86 | if (nil == _noseCrestPoints)
87 | {
88 | _noseCrestPoints = [[NSMutableArray alloc] init];
89 | }
90 | return _noseCrestPoints;
91 | }
92 |
93 | - (NSMutableArray *)medianLinePoints
94 | {
95 | if (nil == _medianLinePoints)
96 | {
97 | _medianLinePoints = [[NSMutableArray alloc] init];
98 | }
99 | return _medianLinePoints;
100 | }
101 |
102 | - (NSMutableArray *)outerLipsPoints
103 | {
104 | if (nil == _outerLipsPoints)
105 | {
106 | _outerLipsPoints = [[NSMutableArray alloc] init];
107 | }
108 | return _outerLipsPoints;
109 | }
110 |
111 | - (NSMutableArray *)innerLipsPoints
112 | {
113 | if (nil == _innerLipsPoints)
114 | {
115 | _innerLipsPoints = [[NSMutableArray alloc] init];
116 | }
117 | return _innerLipsPoints;
118 | }
119 |
120 | - (NSMutableArray *)leftPupilPoints
121 | {
122 | if (nil == _leftPupilPoints)
123 | {
124 | _leftPupilPoints = [[NSMutableArray alloc] init];
125 | }
126 | return _leftPupilPoints;
127 | }
128 |
129 | - (NSMutableArray *)rightPupilPoints
130 | {
131 | if (nil == _rightPupilPoints)
132 | {
133 | _rightPupilPoints = [[NSMutableArray alloc] init];
134 | }
135 | return _rightPupilPoints;
136 | }
137 | @end
138 |
--------------------------------------------------------------------------------
/VisionDemo/VisionTableViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // VisionTableViewController.m
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/27.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import "VisionTableViewController.h"
10 | #import "VisionFaceViewController.h"
11 | #import "VisionCameraViewController.h"
12 |
13 | #import "OpenCameraOrPhoto.h"
14 | #import "UIImage+GLProcessing.h"
15 |
16 | @interface VisionTableViewController ()
17 |
18 | @property (nonatomic,strong) NSArray *titles;
19 |
20 | @end
21 |
22 | @implementation VisionTableViewController
23 |
24 | - (void)viewDidLoad {
25 | [super viewDidLoad];
26 |
27 | self.titles = @[@"人脸识别",@"人脸面部检测",@"动态人脸添加场景",@"动态人脸识别"];
28 | }
29 |
30 | - (void)didReceiveMemoryWarning {
31 | [super didReceiveMemoryWarning];
32 | // Dispose of any resources that can be recreated.
33 | }
34 |
35 | #pragma mark - Table view data source
36 |
37 | - (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
38 | return self.titles.count;
39 | }
40 |
41 |
42 | - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
43 | UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:@"visionCell" forIndexPath:indexPath];
44 |
45 | cell.textLabel.text = self.titles[indexPath.row];
46 |
47 | return cell;
48 | }
49 |
50 | - (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath
51 | {
52 | [tableView deselectRowAtIndexPath:indexPath animated:NO];
53 |
54 |
55 | switch (indexPath.row) {
56 | case 0:
57 | {
58 | __weak typeof(self)weakSelf = self;
59 |
60 | [OpenCameraOrPhoto showOpenCameraOrPhotoWithView:self.view withBlock:^(UIImage *image) {
61 |
62 | // Fix the image orientation before processing
63 | UIImage *upImage = [UIImage fixOrientationImage:image];
64 | VisionFaceViewController *visionFaceVc = [[VisionFaceViewController alloc] initWithImage:upImage discernType:GLDiscernFaceRectType];
65 | [weakSelf.navigationController pushViewController:visionFaceVc animated:YES];
66 | }];
67 |
68 | }
69 | break;
70 | case 1:
71 | {
72 | __weak typeof(self)weakSelf = self;
73 |
74 | [OpenCameraOrPhoto showOpenCameraOrPhotoWithView:self.view withBlock:^(UIImage *image) {
75 |
76 | // Fix the image orientation before processing
77 | UIImage *upImage = [UIImage fixOrientationImage:image];
78 | VisionFaceViewController *visionFaceVc = [[VisionFaceViewController alloc] initWithImage:upImage discernType:GLDiscernFaceLandmarkType];
79 | [weakSelf.navigationController pushViewController:visionFaceVc animated:YES];
80 |
81 | }];
82 | }
83 | break;
84 | case 2:
85 | {
86 | VisionCameraViewController *visionCameraVc = [[VisionCameraViewController alloc] initWithDiscernType:GLDiscernFaceDynamicSceneType];
87 | [self.navigationController pushViewController:visionCameraVc animated:YES];
88 | }
89 | break;
90 | case 3:
91 | {
92 | VisionCameraViewController *visionCameraVc = [[VisionCameraViewController alloc] initWithDiscernType:GLDiscernFaceRectDynamicType];
93 | [self.navigationController pushViewController:visionCameraVc animated:YES];
94 | }
95 | break;
96 | default:
97 | break;
98 | }
99 | }
100 |
101 |
102 |
103 |
104 | @end
105 |
--------------------------------------------------------------------------------
/VisionDemo/UIImage+GLProcessing.h:
--------------------------------------------------------------------------------
1 | //
2 | // UIImage+GLProcessing.h
3 | // UIImageOperationDemo
4 | //
5 | // Created by 高磊 on 2017/4/12.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import <Foundation/Foundation.h>
11 |
12 | /**
13 | Image processing helpers
14 | */
15 | @interface UIImage (GLProcessing)
16 | 
17 | 
18 | /**
19 | Create a circular image with an outer ring
20 | 
21 | @param image source image
22 | @param border outer border width
23 | @param color ring color
24 | @return the cropped image
25 | */
26 | + (UIImage*)gl_circleImage:(UIImage*)image withBorder:(CGFloat)border color:(UIColor*)color;
27 | 
28 | 
29 | /**
30 | Create a circular image
31 | 
32 | @param image source image
33 | @return the resulting image
34 | */
35 | + (UIImage *)gl_circleImage:(UIImage *)image;
36 | 
37 | /**
38 | Create a rectangular image from a color
39 | 
40 | @param color fill color
41 | @param size image size
42 | @return the generated image
43 | */
44 | + (UIImage *)gl_imageWithColor:(UIColor *)color size:(CGSize)size;
45 | 
46 | 
47 | /**
48 | Create a circular image from a color
49 | 
50 | @param color fill color
51 | @param radius image radius
52 | @return the generated image
53 | */
54 | + (UIImage *)gl_circleImageWithColor:(UIColor *)color radius:(CGFloat)radius;
55 | 
56 | 
57 | /**
58 | Round the corners of an image
59 | 
60 | @param image source image
61 | @param corner corner radius
62 | @param rectCorner which corners to round
63 | @return the generated image
64 | */
65 | + (UIImage*)gl_cornerImage:(UIImage*)image corner:(CGFloat)corner rectCorner:(UIRectCorner)rectCorner;
66 | 
67 | 
68 | /**
69 | Compress an image
70 | 
71 | @param image source image
72 | @param maxSize maximum dimension (the larger of width and height)
73 | @param maxSizeKB maximum compressed size in KB
74 | @return the compressed image
75 | */
76 | + (UIImage *)gl_compressImage:(UIImage *)image maxSize:(CGFloat)maxSize maxSizeWithKB:(CGFloat)maxSizeKB;
77 | 
78 | 
79 | /**
80 | Compress an image
81 | 
82 | @param image source image
83 | @param maxSize maximum dimension (the larger of width and height)
84 | @return the compressed image
85 | */
86 | + (UIImage *)gl_compressImage:(UIImage *)image maxSize:(CGFloat)maxSize;
87 | 
88 | /**
89 | Load an animated GIF
90 | 
91 | @param imagePath path to the GIF file
92 | @return the animated image
93 | */
94 | + (UIImage *)gl_animateGIFWithImagePath:(NSString *)imagePath;
95 | 
96 | 
97 | /**
98 | Load an animated GIF
99 | 
100 | @param data the GIF data
101 | @return the animated image
102 | */
103 | + (UIImage *)gl_animateGIFWithImageData:(NSData *)data;
104 | 
105 | 
106 | /**
107 | Load an animated GIF
108 | 
109 | @param url URL of the GIF
110 | @return the animated image
111 | */
112 | + (UIImage *)gl_animateGIFWithImageUrl:(NSURL *)url;
113 | 
114 | 
115 | 
116 | /**
117 | Draw text on top of an image
118 | @param image source image
119 | @param text the text to draw
120 | @param attributeDic text attributes such as font and color
121 | @param point drawing origin
122 | @return the image with the text drawn on it
123 | */
124 | + (UIImage *)gl_addTitleAboveImage:(UIImage *)image addTitleText:(NSString *)text attributeDic:(NSDictionary *)attributeDic point:(CGPoint)point;
125 | 
126 | 
127 | 
128 | /**
129 | Draw one image on top of another
130 | 
131 | @param image base image
132 | @param addImage image to draw on top
133 | @param rect frame of the overlay within the base image
134 | @return the combined image
135 | */
136 | + (UIImage *)gl_addAboveImage:(UIImage *)image addImage:(UIImage *)addImage rect:(CGRect)rect;
137 | 
138 | 
139 | /**
140 | Snapshot a view
141 | 
142 | @param view the view to snapshot
143 | @return the snapshot image
144 | */
145 | + (UIImage *)gl_snapScreenView:(UIView *)view;
146 | 
147 | /**
148 | Erase part of an image
149 | 
150 | @param view the view being erased
151 | @param point erase location
152 | @param size brush size
153 | @return the image after erasing
154 | */
155 | + (UIImage *)gl_wipeImageWithView:(UIView *)view movePoint:(CGPoint)point brushSize:(CGSize)size;
156 | 
157 | 
158 | /**
159 | Draw rectangles on an image at the given coordinates
160 | 
161 | @param image source image
162 | @param rects rectangle coordinates (NSValue-wrapped CGRects)
163 | @return the annotated image
164 | */
165 | + (UIImage *)gl_drawImage:(UIImage *)image withRects:(NSArray *)rects;
166 | 
167 | 
168 | /**
169 | Draw facial landmarks
170 | 
171 | @param image image containing a face
172 | @param landMarkPoints landmark coordinate points
173 | @return the annotated image
174 | */
175 | + (UIImage *)gl_drawImage:(UIImage *)image faceLandMarkPoints:(NSArray *)landMarkPoints;
176 | 
177 | 
178 | /**
179 | Rotate an image so it is upright
180 | 
181 | @return the fixed image
182 | */
183 | + (UIImage *)fixOrientationImage:(UIImage *)image;
184 | @end
185 |
--------------------------------------------------------------------------------
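
A short usage sketch for the category above (illustrative only; the helper name is made up): orient a picked photo upright, then draw detected rectangles onto it, mirroring what VisionTableViewController and VisionFaceViewController do with these helpers.

#import "UIImage+GLProcessing.h"

// Sketch: fix a camera image's orientation, then annotate it with rects
// (NSValue-wrapped CGRects, as produced by GLTools below).
static UIImage *GLAnnotateSketch(UIImage *picked, NSArray<NSValue *> *rects) {
    UIImage *upright = [UIImage fixOrientationImage:picked]; // camera images often carry a rotation flag
    return [UIImage gl_drawImage:upright withRects:rects];
}
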
/VisionDemo/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/VisionDemo/OpenCameraOrPhoto.m:
--------------------------------------------------------------------------------
1 | //
2 | // OpenCameraOrPhoto.m
3 | // BaishitongClient
4 | //
5 | // Created by 高磊 on 15/10/28.
6 | // Copyright © 2015年 高磊. All rights reserved.
7 | //
8 |
9 | #import "OpenCameraOrPhoto.h"
10 | #import <UIKit/UIKit.h>
11 | #import <AVFoundation/AVFoundation.h>
12 | #import <MobileCoreServices/MobileCoreServices.h>
13 | #import "GLTools.h"
14 |
15 |
16 | @interface OpenCameraOrPhoto()<UIImagePickerControllerDelegate,UINavigationControllerDelegate>
17 | {
18 | UIView * superView;
19 | }
20 |
21 | @property (nonatomic,strong) UIAlertController *actionSheet;
22 |
23 | @end
24 |
25 | @implementation OpenCameraOrPhoto
26 |
27 |
28 | SYNTHESIZE_SINGLETON_FOR_CLASS(OpenCameraOrPhoto)
29 |
30 | - (id)init
31 | {
32 | self = [super init];
33 | if (self)
34 | {
35 | UIAlertController *actionSheet = [UIAlertController alertControllerWithTitle:nil message:nil preferredStyle:UIAlertControllerStyleActionSheet];
36 |
37 |
38 | __weak typeof(self)weakSelf = self;
39 |
40 | if ([UIImagePickerController isCameraDeviceAvailable:UIImagePickerControllerCameraDeviceRear])
41 | {
42 | [self addActionTarget:actionSheet title:NSLocalizedString(@"拍照", nil) color:[UIColor redColor] action:^(UIAlertAction *action) {
43 | [weakSelf takePhoto];
44 | }];
45 | }
46 |
47 | [self addActionTarget:actionSheet title:NSLocalizedString(@"选择本地照片", nil) color:[UIColor redColor] action:^(UIAlertAction *action) {
48 | [weakSelf choosePhoto];
49 | }];
50 |
51 | [self addCancelActionTarget:actionSheet title:@"取消"];
52 |
53 | self.actionSheet = actionSheet;
54 | }
55 | return self;
56 | }
57 |
58 |
59 | + (void)showOpenCameraOrPhotoWithView:(UIView *)view withBlock:(OpenCameraOrPhotoBlock)openCameraOrPhotoBlock
60 | {
61 | [[OpenCameraOrPhoto sharedInstance] showOpenCameraOrPhotoWithView:view withBlock:openCameraOrPhotoBlock];
62 | }
63 |
64 |
65 |
66 | #pragma mark - Private methods
67 | - (void)choosePhoto
68 | {
69 | // Pick from the photo library
70 | if ([[GLTools sharedInstance] isPhotoLibraryAvailable]) {
71 | UIImagePickerController *controller = [[UIImagePickerController alloc] init];
72 | controller.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
73 | NSMutableArray *mediaTypes = [[NSMutableArray alloc] init];
74 | [mediaTypes addObject:(__bridge NSString *)kUTTypeImage];
75 | controller.mediaTypes = mediaTypes;
76 | controller.delegate = self;
77 | [self.glViewController presentViewController:controller
78 | animated:YES
79 | completion:^(void){
80 | NSLog(@"Picker View Controller is presented");
81 | }];
82 | }
83 | }
84 |
85 | - (void)takePhoto
86 | {
87 | if ([[GLTools sharedInstance] isCameraAuthorized])
88 | {
89 | // Take a photo
90 | if ([[GLTools sharedInstance] isCameraAvailable] && [[GLTools sharedInstance] doesCameraSupportTakingPhotos]) {
91 | UIImagePickerController *controller = [[UIImagePickerController alloc] init];
92 | controller.sourceType = UIImagePickerControllerSourceTypeCamera;
93 | if ([[GLTools sharedInstance] isRearCameraAvailable]) {
94 | controller.cameraDevice = UIImagePickerControllerCameraDeviceRear;
95 | }
96 | NSMutableArray *mediaTypes = [[NSMutableArray alloc] init];
97 | [mediaTypes addObject:(__bridge NSString *)kUTTypeImage];
98 | controller.mediaTypes = mediaTypes;
99 | controller.delegate = self;
100 | [self.glViewController presentViewController:controller
101 | animated:YES
102 | completion:^(void){
103 | NSLog(@"Picker View Controller is presented");
104 | }];
105 | }
106 | }
107 | else
108 | {
109 | UIAlertController *actionSheet = [UIAlertController alertControllerWithTitle:@"提示" message:@"您尚未打开“相机服务”，开启方法：“设置-隐私-相机”" preferredStyle:UIAlertControllerStyleActionSheet];
110 | [self addCancelActionTarget:actionSheet title:@"确定"];
111 | [[self glViewController] presentViewController:actionSheet animated:YES completion:nil];
112 | }
113 | }
114 |
115 | - (void)showOpenCameraOrPhotoWithView:(UIView *)view withBlock:(OpenCameraOrPhotoBlock)openCameraOrPhotoBlock
116 | {
117 | superView = view;
118 | self.openCameraOrPhotoBlock = openCameraOrPhotoBlock;
119 | [[self glViewController] presentViewController:self.actionSheet animated:YES completion:nil];
120 | }
121 |
122 | // Cancel button
123 | -(void)addCancelActionTarget:(UIAlertController*)alertController title:(NSString *)title
124 | {
125 | UIAlertAction *action = [UIAlertAction actionWithTitle:title style:UIAlertActionStyleCancel handler:^(UIAlertAction *action) {
126 |
127 | }];
128 | if (([[[UIDevice currentDevice] systemVersion] floatValue] >= 8.3))
129 | {
130 | [action setValue:[UIColor blackColor] forKey:@"_titleTextColor"]; // private KVC key, not a public API
131 | }
132 | else
133 | {
134 | alertController.view.tintColor = [UIColor grayColor];
135 | }
136 |
137 | [alertController addAction:action];
138 | }
139 |
140 | - (void)addActionTarget:(UIAlertController *)alertController title:(NSString *)title color:(UIColor *)color action:(void(^)(UIAlertAction *action))actionTarget
141 | {
142 | UIAlertAction *action = [UIAlertAction actionWithTitle:title style:UIAlertActionStyleDefault handler:^(UIAlertAction *action) {
143 | actionTarget(action);
144 | }];
145 |
146 | if (([[[UIDevice currentDevice] systemVersion] floatValue] >= 8.3))
147 | {
148 | [action setValue:color forKey:@"_titleTextColor"]; // private KVC key, not a public API
149 | }
150 | else
151 | {
152 | alertController.view.tintColor = [UIColor redColor];
153 | }
154 |
155 | [alertController addAction:action];
156 | }
157 |
158 | - (UIViewController *)glViewController {
159 | UIResponder *nextResponder = superView;
160 | do
161 | {
162 | nextResponder = [nextResponder nextResponder];
163 |
164 | if ([nextResponder isKindOfClass:[UIViewController class]])
165 | return (UIViewController*)nextResponder;
166 |
167 | } while (nextResponder != nil);
168 |
169 | return nil;
170 | }
171 |
172 | #pragma mark - UIImagePickerControllerDelegate
173 | - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
174 |
175 | __weak typeof(self) weakSelf = self;
176 | [picker dismissViewControllerAnimated:YES completion:^() {
177 |
178 | UIImage *portraitImg = [info objectForKey:UIImagePickerControllerOriginalImage];
179 |
180 | if (weakSelf.openCameraOrPhotoBlock)
181 | {
182 | weakSelf.openCameraOrPhotoBlock(portraitImg);
183 | }
184 | }];
185 | }
186 |
187 |
188 | #pragma mark - UINavigationControllerDelegate
189 | - (void)navigationController:(UINavigationController *)navigationController willShowViewController:(UIViewController *)viewController animated:(BOOL)animated {
190 | }
191 |
192 | - (void)navigationController:(UINavigationController *)navigationController didShowViewController:(UIViewController *)viewController animated:(BOOL)animated {
193 |
194 | }
195 | @end
196 |
--------------------------------------------------------------------------------
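
OpenCameraOrPhoto locates the view controller to present from by walking the responder chain upward from the anchor view (see -glViewController above), so a caller only needs to pass any on-screen view. A minimal call-site sketch (illustrative only):

// Sketch: present the camera/photo-library action sheet from a view controller.
[OpenCameraOrPhoto showOpenCameraOrPhotoWithView:self.view withBlock:^(UIImage *image) {
    NSLog(@"picked image: %@", image); // delivered after the picker is dismissed
}];
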
/VisionDemo/GLTools.m:
--------------------------------------------------------------------------------
1 | //
2 | // GLTools.m
3 | // VisionDemo
4 | //
5 | // Created by 高磊 on 2017/9/30.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import "GLTools.h"
10 | #import "GLDiscernPointModel.h"
11 |
12 | #import <UIKit/UIKit.h>
13 | #import <AVFoundation/AVFoundation.h>
14 | #import <MobileCoreServices/MobileCoreServices.h>
15 | 
16 | #import <Vision/Vision.h>
17 | 
18 | #import <objc/runtime.h>
19 |
20 | @implementation GLTools
21 |
22 | SYNTHESIZE_SINGLETON_FOR_CLASS(GLTools)
23 |
24 | - (void)glDiscernWithImageType:(GLDiscernType)type image:(UIImage *)image complete:(discernResultBlock)complete
25 | {
26 | // Create the request handler for the image
27 | VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCIImage:[[CIImage alloc] initWithImage:image] options:@{}];
28 | 
29 | VNRequestCompletionHandler completionHandler = ^(VNRequest *request, NSError * _Nullable error){
30 | NSArray *observations = request.results;
31 | GLDiscernPointModel *pointModel = [self glHandlerImageWithType:type image:image observations:observations];
32 | complete(pointModel);
33 | };
34 | 
35 | VNImageBasedRequest *imageRequest = [[VNImageBasedRequest alloc] init];
36 | 
37 | switch (type) {
38 | case GLDiscernFaceRectType:
39 | {
40 | imageRequest = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:completionHandler];
41 | }
42 | break;
43 | case GLDiscernFaceLandmarkType:
44 | {
45 | imageRequest = [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:completionHandler];
46 | }
47 | break;
48 | default:
49 | break;
50 | }
51 | 
52 | imageRequest.preferBackgroundProcessing = YES;
53 | // Send the recognition request on a background queue; switch back to the main queue before updating UI
54 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
55 | // Check the error inside the block: the request runs asynchronously,
56 | // so testing it right after dispatch_async would race the handler
57 | NSError *error = nil;
58 | if (![handler performRequests:@[imageRequest] error:&error]) {
59 | NSLog(@"%@",error.localizedDescription);
60 | }
61 | });
62 | }
63 |
64 | - (GLDiscernPointModel *)glHandlerImageWithType:(GLDiscernType)type image:(UIImage *)image observations:(NSArray *)observations
65 | {
66 | GLDiscernPointModel *pointModel = nil;
67 | switch (type) {
68 | case GLDiscernFaceRectType:
69 | {
70 | pointModel = [self handlerFaceRect:observations image:image];
71 | }
72 | break;
73 | case GLDiscernFaceLandmarkType:
74 | {
75 | pointModel = [self handlerFaceLandMark:observations image:image];
76 | }
77 | break;
78 | default:
79 | break;
80 | }
81 |
82 | return pointModel;
83 | }
84 |
85 | - (GLDiscernPointModel *)handlerFaceRect:(NSArray *)observations image:(UIImage *)image
86 | {
87 | GLDiscernPointModel *pointModel = [[GLDiscernPointModel alloc] init];
88 | NSMutableArray *rects = @[].mutableCopy;
89 | for (VNFaceObservation *faceObservation in observations) {
90 | // Convert the normalized boundingBox into image coordinates
91 | CGRect transFrame = [self convertRect:faceObservation.boundingBox imageSize:image.size];
92 | [rects addObject:[NSValue valueWithCGRect:transFrame]];
93 | }
94 | pointModel.faceRectPoints = rects;
95 | return pointModel;
96 | }
97 |
98 | - (GLDiscernPointModel *)handlerFaceLandMark:(NSArray *)observations image:(UIImage *)image
99 | {
100 | GLDiscernPointModel *pointModel = [[GLDiscernPointModel alloc] init];
101 | NSMutableArray *rects = @[].mutableCopy;
102 |
103 | for (VNFaceObservation *faceObservation in observations) {
104 |
105 | VNFaceLandmarks2D *faceLandMarks2D = faceObservation.landmarks;
106 |
107 | [self getKeysWithClass:[VNFaceLandmarks2D class] block:^(NSString *key) {
108 | if ([key isEqualToString:@"allPoints"]) {
109 | return ;
110 | }
111 | VNFaceLandmarkRegion2D *faceLandMarkRegion2D = [faceLandMarks2D valueForKey:key];
112 |
113 | NSMutableArray *sPoints = [[NSMutableArray alloc] initWithCapacity:faceLandMarkRegion2D.pointCount];
114 |
115 | for (int i = 0; i < faceLandMarkRegion2D.pointCount; i ++) {
116 | CGPoint point = faceLandMarkRegion2D.normalizedPoints[i];
117 |
118 | CGFloat rectWidth = image.size.width * faceObservation.boundingBox.size.width;
119 | CGFloat rectHeight = image.size.height * faceObservation.boundingBox.size.height;
120 | CGPoint p = CGPointMake(point.x * rectWidth + faceObservation.boundingBox.origin.x * image.size.width, faceObservation.boundingBox.origin.y * image.size.height + point.y * rectHeight);
121 | [sPoints addObject:[NSValue valueWithCGPoint:p]];
122 | }
123 |
124 | [rects addObject:sPoints];
125 | }];
126 | }
127 |
128 | pointModel.faceLandMarkPoints = rects;
129 | return pointModel;
130 | }
131 |
132 | #pragma mark camera utility
133 | 
134 | // Whether camera permission has been granted.
135 | // When the status is "not determined" this triggers the system prompt,
136 | // but the user's answer only arrives asynchronously in the completion
137 | // handler, so that first call still returns NO.
138 | - (BOOL)isCameraAuthorized
139 | {
140 | BOOL authorized = NO;
141 | 
142 | AVAuthorizationStatus authStatus = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
143 | if (authStatus == AVAuthorizationStatusAuthorized) {
144 | authorized = YES;
145 | }
146 | else if (authStatus == AVAuthorizationStatusNotDetermined) {
147 | [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted)
148 | {
149 | // Only this asynchronous callback learns the user's decision
150 | }];
151 | }
152 | else {
153 | authorized = NO;
154 | }
155 | return authorized;
156 | }
157 | 
158 |
159 | - (BOOL)isCameraAvailable{
160 | return [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
161 | }
162 |
163 | - (BOOL)isRearCameraAvailable{
164 | return [UIImagePickerController isCameraDeviceAvailable:UIImagePickerControllerCameraDeviceRear];
165 | }
166 |
167 | - (BOOL)isFrontCameraAvailable {
168 | return [UIImagePickerController isCameraDeviceAvailable:UIImagePickerControllerCameraDeviceFront];
169 | }
170 |
171 | - (BOOL)doesCameraSupportTakingPhotos {
172 | return [self cameraSupportsMedia:(__bridge NSString *)kUTTypeImage
173 | sourceType:UIImagePickerControllerSourceTypeCamera];
174 | }
175 |
176 | - (BOOL)isPhotoLibraryAvailable{
177 | return [UIImagePickerController isSourceTypeAvailable:
178 | UIImagePickerControllerSourceTypePhotoLibrary];
179 | }
180 | - (BOOL)canUserPickVideosFromPhotoLibrary{
181 | return [self
182 | cameraSupportsMedia:(__bridge NSString *)kUTTypeMovie
183 | sourceType:UIImagePickerControllerSourceTypePhotoLibrary];
184 | }
185 | - (BOOL)canUserPickPhotosFromPhotoLibrary{
186 | return [self
187 | cameraSupportsMedia:(__bridge NSString *)kUTTypeImage
188 | sourceType:UIImagePickerControllerSourceTypePhotoLibrary];
189 | }
190 |
191 | - (BOOL)cameraSupportsMedia:(NSString *)paramMediaType
192 | sourceType:(UIImagePickerControllerSourceType)paramSourceType{
193 | __block BOOL result = NO;
194 | if ([paramMediaType length] == 0) {
195 | return NO;
196 | }
197 | NSArray *availableMediaTypes = [UIImagePickerController availableMediaTypesForSourceType:paramSourceType];
198 | [availableMediaTypes enumerateObjectsUsingBlock: ^(id obj, NSUInteger idx, BOOL *stop) {
199 | NSString *mediaType = (NSString *)obj;
200 | if ([mediaType isEqualToString:paramMediaType]){
201 | result = YES;
202 | *stop= YES;
203 | }
204 | }];
205 | return result;
206 | }
207 |
208 | // Convert a Vision normalized rect into UIKit image coordinates
209 | - (CGRect)convertRect:(CGRect)boundingBox imageSize:(CGSize)imageSize{
210 | CGFloat w = boundingBox.size.width * imageSize.width;
211 | CGFloat h = boundingBox.size.height * imageSize.height;
212 | CGFloat x = boundingBox.origin.x * imageSize.width;
213 | CGFloat y = imageSize.height * (1 - boundingBox.origin.y - boundingBox.size.height); // flip the y axis: Vision's origin is bottom-left, UIKit's is top-left
214 | return CGRectMake(x, y, w, h);
215 | }
216 |
217 | - (NSArray *)getKeysWithClass:(Class)class block:(void(^)(NSString *key))block
218 | {
219 | NSMutableArray *keys = @[].mutableCopy;
220 | unsigned int outCount = 0;
221 | 
222 | // Copy the class's property list from the Objective-C runtime;
223 | // the returned buffer is owned by the caller and must be freed
224 | objc_property_t *propertys = class_copyPropertyList(class, &outCount);
225 | 
226 | for (unsigned int i = 0; i < outCount; i++) {
227 | objc_property_t property = propertys[i];
228 | const char *name = property_getName(property);
229 | 
230 | NSString *key = [NSString stringWithUTF8String:name];
231 | [keys addObject:key];
232 | block(key);
233 | }
234 | 
235 | free(propertys);
236 | 
237 | return keys.copy;
238 | }
239 |
240 | @end
241 |
--------------------------------------------------------------------------------
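
A note on -convertRect:imageSize: above: Vision reports boundingBox normalized to [0,1] with a bottom-left origin, while UIKit image coordinates put the origin at the top-left, so the method scales by the image size and flips the y axis via height * (1 - y - h). A quick sanity check with made-up numbers:

// Sketch: verify the coordinate flip with arbitrary values.
CGRect boundingBox = CGRectMake(0.25, 0.5, 0.5, 0.3); // normalized, bottom-left origin
CGSize imageSize   = CGSizeMake(400, 600);            // pixels
CGRect converted   = [[GLTools sharedInstance] convertRect:boundingBox imageSize:imageSize];
// converted = {100, 120, 200, 180}:
//   x = 0.25 * 400, y = 600 * (1 - 0.5 - 0.3), w = 0.5 * 400, h = 0.3 * 600
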
/VisionDemo.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 48;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | EB0E8A2B1F8CAF470030618F /* UIImage+GLProcessing.m in Sources */ = {isa = PBXBuildFile; fileRef = EB0E8A2A1F8CAF470030618F /* UIImage+GLProcessing.m */; };
11 | EB0E8A2E1F8DBBD40030618F /* VisionFaceViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = EB0E8A2D1F8DBBD40030618F /* VisionFaceViewController.m */; };
12 | EB0E8A311F8DC1660030618F /* GLDiscernPointModel.m in Sources */ = {isa = PBXBuildFile; fileRef = EB0E8A301F8DC1660030618F /* GLDiscernPointModel.m */; };
13 | EB89DC701F7B885300B3A05F /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = EB89DC6F1F7B885300B3A05F /* AppDelegate.m */; };
14 | EB89DC761F7B885300B3A05F /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = EB89DC741F7B885300B3A05F /* Main.storyboard */; };
15 | EB89DC781F7B885300B3A05F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = EB89DC771F7B885300B3A05F /* Assets.xcassets */; };
16 | EB89DC7B1F7B885300B3A05F /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = EB89DC791F7B885300B3A05F /* LaunchScreen.storyboard */; };
17 | EB89DC7E1F7B885300B3A05F /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = EB89DC7D1F7B885300B3A05F /* main.m */; };
18 | EB89DC861F7B889900B3A05F /* OpenCameraOrPhoto.m in Sources */ = {isa = PBXBuildFile; fileRef = EB89DC841F7B889900B3A05F /* OpenCameraOrPhoto.m */; };
19 | EB89DC891F7B8CCC00B3A05F /* VisionTableViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = EB89DC881F7B8CCC00B3A05F /* VisionTableViewController.m */; };
20 | EB89DC951F7E36B100B3A05F /* VisionCameraViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = EB89DC941F7E36B000B3A05F /* VisionCameraViewController.m */; };
21 | EB89DC981F7F3FC400B3A05F /* GLTools.m in Sources */ = {isa = PBXBuildFile; fileRef = EB89DC971F7F3FC400B3A05F /* GLTools.m */; };
22 | /* End PBXBuildFile section */
23 |
24 | /* Begin PBXFileReference section */
25 | EB0E8A291F8CAF470030618F /* UIImage+GLProcessing.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIImage+GLProcessing.h"; sourceTree = "<group>"; };
26 | EB0E8A2A1F8CAF470030618F /* UIImage+GLProcessing.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIImage+GLProcessing.m"; sourceTree = "<group>"; };
27 | EB0E8A2C1F8DBBD40030618F /* VisionFaceViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = VisionFaceViewController.h; sourceTree = "<group>"; };
28 | EB0E8A2D1F8DBBD40030618F /* VisionFaceViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = VisionFaceViewController.m; sourceTree = "<group>"; };
29 | EB0E8A2F1F8DC1660030618F /* GLDiscernPointModel.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLDiscernPointModel.h; sourceTree = "<group>"; };
30 | EB0E8A301F8DC1660030618F /* GLDiscernPointModel.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLDiscernPointModel.m; sourceTree = "<group>"; };
31 | EB89DC6B1F7B885300B3A05F /* VisionDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = VisionDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };
32 | EB89DC6E1F7B885300B3A05F /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
33 | EB89DC6F1F7B885300B3A05F /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
34 | EB89DC751F7B885300B3A05F /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
35 | EB89DC771F7B885300B3A05F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
36 | EB89DC7A1F7B885300B3A05F /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
37 | EB89DC7C1F7B885300B3A05F /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
38 | EB89DC7D1F7B885300B3A05F /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
39 | EB89DC841F7B889900B3A05F /* OpenCameraOrPhoto.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = OpenCameraOrPhoto.m; sourceTree = "<group>"; };
40 | EB89DC851F7B889900B3A05F /* OpenCameraOrPhoto.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = OpenCameraOrPhoto.h; sourceTree = "<group>"; };
41 | EB89DC871F7B8CCC00B3A05F /* VisionTableViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionTableViewController.h; sourceTree = "<group>"; };
42 | EB89DC881F7B8CCC00B3A05F /* VisionTableViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisionTableViewController.m; sourceTree = "<group>"; };
43 | EB89DC931F7E36B000B3A05F /* VisionCameraViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VisionCameraViewController.h; sourceTree = "<group>"; };
44 | EB89DC941F7E36B000B3A05F /* VisionCameraViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = VisionCameraViewController.m; sourceTree = "<group>"; };
45 | EB89DC961F7F3FC400B3A05F /* GLTools.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GLTools.h; sourceTree = "<group>"; };
46 | EB89DC971F7F3FC400B3A05F /* GLTools.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GLTools.m; sourceTree = "<group>"; };
47 | /* End PBXFileReference section */
48 |
49 | /* Begin PBXFrameworksBuildPhase section */
50 | EB89DC681F7B885300B3A05F /* Frameworks */ = {
51 | isa = PBXFrameworksBuildPhase;
52 | buildActionMask = 2147483647;
53 | files = (
54 | );
55 | runOnlyForDeploymentPostprocessing = 0;
56 | };
57 | /* End PBXFrameworksBuildPhase section */
58 |
59 | /* Begin PBXGroup section */
60 | EB89DC621F7B885300B3A05F = {
61 | isa = PBXGroup;
62 | children = (
63 | EB89DC6D1F7B885300B3A05F /* VisionDemo */,
64 | EB89DC6C1F7B885300B3A05F /* Products */,
65 | );
66 | sourceTree = "";
67 | };
68 | EB89DC6C1F7B885300B3A05F /* Products */ = {
69 | isa = PBXGroup;
70 | children = (
71 | EB89DC6B1F7B885300B3A05F /* VisionDemo.app */,
72 | );
73 | name = Products;
74 | sourceTree = "";
75 | };
76 | EB89DC6D1F7B885300B3A05F /* VisionDemo */ = {
77 | isa = PBXGroup;
78 | children = (
79 | EB89DC6E1F7B885300B3A05F /* AppDelegate.h */,
80 | EB89DC6F1F7B885300B3A05F /* AppDelegate.m */,
81 | EB89DC961F7F3FC400B3A05F /* GLTools.h */,
82 | EB89DC971F7F3FC400B3A05F /* GLTools.m */,
83 | EB0E8A2F1F8DC1660030618F /* GLDiscernPointModel.h */,
84 | EB0E8A301F8DC1660030618F /* GLDiscernPointModel.m */,
85 | EB0E8A291F8CAF470030618F /* UIImage+GLProcessing.h */,
86 | EB0E8A2A1F8CAF470030618F /* UIImage+GLProcessing.m */,
87 | EB89DC851F7B889900B3A05F /* OpenCameraOrPhoto.h */,
88 | EB89DC841F7B889900B3A05F /* OpenCameraOrPhoto.m */,
89 | EB89DC871F7B8CCC00B3A05F /* VisionTableViewController.h */,
90 | EB89DC881F7B8CCC00B3A05F /* VisionTableViewController.m */,
91 | EB0E8A2C1F8DBBD40030618F /* VisionFaceViewController.h */,
92 | EB0E8A2D1F8DBBD40030618F /* VisionFaceViewController.m */,
93 | EB89DC931F7E36B000B3A05F /* VisionCameraViewController.h */,
94 | EB89DC941F7E36B000B3A05F /* VisionCameraViewController.m */,
95 | EB89DC741F7B885300B3A05F /* Main.storyboard */,
96 | EB89DC771F7B885300B3A05F /* Assets.xcassets */,
97 | EB89DC791F7B885300B3A05F /* LaunchScreen.storyboard */,
98 | EB89DC7C1F7B885300B3A05F /* Info.plist */,
99 | EB89DC7D1F7B885300B3A05F /* main.m */,
100 | );
101 | path = VisionDemo;
102 | sourceTree = "";
103 | };
104 | /* End PBXGroup section */
105 |
106 | /* Begin PBXNativeTarget section */
107 | EB89DC6A1F7B885300B3A05F /* VisionDemo */ = {
108 | isa = PBXNativeTarget;
109 | buildConfigurationList = EB89DC811F7B885300B3A05F /* Build configuration list for PBXNativeTarget "VisionDemo" */;
110 | buildPhases = (
111 | EB89DC671F7B885300B3A05F /* Sources */,
112 | EB89DC681F7B885300B3A05F /* Frameworks */,
113 | EB89DC691F7B885300B3A05F /* Resources */,
114 | );
115 | buildRules = (
116 | );
117 | dependencies = (
118 | );
119 | name = VisionDemo;
120 | productName = VisionDemo;
121 | productReference = EB89DC6B1F7B885300B3A05F /* VisionDemo.app */;
122 | productType = "com.apple.product-type.application";
123 | };
124 | /* End PBXNativeTarget section */
125 |
126 | /* Begin PBXProject section */
127 | EB89DC631F7B885300B3A05F /* Project object */ = {
128 | isa = PBXProject;
129 | attributes = {
130 | LastUpgradeCheck = 0900;
131 | ORGANIZATIONNAME = "高磊";
132 | TargetAttributes = {
133 | EB89DC6A1F7B885300B3A05F = {
134 | CreatedOnToolsVersion = 9.0;
135 | ProvisioningStyle = Automatic;
136 | };
137 | };
138 | };
139 | buildConfigurationList = EB89DC661F7B885300B3A05F /* Build configuration list for PBXProject "VisionDemo" */;
140 | compatibilityVersion = "Xcode 8.0";
141 | developmentRegion = en;
142 | hasScannedForEncodings = 0;
143 | knownRegions = (
144 | en,
145 | Base,
146 | );
147 | mainGroup = EB89DC621F7B885300B3A05F;
148 | productRefGroup = EB89DC6C1F7B885300B3A05F /* Products */;
149 | projectDirPath = "";
150 | projectRoot = "";
151 | targets = (
152 | EB89DC6A1F7B885300B3A05F /* VisionDemo */,
153 | );
154 | };
155 | /* End PBXProject section */
156 |
157 | /* Begin PBXResourcesBuildPhase section */
158 | EB89DC691F7B885300B3A05F /* Resources */ = {
159 | isa = PBXResourcesBuildPhase;
160 | buildActionMask = 2147483647;
161 | files = (
162 | EB89DC7B1F7B885300B3A05F /* LaunchScreen.storyboard in Resources */,
163 | EB89DC781F7B885300B3A05F /* Assets.xcassets in Resources */,
164 | EB89DC761F7B885300B3A05F /* Main.storyboard in Resources */,
165 | );
166 | runOnlyForDeploymentPostprocessing = 0;
167 | };
168 | /* End PBXResourcesBuildPhase section */
169 |
170 | /* Begin PBXSourcesBuildPhase section */
171 | EB89DC671F7B885300B3A05F /* Sources */ = {
172 | isa = PBXSourcesBuildPhase;
173 | buildActionMask = 2147483647;
174 | files = (
175 | EB89DC861F7B889900B3A05F /* OpenCameraOrPhoto.m in Sources */,
176 | EB89DC891F7B8CCC00B3A05F /* VisionTableViewController.m in Sources */,
177 | EB89DC7E1F7B885300B3A05F /* main.m in Sources */,
178 | EB0E8A2E1F8DBBD40030618F /* VisionFaceViewController.m in Sources */,
179 | EB89DC701F7B885300B3A05F /* AppDelegate.m in Sources */,
180 | EB0E8A2B1F8CAF470030618F /* UIImage+GLProcessing.m in Sources */,
181 | EB89DC981F7F3FC400B3A05F /* GLTools.m in Sources */,
182 | EB0E8A311F8DC1660030618F /* GLDiscernPointModel.m in Sources */,
183 | EB89DC951F7E36B100B3A05F /* VisionCameraViewController.m in Sources */,
184 | );
185 | runOnlyForDeploymentPostprocessing = 0;
186 | };
187 | /* End PBXSourcesBuildPhase section */
188 |
189 | /* Begin PBXVariantGroup section */
190 | EB89DC741F7B885300B3A05F /* Main.storyboard */ = {
191 | isa = PBXVariantGroup;
192 | children = (
193 | EB89DC751F7B885300B3A05F /* Base */,
194 | );
195 | name = Main.storyboard;
196 | 			sourceTree = "<group>";
197 | };
198 | EB89DC791F7B885300B3A05F /* LaunchScreen.storyboard */ = {
199 | isa = PBXVariantGroup;
200 | children = (
201 | EB89DC7A1F7B885300B3A05F /* Base */,
202 | );
203 | name = LaunchScreen.storyboard;
204 | 			sourceTree = "<group>";
205 | };
206 | /* End PBXVariantGroup section */
207 |
208 | /* Begin XCBuildConfiguration section */
209 | EB89DC7F1F7B885300B3A05F /* Debug */ = {
210 | isa = XCBuildConfiguration;
211 | buildSettings = {
212 | ALWAYS_SEARCH_USER_PATHS = NO;
213 | CLANG_ANALYZER_NONNULL = YES;
214 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
215 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
216 | CLANG_CXX_LIBRARY = "libc++";
217 | CLANG_ENABLE_MODULES = YES;
218 | CLANG_ENABLE_OBJC_ARC = YES;
219 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
220 | CLANG_WARN_BOOL_CONVERSION = YES;
221 | CLANG_WARN_COMMA = YES;
222 | CLANG_WARN_CONSTANT_CONVERSION = YES;
223 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
224 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
225 | CLANG_WARN_EMPTY_BODY = YES;
226 | CLANG_WARN_ENUM_CONVERSION = YES;
227 | CLANG_WARN_INFINITE_RECURSION = YES;
228 | CLANG_WARN_INT_CONVERSION = YES;
229 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
230 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
231 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
232 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
233 | CLANG_WARN_STRICT_PROTOTYPES = YES;
234 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
235 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
236 | CLANG_WARN_UNREACHABLE_CODE = YES;
237 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
238 | CODE_SIGN_IDENTITY = "iPhone Developer";
239 | COPY_PHASE_STRIP = NO;
240 | DEBUG_INFORMATION_FORMAT = dwarf;
241 | ENABLE_STRICT_OBJC_MSGSEND = YES;
242 | ENABLE_TESTABILITY = YES;
243 | GCC_C_LANGUAGE_STANDARD = gnu11;
244 | GCC_DYNAMIC_NO_PIC = NO;
245 | GCC_NO_COMMON_BLOCKS = YES;
246 | GCC_OPTIMIZATION_LEVEL = 0;
247 | GCC_PREPROCESSOR_DEFINITIONS = (
248 | "DEBUG=1",
249 | "$(inherited)",
250 | );
251 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
252 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
253 | GCC_WARN_UNDECLARED_SELECTOR = YES;
254 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
255 | GCC_WARN_UNUSED_FUNCTION = YES;
256 | GCC_WARN_UNUSED_VARIABLE = YES;
257 | IPHONEOS_DEPLOYMENT_TARGET = 11.0;
258 | MTL_ENABLE_DEBUG_INFO = YES;
259 | ONLY_ACTIVE_ARCH = YES;
260 | SDKROOT = iphoneos;
261 | };
262 | name = Debug;
263 | };
264 | EB89DC801F7B885300B3A05F /* Release */ = {
265 | isa = XCBuildConfiguration;
266 | buildSettings = {
267 | ALWAYS_SEARCH_USER_PATHS = NO;
268 | CLANG_ANALYZER_NONNULL = YES;
269 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
270 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
271 | CLANG_CXX_LIBRARY = "libc++";
272 | CLANG_ENABLE_MODULES = YES;
273 | CLANG_ENABLE_OBJC_ARC = YES;
274 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
275 | CLANG_WARN_BOOL_CONVERSION = YES;
276 | CLANG_WARN_COMMA = YES;
277 | CLANG_WARN_CONSTANT_CONVERSION = YES;
278 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
279 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
280 | CLANG_WARN_EMPTY_BODY = YES;
281 | CLANG_WARN_ENUM_CONVERSION = YES;
282 | CLANG_WARN_INFINITE_RECURSION = YES;
283 | CLANG_WARN_INT_CONVERSION = YES;
284 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
285 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
286 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
287 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
288 | CLANG_WARN_STRICT_PROTOTYPES = YES;
289 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
290 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
291 | CLANG_WARN_UNREACHABLE_CODE = YES;
292 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
293 | CODE_SIGN_IDENTITY = "iPhone Developer";
294 | COPY_PHASE_STRIP = NO;
295 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
296 | ENABLE_NS_ASSERTIONS = NO;
297 | ENABLE_STRICT_OBJC_MSGSEND = YES;
298 | GCC_C_LANGUAGE_STANDARD = gnu11;
299 | GCC_NO_COMMON_BLOCKS = YES;
300 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
301 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
302 | GCC_WARN_UNDECLARED_SELECTOR = YES;
303 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
304 | GCC_WARN_UNUSED_FUNCTION = YES;
305 | GCC_WARN_UNUSED_VARIABLE = YES;
306 | IPHONEOS_DEPLOYMENT_TARGET = 11.0;
307 | MTL_ENABLE_DEBUG_INFO = NO;
308 | SDKROOT = iphoneos;
309 | VALIDATE_PRODUCT = YES;
310 | };
311 | name = Release;
312 | };
313 | EB89DC821F7B885300B3A05F /* Debug */ = {
314 | isa = XCBuildConfiguration;
315 | buildSettings = {
316 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
317 | CODE_SIGN_IDENTITY = "iPhone Developer";
318 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
319 | CODE_SIGN_STYLE = Automatic;
320 | DEVELOPMENT_TEAM = GJE3HPLD6B;
321 | INFOPLIST_FILE = VisionDemo/Info.plist;
322 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
323 | PRODUCT_BUNDLE_IDENTIFIER = com.gaolei.abc;
324 | PRODUCT_NAME = "$(TARGET_NAME)";
325 | PROVISIONING_PROFILE = "";
326 | PROVISIONING_PROFILE_SPECIFIER = "";
327 | TARGETED_DEVICE_FAMILY = "1,2";
328 | };
329 | name = Debug;
330 | };
331 | EB89DC831F7B885300B3A05F /* Release */ = {
332 | isa = XCBuildConfiguration;
333 | buildSettings = {
334 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
335 | CODE_SIGN_IDENTITY = "iPhone Developer";
336 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
337 | CODE_SIGN_STYLE = Automatic;
338 | DEVELOPMENT_TEAM = GJE3HPLD6B;
339 | INFOPLIST_FILE = VisionDemo/Info.plist;
340 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
341 | PRODUCT_BUNDLE_IDENTIFIER = com.gaolei.abc;
342 | PRODUCT_NAME = "$(TARGET_NAME)";
343 | PROVISIONING_PROFILE = "";
344 | PROVISIONING_PROFILE_SPECIFIER = "";
345 | TARGETED_DEVICE_FAMILY = "1,2";
346 | };
347 | name = Release;
348 | };
349 | /* End XCBuildConfiguration section */
350 |
351 | /* Begin XCConfigurationList section */
352 | EB89DC661F7B885300B3A05F /* Build configuration list for PBXProject "VisionDemo" */ = {
353 | isa = XCConfigurationList;
354 | buildConfigurations = (
355 | EB89DC7F1F7B885300B3A05F /* Debug */,
356 | EB89DC801F7B885300B3A05F /* Release */,
357 | );
358 | defaultConfigurationIsVisible = 0;
359 | defaultConfigurationName = Release;
360 | };
361 | EB89DC811F7B885300B3A05F /* Build configuration list for PBXNativeTarget "VisionDemo" */ = {
362 | isa = XCConfigurationList;
363 | buildConfigurations = (
364 | EB89DC821F7B885300B3A05F /* Debug */,
365 | EB89DC831F7B885300B3A05F /* Release */,
366 | );
367 | defaultConfigurationIsVisible = 0;
368 | defaultConfigurationName = Release;
369 | };
370 | /* End XCConfigurationList section */
371 | };
372 | rootObject = EB89DC631F7B885300B3A05F /* Project object */;
373 | }
374 |
--------------------------------------------------------------------------------
/VisionDemo/UIImage+GLProcessing.m:
--------------------------------------------------------------------------------
1 | //
2 | // UIImage+GLProcessing.m
3 | // UIImageOperationDemo
4 | //
5 | // Created by 高磊 on 2017/4/12.
6 | // Copyright © 2017年 高磊. All rights reserved.
7 | //
8 |
9 | #import "UIImage+GLProcessing.h"
10 | #import <ImageIO/ImageIO.h>
11 | #import <CoreGraphics/CoreGraphics.h>
12 |
13 | @implementation UIImage (GLProcessing)
14 |
15 | + (UIImage*)gl_circleImage:(UIImage*)image withBorder:(CGFloat)border color:(UIColor *)color
16 | {
17 |     //Draw into a context we create ourselves; the usual approach for image processing
18 |     //Use this variant (with the screen scale) so the result stays sharp on Retina screens
19 |     //It creates a context, pushes it onto the context stack, and flips the coordinates to match UIKit's
20 |     UIGraphicsBeginImageContextWithOptions(CGSizeMake(image.size.width, image.size.height), NO, [UIScreen mainScreen].scale);
21 |     //Get the current context
22 |     CGContextRef context = UIGraphicsGetCurrentContext();
23 |     CGRect rect = CGRectMake(0, 0, image.size.width, image.size.height);
24 |     //Set the border line width
25 |     CGContextSetLineWidth(context, 4*border);
26 |     //Set the border color
27 |     CGContextSetStrokeColorWithColor(context, color.CGColor);
28 | 
29 |     //Add an ellipse; with equal width and height it is a circle. This defines the visible area
30 |     CGContextAddEllipseInRect(context, rect);
31 |     //Clip to the visible area
32 |     CGContextClip(context);
33 | 
34 |     //Draw the image
35 |     [image drawInRect:rect];
36 | 
37 |     CGContextAddEllipseInRect(context, rect);
38 |     // Stroke the current path; only the border is drawn
39 | CGContextStrokePath(context);
40 |
41 |
42 | UIImage *newimg = UIGraphicsGetImageFromCurrentImageContext();
43 | UIGraphicsEndImageContext();
44 | return newimg;
45 | }
46 |
47 | + (UIImage *)gl_circleImage:(UIImage *)image
48 | {
49 | UIGraphicsBeginImageContextWithOptions(CGSizeMake(image.size.width, image.size.height), NO, [UIScreen mainScreen].scale);
50 |     //Get the current context
51 |     CGContextRef context = UIGraphicsGetCurrentContext();
52 |     CGRect rect = CGRectMake(0, 0, image.size.width, image.size.height);
53 | 
54 |     //Add an ellipse; with equal width and height it is a circle
55 |     CGContextAddEllipseInRect(context, rect);
56 |     //Clip to the visible area
57 |     CGContextClip(context);
58 | 
59 |     //Draw the image
60 | [image drawInRect:rect];
61 |
62 | UIImage *newimg = UIGraphicsGetImageFromCurrentImageContext();
63 | UIGraphicsEndImageContext();
64 | return newimg;
65 | }
66 |
67 | + (UIImage *)gl_imageWithColor:(UIColor *)color size:(CGSize)size{
68 | CGSize imageSize = size;
69 |     //Draw into a context we create ourselves; the usual approach for image processing
70 |     UIGraphicsBeginImageContextWithOptions(imageSize, NO, [UIScreen mainScreen].scale);
71 |     //Get the current context
72 |     CGContextRef context = UIGraphicsGetCurrentContext();
73 |     //Set the fill color
74 |     CGContextSetFillColorWithColor(context, color.CGColor);
75 |     //Fill the whole rect directly
76 | CGContextFillRect(context, CGRectMake(0, 0, size.width, size.height));
77 | UIImage *newimg = UIGraphicsGetImageFromCurrentImageContext();
78 | UIGraphicsEndImageContext();
79 | return newimg;
80 | }
81 |
82 | + (UIImage *)gl_circleImageWithColor:(UIColor *)color radius:(CGFloat)radius
83 | {
84 | CGSize imageSize = CGSizeMake(radius, radius);
85 |     //Draw into a context we create ourselves; the usual approach for image processing
86 |     UIGraphicsBeginImageContextWithOptions(imageSize, NO, [UIScreen mainScreen].scale);
87 |     //Get the current context
88 |     CGContextRef context = UIGraphicsGetCurrentContext();
89 |     //Set the fill color
90 |     CGContextSetFillColorWithColor(context, color.CGColor);
91 |     CGContextAddEllipseInRect(context, CGRectMake(0, 0, imageSize.width, imageSize.height));
92 |     //Fill the region enclosed by the path with the current fill color/style
93 | CGContextFillPath(context);
94 | UIImage *newimg = UIGraphicsGetImageFromCurrentImageContext();
95 | UIGraphicsEndImageContext();
96 | return newimg;
97 | }
98 |
99 | + (UIImage*)gl_cornerImage:(UIImage*)image corner:(CGFloat)corner rectCorner:(UIRectCorner)rectCorner
100 | {
101 | CGSize imageSize = image.size;
102 | UIGraphicsBeginImageContextWithOptions(imageSize, NO, [UIScreen mainScreen].scale);
103 | CGContextRef context = UIGraphicsGetCurrentContext();
104 | CGRect rect = CGRectMake(0,
105 | 0,
106 | imageSize.width,
107 | imageSize.height);
108 | UIBezierPath *path = [UIBezierPath bezierPathWithRoundedRect:rect
109 | byRoundingCorners:rectCorner
110 | cornerRadii:CGSizeMake(corner,
111 | corner)];
112 |     //Add the path
113 |     CGContextAddPath(context, [path CGPath]);
114 |     //Clip to the visible area
115 | CGContextClip(context);
116 | [image drawInRect:rect];
117 |
118 | UIImage *newimg = UIGraphicsGetImageFromCurrentImageContext();
119 | UIGraphicsEndImageContext();
120 | return newimg;
121 | }
122 |
123 | + (UIImage*)gl_compressImage:(UIImage *)image maxSize:(CGFloat)maxSize maxSizeWithKB:(CGFloat)maxSizeKB
124 | {
125 | if (maxSize <= 0) {
126 | return nil;
127 | }
128 |
129 | if (maxSizeKB <= 0) {
130 | return nil;
131 | }
132 |
133 | CGSize compressSize = image.size;
134 |     //Compute the scale factors and compare them
135 | CGFloat widthScale = compressSize.width*1.0 / maxSize;
136 | CGFloat heightScale = compressSize.height*1.0 / maxSize;
137 |
138 | if (widthScale > 1 && widthScale > heightScale) {
139 | compressSize = CGSizeMake(image.size.width/widthScale, image.size.height/widthScale);
140 | }
141 | else if (heightScale > 1 && heightScale > widthScale){
142 | compressSize = CGSizeMake(image.size.width/heightScale, image.size.height/heightScale);
143 | }
144 |
145 |     //Create an image context and get the resized image
146 |     UIGraphicsBeginImageContextWithOptions(compressSize, NO, 1);
147 |     CGRect rect = {CGPointZero,compressSize};
148 |     [image drawInRect:rect];
149 |     UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
150 |     UIGraphicsEndImageContext();
151 | 
152 |     //Shrink the data size in a loop
153 |     NSData *imageData = UIImageJPEGRepresentation(newImage, 1.0);
154 |     //Current size of the image data
155 |     CGFloat currentImageSizeOfKB = imageData.length/1024.0;
156 | 
157 |     //Compression quality
158 | CGFloat compress = 0.9;
159 |
160 | while (currentImageSizeOfKB > maxSizeKB && compress > 0.1) {
161 | imageData = UIImageJPEGRepresentation(newImage, compress);
162 | currentImageSizeOfKB = imageData.length/1024.0;
163 | compress -= 0.1;
164 | }
165 | return [UIImage imageWithData:imageData];
166 | }
167 |
168 | + (UIImage *)gl_compressImage:(UIImage *)image maxSize:(CGFloat)maxSize
169 | {
170 | if (maxSize <= 0) {
171 | return nil;
172 | }
173 |
174 | CGSize compressSize = image.size;
175 |     //Compute the scale factors and compare them
176 | CGFloat widthScale = compressSize.width*1.0 / maxSize;
177 | CGFloat heightScale = compressSize.height*1.0 / maxSize;
178 |
179 | if (widthScale > 1 && widthScale > heightScale) {
180 | compressSize = CGSizeMake(image.size.width/widthScale, image.size.height/widthScale);
181 | }
182 | else if (heightScale > 1 && heightScale > widthScale){
183 | compressSize = CGSizeMake(image.size.width/heightScale, image.size.height/heightScale);
184 | }
185 |
186 |     //Create an image context and get the resized image
187 | UIGraphicsBeginImageContextWithOptions(compressSize, NO, 1);
188 | CGRect rect = {CGPointZero,compressSize};
189 | [image drawInRect:rect];
190 | UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
191 | UIGraphicsEndImageContext();
192 |
193 | return newImage;
194 | }
195 |
196 | #pragma mark == GIF images
197 | + (UIImage *)gl_animateGIFWithImagePath:(NSString *)imagePath
198 | {
199 | NSData *data = [NSData dataWithContentsOfFile:imagePath];
200 | if (!data) {
201 | return nil;
202 | }
203 |
204 |     //Get the animated image source; it is Create'd, so it must be released later
205 |     CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
206 | 
207 |     //Number of frames in the source
208 |     size_t imageCount = CGImageSourceGetCount(imageSource);
209 |     //If there is only a single frame, return it as a plain image
210 |     if (imageCount <= 1) {
211 | 
212 |         UIImage *resultImage = [UIImage imageWithData:data];
213 |         if (imageSource) { CFRelease(imageSource); } //Release before returning, otherwise it leaks
214 |         return resultImage;
215 |     }
216 |
217 | return animatedImageWithAnimateImageSource(imageSource);
218 | }
219 |
220 | + (UIImage *)gl_animateGIFWithImageData:(NSData *)data
221 | {
222 | if (!data) {
223 | return nil;
224 | }
225 |
226 |     //Get the animated image source; it is Create'd, so it must be released later
227 |     CGImageSourceRef imageSource = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
228 | 
229 |     //Number of frames in the source
230 |     size_t imageCount = CGImageSourceGetCount(imageSource);
231 |     //If there is only a single frame, return it as a plain image
232 |     if (imageCount <= 1) {
233 | 
234 |         UIImage *resultImage = [UIImage imageWithData:data];
235 |         if (imageSource) { CFRelease(imageSource); } //Release before returning, otherwise it leaks
236 |         return resultImage;
237 |     }
238 |
239 | return animatedImageWithAnimateImageSource(imageSource);
240 | }
241 |
242 | + (UIImage *)gl_animateGIFWithImageUrl:(NSURL *)url
243 | {
244 | if (!url) {
245 | return nil;
246 | }
247 |     //Get the animated image source; it is Create'd, so it must be released later
248 | CGImageSourceRef imageSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
249 |
250 | return animatedImageWithAnimateImageSource(imageSource);
251 | }
252 |
253 | //Assemble the animated image from a source
254 | static UIImage *animatedImageWithAnimateImageSource(CGImageSourceRef imageSource)
255 | {
256 |     if (imageSource) {
257 |         //Number of frames in the source
258 |         size_t imageCount = CGImageSourceGetCount(imageSource);
259 | 
260 |         //The resulting image
261 |         UIImage *resultImage = nil;
262 | 
263 |         //Total duration of the animation
264 |         NSTimeInterval duration = 0.0;
265 |         //Collect the frame images
266 |         NSMutableArray *images = [NSMutableArray arrayWithCapacity:imageCount];
267 | 
268 |         for (size_t i = 0; i < imageCount; i ++) {
269 |             //Create is used here; remember to release it below
270 |             CGImageRef cgImage = CGImageSourceCreateImageAtIndex(imageSource, i, NULL);
271 | 
272 |             if (cgImage) {
273 |                 //Append the frame to the array
274 |                 [images addObject:[UIImage imageWithCGImage:cgImage scale:[UIScreen mainScreen].scale orientation:UIImageOrientationUp]];
275 |             }
276 | 
277 |             duration += frameDuration(i, imageSource);
278 | 
279 |             //Release it, otherwise it leaks
280 | CGImageRelease(cgImage);
281 | }
282 |
283 | if (duration == 0.0) {
284 | duration = 0.1 * imageCount;
285 | }
286 |
287 |
288 | resultImage = [UIImage animatedImageWithImages:images duration:duration];
289 |
290 | CFRelease(imageSource);
291 |
292 | return resultImage;
293 | }
294 | return nil;
295 | }
296 |
297 | static CGFloat frameDuration(NSInteger index,CGImageSourceRef source)
298 | {
299 |     //Get the properties of this frame
300 |     CFDictionaryRef frameProperties = CGImageSourceCopyPropertiesAtIndex(source,index, nil);
301 |     //Convert to an NSDictionary
302 |     NSDictionary *framePropertiesDic = (__bridge NSDictionary *)frameProperties;
303 |     //Get the GIF-specific info for this frame
304 |     NSDictionary *gifProperties = framePropertiesDic[(__bridge NSString *)kCGImagePropertyGIFDictionary];
305 |     /*
306 |      From Apple's documentation:
307 |      kCGImagePropertyGIFDelayTime
308 |      The amount of time, in seconds, to wait before displaying the next image in an animated sequence
309 | 
310 |      kCGImagePropertyGIFUnclampedDelayTime
311 |      The amount of time, in seconds, to wait before displaying the next image in an animated sequence. This value may be 0 milliseconds or higher. Unlike the kCGImagePropertyGIFDelayTime property, this value is not clamped at the low end of the range.
312 | 
313 |      The two read almost identically; the difference is that the unclamped value is not clamped at the low end, so the code below checks kCGImagePropertyGIFUnclampedDelayTime first and falls back to kCGImagePropertyGIFDelayTime
314 |      */
315 | CGFloat duration = 0.1;
316 |
317 | NSNumber *unclampedPropdelayTime = gifProperties[(__bridge NSString *)kCGImagePropertyGIFUnclampedDelayTime];
318 | NSNumber *delayTime = gifProperties[(__bridge NSString *)kCGImagePropertyGIFDelayTime];
319 |
320 | if (unclampedPropdelayTime) {
321 | duration = unclampedPropdelayTime.floatValue;
322 | }else{
323 | if (delayTime) {
324 | duration = delayTime.floatValue;
325 | }
326 | }
327 |
328 | CFRelease(frameProperties);
329 |
330 | return duration;
331 | }
332 |
333 |
334 | #pragma mark == Adding text / Snapshots / Erasing
335 |
336 | + (UIImage *)gl_addTitleAboveImage:(UIImage *)image addTitleText:(NSString *)text
337 | attributeDic:(NSDictionary *)attributeDic point:(CGPoint)point
338 | {
339 | UIGraphicsBeginImageContextWithOptions(image.size, NO, [UIScreen mainScreen].scale);
340 |
341 | CGRect imageRect = CGRectMake(0, 0, image.size.width, image.size.height);
342 |
343 | [image drawInRect:imageRect];
344 |
345 | [text drawAtPoint:point withAttributes:attributeDic];
346 |
347 |     //Get the new image from the context
348 | UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
349 |
350 | UIGraphicsEndImageContext();
351 |
352 | return newImage;
353 | }
354 |
355 | + (UIImage *)gl_addAboveImage:(UIImage *)image addImage:(UIImage *)addImage rect:(CGRect)rect
356 | {
357 | UIGraphicsBeginImageContextWithOptions(image.size, NO, [UIScreen mainScreen].scale);
358 |
359 | CGRect imageRect = CGRectMake(0, 0, image.size.width, image.size.height);
360 |
361 | [image drawInRect:imageRect];
362 |
363 | [addImage drawInRect:rect];
364 |
365 |     //Get the new image from the context
366 | UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
367 |
368 | UIGraphicsEndImageContext();
369 |
370 | return newImage;
371 | }
372 |
373 | + (UIImage *)gl_snapScreenView:(UIView *)view
374 | {
375 |     //Open an image context
376 |     UIGraphicsBeginImageContext(view.bounds.size);
377 | 
378 |     CGContextRef context = UIGraphicsGetCurrentContext();
379 |     //Render the view's layer into the context
380 |     [view.layer renderInContext:context];
381 |     //Get the new image
382 |     UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
383 | 
384 |     UIGraphicsEndImageContext();
385 |     //Avoid a memory leak
386 | view.layer.contents = nil;
387 |
388 | return newImage;
389 | }
390 |
391 | + (UIImage *)gl_wipeImageWithView:(UIView *)view movePoint:(CGPoint)point brushSize:(CGSize)size
392 | {
393 |     //Open an image context
394 |     UIGraphicsBeginImageContext(view.bounds.size);
395 | 
396 |     CGContextRef context = UIGraphicsGetCurrentContext();
397 |     //This call cannot render the view's contents; it only works on the layer's own drawing
398 |     //[view.layer drawInContext:context];
399 | 
400 |     //Center the eraser on point and extend half of size in each direction
401 |     CGRect clearRect = CGRectMake(point.x - size.width/2.0, point.y - size.height/2.0, size.width, size.height);
402 | 
403 |     //Render the layer into the context
404 |     [view.layer renderInContext:context];
405 |     //Clear that area
406 |     CGContextClearRect(context, clearRect);
407 |     //Get the new image
408 |     UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
409 | 
410 |     UIGraphicsEndImageContext();
411 |     //Avoid a memory leak
412 | view.layer.contents = nil;
413 |
414 | return newImage;
415 | }
416 |
417 | + (UIImage *)gl_drawImage:(UIImage *)image withRects:(NSArray *)rects
418 | {
419 | UIImage *newImage = nil;
420 | UIGraphicsBeginImageContextWithOptions(image.size, NO, [UIScreen mainScreen].scale);
421 | CGContextRef context = UIGraphicsGetCurrentContext();
422 |     CGContextSetLineCap(context,kCGLineCapRound); //Line cap style
423 |     CGContextSetLineJoin(context, kCGLineJoinRound);
424 |     CGContextSetLineWidth(context,2); //Line width
425 |     CGContextSetAllowsAntialiasing(context,YES); //Enable anti-aliasing
426 | CGContextSetStrokeColorWithColor(context, [UIColor redColor].CGColor);
427 | CGContextSetFillColorWithColor(context, [UIColor clearColor].CGColor);
428 |
429 |     //Draw the image
430 |     [image drawInRect:CGRectMake(0, 0,image.size.width, image.size.height)];
431 |     CGContextBeginPath(context);
432 |     for (int i = 0; i < rects.count; i ++) {
433 |         CGRect rect = [rects[i] CGRectValue];
434 |         CGPoint sPoints[4];//Corner points
435 |         sPoints[0] = CGPointMake(rect.origin.x, rect.origin.y);//Top left
436 |         sPoints[1] = CGPointMake(rect.origin.x + rect.size.width, rect.origin.y);//Top right
437 |         sPoints[2] = CGPointMake(rect.origin.x + rect.size.width, rect.origin.y + rect.size.height);//Bottom right
438 |         sPoints[3] = CGPointMake(rect.origin.x , rect.origin.y + rect.size.height);//Bottom left
439 | 
440 |         CGContextAddLines(context, sPoints, 4);//Add the four edges
441 |         CGContextClosePath(context); //Close the path
442 |     }
443 |     CGContextDrawPath(context, kCGPathFillStroke); //Stroke (and fill) the path
444 |
445 | newImage = UIGraphicsGetImageFromCurrentImageContext();
446 | UIGraphicsEndImageContext();
447 | return newImage;
448 | }
449 |
450 | + (UIImage *)gl_drawImage:(UIImage *)image faceLandMarkPoints:(NSArray *)landMarkPoints
451 | {
452 | UIImage * newImage = image;
453 | for (NSMutableArray *points in landMarkPoints) {
454 |
455 | CGPoint sPoints [points.count];
456 |
457 |         for (int i = 0;i < points.count; i ++) {
--------------------------------------------------------------------------------
/VisionDemo/VisionCameraViewController.m:
--------------------------------------------------------------------------------
9 | #import "VisionCameraViewController.h"
10 | #import <AVFoundation/AVFoundation.h>
11 | #import <Vision/Vision.h>
12 | #import "GLDiscernPointModel.h"
13 |
14 | @interface VisionCameraViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
15 |
16 | @property (nonatomic,strong) AVCaptureDevice *avDevice;
17 | @property (nonatomic,strong) AVCaptureDeviceInput *avInput;
18 | @property (nonatomic,strong) AVCaptureVideoDataOutput *avOutput;
19 | @property (nonatomic,strong) AVCaptureSession *avSession;
20 | @property (nonatomic,strong) AVCaptureVideoPreviewLayer *avPreviewLayer;
21 |
22 | @property (nonatomic,strong) UIButton *changeCameraButton;
23 | @property (nonatomic,strong) UIButton *turnCameraButton;
24 |
25 | //Layers for the face rectangles
26 | @property (nonatomic,strong) NSMutableArray *rectLayers;
27 |
28 | @property (nonatomic,strong) UIImageView *glassesImageView;
29 |
30 | @property (nonatomic,assign) GLDiscernType discernType;
31 |
32 | @end
33 |
34 | @implementation VisionCameraViewController
35 |
36 | - (void)dealloc{
37 |
38 | }
39 |
40 |
41 | - (id)initWithDiscernType:(GLDiscernType)type
42 | {
43 | self = [super init];
44 | if (self) {
45 | self.discernType = type;
46 | }
47 | return self;
48 | }
49 |
50 | - (void)viewDidLoad {
51 | [super viewDidLoad];
52 |
53 | [self checkAuthority];
54 |
55 | [self addControls];
56 |
57 | // UIImageView *imageview = [[UIImageView alloc] initWithFrame:CGRectMake(0, 100, 200, 300)];
58 | // imageview.tag = 100;
59 | // [self.view addSubview:imageview];
60 | }
61 |
62 | - (void)viewWillAppear:(BOOL)animated
63 | {
64 | [super viewWillAppear:animated];
65 | self.navigationController.navigationBarHidden = YES;
66 | }
67 |
68 | - (void)viewWillDisappear:(BOOL)animated
69 | {
70 | [super viewWillDisappear:animated];
71 | self.navigationController.navigationBarHidden = NO;
72 | [self.avSession stopRunning];
73 | }
74 |
75 | #pragma mark == private method
76 | - (void)addControls
77 | {
78 | UIButton *backButton = [UIButton buttonWithType:UIButtonTypeCustom];
79 | backButton.frame = CGRectMake(20, 30, 30, 30);
80 | [backButton addTarget:self action:@selector(back:) forControlEvents:UIControlEventTouchUpInside];
81 | [backButton setImage:[UIImage imageNamed:@"返回"] forState:UIControlStateNormal];
82 | [self.view addSubview:backButton];
83 |
84 | self.turnCameraButton = [UIButton buttonWithType:UIButtonTypeCustom];
85 | self.turnCameraButton.frame = CGRectMake(self.view.frame.size.width - 50, 30, 30, 30);
86 | [self.turnCameraButton setImage:[UIImage imageNamed:@"turn"] forState:UIControlStateNormal];
87 | [self.turnCameraButton addTarget:self action:@selector(turnCamera:) forControlEvents:UIControlEventTouchUpInside];
88 | [self.view addSubview:self.turnCameraButton];
89 |
90 | self.glassesImageView = [[UIImageView alloc] initWithFrame:CGRectZero];
91 | [self.view addSubview:self.glassesImageView];
92 | self.glassesImageView.image = [UIImage imageNamed:@"eyes"];
93 | }
94 |
95 | - (void)addCapture
96 | {
97 | self.avDevice = [self cameraWithPosition:AVCaptureDevicePositionBack];
98 |
99 |     //Add the input
100 |     self.avInput = [[AVCaptureDeviceInput alloc] initWithDevice:self.avDevice error:nil];
101 |     if ([self.avSession canAddInput:self.avInput]) {
102 |         [self.avSession addInput:self.avInput];
103 |     }
104 |     //Add the output
105 |
106 | self.avOutput = [[AVCaptureVideoDataOutput alloc] init];
107 | [self.avOutput setSampleBufferDelegate:(id)self queue:dispatch_queue_create("CameraCaptureSampleBufferDelegateQueue", NULL)];
108 |
109 | if ([self.avSession canAddOutput:self.avOutput]) {
110 | [self.avSession addOutput:self.avOutput];
111 | AVCaptureConnection *captureConnection = [self.avOutput connectionWithMediaType:AVMediaTypeVideo];
112 |
113 | if ([captureConnection isVideoOrientationSupported]) {
114 | [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
115 | }
116 |         // Video stabilization
117 | if ([captureConnection isVideoStabilizationSupported]) {
118 | captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
119 | }
120 |         // Set the output image orientation
121 | captureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
122 | }
123 |
124 |     //Create the preview view
125 |     // Create the preview layer from the session (AVCaptureSession)
126 | self.avPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.avSession];
127 | self.avPreviewLayer.frame = self.view.bounds;
128 |
129 |     //Change this when the capture should fill the whole screen
130 | self.avPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
131 | self.avPreviewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
132 |
133 |
134 |     // The layer shown on top of the view
135 | CALayer *layer = self.view.layer;
136 | layer.masksToBounds = YES;
137 | [self.view layoutIfNeeded];
138 | [layer addSublayer:self.avPreviewLayer];
139 |
140 | [self.avSession commitConfiguration];
141 | [self.avSession startRunning];
142 | }
143 |
144 | //Camera selection
145 | - (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{
146 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
147 | for ( AVCaptureDevice *device in devices )
148 | if (device.position == position ){
149 | return device;
150 | }
151 | return nil;
152 | }
153 |
154 |
155 | - (void)checkAuthority
156 | {
157 | if ([[GLTools sharedInstance] isCamer])
158 | {
159 | [self addCapture];
160 | }
161 | else
162 | {
163 |         UIAlertController *actionSheet = [UIAlertController alertControllerWithTitle:@"Notice" message:@"This app does not have permission to use the Camera. To enable it, go to Settings - Privacy - Camera and turn it on" preferredStyle:UIAlertControllerStyleActionSheet];
164 |         UIAlertAction *action = [UIAlertAction actionWithTitle:@"OK" style:UIAlertActionStyleCancel handler:^(UIAlertAction *action) {
165 | [self.navigationController popViewControllerAnimated:YES];
166 | }];
167 | [actionSheet addAction:action];
168 | [self presentViewController:actionSheet animated:YES completion:nil];
169 | }
170 | }
171 |
172 | #pragma mark == event response
173 | - (void)back:(UIButton *)sender
174 | {
175 | [self.navigationController popViewControllerAnimated:YES];
176 | }
177 |
178 | - (void)turnCamera:(UIButton *)sender
179 | {
180 | [self.rectLayers makeObjectsPerformSelector:@selector(removeFromSuperlayer)];
181 |
182 | NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
183 | if (cameraCount > 1) {
184 | NSError *error;
185 |         //Add a flip animation for the camera switch
186 | CATransition *animation = [CATransition animation];
187 | animation.duration = .5f;
188 | animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
189 | animation.type = @"oglFlip";
190 |
191 | AVCaptureDevice *newCamera = nil;
192 | AVCaptureDeviceInput *newInput = nil;
193 |         //Find the camera at the other position
194 |         AVCaptureDevicePosition position = [[self.avInput device] position];
195 |         if (position == AVCaptureDevicePositionFront){
196 |             newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
197 |             animation.subtype = kCATransitionFromLeft;//Flip direction
198 |         }
199 |         else {
200 |             newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
201 |             animation.subtype = kCATransitionFromRight;//Flip direction
202 | }
203 |
204 | self.avDevice = newCamera;
205 |         //Create the new input, capturing any error
206 |         newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];
207 | [self.avPreviewLayer addAnimation:animation forKey:nil];
208 | if (newInput != nil) {
209 | [self.avSession beginConfiguration];
210 | [self.avSession removeInput:self.avInput];
211 |
212 | if ([self.avSession canAddInput:newInput]) {
213 | [self.avSession addInput:newInput];
214 | self.avInput = newInput;
215 | } else {
216 | [self.avSession addInput:self.avInput];
217 | }
218 |
219 |             //The output must be reconfigured here, especially the output orientation below
220 | AVCaptureConnection *captureConnection = [self.avOutput connectionWithMediaType:AVMediaTypeVideo];
221 | if ([captureConnection isVideoOrientationSupported]) {
222 | [captureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait];
223 | }
224 |             // Video stabilization
225 | if ([captureConnection isVideoStabilizationSupported]) {
226 | captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
227 | }
228 |             // Set the output image orientation
229 | captureConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
230 |
231 |
232 | [self.avSession commitConfiguration];
233 |
234 | } else if (error) {
235 | NSLog(@"toggle carema failed, error = %@", error);
236 | }
237 | }
238 | }
239 |
240 |
241 | #pragma mark == AVCaptureVideoDataOutputSampleBufferDelegate
242 | - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
243 | {
244 | CVPixelBufferRef cvpixeBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer);
245 |     //Comment out the reconfiguration code at lines 218-229 and you will see the output image orientation change when the camera is switched
246 | // UIImage *image = [UIImage imageWithCIImage:[CIImage imageWithCVPixelBuffer:cvpixeBufferRef]];
247 | //
248 | // dispatch_async(dispatch_get_main_queue(), ^{
249 | // UIImageView *imageview = [self.view viewWithTag:100];
250 | // imageview.image = image;
251 | // });
252 |
253 | VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCVPixelBuffer:cvpixeBufferRef options:@{}];
254 | VNImageBasedRequest *request = [[VNImageBasedRequest alloc] init];
255 | switch (self.discernType) {
256 | case GLDiscernFaceRectDynamicType:
257 | {
258 | request = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:^(VNRequest * _Nonnull request, NSError * _Nullable error) {
259 |
260 | NSLog(@" 打印信息:%lu",request.results.count);
261 | NSArray *vnobservations = request.results;
262 |
263 | dispatch_async(dispatch_get_main_queue(), ^{
264 |                     //First remove the previous rectangle layers
265 | [self.rectLayers makeObjectsPerformSelector:@selector(removeFromSuperlayer)];
266 |
267 | AVCaptureDevicePosition position = [[self.avInput device] position];
268 |
269 |
270 |
271 | for (VNFaceObservation *faceObservation in vnobservations) {
272 | //boundingBox
273 | CGRect transFrame = [[GLTools sharedInstance] convertRect:faceObservation.boundingBox imageSize:self.view.frame.size];
274 |                         //Remember to mirror the x coordinate for the front camera
275 | if (position == AVCaptureDevicePositionFront){
276 | transFrame.origin.x = self.view.frame.size.width - transFrame.origin.x - transFrame.size.width;
277 | }
278 |
279 | CALayer *rectLayer = [CALayer layer];
280 | rectLayer.frame = transFrame;
281 | rectLayer.borderColor = [UIColor purpleColor].CGColor;
282 | rectLayer.borderWidth = 2;
283 | [self.view.layer addSublayer:rectLayer];
284 |
285 | [self.rectLayers addObject:rectLayer];
286 | }
287 | });
288 | }];
289 | }
290 | break;
291 | case GLDiscernFaceDynamicSceneType:
292 | {
293 | request = [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:^(VNRequest * _Nonnull request, NSError * _Nullable error) {
294 | NSArray *vnobservations = request.results;
295 |
296 |
297 | for (VNFaceObservation *faceObservation in vnobservations) {
298 |
299 |
300 | VNFaceLandmarks2D *faceLandMarks2D = faceObservation.landmarks;
301 |
302 | VNFaceLandmarkRegion2D *leftEyefaceLandMarkRegion2D = faceLandMarks2D.leftEye;
303 | VNFaceLandmarkRegion2D *rightEyefaceLandMarkRegion2D = faceLandMarks2D.rightEye;
304 |
305 | dispatch_async(dispatch_get_main_queue(), ^{
306 |
307 | //                    //First remove the previous rectangle layers
308 | // [self.rectLayers makeObjectsPerformSelector:@selector(removeFromSuperlayer)];
309 | //
310 | // AVCaptureDevicePosition position = [[self.avInput device] position];
311 | //
312 | // CGRect transFrame = [[GLTools sharedInstance] convertRect:faceObservation.boundingBox imageSize:self.view.frame.size];
313 | //                    //Remember to mirror the x coordinate for the front camera
314 | // if (position == AVCaptureDevicePositionFront){
315 | // transFrame.origin.x = self.view.frame.size.width - transFrame.origin.x - transFrame.size.width;
316 | // }
317 | //
318 | // CALayer *rectLayer = [CALayer layer];
319 | // rectLayer.frame = transFrame;
320 | // rectLayer.borderColor = [UIColor purpleColor].CGColor;
321 | // rectLayer.borderWidth = 2;
322 | // [self.view.layer addSublayer:rectLayer];
323 | //
324 | // [self.rectLayers addObject:rectLayer];
325 |
326 | AVCaptureDevicePosition position = [[self.avInput device] position];
327 |
328 |
329 | CGPoint sPoints[leftEyefaceLandMarkRegion2D.pointCount + rightEyefaceLandMarkRegion2D.pointCount];
330 |
331 | NSMutableArray *pointXs = [[NSMutableArray alloc] init];
332 | NSMutableArray *pointYs = [[NSMutableArray alloc] init];
333 |
334 | for (int i = 0; i < leftEyefaceLandMarkRegion2D.pointCount; i ++) {
335 | CGPoint point = leftEyefaceLandMarkRegion2D.normalizedPoints[i];
336 |
337 | CGFloat rectWidth = self.view.bounds.size.width * faceObservation.boundingBox.size.width;
338 | CGFloat rectHeight = self.view.bounds.size.height * faceObservation.boundingBox.size.height;
339 |
340 | CGFloat boundingBoxY = self.view.bounds.size.height * (1 - faceObservation.boundingBox.origin.y - faceObservation.boundingBox.size.height);
341 |
342 | CGPoint p = CGPointZero;
343 | if (position == AVCaptureDevicePositionFront){
344 |
345 | CGFloat boundingX = self.view.frame.size.width - faceObservation.boundingBox.origin.x * self.view.bounds.size.width - rectWidth;
346 |
347 | p = CGPointMake(point.x * rectWidth + boundingX, boundingBoxY + (1-point.y) * rectHeight);
348 |
349 | }else{
350 | p = CGPointMake(point.x * rectWidth + faceObservation.boundingBox.origin.x * self.view.bounds.size.width, boundingBoxY + (1-point.y) * rectHeight);
351 | }
352 |
353 | sPoints[i] = p;
354 |
355 | [pointXs addObject:[NSNumber numberWithFloat:p.x]];
356 | [pointYs addObject:[NSNumber numberWithFloat:p.y]];
357 | }
358 |
359 | for (int j = 0; j < rightEyefaceLandMarkRegion2D.pointCount; j ++) {
360 | CGPoint point = rightEyefaceLandMarkRegion2D.normalizedPoints[j];
361 |
362 | CGFloat rectWidth = self.view.bounds.size.width * faceObservation.boundingBox.size.width;
363 | CGFloat rectHeight = self.view.bounds.size.height * faceObservation.boundingBox.size.height;
364 |
365 | CGFloat boundingBoxY = self.view.bounds.size.height * (1 - faceObservation.boundingBox.origin.y - faceObservation.boundingBox.size.height);
366 |
367 | CGPoint p = CGPointZero;
368 | if (position == AVCaptureDevicePositionFront){
369 |
370 | CGFloat boundingX = self.view.frame.size.width - faceObservation.boundingBox.origin.x * self.view.bounds.size.width - rectWidth;
371 |
372 | p = CGPointMake(point.x * rectWidth + boundingX, boundingBoxY + (1-point.y) * rectHeight);
373 |
374 | }else{
375 | p = CGPointMake(point.x * rectWidth + faceObservation.boundingBox.origin.x * self.view.bounds.size.width, boundingBoxY + (1-point.y) * rectHeight);
376 | }
377 |
378 | sPoints[leftEyefaceLandMarkRegion2D.pointCount + j] = p;
379 |
380 | [pointXs addObject:[NSNumber numberWithFloat:p.x]];
381 | [pointYs addObject:[NSNumber numberWithFloat:p.y]];
382 | }
383 |
384 | // for (UIView *view in self.view.subviews) {
385 | // if ([view isKindOfClass:[UIImageView class]]) {
386 | // [view removeFromSuperview];
387 | // }
388 | // }
389 | //
390 | // for (int i = 0; i < rightEyefaceLandMarkRegion2D.pointCount + leftEyefaceLandMarkRegion2D.pointCount; i++) {
391 | // CGFloat x = sPoints[i].x;
392 | // CGFloat y = sPoints[i].y;
393 | // UIImageView *view = [[UIImageView alloc] initWithFrame:CGRectMake(x, y, 2, 2)];
394 | // view.backgroundColor = [UIColor redColor];
395 | // [self.view addSubview:view];
396 | // }
397 |
398 |                         //Sort to get the smallest and largest x values
399 | NSArray *sortPointXs = [pointXs sortedArrayWithOptions:NSSortStable usingComparator:
400 | ^NSComparisonResult(id _Nonnull obj1, id _Nonnull obj2) {
401 | int value1 = [obj1 floatValue];
402 | int value2 = [obj2 floatValue];
403 | if (value1 > value2) {
404 | return NSOrderedDescending;
405 | }else if (value1 == value2){
406 | return NSOrderedSame;
407 | }else{
408 | return NSOrderedAscending;
409 | }
410 | }];
411 |
412 | NSArray *sortPointYs = [pointYs sortedArrayWithOptions:NSSortStable usingComparator:
413 | ^NSComparisonResult(id _Nonnull obj1, id _Nonnull obj2) {
414 | int value1 = [obj1 floatValue];
415 | int value2 = [obj2 floatValue];
416 | if (value1 > value2) {
417 | return NSOrderedDescending;
418 | }else if (value1 == value2){
419 | return NSOrderedSame;
420 | }else{
421 | return NSOrderedAscending;
422 | }
423 | }];
424 |
425 | UIImage *image =[UIImage imageNamed:@"eyes"];
426 | CGFloat imageWidth = [sortPointXs.lastObject floatValue] - [sortPointXs.firstObject floatValue] + 40;
427 | CGFloat imageHeight = (imageWidth * image.size.height)/image.size.width;
428 |
429 | self.glassesImageView.frame = CGRectMake([sortPointXs.firstObject floatValue]-20, [sortPointYs.firstObject floatValue]-5, imageWidth, imageHeight);
430 | });
431 | }
432 | }];
433 | }
434 | break;
435 | default:
436 | break;
437 | }
438 | [handler performRequests:@[request] error:NULL];
439 | }
440 |
441 | #pragma mark == Lazy loading
442 | - (AVCaptureSession *)avSession
443 | {
444 | if (nil == _avSession) {
445 | _avSession = [[AVCaptureSession alloc] init];
446 | if ([_avSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
447 | [_avSession setSessionPreset:AVCaptureSessionPresetHigh];
448 | }
449 | [_avSession beginConfiguration];
450 | }
451 | return _avSession;
452 | }
453 |
454 | - (NSMutableArray *)rectLayers
455 | {
456 | if (nil == _rectLayers) {
457 | _rectLayers = [[NSMutableArray alloc] init];
458 | }
459 | return _rectLayers;
460 | }
461 |
462 | - (void)didReceiveMemoryWarning {
463 | [super didReceiveMemoryWarning];
464 | // Dispose of any resources that can be recreated.
465 | }
466 |
467 |
468 | @end
469 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | >The great road is as wide as the blue sky, yet I alone can find no way out
3 |
4 | ##### Preface
5 |
6 | In the previous article, [iOS Core ML与Vision初识](http://www.jianshu.com/p/b0e5f2944b3d), we got a first look at what `vision` can do, and the article ended with an open question: what are functions like the following actually for?
7 | ```
8 | - (instancetype)initWithCIImage:(CIImage *)image options:(NSDictionary *)options;
9 |
10 | - (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer options:(NSDictionary *)options;
11 | ```
12 | After digging through some material, I eventually used these functions to produce the following effect
13 |
14 |
15 | 
16 | That's right: this is implemented with the `initWithCVPixelBuffer` initializer. Of course `vision` can do far more than that; it also offers:
17 | 1. Image matching (the effect from the previous article)
18 | 2. Rectangle detection
19 | 3. QR code and barcode detection
20 | 4. Object tracking
21 | 5. Text detection
22 | 6. Face detection
23 | 7. Face landmark detection
24 | Since face recognition interests me the most, this article takes a quick look at the face-related parts, covering face detection and face landmark detection
25 |
26 | ##### Image types supported by Vision
27 | Looking through `VNRequestHandler.h`, we can see all of its initializers, and from them we can tell that the supported input types are:
28 | 1. `CVPixelBufferRef`
29 | 2. `CGImageRef`
30 | 3. `CIImage`
31 | 4. `NSURL`
32 | 5. `NSData`
33 |
34 | ##### Using Vision
35 | When using `vision`, we first need to be clear about the effect we want, then pick the classes that match it, and finally wire them together
36 | 1. We need a `RequestHandler`; creating one requires a suitable input source, i.e. an `image` of one of the types above
37 | 2. We need a `Request`, also chosen according to the situation; the available `Request` classes are roughly the following
38 |
39 | 
40 | 3. The `requestHandler` executes the `request`, tying the two together and producing the results
41 | ```
42 | [handler performRequests:@[request] error:&error];
43 | ```
44 | 4. Handle the results: the `results` array of the `VNRequest` contains the `VNObservation` objects; `VNObservation` comes in many kinds, and which one you get is tied to the type of your `Request`
45 |
46 | 
47 |
48 | Once these steps are in place, we can use the results to build the effect we want
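
Putting the four steps together, a minimal sketch (the asset name `photo` is just a placeholder, and error handling is kept to the bare minimum):
```
#import <Vision/Vision.h>

UIImage *image = [UIImage imageNamed:@"photo"]; // 1. the input source (placeholder asset name)
VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCGImage:image.CGImage options:@{}];

// 2. pick the request that matches the desired effect
VNDetectFaceRectanglesRequest *request = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:^(VNRequest * _Nonnull req, NSError * _Nullable error) {
    // 4. the results arrive as VNFaceObservation objects
    for (VNFaceObservation *observation in req.results) {
        NSLog(@"face boundingBox: %@", NSStringFromCGRect(observation.boundingBox));
    }
}];

// 3. the handler performs the request (synchronously, so keep it off the main thread for large images)
NSError *error = nil;
[handler performRequests:@[request] error:&error];
```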
49 |
50 | ##### Face rectangle detection
51 | Here we use `VNDetectFaceRectanglesRequest`
52 | ```
53 | request = [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:completionHandler];
54 | ```
55 | After the results come back, we still need to convert the coordinates
56 | ```
57 | for (VNFaceObservation *faceObservation in observations) {
58 | //boundingBox
59 | CGRect transFrame = [self convertRect:faceObservation.boundingBox imageSize:image.size];
60 | [rects addObject:[NSValue valueWithCGRect:transFrame]];
61 | }
62 | ```
63 |
64 | ```
65 | // Convert a normalized bounding box into image coordinates
66 | - (CGRect)convertRect:(CGRect)boundingBox imageSize:(CGSize)imageSize{
67 | CGFloat w = boundingBox.size.width * imageSize.width;
68 | CGFloat h = boundingBox.size.height * imageSize.height;
69 | CGFloat x = boundingBox.origin.x * imageSize.width;
70 | CGFloat y = imageSize.height * (1 - boundingBox.origin.y - boundingBox.size.height);//- (boundingBox.origin.y * imageSize.height) - h;
71 | return CGRectMake(x, y, w, h);
72 | }
73 | ```
74 | The coordinates in the returned `boundingBox` cannot be used as-is; they must be converted first, because they are proportions relative to the `image`. Pay particular attention to the `y` conversion: the `y` axis of Vision's coordinate system points the opposite way from `UIView`'s.
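To make the conversion concrete, a quick worked example with made-up numbers:
```
// Made-up numbers: a 400x600 image and a Vision boundingBox of (0.25, 0.5, 0.5, 0.25)
CGRect r = [self convertRect:CGRectMake(0.25, 0.5, 0.5, 0.25) imageSize:CGSizeMake(400, 600)];
// w = 0.5  * 400 = 200
// h = 0.25 * 600 = 150
// x = 0.25 * 400 = 100
// y = 600 * (1 - 0.5 - 0.25) = 150   -> the UIKit rect is (100, 150, 200, 150)
```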
75 | The last step is drawing the rectangles from the converted coordinates
76 | ```
77 | + (UIImage *)gl_drawImage:(UIImage *)image withRects:(NSArray *)rects
78 | {
79 | UIImage *newImage = nil;
80 | UIGraphicsBeginImageContextWithOptions(image.size, NO, [UIScreen mainScreen].scale);
81 | CGContextRef context = UIGraphicsGetCurrentContext();
82 | CGContextSetLineCap(context,kCGLineCapRound); //Line cap style
83 | CGContextSetLineJoin(context, kCGLineJoinRound);
84 | CGContextSetLineWidth(context,2); //Line width
85 | CGContextSetAllowsAntialiasing(context,YES); //Enable anti-aliasing
86 | CGContextSetStrokeColorWithColor(context, [UIColor redColor].CGColor);
87 | CGContextSetFillColorWithColor(context, [UIColor clearColor].CGColor);
88 |
89 | //Draw the image
90 | [image drawInRect:CGRectMake(0, 0,image.size.width, image.size.height)];
91 | CGContextBeginPath(context);
92 | for (int i = 0; i < rects.count; i ++) {
93 | CGRect rect = [rects[i] CGRectValue];
94 | CGPoint sPoints[4];//Corner points
95 | sPoints[0] = CGPointMake(rect.origin.x, rect.origin.y);//Top left
96 | sPoints[1] = CGPointMake(rect.origin.x + rect.size.width, rect.origin.y);//Top right
97 | sPoints[2] = CGPointMake(rect.origin.x + rect.size.width, rect.origin.y + rect.size.height);//Bottom right
98 | sPoints[3] = CGPointMake(rect.origin.x , rect.origin.y + rect.size.height);//Bottom left
99 | 
100 | CGContextAddLines(context, sPoints, 4);//Add the four edges
101 | CGContextClosePath(context); //Close the path
102 | }
103 | CGContextDrawPath(context, kCGPathFillStroke); //Stroke (and fill) the path
104 |
105 | newImage = UIGraphicsGetImageFromCurrentImageContext();
106 | UIGraphicsEndImageContext();
107 | return newImage;
108 | }
109 | ```
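For context, a sketch of how this is driven from the completion handler (assuming the detected `observations` and a `UIImageView` named `imageView` for display; both names are assumptions here):
```
NSMutableArray *rects = [NSMutableArray array];
for (VNFaceObservation *faceObservation in observations) {
    CGRect transFrame = [self convertRect:faceObservation.boundingBox imageSize:image.size];
    [rects addObject:[NSValue valueWithCGRect:transFrame]];
}
imageView.image = [UIImage gl_drawImage:image withRects:rects];
```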
110 | The effect looks like this
111 |
112 |
113 | 
114 |
115 | ##### Face landmark detection
116 | Here we use `VNDetectFaceLandmarksRequest`
117 | ```
118 | request = [[VNDetectFaceLandmarksRequest alloc] initWithCompletionHandler:completionHandler];
119 |
120 | ```
121 | Handling the results
122 | ```
123 | for (VNFaceObservation *faceObservation in observations) {
124 | //boundingBox
125 | CGRect transFrame = [self convertRect:faceObservation.boundingBox imageSize:image.size];
126 | [rects addObject:[NSValue valueWithCGRect:transFrame]];
127 | }
128 | pointModel.faceRectPoints = rects;
129 | return pointModel;
130 | }
131 |
132 | - (GLDiscernPointModel *)handlerFaceLandMark:(NSArray *)observations image:(UIImage *)image
133 | {
134 | GLDiscernPointModel *pointModel = [[GLDiscernPointModel alloc] init];
135 | NSMutableArray *rects = @[].mutableCopy;
136 |
137 | for (VNFaceObservation *faceObservation in observations) {
138 |
139 | VNFaceLandmarks2D *faceLandMarks2D = faceObservation.landmarks;
140 |
141 | [self getKeysWithClass:[VNFaceLandmarks2D class] block:^(NSString *key) {
142 | if ([key isEqualToString:@"allPoints"]) {
143 | return ;
144 | }
145 | VNFaceLandmarkRegion2D *faceLandMarkRegion2D = [faceLandMarks2D valueForKey:key];
146 |
147 | NSMutableArray *sPoints = [[NSMutableArray alloc] initWithCapacity:faceLandMarkRegion2D.pointCount];
148 |
149 | for (int i = 0; i < faceLandMarkRegion2D.pointCount; i ++) {
150 | CGPoint point = faceLandMarkRegion2D.normalizedPoints[i];
151 |
152 | CGFloat rectWidth = image.size.width * faceObservation.boundingBox.size.width;
153 | CGFloat rectHeight = image.size.height * faceObservation.boundingBox.size.height;
154 | CGPoint p = CGPointMake(point.x * rectWidth + faceObservation.boundingBox.origin.x * image.size.width, faceObservation.boundingBox.origin.y * image.size.height + point.y * rectHeight);
155 | [sPoints addObject:[NSValue valueWithCGPoint:p]];
156 | }
157 |
158 | [rects addObject:sPoints];
159 | }];
160 | }
161 | ```
162 | Note the `landmarks` property here: it is a `VNFaceLandmarks2D` object holding a number of `VNFaceLandmarkRegion2D` objects for the individual facial features, such as `faceContour`, `leftEye`, `nose`.... representing the face outline, the left eye, and the nose. Each of these objects in turn contains the following property
163 | ```
164 | @property (readonly, assign, nullable) const CGPoint* normalizedPoints
165 | ```
166 | This is a C array of the points that make up the feature, so we can read each coordinate like this
167 | ```
168 | CGPoint point = faceLandMarkRegion2D.normalizedPoints[i];
169 | ```
170 | The same kind of coordinate conversion is needed here as well; see the code above
171 | The last step is again drawing the lines; the code is as follows
172 | ```
173 | + (UIImage *)gl_drawImage:(UIImage *)image faceLandMarkPoints:(NSArray *)landMarkPoints
174 | {
175 | UIImage * newImage = image;
176 | for (NSMutableArray *points in landMarkPoints) {
177 |
178 | CGPoint sPoints [points.count];
179 |
180 |         for (int i = 0;i < points.count; i ++) {
...
402 |     NSArray *sortPointXs = [pointXs sortedArrayWithOptions:NSSortStable usingComparator:
403 |                            ^NSComparisonResult(id _Nonnull obj1, id _Nonnull obj2) {
404 |         int value1 = [obj1 floatValue];
405 |         int value2 = [obj2 floatValue];
406 |         if (value1 > value2) {
407 | return NSOrderedDescending;
408 | }else if (value1 == value2){
409 | return NSOrderedSame;
410 | }else{
411 | return NSOrderedAscending;
412 | }
413 | }];
414 |
415 | NSArray *sortPointYs = [pointYs sortedArrayWithOptions:NSSortStable usingComparator:
416 | ^NSComparisonResult(id _Nonnull obj1, id _Nonnull obj2) {
417 | int value1 = [obj1 floatValue];
418 | int value2 = [obj2 floatValue];
419 | if (value1 > value2) {
420 | return NSOrderedDescending;
421 | }else if (value1 == value2){
422 | return NSOrderedSame;
423 | }else{
424 | return NSOrderedAscending;
425 | }
426 | }];
427 |
428 | UIImage *image =[UIImage imageNamed:@"eyes"];
429 | CGFloat imageWidth = [sortPointXs.lastObject floatValue] - [sortPointXs.firstObject floatValue] + 40;
430 | CGFloat imageHeight = (imageWidth * image.size.height)/image.size.width;
431 |
432 | self.glassesImageView.frame = CGRectMake([sortPointXs.firstObject floatValue]-20, [sortPointYs.firstObject floatValue]-5, imageWidth, imageHeight);
433 | });
434 | }
435 | }];
436 | ```
437 | Time was short, so the code is a bit messy; please bear with it
438 |
439 | First, the idea: I wanted to add a pair of glasses dynamically, so I first had to locate both eyes, then work out the width and height they span, scale the glasses appropriately, and finally overlay them
440 |
441 | One problem I ran into during the implementation has to be called out: `coordinates`
442 |
443 | First the `y` coordinate: if you compute it the way we did for static images, the result comes out exactly inverted.
444 | ```
445 | faceObservation.boundingBox.origin.y * image.size.height + point.y * rectHeight
446 | ```
447 | My assumption is that this is caused by how the camera forms the image, so the computation has to be flipped; I reworked it as follows
448 | ```
449 | CGFloat boundingBoxY = self.view.bounds.size.height * (1 - faceObservation.boundingBox.origin.y - faceObservation.boundingBox.size.height);
450 |
451 | p = CGPointMake(point.x * rectWidth + faceObservation.boundingBox.origin.x * self.view.bounds.size.width, boundingBoxY + (1-point.y) * rectHeight);
452 | ```
453 | As you can see, every `point.y` is now subtracted from `1`. Getting here was a rather frustrating bit of trial and error, and I still have not fully worked out why it behaves this way; if you know, please tell me, and I will keep digging as well.
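To see what the inversion does, made-up numbers again:
```
// Made-up numbers: view height 600, boundingBox.origin.y = 0.6, boundingBox.size.height = 0.2
// Vision measures y from the bottom edge, UIKit from the top edge
CGFloat boundingBoxY = 600 * (1 - 0.6 - 0.2); // = 120, the top of the face box in UIKit coordinates
CGFloat rectHeight   = 600 * 0.2;             // = 120
// A landmark at normalized point.y = 1 (top of the box) lands at the box top:
//   p.y = boundingBoxY + (1 - 1) * rectHeight = 120
// while one at point.y = 0 (bottom of the box) lands at 120 + 120 = 240
```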
454 | With `y` sorted out, next came the `x` coordinate: with the `front camera` everything was fine, yet after switching to the `back camera` it was mirrored again. 😔 Exhausting! So I had no choice but to branch on the camera position and test, which led to the following code
455 | ```
456 | CGFloat boundingX = self.view.frame.size.width - faceObservation.boundingBox.origin.x * self.view.bounds.size.width - rectWidth;
457 | ```
458 | And with that, it finally all works!
459 | The result is the effect shown at the very top of this article
460 |
461 | ##### Notes
462 | 1. In practice, memory and `cpu` usage during image detection is quite high; my `5s`, for instance, managed to crash.....
463 | 2. The image orientation matters....
464 | ```
465 | - (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer options:(NSDictionary *)options;
466 |
467 | /*!
468 | @brief initWithCVPixelBuffer:options creates a VNImageRequestHandler to be used for performing requests against the image passed in as buffer.
469 |
470 | @param pixelBuffer A CVPixelBuffer containing the image to be used for performing the requests. The content of the buffer cannot be modified for the lifetime of the VNImageRequestHandler.
471 | @param orientation The orientation of the image/buffer based on the EXIF specification. For details see kCGImagePropertyOrientation. The value has to be an integer from 1 to 8. This superceeds every other orientation information.
472 | @param options A dictionary with options specifying auxilary information for the buffer/image like VNImageOptionCameraIntrinsics
473 | */
474 | - (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer orientation:(CGImagePropertyOrientation)orientation options:(NSDictionary *)options;
475 |
476 | ```
477 | Comparing the two functions above, the second one takes an extra `CGImagePropertyOrientation` parameter, which specifies the orientation of the incoming image. If you specify an orientation and the image's actual orientation does not match it, then congratulations, nothing gets detected.... I used the first function (the one without the parameter) throughout, and the default appears to be `up`.
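
If the buffer's orientation is known, a sketch of passing it explicitly (the `Up` value here is only an example; the right value depends on how your capture session rotates the buffer):
```
#import <ImageIO/ImageIO.h> // for the kCGImagePropertyOrientation* constants

VNImageRequestHandler *handler = [[VNImageRequestHandler alloc] initWithCVPixelBuffer:cvpixeBufferRef
                                                                          orientation:kCGImagePropertyOrientationUp
                                                                              options:@{}];
```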
478 |
479 | ##### Finally
480 | As always, here is the [Demo](https://github.com/gao211326/VisionDemo); if you think it is decent, a `star` is always welcome! If you run into any problems, feel free to reach out
481 |
--------------------------------------------------------------------------------