├── MinimumOpenCVLiveCamera
│   ├── MinimumOpenCVLiveCamera-Bridging-Header.h
│   ├── Wrapper.h
│   ├── Real-time-Tracking-iOS-Prefix.pch
│   ├── VideoSource.h
│   ├── FrameProcessor.h
│   ├── Assets.xcassets
│   │   └── AppIcon.appiconset
│   │       └── Contents.json
│   ├── Wrapper.mm
│   ├── Base.lproj
│   │   ├── LaunchScreen.storyboard
│   │   └── Main.storyboard
│   ├── AppDelegate.swift
│   ├── CompressiveTracker.h
│   ├── ViewController.swift
│   ├── VideoSource.mm
│   ├── FrameProcessor.mm
│   └── CompressiveTracker.cpp
├── Real-time-Tracking-iOS.xcodeproj
│   ├── project.xcworkspace
│   │   └── contents.xcworkspacedata
│   └── project.pbxproj
├── .gitignore
└── README.md
/MinimumOpenCVLiveCamera/MinimumOpenCVLiveCamera-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // Use this file to import your target's public headers that you would like to expose to Swift.
3 | //
4 |
5 | #import "Wrapper.h"
6 |
--------------------------------------------------------------------------------
/Real-time-Tracking-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <Workspace
 3 |    version = "1.0">
 4 |    <FileRef
 5 |       location = "self:">
 6 |    </FileRef>
 7 | </Workspace>
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/Wrapper.h:
--------------------------------------------------------------------------------
1 | //
2 | // Wrapper.h
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
 9 | #import <UIKit/UIKit.h>
10 |
11 | @interface Wrapper : NSObject
12 |
13 | - (void)setTargetView:(UIView *)view;
14 | - (void)switchMode:(int)mode;
15 | - (void)updateBox:(CGPoint)coords;
16 | - (void)start;
17 | - (void)stop;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/Real-time-Tracking-iOS-Prefix.pch:
--------------------------------------------------------------------------------
1 | //
2 | // Real-time-Tracking-iOS-Prefix.pch
3 | // Real-time-Tracking-iOS
4 | //
5 | // Created by Justin Haupt on 3/20/22.
6 | // Copyright © 2022 justinh5. All rights reserved.
7 | //
8 |
9 | #ifndef Real_time_Tracking_iOS_Prefix_pch
10 | #define Real_time_Tracking_iOS_Prefix_pch
11 |
12 | #ifdef __cplusplus
13 | #import <opencv2/opencv.hpp>
14 | #endif
15 |
16 | #endif /* Real_time_Tracking_iOS_Prefix_pch */
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode (from gitignore.io)
2 | build/
3 | *.pbxuser
4 | !default.pbxuser
5 | *.mode1v3
6 | !default.mode1v3
7 | *.mode2v3
8 | !default.mode2v3
9 | *.perspectivev3
10 | !default.perspectivev3
11 | xcuserdata
12 | *.xccheckout
13 | *.moved-aside
14 | DerivedData
15 | *.hmap
16 | *.ipa
17 | *.xcuserstate
18 |
19 | MinimumOpenCVLiveCamera/opencv2.framework/*
20 |
21 | # CocoaPods
22 | Pods/*
23 | Podfile.lock
24 |
25 | # others
26 | *.swp
27 | !.gitkeep
28 | .DS_Store
29 |
30 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/VideoSource.h:
--------------------------------------------------------------------------------
1 | //
2 | // VideoSource.h
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
 9 | #import <UIKit/UIKit.h>
10 | #import <Foundation/Foundation.h>
11 |
12 | #ifdef __cplusplus
13 | #include <opencv2/opencv.hpp>
14 | #endif
15 |
16 | @protocol VideoSourceDelegate <NSObject>
17 |
18 | - (void)touchMode:(cv::Mat &)frame;
19 | - (void)detectionMode:(cv::Mat &)frame;
20 | - (void)detectionModeMask:(cv::Mat &)frame;
21 | - (void)farneback:(cv::Mat &)frame;
22 | - (void)update:(CGPoint)coords;
23 | - (void)reset;
24 |
25 | @end
26 |
27 | @interface VideoSource : NSObject {
28 |
29 | int tmode;
30 | }
31 |
32 | - (void)switchMode:(int)mode;
33 | - (void)update:(CGPoint)coords;
34 | - (void)start;
35 | - (void)stop;
36 |
37 |
38 | @property(nonatomic, weak) id<VideoSourceDelegate> delegate;
39 | @property(nonatomic, strong) UIView *targetView;
40 |
41 | @end
42 |
43 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/FrameProcessor.h:
--------------------------------------------------------------------------------
1 | //
2 | // FrameProcessor.h
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import "VideoSource.h"
11 | #import "CompressiveTracker.h"
12 | #include <opencv2/opencv.hpp>
13 |
14 | using namespace cv;
15 |
16 |
17 |
18 | @interface FrameProcessor : NSObject <VideoSourceDelegate> {
19 |
20 | // CT framework
21 | CompressiveTracker ct;
22 |
23 | cv::Rect box; // tracking box
24 | cv::Rect touchBox; // touch box location
25 |
26 | Mat current_gray;
27 |
28 | // Farneback optical flow
29 | Mat flow;
30 | Mat flowMat, prevgray;
31 |
32 | // MOG2 Background subtractor
33 | Mat fgMaskMOG2;
34 | cv::Ptr<BackgroundSubtractorMOG2> pMOG2;
35 |
36 | NSTimeInterval timeInSeconds;
37 | NSTimeInterval delay;
38 |
39 | // touch signal
40 | bool touched;
41 | cv::Point touch;
42 | }
43 |
44 |
45 |
46 | @end
47 |
48 |
49 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Real-time-Tracker-iOS
 2 | This app has four modes for visualizing object movement with the iPhone camera in real time: compressive tracking ("touch" mode), object detection, an object-detection mask view, and Farneback optical flow.
3 |
4 |
5 | #### Compressive Tracking
 6 | Touch mode uses the robust real-time compressive tracker designed by Kaihua Zhang, Lei Zhang, and Ming-Hsuan Yang. The [official website](http://www4.comp.polyu.edu.hk/~cslzhang/CT/CT.htm) has source code, documentation, and additional testing demos.
7 |
8 | #### Object Detection
 9 | This mode uses background subtraction to locate moving objects in the iPhone camera's view. A mask variant of this mode shows exactly where foreground objects appear against the background model.
10 |
11 | ## Requirements
12 | - iOS 15.2+
13 | - Xcode 13.0+
14 | - Swift 5.0+
15 | - OpenCV framework 2.4.13
16 |
17 | ## Demos
18 | Test results can be seen at the following links:
19 | - https://www.youtube.com/watch?v=Z7Dl3FhZwJ8
20 | - https://www.youtube.com/watch?v=bmHnLcojRfM
21 | - https://www.youtube.com/watch?v=BPtWrWRifwU
22 | - https://www.youtube.com/watch?v=Pk4jyyv-y0c
23 | - https://www.youtube.com/watch?v=1AxHKyb-NPk
24 |
--------------------------------------------------------------------------------
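A minimal sketch of how the CompressiveTracker API described in the README is driven, mirroring the calls made in FrameProcessor.mm (the seed rectangle and the cv::VideoCapture source are illustrative; the app feeds live camera frames instead):

    #include <opencv2/opencv.hpp>
    #include "CompressiveTracker.h"

    // Seed a box once, then let the tracker relocate it on every new frame.
    void trackClip(cv::VideoCapture &cap)
    {
        CompressiveTracker ct;
        cv::Rect box(100, 100, 55, 55);              // initial object location (illustrative)
        cv::Mat frame, gray;
        bool first = true;
        while (cap.read(frame)) {
            cv::cvtColor(frame, gray, CV_BGR2GRAY);  // the tracker works on grayscale
            if (first) { ct.init(gray, box); first = false; }
            else       { ct.processFrame(gray, box); }   // box is updated in place
            cv::rectangle(frame, box, cv::Scalar(0, 0, 255));
        }
    }

Note that processFrame() both predicts the new box location and updates the classifier, so the caller only ever passes frames and reads back the box.
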
/MinimumOpenCVLiveCamera/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "scale" : "2x",
6 | "size" : "20x20"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "scale" : "3x",
11 | "size" : "20x20"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "scale" : "2x",
16 | "size" : "29x29"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "scale" : "3x",
21 | "size" : "29x29"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "scale" : "2x",
26 | "size" : "40x40"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "scale" : "3x",
31 | "size" : "40x40"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "scale" : "2x",
36 | "size" : "60x60"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "scale" : "3x",
41 | "size" : "60x60"
42 | },
43 | {
44 | "idiom" : "ios-marketing",
45 | "scale" : "1x",
46 | "size" : "1024x1024"
47 | }
48 | ],
49 | "info" : {
50 | "author" : "xcode",
51 | "version" : 1
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/Wrapper.mm:
--------------------------------------------------------------------------------
1 | //
2 | // Wrapper.m
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
9 | #import "Wrapper.h"
10 | #import "FrameProcessor.h"
11 | #import "VideoSource.h"
12 |
13 | #ifdef __cplusplus
14 | #include <opencv2/opencv.hpp>
15 | #endif
16 |
17 | @interface Wrapper ()
18 | @property(nonatomic, strong)FrameProcessor *frameProcessor;
19 | @property(nonatomic, strong)VideoSource *videoSource;
20 | @end
21 |
22 | @implementation Wrapper
23 |
24 | - (instancetype)init
25 | {
26 | self = [super init];
27 | if (self) {
28 | _frameProcessor = [[FrameProcessor alloc] init];
29 | _videoSource = [[VideoSource alloc] init];
30 | _videoSource.delegate = _frameProcessor;
31 | }
32 | return self;
33 | }
34 |
35 | - (void)setTargetView:(UIView *)view {
36 | self.videoSource.targetView = view;
37 | }
38 |
39 | - (void)switchMode:(int)mode {
40 | [self.videoSource switchMode:mode];
41 | }
42 |
43 | - (void)updateBox:(CGPoint)coords {
44 | [self.videoSource update:coords];
45 | }
46 |
47 | - (void)start {
48 | [self.videoSource start];
49 | }
50 |
51 | - (void)stop {
52 | [self.videoSource stop];
53 | }
54 |
55 | @end
56 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
(Interface Builder XML not captured in this dump.)
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | @UIApplicationMain
12 | class AppDelegate: UIResponder, UIApplicationDelegate {
13 |
14 | var window: UIWindow?
15 |
16 |
17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
18 | // Override point for customization after application launch.
19 | return true
20 | }
21 |
22 | func applicationWillResignActive(_ application: UIApplication) {
23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
24 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
25 | }
26 |
27 | func applicationDidEnterBackground(_ application: UIApplication) {
28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
30 | }
31 |
32 | func applicationWillEnterForeground(_ application: UIApplication) {
33 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
34 | }
35 |
36 | func applicationDidBecomeActive(_ application: UIApplication) {
37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
38 | }
39 |
40 | func applicationWillTerminate(_ application: UIApplication) {
41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
42 | }
43 |
44 |
45 | }
46 |
47 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/CompressiveTracker.h:
--------------------------------------------------------------------------------
1 | /************************************************************************
2 | * File: CompressiveTracker.h
3 | * Brief: C++ demo for paper: Kaihua Zhang, Lei Zhang, Ming-Hsuan Yang,"Real-Time Compressive Tracking," ECCV 2012.
4 | * Version: 1.0
5 | * Author: Yang Xian
6 | * Email: yang_xian521@163.com
7 | * Date: 2012/08/03
8 | * History:
9 | * Revised by Kaihua Zhang on 14/8/2012
10 | * Email: zhkhua@gmail.com
11 | * Homepage: http://www4.comp.polyu.edu.hk/~cskhzhang/
12 | ************************************************************************/
13 |
14 | #pragma once
15 | #include
16 | #include
17 |
18 | using std::vector;
19 | using namespace cv;
20 | //---------------------------------------------------
21 |
22 |
23 |
24 |
25 | class CompressiveTracker
26 | {
27 | public:
28 | CompressiveTracker(void);
29 | ~CompressiveTracker(void);
30 |
31 | private:
32 | int featureMinNumRect;
33 | int featureMaxNumRect;
34 | int featureNum;
35 | vector> features;
36 | vector> featuresWeight;
37 | int rOuterPositive;
38 | vector samplePositiveBox;
39 | vector sampleNegativeBox;
40 | int rSearchWindow;
41 | Mat imageIntegral;
42 | Mat samplePositiveFeatureValue;
43 | Mat sampleNegativeFeatureValue;
44 | vector muPositive;
45 | vector sigmaPositive;
46 | vector muNegative;
47 | vector sigmaNegative;
48 | float learnRate;
49 | vector detectBox;
50 | Mat detectFeatureValue;
51 | RNG rng;
52 |
53 | private:
54 | void HaarFeature(cv::Rect& _objectBox, int _numFeature);
55 | void sampleRect(Mat& _image, cv::Rect& _objectBox, float _rInner, float _rOuter, int _maxSampleNum, vector& _sampleBox);
56 | void sampleRect(Mat& _image, cv::Rect& _objectBox, float _srw, vector& _sampleBox);
57 | void getFeatureValue(Mat& _imageIntegral, vector& _sampleBox, Mat& _sampleFeatureValue);
58 | void classifierUpdate(Mat& _sampleFeatureValue, vector& _mu, vector& _sigma, float _learnRate);
59 | void radioClassifier(vector& _muPos, vector& _sigmaPos, vector& _muNeg, vector& _sigmaNeg,
60 | Mat& _sampleFeatureValue, float& _radioMax, int& _radioMaxIndex);
61 | public:
62 | void processFrame(Mat& _frame, cv::Rect& _objectBox);
63 | void init(Mat& _frame, cv::Rect& _objectBox);
64 | };
65 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/ViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.swift
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | class ViewController: UIViewController {
12 |
13 | @IBOutlet weak var previewView: UIView!
14 | @IBOutlet weak var start: UIButton!
15 | @IBOutlet weak var stop: UIButton!
16 | @IBOutlet weak var mode: UIButton!
17 | let wrapper = Wrapper()
18 |
19 |
20 | override func viewDidLoad() {
21 | super.viewDidLoad()
22 | start.layer.cornerRadius = 4
23 | mode.layer.cornerRadius = 4
24 | stop.layer.cornerRadius = 4
25 | }
26 |
27 | override func viewDidAppear(_ animated: Bool) {
28 | super.viewDidAppear(animated)
29 | wrapper.setTargetView(previewView);
30 | }
31 |
32 | override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
33 | if let touch = touches.first {
34 | let position: CGPoint = touch.location(in: view)
35 | wrapper.updateBox(position)
36 | print(position.x)
37 | print(position.y)
38 | }
39 | }
40 |
41 | override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent?) {
42 | if let touch = touches.first {
43 | let position: CGPoint = touch.location(in: view)
44 | wrapper.updateBox(position)
45 | }
46 | }
47 |
48 | override func didReceiveMemoryWarning() {
49 | super.didReceiveMemoryWarning()
50 | // Dispose of any resources that can be recreated.
51 | }
52 |
53 | @IBAction func switchMode(_ sender: Any) {
54 | wrapper.stop()
55 | showAlertMenu()
56 | }
57 |
58 | @IBAction func touchStart(_ sender: Any) {
59 | wrapper.start()
60 | }
61 |
62 | @IBAction func touchStop(_ sender: Any) {
63 | wrapper.stop()
64 | }
65 |
66 | func showAlertMenu() {
67 |
68 | let alertController = UIAlertController(title: "Mode", message: "Choose a mode", preferredStyle: UIAlertController.Style.alert)
69 |
70 | let touch = UIAlertAction(title: "touch tracking", style: UIAlertAction.Style.default) {
71 | (result : UIAlertAction) -> Void in
72 | self.wrapper.switchMode(1)
73 | self.wrapper.start()
74 | }
75 | let objectDet = UIAlertAction(title: "object detection", style: UIAlertAction.Style.default) {
76 | (result : UIAlertAction) -> Void in
77 | self.showAlertMsg(mode: 2)
78 | }
79 | let objectDetMask = UIAlertAction(title: "object detection mask", style: UIAlertAction.Style.default) {
80 | (result : UIAlertAction) -> Void in
81 | self.showAlertMsg(mode: 3)
82 | }
83 | let opticalFlow = UIAlertAction(title: "optical flow", style: UIAlertAction.Style.default) {
84 | (result : UIAlertAction) -> Void in
85 | self.showAlertMsg(mode: 4)
86 | }
87 | let cancel = UIAlertAction(title: "cancel", style: UIAlertAction.Style.default) {
88 | (result : UIAlertAction) -> Void in
89 | self.wrapper.start()
90 | }
91 |
92 | alertController.addAction(touch)
93 | alertController.addAction(objectDet)
94 | alertController.addAction(objectDetMask)
95 | alertController.addAction(opticalFlow)
96 | alertController.addAction(cancel)
97 | self.present(alertController, animated: true, completion: nil)
98 | }
99 |
100 | func showAlertMsg(mode: Int32) {
101 |
102 | let alertController = UIAlertController(title: "Detection Mode", message: "Keep the camera still! Place your device on a steady surface.", preferredStyle: UIAlertController.Style.alert)
103 |
104 | let okAction = UIAlertAction(title: "Got it!", style: UIAlertAction.Style.default) {
105 | (result : UIAlertAction) -> Void in
106 | self.wrapper.switchMode(mode)
107 | self.wrapper.start()
108 | }
109 |
110 | alertController.addAction(okAction)
111 | self.present(alertController, animated: true, completion: nil)
112 | }
113 |
114 | }
115 |
116 |
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/VideoSource.mm:
--------------------------------------------------------------------------------
1 | //
2 | // VideoSource.m
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
9 | #import "VideoSource.h"
10 | #import <AVFoundation/AVFoundation.h>
11 | #import <UIKit/UIKit.h>
12 |
13 | using namespace cv;
14 | using namespace std;
15 |
16 | @interface VideoSource () <AVCaptureVideoDataOutputSampleBufferDelegate>
17 | @property (strong, nonatomic) CALayer *previewLayer;
18 | @property (strong, nonatomic) AVCaptureSession *captureSession;
19 | @end
20 |
21 | @implementation VideoSource
22 |
23 | - (void)setTargetView:(UIView *)targetView {
24 | if (self.previewLayer == nil) {
25 | return;
26 | }
27 | [targetView.layer addSublayer:self.previewLayer];
28 | self.previewLayer.contentsGravity = kCAGravityResizeAspectFill;
29 | self.previewLayer.frame = targetView.bounds;
30 | self.previewLayer.affineTransform = CGAffineTransformMakeRotation(M_PI / 2);
31 | }
32 |
33 | - (instancetype)init
34 | {
35 | self = [super init];
36 | if (self) {
37 | _captureSession = [[AVCaptureSession alloc] init];
38 | _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
39 |
40 | AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
41 | NSError *error = nil;
42 | AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
43 | [_captureSession addInput:input];
44 |
45 | AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
46 | output.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
47 | output.alwaysDiscardsLateVideoFrames = YES;
48 | [_captureSession addOutput:output];
49 |
50 | dispatch_queue_t queue = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL);
51 | [output setSampleBufferDelegate:self queue:queue];
52 |
53 | _previewLayer = [CALayer layer];
54 |
55 | tmode = 1; // default mode: touch tracking
56 | }
57 |
58 | return self;
59 | }
60 |
61 |
62 | - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
63 | CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
64 | CVPixelBufferLockBaseAddress(imageBuffer, 0);
65 |
66 | uint8_t *base;
67 | int width, height, bytesPerRow;
68 | base = (uint8_t*)CVPixelBufferGetBaseAddress(imageBuffer);
69 | width = (int)CVPixelBufferGetWidth(imageBuffer);
70 | height = (int)CVPixelBufferGetHeight(imageBuffer);
71 | bytesPerRow = (int)CVPixelBufferGetBytesPerRow(imageBuffer);
72 |
73 | Mat mat = Mat(height, width, CV_8UC4, base, bytesPerRow); // pass the stride: rows may be padded
74 |
75 | //Processing here
76 | switch(tmode)
77 | {
78 | case 1:
79 | [self.delegate touchMode:mat];
80 | break;
81 | case 2:
82 | [self.delegate detectionMode:mat];
83 | break;
84 | case 3:
85 | [self.delegate detectionModeMask:mat];
86 | break;
87 | case 4:
88 | [self.delegate farneback:mat];
89 | break;
90 | default:
91 | [self.delegate touchMode:mat];
92 | break;
93 | }
94 |
95 | CGImageRef imageRef = [self CGImageFromCVMat:mat];
96 | dispatch_sync(dispatch_get_main_queue(), ^{
97 | self.previewLayer.contents = (__bridge id)imageRef;
98 | });
99 |
100 | CGImageRelease(imageRef);
101 | CVPixelBufferUnlockBaseAddress( imageBuffer, 0 );
102 | }
103 |
104 | - (void)switchMode:(int)mode {
105 | tmode = mode;
106 | [self.delegate reset];
107 | }
108 |
109 | - (void)update:(CGPoint)coords {
110 | if(tmode == 1)
111 | [self.delegate update:coords];
112 | }
113 |
114 | - (void)start {
115 | [self.captureSession startRunning];
116 | }
117 |
118 | - (void)stop {
119 | [self.captureSession stopRunning];
120 | }
121 |
122 | - (CGImageRef)CGImageFromCVMat:(Mat)cvMat {
123 | if (cvMat.elemSize() == 4) {
124 | cv::cvtColor(cvMat, cvMat, COLOR_BGRA2RGBA);
125 | }
126 | NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
127 | CGColorSpaceRef colorSpace;
128 |
129 | if (cvMat.elemSize() == 1) {
130 | colorSpace = CGColorSpaceCreateDeviceGray();
131 | } else {
132 | colorSpace = CGColorSpaceCreateDeviceRGB();
133 | }
134 |
135 | CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
136 |
137 | // Creating CGImage from cv::Mat
138 | CGImageRef imageRef = CGImageCreate(cvMat.cols, //width
139 | cvMat.rows, //height
140 | 8, //bits per component
141 | 8 * cvMat.elemSize(), //bits per pixel
142 | cvMat.step[0], //bytesPerRow
143 | colorSpace, //colorspace
144 | kCGImageAlphaNone|kCGBitmapByteOrderDefault,// bitmap info
145 | provider, //CGDataProviderRef
146 | NULL, //decode
147 | false, //should interpolate
148 | kCGRenderingIntentDefault //intent
149 | );
150 |
151 | CGDataProviderRelease(provider);
152 | CGColorSpaceRelease(colorSpace);
153 |
154 | return imageRef;
155 | }
156 |
157 |
158 | @end
159 |
--------------------------------------------------------------------------------
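The core of VideoSource.mm above is wrapping the camera's BGRA pixel buffer in a cv::Mat header without copying. A standalone sketch of that pattern, using the same CoreVideo calls as captureOutput: (the helper name is illustrative; the buffer must stay locked while the Mat is in use):

    #include <CoreVideo/CoreVideo.h>
    #include <opencv2/opencv.hpp>

    // Wrap a locked CVPixelBuffer in a cv::Mat header (no pixel copy).
    // Passing bytes-per-row as the step handles any row padding.
    cv::Mat matFromPixelBuffer(CVPixelBufferRef buf)
    {
        void  *base   = CVPixelBufferGetBaseAddress(buf);
        size_t width  = CVPixelBufferGetWidth(buf);
        size_t height = CVPixelBufferGetHeight(buf);
        size_t stride = CVPixelBufferGetBytesPerRow(buf);
        return cv::Mat((int)height, (int)width, CV_8UC4, base, stride);
    }

    // Caller pattern, as in captureOutput:didOutputSampleBuffer:fromConnection:
    //   CVPixelBufferLockBaseAddress(buf, 0);
    //   cv::Mat frame = matFromPixelBuffer(buf);
    //   ... process frame ...
    //   CVPixelBufferUnlockBaseAddress(buf, 0);
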
/MinimumOpenCVLiveCamera/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
(Interface Builder XML not captured in this dump. Per ViewController.swift, the scene provides the camera preview view and the start / stop / mode buttons.)
--------------------------------------------------------------------------------
/MinimumOpenCVLiveCamera/FrameProcessor.mm:
--------------------------------------------------------------------------------
1 | //
2 | // FrameProcessor.m
3 | // MinimumOpenCVLiveCamera
4 | //
5 | // Created by Akira Iwaya on 2015/11/05.
 6 | //  Copyright © 2015 akira108. All rights reserved.
7 | //
8 |
9 | #import "FrameProcessor.h"
10 |
11 | @implementation FrameProcessor
12 |
13 | - (instancetype)init
14 | {
15 | self = [super init];
16 | if (self) {
17 | // CT initialization
18 | box = cv::Rect(100,100,55,55);
19 | ct.init(current_gray, box);
20 | // Background subtraction object
21 | pMOG2 = new BackgroundSubtractorMOG2();
22 | // Touch signals
23 | touched = false;
24 | touch = cv::Point(0,0);
25 | }
26 | return self;
27 | }
28 |
29 |
30 | // Process each frame in touch mode. Uses the compressive
31 | // tracker to predict where the object is in the current frame, then
32 | // draws a bounding box around the object. Listens for signals when
33 | // the user has touched the screen, requesting a window location change.
34 |
35 | - (void)touchMode:(Mat &)frame {
36 |
37 | cvtColor(frame, current_gray, CV_BGRA2GRAY); // camera frames arrive as 4-channel BGRA
38 |
39 | // Update the tracking box to the touched point. This check is necessary because
40 | // frame processing and touch handling run on different threads.
41 | if(touched)
42 | {
43 | box = touchBox;
44 | touched = false;
45 | }
46 | // Process Frame
47 | ct.processFrame(current_gray, box);
48 | // Draw bounding box
49 | rectangle(frame, box, Scalar(0,0,255));
50 |
51 | // Draw small circle at the last point touched
52 | circle(frame, touch, 5, Scalar(0,255,0));
53 | }
54 |
55 |
56 | // Detect and track objects using background subtraction. The complete
57 | // algorithm can be found in the accompanying report.
58 | - (void)detectionMode:(Mat &)frame {
59 |
60 | // wait for camera to adjust before continuing after brief delay
61 | if([[NSDate date] timeIntervalSince1970] < delay)
62 | return;
63 |
64 | Mat current;
65 | frame.copyTo(current); // work on a copy; the original frame keeps the camera image
66 |
67 |
68 | // Apply a slight blur before updating the background model; then
69 | // reduce the resolution of the mask by resizing it twice, first to a
70 | // smaller size, then back to the original size.
71 | blur(current, current, cv::Size(7,7));
72 | pMOG2->operator()(current, fgMaskMOG2);
73 | resize(fgMaskMOG2, fgMaskMOG2, cv::Size(106, 80), 0, 0, INTER_CUBIC); // compress image
74 | resize(fgMaskMOG2, fgMaskMOG2, cv::Size(640, 480), 0, 0); // return to original size
75 |
76 | // Detect all contours in the mask
77 | vector<vector<cv::Point> > contours;
78 | vector<Vec4i> hierarchy;
79 | double size;
80 |
81 | findContours(fgMaskMOG2.clone(), contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cv::Point(0, 0));
82 | size = contours.size();
83 |
84 | // Approximate contours to polygons + get bounding rects and circles
85 | vector<vector<cv::Point> > contours_poly(size);
86 | vector<cv::Rect> boundRect;
87 | Mat output;
88 | const int minArea = 400;
89 | int i;
90 |
91 | for(i = 0; i < size; ++i)
92 | {
93 | approxPolyDP(Mat(contours[i]), contours_poly[i], 3, true);
94 | cv::Rect temp = boundingRect(Mat(contours_poly[i]));
95 | boundRect.push_back(temp);
96 | boundRect.push_back(temp);
97 | // push twice to ensure non-overlapping rectangles appear
98 | // at least once after they are grouped
99 | }
100 |
101 | groupRectangles(boundRect, 1, 0.2); // merge grouped rectangles
102 |
103 | // Draw rectangles around each contour greater than the minimum area
104 | for(i = 0; i < boundRect.size(); ++i)
105 | {
106 | if(boundRect[i].area() > minArea)
107 | rectangle(frame, boundRect[i].tl(), boundRect[i].br(), Scalar(0,0,255), 2);
108 | }
109 | }
110 |
111 |
112 | // Same as detection mode, but the original frame is replaced with the MOG2 mask.
113 | - (void)detectionModeMask:(Mat &)frame {
114 |
115 | // wait for camera to adjust before continuing after brief delay
116 | if([[NSDate date] timeIntervalSince1970] < delay)
117 | return;
118 |
119 |
120 | // update background model
121 | blur(frame, frame, cv::Size(7,7));
122 | pMOG2->operator()(frame, fgMaskMOG2);
123 | resize(fgMaskMOG2, fgMaskMOG2, cv::Size(106, 80), 0, 0, INTER_CUBIC); // compress image
124 | resize(fgMaskMOG2, fgMaskMOG2, cv::Size(640, 480), 0, 0);
125 |
126 | vector<vector<cv::Point> > contours;
127 | vector<Vec4i> hierarchy;
128 | double size;
129 |
130 | findContours(fgMaskMOG2.clone(), contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cv::Point(0, 0));
131 | size = contours.size();
132 |
133 | /// Approximate contours to polygons + get bounding rects and circles
134 | vector<vector<cv::Point> > contours_poly(size);
135 | vector<cv::Rect> boundRect;
136 | Mat output = fgMaskMOG2;
137 | const int minArea = 400;
138 | int i;
139 |
140 | for(i = 0; i < size; ++i)
141 | {
142 | approxPolyDP(Mat(contours[i]), contours_poly[i], 3, true);
143 | cv::Rect temp = boundingRect(Mat(contours_poly[i]));
144 | boundRect.push_back(temp);
145 | boundRect.push_back(temp);
146 | }
147 |
148 | groupRectangles(boundRect, 1, 0.2); // merge grouped rectangles
149 |
150 | cvtColor(fgMaskMOG2, output, CV_GRAY2RGB); // convert mask back to rgb
151 |
152 | // Draw rectangles around each contour greater than the minimum area
153 | for(i = 0; i < boundRect.size(); ++i)
154 | {
155 | if(boundRect[i].area() > minArea)
156 | rectangle(output, boundRect[i].tl(), boundRect[i].br(), Scalar(0,255,0), 2);
157 | }
158 |
159 | output.copyTo(frame); // replace frame with mask
160 | }
161 |
162 |
163 | // Optical flow frame processing using Farneback's algorithm
164 | // Source code by Vlada Kucera
165 | // http://funvision.blogspot.dk/2016/02/opencv-31-tutorial-optical-flow.html
166 | - (void)farneback:(Mat &)frame {
167 |
168 | Mat original;
169 | Mat img;
170 |
171 | frame.copyTo(original); // work on a copy of the current frame
172 |
173 | cvtColor(original, original, COLOR_BGRA2GRAY); // frame is 4-channel BGRA
174 |
175 | if (prevgray.empty() == false ) {
176 |
177 | // calculate optical flow
178 | calcOpticalFlowFarneback(prevgray, original, flowMat, 0.4, 1, 12, 2, 8, 1.2, 0);
179 | // copy the computed flow field into the working Mat
180 | flowMat.copyTo(flow);
181 |
182 | // By y += 5, x += 5 you can specify the grid
183 | for (int y = 0; y < frame.rows; y += 5) {
184 | for (int x = 0; x < frame.cols; x += 5)
185 | {
186 | // get the flow from y, x position * 10 for better visibility
187 | const Point2f flowatxy = flow.at<Point2f>(y, x) * 10;
188 | // draw line at flow direction
189 | line(frame, cv::Point(x, y), cv::Point(cvRound(x + flowatxy.x), cvRound(y + flowatxy.y)), Scalar(255,0,0));
190 | // draw initial point
191 | circle(frame, cv::Point(x, y), 1, Scalar(0, 0, 0), -1);
192 | }
193 | }
194 |
195 | // fill previous image again
196 | original.copyTo(prevgray);
197 |
198 | }
199 | else {
200 | // fill previous image in case prevgray.empty() == true
201 | original.copyTo(prevgray);
202 | }
203 | }
204 |
205 |
206 |
207 | /* Update the tracking box location where the screen was touched.
208 | Translate iPhone 6 screen coordinates (375x667 portrait) to OpenCV Mat coordinates (640x480 landscape).
209 | This translation is not entirely accurate yet. Work in progress.
210 |
211 |  iPhone      Mat        |  iPhone      Mat
212 |   __      ________      |  0,0__     __0,0
213 |  |  |    |        |     |  |  |     |  |
214 |  |  |667 |        |480  |  |  |y    |  |x
215 |  |__|    |________|     |  |__|     |__|
216 |  375        640         |   x         y
217 |
218 | */
219 | - (void)update:(CGPoint)coords {
220 |
221 | touched = true;
222 | float ratiox = (583.0/640.0);
223 | float ratioy = (375.0/423.0);
224 | float x1 = coords.y * ratiox;
225 | float y1 = coords.x * ratioy;
226 | int x = x1;
227 | int y = 400 - y1;
228 |
229 | touchBox = cv::Rect(x, y, 55, 55);
230 | touch = cv::Point(x, y);
231 | }
232 |
233 |
234 | // Reset the background subtraction object and the delay timer.
235 | - (void)reset {
236 | pMOG2 = new BackgroundSubtractorMOG2(3, 100, false); // (history, varThreshold, detectShadows)
237 | // cv::Ptr releases the previous instance on assignment; no explicit destructor call is needed
238 | timeInSeconds = [[NSDate date] timeIntervalSince1970];
239 | delay = timeInSeconds + 4;
240 | }
241 |
242 | @end
243 |
244 |
245 |
--------------------------------------------------------------------------------
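Condensed, the detection pipeline in detectionMode: above is blur -> MOG2 foreground mask -> shrink-and-restore resize (a cheap blob filter) -> contours -> grouped bounding boxes. A minimal OpenCV 2.4-style sketch of one iteration under the same 640x480 frame assumption (the function name is illustrative):

    #include <opencv2/opencv.hpp>
    using namespace cv;

    void detectMoving(BackgroundSubtractorMOG2 &mog2, Mat &frame)
    {
        Mat work, mask;
        blur(frame, work, Size(7, 7));                         // soften sensor noise
        mog2(work, mask);                                      // update model, get FG mask
        resize(mask, mask, Size(106, 80), 0, 0, INTER_CUBIC);  // shrink ...
        resize(mask, mask, Size(640, 480));                    // ... and restore: merges blobs

        std::vector<std::vector<Point> > contours;
        findContours(mask.clone(), contours, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE);

        std::vector<Rect> boxes;
        for (size_t i = 0; i < contours.size(); ++i) {
            Rect r = boundingRect(contours[i]);
            boxes.push_back(r);
            boxes.push_back(r);   // duplicate so groupRectangles keeps lone boxes
        }
        groupRectangles(boxes, 1, 0.2);                        // merge overlapping boxes

        for (size_t i = 0; i < boxes.size(); ++i)
            if (boxes[i].area() > 400)                         // drop tiny detections
                rectangle(frame, boxes[i], Scalar(0, 0, 255), 2);
    }
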
/MinimumOpenCVLiveCamera/CompressiveTracker.cpp:
--------------------------------------------------------------------------------
1 | #include "CompressiveTracker.h"
  2 | #include <math.h>
  3 | #include <iostream>
4 | using namespace std;
5 | //------------------------------------------------
6 |
7 |
8 | CompressiveTracker::CompressiveTracker(void)
9 | {
10 | featureMinNumRect = 2;
11 | featureMaxNumRect = 4; // number of rectangle from 2 to 4
12 | featureNum = 50; // number of all weaker classifiers, i.e,feature pool
13 | rOuterPositive = 4; // radical scope of positive samples
14 | rSearchWindow = 25; // size of search window
 15 | muPositive = vector<float>(featureNum, 0.0f);
 16 | muNegative = vector<float>(featureNum, 0.0f);
 17 | sigmaPositive = vector<float>(featureNum, 1.0f);
 18 | sigmaNegative = vector<float>(featureNum, 1.0f);
19 | learnRate = 0.85f; // Learning rate parameter
20 | }
21 |
22 | CompressiveTracker::~CompressiveTracker(void){}
23 |
24 |
25 | void CompressiveTracker::HaarFeature(Rect& _objectBox, int _numFeature)
26 | /*Description: compute Haar features
27 | Arguments:
28 | -_objectBox: [x y width height] object rectangle
29 | -_numFeature: total number of features.The default is 50.
30 | */
31 | {
 32 | features = vector<vector<Rect> >(_numFeature, vector<Rect>());
 33 | featuresWeight = vector<vector<float> >(_numFeature, vector<float>());
34 |
35 | int numRect;
36 | Rect rectTemp;
37 | float weightTemp;
38 |
39 | for (int i=0; i<_numFeature; i++)
40 | {
41 | numRect = cvFloor(rng.uniform((double)featureMinNumRect, (double)featureMaxNumRect));
42 |
43 | //int c = 1;
 44 | for (int j=0; j<numRect; j++)
 45 | {
 46 | rectTemp.x = cvFloor(rng.uniform(0.0, (double)(_objectBox.width - 3)));
 47 | rectTemp.y = cvFloor(rng.uniform(0.0, (double)(_objectBox.height - 3)));
 48 | rectTemp.width = cvCeil(rng.uniform(0.0, (double)(_objectBox.width - rectTemp.x - 2)));
 49 | rectTemp.height = cvCeil(rng.uniform(0.0, (double)(_objectBox.height - rectTemp.y - 2)));
 50 | features[i].push_back(rectTemp);
 51 |
 52 | weightTemp = (float)pow(-1.0, cvFloor(rng.uniform(0.0, 2.0))); // random +/-1 weight
 53 | featuresWeight[i].push_back(weightTemp);
 54 | }
 55 | }
 56 | }
 57 |
 58 |
 59 |
 60 |
 61 |
 62 |
 63 | void CompressiveTracker::sampleRect(Mat& _image, Rect& _objectBox, float _rInner, float _rOuter, int _maxSampleNum, vector<Rect>& _sampleBox)
64 | /* Description: compute the coordinate of positive and negative sample image templates
65 | Arguments:
66 | -_image: processing frame
67 | -_objectBox: recent object position
68 | -_rInner: inner sampling radius
69 | -_rOuter: Outer sampling radius
70 | -_maxSampleNum: maximal number of sampled images
71 | -_sampleBox: Storing the rectangle coordinates of the sampled images.
72 | */
73 |
74 | {
75 | int rowsz = _image.rows - _objectBox.height - 1;
76 | int colsz = _image.cols - _objectBox.width - 1;
77 | float inradsq = _rInner*_rInner;
78 | float outradsq = _rOuter*_rOuter;
79 |
80 |
81 | int dist;
82 |
83 | int minrow = max(0,(int)_objectBox.y-(int)_rInner);
84 | int maxrow = min((int)rowsz-1,(int)_objectBox.y+(int)_rInner);
85 | int mincol = max(0,(int)_objectBox.x-(int)_rInner);
86 | int maxcol = min((int)colsz-1,(int)_objectBox.x+(int)_rInner);
87 |
88 |
89 |
90 | int i = 0;
91 |
92 | float prob = ((float)(_maxSampleNum))/(maxrow-minrow+1)/(maxcol-mincol+1);
93 |
94 | int r;
95 | int c;
96 |
97 | _sampleBox.clear();//important
98 | Rect rec(0,0,0,0);
99 |
100 | for( r=minrow; r<=(int)maxrow; r++ )
101 | for( c=mincol; c<=(int)maxcol; c++ ){
102 | dist = (_objectBox.y-r)*(_objectBox.y-r) + (_objectBox.x-c)*(_objectBox.x-c);
103 |
104 | if( rng.uniform(0.,1.)<prob && dist<inradsq && dist>=outradsq ){
105 |
106 | rec.x = c;
107 | rec.y = r;
108 | rec.width = _objectBox.width;
109 | rec.height= _objectBox.height;
110 |
111 | _sampleBox.push_back(rec);
112 |
113 | i++;
114 | }
115 | }
116 |
117 | _sampleBox.resize(i);
118 |
119 | }
120 |
121 |
122 | void CompressiveTracker::sampleRect(Mat& _image, Rect& _objectBox, float _srw, vector<Rect>& _sampleBox)
123 | /* Description: Compute the coordinate of samples when detecting the object.*/
124 | {
125 | int rowsz = _image.rows - _objectBox.height - 1;
126 | int colsz = _image.cols - _objectBox.width - 1;
127 | float inradsq = _srw*_srw;
128 |
129 |
130 | int dist;
131 |
132 | int minrow = max(0,(int)_objectBox.y-(int)_srw);
133 | int maxrow = min((int)rowsz-1,(int)_objectBox.y+(int)_srw);
134 | int mincol = max(0,(int)_objectBox.x-(int)_srw);
135 | int maxcol = min((int)colsz-1,(int)_objectBox.x+(int)_srw);
136 |
137 | int i = 0;
138 |
139 | int r;
140 | int c;
141 |
142 | Rect rec(0,0,0,0);
143 | _sampleBox.clear();//important
144 |
145 | for( r=minrow; r<=(int)maxrow; r++ )
146 | for( c=mincol; c<=(int)maxcol; c++ ){
147 | dist = (_objectBox.y-r)*(_objectBox.y-r) + (_objectBox.x-c)*(_objectBox.x-c);
148 |
149 | if( dist < inradsq ){
150 |
151 | rec.x = c;
152 | rec.y = r;
153 | rec.width = _objectBox.width;
154 | rec.height= _objectBox.height;
155 |
156 | _sampleBox.push_back(rec);
157 |
158 | i++;
159 | }
160 | }
161 | _sampleBox.resize(i);
162 | }
163 |
164 |
165 | // Compute the features of samples
166 | void CompressiveTracker::getFeatureValue(Mat& _imageIntegral, vector<Rect>& _sampleBox, Mat& _sampleFeatureValue)
167 | {
168 | int sampleBoxSize = _sampleBox.size();
169 | _sampleFeatureValue.create(featureNum, sampleBoxSize, CV_32F);
170 | float tempValue;
171 | int xMin;
172 | int xMax;
173 | int yMin;
174 | int yMax;
175 |
176 | for (int i=0; i<featureNum; i++)
177 | {
178 | for (int j=0; j<sampleBoxSize; j++)
179 | {
180 | tempValue = 0.0f;
181 | for (size_t k=0; k<features[i].size(); k++)
182 | {
183 | xMin = _sampleBox[j].x + features[i][k].x;
184 | xMax = _sampleBox[j].x + features[i][k].x + features[i][k].width;
185 | yMin = _sampleBox[j].y + features[i][k].y;
186 | yMax = _sampleBox[j].y + features[i][k].y + features[i][k].height;
187 | tempValue += featuresWeight[i][k] *
188 | (_imageIntegral.at<float>(yMin, xMin) +
189 | _imageIntegral.at<float>(yMax, xMax) -
190 | _imageIntegral.at<float>(yMin, xMax) -
191 | _imageIntegral.at<float>(yMax, xMin));
192 | }
193 | _sampleFeatureValue.at<float>(i,j) = tempValue;
194 | }
195 | }
196 | }
197 |
198 |
199 | // Update the mean and variance of the gaussian classifier
200 | void CompressiveTracker::classifierUpdate(Mat& _sampleFeatureValue, vector<float>& _mu, vector<float>& _sigma, float _learnRate)
201 | {
202 | Scalar muTemp;
203 | Scalar sigmaTemp;
204 |
205 | for (int i=0; i<featureNum; i++)
206 | {
207 | meanStdDev(_sampleFeatureValue.row(i), muTemp, sigmaTemp);
208 |
209 | _sigma[i] = (float)sqrt( _learnRate*_sigma[i]*_sigma[i] + (1.0f-_learnRate)*sigmaTemp.val[0]*sigmaTemp.val[0]
210 | + _learnRate*(1.0f-_learnRate)*(_mu[i]-muTemp.val[0])*(_mu[i]-muTemp.val[0])); // equation 6 in paper
211 |
212 | _mu[i] = _mu[i]*_learnRate + (1.0f-_learnRate)*muTemp.val[0]; // equation 6 in paper
213 | }
214 | }
215 |
216 |
217 | // Compute the ratio classifier
218 | void CompressiveTracker::radioClassifier(vector<float>& _muPos, vector<float>& _sigmaPos, vector<float>& _muNeg, vector<float>& _sigmaNeg,
219 | Mat& _sampleFeatureValue, float& _radioMax, int& _radioMaxIndex)
220 | {
221 | float sumRadio;
222 | _radioMax = -FLT_MAX;
223 | _radioMaxIndex = 0;
224 | float pPos;
225 | float pNeg;
226 | int sampleBoxNum = _sampleFeatureValue.cols;
227 |
228 | for (int j=0; j<sampleBoxNum; j++)
229 | {
230 | sumRadio = 0.0f;
231 | for (int i=0; i<featureNum; i++)
232 | {
233 | pPos = exp( (_sampleFeatureValue.at<float>(i,j)-_muPos[i])*(_sampleFeatureValue.at<float>(i,j)-_muPos[i]) / -(2.0f*_sigmaPos[i]*_sigmaPos[i]+1e-30) ) / (_sigmaPos[i]+1e-30);
234 | pNeg = exp( (_sampleFeatureValue.at<float>(i,j)-_muNeg[i])*(_sampleFeatureValue.at<float>(i,j)-_muNeg[i]) / -(2.0f*_sigmaNeg[i]*_sigmaNeg[i]+1e-30) ) / (_sigmaNeg[i]+1e-30);
235 | sumRadio += log(pPos+1e-30) - log(pNeg+1e-30); // equation 4
236 | }
237 | if (_radioMax < sumRadio)
238 | {
239 | _radioMax = sumRadio;
240 | _radioMaxIndex = j;
241 | }
242 | }
243 | }
244 |
245 |
246 | void CompressiveTracker::init(Mat& _frame, Rect& _objectBox)
247 | {
248 | // compute feature template
249 | HaarFeature(_objectBox, featureNum);
250 |
251 | // compute sample templates
252 | sampleRect(_frame, _objectBox, rOuterPositive, 0, 1000000, samplePositiveBox);
253 | sampleRect(_frame, _objectBox, rSearchWindow*1.5, rOuterPositive+4.0, 100, sampleNegativeBox);
254 |
255 | integral(_frame, imageIntegral, CV_32F);
256 |
257 | getFeatureValue(imageIntegral, samplePositiveBox, samplePositiveFeatureValue);
258 | getFeatureValue(imageIntegral, sampleNegativeBox, sampleNegativeFeatureValue);
259 | classifierUpdate(samplePositiveFeatureValue, muPositive, sigmaPositive, learnRate);
260 | classifierUpdate(sampleNegativeFeatureValue, muNegative, sigmaNegative, learnRate);
261 | }
262 |
263 |
264 | void CompressiveTracker::processFrame(Mat& _frame, Rect& _objectBox)
265 | {
266 | // predict
267 | sampleRect(_frame, _objectBox, rSearchWindow,detectBox);
268 | integral(_frame, imageIntegral, CV_32F);
269 | getFeatureValue(imageIntegral, detectBox, detectFeatureValue);
270 | int radioMaxIndex;
271 | float radioMax;
272 | radioClassifier(muPositive, sigmaPositive, muNegative, sigmaNegative, detectFeatureValue, radioMax, radioMaxIndex);
273 | _objectBox = detectBox[radioMaxIndex];
274 |
275 | // update
276 | sampleRect(_frame, _objectBox, rOuterPositive, 0.0, 1000000, samplePositiveBox);
277 | sampleRect(_frame, _objectBox, rSearchWindow*1.5, rOuterPositive+4.0, 100, sampleNegativeBox);
278 |
279 | getFeatureValue(imageIntegral, samplePositiveBox, samplePositiveFeatureValue);
280 | getFeatureValue(imageIntegral, sampleNegativeBox, sampleNegativeFeatureValue);
281 | classifierUpdate(samplePositiveFeatureValue, muPositive, sigmaPositive, learnRate);
282 | classifierUpdate(sampleNegativeFeatureValue, muNegative, sigmaNegative, learnRate);
283 | }
284 |
--------------------------------------------------------------------------------
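For reference, the "equation 4" and "equation 6" comments in radioClassifier() and classifierUpdate() above correspond to the CT paper's naive Bayes ratio classifier and its online Gaussian parameter update, sketched here in the paper's notation (lambda is learnRate = 0.85; mu^1 and sigma^1 are the mean and standard deviation of feature i over the new positive samples):

    % Eq. 4: naive Bayes ratio over compressed features v = (v_1, ..., v_n)
    H(\mathbf{v}) = \sum_{i=1}^{n} \log \frac{p(v_i \mid y = 1)}{p(v_i \mid y = 0)},
    \qquad p(v_i \mid y) \sim \mathcal{N}(\mu_i^{y}, \sigma_i^{y})

    % Eq. 6: incremental update of the positive-class parameters
    \mu_i^{1} \leftarrow \lambda \mu_i^{1} + (1 - \lambda)\,\mu^{1}
    \qquad
    \sigma_i^{1} \leftarrow \sqrt{\lambda (\sigma_i^{1})^{2} + (1 - \lambda)(\sigma^{1})^{2}
        + \lambda (1 - \lambda)(\mu_i^{1} - \mu^{1})^{2}}
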
/Real-time-Tracking-iOS.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 55;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | A811721A27E7D0C5008F3D8D /* opencv2-3.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A811721927E7D0C5008F3D8D /* opencv2-3.framework */; };
11 | A84914EC1E64FE19001BF88B /* CompressiveTracker.cpp in Sources */ = {isa = PBXBuildFile; fileRef = A84914EA1E64FE19001BF88B /* CompressiveTracker.cpp */; };
12 | A8B9958827DF119B0057B0F6 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A8B9958727DF119B0057B0F6 /* CoreGraphics.framework */; };
13 | A8B9958A27DF11CA0057B0F6 /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A8B9958927DF11CA0057B0F6 /* CoreFoundation.framework */; };
14 | E979E7D81BEB59710061BC68 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E979E7D71BEB59710061BC68 /* AVFoundation.framework */; };
15 | E979E7DA1BEB59740061BC68 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E979E7D91BEB59740061BC68 /* Accelerate.framework */; };
16 | E979E7DC1BEB597D0061BC68 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = E979E7DB1BEB597D0061BC68 /* CoreMedia.framework */; };
17 | E979E8081BEB66DA0061BC68 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E979E7F71BEB66DA0061BC68 /* AppDelegate.swift */; };
18 | E979E8091BEB66DA0061BC68 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = E979E7F81BEB66DA0061BC68 /* Assets.xcassets */; };
19 | E979E80A1BEB66DA0061BC68 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E979E7FA1BEB66DA0061BC68 /* LaunchScreen.storyboard */; };
20 | E979E80B1BEB66DA0061BC68 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = E979E7FC1BEB66DA0061BC68 /* Main.storyboard */; };
21 | E979E80C1BEB66DA0061BC68 /* FrameProcessor.mm in Sources */ = {isa = PBXBuildFile; fileRef = E979E7FF1BEB66DA0061BC68 /* FrameProcessor.mm */; };
22 | E979E80F1BEB66DA0061BC68 /* VideoSource.mm in Sources */ = {isa = PBXBuildFile; fileRef = E979E8041BEB66DA0061BC68 /* VideoSource.mm */; };
23 | E979E8101BEB66DA0061BC68 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E979E8051BEB66DA0061BC68 /* ViewController.swift */; };
24 | E979E8111BEB66DA0061BC68 /* Wrapper.mm in Sources */ = {isa = PBXBuildFile; fileRef = E979E8071BEB66DA0061BC68 /* Wrapper.mm */; };
25 | /* End PBXBuildFile section */
26 |
27 | /* Begin PBXFileReference section */
 28 | A811721927E7D0C5008F3D8D /* opencv2-3.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = "opencv2-3.framework"; sourceTree = "<group>"; };
29 | A84914EA1E64FE19001BF88B /* CompressiveTracker.cpp */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = CompressiveTracker.cpp; path = MinimumOpenCVLiveCamera/CompressiveTracker.cpp; sourceTree = SOURCE_ROOT; };
30 | A84914EB1E64FE19001BF88B /* CompressiveTracker.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = CompressiveTracker.h; path = MinimumOpenCVLiveCamera/CompressiveTracker.h; sourceTree = SOURCE_ROOT; };
31 | A8897B1127E7B66D005CEF94 /* Real-time-Tracking-iOS-Prefix.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "Real-time-Tracking-iOS-Prefix.pch"; path = "MinimumOpenCVLiveCamera/Real-time-Tracking-iOS-Prefix.pch"; sourceTree = SOURCE_ROOT; };
32 | A8B9958527DF11900057B0F6 /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = System/Library/Frameworks/AssetsLibrary.framework; sourceTree = SDKROOT; };
33 | A8B9958727DF119B0057B0F6 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
34 | A8B9958927DF11CA0057B0F6 /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = System/Library/Frameworks/CoreFoundation.framework; sourceTree = SDKROOT; };
35 | E979E7B81BEB4D6F0061BC68 /* Real-time-Tracking-iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Real-time-Tracking-iOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
36 | E979E7D71BEB59710061BC68 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
37 | E979E7D91BEB59740061BC68 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
38 | E979E7DB1BEB597D0061BC68 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
39 | E979E7F71BEB66DA0061BC68 /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = AppDelegate.swift; path = MinimumOpenCVLiveCamera/AppDelegate.swift; sourceTree = SOURCE_ROOT; };
40 | E979E7F81BEB66DA0061BC68 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; name = Assets.xcassets; path = MinimumOpenCVLiveCamera/Assets.xcassets; sourceTree = SOURCE_ROOT; };
 41 | E979E7FB1BEB66DA0061BC68 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = LaunchScreen.storyboard; sourceTree = "<group>"; };
 42 | E979E7FD1BEB66DA0061BC68 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Main.storyboard; sourceTree = "<group>"; };
43 | E979E7FE1BEB66DA0061BC68 /* FrameProcessor.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = FrameProcessor.h; path = MinimumOpenCVLiveCamera/FrameProcessor.h; sourceTree = SOURCE_ROOT; };
44 | E979E7FF1BEB66DA0061BC68 /* FrameProcessor.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = FrameProcessor.mm; path = MinimumOpenCVLiveCamera/FrameProcessor.mm; sourceTree = SOURCE_ROOT; };
45 | E979E8001BEB66DA0061BC68 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = Info.plist; path = MinimumOpenCVLiveCamera/Info.plist; sourceTree = SOURCE_ROOT; };
46 | E979E8031BEB66DA0061BC68 /* VideoSource.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = VideoSource.h; path = MinimumOpenCVLiveCamera/VideoSource.h; sourceTree = SOURCE_ROOT; };
47 | E979E8041BEB66DA0061BC68 /* VideoSource.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = VideoSource.mm; path = MinimumOpenCVLiveCamera/VideoSource.mm; sourceTree = SOURCE_ROOT; };
48 | E979E8051BEB66DA0061BC68 /* ViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ViewController.swift; path = MinimumOpenCVLiveCamera/ViewController.swift; sourceTree = SOURCE_ROOT; };
49 | E979E8061BEB66DA0061BC68 /* Wrapper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = Wrapper.h; path = MinimumOpenCVLiveCamera/Wrapper.h; sourceTree = SOURCE_ROOT; };
50 | E979E8071BEB66DA0061BC68 /* Wrapper.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; name = Wrapper.mm; path = MinimumOpenCVLiveCamera/Wrapper.mm; sourceTree = SOURCE_ROOT; };
51 | E9AE913E1BFDC25700C25B99 /* MinimumOpenCVLiveCamera-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = "MinimumOpenCVLiveCamera-Bridging-Header.h"; path = "MinimumOpenCVLiveCamera/MinimumOpenCVLiveCamera-Bridging-Header.h"; sourceTree = SOURCE_ROOT; };
52 | /* End PBXFileReference section */
53 |
54 | /* Begin PBXFrameworksBuildPhase section */
55 | E979E7B51BEB4D6F0061BC68 /* Frameworks */ = {
56 | isa = PBXFrameworksBuildPhase;
57 | buildActionMask = 2147483647;
58 | files = (
59 | A811721A27E7D0C5008F3D8D /* opencv2-3.framework in Frameworks */,
60 | A8B9958A27DF11CA0057B0F6 /* CoreFoundation.framework in Frameworks */,
61 | A8B9958827DF119B0057B0F6 /* CoreGraphics.framework in Frameworks */,
62 | E979E7DC1BEB597D0061BC68 /* CoreMedia.framework in Frameworks */,
63 | E979E7DA1BEB59740061BC68 /* Accelerate.framework in Frameworks */,
64 | E979E7D81BEB59710061BC68 /* AVFoundation.framework in Frameworks */,
65 | );
66 | runOnlyForDeploymentPostprocessing = 0;
67 | };
68 | /* End PBXFrameworksBuildPhase section */
69 |
70 | /* Begin PBXGroup section */
71 | A8B9958227DF113E0057B0F6 /* Frameworks */ = {
72 | isa = PBXGroup;
73 | children = (
74 | A811721927E7D0C5008F3D8D /* opencv2-3.framework */,
75 | A8B9958927DF11CA0057B0F6 /* CoreFoundation.framework */,
76 | A8B9958727DF119B0057B0F6 /* CoreGraphics.framework */,
77 | A8B9958527DF11900057B0F6 /* AssetsLibrary.framework */,
78 | );
79 | name = Frameworks;
 80 | sourceTree = "<group>";
81 | };
82 | E979E7AF1BEB4D6F0061BC68 = {
83 | isa = PBXGroup;
84 | children = (
85 | E979E7DB1BEB597D0061BC68 /* CoreMedia.framework */,
86 | E979E7D91BEB59740061BC68 /* Accelerate.framework */,
87 | E979E7D71BEB59710061BC68 /* AVFoundation.framework */,
88 | E979E7BA1BEB4D6F0061BC68 /* Real-time-Tracking-iOS */,
89 | E979E7B91BEB4D6F0061BC68 /* Products */,
90 | A8B9958227DF113E0057B0F6 /* Frameworks */,
91 | );
 92 | sourceTree = "<group>";
93 | };
94 | E979E7B91BEB4D6F0061BC68 /* Products */ = {
95 | isa = PBXGroup;
96 | children = (
97 | E979E7B81BEB4D6F0061BC68 /* Real-time-Tracking-iOS.app */,
98 | );
99 | name = Products;
100 | sourceTree = "<group>";
101 | };
102 | E979E7BA1BEB4D6F0061BC68 /* Real-time-Tracking-iOS */ = {
103 | isa = PBXGroup;
104 | children = (
105 | E979E7F71BEB66DA0061BC68 /* AppDelegate.swift */,
106 | E979E7F81BEB66DA0061BC68 /* Assets.xcassets */,
107 | E979E7F91BEB66DA0061BC68 /* Base.lproj */,
108 | E979E7FE1BEB66DA0061BC68 /* FrameProcessor.h */,
109 | E979E7FF1BEB66DA0061BC68 /* FrameProcessor.mm */,
110 | E979E8001BEB66DA0061BC68 /* Info.plist */,
111 | E979E8031BEB66DA0061BC68 /* VideoSource.h */,
112 | E979E8041BEB66DA0061BC68 /* VideoSource.mm */,
113 | E979E8051BEB66DA0061BC68 /* ViewController.swift */,
114 | E979E8061BEB66DA0061BC68 /* Wrapper.h */,
115 | E979E8071BEB66DA0061BC68 /* Wrapper.mm */,
116 | E9AE913E1BFDC25700C25B99 /* MinimumOpenCVLiveCamera-Bridging-Header.h */,
117 | A84914EB1E64FE19001BF88B /* CompressiveTracker.h */,
118 | A84914EA1E64FE19001BF88B /* CompressiveTracker.cpp */,
119 | A8897B1127E7B66D005CEF94 /* Real-time-Tracking-iOS-Prefix.pch */,
120 | );
121 | name = "Real-time-Tracking-iOS";
122 | path = DMAR;
123 | sourceTree = "<group>";
124 | };
125 | E979E7F91BEB66DA0061BC68 /* Base.lproj */ = {
126 | isa = PBXGroup;
127 | children = (
128 | E979E7FA1BEB66DA0061BC68 /* LaunchScreen.storyboard */,
129 | E979E7FC1BEB66DA0061BC68 /* Main.storyboard */,
130 | );
131 | name = Base.lproj;
132 | path = MinimumOpenCVLiveCamera/Base.lproj;
133 | sourceTree = SOURCE_ROOT;
134 | };
135 | /* End PBXGroup section */
136 |
137 | /* Begin PBXNativeTarget section */
138 | E979E7B71BEB4D6F0061BC68 /* Real-time-Tracking-iOS */ = {
139 | isa = PBXNativeTarget;
140 | buildConfigurationList = E979E7CA1BEB4D700061BC68 /* Build configuration list for PBXNativeTarget "Real-time-Tracking-iOS" */;
141 | buildPhases = (
142 | E979E7B41BEB4D6F0061BC68 /* Sources */,
143 | E979E7B51BEB4D6F0061BC68 /* Frameworks */,
144 | E979E7B61BEB4D6F0061BC68 /* Resources */,
145 | );
146 | buildRules = (
147 | );
148 | dependencies = (
149 | );
150 | name = "Real-time-Tracking-iOS";
151 | productName = DMAR;
152 | productReference = E979E7B81BEB4D6F0061BC68 /* Real-time-Tracking-iOS.app */;
153 | productType = "com.apple.product-type.application";
154 | };
155 | /* End PBXNativeTarget section */
156 |
157 | /* Begin PBXProject section */
158 | E979E7B01BEB4D6F0061BC68 /* Project object */ = {
159 | isa = PBXProject;
160 | attributes = {
161 | LastSwiftUpdateCheck = 0710;
162 | LastUpgradeCheck = 1320;
163 | ORGANIZATIONNAME = justinh5;
164 | TargetAttributes = {
165 | E979E7B71BEB4D6F0061BC68 = {
166 | CreatedOnToolsVersion = 7.1;
167 | DevelopmentTeam = N5R2JY98F9;
168 | LastSwiftMigration = 0820;
169 | };
170 | };
171 | };
172 | buildConfigurationList = E979E7B31BEB4D6F0061BC68 /* Build configuration list for PBXProject "Real-time-Tracking-iOS" */;
173 | compatibilityVersion = "Xcode 13.0";
174 | developmentRegion = en;
175 | hasScannedForEncodings = 0;
176 | knownRegions = (
177 | en,
178 | Base,
179 | );
180 | mainGroup = E979E7AF1BEB4D6F0061BC68;
181 | productRefGroup = E979E7B91BEB4D6F0061BC68 /* Products */;
182 | projectDirPath = "";
183 | projectRoot = "";
184 | targets = (
185 | E979E7B71BEB4D6F0061BC68 /* Real-time-Tracking-iOS */,
186 | );
187 | };
188 | /* End PBXProject section */
189 |
190 | /* Begin PBXResourcesBuildPhase section */
191 | E979E7B61BEB4D6F0061BC68 /* Resources */ = {
192 | isa = PBXResourcesBuildPhase;
193 | buildActionMask = 2147483647;
194 | files = (
195 | E979E80B1BEB66DA0061BC68 /* Main.storyboard in Resources */,
196 | E979E8091BEB66DA0061BC68 /* Assets.xcassets in Resources */,
197 | E979E80A1BEB66DA0061BC68 /* LaunchScreen.storyboard in Resources */,
198 | );
199 | runOnlyForDeploymentPostprocessing = 0;
200 | };
201 | /* End PBXResourcesBuildPhase section */
202 |
203 | /* Begin PBXSourcesBuildPhase section */
204 | E979E7B41BEB4D6F0061BC68 /* Sources */ = {
205 | isa = PBXSourcesBuildPhase;
206 | buildActionMask = 2147483647;
207 | files = (
208 | E979E80C1BEB66DA0061BC68 /* FrameProcessor.mm in Sources */,
209 | E979E80F1BEB66DA0061BC68 /* VideoSource.mm in Sources */,
210 | E979E8101BEB66DA0061BC68 /* ViewController.swift in Sources */,
211 | E979E8081BEB66DA0061BC68 /* AppDelegate.swift in Sources */,
212 | E979E8111BEB66DA0061BC68 /* Wrapper.mm in Sources */,
213 | A84914EC1E64FE19001BF88B /* CompressiveTracker.cpp in Sources */,
214 | );
215 | runOnlyForDeploymentPostprocessing = 0;
216 | };
217 | /* End PBXSourcesBuildPhase section */
218 |
219 | /* Begin PBXVariantGroup section */
220 | E979E7FA1BEB66DA0061BC68 /* LaunchScreen.storyboard */ = {
221 | isa = PBXVariantGroup;
222 | children = (
223 | E979E7FB1BEB66DA0061BC68 /* Base */,
224 | );
225 | name = LaunchScreen.storyboard;
226 | sourceTree = "<group>";
227 | };
228 | E979E7FC1BEB66DA0061BC68 /* Main.storyboard */ = {
229 | isa = PBXVariantGroup;
230 | children = (
231 | E979E7FD1BEB66DA0061BC68 /* Base */,
232 | );
233 | name = Main.storyboard;
234 | sourceTree = "<group>";
235 | };
236 | /* End PBXVariantGroup section */
237 |
238 | /* Begin XCBuildConfiguration section */
239 | E979E7C81BEB4D700061BC68 /* Debug */ = {
240 | isa = XCBuildConfiguration;
241 | buildSettings = {
242 | ALWAYS_SEARCH_USER_PATHS = NO;
243 | CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
244 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
245 | CLANG_CXX_LIBRARY = "libc++";
246 | CLANG_ENABLE_MODULES = YES;
247 | CLANG_ENABLE_OBJC_ARC = YES;
248 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
249 | CLANG_WARN_BOOL_CONVERSION = YES;
250 | CLANG_WARN_COMMA = YES;
251 | CLANG_WARN_CONSTANT_CONVERSION = YES;
252 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
253 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
254 | CLANG_WARN_EMPTY_BODY = YES;
255 | CLANG_WARN_ENUM_CONVERSION = YES;
256 | CLANG_WARN_INFINITE_RECURSION = YES;
257 | CLANG_WARN_INT_CONVERSION = YES;
258 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
259 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
260 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
261 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
262 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
263 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
264 | CLANG_WARN_STRICT_PROTOTYPES = YES;
265 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
266 | CLANG_WARN_UNREACHABLE_CODE = YES;
267 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
268 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
269 | COPY_PHASE_STRIP = NO;
270 | DEBUG_INFORMATION_FORMAT = dwarf;
271 | ENABLE_STRICT_OBJC_MSGSEND = YES;
272 | ENABLE_TESTABILITY = YES;
273 | FRAMEWORK_SEARCH_PATHS = (
274 | "$(PROJECT_DIR)/Frameworks/**",
275 | "$(PROJECT_DIR)/opencv2-3.framework/**",
276 | );
277 | GCC_C_LANGUAGE_STANDARD = gnu99;
278 | GCC_DYNAMIC_NO_PIC = NO;
279 | GCC_NO_COMMON_BLOCKS = YES;
280 | GCC_OPTIMIZATION_LEVEL = 0;
281 | GCC_PREPROCESSOR_DEFINITIONS = (
282 | "DEBUG=1",
283 | "$(inherited)",
284 | );
285 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
286 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
287 | GCC_WARN_UNDECLARED_SELECTOR = YES;
288 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
289 | GCC_WARN_UNUSED_FUNCTION = YES;
290 | GCC_WARN_UNUSED_VARIABLE = YES;
291 | HEADER_SEARCH_PATHS = /usr/local/include;
292 | IPHONEOS_DEPLOYMENT_TARGET = 15.2;
293 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/../Frameworks";
294 | LIBRARY_SEARCH_PATHS = "";
295 | MTL_ENABLE_DEBUG_INFO = YES;
296 | ONLY_ACTIVE_ARCH = YES;
297 | SDKROOT = iphoneos;
298 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
299 | SWIFT_VERSION = 5.0;
300 | SYSTEM_FRAMEWORK_SEARCH_PATHS = "";
301 | USER_HEADER_SEARCH_PATHS = "";
302 | };
303 | name = Debug;
304 | };
305 | E979E7C91BEB4D700061BC68 /* Release */ = {
306 | isa = XCBuildConfiguration;
307 | buildSettings = {
308 | ALWAYS_SEARCH_USER_PATHS = NO;
309 | CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES;
310 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
311 | CLANG_CXX_LIBRARY = "libc++";
312 | CLANG_ENABLE_MODULES = YES;
313 | CLANG_ENABLE_OBJC_ARC = YES;
314 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
315 | CLANG_WARN_BOOL_CONVERSION = YES;
316 | CLANG_WARN_COMMA = YES;
317 | CLANG_WARN_CONSTANT_CONVERSION = YES;
318 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
319 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
320 | CLANG_WARN_EMPTY_BODY = YES;
321 | CLANG_WARN_ENUM_CONVERSION = YES;
322 | CLANG_WARN_INFINITE_RECURSION = YES;
323 | CLANG_WARN_INT_CONVERSION = YES;
324 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
325 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
326 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
327 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
328 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
329 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
330 | CLANG_WARN_STRICT_PROTOTYPES = YES;
331 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
332 | CLANG_WARN_UNREACHABLE_CODE = YES;
333 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
334 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
335 | COPY_PHASE_STRIP = NO;
336 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
337 | ENABLE_NS_ASSERTIONS = NO;
338 | ENABLE_STRICT_OBJC_MSGSEND = YES;
339 | FRAMEWORK_SEARCH_PATHS = (
340 | "$(PROJECT_DIR)/Frameworks/**",
341 | "$(PROJECT_DIR)/opencv2-3.framework/**",
342 | );
343 | GCC_C_LANGUAGE_STANDARD = gnu99;
344 | GCC_NO_COMMON_BLOCKS = YES;
345 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
346 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
347 | GCC_WARN_UNDECLARED_SELECTOR = YES;
348 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
349 | GCC_WARN_UNUSED_FUNCTION = YES;
350 | GCC_WARN_UNUSED_VARIABLE = YES;
351 | HEADER_SEARCH_PATHS = /usr/local/include;
352 | IPHONEOS_DEPLOYMENT_TARGET = 15.2;
353 | LD_RUNPATH_SEARCH_PATHS = "@executable_path/../Frameworks";
354 | LIBRARY_SEARCH_PATHS = "";
355 | MTL_ENABLE_DEBUG_INFO = NO;
356 | ONLY_ACTIVE_ARCH = NO;
357 | SDKROOT = iphoneos;
358 | SWIFT_COMPILATION_MODE = wholemodule;
359 | SWIFT_VERSION = 5.0;
360 | SYSTEM_FRAMEWORK_SEARCH_PATHS = "";
361 | USER_HEADER_SEARCH_PATHS = "";
362 | VALIDATE_PRODUCT = YES;
363 | };
364 | name = Release;
365 | };
366 | E979E7CB1BEB4D700061BC68 /* Debug */ = {
367 | isa = XCBuildConfiguration;
368 | buildSettings = {
369 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
370 | CLANG_ENABLE_MODULES = YES;
371 | DEVELOPMENT_TEAM = N5R2JY98F9;
372 | FRAMEWORK_SEARCH_PATHS = (
373 | "$(inherited)",
374 | "$(PROJECT_DIR)/MinimumOpenCVLiveCamera",
375 | "$(PROJECT_DIR)",
376 | /usr/local/Frameworks,
377 | "$(PROJECT_DIR)/Frameworks",
378 | );
379 | INFOPLIST_FILE = MinimumOpenCVLiveCamera/Info.plist;
380 | LD_RUNPATH_SEARCH_PATHS = (
381 | "$(inherited)",
382 | "@executable_path/Frameworks",
383 | );
384 | LIBRARY_SEARCH_PATHS = (
385 | "$(inherited)",
386 | /usr/local/Cellar/opencv/2.4.13.2/lib,
387 | );
388 | PRODUCT_BUNDLE_IDENTIFIER = justinh5;
389 | PRODUCT_NAME = "Real-time-Tracking-iOS";
390 | SWIFT_OBJC_BRIDGING_HEADER = "MinimumOpenCVLiveCamera/MinimumOpenCVLiveCamera-Bridging-Header.h";
391 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
392 | SWIFT_VERSION = 5.0;
393 | };
394 | name = Debug;
395 | };
396 | E979E7CC1BEB4D700061BC68 /* Release */ = {
397 | isa = XCBuildConfiguration;
398 | buildSettings = {
399 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
400 | CLANG_ENABLE_MODULES = YES;
401 | DEVELOPMENT_TEAM = N5R2JY98F9;
402 | FRAMEWORK_SEARCH_PATHS = (
403 | "$(inherited)",
404 | "$(PROJECT_DIR)/MinimumOpenCVLiveCamera",
405 | "$(PROJECT_DIR)",
406 | /usr/local/Frameworks,
407 | "$(PROJECT_DIR)/Frameworks",
408 | );
409 | INFOPLIST_FILE = MinimumOpenCVLiveCamera/Info.plist;
410 | LD_RUNPATH_SEARCH_PATHS = (
411 | "$(inherited)",
412 | "@executable_path/Frameworks",
413 | );
414 | LIBRARY_SEARCH_PATHS = (
415 | "$(inherited)",
416 | /usr/local/Cellar/opencv/2.4.13.2/lib,
417 | );
418 | PRODUCT_BUNDLE_IDENTIFIER = justinh5;
419 | PRODUCT_NAME = "Real-time-Tracking-iOS";
420 | SWIFT_OBJC_BRIDGING_HEADER = "MinimumOpenCVLiveCamera/MinimumOpenCVLiveCamera-Bridging-Header.h";
421 | SWIFT_VERSION = 5.0;
422 | };
423 | name = Release;
424 | };
425 | /* End XCBuildConfiguration section */
426 |
427 | /* Begin XCConfigurationList section */
428 | E979E7B31BEB4D6F0061BC68 /* Build configuration list for PBXProject "Real-time-Tracking-iOS" */ = {
429 | isa = XCConfigurationList;
430 | buildConfigurations = (
431 | E979E7C81BEB4D700061BC68 /* Debug */,
432 | E979E7C91BEB4D700061BC68 /* Release */,
433 | );
434 | defaultConfigurationIsVisible = 0;
435 | defaultConfigurationName = Release;
436 | };
437 | E979E7CA1BEB4D700061BC68 /* Build configuration list for PBXNativeTarget "Real-time-Tracking-iOS" */ = {
438 | isa = XCConfigurationList;
439 | buildConfigurations = (
440 | E979E7CB1BEB4D700061BC68 /* Debug */,
441 | E979E7CC1BEB4D700061BC68 /* Release */,
442 | );
443 | defaultConfigurationIsVisible = 0;
444 | defaultConfigurationName = Release;
445 | };
446 | /* End XCConfigurationList section */
447 | };
448 | rootObject = E979E7B01BEB4D6F0061BC68 /* Project object */;
449 | }
450 |
--------------------------------------------------------------------------------