├── OpenCVSample.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ └── contents.xcworkspacedata
└── xcuserdata
│ └── lukagabric.xcuserdatad
│ ├── xcdebugger
│ └── Breakpoints.xcbkptlist
│ └── xcschemes
│ ├── OpenCVSample.xcscheme
│ └── xcschememanagement.plist
├── OpenCVSample
├── Classes
│ ├── AppDelegate
│ │ ├── AppDelegate.h
│ │ └── AppDelegate.m
│ └── OpenCVViewController
│ │ ├── AbstractOCVViewController.h
│ │ ├── AbstractOCVViewController.m
│ │ ├── ColorCircleViewController.h
│ │ ├── ColorCircleViewController.mm
│ │ ├── ColorCircleViewController.xib
│ │ ├── DetectSmileViewController.h
│ │ ├── DetectSmileViewController.mm
│ │ ├── DetectSmileViewController.xib
│ │ ├── TryYourselfViewController.h
│ │ ├── TryYourselfViewController.mm
│ │ └── TryYourselfViewController.xib
└── Supporting Files
│ ├── OpenCVSample-Info.plist
│ ├── OpenCVSample-Prefix.pch
│ ├── main.m
│ └── smile.xml
├── README.md
└── opencv2.framework
├── Headers
├── Resources
├── Versions
├── A
│ ├── Headers
│ │ ├── calib3d
│ │ │ └── calib3d.hpp
│ │ ├── contrib
│ │ │ ├── contrib.hpp
│ │ │ ├── detection_based_tracker.hpp
│ │ │ ├── hybridtracker.hpp
│ │ │ ├── openfabmap.hpp
│ │ │ └── retina.hpp
│ │ ├── core
│ │ │ ├── core.hpp
│ │ │ ├── core_c.h
│ │ │ ├── cuda_devptrs.hpp
│ │ │ ├── eigen.hpp
│ │ │ ├── gpumat.hpp
│ │ │ ├── internal.hpp
│ │ │ ├── mat.hpp
│ │ │ ├── opengl_interop.hpp
│ │ │ ├── operations.hpp
│ │ │ ├── types_c.h
│ │ │ ├── version.hpp
│ │ │ └── wimage.hpp
│ │ ├── features2d
│ │ │ └── features2d.hpp
│ │ ├── flann
│ │ │ ├── all_indices.h
│ │ │ ├── allocator.h
│ │ │ ├── any.h
│ │ │ ├── autotuned_index.h
│ │ │ ├── composite_index.h
│ │ │ ├── config.h
│ │ │ ├── defines.h
│ │ │ ├── dist.h
│ │ │ ├── dummy.h
│ │ │ ├── dynamic_bitset.h
│ │ │ ├── flann.hpp
│ │ │ ├── flann_base.hpp
│ │ │ ├── general.h
│ │ │ ├── ground_truth.h
│ │ │ ├── hdf5.h
│ │ │ ├── heap.h
│ │ │ ├── hierarchical_clustering_index.h
│ │ │ ├── index_testing.h
│ │ │ ├── kdtree_index.h
│ │ │ ├── kdtree_single_index.h
│ │ │ ├── kmeans_index.h
│ │ │ ├── linear_index.h
│ │ │ ├── logger.h
│ │ │ ├── lsh_index.h
│ │ │ ├── lsh_table.h
│ │ │ ├── matrix.h
│ │ │ ├── miniflann.hpp
│ │ │ ├── nn_index.h
│ │ │ ├── object_factory.h
│ │ │ ├── params.h
│ │ │ ├── random.h
│ │ │ ├── result_set.h
│ │ │ ├── sampling.h
│ │ │ ├── saving.h
│ │ │ ├── simplex_downhill.h
│ │ │ └── timer.h
│ │ ├── highgui
│ │ │ ├── cap_ios.h
│ │ │ ├── highgui.hpp
│ │ │ └── highgui_c.h
│ │ ├── imgproc
│ │ │ ├── imgproc.hpp
│ │ │ ├── imgproc_c.h
│ │ │ └── types_c.h
│ │ ├── legacy
│ │ │ ├── blobtrack.hpp
│ │ │ ├── compat.hpp
│ │ │ ├── legacy.hpp
│ │ │ └── streams.hpp
│ │ ├── ml
│ │ │ └── ml.hpp
│ │ ├── nonfree
│ │ │ ├── features2d.hpp
│ │ │ └── nonfree.hpp
│ │ ├── objdetect
│ │ │ └── objdetect.hpp
│ │ ├── opencv.hpp
│ │ ├── opencv_modules.hpp
│ │ ├── photo
│ │ │ ├── photo.hpp
│ │ │ └── photo_c.h
│ │ ├── softcascade
│ │ │ └── softcascade.hpp
│ │ ├── stitching
│ │ │ ├── detail
│ │ │ │ ├── autocalib.hpp
│ │ │ │ ├── blenders.hpp
│ │ │ │ ├── camera.hpp
│ │ │ │ ├── exposure_compensate.hpp
│ │ │ │ ├── matchers.hpp
│ │ │ │ ├── motion_estimators.hpp
│ │ │ │ ├── seam_finders.hpp
│ │ │ │ ├── util.hpp
│ │ │ │ ├── util_inl.hpp
│ │ │ │ ├── warpers.hpp
│ │ │ │ └── warpers_inl.hpp
│ │ │ ├── stitcher.hpp
│ │ │ └── warpers.hpp
│ │ ├── video
│ │ │ ├── background_segm.hpp
│ │ │ ├── tracking.hpp
│ │ │ └── video.hpp
│ │ ├── videostab
│ │ │ ├── deblurring.hpp
│ │ │ ├── fast_marching.hpp
│ │ │ ├── fast_marching_inl.hpp
│ │ │ ├── frame_source.hpp
│ │ │ ├── global_motion.hpp
│ │ │ ├── inpainting.hpp
│ │ │ ├── log.hpp
│ │ │ ├── motion_core.hpp
│ │ │ ├── motion_stabilizing.hpp
│ │ │ ├── optical_flow.hpp
│ │ │ ├── outlier_rejection.hpp
│ │ │ ├── ring_buffer.hpp
│ │ │ ├── stabilizer.hpp
│ │ │ ├── videostab.hpp
│ │ │ └── wobble_suppression.hpp
│ │ └── world
│ │ │ └── world.hpp
│ ├── Resources
│ │ └── Info.plist
│ └── opencv2
└── Current
└── opencv2
/OpenCVSample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/OpenCVSample.xcodeproj/xcuserdata/lukagabric.xcuserdatad/xcdebugger/Breakpoints.xcbkptlist:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
--------------------------------------------------------------------------------
/OpenCVSample.xcodeproj/xcuserdata/lukagabric.xcuserdatad/xcschemes/OpenCVSample.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
32 |
33 |
39 |
40 |
41 |
42 |
51 |
52 |
58 |
59 |
60 |
61 |
62 |
63 |
69 |
70 |
76 |
77 |
78 |
79 |
81 |
82 |
85 |
86 |
87 |
--------------------------------------------------------------------------------
/OpenCVSample.xcodeproj/xcuserdata/lukagabric.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | SchemeUserState
6 |
7 | OpenCVSample.xcscheme
8 |
9 | orderHint
10 | 0
11 |
12 |
13 | SuppressBuildableAutocreation
14 |
15 | 41158BD916F1137C009FA140
16 |
17 | primary
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/OpenCVSample/Classes/AppDelegate/AppDelegate.h:
--------------------------------------------------------------------------------
1 | #import
2 |
3 |
/// Application delegate: creates the window and installs one of the three
/// sample view controllers as root (see AppDelegate.m to switch samples).
/// NOTE(review): the <UIApplicationDelegate> protocol conformance appears to
/// have been stripped by extraction — confirm against the original header.
4 | @interface AppDelegate : UIResponder
5 |
6 |
/// The app's only window; created in application:didFinishLaunchingWithOptions:.
7 | @property (strong, nonatomic) UIWindow *window;
8 |
9 |
10 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/AppDelegate/AppDelegate.m:
--------------------------------------------------------------------------------
1 | #import "AppDelegate.h"
2 | #import "ColorCircleViewController.h"
3 | #import "DetectSmileViewController.h"
4 | #import "TryYourselfViewController.h"
5 |
6 |
7 | @implementation AppDelegate
8 |
9 |
// Creates the full-screen window and installs the active sample as its root.
// Swap the commented assignments below to try a different sample.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    UIWindow *window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];

    // window.rootViewController = [ColorCircleViewController new];
    window.rootViewController = [DetectSmileViewController new];
    // window.rootViewController = [TryYourselfViewController new];

    _window = window;
    [window makeKeyAndVisible];

    return YES;
}
19 |
20 |
21 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/OpenCVViewController/AbstractOCVViewController.h:
--------------------------------------------------------------------------------
1 | #import
2 | #import
3 | #import
4 |
5 |
/// Base class for the OpenCV samples: owns the AVFoundation capture pipeline
/// and the conversion between captured frames and OpenCV IplImage objects.
/// Subclasses override didCaptureIplImage: to process each frame and must end
/// by calling didFinishProcessingImage: with the image to display.
6 | @interface AbstractOCVViewController : UIViewController
7 | {
/// Target view for the processed frames (set in the subclass .xib).
8 | __weak IBOutlet UIImageView *_imageView;
9 |
10 | AVCaptureSession *_session;
11 | AVCaptureDevice *_captureDevice;
12 |
13 | BOOL _useBackCamera;
14 | }
15 |
16 |
/// Converts an IplImage to a UIImage for display.
/// NOTE(review): ownership of iplImage after conversion is not visible here —
/// confirm in the .m whether the caller must still release it.
17 | - (UIImage*)getUIImageFromIplImage:(IplImage *)iplImage;
/// Subclass hook; called with each captured frame (BGR order, per subclasses).
18 | - (void)didCaptureIplImage:(IplImage *)iplImage;
/// Displays the image in _imageView; per subclass comments it takes ownership
/// of iplImage and releases it when done.
19 | - (void)didFinishProcessingImage:(IplImage *)iplImage;
20 |
21 |
22 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/OpenCVViewController/ColorCircleViewController.h:
--------------------------------------------------------------------------------
1 | #import "AbstractOCVViewController.h"
2 |
3 |
/// Detects circular objects of a color selected by hue. The slider picks a
/// 10-degree hue window ([_min.._max] in OpenCV's 0..180 hue range) used for
/// thresholding before circle detection.
4 | @interface ColorCircleViewController : AbstractOCVViewController
5 | {
/// Current hue window bounds, recomputed in sliderValueChanged:.
6 | double _min, _max;
7 | __weak IBOutlet UISlider *_slider;
/// Shows the current hue window as "min - max".
8 | __weak IBOutlet UILabel *_labelValue;
9 | }
10 |
11 |
/// Recomputes _min/_max from the slider position and updates the label.
12 | - (IBAction)sliderValueChanged:(id)sender;
13 |
14 |
15 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/OpenCVViewController/ColorCircleViewController.mm:
--------------------------------------------------------------------------------
1 | #import "ColorCircleViewController.h"
2 | #import
3 | #include
4 | #include
5 | #include
6 | #import "opencv2/opencv.hpp"
7 |
8 |
9 | using namespace std;
10 | using namespace cv;
11 |
12 |
13 | @implementation ColorCircleViewController
14 |
15 |
// Seeds the hue slider with a value in the yellow range and syncs the
// derived threshold bounds and the label.
- (void)viewDidLoad
{
    [super viewDidLoad];

    _slider.value = 0.122; // yellow hue range
    [self sliderValueChanged:nil];
}
24 |
25 |
// Maps the slider's 0..1 position onto a 10-degree-wide hue window inside
// OpenCV's 0..180 hue range, then mirrors the bounds into the label.
- (IBAction)sliderValueChanged:(id)sender
{
    const double hueFloor = 0;
    const double hueCeiling = 180;
    const double windowWidth = 10;

    _min = hueFloor + _slider.value * (hueCeiling - hueFloor - windowWidth);
    _max = _min + windowWidth;

    _labelValue.text = [NSString stringWithFormat:@"%.2f - %.2f", _min, _max];
}
37 |
38 |
39 | //NO shows RGB image and highlights found circles
40 | //YES shows threshold image
41 | static BOOL _debug = NO;
42 |
43 |
// Thresholds the frame on the hue window [_min.._max], finds circles in the
// resulting mask, and draws them on the RGB frame (or shows the raw mask
// when _debug is YES).
- (void)didCaptureIplImage:(IplImage *)iplImage
{
    //the captured frame arrives in BGR order; make an RGB copy for display
    IplImage *rgbFrame = cvCreateImage(cvGetSize(iplImage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplImage, rgbFrame, CV_BGR2RGB);
    Mat rgbMat = Mat(rgbFrame);

    //the ipl image is also converted to HSV; hue is used to find a certain color
    IplImage *hsvFrame = cvCreateImage(cvGetSize(iplImage), 8, 3);
    cvCvtColor(iplImage, hsvFrame, CV_BGR2HSV);

    //single-channel mask that receives the thresholding result
    IplImage *maskFrame = cvCreateImage(cvGetSize(iplImage), 8, 1);

    //every image must be released here EXCEPT the one later handed to
    //didFinishProcessingImage: (which displays and then releases it)
    cvReleaseImage(&iplImage);

    //pixels whose HSV values fall in range become white, everything else black
    cvInRangeS(hsvFrame, cvScalar(_min, 100, 100), cvScalar(_max, 255, 255), maskFrame);

    cvReleaseImage(&hsvFrame);

    Mat maskMat = Mat(maskFrame);

    //soften the mask edges so HoughCircles behaves better
    cv::GaussianBlur(maskMat, maskMat, cv::Size(9, 9), 2, 2);

    if (_debug)
    {
        //debug shows the raw threshold mask instead of the RGB frame
        cvReleaseImage(&rgbFrame);
        [self didFinishProcessingImage:maskFrame];
    }
    else
    {
        //detect circles in the mask, then draw them on the RGB frame
        vector<Vec3f> circles;

        HoughCircles(maskMat,
                     circles,
                     CV_HOUGH_GRADIENT,
                     2,
                     maskMat.rows / 4,
                     150,
                     75,
                     10,
                     150);

        for (size_t idx = 0; idx < circles.size(); idx++)
        {
            cout << "Circle position x = " << (int)circles[idx][0] << ", y = " << (int)circles[idx][1] << ", radius = " << (int)circles[idx][2] << "\n";

            cv::Point center(cvRound(circles[idx][0]), cvRound(circles[idx][1]));
            int radius = cvRound(circles[idx][2]);

            circle(rgbMat, center, 3, Scalar(0, 255, 0), -1, 8, 0);   //center dot
            circle(rgbMat, center, radius, Scalar(0, 0, 255), 3, 8, 0); //outline
        }

        //the mask is no longer needed
        cvReleaseImage(&maskFrame);

        //rgbFrame is released by didFinishProcessingImage: once displayed
        [self didFinishProcessingImage:rgbFrame];
    }
}
118 |
119 |
120 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/OpenCVViewController/DetectSmileViewController.h:
--------------------------------------------------------------------------------
1 | #import "AbstractOCVViewController.h"
2 | #import
3 | #import
4 |
5 |
/// Runs a Haar-cascade smile detector (smile.xml) on each captured frame and,
/// when a smile is found, shows the (half-size) frame in a secondary image view.
6 | @interface DetectSmileViewController : AbstractOCVViewController
7 | {
/// Secondary view that displays the frame containing the detected smile.
8 | __weak IBOutlet UIImageView *_imageViewSmile;
/// Haar cascade loaded from smile.xml in viewDidLoad; released in dealloc.
9 | CvHaarClassifierCascade *_cascade;
/// Scratch storage for cvHaarDetectObjects; released in dealloc.
10 | CvMemStorage *_storage;
11 | }
12 |
13 |
14 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/OpenCVViewController/DetectSmileViewController.mm:
--------------------------------------------------------------------------------
1 | #import "DetectSmileViewController.h"
2 | #import
3 | #include
4 | #include
5 | #include
6 | #import "opencv2/opencv.hpp"
7 |
8 |
9 | using namespace std;
10 | using namespace cv;
11 |
12 |
13 | @implementation DetectSmileViewController
14 |
15 |
16 | #pragma mark - dealloc
17 |
18 |
// Frees the OpenCV resources acquired in viewDidLoad (ARC handles the rest).
- (void)dealloc
{
    cvReleaseMemStorage(&_storage);
    cvReleaseHaarClassifierCascade(&_cascade);
}
24 |
25 |
26 | #pragma mark - View
27 |
28 |
// Loads the smile Haar cascade and the detection scratch storage; both are
// released in dealloc.
- (void)viewDidLoad
{
    NSString *path = [[NSBundle mainBundle] pathForResource:@"smile" ofType:@"xml"];

    // -fileSystemRepresentation is the correct encoding for paths handed to C
    // APIs; the previous NSASCIIStringEncoding conversion returns NULL for any
    // non-ASCII path component.
    _cascade = (CvHaarClassifierCascade *)cvLoad([path fileSystemRepresentation], NULL, NULL, NULL);
    NSAssert(_cascade != NULL, @"Failed to load the smile.xml Haar cascade");

    _storage = cvCreateMemStorage(0);

    // Super is deliberately called LAST: the superclass starts frame capture,
    // and didCaptureIplImage: must not run before the cascade/storage exist.
    [super viewDidLoad];
}
37 |
38 |
// Drops the outlet reference when the view is torn down (pre-iOS 6 lifecycle).
- (void)viewDidUnload
{
    _imageViewSmile = nil;
    [super viewDidUnload];
}
44 |
45 |
46 | #pragma mark - didCaptureIplImage
47 |
48 |
// Runs smile detection on a half-size copy of the frame, outlines every
// detection, shows the annotated small frame when a smile is present, and
// displays the full RGB frame via didFinishProcessingImage:.
- (void)didCaptureIplImage:(IplImage *)iplImage
{
    //the captured frame arrives in BGR order; make an RGB copy for display
    IplImage *imgRGB = cvCreateImage(cvGetSize(iplImage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplImage, imgRGB, CV_BGR2RGB);

    //release the source frame (the original leaked one IplImage per frame;
    //the sibling controllers release it at this point too)
    cvReleaseImage(&iplImage);

    //detect on a half-size pyramid level — cheaper and accurate enough
    IplImage *imgSmall = cvCreateImage(cvSize(imgRGB->width / 2, imgRGB->height / 2), IPL_DEPTH_8U, 3);
    cvPyrDown(imgRGB, imgSmall, CV_GAUSSIAN_5x5);

    //clear the reused storage so detection results don't accumulate frame
    //after frame (unbounded growth otherwise)
    cvClearMemStorage(_storage);
    CvSeq *smiles = cvHaarDetectObjects(imgSmall, _cascade, _storage, 1.1f, 3, CV_HAAR_DO_CANNY_PRUNING);

    //outline every detection; the original indexed element 0 on every pass,
    //drawing only the first hit repeatedly
    Mat matImgSmall = Mat(imgSmall);
    for (int i = 0; i < smiles->total; i++)
    {
        CvRect cvrect = *(CvRect *)cvGetSeqElem(smiles, i);
        rectangle(matImgSmall, cvrect, Scalar(255, 0, 0));
    }

    if (smiles->total > 0)
    {
        //showSmileWithImage: displays imgSmall asynchronously on the main queue
        [self showSmileWithImage:imgSmall];
    }
    else
    {
        cvReleaseImage(&imgSmall);
    }

    //imgRGB is released by didFinishProcessingImage: once displayed
    [self didFinishProcessingImage:imgRGB];
}
79 |
80 |
81 | #pragma mark - Show Smile Image
82 |
83 |
// Hops to the main queue (UIKit requirement) and shows the smile frame.
// NOTE(review): smileImage is never released after conversion — verify whether
// getUIImageFromIplImage: takes ownership; if not, this leaks one frame per smile.
- (void)showSmileWithImage:(IplImage *)smileImage
{
    dispatch_async(dispatch_get_main_queue(), ^{
        _imageViewSmile.image = [self getUIImageFromIplImage:smileImage];
    });
}
91 |
92 |
93 | #pragma mark -
94 |
95 |
96 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/OpenCVViewController/TryYourselfViewController.h:
--------------------------------------------------------------------------------
1 | #import "AbstractOCVViewController.h"
2 |
3 |
/// Sandbox sample: its didCaptureIplImage: override (see the .mm) simply blurs
/// each frame — a placeholder for experimenting with your own OpenCV code.
4 | @interface TryYourselfViewController : AbstractOCVViewController
5 |
6 |
7 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Classes/OpenCVViewController/TryYourselfViewController.mm:
--------------------------------------------------------------------------------
1 | #import "TryYourselfViewController.h"
2 | #import
3 | #include
4 | #include
5 | #include
6 | #import "opencv2/opencv.hpp"
7 |
8 |
9 | using namespace std;
10 | using namespace cv;
11 |
12 |
13 | @implementation TryYourselfViewController
14 |
15 |
// Placeholder processing pipeline: converts the frame to RGB, applies a
// Gaussian blur, and hands the result off for display. Replace the blur with
// your own OpenCV experiments.
- (void)didCaptureIplImage:(IplImage *)iplImage
{
    //capture frames arrive in BGR order; build an RGB copy for display
    IplImage *processed = cvCreateImage(cvGetSize(iplImage), IPL_DEPTH_8U, 3);
    cvCvtColor(iplImage, processed, CV_BGR2RGB);

    //release the source frame right away — only the image handed to
    //didFinishProcessingImage: may outlive this method
    cvReleaseImage(&iplImage);

    //wrap the buffer in a cv::Mat and manipulate it however you like;
    //a blur serves as the demo here
    Mat processedMat = Mat(processed);
    cv::GaussianBlur(processedMat, processedMat, cv::Size(19, 19), 10, 10);

    //didFinishProcessingImage: displays the image and releases it afterwards
    [self didFinishProcessingImage:processed];
}
41 |
42 |
43 | @end
--------------------------------------------------------------------------------
/OpenCVSample/Supporting Files/OpenCVSample-Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleDisplayName
8 | ${PRODUCT_NAME}
9 | CFBundleExecutable
10 | ${EXECUTABLE_NAME}
11 | CFBundleIdentifier
12 | com.${PRODUCT_NAME:rfc1034identifier}
13 | CFBundleInfoDictionaryVersion
14 | 6.0
15 | CFBundleName
16 | ${PRODUCT_NAME}
17 | CFBundlePackageType
18 | APPL
19 | CFBundleShortVersionString
20 | 1.0
21 | CFBundleSignature
22 | ????
23 | CFBundleVersion
24 | 1.0
25 | LSRequiresIPhoneOS
26 |
27 | UIRequiredDeviceCapabilities
28 |
29 | armv7
30 |
31 | UISupportedInterfaceOrientations
32 |
33 | UIInterfaceOrientationPortrait
34 | UIInterfaceOrientationLandscapeLeft
35 | UIInterfaceOrientationLandscapeRight
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/OpenCVSample/Supporting Files/OpenCVSample-Prefix.pch:
--------------------------------------------------------------------------------
1 | #import
2 |
3 |
4 | #ifndef __IPHONE_3_0
5 | #warning "This project uses features only available in iOS SDK 3.0 and later."
6 | #endif
7 |
8 |
9 | #ifdef __OBJC__
10 | #import
11 | #import
12 | #endif
--------------------------------------------------------------------------------
/OpenCVSample/Supporting Files/main.m:
--------------------------------------------------------------------------------
1 | #import
2 | #import "AppDelegate.h"
3 |
4 |
// Standard UIKit entry point: hands control to the run loop with AppDelegate
// as the application delegate.
int main(int argc, char *argv[])
{
    @autoreleasepool
    {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | iOS-OpenCV
2 | ==========
3 |
4 | iOS OpenCV Sample
5 |
6 | [](http://lukagabric.com/wp-content/uploads/2013/03/circle_ocv.png)
7 |
8 | Structure
9 | =========
10 |
11 | AbstractOCVViewController
12 | -------------------------
13 | The sample project is based around the AbstractOCVViewController class with main image capture and transformation from iOS captured image to OpenCV IplImage object.
14 | An AbstractOCVViewController subclass needs to override and implement the didCaptureIplImage: method. The captured and processed IplImage object is passed here as a parameter. After image manipulation using the OpenCV framework, the didFinishProcessingImage: method is called. The method will handle the transformation and presentation of the IplImage in a UIImageView. There are three samples in this project - color circle detection, smile detection and image blurring - so you can try and test OpenCV yourself.
15 |
16 | ColorCircleViewController
17 | -------------------------
18 | Used for detecting circular objects of a certain color defined by hue. Change the slider value in order to detect different colors. Set the _debug flag to YES in order to see the threshold image.
19 |
20 | DetectSmileViewController
21 | -------------------------
22 | When a smile is detected, the captured image is presented in the top right corner of the screen.
23 |
24 | TryYourselfViewController
25 | -------------------------
26 | I just blurred the image here. You may want to try OpenCV yourself here.
27 |
--------------------------------------------------------------------------------
/opencv2.framework/Headers:
--------------------------------------------------------------------------------
1 | Versions/A/Headers
--------------------------------------------------------------------------------
/opencv2.framework/Resources:
--------------------------------------------------------------------------------
1 | Versions/A/Resources
--------------------------------------------------------------------------------
/opencv2.framework/Versions/A/Headers/contrib/detection_based_tracker.hpp:
--------------------------------------------------------------------------------
1 | #pragma once
2 |
3 | #if defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
4 |
5 | #include
6 | #include
7 |
8 | #include
9 |
10 | namespace cv
11 | {
12 | class DetectionBasedTracker
13 | {
14 | public:
15 | struct Parameters
16 | {
17 | int maxTrackLifetime;
18 | int minDetectionPeriod; //the minimal time between run of the big object detector (on the whole frame) in ms (1000 mean 1 sec), default=0
19 |
20 | Parameters();
21 | };
22 |
23 | class IDetector
24 | {
25 | public:
26 | IDetector():
27 | minObjSize(96, 96),
28 | maxObjSize(INT_MAX, INT_MAX),
29 | minNeighbours(2),
30 | scaleFactor(1.1f)
31 | {}
32 |
33 | virtual void detect(const cv::Mat& image, std::vector& objects) = 0;
34 |
35 | void setMinObjectSize(const cv::Size& min)
36 | {
37 | minObjSize = min;
38 | }
39 | void setMaxObjectSize(const cv::Size& max)
40 | {
41 | maxObjSize = max;
42 | }
43 | cv::Size getMinObjectSize() const
44 | {
45 | return minObjSize;
46 | }
47 | cv::Size getMaxObjectSize() const
48 | {
49 | return maxObjSize;
50 | }
51 | float getScaleFactor()
52 | {
53 | return scaleFactor;
54 | }
55 | void setScaleFactor(float value)
56 | {
57 | scaleFactor = value;
58 | }
59 | int getMinNeighbours()
60 | {
61 | return minNeighbours;
62 | }
63 | void setMinNeighbours(int value)
64 | {
65 | minNeighbours = value;
66 | }
67 | virtual ~IDetector() {}
68 |
69 | protected:
70 | cv::Size minObjSize;
71 | cv::Size maxObjSize;
72 | int minNeighbours;
73 | float scaleFactor;
74 | };
75 |
76 | DetectionBasedTracker(cv::Ptr mainDetector, cv::Ptr trackingDetector, const Parameters& params);
77 | virtual ~DetectionBasedTracker();
78 |
79 | virtual bool run();
80 | virtual void stop();
81 | virtual void resetTracking();
82 |
83 | virtual void process(const cv::Mat& imageGray);
84 |
85 | bool setParameters(const Parameters& params);
86 | const Parameters& getParameters() const;
87 |
88 |
89 | typedef std::pair Object;
90 | virtual void getObjects(std::vector& result) const;
91 | virtual void getObjects(std::vector