├── myapp.rc ├── images ├── about.png ├── icon1.ico ├── icon1.png ├── open.png ├── play1.png ├── work.png ├── capture.png ├── export.png ├── pause1.png ├── repeat2.png └── capture_all.png ├── src ├── main.cpp ├── about.h ├── about.cpp ├── dialogsnapframes.h ├── processpoints.h ├── effects.h ├── settings.h ├── dialogsnapframes.cpp ├── exports.h ├── surf │ ├── integral.cpp │ ├── integral.h │ ├── surf.h │ ├── ipoint.h │ ├── utils.h │ ├── responselayer.h │ ├── ipoint.cpp │ ├── surflib.h │ ├── fasthessian.h │ ├── kmeans.h │ ├── utils.cpp │ ├── surf.cpp │ └── fasthessian.cpp ├── processthread.h ├── processpoints.cpp ├── effects.cpp ├── mainwindow.h ├── settings.cpp ├── processthread.cpp ├── exports.cpp └── mainwindow.cpp ├── Resources.qrc ├── forms ├── dialogsnapframes.ui ├── about.ui ├── settings.ui ├── exports.ui └── mainwindow.ui ├── FYP.pro ├── CHANGELOG ├── README.md └── FYP.pro.user /myapp.rc: -------------------------------------------------------------------------------- 1 | IDI_ICON1 ICON DISCARDABLE "images/icon1.ico" -------------------------------------------------------------------------------- /images/about.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/about.png -------------------------------------------------------------------------------- /images/icon1.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/icon1.ico -------------------------------------------------------------------------------- /images/icon1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/icon1.png -------------------------------------------------------------------------------- /images/open.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/open.png -------------------------------------------------------------------------------- /images/play1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/play1.png -------------------------------------------------------------------------------- /images/work.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/work.png -------------------------------------------------------------------------------- /images/capture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/capture.png -------------------------------------------------------------------------------- /images/export.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/export.png -------------------------------------------------------------------------------- /images/pause1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/pause1.png -------------------------------------------------------------------------------- /images/repeat2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/repeat2.png -------------------------------------------------------------------------------- /images/capture_all.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/faizshukri/humantracking/HEAD/images/capture_all.png -------------------------------------------------------------------------------- /src/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include "mainwindow.h" 3 | 4 | int main(int argc, char *argv[]) 5 | { 6 | QApplication a(argc, argv); 7 | MainWindow w; 8 | w.show(); 9 | 10 | return a.exec(); 11 | } 12 | -------------------------------------------------------------------------------- /src/about.h: -------------------------------------------------------------------------------- 1 | #ifndef ABOUT_H 2 | #define ABOUT_H 3 | 4 | #include 5 | 6 | namespace Ui { 7 | class About; 8 | } 9 | 10 | class About : public QDialog 11 | { 12 | Q_OBJECT 13 | 14 | public: 15 | explicit About(QWidget *parent = 0); 16 | ~About(); 17 | 18 | private: 19 | Ui::About *ui; 20 | }; 21 | 22 | #endif // ABOUT_H 23 | -------------------------------------------------------------------------------- /src/about.cpp: -------------------------------------------------------------------------------- 1 | #include "about.h" 2 | #include "ui_about.h" 3 | 4 | About::About(QWidget *parent) : 5 | QDialog(parent), 6 | ui(new Ui::About) 7 | { 8 | ui->setupUi(this); 9 | ui->labelPic->setPixmap(QPixmap(":/images/about")); 10 | connect(ui->btnClose, SIGNAL(clicked()), this, SLOT(close())); 11 | } 12 | 13 | About::~About() 14 | { 15 | delete ui; 16 | } 17 | -------------------------------------------------------------------------------- /src/dialogsnapframes.h: -------------------------------------------------------------------------------- 1 | #ifndef DIALOGSNAPFRAMES_H 2 | #define DIALOGSNAPFRAMES_H 3 | 4 | #include 5 | 6 | namespace Ui { 7 | class dialogSnapFrames; 8 | } 9 | 10 | class dialogSnapFrames : public QDialog 11 | { 12 | Q_OBJECT 13 | 14 | public: 15 | explicit dialogSnapFrames(QWidget *parent = 0, int progMax = 100); 16 | ~dialogSnapFrames(); 17 | void 
setButtonEnable(bool); 18 | 19 | public slots: 20 | void setCurProgress(int, int); 21 | 22 | 23 | private: 24 | Ui::dialogSnapFrames *ui; 25 | }; 26 | 27 | #endif // DIALOGSNAPFRAMES_H 28 | -------------------------------------------------------------------------------- /Resources.qrc: -------------------------------------------------------------------------------- 1 | 2 | 3 | images/pause1.png 4 | images/play1.png 5 | images/repeat2.png 6 | images/icon1.ico 7 | images/capture.png 8 | images/capture_all.png 9 | images/export.png 10 | images/open.png 11 | images/about.png 12 | 13 | 14 | -------------------------------------------------------------------------------- /src/processpoints.h: -------------------------------------------------------------------------------- 1 | #ifndef PROCESSPOINTS_H 2 | #define PROCESSPOINTS_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include "surf/ipoint.h" 9 | using namespace cv; 10 | 11 | class processPoints : public QObject 12 | { 13 | Q_OBJECT 14 | public: 15 | // param fileName is including the frame no (current frame) 16 | explicit processPoints(QObject *parent = 0, IpVec point = (IpVec) 0, QString fileName = ""); 17 | explicit processPoints(QObject *parent = 0, vector point = (vector) 0, QString fileName = ""); 18 | ~processPoints(); 19 | 20 | signals: 21 | 22 | public slots: 23 | 24 | private: 25 | IpVec points; 26 | vector rectHog; 27 | QFile *myFile; 28 | QTextStream *out; 29 | 30 | void processSurf(); 31 | void processHog(); 32 | }; 33 | 34 | #endif // PROCESSPOINTS_H 35 | -------------------------------------------------------------------------------- /src/effects.h: -------------------------------------------------------------------------------- 1 | #ifndef EFFECTS_H 2 | #define EFFECTS_H 3 | 4 | #include 5 | #include 6 | #include 7 | //#include 8 | #include "surf/surflib.h" 9 | //#include "surf/kmeans.h" 10 | #include 11 | using namespace cv; 12 | 13 | class Effects : public QObject 14 | { 15 | Q_OBJECT 
16 | public: 17 | explicit Effects(QObject *parent = 0); 18 | ~Effects(); 19 | void Flip(cv::Mat &img, cv::Mat &out, int code); 20 | void Edge(cv::Mat &img, cv::Mat &out, int thresh, bool invert); 21 | void SurfD(cv::Mat &img); 22 | void HogD(cv::Mat &img); 23 | 24 | //bool savePoint; //Save the points. 0 for Surf, 1 for Hog 25 | 26 | signals: 27 | void vectorOfExtractPoint(IpVec); 28 | void vectorOfExtractPoint(vector ); 29 | 30 | public slots: 31 | 32 | }; 33 | 34 | #endif // EFFECTS_H 35 | -------------------------------------------------------------------------------- /src/settings.h: -------------------------------------------------------------------------------- 1 | #ifndef SETTINGS_H 2 | #define SETTINGS_H 3 | 4 | #include 5 | #include 6 | 7 | namespace Ui { 8 | class Settings; 9 | } 10 | 11 | class Settings : public QDialog 12 | { 13 | Q_OBJECT 14 | 15 | public: 16 | ~Settings(); 17 | QString getSnapPath(); 18 | QString getExportPath(); 19 | int getVideoFrame(); 20 | int getFrameToSkip(); 21 | static Settings *getInstance(QWidget *parent); 22 | 23 | 24 | protected slots: 25 | void saveSetting(bool); 26 | void resetDefault(); 27 | void browseSnap(); 28 | void browseExport(); 29 | 30 | private: 31 | //explicit Settings(QWidget *parent = 0); //constructor is private for singleton 32 | Settings(QWidget *parent); 33 | Ui::Settings *ui; 34 | Settings *myProgSetting; 35 | QString FileName; 36 | void initUserSetting(); 37 | void initDefault(); 38 | static Settings *instance; 39 | }; 40 | 41 | #endif // SETTINGS_H 42 | -------------------------------------------------------------------------------- /src/dialogsnapframes.cpp: -------------------------------------------------------------------------------- 1 | #include "dialogsnapframes.h" 2 | #include "ui_dialogsnapframes.h" 3 | #include "settings.h" 4 | 5 | dialogSnapFrames::dialogSnapFrames(QWidget *parent, int progMax) : 6 | QDialog(parent), 7 | ui(new Ui::dialogSnapFrames) 8 | { 9 | ui->setupUi(this); 10 | 
ui->progressBar->setMaximum(progMax); 11 | ui->btnCancel->setEnabled(false); 12 | 13 | connect(parent, SIGNAL(displayCurProgress(int, int)), this, SLOT(setCurProgress(int, int))); 14 | connect(ui->btnCancel, SIGNAL(clicked()), this, SLOT(close())); 15 | } 16 | 17 | dialogSnapFrames::~dialogSnapFrames() 18 | { 19 | delete ui; 20 | } 21 | 22 | void dialogSnapFrames::setCurProgress(int cur, int total){ 23 | ui->progressBar->setValue(cur); 24 | ui->labelProgress->setText(QString::number(cur) + " processed from " + QString::number(total));//QString::number(total - Settings::getInstance(this)->getFrameToSkip() + 1) + " frames."); 25 | } 26 | 27 | void dialogSnapFrames::setButtonEnable(bool) 28 | { 29 | ui->btnCancel->setEnabled(true); 30 | } 31 | -------------------------------------------------------------------------------- /src/exports.h: -------------------------------------------------------------------------------- 1 | #ifndef EXPORTS_H 2 | #define EXPORTS_H 3 | 4 | #include 5 | //#include 6 | #include 7 | #include "settings.h" 8 | #include "effects.h" 9 | #include "processthread.h" 10 | 11 | namespace Ui { 12 | class Exports; 13 | } 14 | 15 | class Exports : public QDialog 16 | { 17 | Q_OBJECT 18 | 19 | public: 20 | explicit Exports(QWidget *parent = 0); 21 | ~Exports(); 22 | void setupThread(QThread&); 23 | 24 | protected slots: 25 | void browseVideos(); 26 | void showProgressBar(bool); 27 | void toggleEdge(bool); 28 | void toggleFlip(bool); 29 | void toggleHumanDetec(bool); 30 | void toggleHog(bool); 31 | void toggleSurf(bool); 32 | void setThresh(int); 33 | void setThresh(QString); 34 | void timerTick(); 35 | void setCurFrame(int); 36 | 37 | private: 38 | Ui::Exports *ui; 39 | cv::VideoCapture** capture; 40 | QStringList fileName; //store the video output filename 41 | cv::VideoWriter* writer; 42 | 43 | Settings *setting; 44 | Effects* effect; 45 | 46 | processThread **thread; 47 | 48 | 49 | int totalProgress; //store total progress frames 50 | int curFrame; 
//keep track of current frame for display total progress 51 | 52 | void reBrowse(); 53 | void setInitialProp(processThread *); 54 | 55 | int count; //store number of videos 56 | 57 | }; 58 | 59 | #endif // EXPORTS_H 60 | -------------------------------------------------------------------------------- /src/surf/integral.cpp: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | #include "utils.h" 13 | 14 | #include "integral.h" 15 | 16 | //! Computes the integral image of image img. Assumes source image to be a 17 | //! 32-bit floating point. Returns IplImage of 32-bit float form. 
18 | IplImage *Integral(IplImage *source) 19 | { 20 | // convert the image to single channel 32f 21 | IplImage *img = getGray(source); 22 | IplImage *int_img = cvCreateImage(cvGetSize(img), IPL_DEPTH_32F, 1); 23 | 24 | // set up variables for data access 25 | int height = img->height; 26 | int width = img->width; 27 | int step = img->widthStep/sizeof(float); 28 | float *data = (float *) img->imageData; 29 | float *i_data = (float *) int_img->imageData; 30 | 31 | // first row only 32 | float rs = 0.0f; 33 | for(int j=0; j 2 | 3 | dialogSnapFrames 4 | 5 | 6 | 7 | 0 8 | 0 9 | 346 10 | 89 11 | 12 | 13 | 14 | Current progress 15 | 16 | 17 | 18 | 19 | 20 | 24 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | Qt::Horizontal 30 | 31 | 32 | 33 | 40 34 | 20 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | progress 43 | 44 | 45 | 46 | 47 | 48 | 49 | Qt::Horizontal 50 | 51 | 52 | 53 | 40 54 | 20 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | Close 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | -------------------------------------------------------------------------------- /src/surf/integral.h: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | #ifndef INTEGRAL_H 13 | #define INTEGRAL_H 14 | 15 | #include // req'd for std::min/max 16 | 17 | // undefine VS macros 18 | #ifdef min 19 | #undef min 20 | #endif 21 | 22 | #ifdef max 23 | #undef max 24 | #endif 25 | 26 | #include 27 | 28 | //! Computes the integral image of image img. Assumes source image to be a 29 | //! 32-bit floating point. Returns IplImage in 32-bit float form. 
30 | IplImage *Integral(IplImage *img); 31 | 32 | 33 | //! Computes the sum of pixels within the rectangle specified by the top-left start 34 | //! co-ordinate and size 35 | inline float BoxIntegral(IplImage *img, int row, int col, int rows, int cols) 36 | { 37 | float *data = (float *) img->imageData; 38 | int step = img->widthStep/sizeof(float); 39 | 40 | // The subtraction by one for row/col is because row/col is inclusive. 41 | int r1 = std::min(row, img->height) - 1; 42 | int c1 = std::min(col, img->width) - 1; 43 | int r2 = std::min(row + rows, img->height) - 1; 44 | int c2 = std::min(col + cols, img->width) - 1; 45 | 46 | float A(0.0f), B(0.0f), C(0.0f), D(0.0f); 47 | if (r1 >= 0 && c1 >= 0) A = data[r1 * step + c1]; 48 | if (r1 >= 0 && c2 >= 0) B = data[r1 * step + c2]; 49 | if (r2 >= 0 && c1 >= 0) C = data[r2 * step + c1]; 50 | if (r2 >= 0 && c2 >= 0) D = data[r2 * step + c2]; 51 | 52 | return std::max(0.f, A - B - C + D); 53 | } 54 | 55 | #endif 56 | -------------------------------------------------------------------------------- /src/processthread.h: -------------------------------------------------------------------------------- 1 | #ifndef PROCESSTHREAD_H 2 | #define PROCESSTHREAD_H 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include "effects.h" 10 | #include "settings.h" 11 | #include "processpoints.h" 12 | 13 | class processThread : public QThread 14 | { 15 | Q_OBJECT 16 | public: 17 | explicit processThread(QObject *parent = 0, cv::VideoCapture *cap = 0, 18 | bool writeVideo = false, int extractPoint = 0, string fileName = ""); 19 | ~processThread(); 20 | void run(); 21 | void destroy(); 22 | 23 | bool stop; 24 | //Flip 25 | bool flip; 26 | int flipCode; 27 | 28 | //Edge 29 | bool edge; 30 | bool edgeInvert; 31 | int edgeThresh; 32 | 33 | //Hog 34 | bool hog; 35 | 36 | //Surf 37 | bool surf; 38 | 39 | //Other Setting 40 | string fileName; 41 | int fps; 42 | bool pause; 43 | int pauseAt; //if user pause, where it is 44 | 
int cur; 45 | bool move; 46 | int frameToSkip; 47 | void setValueJ(int); 48 | 49 | public slots: 50 | void resendExtractPoint(IpVec); 51 | void resendExtractPoint(vector); 52 | 53 | private: 54 | cv::VideoCapture *capture; 55 | cv::VideoWriter *writer; 56 | Effects *effect; 57 | processPoints **points; 58 | int count; //use to count current frame 59 | bool isWrite; //is writer needed? 60 | int exportPoints; //check if exportsPoint is set. 0 = not set. 1 = surf. 2 = hog. use this at exports.cpp. only when user check extract point 61 | 62 | signals: 63 | void currentFrame(int); 64 | void currentFrame(int, Mat); 65 | void finishProcess(bool); 66 | //void numOfExtPointSurf(IpVec); 67 | 68 | 69 | }; 70 | 71 | class MyTask: public QRunnable{ 72 | 73 | public: 74 | MyTask(QThread* abc) : aThread(abc){} 75 | void run(){ 76 | this->aThread->start(); 77 | } 78 | 79 | 80 | private: 81 | QThread *aThread; 82 | 83 | }; 84 | 85 | #endif // PROCESSTHREAD_H 86 | -------------------------------------------------------------------------------- /src/processpoints.cpp: -------------------------------------------------------------------------------- 1 | #include "processpoints.h" 2 | 3 | processPoints::processPoints(QObject *parent, IpVec point, QString fileName) : 4 | QObject(parent) 5 | { 6 | this->points = point; 7 | this->myFile = new QFile(fileName); // filename is including the frame no (current frame) 8 | processSurf(); 9 | } 10 | 11 | processPoints::processPoints(QObject *parent, vector point, QString fileName) : 12 | QObject(parent) 13 | { 14 | this->rectHog = point; 15 | this->myFile = new QFile(fileName); // filename is including the frame no (current frame) 16 | processHog(); 17 | } 18 | 19 | void processPoints::processSurf(){ 20 | this->myFile->open(QIODevice::WriteOnly | QIODevice::Text); 21 | this->out = new QTextStream(this->myFile); 22 | 23 | for(int i = 0; i < this->points.size(); i++){ 24 | this->out->operator <<(QString::number((double)this->points.at(i).dx) + "," + 
QString::number((double)this->points.at(i).dy) + "," + 25 | QString::number((double)this->points.at(i).laplacian) + "," + QString::number((double)this->points.at(i).orientation) + "," + QString::number((double)this->points.at(i).scale) + "," + 26 | QString::number((double)this->points.at(i).x) + "," + QString::number((double)this->points.at(i).y) + 27 | "\n"); 28 | } 29 | this->myFile->close(); 30 | } 31 | 32 | void processPoints::processHog(){ 33 | this->myFile->open(QIODevice::WriteOnly | QIODevice::Text); 34 | this->out = new QTextStream(this->myFile); 35 | 36 | for(int i = 0; i < this->rectHog.size(); i++){ 37 | this->out->operator <<( QString::number(this->rectHog.at(i).height) + "," + QString::number(this->rectHog.at(i).width) + "," + 38 | QString::number(this->rectHog.at(i).x) + "," + QString::number(this->rectHog.at(i).y) + "," + 39 | "\n"); 40 | } 41 | this->myFile->close(); 42 | 43 | } 44 | 45 | processPoints::~processPoints(){ 46 | delete this->myFile; 47 | delete this->out; 48 | this->myFile = 0; 49 | this->out = 0; 50 | } 51 | -------------------------------------------------------------------------------- /src/effects.cpp: -------------------------------------------------------------------------------- 1 | #include "effects.h" 2 | 3 | Effects::Effects(QObject *parent) : 4 | QObject(parent) 5 | { 6 | } 7 | 8 | Effects::~Effects(){ 9 | 10 | } 11 | 12 | void Effects::Flip(cv::Mat &img, cv::Mat &out, int code){ 13 | cv::flip(img,out,code); 14 | } 15 | 16 | void Effects::Edge(cv::Mat &img, cv::Mat &out, int thresh, bool invert = true){ 17 | cv::cvtColor(img,out,CV_BGR2GRAY); 18 | cv::Canny(out, out, (double)thresh, (double)3*thresh); 19 | if(invert) 20 | cv::threshold(out, out, 128, 256, CV_THRESH_BINARY_INV); 21 | else 22 | cv::threshold(out, out, 128, 256, CV_THRESH_BINARY); 23 | cv::cvtColor(out,out,CV_GRAY2BGR); 24 | } 25 | 26 | void Effects::SurfD(cv::Mat &img){ 27 | // SurfFeatureDetector abu; 28 | 29 | IpVec ipts; 30 | IplImage img2=img; 31 | 32 | 
// Extract surf points 33 | surfDetDes(&img2, ipts, false, 4, 4, 2, 0.004f); 34 | 35 | // Draw the detected points 36 | drawIpoints(&img2, ipts); 37 | 38 | img = cv::Mat(&img2); 39 | 40 | //emit numOfExtPointSurf(ipts); 41 | emit vectorOfExtractPoint(ipts); 42 | } 43 | 44 | void Effects::HogD(Mat &img){ 45 | HOGDescriptor hog; 46 | hog.setSVMDetector(HOGDescriptor::getDefaultPeopleDetector()); 47 | vector found, found_filtered; 48 | hog.detectMultiScale(img, found, 0, Size(8,8), Size(32,32), 1.05, 2); 49 | 50 | size_t i, j; 51 | 52 | Rect r; 53 | for(i =0; i < found.size(); i++){ 54 | 55 | r = found[i]; 56 | for(j = 0; j < found.size(); j++){ 57 | if(j!=i && (r & found[j]) == r) 58 | break; 59 | } 60 | 61 | if(j == found.size()){ 62 | found_filtered.push_back(r); 63 | } 64 | } 65 | 66 | for (i=0; i 16 | #include 17 | #include 18 | #include 19 | //#include 20 | #include "ipoint.h" 21 | #include "integral.h" 22 | 23 | #include 24 | 25 | class Surf { 26 | 27 | public: 28 | 29 | //! Standard Constructor (img is an integral image) 30 | Surf(IplImage *img, std::vector &ipts); 31 | 32 | //! Describe all features in the supplied vector 33 | void getDescriptors(bool bUpright = false); 34 | 35 | private: 36 | 37 | //---------------- Private Functions -----------------// 38 | 39 | //! Assign the current Ipoint an orientation 40 | void getOrientation(); 41 | 42 | //! Get the descriptor. See Agrawal ECCV 08 43 | void getDescriptor(bool bUpright = false); 44 | 45 | //! Calculate the value of the 2d gaussian at x,y 46 | inline float gaussian(int x, int y, float sig); 47 | inline float gaussian(float x, float y, float sig); 48 | 49 | //! Calculate Haar wavelet responses in x and y directions 50 | inline float haarX(int row, int column, int size); 51 | inline float haarY(int row, int column, int size); 52 | 53 | //! 
Get the angle from the +ve x-axis of the vector given by [X Y] 54 | float getAngle(float X, float Y); 55 | 56 | 57 | //---------------- Private Variables -----------------// 58 | 59 | //! Integral image where Ipoints have been detected 60 | IplImage *img; 61 | 62 | //! Ipoints vector 63 | IpVec &ipts; 64 | 65 | //! Index of current Ipoint in the vector 66 | int index; 67 | }; 68 | 69 | 70 | #endif 71 | -------------------------------------------------------------------------------- /src/mainwindow.h: -------------------------------------------------------------------------------- 1 | #ifndef MAINWINDOW_H 2 | #define MAINWINDOW_H 3 | 4 | #include 5 | //#include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include "exports.h" 12 | #include "settings.h" 13 | #include "about.h" 14 | #include "dialogsnapframes.h" 15 | #include "processthread.h" 16 | 17 | namespace Ui { 18 | class MainWindow; 19 | 20 | } 21 | 22 | class MainWindow : public QMainWindow 23 | { 24 | Q_OBJECT 25 | 26 | public: 27 | explicit MainWindow(QWidget *parent = 0); 28 | ~MainWindow(); 29 | 30 | protected slots: 31 | void toggleFlip(bool); 32 | void toggleFlipHor(bool); 33 | void toggleFlipVer(bool); 34 | void toggleDetectHuman(bool); 35 | void toggleEdge(bool); 36 | void toggleEdgeInvert(bool); 37 | void togglePlayPause(); 38 | void setThresh(int); 39 | void setThresh(QString); 40 | //void setTimeline(int); 41 | void toggleCaptureFrame(); 42 | void toggleCaptureFrames(); 43 | 44 | void toggleSurf(bool); 45 | void toggleHog(bool); 46 | 47 | void loadFile(); 48 | 49 | void displayResult(int, Mat img); 50 | void initEffectAndGui(); //this is the button connect that must be set after thread be set 51 | void finishProcess(bool); //Condition where the video has finished processed 52 | 53 | //Open Directory 54 | void openDirExport(); 55 | void openDirSnap(); 56 | 57 | // void setTimeline(int); 58 | 59 | signals: 60 | void displayCurProgress(int, int); 61 | void setCurPos(int); 62 
| 63 | 64 | private: 65 | Ui::MainWindow *ui; 66 | // QTimer *_timer; 67 | cv::VideoCapture *capture; 68 | 69 | Exports *exports; 70 | About *aboutUs; 71 | Settings *settings; 72 | dialogSnapFrames *dialogSnaps; 73 | processThread *mThread; 74 | 75 | int curFrame; //keep track of the current frame 76 | int totalFrame; 77 | bool captureFrame; //if the button capture trigger 78 | bool captureFrames; //if the button capture all trigger 79 | // bool play; // state for play or pause 80 | void saveToFolder(Mat &img); 81 | void snapAllFrames(Mat &img); 82 | void setInitialProp(); 83 | bool hasVideo; 84 | 85 | 86 | QString folderPath; //folder for snap all frames 87 | }; 88 | 89 | 90 | 91 | #endif // MAINWINDOW_H 92 | -------------------------------------------------------------------------------- /src/surf/ipoint.h: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | #ifndef IPOINT_H 13 | #define IPOINT_H 14 | 15 | #include 16 | #include 17 | 18 | //------------------------------------------------------- 19 | 20 | class Ipoint; // Pre-declaration 21 | typedef std::vector IpVec; 22 | typedef std::vector > IpPairVec; 23 | 24 | //------------------------------------------------------- 25 | 26 | //! Ipoint operations 27 | void getMatches(IpVec &ipts1, IpVec &ipts2, IpPairVec &matches); 28 | int translateCorners(IpPairVec &matches, const CvPoint src_corners[4], CvPoint dst_corners[4]); 29 | 30 | //------------------------------------------------------- 31 | 32 | class Ipoint { 33 | 34 | public: 35 | 36 | //! 
Destructor 37 | ~Ipoint() {} 38 | 39 | //! Constructor 40 | Ipoint() : orientation(0) {} 41 | 42 | //! Gets the distance in descriptor space between Ipoints 43 | float operator-(const Ipoint &rhs) 44 | { 45 | float sum=0.f; 46 | for(int i=0; i < 64; ++i) 47 | sum += (this->descriptor[i] - rhs.descriptor[i])*(this->descriptor[i] - rhs.descriptor[i]); 48 | return sqrt(sum); 49 | } 50 | 51 | //! Coordinates of the detected interest point 52 | float x, y; 53 | 54 | //! Detected scale 55 | float scale; 56 | 57 | //! Orientation measured anti-clockwise from +ve x-axis 58 | float orientation; 59 | 60 | //! Sign of laplacian for fast matching purposes 61 | int laplacian; 62 | 63 | //! Vector of descriptor components 64 | float descriptor[64]; 65 | 66 | //! Placeholds for point motion (can be used for frame to frame motion analysis) 67 | float dx, dy; 68 | 69 | //! Used to store cluster index 70 | int clusterIndex; 71 | }; 72 | 73 | //------------------------------------------------------- 74 | 75 | 76 | #endif 77 | -------------------------------------------------------------------------------- /src/surf/utils.h: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | #ifndef UTILS_H 13 | #define UTILS_H 14 | 15 | //#include 16 | //#include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include "ipoint.h" 22 | 23 | #include 24 | 25 | 26 | //! Display error message and terminate program 27 | void error(const char *msg); 28 | 29 | //! 
Show the provided image and wait for keypress 30 | void showImage(const IplImage *img); 31 | 32 | //! Show the provided image in titled window and wait for keypress 33 | void showImage(char *title,const IplImage *img); 34 | 35 | // Convert image to single channel 32F 36 | IplImage* getGray(const IplImage *img); 37 | 38 | //! Draw a single feature on the image 39 | void drawIpoint(IplImage *img, Ipoint &ipt, int tailSize = 0); 40 | 41 | //! Draw all the Ipoints in the provided vector 42 | void drawIpoints(IplImage *img, std::vector &ipts, int tailSize = 0); 43 | 44 | //! Draw descriptor windows around Ipoints in the provided vector 45 | void drawWindows(IplImage *img, std::vector &ipts); 46 | 47 | // Draw the FPS figure on the image (requires at least 2 calls) 48 | void drawFPS(IplImage *img); 49 | 50 | //! Draw a Point at feature location 51 | void drawPoint(IplImage *img, Ipoint &ipt); 52 | 53 | //! Draw a Point at all features 54 | void drawPoints(IplImage *img, std::vector &ipts); 55 | 56 | //! Save the SURF features to file 57 | void saveSurf(char *filename, std::vector &ipts); 58 | 59 | //! Load the SURF features from file 60 | void loadSurf(char *filename, std::vector &ipts); 61 | 62 | //! 
Round float to nearest integer 63 | inline int fRound(float flt) 64 | { 65 | return (int) floor(flt+0.5f); 66 | } 67 | 68 | #endif 69 | -------------------------------------------------------------------------------- /CHANGELOG: -------------------------------------------------------------------------------- 1 | Changelog 2 | 3 | [30/12/2011] 4 | * Can't reBrowse after the first video finish played fixed remove this->mthread->destroy in loadVideo mainwindow.cpp 5 | * Can't continue playing video when video were pause fixed add checking pause condition before emit finishProcess signal in processThread 6 | * Can't play again the video fixed add checking pause condition false and cur frame equal to totalframe on button clicked 7 | * Problem when user move the video slider open function set(CV_CAP_PROP_POS_FRAMES) from OpenCV is still problem. We plan to deprecate this feature. 8 | * Can't replay if user cancel to load new video file fixed move the assignment curFrame to inside the checking scope in mainWindow 9 | * Can't capture all frames when video is finish played fixed add condition in toggleCaptureFrames for video that was finish display 10 | * Can't rebrowse when the video still playing fixed manytimes its ok, sometimes it problem. 
dont know the solution 11 | 12 | [31/12/2011] 13 | * Can't re export fixed delete a line and reorder the function in reBrowse 14 | * When finish export, it show 3 pop up message fixed initialize var move in export 15 | * Press capture screen button while pause/stop, will delay the capture untill the video is play fixed prevent user from capture while video is play / pause 16 | * Error when user click button play without load video first fixed disabled button play on program load 17 | * Error when user click button capture screen without load video first fixed disabled button capture screen on program load 18 | * After install, program can't access folder program files to create output directory fixed change output folder to Users Document 19 | * Can't reexport the same videos in list fixed add condition if cur frame is not 0 in timertick inside exports 20 | * Capture all frames is not precise. Stop before 100% fixed add update label to totalframe in finishprocess in mainwindow 21 | 22 | [2/1/2012] 23 | * Error display progress label when capture frames, when frame to skip larger than 1 fixed discard the maximumVal of progressbar to totalframe, and set the val of progressbar to totalframe when finish process 24 | * Error when user click export when there is no video loaded fixed check the video list if 0 when button export is press and inform the user 25 | 26 | [3/1/2012] 27 | * Change output filetype from txt to dat fixed change the filetype in processThread class 28 | 29 | [4/1/2012] 30 | * Edit the about us design interface fixed Edit class about.cpp -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Human Detection and Tracking in Video 2 | ====== 3 | 4 | Description 5 | ----------- 6 | This software is use for Human Detection and Tracking in video. 
Our main purpose for this Final Year Project is to make our lecturer's research easier. Hope we will get an 'A'. :D 7 | 8 | > **Updated:** Yeah, our lecturer really gave us an A after seeing the result. Yippee. :D 9 | 10 | Features 11 | -------- 12 | - Detect and track human gait in video 13 | - Detect any movement in video 14 | - Export the result video 15 | - Add some extra effects like mirror 16 | - Multithreaded video processing 17 | - And other stuff. (well, I don't quite remember, 2 years have passed already. ~_^ ) 18 | 19 | Location 20 | ----------- 21 | Faculty of Information Technology, Multimedia University, Cyberjaya, Malaysia. It has since been renamed the Faculty of Computing and Informatics (FCI). 22 | 23 | Installation 24 | ----------- 25 | 26 | 1. Download this source code. 27 | 2. Install MinGW 0.4 Alpha 1, and add its binary folder to the Windows Environment path. 28 | 3. Install Nokia Qt Creator and add the folder /Desktop/Qt/4.7.4/mingw/bin to the Windows Environment path. 29 | 4. Compile OpenCV 2.3 and add its binary folder to the Windows Environment path. 30 | 5. Open the Qt project, compile and run. 31 | 6. Enjoy 32 | 33 | Compatibility Issues 34 | ----------- 35 | 36 | - The detection is precise for black-and-white video only, and does not always work on color video. 37 | - Navigation is sometimes not correct. 38 | 39 | Minimum Requirement 40 | ----------- 41 | These are the versions I was using when developing this project: 42 | - Nokia Qt 4.7 (Up till 4.8.
Not working with v5.*) 43 | - OpenCV 2.3 44 | - MinGW Compiler Version 0.4 Alpha 1 45 | 46 | Screenshot 47 | ---------- 48 | ![2](https://lh5.googleusercontent.com/-lRkPpy93poU/U6-OkG81Y3I/AAAAAAAABg8/5Fdny1jgyg0/s2048/2.jpg) ![3](https://lh3.googleusercontent.com/-O-M84p20-6I/U6-OkKKPWYI/AAAAAAAABg0/6kgGzd8BgbM/s2048/3.jpg) ![4](https://lh5.googleusercontent.com/-JVfrXSIBn9Y/U6-Ok0yWVfI/AAAAAAAABhQ/dOnBaq5boGQ/s2048/4.jpg) ![9](https://lh4.googleusercontent.com/-mPDbBvj60kM/U6-OmErU1WI/AAAAAAAABhc/pMvaHE68n_I/s2048/9.jpg) 49 | ![6](https://lh5.googleusercontent.com/-d5TmfwJdmec/U6-OlMyxL0I/AAAAAAAABhM/nKUCk8ZXlWw/s2048/6.jpg) 50 | ![5](https://lh6.googleusercontent.com/-ElQsVUnRI7U/U6-Ok7jT2zI/AAAAAAAABhI/_x2eD53FV4M/s2048/5.jpg) 51 | 52 | Authors 53 | ----------- 54 | - Developer: Faiz Shukri 55 | - Version: 1.0 56 | - Release Date: 02/01/2012 57 | 58 | Licensing/Legal 59 | ----------- 60 | > This mod is released under the GNU General Public License. 61 | -------------------------------------------------------------------------------- /src/surf/responselayer.h: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. 
* 9 | * * 10 | ************************************************************/ 11 | 12 | #include 13 | 14 | //#define RL_DEBUG // un-comment to test response layer 15 | 16 | class ResponseLayer 17 | { 18 | public: 19 | 20 | int width, height, step, filter; 21 | float *responses; 22 | unsigned char *laplacian; 23 | 24 | ResponseLayer(int width, int height, int step, int filter) 25 | { 26 | assert(width > 0 && height > 0); 27 | 28 | this->width = width; 29 | this->height = height; 30 | this->step = step; 31 | this->filter = filter; 32 | 33 | responses = new float[width*height]; 34 | laplacian = new unsigned char[width*height]; 35 | 36 | memset(responses,0,sizeof(float)*width*height); 37 | memset(laplacian,0,sizeof(unsigned char)*width*height); 38 | } 39 | 40 | ~ResponseLayer() 41 | { 42 | if (responses) delete [] responses; 43 | if (laplacian) delete [] laplacian; 44 | } 45 | 46 | inline unsigned char getLaplacian(unsigned int row, unsigned int column) 47 | { 48 | return laplacian[row * width + column]; 49 | } 50 | 51 | inline unsigned char getLaplacian(unsigned int row, unsigned int column, ResponseLayer *src) 52 | { 53 | int scale = this->width / src->width; 54 | 55 | #ifdef RL_DEBUG 56 | assert(src->getCoords(row, column) == this->getCoords(scale * row, scale * column)); 57 | #endif 58 | 59 | return laplacian[(scale * row) * width + (scale * column)]; 60 | } 61 | 62 | inline float getResponse(unsigned int row, unsigned int column) 63 | { 64 | return responses[row * width + column]; 65 | } 66 | 67 | inline float getResponse(unsigned int row, unsigned int column, ResponseLayer *src) 68 | { 69 | int scale = this->width / src->width; 70 | 71 | #ifdef RL_DEBUG 72 | assert(src->getCoords(row, column) == this->getCoords(scale * row, scale * column)); 73 | #endif 74 | 75 | return responses[(scale * row) * width + (scale * column)]; 76 | } 77 | 78 | #ifdef RL_DEBUG 79 | std::vector> coords; 80 | 81 | inline std::pair getCoords(unsigned int row, unsigned int column) 82 | { 83 
| return coords[row * width + column]; 84 | } 85 | 86 | inline std::pair getCoords(unsigned int row, unsigned int column, ResponseLayer *src) 87 | { 88 | int scale = this->width / src->width; 89 | return coords[(scale * row) * width + (scale * column)]; 90 | } 91 | #endif 92 | }; 93 | -------------------------------------------------------------------------------- /src/surf/ipoint.cpp: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | //#include 13 | //#include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | 20 | #include "ipoint.h" 21 | 22 | //! Populate IpPairVec with matched ipts 23 | void getMatches(IpVec &ipts1, IpVec &ipts2, IpPairVec &matches) 24 | { 25 | float dist, d1, d2; 26 | Ipoint *match; 27 | 28 | matches.clear(); 29 | 30 | for(unsigned int i = 0; i < ipts1.size(); i++) 31 | { 32 | d1 = d2 = FLT_MAX; 33 | 34 | for(unsigned int j = 0; j < ipts2.size(); j++) 35 | { 36 | dist = ipts1[i] - ipts2[j]; 37 | 38 | if(distx - ipts1[i].x; 55 | ipts1[i].dy = match->y - ipts1[i].y; 56 | matches.push_back(std::make_pair(ipts1[i], *match)); 57 | } 58 | } 59 | } 60 | 61 | // 62 | // This function uses homography with CV_RANSAC (OpenCV 1.1) 63 | // Won't compile on most linux distributions 64 | // 65 | 66 | //------------------------------------------------------- 67 | 68 | //! 
Find homography between matched points and translate src_corners to dst_corners 69 | int translateCorners(IpPairVec &matches, const CvPoint src_corners[4], CvPoint dst_corners[4]) 70 | { 71 | #ifndef LINUX 72 | double h[9]; 73 | CvMat _h = cvMat(3, 3, CV_64F, h); 74 | std::vector pt1, pt2; 75 | CvMat _pt1, _pt2; 76 | 77 | int n = (int)matches.size(); 78 | if( n < 4 ) return 0; 79 | 80 | // Set vectors to correct size 81 | pt1.resize(n); 82 | pt2.resize(n); 83 | 84 | // Copy Ipoints from match vector into cvPoint vectors 85 | for(int i = 0; i < n; i++ ) 86 | { 87 | pt1[i] = cvPoint2D32f(matches[i].second.x, matches[i].second.y); 88 | pt2[i] = cvPoint2D32f(matches[i].first.x, matches[i].first.y); 89 | } 90 | _pt1 = cvMat(1, n, CV_32FC2, &pt1[0] ); 91 | _pt2 = cvMat(1, n, CV_32FC2, &pt2[0] ); 92 | 93 | // Find the homography (transformation) between the two sets of points 94 | if(!cvFindHomography(&_pt1, &_pt2, &_h, CV_RANSAC, 5)) // this line requires opencv 1.1 95 | return 0; 96 | 97 | // Translate src_corners to dst_corners using homography 98 | for(int i = 0; i < 4; i++ ) 99 | { 100 | double x = src_corners[i].x, y = src_corners[i].y; 101 | double Z = 1./(h[6]*x + h[7]*y + h[8]); 102 | double X = (h[0]*x + h[1]*y + h[2])*Z; 103 | double Y = (h[3]*x + h[4]*y + h[5])*Z; 104 | dst_corners[i] = cvPoint(cvRound(X), cvRound(Y)); 105 | } 106 | #endif 107 | return 1; 108 | } 109 | 110 | 111 | -------------------------------------------------------------------------------- /src/surf/surflib.h: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. 
* 9 | * * 10 | ************************************************************/ 11 | 12 | #ifndef SURFLIB_H 13 | #define SURFLIB_H 14 | 15 | //#include 16 | //#include 17 | #include 18 | #include 19 | 20 | #include "integral.h" 21 | #include "fasthessian.h" 22 | #include "surf.h" 23 | #include "ipoint.h" 24 | #include "utils.h" 25 | 26 | 27 | //! Library function builds vector of described interest points 28 | inline void surfDetDes(IplImage *img, /* image to find Ipoints in */ 29 | std::vector &ipts, /* reference to vector of Ipoints */ 30 | bool upright = false, /* run in rotation invariant mode? */ 31 | int octaves = OCTAVES, /* number of octaves to calculate */ 32 | int intervals = INTERVALS, /* number of intervals per octave */ 33 | int init_sample = INIT_SAMPLE, /* initial sampling step */ 34 | float thres = THRES /* blob response threshold */) 35 | { 36 | // Create integral-image representation of the image 37 | IplImage *int_img = Integral(img); 38 | 39 | // Create Fast Hessian Object 40 | FastHessian fh(int_img, ipts, octaves, intervals, init_sample, thres); 41 | 42 | // Extract interest points and store in vector ipts 43 | fh.getIpoints(); 44 | 45 | // Create Surf Descriptor Object 46 | Surf des(int_img, ipts); 47 | 48 | // Extract the descriptors for the ipts 49 | des.getDescriptors(upright); 50 | 51 | // Deallocate the integral image 52 | cvReleaseImage(&int_img); 53 | } 54 | 55 | 56 | //! 
Library function builds vector of interest points 57 | inline void surfDet(IplImage *img, /* image to find Ipoints in */ 58 | std::vector &ipts, /* reference to vector of Ipoints */ 59 | int octaves = OCTAVES, /* number of octaves to calculate */ 60 | int intervals = INTERVALS, /* number of intervals per octave */ 61 | int init_sample = INIT_SAMPLE, /* initial sampling step */ 62 | float thres = THRES /* blob response threshold */) 63 | { 64 | // Create integral image representation of the image 65 | IplImage *int_img = Integral(img); 66 | 67 | // Create Fast Hessian Object 68 | FastHessian fh(int_img, ipts, octaves, intervals, init_sample, thres); 69 | 70 | // Extract interest points and store in vector ipts 71 | fh.getIpoints(); 72 | 73 | // Deallocate the integral image 74 | cvReleaseImage(&int_img); 75 | } 76 | 77 | 78 | 79 | 80 | //! Library function describes interest points in vector 81 | inline void surfDes(IplImage *img, /* image to find Ipoints in */ 82 | std::vector &ipts, /* reference to vector of Ipoints */ 83 | bool upright = false) /* run in rotation invariant mode? */ 84 | { 85 | // Create integral image representation of the image 86 | IplImage *int_img = Integral(img); 87 | 88 | // Create Surf Descriptor Object 89 | Surf des(int_img, ipts); 90 | 91 | // Extract the descriptors for the ipts 92 | des.getDescriptors(upright); 93 | 94 | // Deallocate the integral image 95 | cvReleaseImage(&int_img); 96 | } 97 | 98 | 99 | #endif 100 | -------------------------------------------------------------------------------- /src/surf/fasthessian.h: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. 
Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | #ifndef FASTHESSIAN_H 13 | #define FASTHESSIAN_H 14 | 15 | //#include 16 | //#include 17 | #include 18 | #include 19 | #include 20 | #include "ipoint.h" 21 | 22 | #include 23 | 24 | class ResponseLayer; 25 | static const int OCTAVES = 5; 26 | static const int INTERVALS = 4; 27 | static const float THRES = 0.0004f; 28 | static const int INIT_SAMPLE = 2; 29 | 30 | 31 | class FastHessian { 32 | 33 | public: 34 | 35 | //! Constructor without image 36 | FastHessian(std::vector &ipts, 37 | const int octaves = OCTAVES, 38 | const int intervals = INTERVALS, 39 | const int init_sample = INIT_SAMPLE, 40 | const float thres = THRES); 41 | 42 | //! Constructor with image 43 | FastHessian(IplImage *img, 44 | std::vector &ipts, 45 | const int octaves = OCTAVES, 46 | const int intervals = INTERVALS, 47 | const int init_sample = INIT_SAMPLE, 48 | const float thres = THRES); 49 | 50 | //! Destructor 51 | ~FastHessian(); 52 | 53 | //! Save the parameters 54 | void saveParameters(const int octaves, 55 | const int intervals, 56 | const int init_sample, 57 | const float thres); 58 | 59 | //! Set or re-set the integral image source 60 | void setIntImage(IplImage *img); 61 | 62 | //! Find the image features and write into vector of features 63 | void getIpoints(); 64 | 65 | private: 66 | 67 | //---------------- Private Functions -----------------// 68 | 69 | //! Build map of DoH responses 70 | void buildResponseMap(); 71 | 72 | //! Calculate DoH responses for supplied layer 73 | void buildResponseLayer(ResponseLayer *r); 74 | 75 | //! 3x3x3 Extrema test 76 | int isExtremum(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b); 77 | 78 | //! 
Interpolation functions - adapted from Lowe's SIFT implementation 79 | void interpolateExtremum(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b); 80 | void interpolateStep(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b, 81 | double* xi, double* xr, double* xc ); 82 | CvMat* deriv3D(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b); 83 | CvMat* hessian3D(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b); 84 | 85 | //---------------- Private Variables -----------------// 86 | 87 | //! Pointer to the integral Image, and its attributes 88 | IplImage *img; 89 | int i_width, i_height; 90 | 91 | //! Reference to vector of features passed from outside 92 | std::vector &ipts; 93 | 94 | //! Response stack of determinant of hessian values 95 | std::vector responseMap; 96 | 97 | //! Number of Octaves 98 | int octaves; 99 | 100 | //! Number of Intervals per octave 101 | int intervals; 102 | 103 | //! Initial sampling step for Ipoint detection 104 | int init_sample; 105 | 106 | //! Threshold value for blob resonses 107 | float thresh; 108 | }; 109 | 110 | 111 | #endif 112 | -------------------------------------------------------------------------------- /src/surf/kmeans.h: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. 
* 9 | * * 10 | ************************************************************/ 11 | 12 | #include "ipoint.h" 13 | 14 | #include 15 | #include 16 | #include 17 | 18 | //----------------------------------------------------------- 19 | // Kmeans clustering class (under development) 20 | // - Can be used to cluster points based on their location. 21 | // - Create Kmeans object and call Run with IpVec. 22 | // - Planned improvements include clustering based on motion 23 | // and descriptor components. 24 | //----------------------------------------------------------- 25 | 26 | class Kmeans { 27 | 28 | public: 29 | 30 | //! Destructor 31 | ~Kmeans() {} 32 | 33 | //! Constructor 34 | Kmeans() {} 35 | 36 | //! Do it all! 37 | void Run(IpVec *ipts, int clusters, bool init = false); 38 | 39 | //! Set the ipts to be used 40 | void SetIpoints(IpVec *ipts); 41 | 42 | //! Randomly distribute 'n' clusters 43 | void InitRandomClusters(int n); 44 | 45 | //! Assign Ipoints to clusters 46 | bool AssignToClusters(); 47 | 48 | //! Calculate new cluster centers 49 | void RepositionClusters(); 50 | 51 | //! Function to measure the distance between 2 ipoints 52 | float Distance(Ipoint &ip1, Ipoint &ip2); 53 | 54 | //! Vector stores ipoints for this run 55 | IpVec *ipts; 56 | 57 | //! 
Vector stores cluster centers 58 | IpVec clusters; 59 | 60 | }; 61 | 62 | //------------------------------------------------------- 63 | 64 | void Kmeans::Run(IpVec *ipts, int clusters, bool init) 65 | { 66 | if (!ipts->size()) return; 67 | 68 | SetIpoints(ipts); 69 | 70 | if (init) InitRandomClusters(clusters); 71 | 72 | while (AssignToClusters()); 73 | { 74 | RepositionClusters(); 75 | } 76 | } 77 | 78 | //------------------------------------------------------- 79 | 80 | void Kmeans::SetIpoints(IpVec *ipts) 81 | { 82 | this->ipts = ipts; 83 | } 84 | 85 | //------------------------------------------------------- 86 | 87 | void Kmeans::InitRandomClusters(int n) 88 | { 89 | // clear the cluster vector 90 | clusters.clear(); 91 | 92 | // Seed the random number generator 93 | srand((int)time(NULL)); 94 | 95 | // add 'n' random ipoints to clusters list as initial centers 96 | for (int i = 0; i < n; ++i) 97 | { 98 | clusters.push_back(ipts->at(rand() % ipts->size())); 99 | } 100 | } 101 | 102 | //------------------------------------------------------- 103 | 104 | bool Kmeans::AssignToClusters() 105 | { 106 | bool Updated = false; 107 | 108 | // loop over all Ipoints and assign each to closest cluster 109 | for (unsigned int i = 0; i < ipts->size(); ++i) 110 | { 111 | float bestDist = FLT_MAX; 112 | int oldIndex = ipts->at(i).clusterIndex; 113 | 114 | for (unsigned int j = 0; j < clusters.size(); ++j) 115 | { 116 | float currentDist = Distance(ipts->at(i), clusters[j]); 117 | if (currentDist < bestDist) 118 | { 119 | bestDist = currentDist; 120 | ipts->at(i).clusterIndex = j; 121 | } 122 | } 123 | 124 | // determine whether point has changed cluster 125 | if (ipts->at(i).clusterIndex != oldIndex) Updated = true; 126 | } 127 | 128 | return Updated; 129 | } 130 | 131 | //------------------------------------------------------- 132 | 133 | void Kmeans::RepositionClusters() 134 | { 135 | float x, y, dx, dy, count; 136 | 137 | for (unsigned int i = 0; i < clusters.size(); ++i) 
138 | { 139 | x = y = dx = dy = 0; 140 | count = 1; 141 | 142 | for (unsigned int j = 0; j < ipts->size(); ++j) 143 | { 144 | if (ipts->at(j).clusterIndex == i) 145 | { 146 | Ipoint ip = ipts->at(j); 147 | x += ip.x; 148 | y += ip.y; 149 | dx += ip.dx; 150 | dy += ip.dy; 151 | ++count; 152 | } 153 | } 154 | 155 | clusters[i].x = x/count; 156 | clusters[i].y = y/count; 157 | clusters[i].dx = dx/count; 158 | clusters[i].dy = dy/count; 159 | } 160 | } 161 | 162 | //------------------------------------------------------- 163 | 164 | float Kmeans::Distance(Ipoint &ip1, Ipoint &ip2) 165 | { 166 | return sqrt(pow(ip1.x - ip2.x, 2) 167 | + pow(ip1.y - ip2.y, 2) 168 | /*+ pow(ip1.dx - ip2.dx, 2) 169 | + pow(ip1.dy - ip2.dy, 2)*/); 170 | } 171 | 172 | //------------------------------------------------------- 173 | -------------------------------------------------------------------------------- /forms/about.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | About 4 | 5 | 6 | 7 | 0 8 | 0 9 | 478 10 | 368 11 | 12 | 13 | 14 | 15 | 0 16 | 0 17 | 18 | 19 | 20 | About Us 21 | 22 | 23 | false 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | ../images/about.png 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | true 42 | 43 | 44 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN" "http://www.w3.org/TR/REC-html40/strict.dtd"> 45 | <html><head><meta name="qrichtext" content="1" /><style type="text/css"> 46 | p, li { white-space: pre-wrap; } 47 | </style></head><body style=" font-family:'MS Shell Dlg 2'; font-size:8.25pt; font-weight:400; font-style:normal;"> 48 | <p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:8pt;">Tracking of Human Gait in Video is a project mainly created forour Final Year Project Multimedia University 2011 / 2012. This program is a Video and Image Processing based. 
It was developed under Nokia Qt with OpenCV Framework.</span></p> 49 | <p align="justify" style="-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt;"></p> 50 | <p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:8pt;">It has the functionality to detect and track the motion in videos. It also can extract the interest point to external files. User can also capture a frame or all frames from a selected video.</span></p> 51 | <p align="justify" style="-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt;"></p> 52 | <p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:8pt;">Millions thanks for our lecturer, who help us lots. We hope this project is useful to our lecturer who gonna use it in her research. Any problem or error, can tell me directly. Hopefully not big problem.</span></p> 53 | <p align="justify" style="-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt;"></p> 54 | <p align="justify" style=" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;"><span style=" font-size:8pt;">Finally, we also want to thanks our parents, and all of our friends who help us in develop this software. 
And to MMU also, because provide an 'excellent' internet quality for us to make research to the internet.</span></p></body></html> 55 | 56 | 57 | 58 | 59 | 60 | 61 | Developer: Faiz Shukri, Afiq | Version: 1.0 | Released: 4/1/2012 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | Qt::Horizontal 71 | 72 | 73 | 74 | 40 75 | 20 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | Close 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | -------------------------------------------------------------------------------- /src/settings.cpp: -------------------------------------------------------------------------------- 1 | #include "settings.h" 2 | #include "ui_settings.h" 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | Settings* Settings::instance = NULL; 9 | 10 | Settings* Settings::getInstance(QWidget *parent){ 11 | if(instance == NULL){ 12 | instance = new Settings(parent); 13 | } 14 | return instance; 15 | } 16 | 17 | Settings::Settings(QWidget *parent) : 18 | QDialog(parent), 19 | ui(new Ui::Settings) 20 | { 21 | 22 | ui->setupUi(this); 23 | ui->txtVideoFrame->setValidator(new QIntValidator(0, 50 , this)); 24 | ui->txtVideoFrame->setFixedWidth(30); 25 | 26 | ui->txtFrameToSkip->setValidator(new QIntValidator(1, 9999, this)); 27 | ui->txtFrameToSkip->setFixedWidth(40); 28 | 29 | //Set the settings filename 30 | 31 | QSettings env(QSettings::UserScope, "Microsoft", "Windows"); 32 | env.beginGroup("CurrentVersion/Explorer/Shell Folders"); 33 | this->FileName = env.value("Personal").toString() + "/Human Detect and Track/settings.ini"; 34 | 35 | 36 | // myProgSetting = new Settings(this); 37 | //Fill the combobox video codec 38 | 39 | connect(ui->btnCancel, SIGNAL(clicked()), this, SLOT(close())); 40 | connect(ui->btnSave, SIGNAL(clicked(bool)), this, SLOT(saveSetting(bool))); 41 | connect(ui->btnReset, SIGNAL(clicked()), this, SLOT(resetDefault())); 42 | connect(ui->btnBrowseSnap, SIGNAL(clicked()), this, SLOT(browseSnap())); 43 | connect(ui->btnBrowseExport, 
SIGNAL(clicked()), this, SLOT(browseExport())); 44 | 45 | //Load the application setting file 46 | if(QFile::exists(this->FileName)){ 47 | initUserSetting(); 48 | } else { 49 | initDefault(); 50 | } 51 | } 52 | 53 | Settings::~Settings() 54 | { 55 | delete ui; 56 | } 57 | 58 | QString Settings::getSnapPath(){ 59 | QSettings settings(this->FileName, QSettings::IniFormat, this); 60 | return settings.value("snap_path").toString(); 61 | } 62 | 63 | QString Settings::getExportPath(){ 64 | QSettings settings(this->FileName, QSettings::IniFormat, this); 65 | return settings.value("export_path").toString(); 66 | } 67 | 68 | 69 | int Settings::getVideoFrame(){ 70 | QSettings settings(this->FileName, QSettings::IniFormat, this); 71 | return settings.value("video_frame").toInt(); 72 | } 73 | 74 | int Settings::getFrameToSkip(){ 75 | QSettings settings(this->FileName, QSettings::IniFormat, this); 76 | return settings.value("frame_to_skip").toInt(); 77 | } 78 | 79 | 80 | void Settings::initDefault(){ 81 | 82 | QSettings env(QSettings::UserScope, "Microsoft", "Windows"); 83 | env.beginGroup("CurrentVersion/Explorer/Shell Folders"); 84 | 85 | 86 | QString orig = env.value("Personal").toString() + "/Human Detect and Track"; 87 | QDir path(orig); 88 | path = QDir(orig); 89 | if(!path.exists(orig)){ 90 | path.mkdir(orig); 91 | } 92 | 93 | QString dir = orig + "/exports"; 94 | ui->txtExport->setText(path.toNativeSeparators(dir)); 95 | path = QDir(dir); 96 | if(!path.exists(dir)){ 97 | path.mkdir(dir); 98 | } 99 | 100 | dir = orig + "/snapshots"; 101 | ui->txtSnap->setText(path.toNativeSeparators(dir)); 102 | path = QDir(dir); 103 | if(!path.exists(dir)){ 104 | path.mkdir(dir); 105 | } 106 | 107 | ui->txtVideoFrame->setText("30"); 108 | ui->txtFrameToSkip->setText("30"); 109 | 110 | this->saveSetting(true); 111 | 112 | } 113 | 114 | void Settings::initUserSetting(){ 115 | ui->txtExport->setText(this->getExportPath()); 116 | ui->txtSnap->setText(this->getSnapPath()); 117 | 
ui->txtVideoFrame->setText(QString::number(this->getVideoFrame())); 118 | ui->txtFrameToSkip->setText(QString::number(this->getFrameToSkip())); 119 | } 120 | 121 | void Settings::saveSetting(bool click){ 122 | 123 | QDir path(ui->txtExport->text()); 124 | if(!path.exists()) 125 | path.mkdir(ui->txtExport->text()); 126 | 127 | path = QDir(ui->txtSnap->text()); 128 | if(!path.exists()) 129 | path.mkdir(ui->txtSnap->text()); 130 | 131 | QSettings settings(this->FileName, QSettings::IniFormat, this); 132 | 133 | settings.setValue("video_frame", ui->txtVideoFrame->text().toInt()); 134 | settings.setValue("export_path", ui->txtExport->text()); 135 | settings.setValue("snap_path", ui->txtSnap->text()); 136 | settings.setValue("frame_to_skip", ui->txtFrameToSkip->text().toInt()); 137 | 138 | if(!click) 139 | QMessageBox::information(this, "Information saved", "Your settings have been saved!", QMessageBox::Ok); 140 | } 141 | 142 | void Settings::resetDefault(){ 143 | this->initDefault(); 144 | this->saveSetting(true); 145 | 146 | QMessageBox::information(this, "Information saved", "Your settings have been restored to default", QMessageBox::Ok); 147 | } 148 | 149 | 150 | void Settings::browseExport(){ 151 | QString path = QFileDialog::getExistingDirectory(this, "Select directory", QDir::currentPath()); 152 | if(!path.isEmpty()){ 153 | QDir directory(path); 154 | directory.toNativeSeparators(path); 155 | if(directory.exists()){ 156 | ui->txtExport->setText(path); 157 | } 158 | } 159 | } 160 | 161 | 162 | void Settings::browseSnap(){ 163 | QString path = QFileDialog::getExistingDirectory(this, "Select directory", QDir::currentPath()); 164 | if(!path.isEmpty()){ 165 | QDir directory(path); 166 | directory.toNativeSeparators(path); 167 | if(directory.exists()){ 168 | ui->txtSnap->setText(path); 169 | } 170 | } 171 | } 172 | -------------------------------------------------------------------------------- /src/processthread.cpp: 
-------------------------------------------------------------------------------- 1 | #include "processthread.h" 2 | #include 3 | 4 | processThread::processThread(QObject *parent, cv::VideoCapture *cap, bool writeVideo, int extractPoint, string fileName) : 5 | QThread(parent), 6 | flip(false), 7 | flipCode(0), 8 | edge(false), 9 | edgeInvert(false), 10 | edgeThresh(0), 11 | hog(false), 12 | surf(false), 13 | writer(0), 14 | isWrite(writeVideo), 15 | fps(0), 16 | stop(false), 17 | pause(false), 18 | pauseAt(1), 19 | cur(0), 20 | frameToSkip(0), 21 | count(0), 22 | exportPoints(extractPoint), 23 | fileName(fileName), 24 | move(false) 25 | { 26 | this->effect = new Effects(this); 27 | this->capture = cap; 28 | 29 | //If the user check the export point, then initialize the object points 30 | if(this->exportPoints != 0){ 31 | int totalFrame = this->capture->get(CV_CAP_PROP_FRAME_COUNT); 32 | this->points = new processPoints*[totalFrame]; 33 | 34 | //SURF 35 | if(this->exportPoints == 1){ 36 | qRegisterMetaType("IpVec"); 37 | //passing vector from effect to process point 38 | connect(this->effect, SIGNAL(vectorOfExtractPoint(IpVec)), this, SLOT(resendExtractPoint(IpVec))); 39 | } else if(this->exportPoints == 2){ //HOG 40 | qRegisterMetaType< vector >("vector"); 41 | //passing vector from effect to process point 42 | connect(this->effect, SIGNAL(vectorOfExtractPoint(vector)), this, SLOT(resendExtractPoint(vector))); 43 | } 44 | } 45 | 46 | //If the user check the need the video writer, then initialize the object writer 47 | if(this->isWrite){ 48 | Mat img; 49 | this->capture->operator >>( img ); 50 | if(img.data){ 51 | this->writer = new cv::VideoWriter(this->fileName, CV_FOURCC('D','I','V','X'),this->capture->get(CV_CAP_PROP_FPS),img.size(),true); 52 | } 53 | } 54 | 55 | //If user move the slider in main window 56 | //connect(parent, SIGNAL(setCurPos(int)), this, SLOT(setValueJ(int))); 57 | } 58 | 59 | //Destructor 60 | processThread::~processThread(){ 61 | if(isWrite){ 
62 | delete writer; 63 | writer = 0; 64 | } 65 | 66 | if(exportPoints){ 67 | for(int i = 0; i < this->capture->get(CV_CAP_PROP_FRAME_COUNT); i++){ 68 | delete points[i]; 69 | points[i] = 0; 70 | } 71 | delete points; 72 | points = 0; 73 | } 74 | this->capture->release(); 75 | this->destroy(); 76 | } 77 | 78 | 79 | void processThread::run(){ 80 | 81 | Mat img; 82 | int totalFrame = this->capture->get(CV_CAP_PROP_FRAME_COUNT); 83 | 84 | for(int j = pauseAt; j <= totalFrame; j++){ 85 | 86 | if(this->stop) 87 | break; 88 | 89 | if(this->pause){ 90 | this->pauseAt = j; 91 | break; 92 | } 93 | 94 | if(move){ 95 | move = false; 96 | j = this->cur; 97 | this->capture->set(CV_CAP_PROP_POS_FRAMES, j); 98 | } 99 | 100 | emit currentFrame(j); 101 | this->capture->operator >>( img ); 102 | 103 | if(!img.data){ break; } 104 | else { 105 | 106 | //============ START Effect ============// 107 | 108 | 109 | //Check Human Detect state 110 | if(this->surf){ 111 | effect->SurfD(img); 112 | //if(this->savePoint) effect->savePoint = 0; 113 | } else if(this->hog){ 114 | effect->HogD(img); 115 | //if(this->savePoint) effect->savePoint = 1; 116 | } 117 | 118 | //Check Edge Detection state 119 | if(this->edge){ 120 | effect->Edge(img, img, edgeThresh, edgeInvert); 121 | } 122 | 123 | //Check Flip state 124 | if(this->flip){ 125 | effect->Flip(img,img,flipCode); 126 | } 127 | 128 | //============ END Effect ============// 129 | 130 | if(isWrite){ //if writer needed 131 | writer->operator <<( img ); 132 | } else { //prevent emit if writer is on 133 | emit currentFrame(j, img); 134 | msleep((unsigned long)Settings::getInstance(0)->getVideoFrame()); 135 | } 136 | 137 | for(int i = 0; i < frameToSkip - 1; i++){ 138 | this->capture->operator >>( img ); 139 | j++; 140 | if(!img.data){ stop = true; break; } 141 | } 142 | } 143 | } 144 | 145 | if(!this->pause) 146 | this->stop = true; 147 | 148 | this->frameToSkip = 0; //reset back frame to skip to 0 149 | 150 | if(isWrite){ //if writer needed 151 | 
this->writer->~VideoWriter(); 152 | } 153 | 154 | if(!this->pause) 155 | emit finishProcess(true); 156 | 157 | } 158 | 159 | void processThread::destroy() 160 | { 161 | this->stop = true; 162 | this->terminate(); 163 | } 164 | 165 | void processThread::setValueJ(int val){ 166 | move = true; 167 | this->cur = val; 168 | } 169 | 170 | void processThread::resendExtractPoint(IpVec val) 171 | { 172 | //SURF 173 | if(this->exportPoints == 1){ 174 | 175 | //Create directory if not exist 176 | QString extractPointDir = QString::fromStdString(this->fileName) + "_SURF\\"; 177 | QDir path(extractPointDir); 178 | if(!path.exists(extractPointDir)) 179 | path.mkdir(extractPointDir); 180 | 181 | //Process the points 182 | this->points[this->count] = new processPoints(this, val, extractPointDir + QString::number(this->count) + ".dat"); 183 | 184 | } 185 | ++count; 186 | } 187 | 188 | void processThread::resendExtractPoint(vector val){ 189 | //HOG 190 | if(this->exportPoints == 2){ 191 | 192 | //Create directory if not exist 193 | QString extractPointDir = QString::fromStdString(this->fileName) + "_HOG\\"; 194 | QDir path(extractPointDir); 195 | if(!path.exists(extractPointDir)) 196 | path.mkdir(extractPointDir); 197 | 198 | //Process the points 199 | this->points[this->count] = new processPoints(this, val, extractPointDir + QString::number(this->count) + ".txt"); 200 | 201 | } 202 | ++count; 203 | } 204 | -------------------------------------------------------------------------------- /forms/settings.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | Settings 4 | 5 | 6 | 7 | 0 8 | 0 9 | 483 10 | 259 11 | 12 | 13 | 14 | Global Settings 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 120 24 | 0 25 | 26 | 27 | 28 | Snapshot Path 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 0 37 | 0 38 | 39 | 40 | 41 | 42 | 250 43 | 0 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | Browse 52 | 53 | 54 | 55 | 56 | 57 | 58 | Qt::Horizontal 59 | 60 | 61 | 62 | 40 63 | 20 64 
| 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 120 77 | 0 78 | 79 | 80 | 81 | Frames To Skip 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 0 90 | 0 91 | 92 | 93 | 94 | 95 | 20 96 | 0 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | Qt::Horizontal 105 | 106 | 107 | 108 | 40 109 | 20 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 120 123 | 0 124 | 125 | 126 | 127 | Exports Path 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 0 136 | 0 137 | 138 | 139 | 140 | 141 | 250 142 | 0 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | Browse 151 | 152 | 153 | 154 | 155 | 156 | 157 | Qt::Horizontal 158 | 159 | 160 | 161 | 40 162 | 20 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 120 176 | 0 177 | 178 | 179 | 180 | Video Frame 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 0 189 | 0 190 | 191 | 192 | 193 | 194 | 20 195 | 0 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | Qt::Horizontal 204 | 205 | 206 | 207 | 50 208 | 20 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | Qt::Vertical 219 | 220 | 221 | 222 | 20 223 | 40 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | Qt::Horizontal 234 | 235 | 236 | 237 | 40 238 | 20 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | Save 247 | 248 | 249 | 250 | 251 | 252 | 253 | Close 254 | 255 | 256 | 257 | 258 | 259 | 260 | Reset to default 261 | 262 | 263 | 264 | 265 | 266 | 267 | 268 | 269 | 270 | 271 | -------------------------------------------------------------------------------- /forms/exports.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | Exports 4 | 5 | 6 | 7 | 0 8 | 0 9 | 599 10 | 549 11 | 12 | 13 | 14 | Export Videos 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | Browse 28 | 29 | 30 | 31 | 32 | 33 | 34 | Qt::Horizontal 35 | 36 | 37 | 38 | 39 | 40 | 41 | Properties 42 | 43 | 44 | Qt::AlignCenter 45 | 46 | 47 | 48 | 49 | 50 | 51 | Qt::Horizontal 52 | 53 | 54 | 55 | 56 | 57 | 58 | Flip 
59 | 60 | 61 | 62 | 63 | 64 | 65 | Orientation 66 | 67 | 68 | 69 | 70 | 71 | Horizontal 72 | 73 | 74 | 75 | 76 | 77 | 78 | Vertical 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | Edge Detection 89 | 90 | 91 | 92 | 93 | 94 | 95 | Threshold 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 100 104 | 105 | 106 | Qt::Horizontal 107 | 108 | 109 | 110 | 111 | 112 | 113 | 0 114 | 115 | 116 | Qt::AlignCenter 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | Invert 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | Human Detection 136 | 137 | 138 | 139 | 140 | 141 | 142 | Algorithm 143 | 144 | 145 | 146 | 147 | 148 | Hog Descriptor 149 | 150 | 151 | 152 | 153 | 154 | 155 | Surf Descriptor 156 | 157 | 158 | 159 | 160 | 161 | 162 | Extract Points 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | Qt::Vertical 173 | 174 | 175 | 176 | 20 177 | 40 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 90 195 | 0 196 | 197 | 198 | 199 | 200 | 0 201 | 0 202 | 203 | 204 | 205 | Total Progress 206 | 207 | 208 | false 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 250 217 | 0 218 | 219 | 220 | 221 | 24 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | Qt::Horizontal 231 | 232 | 233 | 234 | 40 235 | 20 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | Export 244 | 245 | 246 | 247 | 248 | 249 | 250 | Close 251 | 252 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | -------------------------------------------------------------------------------- /src/exports.cpp: -------------------------------------------------------------------------------- 1 | #include "exports.h" 2 | #include "ui_exports.h" 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | 9 | using namespace cv; 10 | 11 | //static int i = 0, j = 0; 12 | 13 | Exports::Exports(QWidget *parent) : 14 | QDialog(parent), 15 | ui(new Ui::Exports), 16 | totalProgress(0), 17 | curFrame(0), 18 | count(0) 19 | 20 | { 21 | //this->_timer = new 
QTimer(this); 22 | ui->setupUi(this); 23 | 24 | showProgressBar(false); 25 | 26 | //Hide the group by at initialization 27 | ui->groupEdge->hide(); 28 | ui->groupFlip->hide(); 29 | ui->groupHumanDetect->hide(); 30 | 31 | ui->txtThresh->setValidator(new QIntValidator(0,100, this)); 32 | ui->txtThresh->setFixedWidth(30); 33 | 34 | if(!ui->radioHog->isChecked() || !ui->radioSurf->isChecked()) ui->checkPoints->setEnabled(false); 35 | else ui->checkPoints->setEnabled(true); 36 | 37 | connect(ui->btnCancel, SIGNAL(clicked()), this, SLOT(close())); 38 | connect(ui->btnBrowse, SIGNAL(clicked()), this, SLOT(browseVideos())); 39 | connect(ui->btnExport, SIGNAL(clicked()), this, SLOT(timerTick())); 40 | //connect(this->_timer, SIGNAL(timeout()), this, SLOT(timerTick())); 41 | connect(ui->checkEdge, SIGNAL(toggled(bool)), this, SLOT(toggleEdge(bool))); 42 | connect(ui->checkFlip, SIGNAL(toggled(bool)), this, SLOT(toggleFlip(bool))); 43 | connect(ui->checkHuman, SIGNAL(toggled(bool)), this, SLOT(toggleHumanDetec(bool))); 44 | connect(ui->radioSurf, SIGNAL(toggled(bool)), this, SLOT(toggleSurf(bool))); 45 | connect(ui->radioHog, SIGNAL(toggled(bool)), this, SLOT(toggleHog(bool))); 46 | connect(ui->slideThresh, SIGNAL(valueChanged(int)), this, SLOT(setThresh(int))); 47 | connect(ui->txtThresh, SIGNAL(textChanged(QString)), this, SLOT(setThresh(QString))); 48 | 49 | this->effect = new Effects(); 50 | this->setting = Settings::getInstance(this); 51 | 52 | } 53 | 54 | void Exports::setupThread(QThread &thread){ 55 | connect(&thread, SIGNAL(started()),this, SLOT(timerTick())); 56 | } 57 | 58 | void Exports::setCurFrame(int cur){ 59 | 60 | this->curFrame++; 61 | 62 | ui->progressBarTotal->setValue(this->curFrame); 63 | if(ui->progressBarTotal->value() == this->totalProgress){ 64 | ui->btnBrowse->setDisabled(false); 65 | int ret = QMessageBox::information(this, "Export Result", "Your videos have been processed successfully.", QMessageBox::Open | QMessageBox::Ok); 66 | 67 | if(ret == 
QMessageBox::Open){ 68 | QProcess::startDetached("explorer " + this->setting->getExportPath()); 69 | } 70 | } 71 | 72 | } 73 | 74 | Exports::~Exports() 75 | { 76 | delete ui; 77 | this->reBrowse(); 78 | delete capture; 79 | } 80 | 81 | void Exports::toggleEdge(bool state){ 82 | if(state) ui->groupEdge->show(); 83 | else ui->groupEdge->hide(); 84 | } 85 | 86 | void Exports::toggleFlip(bool state){ 87 | if(state) ui->groupFlip->show(); 88 | else ui->groupFlip->hide(); 89 | } 90 | 91 | void Exports::toggleHumanDetec(bool state){ 92 | if(state) ui->groupHumanDetect->show(); 93 | else ui->groupHumanDetect->hide(); 94 | } 95 | 96 | void Exports::toggleHog(bool state) 97 | { 98 | if(state && !ui->checkPoints->isEnabled()){ 99 | ui->checkPoints->setEnabled(true); 100 | } 101 | } 102 | 103 | void Exports::toggleSurf(bool state) 104 | { 105 | if(state && !ui->checkPoints->isEnabled()){ 106 | ui->checkPoints->setEnabled(true); 107 | } 108 | } 109 | 110 | 111 | void Exports::reBrowse(){ 112 | for(int i = 0; i < this->count; i++){ 113 | //capture[i]->release(); 114 | thread[i]->~processThread(); 115 | delete capture[i]; 116 | capture[i] = 0; 117 | thread[i] = 0; 118 | } 119 | // writer->~VideoWriter(); 120 | this->count = 0; 121 | this->totalProgress = 0; 122 | this->curFrame = 0; 123 | this->fileName.clear(); 124 | ui->listVideos->clear(); 125 | ui->progressBarTotal->setValue(0); 126 | } 127 | 128 | void Exports::browseVideos(){ //if user click button browse 129 | 130 | QSettings env(QSettings::UserScope, "Microsoft", "Windows"); 131 | env.beginGroup("CurrentVersion/Explorer/Shell Folders"); 132 | QStringList path = QFileDialog::getOpenFileNames(this,"Select video files to export", env.value("Personal").toString(), "Video Files (*.avi)"); 133 | 134 | if(!path.isEmpty()){ 135 | 136 | if(this->count != 0) //if the list already have item, reset it 137 | this->reBrowse(); 138 | 139 | ui->listVideos->addItems(path); 140 | 141 | this->count = path.count(); 142 | this->capture = new 
cv::VideoCapture*[this->count]; 143 | this->thread = new processThread*[this->count]; 144 | 145 | for(int i = 0; i < this->count; i++){ 146 | this->capture[i] = new cv::VideoCapture(path.at(i).toStdString()); 147 | this->totalProgress += capture[i]->get(CV_CAP_PROP_FRAME_COUNT); 148 | QFileInfo pathInfo( path.at(i) ); 149 | this->fileName.append(pathInfo.fileName()); 150 | } 151 | 152 | ui->progressBarTotal->setMaximum(this->totalProgress); 153 | ui->progressBarTotal->setValue(0); 154 | 155 | //ui->checkFlip->setText(this->fileName.at(this->count - 1)); 156 | 157 | //ui->checkEdge->setText(QString::number(this->count)); 158 | } 159 | } 160 | 161 | void Exports::showProgressBar(bool state){ 162 | if(state) { 163 | ui->lblTotalProgress->show(); 164 | ui->progressBarTotal->show(); 165 | } else { 166 | ui->lblTotalProgress->hide(); 167 | ui->progressBarTotal->hide(); 168 | } 169 | 170 | } 171 | 172 | void Exports::setThresh(int val){ 173 | ui->txtThresh->setText(QString::number(val)); 174 | } 175 | 176 | void Exports::setThresh(QString val){ 177 | ui->slideThresh->setValue(val.toInt()); 178 | } 179 | 180 | 181 | void Exports::timerTick(){ //if user start export by press button export 182 | 183 | if(ui->listVideos->count() != 0){ 184 | showProgressBar(true); 185 | ui->btnBrowse->setDisabled(true); 186 | //If user export the same video as before without re load new video 187 | if(this->curFrame !=0){ 188 | this->curFrame = 0; 189 | ui->progressBarTotal->setValue(0); 190 | for(int i = 0; i < this->count; i++){ 191 | this->capture[i]->set(CV_CAP_PROP_POS_FRAMES,0); 192 | } 193 | } 194 | 195 | cv::Mat img; 196 | this->curFrame += this->count; 197 | 198 | for(int i = 0; i < this->count; i++){ 199 | 200 | this->capture[i]->operator >>( img ); 201 | if(!img.data) break; 202 | 203 | string path = this->setting->getExportPath().toStdString() + "\\" + this->fileName.at(i).toStdString(); 204 | // this->writer = new cv::VideoWriter(path, CV_FOURCC('D','I','V','X'), 
this->capture[i]->get(CV_CAP_PROP_FPS), img.size(), true); 205 | 206 | //if checkbox extract interest point is checked 207 | int extractPoint = 0; 208 | if(ui->radioSurf->isChecked() && ui->checkPoints->isChecked()) extractPoint = 1; 209 | else if(ui->radioHog->isChecked() && ui->checkPoints->isChecked()) extractPoint = 2; 210 | 211 | //Initialize thread object 212 | this->thread[i] = new processThread(this,this->capture[i], true, extractPoint, path); 213 | this->thread[i]->pauseAt = 1; 214 | 215 | 216 | connect(this->thread[i], SIGNAL(currentFrame(int)), this, SLOT(setCurFrame(int))); 217 | setInitialProp(this->thread[i]); 218 | 219 | 220 | QThreadPool::globalInstance()->start(new MyTask(thread[i])); 221 | //this->thread[i]->start(); 222 | 223 | } 224 | } else { 225 | QMessageBox::information(this, "Export Video", "You can't export when there's no video in the list.", QMessageBox::Ok); 226 | } 227 | } 228 | 229 | 230 | void Exports::setInitialProp(processThread *athread){ 231 | 232 | if(ui->checkHuman->isChecked() && ui->radioSurf->isChecked()){ 233 | //effect->SurfD(img); 234 | athread->surf = true; 235 | } else if(ui->checkHuman->isChecked() && ui->radioHog->isChecked()){ 236 | // effect->HogD(img); 237 | athread->hog = true; 238 | } 239 | 240 | //Check Edge Detection state 241 | if(ui->checkEdge->isChecked() && ui->checkEdgeInvert->isChecked()){ 242 | //effect->Edge(img, img, ui->slideThresh->value(), true); 243 | athread->edge = true; athread->edgeInvert = true; athread->edgeThresh = ui->slideThresh->value(); 244 | } else if(ui->checkEdge->isChecked()){ 245 | //effect->Edge(img, img, ui->slideThresh->value(), false); 246 | athread->edge = true; athread->edgeInvert = true; 247 | } 248 | 249 | //Check Flip state 250 | if(ui->checkFlip->isChecked() && ui->checkFlipHor->isChecked() && ui->checkFlipVer->isChecked()){ 251 | //effect->Flip(img, img, -1); //Both 252 | athread->flip = true; athread->flipCode = -1; 253 | } else if(ui->checkFlip->isChecked() && 
ui->checkFlipVer->isChecked()){ 254 | //effect->Flip(img, img, 0); //Ver 255 | athread->flip = true; athread->flipCode = 0; 256 | } else if(ui->checkFlip->isChecked() && ui->checkFlipHor->isChecked()){ 257 | //effect->Flip(img, img, 1); //Hor 258 | athread->flip = true; athread->flipCode = 1; 259 | } 260 | 261 | } 262 | 263 | -------------------------------------------------------------------------------- /src/surf/utils.cpp: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | //#include 13 | #include 14 | #include 15 | #include 16 | #include 17 | 18 | #include "utils.h" 19 | 20 | using namespace std; 21 | 22 | //------------------------------------------------------- 23 | 24 | static const int NCOLOURS = 8; 25 | static const CvScalar COLOURS [] = {cvScalar(255,0,0), cvScalar(0,255,0), 26 | cvScalar(0,0,255), cvScalar(255,255,0), 27 | cvScalar(0,255,255), cvScalar(255,0,255), 28 | cvScalar(255,255,255), cvScalar(0,0,0)}; 29 | 30 | //------------------------------------------------------- 31 | 32 | //! Display error message and terminate program 33 | void error(const char *msg) 34 | { 35 | cout << "\nError: " << msg; 36 | getchar(); 37 | exit(0); 38 | } 39 | 40 | //------------------------------------------------------- 41 | 42 | //! Show the provided image and wait for keypress 43 | void showImage(const IplImage *img) 44 | { 45 | cvNamedWindow("Surf", CV_WINDOW_AUTOSIZE); 46 | cvShowImage("Surf", img); 47 | cvWaitKey(0); 48 | } 49 | 50 | //------------------------------------------------------- 51 | 52 | //! 
Show the provided image in titled window and wait for keypress 53 | void showImage(char *title,const IplImage *img) 54 | { 55 | cvNamedWindow(title, CV_WINDOW_AUTOSIZE); 56 | cvShowImage(title, img); 57 | cvWaitKey(0); 58 | } 59 | 60 | //------------------------------------------------------- 61 | 62 | // Convert image to single channel 32F 63 | IplImage *getGray(const IplImage *img) 64 | { 65 | // Check we have been supplied a non-null img pointer 66 | if (!img) error("Unable to create grayscale image. No image supplied"); 67 | 68 | IplImage* gray8, * gray32; 69 | 70 | gray32 = cvCreateImage( cvGetSize(img), IPL_DEPTH_32F, 1 ); 71 | 72 | if( img->nChannels == 1 ) 73 | gray8 = (IplImage *) cvClone( img ); 74 | else { 75 | gray8 = cvCreateImage( cvGetSize(img), IPL_DEPTH_8U, 1 ); 76 | cvCvtColor( img, gray8, CV_BGR2GRAY ); 77 | } 78 | 79 | cvConvertScale( gray8, gray32, 1.0 / 255.0, 0 ); 80 | 81 | cvReleaseImage( &gray8 ); 82 | return gray32; 83 | } 84 | 85 | //------------------------------------------------------- 86 | 87 | //! 
Draw all the Ipoints in the provided vector 88 | void drawIpoints(IplImage *img, vector &ipts, int tailSize) 89 | { 90 | Ipoint *ipt; 91 | float s, o; 92 | int r1, c1, r2, c2, lap; 93 | 94 | for(unsigned int i = 0; i < ipts.size(); i++) 95 | { 96 | ipt = &ipts.at(i); 97 | s = (2.5f * ipt->scale); 98 | o = ipt->orientation; 99 | lap = ipt->laplacian; 100 | r1 = fRound(ipt->y); 101 | c1 = fRound(ipt->x); 102 | c2 = fRound(s * cos(o)) + c1; 103 | r2 = fRound(s * sin(o)) + r1; 104 | 105 | if (o) // Green line indicates orientation 106 | cvLine(img, cvPoint(c1, r1), cvPoint(c2, r2), cvScalar(0, 255, 0)); 107 | else // Green dot if using upright version 108 | cvCircle(img, cvPoint(c1,r1), 1, cvScalar(0, 255, 0),-1); 109 | 110 | if (lap == 1) 111 | { // Blue circles indicate dark blobs on light backgrounds 112 | cvCircle(img, cvPoint(c1,r1), fRound(s), cvScalar(255, 0, 0),1); 113 | } 114 | else if (lap == 0) 115 | { // Red circles indicate light blobs on dark backgrounds 116 | cvCircle(img, cvPoint(c1,r1), fRound(s), cvScalar(0, 0, 255),1); 117 | } 118 | else if (lap == 9) 119 | { // Red circles indicate light blobs on dark backgrounds 120 | cvCircle(img, cvPoint(c1,r1), fRound(s), cvScalar(0, 255, 0),1); 121 | } 122 | 123 | // Draw motion from ipoint dx and dy 124 | if (tailSize) 125 | { 126 | cvLine(img, cvPoint(c1,r1), 127 | cvPoint(int(c1+ipt->dx*tailSize), int(r1+ipt->dy*tailSize)), 128 | cvScalar(255,255,255), 1); 129 | } 130 | } 131 | } 132 | 133 | //------------------------------------------------------- 134 | 135 | //! 
Draw a single feature on the image 136 | void drawIpoint(IplImage *img, Ipoint &ipt, int tailSize) 137 | { 138 | float s, o; 139 | int r1, c1, r2, c2, lap; 140 | 141 | s = (2.5f * ipt.scale); 142 | o = ipt.orientation; 143 | lap = ipt.laplacian; 144 | r1 = fRound(ipt.y); 145 | c1 = fRound(ipt.x); 146 | 147 | // Green line indicates orientation 148 | if (o) // Green line indicates orientation 149 | { 150 | c2 = fRound(s * cos(o)) + c1; 151 | r2 = fRound(s * sin(o)) + r1; 152 | cvLine(img, cvPoint(c1, r1), cvPoint(c2, r2), cvScalar(0, 255, 0)); 153 | } 154 | else // Green dot if using upright version 155 | cvCircle(img, cvPoint(c1,r1), 1, cvScalar(0, 255, 0),-1); 156 | 157 | if (lap >= 0) 158 | { // Blue circles indicate light blobs on dark backgrounds 159 | cvCircle(img, cvPoint(c1,r1), fRound(s), cvScalar(255, 0, 0),1); 160 | } 161 | else 162 | { // Red circles indicate light blobs on dark backgrounds 163 | cvCircle(img, cvPoint(c1,r1), fRound(s), cvScalar(0, 0, 255),1); 164 | } 165 | 166 | // Draw motion from ipoint dx and dy 167 | if (tailSize) 168 | { 169 | cvLine(img, cvPoint(c1,r1), 170 | cvPoint(int(c1+ipt.dx*tailSize), int(r1+ipt.dy*tailSize)), 171 | cvScalar(255,255,255), 1); 172 | } 173 | } 174 | 175 | //------------------------------------------------------- 176 | 177 | //! Draw a single feature on the image 178 | void drawPoint(IplImage *img, Ipoint &ipt) 179 | { 180 | float s, o; 181 | int r1, c1; 182 | 183 | s = 3; 184 | o = ipt.orientation; 185 | r1 = fRound(ipt.y); 186 | c1 = fRound(ipt.x); 187 | 188 | cvCircle(img, cvPoint(c1,r1), fRound(s), COLOURS[ipt.clusterIndex%NCOLOURS], -1); 189 | cvCircle(img, cvPoint(c1,r1), fRound(s+1), COLOURS[(ipt.clusterIndex+1)%NCOLOURS], 2); 190 | 191 | } 192 | 193 | //------------------------------------------------------- 194 | 195 | //! 
Draw a single feature on the image 196 | void drawPoints(IplImage *img, vector &ipts) 197 | { 198 | float s, o; 199 | int r1, c1; 200 | 201 | for(unsigned int i = 0; i < ipts.size(); i++) 202 | { 203 | s = 3; 204 | o = ipts[i].orientation; 205 | r1 = fRound(ipts[i].y); 206 | c1 = fRound(ipts[i].x); 207 | 208 | cvCircle(img, cvPoint(c1,r1), fRound(s), COLOURS[ipts[i].clusterIndex%NCOLOURS], -1); 209 | cvCircle(img, cvPoint(c1,r1), fRound(s+1), COLOURS[(ipts[i].clusterIndex+1)%NCOLOURS], 2); 210 | } 211 | } 212 | 213 | //------------------------------------------------------- 214 | 215 | //! Draw descriptor windows around Ipoints in the provided vector 216 | void drawWindows(IplImage *img, vector &ipts) 217 | { 218 | Ipoint *ipt; 219 | float s, o, cd, sd; 220 | int x, y; 221 | CvPoint2D32f src[4]; 222 | 223 | for(unsigned int i = 0; i < ipts.size(); i++) 224 | { 225 | ipt = &ipts.at(i); 226 | s = (10 * ipt->scale); 227 | o = ipt->orientation; 228 | y = fRound(ipt->y); 229 | x = fRound(ipt->x); 230 | cd = cos(o); 231 | sd = sin(o); 232 | 233 | src[0].x=sd*s+cd*s+x; src[0].y=-cd*s+sd*s+y; 234 | src[1].x=sd*s+cd*-s+x; src[1].y=-cd*s+sd*-s+y; 235 | src[2].x=sd*-s+cd*-s+x; src[2].y=-cd*-s+sd*-s+y; 236 | src[3].x=sd*-s+cd*s+x; src[3].y=-cd*-s+sd*s+y; 237 | 238 | if (o) // Draw orientation line 239 | cvLine(img, cvPoint(x, y), 240 | cvPoint(fRound(s*cd + x), fRound(s*sd + y)), cvScalar(0, 255, 0),1); 241 | else // Green dot if using upright version 242 | cvCircle(img, cvPoint(x,y), 1, cvScalar(0, 255, 0),-1); 243 | 244 | 245 | // Draw box window around the point 246 | cvLine(img, cvPoint(fRound(src[0].x), fRound(src[0].y)), 247 | cvPoint(fRound(src[1].x), fRound(src[1].y)), cvScalar(255, 0, 0),2); 248 | cvLine(img, cvPoint(fRound(src[1].x), fRound(src[1].y)), 249 | cvPoint(fRound(src[2].x), fRound(src[2].y)), cvScalar(255, 0, 0),2); 250 | cvLine(img, cvPoint(fRound(src[2].x), fRound(src[2].y)), 251 | cvPoint(fRound(src[3].x), fRound(src[3].y)), cvScalar(255, 0, 0),2); 252 | 
cvLine(img, cvPoint(fRound(src[3].x), fRound(src[3].y)), 253 | cvPoint(fRound(src[0].x), fRound(src[0].y)), cvScalar(255, 0, 0),2); 254 | 255 | } 256 | } 257 | 258 | //------------------------------------------------------- 259 | 260 | // Draw the FPS figure on the image (requires at least 2 calls) 261 | void drawFPS(IplImage *img) 262 | { 263 | static int counter = 0; 264 | static clock_t t; 265 | static float fps; 266 | char fps_text[20]; 267 | CvFont font; 268 | cvInitFont(&font,CV_FONT_HERSHEY_SIMPLEX|CV_FONT_ITALIC, 1.0,1.0,0,2); 269 | 270 | // Add fps figure (every 10 frames) 271 | if (counter > 10) 272 | { 273 | fps = (10.0f/(clock()-t) * CLOCKS_PER_SEC); 274 | t=clock(); 275 | counter = 0; 276 | } 277 | 278 | // Increment counter 279 | ++counter; 280 | 281 | // Get the figure as a string 282 | sprintf(fps_text,"FPS: %.2f",fps); 283 | 284 | // Draw the string on the image 285 | cvPutText (img,fps_text,cvPoint(10,25), &font, cvScalar(255,255,0)); 286 | } 287 | 288 | //------------------------------------------------------- 289 | 290 | //! Save the SURF features to file 291 | void saveSurf(char *filename, vector &ipts) 292 | { 293 | ofstream outfile(filename); 294 | 295 | // output descriptor length 296 | outfile << "64\n"; 297 | outfile << ipts.size() << "\n"; 298 | 299 | // create output line as: scale x y des 300 | for(unsigned int i=0; i < ipts.size(); i++) 301 | { 302 | outfile << ipts.at(i).scale << " "; 303 | outfile << ipts.at(i).x << " "; 304 | outfile << ipts.at(i).y << " "; 305 | outfile << ipts.at(i).orientation << " "; 306 | outfile << ipts.at(i).laplacian << " "; 307 | outfile << ipts.at(i).scale << " "; 308 | for(int j=0; j<64; j++) 309 | outfile << ipts.at(i).descriptor[j] << " "; 310 | 311 | outfile << "\n"; 312 | } 313 | 314 | outfile.close(); 315 | } 316 | 317 | //------------------------------------------------------- 318 | 319 | //! 
Load the SURF features from file 320 | void loadSurf(char *filename, vector &ipts) 321 | { 322 | int descriptorLength, count; 323 | ifstream infile(filename); 324 | 325 | // clear the ipts vector first 326 | ipts.clear(); 327 | 328 | // read descriptor length/number of ipoints 329 | infile >> descriptorLength; 330 | infile >> count; 331 | 332 | // for each ipoint 333 | for (int i = 0; i < count; i++) 334 | { 335 | Ipoint ipt; 336 | 337 | // read vals 338 | infile >> ipt.scale; 339 | infile >> ipt.x; 340 | infile >> ipt.y; 341 | infile >> ipt.orientation; 342 | infile >> ipt.laplacian; 343 | infile >> ipt.scale; 344 | 345 | // read descriptor components 346 | for (int j = 0; j < 64; j++) 347 | infile >> ipt.descriptor[j]; 348 | 349 | ipts.push_back(ipt); 350 | 351 | } 352 | } 353 | 354 | //------------------------------------------------------- 355 | 356 | //------------------------------------------------------- 357 | -------------------------------------------------------------------------------- /src/surf/surf.cpp: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | #include "utils.h" 13 | 14 | #include "surf.h" 15 | 16 | //------------------------------------------------------- 17 | //! 
SURF priors (these need not be done at runtime) 18 | const float pi = 3.14159f; 19 | 20 | const double gauss25 [7][7] = { 21 | 0.02350693969273,0.01849121369071,0.01239503121241,0.00708015417522,0.00344628101733,0.00142945847484,0.00050524879060, 22 | 0.02169964028389,0.01706954162243,0.01144205592615,0.00653580605408,0.00318131834134,0.00131955648461,0.00046640341759, 23 | 0.01706954162243,0.01342737701584,0.00900063997939,0.00514124713667,0.00250251364222,0.00103799989504,0.00036688592278, 24 | 0.01144205592615,0.00900063997939,0.00603330940534,0.00344628101733,0.00167748505986,0.00069579213743,0.00024593098864, 25 | 0.00653580605408,0.00514124713667,0.00344628101733,0.00196854695367,0.00095819467066,0.00039744277546,0.00014047800980, 26 | 0.00318131834134,0.00250251364222,0.00167748505986,0.00095819467066,0.00046640341759,0.00019345616757,0.00006837798818, 27 | 0.00131955648461,0.00103799989504,0.00069579213743,0.00039744277546,0.00019345616757,0.00008024231247,0.00002836202103 28 | }; 29 | 30 | const double gauss33 [11][11] = { 31 | 0.014614763,0.013958917,0.012162744,0.00966788,0.00701053,0.004637568,0.002798657,0.001540738,0.000773799,0.000354525,0.000148179, 32 | 0.013958917,0.013332502,0.011616933,0.009234028,0.006695928,0.004429455,0.002673066,0.001471597,0.000739074,0.000338616,0.000141529, 33 | 0.012162744,0.011616933,0.010122116,0.008045833,0.005834325,0.003859491,0.002329107,0.001282238,0.000643973,0.000295044,0.000123318, 34 | 0.00966788,0.009234028,0.008045833,0.006395444,0.004637568,0.003067819,0.001851353,0.001019221,0.000511879,0.000234524,9.80224E-05, 35 | 0.00701053,0.006695928,0.005834325,0.004637568,0.003362869,0.002224587,0.001342483,0.000739074,0.000371182,0.000170062,7.10796E-05, 36 | 0.004637568,0.004429455,0.003859491,0.003067819,0.002224587,0.001471597,0.000888072,0.000488908,0.000245542,0.000112498,4.70202E-05, 37 | 
0.002798657,0.002673066,0.002329107,0.001851353,0.001342483,0.000888072,0.000535929,0.000295044,0.000148179,6.78899E-05,2.83755E-05, 38 | 0.001540738,0.001471597,0.001282238,0.001019221,0.000739074,0.000488908,0.000295044,0.00016243,8.15765E-05,3.73753E-05,1.56215E-05, 39 | 0.000773799,0.000739074,0.000643973,0.000511879,0.000371182,0.000245542,0.000148179,8.15765E-05,4.09698E-05,1.87708E-05,7.84553E-06, 40 | 0.000354525,0.000338616,0.000295044,0.000234524,0.000170062,0.000112498,6.78899E-05,3.73753E-05,1.87708E-05,8.60008E-06,3.59452E-06, 41 | 0.000148179,0.000141529,0.000123318,9.80224E-05,7.10796E-05,4.70202E-05,2.83755E-05,1.56215E-05,7.84553E-06,3.59452E-06,1.50238E-06 42 | }; 43 | 44 | //------------------------------------------------------- 45 | 46 | //------------------------------------------------------- 47 | 48 | //! Constructor 49 | Surf::Surf(IplImage *img, IpVec &ipts) 50 | : ipts(ipts) 51 | { 52 | this->img = img; 53 | } 54 | 55 | //------------------------------------------------------- 56 | 57 | //! Describe all features in the supplied vector 58 | void Surf::getDescriptors(bool upright) 59 | { 60 | // Check there are Ipoints to be described 61 | if (!ipts.size()) return; 62 | 63 | // Get the size of the vector for fixed loop bounds 64 | int ipts_size = (int)ipts.size(); 65 | 66 | if (upright) 67 | { 68 | // U-SURF loop just gets descriptors 69 | for (int i = 0; i < ipts_size; ++i) 70 | { 71 | // Set the Ipoint to be described 72 | index = i; 73 | 74 | // Extract upright (i.e. 
not rotation invariant) descriptors 75 | getDescriptor(true); 76 | } 77 | } 78 | else 79 | { 80 | // Main SURF-64 loop assigns orientations and gets descriptors 81 | for (int i = 0; i < ipts_size; ++i) 82 | { 83 | // Set the Ipoint to be described 84 | index = i; 85 | 86 | // Assign Orientations and extract rotation invariant descriptors 87 | getOrientation(); 88 | getDescriptor(false); 89 | } 90 | } 91 | } 92 | 93 | //------------------------------------------------------- 94 | 95 | //! Assign the supplied Ipoint an orientation 96 | void Surf::getOrientation() 97 | { 98 | Ipoint *ipt = &ipts[index]; 99 | float gauss = 0.f, scale = ipt->scale; 100 | const int s = fRound(scale), r = fRound(ipt->y), c = fRound(ipt->x); 101 | std::vector resX(109), resY(109), Ang(109); 102 | const int id[] = {6,5,4,3,2,1,0,1,2,3,4,5,6}; 103 | 104 | int idx = 0; 105 | // calculate haar responses for points within radius of 6*scale 106 | for(int i = -6; i <= 6; ++i) 107 | { 108 | for(int j = -6; j <= 6; ++j) 109 | { 110 | if(i*i + j*j < 36) 111 | { 112 | gauss = static_cast(gauss25[id[i+6]][id[j+6]]); 113 | resX[idx] = gauss * haarX(r+j*s, c+i*s, 4*s); 114 | resY[idx] = gauss * haarY(r+j*s, c+i*s, 4*s); 115 | Ang[idx] = getAngle(resX[idx], resY[idx]); 116 | ++idx; 117 | } 118 | } 119 | } 120 | 121 | // calculate the dominant direction 122 | float sumX=0.f, sumY=0.f; 123 | float max=0.f, orientation = 0.f; 124 | float ang1=0.f, ang2=0.f; 125 | 126 | // loop slides pi/3 window around feature point 127 | for(ang1 = 0; ang1 < 2*pi; ang1+=0.15f) { 128 | ang2 = ( ang1+pi/3.0f > 2*pi ? 
ang1-5.0f*pi/3.0f : ang1+pi/3.0f); 129 | sumX = sumY = 0.f; 130 | for(unsigned int k = 0; k < Ang.size(); ++k) 131 | { 132 | // get angle from the x-axis of the sample point 133 | const float & ang = Ang[k]; 134 | 135 | // determine whether the point is within the window 136 | if (ang1 < ang2 && ang1 < ang && ang < ang2) 137 | { 138 | sumX+=resX[k]; 139 | sumY+=resY[k]; 140 | } 141 | else if (ang2 < ang1 && 142 | ((ang > 0 && ang < ang2) || (ang > ang1 && ang < 2*pi) )) 143 | { 144 | sumX+=resX[k]; 145 | sumY+=resY[k]; 146 | } 147 | } 148 | 149 | // if the vector produced from this window is longer than all 150 | // previous vectors then this forms the new dominant direction 151 | if (sumX*sumX + sumY*sumY > max) 152 | { 153 | // store largest orientation 154 | max = sumX*sumX + sumY*sumY; 155 | orientation = getAngle(sumX, sumY); 156 | } 157 | } 158 | 159 | // assign orientation of the dominant response vector 160 | ipt->orientation = orientation; 161 | } 162 | 163 | //------------------------------------------------------- 164 | 165 | //! Get the modified descriptor. See Agrawal ECCV 08 166 | //! 
Modified descriptor contributed by Pablo Fernandez 167 | void Surf::getDescriptor(bool bUpright) 168 | { 169 | int y, x, sample_x, sample_y, count=0; 170 | int i = 0, ix = 0, j = 0, jx = 0, xs = 0, ys = 0; 171 | float scale, *desc, dx, dy, mdx, mdy, co, si; 172 | float gauss_s1 = 0.f, gauss_s2 = 0.f; 173 | float rx = 0.f, ry = 0.f, rrx = 0.f, rry = 0.f, len = 0.f; 174 | float cx = -0.5f, cy = 0.f; //Subregion centers for the 4x4 gaussian weighting 175 | 176 | Ipoint *ipt = &ipts[index]; 177 | scale = ipt->scale; 178 | x = fRound(ipt->x); 179 | y = fRound(ipt->y); 180 | desc = ipt->descriptor; 181 | 182 | if (bUpright) 183 | { 184 | co = 1; 185 | si = 0; 186 | } 187 | else 188 | { 189 | co = cos(ipt->orientation); 190 | si = sin(ipt->orientation); 191 | } 192 | 193 | i = -8; 194 | 195 | //Calculate descriptor for this interest point 196 | while(i < 12) 197 | { 198 | j = -8; 199 | i = i-4; 200 | 201 | cx += 1.f; 202 | cy = -0.5f; 203 | 204 | while(j < 12) 205 | { 206 | dx=dy=mdx=mdy=0.f; 207 | cy += 1.f; 208 | 209 | j = j - 4; 210 | 211 | ix = i + 5; 212 | jx = j + 5; 213 | 214 | xs = fRound(x + ( -jx*scale*si + ix*scale*co)); 215 | ys = fRound(y + ( jx*scale*co + ix*scale*si)); 216 | 217 | for (int k = i; k < i + 9; ++k) 218 | { 219 | for (int l = j; l < j + 9; ++l) 220 | { 221 | //Get coords of sample point on the rotated axis 222 | sample_x = fRound(x + (-l*scale*si + k*scale*co)); 223 | sample_y = fRound(y + ( l*scale*co + k*scale*si)); 224 | 225 | //Get the gaussian weighted x and y responses 226 | gauss_s1 = gaussian(xs-sample_x,ys-sample_y,2.5f*scale); 227 | rx = haarX(sample_y, sample_x, 2*fRound(scale)); 228 | ry = haarY(sample_y, sample_x, 2*fRound(scale)); 229 | 230 | //Get the gaussian weighted x and y responses on rotated axis 231 | rrx = gauss_s1*(-rx*si + ry*co); 232 | rry = gauss_s1*(rx*co + ry*si); 233 | 234 | dx += rrx; 235 | dy += rry; 236 | mdx += fabs(rrx); 237 | mdy += fabs(rry); 238 | 239 | } 240 | } 241 | 242 | //Add the values to the 
descriptor vector 243 | gauss_s2 = gaussian(cx-2.0f,cy-2.0f,1.5f); 244 | 245 | desc[count++] = dx*gauss_s2; 246 | desc[count++] = dy*gauss_s2; 247 | desc[count++] = mdx*gauss_s2; 248 | desc[count++] = mdy*gauss_s2; 249 | 250 | len += (dx*dx + dy*dy + mdx*mdx + mdy*mdy) * gauss_s2*gauss_s2; 251 | 252 | j += 9; 253 | } 254 | i += 9; 255 | } 256 | 257 | //Convert to Unit Vector 258 | len = sqrt(len); 259 | for(int i = 0; i < 64; ++i) 260 | desc[i] /= len; 261 | 262 | } 263 | 264 | 265 | //------------------------------------------------------- 266 | 267 | //! Calculate the value of the 2d gaussian at x,y 268 | inline float Surf::gaussian(int x, int y, float sig) 269 | { 270 | return (1.0f/(2.0f*pi*sig*sig)) * exp( -(x*x+y*y)/(2.0f*sig*sig)); 271 | } 272 | 273 | //------------------------------------------------------- 274 | 275 | //! Calculate the value of the 2d gaussian at x,y 276 | inline float Surf::gaussian(float x, float y, float sig) 277 | { 278 | return 1.0f/(2.0f*pi*sig*sig) * exp( -(x*x+y*y)/(2.0f*sig*sig)); 279 | } 280 | 281 | //------------------------------------------------------- 282 | 283 | //! Calculate Haar wavelet responses in x direction 284 | inline float Surf::haarX(int row, int column, int s) 285 | { 286 | return BoxIntegral(img, row-s/2, column, s, s/2) 287 | -1 * BoxIntegral(img, row-s/2, column-s/2, s, s/2); 288 | } 289 | 290 | //------------------------------------------------------- 291 | 292 | //! Calculate Haar wavelet responses in y direction 293 | inline float Surf::haarY(int row, int column, int s) 294 | { 295 | return BoxIntegral(img, row, column-s/2, s/2, s) 296 | -1 * BoxIntegral(img, row-s/2, column-s/2, s/2, s); 297 | } 298 | 299 | //------------------------------------------------------- 300 | 301 | //! 
Get the angle from the +ve x-axis of the vector given by (X Y) 302 | float Surf::getAngle(float X, float Y) 303 | { 304 | if(X > 0 && Y >= 0) 305 | return atan(Y/X); 306 | 307 | if(X < 0 && Y >= 0) 308 | return pi - atan(-Y/X); 309 | 310 | if(X < 0 && Y < 0) 311 | return pi + atan(Y/X); 312 | 313 | if(X > 0 && Y < 0) 314 | return 2*pi - atan(-Y/X); 315 | 316 | return 0; 317 | } -------------------------------------------------------------------------------- /src/surf/fasthessian.cpp: -------------------------------------------------------------------------------- 1 | /*********************************************************** 2 | * --- OpenSURF --- * 3 | * This library is distributed under the GNU GPL. Please * 4 | * use the contact form at http://www.chrisevansdev.com * 5 | * for more information. * 6 | * * 7 | * C. Evans, Research Into Robust Visual Features, * 8 | * MSc University of Bristol, 2008. * 9 | * * 10 | ************************************************************/ 11 | 12 | #include "integral.h" 13 | #include "ipoint.h" 14 | #include "utils.h" 15 | 16 | #include 17 | 18 | #include "responselayer.h" 19 | #include "fasthessian.h" 20 | 21 | 22 | 23 | using namespace std; 24 | 25 | //------------------------------------------------------- 26 | 27 | //! Constructor without image 28 | FastHessian::FastHessian(std::vector &ipts, 29 | const int octaves, const int intervals, const int init_sample, 30 | const float thresh) 31 | : ipts(ipts), i_width(0), i_height(0) 32 | { 33 | // Save parameter set 34 | saveParameters(octaves, intervals, init_sample, thresh); 35 | } 36 | 37 | //------------------------------------------------------- 38 | 39 | //! 
Constructor with image 40 | FastHessian::FastHessian(IplImage *img, std::vector &ipts, 41 | const int octaves, const int intervals, const int init_sample, 42 | const float thresh) 43 | : ipts(ipts), i_width(0), i_height(0) 44 | { 45 | // Save parameter set 46 | saveParameters(octaves, intervals, init_sample, thresh); 47 | 48 | // Set the current image 49 | setIntImage(img); 50 | } 51 | 52 | //------------------------------------------------------- 53 | 54 | FastHessian::~FastHessian() 55 | { 56 | for (unsigned int i = 0; i < responseMap.size(); ++i) 57 | { 58 | delete responseMap[i]; 59 | } 60 | } 61 | 62 | //------------------------------------------------------- 63 | 64 | //! Save the parameters 65 | void FastHessian::saveParameters(const int octaves, const int intervals, 66 | const int init_sample, const float thresh) 67 | { 68 | // Initialise variables with bounds-checked values 69 | this->octaves = 70 | (octaves > 0 && octaves <= 4 ? octaves : OCTAVES); 71 | this->intervals = 72 | (intervals > 0 && intervals <= 4 ? intervals : INTERVALS); 73 | this->init_sample = 74 | (init_sample > 0 && init_sample <= 6 ? init_sample : INIT_SAMPLE); 75 | this->thresh = (thresh >= 0 ? thresh : THRES); 76 | } 77 | 78 | 79 | //------------------------------------------------------- 80 | 81 | //! Set or re-set the integral image source 82 | void FastHessian::setIntImage(IplImage *img) 83 | { 84 | // Change the source image 85 | this->img = img; 86 | 87 | i_height = img->height; 88 | i_width = img->width; 89 | } 90 | 91 | //------------------------------------------------------- 92 | 93 | //! 
Find the image features and write into vector of features
//! Detect scale-space extrema of the determinant-of-Hessian response and
//! append the interpolated interest points to ipts.
void FastHessian::getIpoints()
{
  // filter index map: row = octave, column = interval within that octave
  static const int filter_map [OCTAVES][INTERVALS] = {{0,1,2,3}, {1,3,4,5}, {3,5,6,7}, {5,7,8,9}, {7,9,10,11}};

  // Discard any points from a previous run
  ipts.clear();

  // Build the response map
  buildResponseMap();

  // Examine each triple of adjacent response layers per octave
  for (int octave = 0; octave < octaves; ++octave)
  {
    for (int interval = 0; interval <= 1; ++interval)
    {
      ResponseLayer *bottom = responseMap.at(filter_map[octave][interval]);
      ResponseLayer *middle = responseMap.at(filter_map[octave][interval + 1]);
      ResponseLayer *top    = responseMap.at(filter_map[octave][interval + 2]);

      // Scan the middle layer at the density of the most sparse layer
      // (always the top) to find maxima across scale and space
      for (int row = 0; row < top->height; ++row)
      {
        for (int col = 0; col < top->width; ++col)
        {
          if (isExtremum(row, col, top, middle, bottom))
          {
            interpolateExtremum(row, col, top, middle, bottom);
          }
        }
      }
    }
  }
}

//-------------------------------------------------------

//!
Build map of DoH responses
//! Allocate one ResponseLayer per filter size and fill each with
//! determinant-of-Hessian responses. Filter sizes per octave:
//!   Oct1: 9, 15, 21, 27   Oct2: 15, 27, 39, 51   Oct3: 27, 51, 75, 99
//!   Oct4: 51, 99, 147,195 Oct5: 99, 195, 291, 387
//! Layers shared with an earlier octave are built only once, so each
//! octave after the first contributes just its two largest filters.
void FastHessian::buildResponseMap()
{
  // Deallocate memory and clear any existing response layers
  for (unsigned int i = 0; i < responseMap.size(); ++i)
    delete responseMap[i];
  responseMap.clear();

  // Base layer dimensions follow the initial sampling step
  const int base_w = i_width / init_sample;
  const int base_h = i_height / init_sample;
  const int base_s = init_sample;

  // First octave: the four smallest filters at full resolution
  if (octaves >= 1)
  {
    responseMap.push_back(new ResponseLayer(base_w, base_h, base_s, 9));
    responseMap.push_back(new ResponseLayer(base_w, base_h, base_s, 15));
    responseMap.push_back(new ResponseLayer(base_w, base_h, base_s, 21));
    responseMap.push_back(new ResponseLayer(base_w, base_h, base_s, 27));
  }

  // Octaves 2..5: two new filter sizes each, at halved resolution per step
  static const int extra_filters[4][2] = { {39, 51}, {75, 99}, {147, 195}, {291, 387} };
  for (int octave = 2; octave <= 5; ++octave)
  {
    if (octaves < octave)
      break;
    const int shrink = 1 << (octave - 1);   // 2, 4, 8, 16
    responseMap.push_back(new ResponseLayer(base_w / shrink, base_h / shrink,
                                            base_s * shrink, extra_filters[octave - 2][0]));
    responseMap.push_back(new ResponseLayer(base_w / shrink, base_h / shrink,
                                            base_s * shrink, extra_filters[octave - 2][1]));
  }

  // Extract responses from the image
  for (unsigned int i = 0; i < responseMap.size(); ++i)
  {
    buildResponseLayer(responseMap[i]);
  }
}

//-------------------------------------------------------

//!
Calculate DoH responses for supplied layer
//! Fill rl->responses with the (approximate) determinant of the Hessian at
//! every sample of the layer, and rl->laplacian with the sign of the trace.
void FastHessian::buildResponseLayer(ResponseLayer *rl)
{
  float *responses = rl->responses;          // response storage
  unsigned char *laplacian = rl->laplacian;  // laplacian sign storage
  const int step = rl->step;                 // step size for this filter
  const int b = (rl->filter - 1) / 2 + 1;    // border for this filter
  const int l = rl->filter / 3;              // lobe for this filter (filter size / 3)
  const int w = rl->filter;                  // filter size
  const float inverse_area = 1.f / (w * w);  // normalisation factor

  int index = 0;                             // flat index into the layer arrays
  for (int ar = 0; ar < rl->height; ++ar)
  {
    for (int ac = 0; ac < rl->width; ++ac, ++index)
    {
      // image coordinates of this sample
      const int r = ar * step;
      const int c = ac * step;

      // Second-order box-filter responses via the integral image
      float Dxx = BoxIntegral(img, r - l + 1, c - b, 2*l - 1, w)
                  - BoxIntegral(img, r - l + 1, c - l / 2, 2*l - 1, l) * 3;
      float Dyy = BoxIntegral(img, r - b, c - l + 1, w, 2*l - 1)
                  - BoxIntegral(img, r - l / 2, c - l + 1, l, 2*l - 1) * 3;
      float Dxy = BoxIntegral(img, r - l, c + 1, l, l)
                  + BoxIntegral(img, r + 1, c - l, l, l)
                  - BoxIntegral(img, r - l, c - l, l, l)
                  - BoxIntegral(img, r + 1, c + 1, l, l);

      // Normalise the filter responses with respect to their size
      Dxx *= inverse_area;
      Dyy *= inverse_area;
      Dxy *= inverse_area;

      // Determinant of Hessian response (0.81 weights the Dxy
      // box-filter approximation) and the laplacian sign
      responses[index] = (Dxx * Dyy - 0.81f * Dxy * Dxy);
      laplacian[index] = (Dxx + Dyy >= 0 ? 1 : 0);

#ifdef RL_DEBUG
      // create list of the image coords for each response
      rl->coords.push_back(std::make_pair(r,c));
#endif
    }
  }
}

//-------------------------------------------------------

//!
Non Maximal Suppression function
//! Returns 1 when m->getResponse(r,c,t) is above threshold and strictly
//! greater than all 26 neighbours in the 3x3x3 scale-space block spanning
//! layers b, m, t; returns 0 otherwise.
int FastHessian::isExtremum(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b)
{
  // Reject samples too close to the layer border for the 3x3x3 test
  const int layerBorder = (t->filter + 1) / (2 * t->step);
  const bool out_of_bounds =
      r <= layerBorder || r >= t->height - layerBorder ||
      c <= layerBorder || c >= t->width - layerBorder;
  if (out_of_bounds)
    return 0;

  // the candidate point in the middle layer must be above thresh
  const float candidate = m->getResponse(r, c, t);
  if (candidate < thresh)
    return 0;

  // the candidate must dominate every other response in the 3x3x3 block
  for (int rr = -1; rr <= 1; ++rr)
  {
    for (int cc = -1; cc <= 1; ++cc)
    {
      if (t->getResponse(r + rr, c + cc) >= candidate)
        return 0;
      if ((rr != 0 || cc != 0) && m->getResponse(r + rr, c + cc, t) >= candidate)
        return 0;
      if (b->getResponse(r + rr, c + cc, t) >= candidate)
        return 0;
    }
  }

  return 1;
}

//-------------------------------------------------------

//! Interpolate scale-space extrema to subpixel accuracy to form an image feature.
274 | void FastHessian::interpolateExtremum(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b) 275 | { 276 | // get the step distance between filters 277 | // check the middle filter is mid way between top and bottom 278 | int filterStep = (m->filter - b->filter); 279 | assert(filterStep > 0 && t->filter - m->filter == m->filter - b->filter); 280 | 281 | // Get the offsets to the actual location of the extremum 282 | double xi = 0, xr = 0, xc = 0; 283 | interpolateStep(r, c, t, m, b, &xi, &xr, &xc ); 284 | 285 | // If point is sufficiently close to the actual extremum 286 | if( fabs( xi ) < 0.5f && fabs( xr ) < 0.5f && fabs( xc ) < 0.5f ) 287 | { 288 | Ipoint ipt; 289 | ipt.x = static_cast((c + xc) * t->step); 290 | ipt.y = static_cast((r + xr) * t->step); 291 | ipt.scale = static_cast((0.1333f) * (m->filter + xi * filterStep)); 292 | ipt.laplacian = static_cast(m->getLaplacian(r,c,t)); 293 | ipts.push_back(ipt); 294 | } 295 | } 296 | 297 | //------------------------------------------------------- 298 | 299 | //! Performs one step of extremum interpolation. 300 | void FastHessian::interpolateStep(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b, 301 | double* xi, double* xr, double* xc ) 302 | { 303 | CvMat* dD, * H, * H_inv, X; 304 | double x[3] = { 0 }; 305 | 306 | dD = deriv3D( r, c, t, m, b ); 307 | H = hessian3D( r, c, t, m, b ); 308 | H_inv = cvCreateMat( 3, 3, CV_64FC1 ); 309 | cvInvert( H, H_inv, CV_SVD ); 310 | cvInitMatHeader( &X, 3, 1, CV_64FC1, x, CV_AUTOSTEP ); 311 | cvGEMM( H_inv, dD, -1, NULL, 0, &X, 0 ); 312 | 313 | cvReleaseMat( &dD ); 314 | cvReleaseMat( &H ); 315 | cvReleaseMat( &H_inv ); 316 | 317 | *xi = x[2]; 318 | *xr = x[1]; 319 | *xc = x[0]; 320 | } 321 | 322 | //------------------------------------------------------- 323 | 324 | //! Computes the partial derivatives in x, y, and scale of a pixel. 
325 | CvMat* FastHessian::deriv3D(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b) 326 | { 327 | CvMat* dI; 328 | double dx, dy, ds; 329 | 330 | dx = (m->getResponse(r, c + 1, t) - m->getResponse(r, c - 1, t)) / 2.0; 331 | dy = (m->getResponse(r + 1, c, t) - m->getResponse(r - 1, c, t)) / 2.0; 332 | ds = (t->getResponse(r, c) - b->getResponse(r, c, t)) / 2.0; 333 | 334 | dI = cvCreateMat( 3, 1, CV_64FC1 ); 335 | cvmSet( dI, 0, 0, dx ); 336 | cvmSet( dI, 1, 0, dy ); 337 | cvmSet( dI, 2, 0, ds ); 338 | 339 | return dI; 340 | } 341 | 342 | //------------------------------------------------------- 343 | 344 | //! Computes the 3D Hessian matrix for a pixel. 345 | CvMat* FastHessian::hessian3D(int r, int c, ResponseLayer *t, ResponseLayer *m, ResponseLayer *b) 346 | { 347 | CvMat* H; 348 | double v, dxx, dyy, dss, dxy, dxs, dys; 349 | 350 | v = m->getResponse(r, c, t); 351 | dxx = m->getResponse(r, c + 1, t) + m->getResponse(r, c - 1, t) - 2 * v; 352 | dyy = m->getResponse(r + 1, c, t) + m->getResponse(r - 1, c, t) - 2 * v; 353 | dss = t->getResponse(r, c) + b->getResponse(r, c, t) - 2 * v; 354 | dxy = ( m->getResponse(r + 1, c + 1, t) - m->getResponse(r + 1, c - 1, t) - 355 | m->getResponse(r - 1, c + 1, t) + m->getResponse(r - 1, c - 1, t) ) / 4.0; 356 | dxs = ( t->getResponse(r, c + 1) - t->getResponse(r, c - 1) - 357 | b->getResponse(r, c + 1, t) + b->getResponse(r, c - 1, t) ) / 4.0; 358 | dys = ( t->getResponse(r + 1, c) - t->getResponse(r - 1, c) - 359 | b->getResponse(r + 1, c, t) + b->getResponse(r - 1, c, t) ) / 4.0; 360 | 361 | H = cvCreateMat( 3, 3, CV_64FC1 ); 362 | cvmSet( H, 0, 0, dxx ); 363 | cvmSet( H, 0, 1, dxy ); 364 | cvmSet( H, 0, 2, dxs ); 365 | cvmSet( H, 1, 0, dxy ); 366 | cvmSet( H, 1, 1, dyy ); 367 | cvmSet( H, 1, 2, dys ); 368 | cvmSet( H, 2, 0, dxs ); 369 | cvmSet( H, 2, 1, dys ); 370 | cvmSet( H, 2, 2, dss ); 371 | 372 | return H; 373 | } 374 | 375 | //------------------------------------------------------- 
-------------------------------------------------------------------------------- /src/mainwindow.cpp: -------------------------------------------------------------------------------- 1 | #include "mainwindow.h" 2 | #include "ui_mainwindow.h" 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | MainWindow::MainWindow(QWidget *parent) : 13 | QMainWindow(parent), 14 | ui(new Ui::MainWindow), 15 | curFrame(0), 16 | totalFrame(0), 17 | captureFrame(false), 18 | captureFrames(false), 19 | hasVideo(false) 20 | { 21 | ui->setupUi(this); 22 | ui->txtThresh->setValidator(new QIntValidator(0, 100, this)); 23 | ui->txtThresh->setFixedWidth(30); 24 | ui->btnPlayPause->setEnabled(false); //Disable on load. no video 25 | ui->actionCapture_all_frames->setEnabled(false); //Disable on load. no video 26 | ui->actionCapture_current_frame->setEnabled(false); //Disable on load. no video 27 | ui->btnSnap->setEnabled(false); //Disable on load. no video 28 | ui->btnSnapAllFrame->setEnabled(false); 29 | 30 | toggleDetectHuman(ui->checkHuman->isChecked()); 31 | toggleEdge(ui->checkEdge->isChecked()); 32 | toggleFlip(ui->checkFlip->isChecked()); 33 | 34 | //Check the effect setting 35 | this->exports = new Exports(this); 36 | this->settings = Settings::getInstance(this); 37 | this->aboutUs = new About(this); 38 | aboutUs->setFixedSize(490, 360); 39 | 40 | 41 | //Connect all the element 42 | connect(ui->checkHuman, SIGNAL(toggled(bool)), this, SLOT(toggleDetectHuman(bool))); 43 | connect(ui->btnSnap, SIGNAL(clicked()), this, SLOT(toggleCaptureFrame())); 44 | connect(ui->btnPlayPause, SIGNAL(clicked()), this, SLOT(togglePlayPause())); 45 | connect(ui->btnBrowse, SIGNAL(clicked()), this, SLOT(loadFile())); 46 | connect(ui->btnSnapAllFrame, SIGNAL(clicked()), this, SLOT(toggleCaptureFrames())); 47 | connect(ui->btnExport, SIGNAL(clicked()), exports, SLOT(open())); 48 | 49 | //set of action 50 | connect(ui->actionCapture_current_frame, 
SIGNAL(triggered()), this, SLOT(toggleCaptureFrame())); 51 | connect(ui->actionOpen_File, SIGNAL(triggered()), this, SLOT(loadFile())); 52 | connect(ui->actionExit, SIGNAL(triggered()), this, SLOT(close())); 53 | connect(ui->actionExports, SIGNAL(triggered()), exports, SLOT(open())); 54 | connect(ui->actionGlobal_Settings, SIGNAL(triggered()), settings, SLOT(open())); 55 | connect(ui->actionAbout_Us, SIGNAL(triggered()), aboutUs, SLOT(open())); 56 | connect(ui->actionCapture_all_frames, SIGNAL(triggered()), this, SLOT(toggleCaptureFrames())); 57 | connect(ui->actionExport, SIGNAL(triggered()), this, SLOT(openDirExport())); 58 | connect(ui->actionSnapshot, SIGNAL(triggered()), this,SLOT(openDirSnap())); 59 | 60 | } 61 | 62 | MainWindow::~MainWindow() 63 | { 64 | delete ui; 65 | } 66 | 67 | 68 | void MainWindow::toggleDetectHuman(bool state){ 69 | 70 | if(state){ 71 | ui->groupAlgorithm->show(); 72 | if(ui->radioSurf->isChecked()) toggleSurf(true); 73 | if(ui->radioHog->isChecked()) toggleHog(true); 74 | } else { 75 | ui->groupAlgorithm->hide(); 76 | toggleSurf(false); 77 | toggleHog(false); 78 | } 79 | } 80 | 81 | 82 | void MainWindow::toggleEdge(bool state){ 83 | if(state){ 84 | ui->groupThresh->show(); 85 | mThread->edge = true; 86 | if(ui->checkEdgeInvert->isChecked()) mThread->edgeInvert = true; 87 | } else { 88 | ui->groupThresh->hide(); 89 | mThread->edge = false; 90 | mThread->edgeInvert = false; 91 | } 92 | } 93 | 94 | void MainWindow::toggleEdgeInvert(bool state){ 95 | if(state) mThread->edgeInvert = true; 96 | else mThread->edgeInvert = false; 97 | } 98 | 99 | void MainWindow::toggleFlip(bool state){ 100 | if(state){ 101 | ui->groupFlip->show(); 102 | if(!ui->checkFlipHor->isChecked() && !ui->checkFlipVer->isChecked()){ mThread->flip = false; } 103 | else { mThread->flip = true; } 104 | } else { 105 | mThread->flip = false; 106 | ui->groupFlip->hide(); 107 | } 108 | } 109 | 110 | void MainWindow::toggleFlipHor(bool state) 111 | { 112 | if(state){ 113 | 
mThread->flip = true; 114 | if(ui->checkFlipVer->isChecked()) { mThread->flipCode = -1; } //Both 115 | else { mThread->flipCode = 1; } //Hor only 116 | } else { 117 | if(ui->checkFlipVer->isChecked()) { mThread->flip = true; mThread->flipCode = 0; } //Ver only 118 | else { mThread->flip = false; } 119 | } 120 | } 121 | 122 | void MainWindow::toggleFlipVer(bool state) 123 | { 124 | if(state){ 125 | mThread->flip = true; 126 | if(ui->checkFlipHor->isChecked()){ mThread->flipCode = -1; } //Both 127 | else { mThread->flipCode = 0; } //Ver only 128 | } else { 129 | if(ui->checkFlipHor->isChecked()) { mThread->flip = true; mThread->flipCode = 1; } //Hor only 130 | else { mThread->flip = false; } 131 | } 132 | } 133 | 134 | 135 | void MainWindow::togglePlayPause(){ 136 | 137 | //if the video is finish playing and user press restart 138 | if (!this->mThread->pause && this->curFrame == this->totalFrame){ 139 | this->curFrame = 1; 140 | this->capture->set(CV_CAP_PROP_POS_FRAMES,0); 141 | //this->mThread->setValueJ(0); 142 | this->mThread->stop = false; 143 | this->mThread->pauseAt = 1; 144 | ui->btnPlayPause->setIcon(QIcon(":/images/pause")); 145 | QThreadPool::globalInstance()->start(new MyTask(this->mThread)); 146 | //if the video is still playing and user press pause 147 | } else if(!this->mThread->pause){ 148 | this->mThread->pause = true; 149 | ui->btnPlayPause->setIcon(QIcon(":/images/play")); 150 | //if the video is at pause condition and user press play 151 | } else { 152 | this->mThread->pause = false; 153 | ui->btnPlayPause->setIcon(QIcon(":/images/pause")); 154 | QThreadPool::globalInstance()->start(new MyTask(this->mThread)); 155 | } 156 | 157 | } 158 | 159 | void MainWindow::toggleSurf(bool state) 160 | { 161 | if(state && ui->checkHuman->isChecked()) 162 | mThread->surf = true; 163 | else 164 | mThread->surf = false; 165 | } 166 | 167 | void MainWindow::toggleHog(bool state) 168 | { 169 | if(state && ui->checkHuman->isChecked()) 170 | mThread->hog = true; 171 | 
else 172 | mThread->hog = false; 173 | } 174 | 175 | void MainWindow::toggleCaptureFrame(){ 176 | if(this->curFrame == this->totalFrame || this->mThread->pause) 177 | QMessageBox::information(this, "Snapshot", "You can't capture screen when video is pause or stop.", QMessageBox::Ok); 178 | else 179 | this->captureFrame = true; 180 | } 181 | 182 | void MainWindow::toggleCaptureFrames(){ 183 | 184 | //this->curFrame = 1;//baru tambah 185 | this->capture->set(CV_CAP_PROP_POS_FRAMES, 0); 186 | this->mThread->setValueJ(0); 187 | this->mThread->frameToSkip = this->settings->getFrameToSkip(); 188 | this->captureFrames = true; 189 | 190 | QDateTime timestem = QDateTime::currentDateTime(); 191 | this->folderPath = this->settings->getSnapPath() + "/" + (timestem.toString("yy-MM-dd hh-mm-ss")); 192 | 193 | QDir dir(this->folderPath); 194 | 195 | if(!dir.exists(this->folderPath)){ 196 | dir.mkdir(this->folderPath); 197 | } 198 | 199 | this->dialogSnaps = new dialogSnapFrames(this, this->totalFrame - (this->totalFrame % this->settings->getFrameToSkip())); 200 | this->dialogSnaps->setFixedWidth(300); 201 | this->dialogSnaps->open(); 202 | 203 | //if user press capture frame when the video is pause or finish playing 204 | if(this->mThread->pause || (!this->mThread->pause && this->curFrame == this->totalFrame)){ 205 | ui->btnPlayPause->setIcon(QIcon(":/images/pause")); 206 | this->mThread->pause = false; 207 | this->mThread->stop = false; 208 | QThreadPool::globalInstance()->start(new MyTask(this->mThread)); 209 | } 210 | 211 | } 212 | 213 | void MainWindow::setThresh(int val){ 214 | ui->txtThresh->setText(QString::number(val)); 215 | mThread->edgeThresh = val; 216 | } 217 | 218 | void MainWindow::setThresh(QString val){ 219 | ui->slideThresh->setValue(val.toInt()); 220 | mThread->edgeThresh = val.toInt(); 221 | } 222 | 223 | void MainWindow::saveToFolder(Mat &img){ 224 | this->captureFrame = false; 225 | QDateTime timestem = QDateTime::currentDateTime(); 226 | string path = 
this->settings->getSnapPath().toStdString() + "/" + (timestem.toString("yy-MM-dd hh-mm-ss")).toStdString() + ".jpg"; 227 | cv::imwrite(path, img); 228 | QMessageBox msgBox; 229 | msgBox.setText("The frame has been saved."); 230 | msgBox.setWindowTitle("Information"); 231 | msgBox.exec(); 232 | } 233 | 234 | void MainWindow::snapAllFrames(Mat &img){ 235 | 236 | string fileName = this->folderPath.toStdString() + "/" + QString::number(this->curFrame).toStdString() + ".jpg"; 237 | cv::imwrite(fileName, img); 238 | 239 | //to update to the progress dialog 240 | emit displayCurProgress(this->curFrame, this->totalFrame); 241 | } 242 | 243 | 244 | //Connect all the effect button and display that want to be send to thread 245 | void MainWindow::initEffectAndGui(){ 246 | //To passing object Mat via signal and slot 247 | typedef Mat AMAT; 248 | qRegisterMetaType("Mat"); 249 | qRegisterMetaType("IpVec"); 250 | 251 | connect(this->mThread, SIGNAL(currentFrame(int,Mat)), this, SLOT(displayResult(int,Mat))); 252 | connect(this->mThread, SIGNAL(finishProcess(bool)), this, SLOT(finishProcess(bool))); 253 | connect(ui->radioSurf, SIGNAL(toggled(bool)), this, SLOT(toggleSurf(bool))); 254 | connect(ui->radioHog, SIGNAL(toggled(bool)), this, SLOT(toggleHog(bool))); 255 | connect(ui->checkEdge, SIGNAL(toggled(bool)), this, SLOT(toggleEdge(bool))); 256 | connect(ui->checkEdgeInvert, SIGNAL(toggled(bool)), this, SLOT(toggleEdgeInvert(bool))); 257 | connect(ui->slideThresh, SIGNAL(valueChanged(int)), this, SLOT(setThresh(int))); 258 | connect(ui->txtThresh, SIGNAL(textChanged(QString)), this, SLOT(setThresh(QString))); 259 | connect(ui->checkFlip, SIGNAL(toggled(bool)), this, SLOT(toggleFlip(bool))); 260 | connect(ui->checkFlipHor, SIGNAL(toggled(bool)), this, SLOT(toggleFlipHor(bool))); 261 | connect(ui->checkFlipVer, SIGNAL(toggled(bool)), this, SLOT(toggleFlipVer(bool))); 262 | 263 | } 264 | 265 | void MainWindow::loadFile(){ 266 | 267 | const QString path = 
QFileDialog::getOpenFileName(this, "Select files", "", "Video Files (*.avi)"); 268 | if(!path.isEmpty()){ 269 | 270 | //Set the important button enabled back after video has been load 271 | if(!ui->btnPlayPause->isEnabled()) 272 | ui->btnPlayPause->setEnabled(true); 273 | if(!ui->actionCapture_all_frames->isEnabled()) 274 | ui->actionCapture_all_frames->setEnabled(true); 275 | if(!ui->actionCapture_current_frame->isEnabled()) 276 | ui->actionCapture_current_frame->setEnabled(true); 277 | if(!ui->btnSnap->isEnabled()) 278 | ui->btnSnap->setEnabled(true); 279 | if(!ui->btnSnapAllFrame->isEnabled()) 280 | ui->btnSnapAllFrame->setEnabled(true); 281 | 282 | //Check if the thread already run 283 | if(this->hasVideo){ 284 | delete this->mThread; 285 | delete this->capture; 286 | this->mThread = 0; 287 | this->capture = 0; 288 | } 289 | 290 | this->curFrame = 0; 291 | this->capture = new cv::VideoCapture(path.toStdString()); 292 | this->mThread = new processThread(this, this->capture, false, 0, ""); 293 | 294 | //Connect all the effect button 295 | this->initEffectAndGui(); 296 | 297 | this->totalFrame = (int)this->capture->get(CV_CAP_PROP_FRAME_COUNT); 298 | ui->slideTimeline->setMaximum(this->totalFrame); 299 | ui->btnPlayPause->setIcon(QIcon(":/images/pause")); 300 | 301 | this->setInitialProp(); 302 | 303 | this->hasVideo = true; 304 | 305 | QThreadPool::globalInstance()->start(new MyTask(this->mThread)); 306 | } 307 | } 308 | 309 | void MainWindow::setInitialProp(){ 310 | //Check Human Detection 311 | if(ui->checkHuman->isChecked() && ui->radioSurf->isChecked()){ 312 | this->mThread->surf = true; 313 | } else if(ui->checkHuman->isChecked() && ui->radioHog->isChecked()){ 314 | this->mThread->hog = true; 315 | } 316 | 317 | //Check Edge Detection state 318 | if(ui->checkEdge->isChecked() && ui->checkEdgeInvert->isChecked()){ 319 | this->mThread->edge = true; this->mThread->edgeInvert = true; this->mThread->edgeThresh = ui->slideThresh->value(); 320 | } else 
if(ui->checkEdge->isChecked()){ 321 | this->mThread->edge = true; this->mThread->edgeInvert = true; 322 | } 323 | 324 | //Check Flip state 325 | if(ui->checkFlip->isChecked() && ui->checkFlipHor->isChecked() && ui->checkFlipVer->isChecked()){ 326 | this->mThread->flip = true; this->mThread->flipCode = -1; //Both 327 | } else if(ui->checkFlip->isChecked() && ui->checkFlipVer->isChecked()){ 328 | this->mThread->flip = true; this->mThread->flipCode = 0; //Ver 329 | } else if(ui->checkFlip->isChecked() && ui->checkFlipHor->isChecked()){ 330 | this->mThread->flip = true; this->mThread->flipCode = 1; //Hor 331 | } 332 | 333 | this->mThread->fps = this->settings->getVideoFrame(); 334 | 335 | } 336 | 337 | void MainWindow::displayResult(int cur, Mat img) 338 | { 339 | this->curFrame = cur; 340 | 341 | if(this->captureFrames){ 342 | this->snapAllFrames(img); 343 | } 344 | 345 | if(this->captureFrame){ this->saveToFolder(img); } //if user snap a frame, save it then play as usual 346 | ui->labelDisplay->setPixmap(QPixmap::fromImage(QImage(img.data,img.cols, img.rows, img.step, QImage::Format_RGB888)).scaled(ui->labelDisplay->size(), Qt::KeepAspectRatio)); 347 | ui->slideTimeline->setValue(cur); 348 | ui->labelTimeline->setText(QString::number(cur) + "/" + QString::number(this->totalFrame) + "\n" + QString::number(cur / 30) + "/" + QString::number((int)this->totalFrame/30)); 349 | 350 | } 351 | 352 | //Condition where the video has been finish processing 353 | void MainWindow::finishProcess(bool state) 354 | { 355 | //force the cur frame to be same with total frame. 
to enable restart after snap all frame 356 | this->curFrame = this->totalFrame; 357 | ui->slideTimeline->setValue(this->curFrame); 358 | if (state){ 359 | ui->btnPlayPause->setIcon(QIcon(":/images/repeat")); 360 | 361 | if(this->captureFrames){ 362 | dialogSnaps->setCurProgress(this->curFrame, this->totalFrame); 363 | dialogSnaps->setButtonEnable(true); 364 | this->captureFrames = false; 365 | } 366 | } 367 | } 368 | 369 | void MainWindow::openDirExport() 370 | { 371 | QProcess::startDetached("explorer " + this->settings->getExportPath()); 372 | } 373 | 374 | void MainWindow::openDirSnap() 375 | { 376 | QProcess::startDetached("explorer " + this->settings->getSnapPath()); 377 | } 378 | -------------------------------------------------------------------------------- /forms/mainwindow.ui: -------------------------------------------------------------------------------- 1 | 2 | 3 | MainWindow 4 | 5 | 6 | 7 | 0 8 | 0 9 | 675 10 | 481 11 | 12 | 13 | 14 | Track Human Gait in Video | V1.0 15 | 16 | 17 | 18 | :/images/icon:/images/icon 19 | 20 | 21 | 22 | 32 23 | 32 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 0 32 | 33 | 34 | 35 | 36 | Open video file 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | ... 46 | 47 | 48 | 49 | :/images/open:/images/open 50 | 51 | 52 | 53 | 32 54 | 32 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | Capture the current frame 63 | 64 | 65 | ... 66 | 67 | 68 | 69 | :/images/capture:/images/capture 70 | 71 | 72 | 73 | 32 74 | 32 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | Capture all frames for this video 83 | 84 | 85 | ... 86 | 87 | 88 | 89 | :/images/capture_all:/images/capture_all 90 | 91 | 92 | 93 | 32 94 | 32 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | Export videos 103 | 104 | 105 | ... 
106 | 107 | 108 | 109 | :/images/export:/images/export 110 | 111 | 112 | 113 | 32 114 | 32 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | Qt::Horizontal 123 | 124 | 125 | 126 | 40 127 | 20 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | Qt::Horizontal 142 | 143 | 144 | 145 | 146 | 147 | 148 | Properties 149 | 150 | 151 | Qt::AlignCenter 152 | 153 | 154 | 155 | 156 | 157 | 158 | Qt::Horizontal 159 | 160 | 161 | 162 | 163 | 164 | 165 | Flip 166 | 167 | 168 | 169 | 170 | 171 | 172 | Properties 173 | 174 | 175 | 176 | 177 | 178 | Horizontal 179 | 180 | 181 | 182 | 183 | 184 | 185 | Vertical 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | Edge Detector 196 | 197 | 198 | 199 | 200 | 201 | 202 | Threshold 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 100 211 | 212 | 213 | Qt::Horizontal 214 | 215 | 216 | 217 | 218 | 219 | 220 | 0 221 | 222 | 223 | Qt::AlignCenter 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | Invert 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | Human Detect 243 | 244 | 245 | 246 | 247 | 248 | 249 | Algorithm 250 | 251 | 252 | 253 | 254 | 255 | HOG Descriptor 256 | 257 | 258 | 259 | 260 | 261 | 262 | SURF Descriptor 263 | 264 | 265 | 266 | 267 | 268 | 269 | 270 | 271 | 272 | Qt::Vertical 273 | 274 | 275 | 276 | 20 277 | 40 278 | 279 | 280 | 281 | 282 | 283 | 284 | 285 | 286 | 287 | 288 | 289 | 290 | 291 | 292 | 293 | 294 | 295 | 296 | 297 | 298 | 299 | 0 300 | 0 301 | 302 | 303 | 304 | 305 | 306 | 307 | 308 | :/images/play:/images/play 309 | 310 | 311 | 312 | 24 313 | 24 314 | 315 | 316 | 317 | false 318 | 319 | 320 | false 321 | 322 | 323 | false 324 | 325 | 326 | false 327 | 328 | 329 | 330 | 331 | 332 | 333 | true 334 | 335 | 336 | Qt::Horizontal 337 | 338 | 339 | 340 | 341 | 342 | 343 | 0/0 344 | 0/0 345 | 346 | 347 | 348 | 349 | 350 | 351 | 352 | 353 | 354 | 355 | 356 | 357 | 358 | 359 | 0 360 | 0 361 | 675 362 | 21 363 | 364 | 365 | 366 | 367 | Files 368 | 369 | 370 | 371 | Open 
Directory 372 | 373 | 374 | 375 | 376 | 377 | 378 | 379 | 380 | 381 | 382 | 383 | 384 | Setting 385 | 386 | 387 | 388 | 389 | 390 | Help 391 | 392 | 393 | 394 | 395 | 396 | Videos 397 | 398 | 399 | 400 | 401 | 402 | 403 | 404 | 405 | 406 | 407 | 408 | false 409 | 410 | 411 | 412 | 413 | Exports 414 | 415 | 416 | 417 | 418 | Exit 419 | 420 | 421 | 422 | 423 | Global Settings 424 | 425 | 426 | 427 | 428 | Open File 429 | 430 | 431 | Ctrl+O 432 | 433 | 434 | 435 | 436 | About Us 437 | 438 | 439 | 440 | 441 | Play 442 | 443 | 444 | 445 | 446 | Pause 447 | 448 | 449 | 450 | 451 | Stop 452 | 453 | 454 | 455 | 456 | Capture current frame 457 | 458 | 459 | 460 | 461 | Capture all frames 462 | 463 | 464 | 465 | 466 | Snapshot 467 | 468 | 469 | 470 | 471 | Export 472 | 473 | 474 | 475 | 476 | Extract Points 477 | 478 | 479 | 480 | 481 | 482 | 483 | 484 | 485 | 486 | -------------------------------------------------------------------------------- /FYP.pro.user: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | ProjectExplorer.Project.ActiveTarget 7 | 1 8 | 9 | 10 | ProjectExplorer.Project.EditorSettings 11 | 12 | true 13 | false 14 | true 15 | 16 | Cpp 17 | 18 | CppGlobal 19 | 20 | 21 | 22 | QmlJS 23 | 24 | QmlJSGlobal 25 | 26 | 27 | 2 28 | UTF-8 29 | false 30 | 4 31 | false 32 | true 33 | 1 34 | true 35 | 0 36 | true 37 | 0 38 | 8 39 | true 40 | 1 41 | true 42 | true 43 | true 44 | false 45 | 46 | 47 | 48 | ProjectExplorer.Project.PluginSettings 49 | 50 | 51 | 52 | ProjectExplorer.Project.Target.0 53 | 54 | Desktop Qt 5.2.1 clang 64bit 55 | Desktop Qt 5.2.1 clang 64bit 56 | qt.521.clang_64.essentials_kit 57 | 0 58 | 0 59 | 0 60 | 61 | /Users/faizshukri/tmp/build-FYP-Desktop_Qt_5_2_1_clang_64bit-Debug 62 | 63 | 64 | true 65 | qmake 66 | 67 | QtProjectManager.QMakeBuildStep 68 | true 69 | false 70 | 71 | false 72 | 73 | 74 | true 75 | Make 76 | 77 | Qt4ProjectManager.MakeStep 78 | 79 | -w 80 | -r 81 | 82 | false 83 | 84 | 
85 | 86 | 2 87 | Build 88 | 89 | ProjectExplorer.BuildSteps.Build 90 | 91 | 92 | 93 | true 94 | Make 95 | 96 | Qt4ProjectManager.MakeStep 97 | 98 | -w 99 | -r 100 | 101 | true 102 | clean 103 | 104 | 105 | 1 106 | Clean 107 | 108 | ProjectExplorer.BuildSteps.Clean 109 | 110 | 2 111 | false 112 | 113 | Debug 114 | 115 | Qt4ProjectManager.Qt4BuildConfiguration 116 | 2 117 | true 118 | 119 | 120 | /Users/faizshukri/tmp/build-FYP-Desktop_Qt_5_2_1_clang_64bit-Release 121 | 122 | 123 | true 124 | qmake 125 | 126 | QtProjectManager.QMakeBuildStep 127 | false 128 | true 129 | 130 | false 131 | 132 | 133 | true 134 | Make 135 | 136 | Qt4ProjectManager.MakeStep 137 | 138 | -w 139 | -r 140 | 141 | false 142 | 143 | 144 | 145 | 2 146 | Build 147 | 148 | ProjectExplorer.BuildSteps.Build 149 | 150 | 151 | 152 | true 153 | Make 154 | 155 | Qt4ProjectManager.MakeStep 156 | 157 | -w 158 | -r 159 | 160 | true 161 | clean 162 | 163 | 164 | 1 165 | Clean 166 | 167 | ProjectExplorer.BuildSteps.Clean 168 | 169 | 2 170 | false 171 | 172 | Release 173 | 174 | Qt4ProjectManager.Qt4BuildConfiguration 175 | 0 176 | true 177 | 178 | 2 179 | 180 | 181 | 0 182 | Deploy 183 | 184 | ProjectExplorer.BuildSteps.Deploy 185 | 186 | 1 187 | Deploy locally 188 | 189 | ProjectExplorer.DefaultDeployConfiguration 190 | 191 | 1 192 | 193 | 194 | 195 | false 196 | false 197 | false 198 | false 199 | true 200 | 0.01 201 | 10 202 | true 203 | 1 204 | 25 205 | 206 | 1 207 | true 208 | false 209 | true 210 | valgrind 211 | 212 | 0 213 | 1 214 | 2 215 | 3 216 | 4 217 | 5 218 | 6 219 | 7 220 | 8 221 | 9 222 | 10 223 | 11 224 | 12 225 | 13 226 | 14 227 | 228 | 2 229 | 230 | FYP 231 | 232 | Qt4ProjectManager.Qt4RunConfiguration:/Users/faizshukri/tmp/My-FYP/FYP.pro 233 | 234 | FYP.pro 235 | false 236 | false 237 | 238 | 3768 239 | true 240 | false 241 | false 242 | false 243 | true 244 | 245 | 1 246 | 247 | 248 | 249 | ProjectExplorer.Project.Target.1 250 | 251 | Desktop Qt 4.8.5 252 | Desktop Qt 4.8.5 253 | 
{b6b3398a-34b8-46d4-b8c1-17b5dde34734} 254 | 0 255 | 0 256 | 0 257 | 258 | /Users/faizshukri/tmp/build-FYP-Desktop_Qt_4_8_5-Debug 259 | 260 | 261 | true 262 | qmake 263 | 264 | QtProjectManager.QMakeBuildStep 265 | false 266 | true 267 | 268 | false 269 | 270 | 271 | true 272 | Make 273 | 274 | Qt4ProjectManager.MakeStep 275 | 276 | -w 277 | -r 278 | 279 | false 280 | 281 | 282 | 283 | 2 284 | Build 285 | 286 | ProjectExplorer.BuildSteps.Build 287 | 288 | 289 | 290 | true 291 | Make 292 | 293 | Qt4ProjectManager.MakeStep 294 | 295 | -w 296 | -r 297 | 298 | true 299 | clean 300 | 301 | 302 | 1 303 | Clean 304 | 305 | ProjectExplorer.BuildSteps.Clean 306 | 307 | 2 308 | false 309 | 310 | Debug 311 | 312 | Qt4ProjectManager.Qt4BuildConfiguration 313 | 2 314 | true 315 | 316 | 317 | /Users/faizshukri/tmp/build-FYP-Desktop_Qt_4_8_5-Release 318 | 319 | 320 | true 321 | qmake 322 | 323 | QtProjectManager.QMakeBuildStep 324 | false 325 | true 326 | 327 | false 328 | 329 | 330 | true 331 | Make 332 | 333 | Qt4ProjectManager.MakeStep 334 | 335 | -w 336 | -r 337 | 338 | false 339 | 340 | 341 | 342 | 2 343 | Build 344 | 345 | ProjectExplorer.BuildSteps.Build 346 | 347 | 348 | 349 | true 350 | Make 351 | 352 | Qt4ProjectManager.MakeStep 353 | 354 | -w 355 | -r 356 | 357 | true 358 | clean 359 | 360 | 361 | 1 362 | Clean 363 | 364 | ProjectExplorer.BuildSteps.Clean 365 | 366 | 2 367 | false 368 | 369 | Release 370 | 371 | Qt4ProjectManager.Qt4BuildConfiguration 372 | 0 373 | true 374 | 375 | 2 376 | 377 | 378 | 0 379 | Deploy 380 | 381 | ProjectExplorer.BuildSteps.Deploy 382 | 383 | 1 384 | Deploy locally 385 | 386 | ProjectExplorer.DefaultDeployConfiguration 387 | 388 | 1 389 | 390 | 391 | 392 | false 393 | false 394 | false 395 | false 396 | true 397 | 0.01 398 | 10 399 | true 400 | 1 401 | 25 402 | 403 | 1 404 | true 405 | false 406 | true 407 | valgrind 408 | 409 | 0 410 | 1 411 | 2 412 | 3 413 | 4 414 | 5 415 | 6 416 | 7 417 | 8 418 | 9 419 | 10 420 | 11 421 | 12 422 | 13 
423 | 14 424 | 425 | 2 426 | 427 | FYP 428 | 429 | Qt4ProjectManager.Qt4RunConfiguration:/Users/faizshukri/tmp/My-FYP/FYP.pro 430 | 431 | FYP.pro 432 | false 433 | false 434 | 435 | 3768 436 | true 437 | false 438 | false 439 | false 440 | true 441 | 442 | 1 443 | 444 | 445 | 446 | ProjectExplorer.Project.TargetCount 447 | 2 448 | 449 | 450 | ProjectExplorer.Project.Updater.EnvironmentId 451 | {c27f5f56-2909-4b6e-a5cb-cb61a3b2ec2d} 452 | 453 | 454 | ProjectExplorer.Project.Updater.FileVersion 455 | 15 456 | 457 | 458 | --------------------------------------------------------------------------------