├── ContourMatching.sln
├── ContourMatching
│   ├── ContourMatching.cpp
│   ├── ContourMatching.vcxproj
│   ├── ContourMatching.vcxproj.filters
│   ├── ContourMatching.vcxproj.user
│   ├── Input
│   │   ├── ClassiTemplate
│   │   │   ├── 1-2.jpg
│   │   │   ├── 2-2.jpg
│   │   │   ├── 3-2.jpg
│   │   │   ├── 4-2.jpg
│   │   │   ├── 5-2.jpg
│   │   │   ├── 6-2.jpg
│   │   │   └── 7-2.jpg
│   │   └── file
│   │       ├── Europe4.png
│   │       ├── France1-3.png
│   │       ├── France2-3.png
│   │       └── Franch1.png
│   ├── ShapeMatch.cpp
│   └── ShapeMatch.h
├── README.md
└── demo.txt
/ContourMatching.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 16
4 | VisualStudioVersion = 16.0.30717.126
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "ContourMatching", "ContourMatching\ContourMatching.vcxproj", "{FE60A342-9324-4BBE-87FB-AB9E53BB7C67}"
7 | EndProject
8 | Global
9 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
10 | Debug|x64 = Debug|x64
11 | Debug|x86 = Debug|x86
12 | Release|x64 = Release|x64
13 | Release|x86 = Release|x86
14 | EndGlobalSection
15 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
16 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Debug|x64.ActiveCfg = Debug|x64
17 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Debug|x64.Build.0 = Debug|x64
18 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Debug|x86.ActiveCfg = Debug|Win32
19 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Debug|x86.Build.0 = Debug|Win32
20 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Release|x64.ActiveCfg = Release|x64
21 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Release|x64.Build.0 = Release|x64
22 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Release|x86.ActiveCfg = Release|Win32
23 | {FE60A342-9324-4BBE-87FB-AB9E53BB7C67}.Release|x86.Build.0 = Release|Win32
24 | EndGlobalSection
25 | GlobalSection(SolutionProperties) = preSolution
26 | HideSolutionNode = FALSE
27 | EndGlobalSection
28 | GlobalSection(ExtensibilityGlobals) = postSolution
29 | SolutionGuid = {27B304F2-1B5D-48AF-BE09-C5FF49C77660}
30 | EndGlobalSection
31 | EndGlobal
32 |
--------------------------------------------------------------------------------
/ContourMatching/ContourMatching.cpp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/ContourMatching.cpp
--------------------------------------------------------------------------------
/ContourMatching/ContourMatching.vcxproj:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
3 |   <ItemGroup Label="ProjectConfigurations">
4 |     <ProjectConfiguration Include="Debug|Win32">
5 |       <Configuration>Debug</Configuration>
6 |       <Platform>Win32</Platform>
7 |     </ProjectConfiguration>
8 |     <ProjectConfiguration Include="Release|Win32">
9 |       <Configuration>Release</Configuration>
10 |       <Platform>Win32</Platform>
11 |     </ProjectConfiguration>
12 |     <ProjectConfiguration Include="Debug|x64">
13 |       <Configuration>Debug</Configuration>
14 |       <Platform>x64</Platform>
15 |     </ProjectConfiguration>
16 |     <ProjectConfiguration Include="Release|x64">
17 |       <Configuration>Release</Configuration>
18 |       <Platform>x64</Platform>
19 |     </ProjectConfiguration>
20 |   </ItemGroup>
21 |   <PropertyGroup Label="Globals">
22 |     <VCProjectVersion>16.0</VCProjectVersion>
23 |     <Keyword>Win32Proj</Keyword>
24 |     <ProjectGuid>{fe60a342-9324-4bbe-87fb-ab9e53bb7c67}</ProjectGuid>
25 |     <RootNamespace>ContourMatching</RootNamespace>
26 |     <WindowsTargetPlatformVersion>10.0</WindowsTargetPlatformVersion>
27 |   </PropertyGroup>
28 |   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
29 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
30 |     <ConfigurationType>Application</ConfigurationType>
31 |     <UseDebugLibraries>true</UseDebugLibraries>
32 |     <PlatformToolset>v142</PlatformToolset>
33 |     <CharacterSet>Unicode</CharacterSet>
34 |   </PropertyGroup>
35 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
36 |     <ConfigurationType>Application</ConfigurationType>
37 |     <UseDebugLibraries>false</UseDebugLibraries>
38 |     <PlatformToolset>v142</PlatformToolset>
39 |     <WholeProgramOptimization>true</WholeProgramOptimization>
40 |     <CharacterSet>Unicode</CharacterSet>
41 |   </PropertyGroup>
42 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
43 |     <ConfigurationType>Application</ConfigurationType>
44 |     <UseDebugLibraries>true</UseDebugLibraries>
45 |     <PlatformToolset>v142</PlatformToolset>
46 |     <CharacterSet>Unicode</CharacterSet>
47 |   </PropertyGroup>
48 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
49 |     <ConfigurationType>Application</ConfigurationType>
50 |     <UseDebugLibraries>false</UseDebugLibraries>
51 |     <PlatformToolset>v142</PlatformToolset>
52 |     <WholeProgramOptimization>true</WholeProgramOptimization>
53 |     <CharacterSet>Unicode</CharacterSet>
54 |   </PropertyGroup>
55 |   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
56 |   <ImportGroup Label="ExtensionSettings">
57 |   </ImportGroup>
58 |   <ImportGroup Label="Shared">
59 |   </ImportGroup>
60 |   <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
61 |     <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
62 |   </ImportGroup>
63 |   <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
64 |     <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
65 |   </ImportGroup>
66 |   <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
67 |     <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
68 |   </ImportGroup>
69 |   <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
70 |     <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
71 |   </ImportGroup>
72 |   <PropertyGroup Label="UserMacros" />
73 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
74 |     <LinkIncremental>true</LinkIncremental>
75 |     <IncludePath>E:\opencv\build\include\opencv2;E:\opencv\build\include;$(IncludePath)</IncludePath>
76 |     <LibraryPath>E:\opencv\build\x64\vc15\bin;$(LibraryPath)</LibraryPath>
77 |   </PropertyGroup>
78 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
79 |     <LinkIncremental>false</LinkIncremental>
80 |   </PropertyGroup>
81 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
82 |     <LinkIncremental>true</LinkIncremental>
83 |     <IncludePath>$(ProjectDir)opencv2.4.10\build\include\opencv2;$(ProjectDir)opencv2.4.10\build\include;$(IncludePath)</IncludePath>
84 |     <ReferencePath>$(ProjectDir)opencv2.4.10\build\include;$(ReferencePath)</ReferencePath>
85 |     <LibraryPath>$(ProjectDir)opencv2.4.10\build\x64\vc12\lib;$(LibraryPath)</LibraryPath>
86 |     <OutDir>$(ProjectDir)Output</OutDir>
87 |   </PropertyGroup>
88 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
89 |     <LinkIncremental>false</LinkIncremental>
90 |   </PropertyGroup>
91 |   <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
92 |     <ClCompile>
93 |       <WarningLevel>Level3</WarningLevel>
94 |       <SDLCheck>true</SDLCheck>
95 |       <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
96 |       <ConformanceMode>true</ConformanceMode>
97 |     </ClCompile>
98 |     <Link>
99 |       <SubSystem>Console</SubSystem>
100 |       <GenerateDebugInformation>true</GenerateDebugInformation>
101 |       <AdditionalDependencies>opencv_world450d.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
102 |     </Link>
103 |   </ItemDefinitionGroup>
104 |   <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
105 |     <ClCompile>
106 |       <WarningLevel>Level3</WarningLevel>
107 |       <FunctionLevelLinking>true</FunctionLevelLinking>
108 |       <IntrinsicFunctions>true</IntrinsicFunctions>
109 |       <SDLCheck>true</SDLCheck>
110 |       <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
111 |       <ConformanceMode>true</ConformanceMode>
112 |     </ClCompile>
113 |     <Link>
114 |       <SubSystem>Console</SubSystem>
115 |       <EnableCOMDATFolding>true</EnableCOMDATFolding>
116 |       <OptimizeReferences>true</OptimizeReferences>
117 |       <GenerateDebugInformation>true</GenerateDebugInformation>
118 |     </Link>
119 |   </ItemDefinitionGroup>
120 |   <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
121 |     <ClCompile>
122 |       <WarningLevel>Level3</WarningLevel>
123 |       <SDLCheck>true</SDLCheck>
124 |       <PreprocessorDefinitions>_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
125 |       <ConformanceMode>true</ConformanceMode>
126 |       <PrecompiledHeader>NotUsing</PrecompiledHeader>
127 |     </ClCompile>
128 |     <Link>
129 |       <SubSystem>Console</SubSystem>
130 |       <GenerateDebugInformation>true</GenerateDebugInformation>
131 |       <AdditionalDependencies>opencv_core2410.lib;opencv_ml2410d.lib;opencv_calib3d2410d.lib;opencv_contrib2410d.lib;opencv_core2410d.lib;opencv_features2d2410d.lib;opencv_flann2410d.lib;opencv_gpu2410d.lib;opencv_highgui2410d.lib;opencv_imgproc2410d.lib;opencv_legacy2410d.lib;opencv_objdetect2410d.lib;opencv_ts2410d.lib;opencv_video2410d.lib;opencv_nonfree2410d.lib;opencv_ocl2410d.lib;opencv_photo2410d.lib;opencv_stitching2410d.lib;opencv_superres2410d.lib;opencv_videostab2410d.lib;opencv_objdetect2410.lib;opencv_ts2410.lib;opencv_video2410.lib;opencv_nonfree2410.lib;opencv_ocl2410.lib;opencv_photo2410.lib;opencv_stitching2410.lib;opencv_superres2410.lib;opencv_videostab2410.lib;opencv_calib3d2410.lib;opencv_contrib2410.lib;opencv_features2d2410.lib;opencv_flann2410.lib;opencv_gpu2410.lib;opencv_highgui2410.lib;opencv_imgproc2410.lib;opencv_legacy2410.lib;opencv_ml2410.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
132 |     </Link>
133 |   </ItemDefinitionGroup>
134 |   <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
135 |     <ClCompile>
136 |       <WarningLevel>Level3</WarningLevel>
137 |       <FunctionLevelLinking>true</FunctionLevelLinking>
138 |       <IntrinsicFunctions>true</IntrinsicFunctions>
139 |       <SDLCheck>true</SDLCheck>
140 |       <PreprocessorDefinitions>NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
141 |       <ConformanceMode>true</ConformanceMode>
142 |     </ClCompile>
143 |     <Link>
144 |       <SubSystem>Console</SubSystem>
145 |       <EnableCOMDATFolding>true</EnableCOMDATFolding>
146 |       <OptimizeReferences>true</OptimizeReferences>
147 |       <GenerateDebugInformation>true</GenerateDebugInformation>
148 |     </Link>
149 |   </ItemDefinitionGroup>
150 |   <ItemGroup>
151 |     <ClCompile Include="ContourMatching.cpp" />
152 |     <ClCompile Include="ShapeMatch.cpp" />
153 |   </ItemGroup>
154 |   <ItemGroup>
155 |     <ClInclude Include="ShapeMatch.h" />
156 |   </ItemGroup>
157 |   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
158 |   <ImportGroup Label="ExtensionTargets">
159 |   </ImportGroup>
160 | </Project>
--------------------------------------------------------------------------------
/ContourMatching/ContourMatching.vcxproj.filters:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
3 |   <ItemGroup>
4 |     <Filter Include="源文件">
5 |       <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
6 |       <Extensions>cpp;c;cc;cxx;c++;cppm;ixx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
7 |     </Filter>
8 |     <Filter Include="头文件">
9 |       <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
10 |       <Extensions>h;hh;hpp;hxx;h++;hm;inl;inc;ipp;xsd</Extensions>
11 |     </Filter>
12 |     <Filter Include="资源文件">
13 |       <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
14 |       <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
15 |     </Filter>
16 |   </ItemGroup>
17 |   <ItemGroup>
18 |     <ClCompile Include="ContourMatching.cpp">
19 |       <Filter>源文件</Filter>
20 |     </ClCompile>
21 |     <ClCompile Include="ShapeMatch.cpp">
22 |       <Filter>源文件</Filter>
23 |     </ClCompile>
24 |   </ItemGroup>
25 |   <ItemGroup>
26 |     <ClInclude Include="ShapeMatch.h">
27 |       <Filter>头文件</Filter>
28 |     </ClInclude>
29 |   </ItemGroup>
30 | </Project>
--------------------------------------------------------------------------------
/ContourMatching/ContourMatching.vcxproj.user:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <Project ToolsVersion="Current" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
3 |   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
4 |     <LocalDebuggerEnvironment>PATH=$(ProjectDir)opencv2.4.10\build\x64\vc12\bin
5 | $(LocalDebuggerEnvironment)</LocalDebuggerEnvironment>
6 |     <DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
7 |   </PropertyGroup>
8 | </Project>
--------------------------------------------------------------------------------
/ContourMatching/Input/ClassiTemplate/1-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/ClassiTemplate/1-2.jpg
--------------------------------------------------------------------------------
/ContourMatching/Input/ClassiTemplate/2-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/ClassiTemplate/2-2.jpg
--------------------------------------------------------------------------------
/ContourMatching/Input/ClassiTemplate/3-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/ClassiTemplate/3-2.jpg
--------------------------------------------------------------------------------
/ContourMatching/Input/ClassiTemplate/4-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/ClassiTemplate/4-2.jpg
--------------------------------------------------------------------------------
/ContourMatching/Input/ClassiTemplate/5-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/ClassiTemplate/5-2.jpg
--------------------------------------------------------------------------------
/ContourMatching/Input/ClassiTemplate/6-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/ClassiTemplate/6-2.jpg
--------------------------------------------------------------------------------
/ContourMatching/Input/ClassiTemplate/7-2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/ClassiTemplate/7-2.jpg
--------------------------------------------------------------------------------
/ContourMatching/Input/file/Europe4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/file/Europe4.png
--------------------------------------------------------------------------------
/ContourMatching/Input/file/France1-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/file/France1-3.png
--------------------------------------------------------------------------------
/ContourMatching/Input/file/France2-3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/file/France2-3.png
--------------------------------------------------------------------------------
/ContourMatching/Input/file/Franch1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/Input/file/Franch1.png
--------------------------------------------------------------------------------
/ContourMatching/ShapeMatch.cpp:
--------------------------------------------------------------------------------
1 | #include "ShapeMatch.h"
2 | #include "omp.h"
3 | ShapeMatch::ShapeMatch()
4 | {
5 | modelDefined = false;
6 | modelHeight=0;
7 | modelWidth=0;
8 | }
9 | ShapeMatch::~ShapeMatch()
10 | {
11 | }
12 |
13 | int ShapeMatch::CreateMatchModel(IplImage *templateArr, double maxContrast, double minContrast, int pyramidnums, double anglestart, double angleend, double anglestep, double scalestart, double scaleend,double scalestep)
14 | {
15 | int scalenum = abs(scaleend - scalestart) / scalestep+1;
16 | int anglenum = abs(angleend - anglestart) / anglestep+1;
17 | scaleEdgePoints = (ScaleEdgePoints *)malloc(scalenum * sizeof(ScaleEdgePoints));
18 | //// compute the template's center of gravity
19 | gravityPoint = extract_shape_info(templateArr, maxContrast, minContrast);
20 | ////
21 | for (int i = 0; i < scalenum; i++)
22 | {
23 | scaleEdgePoints[i].angleEdgePoints= (AngleEdgePoints *)malloc(anglenum * sizeof(AngleEdgePoints));
24 | scaleEdgePoints[i].scaleVale = scalestart + i*scalestep;
25 | AngleEdgePoints *angleEdgePtr = scaleEdgePoints[i].angleEdgePoints;
26 | for (int j = 0; j < anglenum; j++)
27 | {
28 | angleEdgePtr[j].pyramidEdgePoints = (PyramidEdgePoints *)malloc((1+pyramidnums)* sizeof(PyramidEdgePoints));
29 | angleEdgePtr[j].templateAngle= anglestart + j*anglestep;
30 | PyramidEdgePoints *pyramidEdgePtr = angleEdgePtr[j].pyramidEdgePoints;
31 | IplImage * scaleAngleImage= cvCreateImage(cvSize(templateArr->width*(scalestart + i*scalestep), templateArr->height*(scalestart + i*scalestep)), IPL_DEPTH_8U, 1);
32 | cvResize(templateArr, scaleAngleImage);
33 | rotateImage(scaleAngleImage, scaleAngleImage, anglestart + j*anglestep);
34 | IplImage * tempDownImg=cvCreateImage(cvSize(round(scaleAngleImage->width), round(scaleAngleImage->height)), IPL_DEPTH_8U, 1);
35 | cvCopy(scaleAngleImage, tempDownImg);
36 | //CalEdgeCordinates(tempDownImg, maxContrast, minContrast, &(pyramidEdgePtr[0]));
37 | extract_shape_info(tempDownImg, &(pyramidEdgePtr[0]), maxContrast, minContrast );
38 | //IplImage* colorimg = cvLoadImage("1.BMP", -1);
39 | //DrawContours(scaleAngleImage, CvScalar(0, 0, 255), 1, pyramidEdgePtr[0].edgePoints, pyramidEdgePtr[0].centerOfGravity, pyramidEdgePtr[0].numOfCordinates);
40 | //cvNamedWindow("Search Image", 0);
41 | //cvShowImage("Search Image", scaleAngleImage);
42 | ////cvSaveImage("yyy.bmp", colorimg);
43 | //cvWaitKey(0);
44 | for (int k = 1; k <= pyramidnums; k++)
45 | {
46 | pyramidEdgePtr[k].level = k;
47 | CvSize size;
48 | if (tempDownImg->height % 2 == 0)
49 | size.height = tempDownImg->height >> 1;
50 | else
51 | size.height = floor(tempDownImg->height >> 1)+1;
52 | if (tempDownImg->width % 2 == 0)
53 | size.width = tempDownImg->width >> 1;
54 | else
55 | size.width = floor(tempDownImg->width >> 1) + 1;
56 | //CvSize size = cvSize(floor(tempDownImg->height>>1), floor(tempDownImg->width>>1));///
57 | IplImage* pyDownImg=cvCreateImage(size, IPL_DEPTH_8U, 1);
58 | cvPyrDown(tempDownImg, pyDownImg);
59 | //cvResize(tempDownImg, pyDownImg)
60 | tempDownImg = cvCreateImage(cvSize( pyDownImg->width, pyDownImg->height), IPL_DEPTH_8U, 1);
61 | cvCopy(pyDownImg, tempDownImg);
62 | //cvNamedWindow("Search Image", 0);
63 | //cvShowImage("Search Image", pyDownImg);
64 | //cvWaitKey(0);
65 | //CalEdgeCordinates(pyDownImg, maxContrast, minContrast, &(pyramidEdgePtr[k]));
66 | extract_shape_info(pyDownImg, &(pyramidEdgePtr[k]), maxContrast, minContrast);
67 | //DrawContours(pyDownImg, CvScalar(0, 0, 255), 1, pyramidEdgePtr[k].edgePoints, pyramidEdgePtr[k].centerOfGravity, pyramidEdgePtr[k].numOfCordinates);
68 | //cvNamedWindow("Search Image", 0);
69 | //cvShowImage("Search Image", pyDownImg);
70 | ////cvSaveImage("yyy.bmp", colorimg);
71 | //cvWaitKey(0);
72 | }
73 | }
74 | }
75 | return 1;
76 | }
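// Layout of the model built by CreateMatchModel (field names as used above):
//   scaleEdgePoints[i]                                  one entry per scale step, scaleVale = scalestart + i * scalestep
//   scaleEdgePoints[i].angleEdgePoints[j]               one entry per angle step, templateAngle = anglestart + j * anglestep
//   scaleEdgePoints[i].angleEdgePoints[j].pyramidEdgePoints[k]   edge points and gradients at pyramid level k (0 = full resolution)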
77 |
78 |
79 | int ShapeMatch::CalEdgeCordinates(IplImage *templateArr, double maxContrast, double minContrast, PyramidEdgePoints *PyramidEdgePtr)
80 | {
81 | CvMat *gx = 0; //Matrix to store X derivative
82 | CvMat *gy = 0; //Matrix to store Y derivative
83 | CvMat *nmsEdges = 0; //Matrix to store temp result
84 | CvSize Ssize;
85 |
86 | // Convert IplImage to Matrix for integer operations
87 | CvMat srcstub, *src = (CvMat*)templateArr;
88 | src = cvGetMat(src, &srcstub);
89 | if (CV_MAT_TYPE(src->type) != CV_8UC1)
90 | {
91 | return 0;
92 | }
93 |
94 | // set width and height
95 | Ssize.width = src->width;
96 | Ssize.height = src->height;
97 | modelHeight = src->height; //Save Template height
98 | modelWidth = src->width; //Save Template width
99 |
100 | PyramidEdgePtr->numOfCordinates = 0; //initialize
101 | PyramidEdgePtr->edgePoints = new Point[modelWidth *modelHeight]; //Allocate memory for coordinates of selected points in template image
102 |
103 | PyramidEdgePtr->edgeMagnitude = new double[modelWidth *modelHeight]; //Allocate memory for edge magnitude for selected points
104 | PyramidEdgePtr->edgeDerivativeX = new double[modelWidth *modelHeight]; //Allocate memory for edge X derivative for selected points
105 | PyramidEdgePtr->edgeDerivativeY = new double[modelWidth *modelHeight]; ////Allocate memory for edge Y derivative for selected points
106 | // Calculate gradient of Template
107 | gx = cvCreateMat(Ssize.height, Ssize.width, CV_16SC1); //create Matrix to store X derivative
108 | gy = cvCreateMat(Ssize.height, Ssize.width, CV_16SC1); //create Matrix to store Y derivative
109 | cvSobel(src, gx, 1, 0, 3); //gradient in X direction
110 | cvSobel(src, gy, 0, 1, 3); //gradient in Y direction
111 |
112 | nmsEdges = cvCreateMat(Ssize.height, Ssize.width, CV_32F); //create Matrix to store Final nmsEdges
113 | const short* _sdx;
114 | const short* _sdy;
115 | double fdx, fdy;
116 | double MagG, DirG;
117 | double MaxGradient = -99999.99;
118 | double direction;
119 | int *orients = new int[Ssize.height *Ssize.width];
120 | int count = 0, i, j; // count variable;
121 |
122 | double **magMat;
123 | CreateDoubleMatrix(magMat, Ssize);
124 |
125 | for (i = 1; i < Ssize.height - 1; i++)
126 | {
127 | for (j = 1; j < Ssize.width - 1; j++)
128 | {
129 | _sdx = (short*)(gx->data.ptr + gx->step*i);
130 | _sdy = (short*)(gy->data.ptr + gy->step*i);
131 | fdx = _sdx[j]; fdy = _sdy[j]; // read x, y derivatives
132 |
133 | MagG = sqrt((float)(fdx*fdx) + (float)(fdy*fdy)); //Magnitude = Sqrt(gx^2 +gy^2)
134 | direction = cvFastArctan((float)fdy, (float)fdx); //Direction = invtan (Gy / Gx)
135 | magMat[i][j] = MagG;
136 |
137 | if (MagG>MaxGradient)
138 | MaxGradient = MagG; // get maximum gradient value for normalizing.
139 |
140 |
141 | // get closest angle from 0, 45, 90, 135 set
142 | if ((direction>0 && direction < 22.5) || (direction >157.5 && direction < 202.5) || (direction>337.5 && direction<360))
143 | direction = 0;
144 | else if ((direction>22.5 && direction < 67.5) || (direction >202.5 && direction <247.5))
145 | direction = 45;
146 | else if ((direction >67.5 && direction < 112.5) || (direction>247.5 && direction<292.5))
147 | direction = 90;
148 | else if ((direction >112.5 && direction < 157.5) || (direction>292.5 && direction<337.5))
149 | direction = 135;
150 | else
151 | direction = 0;
152 |
153 | orients[count] = (int)direction;
154 | count++;
155 | }
156 | }
157 |
158 | count = 0; // init count
159 | // non-maximum suppression
160 | double leftPixel, rightPixel;
161 |
162 | for (i = 1; i < Ssize.height - 1; i++)
163 | {
164 | for (j = 1; j < Ssize.width - 1; j++)
165 | {
166 | switch (orients[count])
167 | {
168 | case 0:
169 | leftPixel = magMat[i][j - 1];
170 | rightPixel = magMat[i][j + 1];
171 | break;
172 | case 45:
173 | leftPixel = magMat[i - 1][j + 1];
174 | rightPixel = magMat[i + 1][j - 1];
175 | break;
176 | case 90:
177 | leftPixel = magMat[i - 1][j];
178 | rightPixel = magMat[i + 1][j];
179 | break;
180 | case 135:
181 | leftPixel = magMat[i - 1][j - 1];
182 | rightPixel = magMat[i + 1][j + 1];
183 | break;
184 | }
185 | // compare current pixels value with adjacent pixels
186 | if ((magMat[i][j] < leftPixel) || (magMat[i][j] < rightPixel))
187 | (nmsEdges->data.ptr + nmsEdges->step*i)[j] = 0;
188 | else
189 | (nmsEdges->data.ptr + nmsEdges->step*i)[j] = (uchar)(magMat[i][j] / MaxGradient * 255);
190 |
191 | count++;
192 | }
193 | }
194 |
195 |
196 | int RSum = 0, CSum = 0;
197 | int curX, curY;
198 | int flag = 1;
199 |
200 | // Hysteresis threshold
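// Keep a pixel when its suppressed response is >= maxContrast; keep a pixel in
// [minContrast, maxContrast) only if at least one of its 8 neighbours reaches maxContrast;
// discard everything below minContrast.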
201 | for (i = 1; i < Ssize.height - 1; i++)
202 | {
203 | for (j = 1; j < Ssize.width - 1; j++)
204 | {
205 | _sdx = (short*)(gx->data.ptr + gx->step*i);
206 | _sdy = (short*)(gy->data.ptr + gy->step*i);
207 | fdx = _sdx[j]; fdy = _sdy[j];
208 |
209 | MagG = sqrt(fdx*fdx + fdy*fdy); //Magnitude = Sqrt(gx^2 +gy^2)
210 | DirG = cvFastArctan((float)fdy, (float)fdx); //Direction = tan(y/x)
211 |
212 | ////((uchar*)(imgGDir->imageData + imgGDir->widthStep*i))[j]= MagG;
213 | flag = 1;
214 | if (((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j]) < maxContrast)
215 | {
216 | if (((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j])< minContrast)
217 | {
218 |
219 | (nmsEdges->data.ptr + nmsEdges->step*i)[j] = 0;
220 | flag = 0; // remove from edge
221 | ////((uchar*)(imgGDir->imageData + imgGDir->widthStep*i))[j]=0;
222 | }
223 | else
224 | { // if none of the 8 neighboring pixels exceeds the high threshold, remove from edge
225 | if ((((double)((nmsEdges->data.ptr + nmsEdges->step*(i - 1)))[j - 1]) < maxContrast) &&
226 | (((double)((nmsEdges->data.ptr + nmsEdges->step*(i - 1)))[j]) < maxContrast) &&
227 | (((double)((nmsEdges->data.ptr + nmsEdges->step*(i - 1)))[j + 1]) < maxContrast) &&
228 | (((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j - 1]) < maxContrast) &&
229 | (((double)((nmsEdges->data.ptr + nmsEdges->step*i))[j + 1]) < maxContrast) &&
230 | (((double)((nmsEdges->data.ptr + nmsEdges->step*(i + 1)))[j - 1]) < maxContrast) &&
231 | (((double)((nmsEdges->data.ptr + nmsEdges->step*(i + 1)))[j]) < maxContrast) &&
232 | (((double)((nmsEdges->data.ptr + nmsEdges->step*(i + 1)))[j + 1]) < maxContrast))
233 | {
234 | (nmsEdges->data.ptr + nmsEdges->step*i)[j] = 0;
235 | flag = 0;
236 | ////((uchar*)(imgGDir->imageData + imgGDir->widthStep*i))[j]=0;
237 | }
238 | }
239 |
240 | }
241 |
242 | // save selected edge information
243 | curX = i; curY = j;
244 | if (flag != 0)
245 | {
246 | if (fdx != 0 || fdy != 0)
247 | {
248 | RSum = RSum + curX; CSum = CSum + curY; // Row sum and column sum for center of gravity
249 | PyramidEdgePtr->edgePoints[PyramidEdgePtr->numOfCordinates].x = curX;
250 | PyramidEdgePtr->edgePoints[PyramidEdgePtr->numOfCordinates].y = curY;
251 | PyramidEdgePtr->edgeDerivativeX[PyramidEdgePtr->numOfCordinates] = fdx;
252 | PyramidEdgePtr->edgeDerivativeY[PyramidEdgePtr->numOfCordinates] = fdy;
253 |
254 | //handle divide by zero
255 | if (MagG != 0)
256 | PyramidEdgePtr->edgeMagnitude[PyramidEdgePtr->numOfCordinates] = 1 / MagG; // gradient magnitude
257 | else
258 | PyramidEdgePtr->edgeMagnitude[PyramidEdgePtr->numOfCordinates] = 0;
259 |
260 | PyramidEdgePtr->numOfCordinates++;
261 | }
262 | }
263 | }
264 | }
265 |
266 | PyramidEdgePtr->centerOfGravity.x = RSum / PyramidEdgePtr->numOfCordinates; // center of gravity
267 | PyramidEdgePtr->centerOfGravity.y = CSum / PyramidEdgePtr->numOfCordinates; // center of gravity
268 |
269 | // change coordinates to reflect center of gravity
270 | for (int m = 0; m < PyramidEdgePtr->numOfCordinates; m++)
271 | {
272 | int temp;
273 |
274 | temp = PyramidEdgePtr->edgePoints[m].x;
275 | PyramidEdgePtr->edgePoints[m].x = temp - PyramidEdgePtr->centerOfGravity.x;
276 | temp = PyramidEdgePtr->edgePoints[m].y;
277 | PyramidEdgePtr->edgePoints[m].y = temp - PyramidEdgePtr->centerOfGravity.y;
278 | }
279 |
280 | ////cvSaveImage("Edges.bmp",imgGDir);
281 |
282 | // free alocated memories
283 | delete[] orients;
284 | ////cvReleaseImage(&imgGDir);
285 | cvReleaseMat(&gx);
286 | cvReleaseMat(&gy);
287 | cvReleaseMat(&nmsEdges);
288 | ReleaseDoubleMatrix(magMat, Ssize.height);
289 | modelDefined = true;
290 | return 1;
291 | }
292 | // allocate memory for a double matrix
293 | void ShapeMatch::CreateDoubleMatrix(double **&matrix, Size size)
294 | {
295 | matrix = new double*[size.height];
296 | for (int iInd = 0; iInd < size.height; iInd++)
297 | matrix[iInd] = new double[size.width];
298 | }
299 |
300 | // release memory
301 | void ShapeMatch::ReleaseDoubleMatrix(double **&matrix, int size)
302 | {
303 | for (int iInd = 0; iInd < size; iInd++)
304 | delete[] matrix[iInd];
305 | delete[] matrix; } // also free the array of row pointers
306 | void ShapeMatch::rotateImage(IplImage* srcImage, IplImage* dstImage, float Angle)
307 | {
308 | float m[6];
309 | m[0] = (float)cos(Angle * CV_PI / 180.);
310 | m[1] = (float)sin(Angle * CV_PI / 180.);
311 | m[3] = -m[1];
312 | m[4] = m[0];
313 | m[2] = gravityPoint.x;
314 | m[5] = gravityPoint.y;
315 | //m[2] = srcImage->width * 0.5f;
316 | //m[5] = srcImage->width * 0.5f;
317 | CvMat M = cvMat(2, 3, CV_32F, m);
318 | cvGetQuadrangleSubPix(srcImage, dstImage, &M);
319 | }
320 |
321 | double ShapeMatch::FindGeoMatchModel(IplImage* srcarr, double minScore, double greediness, CvPoint *resultPoint, int pyramidnums, double anglestart, double angleend, double anglestep, double scalestart, double scaleend, double scalestep)
322 | {
323 | if (srcarr == NULL)
324 | return -1;
325 | CvSize srcImgSize = cvSize(srcarr->width, srcarr->height);
326 | IplImage* grayImg = cvCreateImage(srcImgSize, IPL_DEPTH_8U, 1);
327 |
328 | // Convert color image to gray image.
329 | if (srcarr->nChannels == 3)
330 | {
331 | cvCvtColor(srcarr, grayImg, CV_RGB2GRAY);
332 | }
333 | else
334 | {
335 | cvCopy(srcarr, grayImg);
336 | }
337 | double resultScore = 0;
338 | double maxScore=0;
339 | int maxScoreId=0;
340 | PyramidEdgePoints *matchEdgePoints=new PyramidEdgePoints; /////////// temporarily commented out
341 | double partialSum = 0;
342 | double sumOfCoords = 0;
343 | double partialScore;
344 | CvSize Ssize;
345 | cv::Point tempMatchPoint(0,0);
346 | AngleEdgePoints *angleEdgePtr;
347 | PyramidEdgePoints *pyramidEdgePtr;
348 | int scalenum = abs(scaleend - scalestart) / scalestep + 1;
349 | int anglenum = abs(angleend - anglestart) / anglestep + 1;
350 | ImgEdgeInfo *imgEdgeInfo= (ImgEdgeInfo *)malloc((pyramidnums + 1) * sizeof(ImgEdgeInfo));
351 |
352 | IplImageArr *pyDownImgArr= (IplImageArr *)malloc((pyramidnums+1) * sizeof(IplImageArr));
353 | IplImage * tempDownImg = cvCreateImage(cvSize(grayImg->width, grayImg->height), IPL_DEPTH_8U, 1);
354 | cvCopy(grayImg, tempDownImg);
355 | pyDownImgArr[0].img = cvCreateImage(cvSize(grayImg->width, grayImg->height), IPL_DEPTH_8U, 1);
356 | cvCopy(grayImg, pyDownImgArr[0].img);
357 | CalSearchImgEdg(tempDownImg, &(imgEdgeInfo[0]));
358 | for (int i=1;i<=pyramidnums;i++)
359 | {
360 | CvSize size;
361 | if (tempDownImg->height % 2 == 0)
362 | size.height = tempDownImg->height >> 1;
363 | else
364 | size.height = floor(tempDownImg->height >> 1) + 1;
365 | if (tempDownImg->width % 2 == 0)
366 | size.width = tempDownImg->width >> 1;
367 | else
368 | size.width = floor(tempDownImg->width >> 1) + 1;
369 | //CvSize size = cvSize(floor(tempDownImg->height>>1), floor(tempDownImg->width>>1));///
370 | IplImage* pyDownImg = cvCreateImage(size, IPL_DEPTH_8U, 1);
371 | pyDownImgArr[i].img= cvCreateImage(size, IPL_DEPTH_8U, 1);
372 | cvPyrDown(tempDownImg, pyDownImg);
373 | cvReleaseImage(&tempDownImg);
374 | tempDownImg = cvCreateImage(cvSize(pyDownImg->width, pyDownImg->height), IPL_DEPTH_8U, 1);
375 | cvCopy(pyDownImg, tempDownImg);
376 | cvCopy(pyDownImg, pyDownImgArr[i].img);
377 | CalSearchImgEdg(tempDownImg, &(imgEdgeInfo[i]));
378 | cvReleaseImage(&pyDownImg);
379 | /*cvNamedWindow("Search Image", 0);
380 | cvShowImage("Search Image", tempDownImg);
381 | cvWaitKey(0);*/
382 | //cvSaveImage("tempimg.png", tempDownImg);
383 | }
384 | // #pragma omp parallel for
385 | MatchResult *ResultList = new MatchResult;
386 | MatchResult *ResultLists = new MatchResult[9999];
387 | int matcnnums = 0;
388 | search_region *SearchRegion = new search_region;
389 | for (int ii = 0; ii < scalenum; ii++)
390 | {
391 | angleEdgePtr = scaleEdgePoints[ii].angleEdgePoints;
392 | for (int jj = 0; jj < anglenum; jj++)
393 | {
394 | pyramidEdgePtr = angleEdgePtr[jj].pyramidEdgePoints;
395 |
396 | ResultList->CenterLocX = 0;
397 | ResultList->CenterLocY = 0;
398 |
399 | SearchRegion->EndX = pyDownImgArr[pyramidnums].img->width-1; SearchRegion->EndY = pyDownImgArr[pyramidnums].img->height - 1;
400 | SearchRegion->StartX = 1; SearchRegion->StartY = 1;
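// Coarse-to-fine refinement: match at the top pyramid level over the full image, then at each
// finer level map the best center to twice its coordinates and re-match only in a +/-6 pixel window.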
401 | for (int kk = pyramidnums; kk >= 0; kk--)
402 | {
403 | ResultList->CenterLocX = 0;
404 | ResultList->CenterLocY = 0;
405 | shape_match_accurate(pyDownImgArr[kk].img, &(pyramidEdgePtr[kk]), 80, 20, ///// the 80, 20 parameters still need tuning
406 | minScore, greediness,SearchRegion,ResultList, &(imgEdgeInfo[kk]));
407 | if (ResultList->CenterLocX == 0 || ResultList->CenterLocY == 0)
408 | {
409 | break;
410 | }
411 | else
412 | {
413 | SearchRegion->StartX = ResultList->CenterLocX*2 - 6;
414 | SearchRegion->StartY = ResultList->CenterLocY *2 - 6;
415 | SearchRegion->EndX = ResultList->CenterLocX *2 +6;
416 | SearchRegion->EndY = ResultList->CenterLocY * 2 + 6;
417 | resultScore = ResultList->ResultScore;
418 | }
419 | }
420 | if (resultScore > minScore&&matcnnums<9999)
421 | {
422 | if (resultScore > maxScore)
423 | {
424 | maxScore = resultScore;
425 | maxScoreId = matcnnums;
426 | matchEdgePoints = &(pyramidEdgePtr[0]); ////////////////////// temporarily commented out
427 | }
428 | ResultLists[matcnnums].ResultScore = resultScore;
429 | ResultLists[matcnnums].CenterLocX= ResultList->CenterLocX ;
430 | ResultLists[matcnnums].CenterLocY= ResultList->CenterLocY;
431 | ResultLists[matcnnums].scale = scaleEdgePoints[ii].scaleVale;
432 | ResultLists[matcnnums].Angel = angleEdgePtr[jj].templateAngle;
433 | matcnnums++;
434 | ResultLists[matcnnums].nums = matcnnums;
435 | }
436 | }
437 | }
438 | if (matcnnums > 0)
439 | {
440 | resultPoint->x = ResultLists[maxScoreId].CenterLocX; resultPoint->y = ResultLists[maxScoreId].CenterLocY;
441 | }
442 | //if (matcnnums > 0)
443 | //{
444 | // cout << "Best match ------------------------------------" << endl;
445 | // cout << "Score: " << ResultLists[maxScoreId].ResultScore << endl;
446 | // cout << "x: " << ResultLists[maxScoreId].CenterLocX << endl;
447 | // cout << "y: " << ResultLists[maxScoreId].CenterLocY << endl;
448 | // cout << "Scale: " << ResultLists[maxScoreId].scale << endl;
449 | // cout << "Angle: " << ResultLists[maxScoreId].Angel << endl;
450 | // cout << endl;
451 |
452 | //} /// temporarily commented out
453 |
454 | if (matcnnums > 0)
455 | {
456 | //DrawContours(srcarr, CvScalar(0, 0, 255), 1, matchEdgePoints->edgePoints, Point(ResultLists[maxScoreId].CenterLocX, ResultLists[maxScoreId].CenterLocY), matchEdgePoints->numOfCordinates);
457 | }
458 | /*cvNamedWindow("Search Image", 0);
459 | cvShowImage("Search Image", srcarr);
460 | cvWaitKey(600);*/
461 | //////
462 | //cvDestroyWindow("Search Image");
463 | //cvReleaseImage(&srcarr);
464 | delete ResultList; ResultList = NULL;
465 | delete []ResultLists; ResultLists = NULL;
466 | delete SearchRegion; SearchRegion = NULL;
467 | ///////
468 | //delete matchEdgePoints;
469 | //////
470 | ///// Memory release below assumes pyramidnums = 3; adjust these frees yourself if the pyramid level count changes
471 | free(imgEdgeInfo[0].pBufGradX); free(imgEdgeInfo[0].pBufGradY); free(imgEdgeInfo[0].pBufMag); imgEdgeInfo[0].pBufGradX = NULL; imgEdgeInfo[0].pBufGradY = NULL; imgEdgeInfo[0].pBufMag = NULL;
472 | free(imgEdgeInfo[1].pBufGradX); free(imgEdgeInfo[1].pBufGradY); free(imgEdgeInfo[1].pBufMag); imgEdgeInfo[1].pBufGradX = NULL; imgEdgeInfo[1].pBufGradY = NULL; imgEdgeInfo[1].pBufMag = NULL;
473 | free(imgEdgeInfo[2].pBufGradX); free(imgEdgeInfo[2].pBufGradY); free(imgEdgeInfo[2].pBufMag); imgEdgeInfo[2].pBufGradX = NULL; imgEdgeInfo[2].pBufGradY = NULL; imgEdgeInfo[2].pBufMag = NULL;
474 | free(imgEdgeInfo[3].pBufGradX); free(imgEdgeInfo[3].pBufGradY); free(imgEdgeInfo[3].pBufMag); imgEdgeInfo[3].pBufGradX = NULL; imgEdgeInfo[3].pBufGradY = NULL; imgEdgeInfo[3].pBufMag = NULL;
475 | /////
476 | free(imgEdgeInfo); imgEdgeInfo = NULL;
477 | ///////////
478 | cvReleaseImage(&(pyDownImgArr[0].img)); cvReleaseImage(&(pyDownImgArr[1].img)); cvReleaseImage(&(pyDownImgArr[2].img));
479 | cvReleaseImage(&(pyDownImgArr[3].img));
480 | ///////////
481 | free(pyDownImgArr) ; pyDownImgArr = NULL;
482 | cvReleaseImage(&grayImg);
483 | cvReleaseImage(&tempDownImg);
484 | return resultScore;
485 |
486 | }
487 | void ShapeMatch::DrawContours(IplImage* source, CvScalar color, int lineWidth, Point *cordinates, Point centerOfGravity,int noOfCordinates)
488 | {
489 | CvPoint point;
490 | for (int i = 0; i < noOfCordinates; i++)
491 | {
492 | point.x = cordinates[i].x + centerOfGravity.x;
493 | point.y = cordinates[i].y + centerOfGravity.y;
494 | cvLine(source, point, point, color, lineWidth);
495 | }
496 | }
497 |
498 |
499 |
500 | ///////////////////// extract edge points and gradients for one (scale, angle) template at one pyramid level
501 | void ShapeMatch::extract_shape_info(IplImage *ImageData, PyramidEdgePoints *PyramidEdgePtr, int Contrast, int MinContrast)
502 | {
503 |
504 | /* source image size */
505 | int width = ImageData->width;
506 | int height = ImageData->height;
507 | int widthstep = ImageData->widthStep;
508 | /* Compute buffer sizes */
509 | uint32_t bufferSize = widthstep * height;
510 | PyramidEdgePtr->numOfCordinates = 0; //initialize
511 | PyramidEdgePtr->edgePoints = new Point[bufferSize]; //Allocate memory for coordinates of selected points in template image
512 |
513 | PyramidEdgePtr->edgeMagnitude = new double[bufferSize]; //Allocate memory for edge magnitude for selected points
514 | PyramidEdgePtr->edgeDerivativeX = new double[bufferSize]; //Allocate memory for edge X derivative for selected points
515 | PyramidEdgePtr->edgeDerivativeY = new double[bufferSize]; ////Allocate memory for edge Y derivative for selected points
516 |
517 | /* Allocate buffers for each vector */
518 | uint8_t *pInput = (uint8_t *)malloc(bufferSize * sizeof(uint8_t));
519 | uint8_t *pBufOut = (uint8_t *)malloc(bufferSize * sizeof(uint8_t));
520 | int16_t *pBufGradX = (int16_t *)malloc(bufferSize * sizeof(int16_t));
521 | int16_t *pBufGradY = (int16_t *)malloc(bufferSize * sizeof(int16_t));
522 | int32_t *pBufOrien = (int32_t *)malloc(bufferSize * sizeof(int32_t));
523 | float *pBufMag = (float *)malloc(bufferSize * sizeof(float));
524 |
525 |
526 | if (pInput && pBufGradX && pBufGradY && pBufMag && pBufOrien && pBufOut)
527 | {
528 | //gaussian_filter(ImageData, pInput, width, height);
529 | memcpy(pInput, ImageData->imageData, bufferSize * sizeof(uint8_t));
530 | memset(pBufGradX, 0, bufferSize * sizeof(int16_t));
531 | memset(pBufGradY, 0, bufferSize * sizeof(int16_t));
532 | memset(pBufOrien, 0, bufferSize * sizeof(int32_t));
533 | memset(pBufOut, 0, bufferSize * sizeof(uint8_t));
534 | memset(pBufMag, 0, bufferSize * sizeof(float));
535 |
536 | float MaxGradient = -9999.99f;
537 | int count = 0, i, j; // count variable;
538 |
539 | for (i = 1; i < width - 1; i++)
540 | {
541 | for (j = 1; j < height - 1; j++)
542 | {
543 | int16_t sdx = *(pInput + j*widthstep + i + 1) - *(pInput + j*widthstep + i - 1);
544 | int16_t sdy = *(pInput + (j + 1)*widthstep + i) - *(pInput + (j - 1)*widthstep + i);
545 | *(pBufGradX + j*widthstep + i) = sdx;
546 | *(pBufGradY + j*widthstep + i) = sdy;
547 | float MagG = sqrt((float)(sdx*sdx) + (float)(sdy*sdy));
548 | *(pBufMag + j*widthstep + i) = MagG;
549 |
550 | // get maximum gradient value for normalizing.
551 | if (MagG>MaxGradient)
552 | MaxGradient = MagG;
553 | }
554 | }
555 |
556 | for (i = 1; i < width - 1; i++)
557 | {
558 | for (j = 1; j < height - 1; j++)
559 | {
560 | int16_t fdx = *(pBufGradX + j*widthstep + i);
561 | int16_t fdy = *(pBufGradY + j*widthstep + i);
562 |
563 | float direction = cvFastArctan((float)fdy, (float)fdx); //Direction = invtan (Gy / Gx)
564 |
565 | // get closest angle from 0, 45, 90, 135 set
566 | if ((direction>0 && direction < 22.5) || (direction >157.5 && direction < 202.5) || (direction>337.5 && direction<360))
567 | direction = 0;
568 | else if ((direction>22.5 && direction < 67.5) || (direction >202.5 && direction <247.5))
569 | direction = 45;
570 | else if ((direction >67.5 && direction < 112.5) || (direction>247.5 && direction<292.5))
571 | direction = 90;
572 | else if ((direction >112.5 && direction < 157.5) || (direction>292.5 && direction<337.5))
573 | direction = 135;
574 | else
575 | direction = 0;
576 |
577 | pBufOrien[count] = (int32_t)direction;
578 | count++;
579 | }
580 | }
581 |
582 | count = 0; // init count
583 | // non maximum suppression
584 | float leftPixel, rightPixel;
585 |
586 | for (i = 1; i < width - 1; i++)
587 | {
588 | for (j = 1; j < height - 1; j++)
589 | {
590 | switch (pBufOrien[count])
591 | {
592 | case 0:
593 | leftPixel = *(pBufMag + j*widthstep + i - 1);
594 | rightPixel = *(pBufMag + j*widthstep + i + 1);
595 | break;
596 | case 45:
597 | leftPixel = *(pBufMag + (j - 1)*widthstep + i - 1);
598 | rightPixel = *(pBufMag + (j + 1)*widthstep + i + 1);
599 | break;
600 | case 90:
601 | leftPixel = *(pBufMag + (j - 1)*widthstep + i);
602 | rightPixel = *(pBufMag + (j + 1)*widthstep + i);
603 |
604 | break;
605 | case 135:
606 | leftPixel = *(pBufMag + (j + 1)*widthstep + i - 1);
607 | rightPixel = *(pBufMag + (j - 1)*widthstep + i + 1);
608 | break;
609 | }
610 | // compare current pixels value with adjacent pixels
611 | if ((*(pBufMag + j*widthstep + i) < leftPixel) || (*(pBufMag + j*widthstep + i) < rightPixel))
612 | {
613 | *(pBufOut + j*widthstep + i) = 0;
614 | }
615 | else
616 | *(pBufOut + j*widthstep + i) = (uint8_t)(*(pBufMag + j*widthstep + i) / MaxGradient * 255);
617 |
618 | count++;
619 | }
620 | }
621 | int RSum = 0, CSum = 0;
622 | int curX, curY;
623 | int flag = 1;
624 | int n = 0;
625 | int iPr = 1;
626 | //Hysteresis threshold
627 | for (i = 1; i < width - 1; i += iPr)
628 | {
629 | for (j = 1; j < height - 1; j += iPr)
630 | {
631 | int16_t fdx = *(pBufGradX + j*widthstep + i);
632 | int16_t fdy = *(pBufGradY + j*widthstep + i);
633 | float MagG = *(pBufMag + j*widthstep + i);
634 |
635 | flag = 1;
636 | if ((float)*(pBufOut + j*widthstep + i) < Contrast)
637 | {
638 | if ((float)*(pBufOut + j*widthstep + i) < MinContrast)
639 | {
640 | *(pBufOut + j*widthstep + i) = 0;
641 | flag = 0; // remove from edge
642 | }
643 | else
644 | { // if none of the 8 neighboring pixels exceeds the high threshold, remove from edge
645 | if (((float)*(pBufOut + (j - 1)*widthstep + i - 1) < Contrast) &&
646 | ((float)*(pBufOut + j * widthstep + i - 1) < Contrast) &&
647 | ((float)*(pBufOut + (j + 1) * widthstep + i - 1) < Contrast) &&
648 | ((float)*(pBufOut + (j - 1) * widthstep + i) < Contrast) &&
649 | ((float)*(pBufOut + (j + 1)* widthstep + i) < Contrast) &&
650 | ((float)*(pBufOut + (j - 1) * widthstep + i + 1) < Contrast) &&
651 | ((float)*(pBufOut + j * widthstep + i + 1) < Contrast) &&
652 | ((float)*(pBufOut + (j + 1) * widthstep + i + 1) < Contrast))
653 | {
654 | *(pBufOut + j*widthstep + i) = 0;
655 | flag = 0;
656 | }
657 | }
658 | }
659 |
660 | // save selected edge information
661 | curX = i; curY = j;
662 | if (flag != 0)
663 | {
664 | if (fdx != 0 || fdy != 0)
665 | {
666 | RSum = RSum + curX;
667 | CSum = CSum + curY; // Row sum and column sum for center of gravity
668 |
669 | PyramidEdgePtr->edgePoints[n].x = curX;
670 | PyramidEdgePtr->edgePoints[n].y = curY;
671 | PyramidEdgePtr->edgeDerivativeX[n] = fdx;
672 | PyramidEdgePtr->edgeDerivativeY[n] = fdy;
673 |
674 | //handle divide by zero
675 | if (MagG != 0)
676 | PyramidEdgePtr->edgeMagnitude[n] = 1 / MagG; // gradient magnitude
677 | else
678 | PyramidEdgePtr->edgeMagnitude[n] = 0;
679 | n++;
680 | }
681 | }
682 | }
683 | }
684 | if (n != 0)
685 | {
686 | PyramidEdgePtr->numOfCordinates = n;
687 | PyramidEdgePtr->centerOfGravity.x = RSum / n; // center of gravity
688 | PyramidEdgePtr->centerOfGravity.y = CSum / n; // center of gravity
689 | //PyramidEdgePtr->centerOfGravity.x = width / 2; // center of image
690 | //PyramidEdgePtr->centerOfGravity.y = height / 2; // center of image
691 | }
692 | // change coordinates to reflect center of reference
693 | int m, temp;
694 | for (m = 0; m < PyramidEdgePtr->numOfCordinates; m++)
695 | {
696 | temp = (PyramidEdgePtr->edgePoints + m)->x;
697 | (PyramidEdgePtr->edgePoints + m)->x = temp - PyramidEdgePtr->centerOfGravity.x;
698 | temp = (PyramidEdgePtr->edgePoints + m)->y;
699 | (PyramidEdgePtr->edgePoints + m)->y = temp - PyramidEdgePtr->centerOfGravity.y;
700 | }
701 | }
702 |
703 | free(pBufMag);
704 | free(pBufOrien);
705 | free(pBufGradY);
706 | free(pBufGradX);
707 | free(pBufOut);
708 | free(pInput);
709 | }
710 |
711 | ///////////////////// extract the center of gravity
712 | Point ShapeMatch::extract_shape_info(IplImage *ImageData, int Contrast, int MinContrast)
713 | {
714 | Point gravity = Point(0, 0);
715 | PyramidEdgePoints *PyramidEdgePtr = new PyramidEdgePoints;
716 | /* source image size */
717 | int width = ImageData->width;
718 | int height = ImageData->height;
719 | int widthstep = ImageData->widthStep;
720 | /* Compute buffer sizes */
721 | uint32_t bufferSize = widthstep * height;
722 | PyramidEdgePtr->numOfCordinates = 0; //initialize
723 | PyramidEdgePtr->edgePoints = new Point[bufferSize]; //Allocate memory for coordinates of selected points in template image
724 |
725 | PyramidEdgePtr->edgeMagnitude = new double[bufferSize]; //Allocate memory for edge magnitude for selected points
726 | PyramidEdgePtr->edgeDerivativeX = new double[bufferSize]; //Allocate memory for edge X derivative for selected points
727 | PyramidEdgePtr->edgeDerivativeY = new double[bufferSize]; ////Allocate memory for edge Y derivative for selected points
728 |
729 | /* Allocate buffers for each vector */
730 | uint8_t *pInput = (uint8_t *)malloc(bufferSize * sizeof(uint8_t));
731 | uint8_t *pBufOut = (uint8_t *)malloc(bufferSize * sizeof(uint8_t));
732 | int16_t *pBufGradX = (int16_t *)malloc(bufferSize * sizeof(int16_t));
733 | int16_t *pBufGradY = (int16_t *)malloc(bufferSize * sizeof(int16_t));
734 | int32_t *pBufOrien = (int32_t *)malloc(bufferSize * sizeof(int32_t));
735 | float *pBufMag = (float *)malloc(bufferSize * sizeof(float));
736 |
737 |
738 | if (pInput && pBufGradX && pBufGradY && pBufMag && pBufOrien && pBufOut)
739 | {
740 | //gaussian_filter(ImageData, pInput, width, height);
741 | memcpy(pInput, ImageData->imageData, bufferSize * sizeof(uint8_t));
742 | memset(pBufGradX, 0, bufferSize * sizeof(int16_t));
743 | memset(pBufGradY, 0, bufferSize * sizeof(int16_t));
744 | memset(pBufOrien, 0, bufferSize * sizeof(int32_t));
745 | memset(pBufOut, 0, bufferSize * sizeof(uint8_t));
746 | memset(pBufMag, 0, bufferSize * sizeof(float));
747 |
748 | float MaxGradient = -9999.99f;
749 | int count = 0, i, j; // count variable;
750 |
751 | for (i = 1; i < width - 1; i++)
752 | {
753 | for (j = 1; j < height - 1; j++)
754 | {
755 | int16_t sdx = *(pInput + j*widthstep + i + 1) - *(pInput + j*widthstep + i - 1);
756 | int16_t sdy = *(pInput + (j + 1)*widthstep + i) - *(pInput + (j - 1)*widthstep + i);
757 | *(pBufGradX + j*widthstep + i) = sdx;
758 | *(pBufGradY + j*widthstep + i) = sdy;
759 | float MagG = sqrt((float)(sdx*sdx) + (float)(sdy*sdy));
760 | *(pBufMag + j*widthstep + i) = MagG;
761 |
762 | // get maximum gradient value for normalizing.
763 | if (MagG>MaxGradient)
764 | MaxGradient = MagG;
765 | }
766 | }
767 |
768 | for (i = 1; i < width - 1; i++)
769 | {
770 | for (j = 1; j < height - 1; j++)
771 | {
772 | int16_t fdx = *(pBufGradX + j*widthstep + i);
773 | int16_t fdy = *(pBufGradY + j*widthstep + i);
774 |
775 | float direction = cvFastArctan((float)fdy, (float)fdx); //Direction = invtan (Gy / Gx)
776 |
777 | // get closest angle from 0, 45, 90, 135 set
778 | if ((direction>0 && direction < 22.5) || (direction >157.5 && direction < 202.5) || (direction>337.5 && direction<360))
779 | direction = 0;
780 | else if ((direction>22.5 && direction < 67.5) || (direction >202.5 && direction <247.5))
781 | direction = 45;
782 | else if ((direction >67.5 && direction < 112.5) || (direction>247.5 && direction<292.5))
783 | direction = 90;
784 | else if ((direction >112.5 && direction < 157.5) || (direction>292.5 && direction<337.5))
785 | direction = 135;
786 | else
787 | direction = 0;
788 |
789 | pBufOrien[count] = (int32_t)direction;
790 | count++;
791 | }
792 | }
793 |
794 | count = 0; // init count
795 | // non maximum suppression
796 | float leftPixel, rightPixel;
797 |
798 | for (i = 1; i < width - 1; i++)
799 | {
800 | for (j = 1; j < height - 1; j++)
801 | {
802 | switch (pBufOrien[count])
803 | {
804 | case 0:
805 | leftPixel = *(pBufMag + j*widthstep + i - 1);
806 | rightPixel = *(pBufMag + j*widthstep + i + 1);
807 | break;
808 | case 45:
809 | leftPixel = *(pBufMag + (j - 1)*widthstep + i - 1);
810 | rightPixel = *(pBufMag + (j + 1)*widthstep + i + 1);
811 | break;
812 | case 90:
813 | leftPixel = *(pBufMag + (j - 1)*widthstep + i);
814 | rightPixel = *(pBufMag + (j + 1)*widthstep + i);
815 |
816 | break;
817 | case 135:
818 | leftPixel = *(pBufMag + (j + 1)*widthstep + i - 1);
819 | rightPixel = *(pBufMag + (j - 1)*widthstep + i + 1);
820 | break;
821 | }
822 | // compare current pixels value with adjacent pixels
823 | if ((*(pBufMag + j*widthstep + i) < leftPixel) || (*(pBufMag + j*widthstep + i) < rightPixel))
824 | {
825 | *(pBufOut + j*widthstep + i) = 0;
826 | }
827 | else
828 | *(pBufOut + j*widthstep + i) = (uint8_t)(*(pBufMag + j*widthstep + i) / MaxGradient * 255);
829 |
830 | count++;
831 | }
832 | }
833 | int RSum = 0, CSum = 0;
834 | int curX, curY;
835 | int flag = 1;
836 | int n = 0;
837 | int iPr = 1;
838 | //Hysteresis threshold
839 | for (i = 1; i < width - 1; i += iPr)
840 | {
841 | for (j = 1; j < height - 1; j += iPr)
842 | {
843 | int16_t fdx = *(pBufGradX + j*widthstep + i);
844 | int16_t fdy = *(pBufGradY + j*widthstep + i);
845 | float MagG = *(pBufMag + j*widthstep + i);
846 |
847 | flag = 1;
848 | if ((float)*(pBufOut + j*widthstep + i) < Contrast)
849 | {
850 | if ((float)*(pBufOut + j*widthstep + i) < MinContrast)
851 | {
852 | *(pBufOut + j*widthstep + i) = 0;
853 | flag = 0; // remove from edge
854 | }
855 | else
856 | { // if none of the 8 neighboring pixels exceeds the high threshold, remove from edge
857 | if (((float)*(pBufOut + (j - 1)*widthstep + i - 1) < Contrast) &&
858 | ((float)*(pBufOut + j * widthstep + i - 1) < Contrast) &&
859 | ((float)*(pBufOut + (j + 1) * widthstep + i - 1) < Contrast) &&
860 | ((float)*(pBufOut + (j - 1) * widthstep + i) < Contrast) &&
861 | ((float)*(pBufOut + (j + 1)* widthstep + i) < Contrast) &&
862 | ((float)*(pBufOut + (j - 1) * widthstep + i + 1) < Contrast) &&
863 | ((float)*(pBufOut + j * widthstep + i + 1) < Contrast) &&
864 | ((float)*(pBufOut + (j + 1) * widthstep + i + 1) < Contrast))
865 | {
866 | *(pBufOut + j*widthstep + i) = 0;
867 | flag = 0;
868 | }
869 | }
870 | }
871 |
872 | // save selected edge information
873 | curX = i; curY = j;
874 | if (flag != 0)
875 | {
876 | if (fdx != 0 || fdy != 0)
877 | {
878 | RSum = RSum + curX;
879 | CSum = CSum + curY; // Row sum and column sum for center of gravity
880 |
881 | PyramidEdgePtr->edgePoints[n].x = curX;
882 | PyramidEdgePtr->edgePoints[n].y = curY;
883 | PyramidEdgePtr->edgeDerivativeX[n] = fdx;
884 | PyramidEdgePtr->edgeDerivativeY[n] = fdy;
885 |
886 | //handle divide by zero
887 | if (MagG != 0)
888 | PyramidEdgePtr->edgeMagnitude[n] = 1 / MagG; // gradient magnitude
889 | else
890 | PyramidEdgePtr->edgeMagnitude[n] = 0;
891 | n++;
892 | }
893 | }
894 | }
895 | }
896 | if (n != 0)
897 | {
898 | PyramidEdgePtr->numOfCordinates = n;
899 | gravity.x = RSum / n; // center of gravity
900 | gravity.y = CSum / n; // center of gravity
901 | //PyramidEdgePtr->centerOfGravity.x = width / 2; // center of image
902 | //PyramidEdgePtr->centerOfGravity.y = height / 2; // center of image
903 | }
904 | }
905 | free(pBufMag);
906 | free(pBufOrien);
907 | free(pBufGradY);
908 | free(pBufGradX);
909 | free(pBufOut);
910 | free(pInput);
911 | delete []PyramidEdgePtr->edgePoints;
912 | delete []PyramidEdgePtr->edgeMagnitude ;
913 | delete []PyramidEdgePtr->edgeDerivativeX ;
914 | delete []PyramidEdgePtr->edgeDerivativeY;
915 | delete PyramidEdgePtr;
916 | return gravity;
917 | }
918 |
919 |
920 | /////////////////////
921 |
922 | /////// contour matching
923 | void ShapeMatch::shape_match_accurate(IplImage *SearchImage, PyramidEdgePoints *ShapeInfoVec, int Contrast, int MinContrast, float MinScore, float Greediness, search_region *SearchRegion, MatchResult *ResultList, ImgEdgeInfo *imgEdgeInfo)
924 | {
925 | /* source image size */
926 | int Width = SearchImage->width;
927 | int Height = SearchImage->height;
928 | int widthstep = SearchImage->widthStep;
929 | /* Compute buffer sizes */
930 | uint32_t bufferSize = widthstep * Height;
931 | int16_t *pBufGradX = imgEdgeInfo->pBufGradX; //(int16_t *)malloc(bufferSize * sizeof(int16_t));
932 | int16_t *pBufGradY = imgEdgeInfo->pBufGradY;//(int16_t *)malloc(bufferSize * sizeof(int16_t));
933 | float *pBufMag = imgEdgeInfo->pBufMag; //(float *)malloc(bufferSize * sizeof(float));
934 |
935 | if ( pBufGradX && pBufGradY && pBufMag)
936 | {
937 | int i, j, m; // count variable;
938 | int curX = 0;
939 | int curY = 0;
940 |
941 | int16_t iTx = 0;
942 | int16_t iTy = 0;
943 | int16_t iSx = 0;
944 | int16_t iSy = 0;
945 | float iSm = 0;
946 | float iTm = 0;
947 |
948 | int startX = SearchRegion->StartX;
949 | int startY = SearchRegion->StartY;
950 | int endX = SearchRegion->EndX;
951 | int endY = SearchRegion->EndY;
952 | int SumOfCoords = 0;
953 | int TempPiontX = 0;
954 | int TempPiontY = 0;
955 | float PartialSum = 0;
956 | float PartialScore = 0;
957 | float ResultScore = 0;
958 | float TempScore = 0;
959 | float anMinScore = 1 - MinScore;
960 | float NormMinScore = 0;
961 | float NormGreediness = Greediness;
962 | /*for (int k = 0; k < ShapeInfoVec[0].AngleNum; k++)
963 | {
964 | if (ShapeInfoVec[k].Angel < AngleStart || ShapeInfoVec[k].Angel > AngleStop)
965 | continue;
966 | */
967 | ResultScore = 0;
968 | NormMinScore = MinScore / ShapeInfoVec->numOfCordinates;
969 | NormGreediness = ((1 - Greediness * MinScore) / (1 - Greediness)) / ShapeInfoVec->numOfCordinates;
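// NOTE: edgeMagnitude and pBufMag store reciprocal gradient magnitudes (1/|grad|), so each
// term accumulated into PartialSum below is (up to integer truncation) the cosine of the angle
// between the template gradient and the image gradient at that edge point, i.e. at most 1.
// PartialScore = PartialSum / m is the mean cosine over the first m template points; the early
// break drops a candidate position once this running score falls below the smaller of the two
// bounds built from MinScore and Greediness above (the greedy stopping criterion).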
970 | // #pragma omp parallel for
971 | for (i = startX; i < endX; i++)
972 | {
973 | for (j = startY; j < endY; j++)
974 | {
975 | PartialSum = 0;
976 | for (m = 0; m < ShapeInfoVec->numOfCordinates; m++)
977 | {
978 | curX = i + (ShapeInfoVec->edgePoints + m)->x; // template X coordinate
979 | curY = j + (ShapeInfoVec->edgePoints + m)->y; // template Y coordinate
980 | iTx = *(ShapeInfoVec->edgeDerivativeX + m); // template X derivative
981 | iTy = *(ShapeInfoVec->edgeDerivativeY + m); // template Y derivative
982 | iTm = *(ShapeInfoVec->edgeMagnitude + m); // template gradients magnitude
983 |
984 | if (curX < 0 || curY < 0 || curX > Width - 1 || curY > Height - 1)
985 | continue;
986 |
987 | iSx = *(pBufGradX + curY*widthstep + curX); // get corresponding X derivative from source image
988 | iSy = *(pBufGradY + curY*widthstep + curX); // get corresponding Y derivative from source image
989 | iSm = *(pBufMag + curY*widthstep + curX); // get gradients magnitude from source image
990 |
991 | if ((iSx != 0 || iSy != 0) && (iTx != 0 || iTy != 0))
992 | {
993 | PartialSum = PartialSum + ((iSx * iTx) + (iSy * iTy)) * (iTm * iSm);// calculate similarity
994 | }
995 | SumOfCoords = m + 1;
996 | PartialScore = PartialSum / SumOfCoords; // Normalized
997 | if (PartialScore < (MIN(anMinScore + NormGreediness * SumOfCoords, NormMinScore * SumOfCoords)))
998 | break;
999 | }
1000 |
1001 | if (PartialScore > ResultScore)
1002 | {
1003 | ResultScore = PartialScore; // Match score
1004 | TempPiontX = i; // result coordinate X
1005 | TempPiontY = j; // result coordinate Y
1006 | /*}
1007 | if (ResultScore > TempScore)
1008 | {*/
1009 | TempScore = ResultScore;
1010 | ResultList->ResultScore = TempScore;
1011 | //ResultList->Angel = ShapeInfoVec->Angel;
1012 | ResultList->CenterLocX = TempPiontX;
1013 | ResultList->CenterLocY = TempPiontY;
1014 | }
1015 | }
1016 | }
1017 | }
1018 | }
1019 |
1020 | float ShapeMatch::new_rsqrt(float f)
1021 | {
1022 | return 1 / sqrtf(f);
1023 | }
1024 |
1025 |
1026 | void ShapeMatch::CalSearchImgEdg(IplImage *SearchImage,ImgEdgeInfo *imgEdgeInfo)
1027 | {
1028 | int Width = SearchImage->width;
1029 | int Height = SearchImage->height;
1030 | int widthstep = SearchImage->widthStep;
1031 | /* Compute buffer sizes */
1032 | uint32_t bufferSize = widthstep * Height;
1033 | /* Allocate buffers for each vector */
1034 | uint8_t *pInput = (uint8_t *)malloc(bufferSize * sizeof(uint8_t));
1035 | imgEdgeInfo->pBufGradX = (int16_t *)malloc(bufferSize * sizeof(int16_t));
1036 | imgEdgeInfo->pBufGradY = (int16_t *)malloc(bufferSize * sizeof(int16_t));
1037 | imgEdgeInfo->pBufMag = (float *)malloc(bufferSize * sizeof(float));
1038 |
1039 | if (pInput &&imgEdgeInfo->pBufGradX && imgEdgeInfo->pBufGradY &&imgEdgeInfo->pBufMag)
1040 | {
1041 | //gaussian_filter(SearchImage, pInput, width, height);
1042 | memcpy(pInput, SearchImage->imageData, bufferSize * sizeof(uint8_t));
1043 | memset(imgEdgeInfo->pBufGradX, 0, bufferSize * sizeof(int16_t));
1044 | memset(imgEdgeInfo->pBufGradY, 0, bufferSize * sizeof(int16_t));
1045 | memset(imgEdgeInfo->pBufMag, 0, bufferSize * sizeof(float));
1046 |
1047 | int i, j, m; // count variable;
1048 | #pragma omp parallel for
1049 | for (i = 1; i < Width - 1; i++)
1050 | {
1051 | for (j = 1; j < Height - 1; j++)
1052 | {
1053 | int16_t sdx = *(pInput + j*widthstep + i + 1) - *(pInput + j*widthstep + i - 1);
1054 | int16_t sdy = *(pInput + (j + 1)*widthstep + i) - *(pInput + (j - 1)*widthstep + i);
1055 | *(imgEdgeInfo->pBufGradX + j*widthstep + i) = sdx;
1056 | *(imgEdgeInfo->pBufGradY + j*widthstep + i) = sdy;
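// new_rsqrt returns 1/sqrt(x), so pBufMag holds the reciprocal gradient magnitude;
// shape_match_accurate multiplies by it instead of dividing when normalizing the score.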
1057 | *(imgEdgeInfo->pBufMag + j*widthstep + i) = new_rsqrt((float)(sdx*sdx) + (float)(sdy*sdy));
1058 | }
1059 | }
1060 | }
1061 | free(pInput);
1062 | }
1063 |
--------------------------------------------------------------------------------
/ContourMatching/ShapeMatch.h:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Skin1234/ContourMatching/3f7a6ea3df0bfa9925767caac53c2ea27d273f0c/ContourMatching/ShapeMatch.h
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ContourMatching
2 | A Halcon-style shape (contour) matching algorithm
3 | Environment: OpenCV 2.4.10, Visual Studio 2019
4 | Only the Debug x64 configuration has been debugged
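
For orientation, here is a minimal sketch of how the `ShapeMatch` class defined in `ContourMatching/ShapeMatch.cpp` can be driven end to end. It is not part of the original project: the image paths, contrast thresholds (80/20), angle and scale ranges, and the `minScore`/`greediness` values are illustrative assumptions. Three pyramid levels are used because the cleanup code in `FindGeoMatchModel` currently assumes `pyramidnums = 3`.

```cpp
#include <cstdio>
#include <opencv2/opencv.hpp>
#include "ShapeMatch.h"

int main()
{
    // Paths are relative to the working directory (assumption); any 8-bit images work.
    IplImage* tpl = cvLoadImage("Input\\ClassiTemplate\\1-2.jpg", CV_LOAD_IMAGE_GRAYSCALE);
    IplImage* src = cvLoadImage("Input\\file\\Europe4.png", CV_LOAD_IMAGE_COLOR);
    if (!tpl || !src)
        return -1;

    ShapeMatch sm;
    // Build the model: contrast thresholds 80/20, 3 pyramid levels,
    // angles 0..360 deg in 10 deg steps, scales 0.8..1.2 in 0.1 steps (all illustrative).
    sm.CreateMatchModel(tpl, 80, 20, 3, 0, 360, 10, 0.8, 1.2, 0.1);

    // Search with the same pyramid/angle/scale settings; minScore 0.7, greediness 0.8 (illustrative).
    CvPoint result;
    double score = sm.FindGeoMatchModel(src, 0.7, 0.8, &result, 3, 0, 360, 10, 0.8, 1.2, 0.1);
    printf("score = %.3f at (%d, %d)\n", score, result.x, result.y);

    cvReleaseImage(&tpl);
    cvReleaseImage(&src);
    return 0;
}
```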
--------------------------------------------------------------------------------
/demo.txt:
--------------------------------------------------------------------------------
1 | dsa
--------------------------------------------------------------------------------