├── .gitignore ├── EdgeDrawing.py ├── LICENSE ├── LineDetector.py ├── README.md ├── TestLineDetection.py └── TestTool.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /EdgeDrawing.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sat Apr 21 13:10:49 2018 4 | 5 | @author: Shaojun Luo 6 | """ 7 | import numpy as np 8 | import cv2 9 | 10 | # define value of HORIZONTAL 11 | HORIZONTAL = 1 12 | VERTICAL = -1 13 | LEFT = -1 14 | RIGHT = 1 15 | UP = -1 16 | DOWN = 1 17 | max_gap = 4 18 | # default parameters 19 | EDParam_default = {'ksize': 5, 20 | 'sigma': 1.0, 21 | 'gradientThreshold': 36, 22 | 'anchorThreshold': 8, 23 | 'scanIntervals': 1} 24 | 25 | class EdgeDrawing: 26 | # initiation 27 | def __init__(self, EDParam = EDParam_default): 28 | # EDLineDetector constructor function 29 | #set parameters for line segment detection 30 | self.ksize_ = EDParam['ksize'] 31 | self.sigma_ = EDParam['sigma'] 32 | self.gradientThreshold_ = EDParam['gradientThreshold'] 33 | self.anchorThreshold_ = EDParam['anchorThreshold'] 34 | self.scanIntervals_ = EDParam['scanIntervals'] 35 | #dimension of image 36 | self.MAX_X = 0 37 | self.MAX_Y = 0 38 | self.G_ = np.array([]) 39 | self.D_ = np.array([]) 40 | self.E_ = np.array([]) 41 | 42 | # search algorithm for EdgeDrawing 43 | def GoUp_(self,x, y): 44 | segment = [] # array to record edge segment 45 | direct_next = None # search direction of left side similart to right, up and down 46 | while x>0 and self.G_[x,y]>0 and not self.E_[x,y]: 47 | next_y = [max(0,y-1), y, min(self.MAX_Y-1,y+1)] # search in a valid area 48 | segment.append((x,y))# extend line segments 49 | if self.D_[x,y] 
== VERTICAL: 50 | self.E_[x, y] = True # mark as edge 51 | y_last = y # record parent pixel 52 | x, y = x-1, next_y[np.argmax(self.G_[x-1, next_y])]# walk to next pixel with max gradient 53 | else: 54 | direct_next = y - y_last # change direction to continue search 55 | break # stop and proceed to next search 56 | return segment,direct_next 57 | 58 | def GoDown_(self,x, y): 59 | segment = [] 60 | direct_next = None 61 | while x < self.MAX_X-1 and self.G_[x,y]>0 and not self.E_[x, y]: 62 | next_y = [max(0,y-1), y, min(self.MAX_Y-1,y+1)] 63 | segment.append((x,y)) 64 | if self.D_[x,y] == VERTICAL: 65 | self.E_[x, y] = True 66 | y_last = y 67 | x, y = x+1, next_y[np.argmax(self.G_[x+1, next_y])] 68 | else: 69 | direct_next = y - y_last 70 | break 71 | return segment,direct_next 72 | 73 | def GoRight_(self,x, y): 74 | segment = [] 75 | direct_next = None 76 | while y < self.MAX_Y-1 and self.G_[x,y]>0 and not self.E_[x,y]: 77 | next_x = [max(0,x-1), x, min(self.MAX_X-1,x+1)] 78 | segment.append((x,y)) 79 | if self.D_[x,y] == HORIZONTAL: 80 | self.E_[x, y] = True 81 | x_last = x 82 | x, y = next_x[np.argmax(self.G_[next_x, y+1])], y+1 83 | else: 84 | direct_next = x - x_last 85 | break 86 | return segment,direct_next 87 | 88 | def GoLeft_(self,x, y): 89 | segment = [] 90 | direct_next = None 91 | while y>0 and self.G_[x, y]>0 and not self.E_[x, y]: 92 | next_x = [max(0,x-1), x, min(self.MAX_X-1,x+1)] 93 | segment.append((x,y)) 94 | if self.D_[x,y] == HORIZONTAL: 95 | self.E_[x, y] = True 96 | x_last = x 97 | x, y = next_x[np.argmax(self.G_[next_x, y-1])], y-1 98 | else: 99 | direct_next = x - x_last 100 | break 101 | return segment, direct_next 102 | 103 | # walk down until reach the end 104 | def SmartWalk_(self, x,y,direct_next): 105 | segment = [(x,y)] 106 | while direct_next is not None: 107 | x, y = segment[-1][0], segment[-1][1] 108 | # if the last point of chain is horizontal, explore horizontally 109 | if self.D_[x,y] == HORIZONTAL: 110 | # get segment sequence 111 | if direct_next == LEFT: 112 | s, direct_next = self.GoLeft_(x,y) 113 | elif direct_next == RIGHT: 114 | s, direct_next = self.GoRight_(x,y) 115 | else: break 116 | # if self.G_[x,y+1]>self.G_[x,y-1]: 117 | # s, direct_next = self.GoRight_(x,y) 118 | # else: 119 | # s, direct_next = self.GoLeft_(x,y) 120 | elif self.D_[x,y] == VERTICAL: # explore vertically 121 | if direct_next == UP: 122 | s, direct_next = self.GoUp_(x,y) 123 | elif direct_next == DOWN: 124 | s, direct_next = self.GoDown_(x,y) 125 | else: break 126 | # if self.G_[x-1,y]>self.G_[x+1,y]: 127 | # s, direct_next = self.GoUp_(x,y) 128 | # else: 129 | # s, direct_next = self.GoDown_(x,y) 130 | else: # if the next pixel is invalid 131 | break 132 | if len(s) > 1: 133 | segment.extend(s[1:]) 134 | return segment 135 | # find list of anchors 136 | def FindAnchors_(self,image): 137 | # detect the anchor 138 | anchor_list = [] 139 | for i in range(1,image.shape[0]-1,self.scanIntervals_): 140 | for j in range(1,image.shape[1]-1,self.scanIntervals_): 141 | if self.D_[i,j] == HORIZONTAL: # HORIZONTAL EDGl compare up & down 142 | if self.G_[i,j] - self.G_[i-1,j] >= self.anchorThreshold_ and self.G_[i,j] - self.G_[i+1,j] >= self.anchorThreshold_: 143 | anchor_list.append((i,j)) 144 | elif self.D_[i,j] == VERTICAL: # VERTICAL EDGE. Compare with left & right. 
145 |                     if self.G_[i,j] - self.G_[i,j-1] >= self.anchorThreshold_ and self.G_[i,j] - self.G_[i,j+1] >= self.anchorThreshold_:
146 |                         anchor_list.append((i,j))
147 |         return anchor_list
148 |     # merge edges
149 |     def MergeEdges_(self,edges):
150 |         # connect and merge the edges in place
151 |         merged = True
152 |         while merged: # repeat as long as the previous pass merged something
153 |             p1 = 0 # pivot for first edge
154 |             merged = False # assume not going to merge
155 |             # iterate over edges to merge
156 |             while p1 < len(edges):
157 |                 p2 = p1+1
158 |                 while p2 < len(edges):
159 |                     # mark start and end point of 2 segments
160 |                     start_1, end_1 = edges[p1][0], edges[p1][-1]
161 |                     start_2, end_2 = edges[p2][0], edges[p2][-1]
162 |                     # direction of two vectors
163 |                     v_1 = (end_1[0]-start_1[0],end_1[1]-start_1[1])
164 |                     v_2 = (end_2[0]-start_2[0],end_2[1]-start_2[1])
165 |                     # if they are aligned in the same direction, compare end with head
166 |                     if np.dot(v_1,v_2)>=0:
167 |                         if abs(end_1[0] - start_2[0]) + abs(end_1[1] - start_2[1]) < max_gap:
168 |                             # merge end-head
169 |                             edges[p1] = edges[p1] + edges.pop(p2)
170 |                             merged = True
171 |                         elif abs(start_1[0] - end_2[0])+abs(start_1[1] - end_2[1]) < max_gap:
172 |                             # merge head-end
173 |                             edges[p1] = edges.pop(p2)+ edges[p1]
174 |                             merged = True
175 |                         else:
176 |                             p2 += 1 # manually proceed to next segment
177 |                     else:
178 |                         if abs(start_1[0] - start_2[0]) + abs(start_1[1]-start_2[1]) < max_gap:
179 |                             # merge head-head
180 |                             edges[p1] = edges[p1][::-1] + edges.pop(p2)
181 |                             merged = True
182 |                         elif abs(end_1[0] - end_2[0]) + abs(end_1[1] - end_2[1]) < max_gap:
183 |                             # merge end-end
184 |                             edges[p1] = edges.pop(p2) + edges[p1][::-1]
185 |                             merged = True
186 |                         else:
187 |                             p2 += 1 # manually proceed to next segment
188 |                 p1 += 1 # next segment
189 |         return
190 |     # edge drawing algorithm
191 |     def EdgeDrawing(self, image, smoothed = False):
192 |         # validation check for image
193 |         if len(image.shape)>2:
194 |             raise ValueError('Use only a 1-channel (grayscale) image')
195 |             return None
196 |         # set up dimension
197 |         self.MAX_X = image.shape[0]
198 |         self.MAX_Y = image.shape[1]
199 |         # if not smoothed then smooth it
200 |         if not smoothed: # input image has not been smoothed
201 | img = cv2.GaussianBlur(image,(self.ksize_,self.ksize_),self.sigma_) 202 | else: 203 | img = image.copy() 204 | # compute dx,dy imagegradient 205 | dxImg_ = cv2.Sobel(img,cv2.CV_64F,1,0,ksize=1) 206 | dyImg_ = cv2.Sobel(img,cv2.CV_64F,0,1,ksize=1) 207 | 208 | # Compute gradient map and direction map 209 | #self.G_ = np.sqrt(dxImg_*dxImg_ + dyImg_*dyImg_) 210 | self.G_ = np.abs(dxImg_)+ np.abs(dyImg_) 211 | self.G_[self.G_ < self.gradientThreshold_] = 0 212 | # If true, then it is horizontal edge 213 | self.D_ = -np.sign(np.abs(dxImg_) - np.abs(dyImg_)) 214 | self.D_[self.G_ < self.gradientThreshold_] = 0 215 | 216 | #cv2.imwrite('Gradient.bmp',255*(self.G_>0).astype(int)) 217 | # find anchor list 218 | anchor_list = self.FindAnchors_(image) 219 | 220 | edges = [] 221 | # initiate edgemap 222 | self.E_ = np.zeros(self.G_.shape,dtype = bool) 223 | # first round edrawing, get fragment segments 224 | for anchor in anchor_list: 225 | if not self.E_[anchor]: # if not mark as edges 226 | # walk right or down 227 | segment_1 = self.SmartWalk_(anchor[0], anchor[1], 1) 228 | # reset anchor point 229 | self.E_[anchor] = False 230 | # walk left or up 231 | segment_2 = self.SmartWalk_(anchor[0], anchor[1], -1) 232 | # concat two segments 233 | if len(segment_1[::-1] + segment_2)>0: 234 | edges.append(segment_1[::-1] + segment_2[1:]) 235 | # merge the edges with same direction 236 | #self.MergeEdges_(edges) 237 | edge_map = 255*self.E_.astype(np.uint8) 238 | return edges, edge_map 239 | 240 | 241 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Shaojun Luo 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /LineDetector.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sun May 13 17:14:43 2018 4 | 5 | @author: Shaojun Luo 6 | """ 7 | import numpy as np 8 | from numpy.linalg import eig 9 | 10 | # Internal Parameters 11 | RATIO = 50 12 | ANGLE_TURN = 67.5*np.pi/180 13 | STEP = 3 14 | 15 | """ 16 | Begin for EDLine Functions 17 | """ 18 | # distance from point to line 19 | def Distance_(a,b,point): 20 | if b is None: 21 | return np.abs(point[0] - a) 22 | else: 23 | return np.abs(b*point[0] - point[1] + a)/np.sqrt(b*b + 1) 24 | 25 | # fast fit line equation 26 | def FitLine_(pixel_chain): 27 | x,y = zip(*pixel_chain) 28 | x = np.float64(x) 29 | y = np.float64(y) 30 | if np.dot(x-x.mean(),x-x.mean()) == 0: #if it is horizontal line 31 | beta = None 32 | alpha = x.mean() 33 | mse = ((x - alpha)**2).mean() 34 | else: # else ordinary line, MSE take the orthogonal distance 35 | beta = np.dot(x-x.mean(),y-y.mean())/np.dot(x-x.mean(),x-x.mean()) 36 | alpha = np.mean(y) - beta*np.mean(x) 37 | mse = np.array([Distance_(alpha,beta,point)**2 for point in pixel_chain]).mean() 38 | return beta, alpha, mse 39 | 40 | # EDLine detection 41 | def EDLine(edges, minLineLen, lineFitErrThreshold = 1): 42 | # filter edges 43 | edges = [edge for edge in edges if len(edge)>=minLineLen] 44 | lines = [] 45 | # finding initial line segment 46 | while edges: 47 | edge = edges.pop(0) 48 | while len(edge) >= minLineLen: 49 | b,a,err = FitLine_(edge[:minLineLen]) 50 | if err < lineFitErrThreshold: # initial segment fournd 51 | break 52 | else: # otherwise move the window 53 | edge = edge[1:] 54 | 55 | if err < lineFitErrThreshold: # if line segment found, extend the line 56 | # start from segment 57 | line_len = minLineLen 58 | while line_len < len(edge):#and err < self.lineFitErrThreshold_: 59 | if Distance_(a,b,edge[line_len]) <= lineFitErrThreshold: 60 | line_len+= 1 61 | #b,a,err = self.FitLine(edge[:line_len]) 62 | else: 63 | break 64 | lines.append(edge[:line_len]) 65 | # append the rest of pixels for next line extraction 66 | if len(edge[line_len:])>= minLineLen: 67 | edges.append(edge[line_len:]) 68 | return lines 69 | 70 | """ 71 | Begin for PLineD Functions 72 | """ 73 | # cos of angle between two vectors 74 | def cosAngle_(v_1,v_2): 75 | return np.dot(v_1,v_2)/np.sqrt(np.dot(v_1,v_1)*np.dot(v_2,v_2)) 76 | 77 | # check if is a line using covariant matrix 78 | def CovarianceCheck_(segment): 79 | cov_mat = np.cov(list(zip(*segment))) # covariance matrix 80 | eig_val, _= eig(cov_mat) # eigen value 81 | #if the ratio is large then return true, 1e-10 is a protection on dividing 0 82 | return (max(eig_val) + 1e-10)/(min(eig_val) + 1e-10) > RATIO 83 | 84 | # Algorithm 1&2 segment cut and line detection 85 | def SegmentFilter_(edges): 86 | lines = [] 87 | for edge in edges: 88 | while edge: 89 | if len(edge) > 2*STEP: 90 | i = STEP 91 | while i < len(edge)-STEP: 92 | v_1 = np.subtract(edge[i],edge[i-STEP]) 93 | v_2 = np.subtract(edge[i+STEP],edge[i]) 94 | if cosAngle_(v_1,v_2) < np.cos(ANGLE_TURN): 95 | break # find segment 96 | else: # step foward 97 | i = i+STEP 98 | if i > len(edge)-STEP: # if the search is already to end 99 | segment = edge # attach whole segment 100 | break 101 | else: # cut the segement 102 | segment = edge[:i] # cut the current segment 103 | edge = edge[i:] # proceed to next search 104 | else: 105 | segment = edge # attach whole 
segment 106 | break 107 | # check if the segment is a line 108 | if CovarianceCheck_(segment): 109 | lines.append(segment) 110 | return lines 111 | 112 | # Algorithm 3. Group the lines 113 | def GroupLines_(lines, min_L, min_P, tol_a, tol_d): 114 | # connect and merge the line inplace 115 | line_groups = [] 116 | v_groups = [] 117 | p1 = 0 118 | while p1 < len(lines): 119 | if len(lines[p1])>min_L:# if the segment begin to consider 120 | L_line = lines.pop(p1) # pop the current segment 121 | max_l = len(L_line) # longest line segment 122 | group_ = [L_line] # initiate group 123 | p_group = len(L_line) # total pixel in segment 124 | v_1 = np.subtract(L_line[-1], L_line[0]) # v_1 is the longest segment direction 125 | v_m = v_1 # mean direction 126 | p2 = 0 127 | while p2 < len(lines): 128 | v_s = np.subtract(lines[p2][-1], lines[p2][0]) # v_s is the direstion of new segment s 129 | # if they aligned in the same direction, compare with head-end 130 | if np.abs(cosAngle_(v_m,v_s)) > np.cos(tol_a): 131 | # distance of two vectors 132 | v_1_n = (-v_m[1],v_m[0]) # normal of mean-line 133 | mid_2 = 0.5*np.add(lines[p2][0], lines[p2][-1]) # mid point of line s 134 | v_2 = np.subtract(mid_2,L_line[-1]) # direction to any point on the line 135 | ds = np.abs(np.dot(v_1_n,v_2))/np.sqrt(np.dot(v_1_n,v_1_n)) # distance of segment to line 136 | # if they are close enough, merge 137 | if ds < tol_d: 138 | N_line = lines.pop(p2) # pop the current segment 139 | group_.append(N_line) # add to group 140 | p_group += len(N_line) # update group member 141 | if max_l > len(N_line): # update longest segment 142 | L_line = N_line 143 | v_1 = np.subtract(L_line[-1], L_line[0]) 144 | v_m += v_1 145 | else: 146 | p2 += 1 # manually poceed to next segment 147 | else: 148 | p2 += 1 # manually poceed to next segment 149 | if p_group > min_P: # if the group is large enough 150 | line_groups.append(group_) # put in the line group 151 | v_groups.append(v_m) # main direction of this group 152 | p1 = 0 # if group then reset p1 153 | else: 154 | p1 += 1 # next segment 155 | return line_groups, v_groups 156 | 157 | # Algorithm 4: detect parallel groups 158 | def ParallelGroups_(line_groups,v_groups, tol_a): 159 | parallel_groups = [] 160 | while line_groups: 161 | l_1 = line_groups.pop(0) # pop the first element 162 | v_1 = v_groups.pop(0) 163 | parallel_segments = [l_1] # initiate parallel segments 164 | p = 0 165 | while p < len(line_groups): 166 | v_s = v_groups[p] 167 | if np.abs(cosAngle_(v_1,v_s)) > np.cos(tol_a): 168 | #add the segment to group and remove from pool 169 | parallel_segments.append(line_groups.pop(p)) 170 | v_groups.pop(p) 171 | else: 172 | p += 1 # manually proceed 173 | if len(parallel_segments) > 1: # there are parallel groups 174 | for segments in parallel_segments: 175 | for s in segments: 176 | parallel_groups.append(s) 177 | # check the group 178 | return parallel_groups 179 | 180 | # main body for PlineD 181 | def PLineD(edges, min_L = 10, min_P = 1000, tol_a = 5*np.pi/180, tol_d = 60): 182 | # cut and filter line segments 183 | lines = SegmentFilter_(edges) 184 | # 185 | # group lines 186 | line_groups, v_groups = GroupLines_(lines, min_L, min_P, tol_a, tol_d) 187 | #return line_groups,v_groups 188 | # find parallel lines 189 | parallel_groups = ParallelGroups_(line_groups,v_groups, tol_a) 190 | # return 191 | return parallel_groups 192 | -------------------------------------------------------------------------------- /README.md: 
--------------------------------------------------------------------------------
1 | # EDLinePython
2 | 
3 | Python package for the Edge Drawing algorithm and line detection. Related paper:
4 | 
5 | https://www.sciencedirect.com/science/article/pii/S1047320312000831
6 | 
7 | C++ implementation:
8 | https://github.com/hongxinliu/edge-drawing
9 | 
10 | 
11 | # Usage:
12 | 
13 | EdgeDrawing.py: functions for Edge Drawing edge detection.
14 | 
15 | LineDetector.py: detector functions to detect straight lines (EDLine and PLineD).
16 | 
17 | TestTool.py / TestLineDetection.py: test tools for the functions above.
18 | 
19 | 
20 | # Package requirements:
21 | 
22 | Python 2.7/3.6
23 | 
24 | OpenCV (cv2)
25 | 
26 | numpy
27 | 
--------------------------------------------------------------------------------
/TestLineDetection.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Sun Apr 22 12:09:25 2018
4 | 
5 | @author: Shaojun Luo
6 | """
7 | 
8 | import EdgeDrawing
9 | import LineDetector as LD
10 | import TestTool as TT
11 | import cv2
12 | import numpy as np
13 | import glob
14 | import time
15 | 
16 | # raw input
17 | #input_image = image
18 | 
19 | # ground truth folder
20 | ground_truth_folder = 'test_data/IR_GT/'
21 | 
22 | # infrared light
23 | origin_image_folder = 'test_data/IR/'
24 | 
25 | # parameters for Edge Drawing
26 | EDParam = {'ksize':3, # Gaussian smoothing filter size if smoothed = False
27 |            'sigma': 1, # Gaussian smoothing sigma if smoothed = False
28 |            'gradientThreshold': 25, # threshold on gradient image
29 |            'anchorThreshold': 10, # threshold to determine the anchors
30 |            'scanIntervals': 4} # scan interval; the smaller, the more detail
31 | ## Visible light
32 | #origin_image_folder = 'test_data/VL/'
33 | #
34 | ## parameters for Edge Drawing (visible light)
35 | #EDParam = {'ksize':3, # Gaussian smoothing filter size if smoothed = False
36 | #           'sigma': 1, # Gaussian smoothing sigma if smoothed = False
37 | #           'gradientThreshold': 25, # threshold on gradient image
38 | #           'anchorThreshold': 10, # threshold to determine the anchors
39 | #           'scanIntervals': 1} # scan interval; the smaller, the more detail
40 | 
41 | # image list
42 | origin_image_list = sorted(glob.glob(origin_image_folder+'*.bmp'))
43 | ground_truth_list = sorted(glob.glob(ground_truth_folder+'*.bmp'))
44 | n = len(origin_image_list)
45 | 
46 | # instantiate the EdgeDrawing class
47 | ED = EdgeDrawing.EdgeDrawing(EDParam)
48 | 
49 | """
50 | Preprocess the images to get edges using EdgeDrawing.
51 | The test set is also constructed for calculation of the confusion matrix.
52 | """
53 | edges_list = []
54 | edges_map_list = []
55 | image_GT_list = []
56 | 
57 | start_time = time.time() # start timing
58 | print('Preprocessing Image (EdgeDrawing)')
59 | 
60 | # Edge Drawing detection:
61 | for i, image_file in enumerate(origin_image_list):
62 |     # read image_file
63 |     image = cv2.imread(image_file)
64 |     # convert to gray-scale image
65 |     input_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
66 |     # Edge Drawing
67 |     edges,edges_map = ED.EdgeDrawing(input_image, smoothed = False)
68 |     # edges is a list of pixel chains; it is used for EDLine and PLineD
69 |     edges_list.append(edges)
70 |     # edges_map is a matrix with edges marked as 255; it is used for RHT
71 |     edges_map_list.append(edges_map)
72 |     # ground truth image
73 |     image_ground_truth = cv2.imread(ground_truth_list[i])
74 |     image_ground_truth = cv2.cvtColor(image_ground_truth,cv2.COLOR_BGR2GRAY) # convert to gray image
75 |     image_GT_list.append(image_ground_truth)
76 | 
77 |     if (i+1)%100 == 0:
78 |         print(str(i+1) + ' images processed')
79 | 
80 | elapsed_time = time.time() - start_time # end timing
81 | 
82 | # mean preprocessing time for each image
83 | mean_pre_time = elapsed_time/n
84 | 
85 | print('Preprocess Complete')
86 | print('Time: {0:.1f}s. Average Time: {1:.3f}s'.format(elapsed_time, mean_pre_time))
87 | 
88 | #%%
89 | """
90 | Power line detection algorithms
91 | """
92 | 
93 | # parameters for RHT
94 | distance_resolution = 1
95 | angle_resolution = np.pi/180
96 | minLineLength = 12
97 | maxLineGap = 10
98 | votes = 20
99 | 
100 | # parameters for EDLine
101 | minLineLen = 40
102 | lineFitErrThreshold = 1.0
103 | 
104 | # parameters for PLineD
105 | min_L = 5
106 | min_P = 20
107 | tol_a = 5*np.pi/180
108 | tol_d = 3.0
109 | 
110 | print('---------Randomized Hough------------')
111 | 
112 | TPR_list_HT = []
113 | FPR_list_HT = []
114 | 
115 | start_time = time.time() # start timing
116 | #
117 | for i, edges_map in enumerate(edges_map_list):
118 |     # RHT line detection (keyword arguments: the 5th positional parameter of HoughLinesP is 'lines', not minLineLength)
119 |     lines = cv2.HoughLinesP(edges_map,distance_resolution,angle_resolution,votes,minLineLength=minLineLength,maxLineGap=maxLineGap)
120 |     lines = TT.TranslateLines(lines)
121 |     # get prediction label
122 |     TP, FP, FN,_ = TT.ConfusionMatrix(lines,image_GT_list[i])
123 |     if TP + FN > 4: # only count valid frames
124 |         TPR_list_HT.append(TT.TPR(TP,FN))
125 |         FPR_list_HT.append(TT.FPR(TP,FP))
126 | #
127 | elapsed_time = time.time() - start_time # end timing
128 | # invalid frames are skipped above, so no None values enter the mean
129 | print('Sensitivity: {0:.3f}'.format(np.array(TPR_list_HT).mean()))
130 | print('Specificity: {0:.3f}'.format(1-np.array(FPR_list_HT).mean()))
131 | print('Average time (including preprocessing): {0:.4f}s'.format(elapsed_time/n + mean_pre_time))
132 | 
133 | print('----------EDLine detection-------------')
134 | 
135 | TPR_list_ED = []
136 | FPR_list_ED = []
137 | 
138 | start_time = time.time() # start timing
139 | 
140 | for i, edges in enumerate(edges_list):
141 |     # EDLine detection
142 |     lines = LD.EDLine(edges, minLineLen,lineFitErrThreshold)
143 |     # get prediction label
144 |     TP, FP, FN,_ = TT.ConfusionMatrix(lines,image_GT_list[i])
145 |     if TP + FN > 4: # there is a corner dot with 2x2 px in the ground truth
146 |         TPR_list_ED.append(TT.TPR(TP,FN))
147 |         FPR_list_ED.append(TT.FPR(TP,FP))
148 | 
149 | elapsed_time = time.time() - start_time # end timing
150 | 
151 | # frames that were skipped above do not enter the mean
152 | print('Sensitivity: {0:.3f}'.format(np.array(TPR_list_ED).mean()))
153 | print('Specificity: {0:.3f}'.format(1-np.array(FPR_list_ED).mean()))
154 | print('Average time (including preprocessing): {0:.4f}s'.format(elapsed_time/n + mean_pre_time))
155 | 
156 | print('--------------PLineD---------------')
157 | 
158 | TPR_list_PD = []
159 | FPR_list_PD = []
160 | 
161 | start_time = time.time() # start timing
162 | 
163 | for i, edges in enumerate(edges_list):
164 |     # PLineD
165 |     lines = LD.PLineD(edges,min_L, min_P,tol_a, tol_d)
166 |     # get prediction label
167 |     TP, FP, FN, _ = TT.ConfusionMatrix(lines,image_GT_list[i])
168 |     if TP > 4:
169 |         TPR_list_PD.append(TT.TPR(TP,FN))
170 |         FPR_list_PD.append(TT.FPR(TP,FP))
171 | 
172 | elapsed_time = time.time() - start_time # end timing
173 | 
174 | print('Sensitivity: {0:.3f}'.format(np.array(TPR_list_PD).mean()))
175 | print('Specificity: {0:.3f}'.format(1-np.array(FPR_list_PD).mean()))
176 | print('Average time (including preprocessing): {0:.4f}s'.format(elapsed_time/n + mean_pre_time))
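The script above only reports confusion-matrix statistics. For visually inspecting a single frame, a minimal sketch along these lines can help; `draw_chains` is a hypothetical helper (not part of the repository), and it assumes the variables defined in the script above (`origin_image_list`, `edges_list`, and the PLineD parameters) are in scope:

```python
import cv2
import LineDetector as LD

# hypothetical helper: paint every pixel of every detected chain onto a copy of the image
def draw_chains(image_bgr, chains, color=(0, 0, 255)):
    vis = image_bgr.copy()
    for chain in chains:
        for r, c in chain:      # pixel chains store (row, col) pairs
            vis[r, c] = color
    return vis

# run PLineD on the first test image and save an overlay for inspection
sample = cv2.imread(origin_image_list[0])
detected = LD.PLineD(edges_list[0], min_L, min_P, tol_a, tol_d)
cv2.imwrite('plined_overlay.bmp', draw_chains(sample, detected))
```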
--------------------------------------------------------------------------------
/TestTool.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | import numpy as np
4 | import cv2
5 | 
6 | THRES = 20
7 | K = 5
8 | SIGMA = 3
9 | 
10 | # true positive rate (sensitivity)
11 | def TPR(TP,FN):
12 |     if TP + FN > 0:
13 |         return float(TP)/(TP+FN)
14 |     else:
15 |         return None
16 | # false positive measure FP/(TP+FP) (formally the false discovery rate), reported as 1 - specificity
17 | def FPR(TP, FP):
18 |     if FP+TP > 0:
19 |         return float(FP)/(TP+FP)
20 |     else:
21 |         return 0.0
22 | # translate RHT lines to pixel chains
23 | def TranslateLines(RHT_lines):
24 |     lines = []
25 |     if RHT_lines is not None: # if valid
26 |         for line in RHT_lines:
27 |             line = line[0]
28 |             # number of samples along the longest direction
29 |             n = max(abs(line[2]-line[0])+1, abs(line[3]-line[1])+1)
30 |             # expand line
31 |             x = np.round(np.linspace(line[0],line[2],n)).astype(int).tolist()
32 |             y = np.round(np.linspace(line[1],line[3],n)).astype(int).tolist()
33 |             lines.append(list(zip(*[y,x])))
34 |     return lines
35 | 
36 | # Confusion matrix
37 | def ConfusionMatrix(pred_lines, ground_truth):
38 |     # concatenate all pixel points
39 |     if pred_lines:
40 |         pixels = np.concatenate(pred_lines)
41 |     else:
42 |         pixels = []
43 |     # create edge map (tuple indexing avoids NumPy's deprecated list-of-sequences indexing)
44 |     edge_map = np.zeros(ground_truth.shape)
45 |     if len(pixels) > 0: edge_map[tuple(zip(*pixels))] = 255
46 |     # smooth the map to get confidence
47 |     edge_map = cv2.GaussianBlur(edge_map,(K,K),SIGMA)
48 |     # calculate overlap
49 |     TP = np.sum(np.logical_and(ground_truth > THRES, edge_map > THRES))
50 |     # False Positive
51 |     FP = np.sum(np.logical_and(ground_truth < THRES, edge_map > THRES))
52 |     # False Negative
53 |     FN = np.sum(np.logical_and(ground_truth > THRES, edge_map < THRES))
54 | 
55 |     return TP, FP, FN, edge_map
--------------------------------------------------------------------------------
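Complementing the Usage section of the README, here is a minimal end-to-end sketch of how the modules are intended to fit together. The image path and the parameter values are illustrative assumptions only:

```python
import cv2
import numpy as np
import EdgeDrawing
import LineDetector as LD

# Edge Drawing parameters (illustrative values; see EDParam_default in EdgeDrawing.py)
EDParam = {'ksize': 3, 'sigma': 1.0, 'gradientThreshold': 25,
           'anchorThreshold': 10, 'scanIntervals': 1}

# 1. load a grayscale image (placeholder path)
gray = cv2.imread('example.bmp', cv2.IMREAD_GRAYSCALE)

# 2. extract edge pixel chains and an edge map with Edge Drawing
ED = EdgeDrawing.EdgeDrawing(EDParam)
edges, edge_map = ED.EdgeDrawing(gray, smoothed=False)

# 3a. fit straight line segments with EDLine ...
lines_ed = LD.EDLine(edges, minLineLen=40, lineFitErrThreshold=1.0)

# 3b. ... or extract groups of long, parallel pixel chains with PLineD
lines_pd = LD.PLineD(edges, min_L=10, min_P=1000, tol_a=5*np.pi/180, tol_d=60)

print(len(lines_ed), 'EDLine segments,', len(lines_pd), 'PLineD chains')
```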