├── ariadne
│   ├── matchers
│   │   ├── __init__.py
│   │   ├── direction_matcher.py
│   │   └── visual_matchers.py
│   ├── utils
│   │   ├── __init__.py
│   │   ├── interactivewindow.py
│   │   └── colors.py
│   ├── predictors
│   │   ├── __init__.py
│   │   ├── distance.py
│   │   ├── overall.py
│   │   ├── visual.py
│   │   └── curvature.py
│   ├── predictors_config.py
│   └── core.py
├── images
│   ├── image_1.jpg
│   └── image_2.jpg
├── anaconda_environment.yml
├── configs.ini
├── README.md
└── manual_path_finder.py

--------------------------------------------------------------------------------
/ariadne/matchers/__init__.py:
--------------------------------------------------------------------------------
1 | 

--------------------------------------------------------------------------------
/ariadne/utils/__init__.py:
--------------------------------------------------------------------------------
1 | 

--------------------------------------------------------------------------------
/ariadne/predictors/__init__.py:
--------------------------------------------------------------------------------
1 | 

--------------------------------------------------------------------------------
/images/image_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/m4nh/ariadne/HEAD/images/image_1.jpg

--------------------------------------------------------------------------------
/images/image_2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/m4nh/ariadne/HEAD/images/image_2.jpg

--------------------------------------------------------------------------------
/anaconda_environment.yml:
--------------------------------------------------------------------------------
1 | name: ariadne
2 | channels:
3 |   - anaconda
4 |   - defaults
5 | dependencies:
6 |   - matplotlib=2.2.2
7 |   - networkx=2.1
8 |   - scikit-image=0.13.1
9 |   - numpy=1.14.3
10 |   - numpy-base=1.14.3
11 |   - python=3.6.5
12 |   - configparser=3.5.0
13 |   - pip:
14 |     - opencv-python==3.4.1.15
15 | prefix: /home/daniele/anaconda3/envs/ariadne
16 | 

--------------------------------------------------------------------------------
/configs.ini:
--------------------------------------------------------------------------------
1 | [default]
2 | num_segments: 2000
3 | segmentator_compactness: 20
4 | segmentator_sigma: 1
5 | min_score: 0.5
6 | end_region_radius: 20
7 | max_length: 100
8 | predictors_function: s0*s1*s3
9 | predictors_visual_bins: 3
10 | predictors_start_depth: 1
11 | predictors_flags: VCD
12 | predictors_neighbours: 5
13 | 
14 | 
15 | [clutter]
16 | num_segments: 1500
17 | segmentator_compactness: 10
18 | segmentator_sigma: 1
19 | min_score: 0.5
20 | end_region_radius: 20
21 | max_length: 100
22 | predictors_function: s0*s1*s3
23 | predictors_visual_bins: 3
24 | predictors_start_depth: 1
25 | predictors_flags: VCD
26 | predictors_neighbours: 5
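Both sections expose the same keys and are selected at runtime by ```manual_path_finder.py``` through its ```--config_name``` argument. As a minimal standalone sketch (not a file of this repository, and assuming ```configs.ini``` is in the current working directory), this is how the segmentator-related keys of the two sections can be read back with ```configparser```:

```
import configparser

config = configparser.ConfigParser()
config.read('configs.ini')

for name in ('default', 'clutter'):
    section = config[name]
    # These three keys are the ones forwarded to the SLIC superpixel segmentator.
    print(name, {
        'n_segments': section.getint('num_segments'),
        'compactness': section.getfloat('segmentator_compactness'),
        'sigma': section.getfloat('segmentator_sigma'),
    })
# default -> 2000 segments, compactness 20; clutter -> 1500 segments, compactness 10.
```

The remaining keys (```min_score```, ```end_region_radius```, ```max_length``` and the ```predictors_*``` entries) are consumed the same way when the script builds its predictor.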
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Ariadne
2 | 
3 | ## See also the [Cable Dataset](https://github.com/m4nh/cables_dataset)
4 | ## Dependencies
5 | 
6 | The ```anaconda_environment.yml``` file lists the mandatory dependencies of the Ariadne package. With Anaconda installed you can directly create an environment from this manifest with:
7 | 
8 | ```
9 | conda env create -f anaconda_environment.yml
10 | ```
11 | 
12 | And then activate it with:
13 | 
14 | ```
15 | conda activate ariadne
16 | ```
17 | 
18 | ## Manual detection script
19 | 
20 | To try Ariadne you can launch:
21 | 
22 | ```
23 | python manual_path_finder.py --image_file images/image_1.jpg --config_name default
24 | ```
25 | 
26 | The ```config_name``` refers to the section defined in the ```configs.ini``` configuration file. As an alternative you can try
27 | the second configuration with:
28 | 
29 | ```
30 | python manual_path_finder.py --image_file images/image_2.jpg --config_name clutter
31 | ```
32 | 
33 | At startup the script displays the image; press the ```spacebar``` to launch the superpixel segmentation. Then
34 | click with the mouse on the two terminals of a cable and press the ```spacebar``` again to start the iterative segmentation procedure. If necessary, you can close the script by pressing ```q``` while the main window has focus.
35 | 
36 | ## Citation
37 | 
38 | If you use this code for your research, please cite our paper *Let's take a Walk on Superpixels Graphs: Deformable Linear Objects Segmentation and Model Estimation*:
39 | 
40 | ```
41 | @article{degregorio2018,
42 |   title={
43 |     Let's take a Walk on Superpixels Graphs: Deformable Linear Objects Segmentation and Model Estimation},
44 |   author={De Gregorio, Daniele and Palli, Gianluca and Di Stefano, Luigi},
45 |   journal={arXiv preprint arXiv:1810.04461},
46 |   year={2018}
47 | }
48 | ```

--------------------------------------------------------------------------------
/ariadne/predictors/distance.py:
--------------------------------------------------------------------------------
1 | import math
2 | import cv2
3 | import numpy as np
4 | from scipy.stats import norm
5 | 
6 | 
7 | class DistancePredictor(object):
8 |     """
9 |     Base class for distance Predictors
10 | 
11 |     Parameters
12 |     ----------
13 |     ariadne : Ariadne
14 |         object associated with a source image
15 | 
16 |     """
17 | 
18 |     def __init__(self, ariadne):
19 |         self.ariadne = ariadne
20 | 
21 |     def computeScore(self, n1, n2):
22 |         return 1.0
23 | 
24 | 
25 | class DistancePredictorMaxDistance(DistancePredictor):
26 |     """
27 |     Distance predictor with max distance computation
28 | 
29 |     Parameters
30 |     ----------
31 |     ariadne : Ariadne
32 |         object associated with a source image
33 | 
34 |     max_distance : float
35 |         max allowed distance
36 | 
37 |     bradford_coefficient : float
38 |         c parameter of the Bradford distribution
39 | 
40 |     """
41 | 
42 |     def __init__(self, ariadne, max_distance=100, bradford_coefficient=0.2):
43 |         super(DistancePredictorMaxDistance, self).__init__(ariadne)
44 | 
45 |         self.max_distance = max_distance
46 |         self.bradford_coefficient = bradford_coefficient
47 | 
48 |     def computeScore(self, n1, n2):
49 |         """
50 |         Computes the distance score between two nodes/regions
51 | 
52 |         Parameters
53 |         ----------
54 |         n1 : int
55 |             first node/region
56 |         n2 : int
57 |             second node/region
58 |         """
59 |         p1 = self.ariadne.graph.centroidPoint(n1)
60 |         p2 = self.ariadne.graph.centroidPoint(n2)
61 |         distance = np.linalg.norm(p1 - p2)
62 |         norm_distance = distance / self.max_distance
63 |         if norm_distance > 1:
64 |             return 0.0
65 |         else:
66 |             c = self.bradford_coefficient
67 |             return c / (math.log(1 + c) * (1 + c * norm_distance))
68 |             # return bradford.pdf(norm_distance, c=self.bradford_coefficient)
69 | 
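The commented-out line above hints that this expression is the closed form of a (truncated) Bradford density. As a quick standalone illustration (not part of the repository), the sketch below evaluates the same formula for a few hypothetical pixel distances using the default ```max_distance=100``` and ```bradford_coefficient=0.2```; no Ariadne graph is needed.

```
import math

def bradford_score(distance, max_distance=100.0, c=0.2):
    # Same expression as DistancePredictorMaxDistance.computeScore, with the
    # centroid lookup replaced by a plain distance value.
    norm_distance = distance / max_distance
    if norm_distance > 1.0:
        return 0.0
    return c / (math.log(1 + c) * (1 + c * norm_distance))

for d in (0, 25, 50, 75, 100, 150):
    print(d, round(bradford_score(d), 3))
# The score decays monotonically from ~1.097 at d=0 to ~0.914 at d=100 and is
# cut to 0 beyond max_distance; being a density value, it can exceed 1.
```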
-------------------------------------------------------------------------------- /ariadne/utils/interactivewindow.py: -------------------------------------------------------------------------------- 1 | import cv2 2 | import numpy as np 3 | import sys 4 | 5 | 6 | class InteractiveWindowKeys(object): 7 | KEY_ARROW_LEFT = 81 8 | KEY_ARROW_RIGHT = 83 9 | 10 | 11 | class InteractiveWindow(object): 12 | EVENT_DRAWING = "EVENT_DRAWING" 13 | EVENT_CLEARING = "EVENT_CLEARING" 14 | EVENT_MOUSEDOWN = "EVENT_MOUSEDOWN" 15 | EVENT_MOUSEUP = "EVENT_MOUSEUP" 16 | EVENT_MOUSEMOVE = "EVENT_MOUSEMOVE" 17 | EVENT_QUIT = "EVENT_QUIT" 18 | EVENT_KEYDOWN = "EVENT_KEYDOWN" 19 | 20 | def __init__(self, name, autoexit=False): 21 | self.name = name 22 | cv2.namedWindow(self.name, cv2.WINDOW_NORMAL) 23 | cv2.setMouseCallback(self.name, self.mouseCallback) 24 | 25 | self.drawing = False 26 | self.clearing = False 27 | self.callbacks = [] 28 | self.callbacks_map = {} 29 | self.autoexit = autoexit 30 | 31 | def mouseCallback(self, event, x, y, flags, param): 32 | point = np.array([x, y]) 33 | if event == cv2.EVENT_LBUTTONDOWN: 34 | self.drawing = True 35 | self.fireEvent(InteractiveWindow.EVENT_MOUSEDOWN, (0, point)) 36 | 37 | if event == cv2.EVENT_MOUSEMOVE: 38 | if self.drawing == True: 39 | self.fireEvent(InteractiveWindow.EVENT_DRAWING, (0, point)) 40 | elif self.clearing == True: 41 | self.fireEvent(InteractiveWindow.EVENT_CLEARING, (1, point)) 42 | else: 43 | self.fireEvent(InteractiveWindow.EVENT_MOUSEMOVE, (1, point)) 44 | 45 | if event == cv2.EVENT_LBUTTONUP: 46 | self.drawing = False 47 | self.fireEvent(InteractiveWindow.EVENT_MOUSEUP, (0, point)) 48 | 49 | if event == cv2.EVENT_MBUTTONDOWN: 50 | self.clearing = True 51 | self.fireEvent(InteractiveWindow.EVENT_MOUSEDOWN, (2, point)) 52 | 53 | if event == cv2.EVENT_MBUTTONUP: 54 | self.clearing = False 55 | self.fireEvent(InteractiveWindow.EVENT_MOUSEUP, (2, point)) 56 | 57 | def showImg(self, img=np.zeros((500, 500)), time=0, disable_keys=False): 58 | #res = cv2.resize(img, None, fx=2, fy=2, interpolation=cv2.INTER_CUBIC) 59 | cv2.imshow(self.name, img) 60 | if time >= 0: 61 | c = cv2.waitKey(time) 62 | if disable_keys: 63 | return 64 | # print "CH", c 65 | c = c & 255 66 | if c != 255: 67 | self.fireEvent(InteractiveWindow.EVENT_KEYDOWN, (chr(c), c)) 68 | if c == 113: 69 | self.fireEvent(InteractiveWindow.EVENT_QUIT, None) 70 | if self.autoexit: 71 | sys.exit(0) 72 | return c 73 | return -1 74 | 75 | def fireEvent(self, evt, data): 76 | for c in self.callbacks: 77 | c(evt, data) 78 | for event, cbs in self.callbacks_map.items(): 79 | if event == evt: 80 | for cb in cbs: 81 | cb(data) 82 | 83 | def registerCallback(self, callback, event=None): 84 | if event is None: 85 | self.callbacks.append(callback) 86 | else: 87 | if event not in self.callbacks_map: 88 | self.callbacks_map[event] = [] 89 | self.callbacks_map[event].append(callback) 90 | 91 | def removeCallback(self, callback, event=None): 92 | if event is None: 93 | self.callbacks.remove(callback) 94 | else: 95 | if event not in self.callbacks_map: 96 | self.callbacks_map[event].remove(callback) 97 | -------------------------------------------------------------------------------- /ariadne/predictors/overall.py: -------------------------------------------------------------------------------- 1 | 2 | class OverallScoreFunction(object): 3 | 4 | def __init__(self, scores_components=[], score_function="1.0"): 5 | self.scores_components = scores_components 6 | self.score_function = score_function 7 | 8 | def 
computeScores(self, path): 9 | scores = [] 10 | for sf in self.scores_components: 11 | sname = sf[0] 12 | sfunc = sf[1] 13 | scores.append((sname, sfunc.computeScore(path))) 14 | return scores 15 | 16 | def computeScore(self, path): 17 | scores = self.computeScores(path) 18 | for s in scores: 19 | sname = s[0] 20 | sval = s[1] 21 | exec("{}={}".format(sname, sval)) 22 | return eval(self.score_function) 23 | 24 | 25 | class OverallPredictor(object): 26 | """ 27 | Aggregate of simple predictors 28 | """ 29 | 30 | def __init__(self, predictors={}, options={}, neighbourhood_levels=[1, 2], start_depth=1, score_function=OverallScoreFunction()): 31 | 32 | self.visual_predictor = None 33 | self.curvature_predictor = None 34 | self.distance_predictor = None 35 | self.neighbourhood_levels = neighbourhood_levels 36 | self.score_function = score_function 37 | self.start_depth = start_depth 38 | 39 | ####################################### 40 | # Predictors 41 | ####################################### 42 | if 'visual' in predictors: 43 | self.visual_predictor = predictors['visual'] 44 | 45 | if 'curvature' in predictors: 46 | self.curvature_predictor = predictors['curvature'] 47 | 48 | if 'distance' in predictors: 49 | self.distance_predictor = predictors['distance'] 50 | 51 | ####################################### 52 | # Options 53 | ####################################### 54 | if 'neighbourhood_levels' in options: 55 | self.neighbourhood_levels = options['neighbourhood_levels'] 56 | 57 | def getNeighbourhoodLevel(self, index): 58 | if index <= 0: 59 | return self.neighbourhood_levels[0] 60 | if index < len(self.neighbourhood_levels): 61 | return self.neighbourhood_levels[index] 62 | else: 63 | return self.neighbourhood_levels[-1] 64 | 65 | def computeScoreVisual(self, n1, n2, reference_value=None): 66 | """ 67 | Visual score computation between two nodes/regions 68 | 69 | Parameters 70 | ---------- 71 | h1 : array 72 | first histogram 73 | h2 : array 74 | second histogram 75 | reference_value : float 76 | reference value to compute the normal probability for the target score 77 | """ 78 | 79 | if self.visual_predictor is not None: 80 | return self.visual_predictor.computeScore(n1, n2, reference_value=reference_value) 81 | else: 82 | return 1.0 83 | 84 | def computeScoreCurvature(self, path, initial_direction=None): 85 | """ 86 | Computes the score for a given path. 
Considering also degenerate paths 87 | 88 | Parameters 89 | ---------- 90 | path : AriadnePath 91 | target path 92 | 93 | initial_direction : np.array 94 | initial direction used to compute the score of single-edge paths 95 | 96 | """ 97 | if self.curvature_predictor is not None: 98 | return self.curvature_predictor.computeScore(path, initial_direction=initial_direction) 99 | else: 100 | return 1.0 101 | 102 | def computeScoreDistance(self, n1, n2): 103 | """ 104 | Computes the score for a given distance 105 | 106 | Parameters 107 | ---------- 108 | n1 : int 109 | first node/region 110 | n2 : array 111 | second node/region 112 | 113 | """ 114 | if self.distance_predictor is not None: 115 | return self.distance_predictor.computeScore(n1, n2) 116 | else: 117 | return 1.0 118 | 119 | def computeScores(self, path): 120 | scores = [] 121 | for sf in self.scores_functions: 122 | scores.append(sf.computeScore(path)) 123 | return scores 124 | -------------------------------------------------------------------------------- /ariadne/utils/colors.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy as np 3 | 4 | material_colors_map = {'pink': {'200': '#F48FB1', '900': '#880E4F', '600': '#D81B60', 'A100': '#FF80AB', '300': '#F06292', 'A400': '#F50057', '700': '#C2185B', '50': '#FCE4EC', 'A700': '#C51162', '400': '#EC407A', '100': '#F8BBD0', '800': '#AD1457', 'A200': '#FF4081', '500': '#E91E63'}, 'blue': {'200': '#90CAF9', '900': '#0D47A1', '600': '#1E88E5', 'A100': '#82B1FF', '300': '#64B5F6', 'A400': '#2979FF', '700': '#1976D2', '50': '#E3F2FD', 'grey': '#263238', 'A700': '#2962FF', '400': '#42A5F5', '100': '#BBDEFB', '800': '#1565C0', 'A200': '#448AFF', '500': '#2196F3'}, 'indigo': {'200': '#9FA8DA', '900': '#1A237E', '600': '#3949AB', 'A100': '#8C9EFF', '300': '#7986CB', 'A400': '#3D5AFE', '700': '#303F9F', '50': '#E8EAF6', 'A700': '#304FFE', '400': '#5C6BC0', '100': '#C5CAE9', '800': '#283593', 'A200': '#536DFE', '500': '#3F51B5'}, 'brown': {'200': '#BCAAA4', '900': '#3E2723', '600': '#6D4C41', '300': '#A1887F', '700': '#5D4037', '50': '#EFEBE9', '400': '#8D6E63', '100': '#D7CCC8', '800': '#4E342E', '500': '#795548'}, 'purple': {'200': '#CE93D8', '900': '#4A148C', '600': '#8E24AA', 'A100': '#EA80FC', '300': '#BA68C8', 'A400': '#D500F9', '700': '#7B1FA2', '50': '#F3E5F5', 'A700': '#AA00FF', '400': '#AB47BC', '100': '#E1BEE7', '800': '#6A1B9A', 'A200': '#E040FB', '500': '#9C27B0'}, 'light': {'blue': '#0091EA', 'green': '#64DD17'}, 'grey': {'200': '#EEEEEE', '900': '#212121', '600': '#757575', '300': '#E0E0E0', '700': '#616161', '50': '#FAFAFA', '400': '#BDBDBD', '100': '#F5F5F5', '800': '#424242', '500': '#9E9E9E'}, 'deep': {'purple': '#6200EA', 'orange': '#DD2600'}, 'black': {'1000': '#000000'}, 'amber': {'200': '#FFE082', '900': '#FF6F00', '600': '#FFB300', 'A100': '#FFE57F', '300': '#FFD54F', 'A400': '#FFC400', '700': '#FFA000', '50': '#FFF8E1', 'A700': '#FFAB00', '400': '#FFCA28', '100': '#FFECB3', '800': '#FF8F00', 'A200': '#FFD740', '500': '#FFC107'}, 'green': { 5 | '200': '#A5D6A7', '900': '#1B5E20', '600': '#43A047', 'A100': '#B9F6CA', '300': '#81C784', 'A400': '#00E676', '700': '#388E3C', '50': '#E8F5E9', 'A700': '#00C853', '400': '#66BB6A', '100': '#C8E6C9', '800': '#2E7D32', 'A200': '#69F0AE', '500': '#4CAF50'}, 'yellow': {'200': '#FFF590', '900': '#F57F17', '600': '#FDD835', 'A100': '#FFFF82', '300': '#FFF176', 'A400': '#FFEA00', '700': '#FBC02D', '50': '#FFFDE7', 'A700': '#FFD600', '400': '#FFEE58', '100': '#FFF9C4', 
'800': '#F9A825', 'A200': '#FFFF00', '500': '#FFEB3B'}, 'teal': {'200': '#80CBC4', '900': '#004D40', '600': '#00897B', 'A100': '#A7FFEB', '300': '#4DB6AC', 'A400': '#1DE9B6', '700': '#00796B', '50': '#E0F2F1', 'A700': '#00BFA5', '400': '#26A69A', '100': '#B2DFDB', '800': '#00695C', 'A200': '#64FFDA', '500': '#009688'}, 'orange': {'200': '#FFCC80', '900': '#E65100', '600': '#FB8C00', 'A100': '#FFD180', '300': '#FFB74D', 'A400': '#FF9100', '700': '#F57C00', '50': '#FFF3E0', 'A700': '#FF6D00', '400': '#FFA726', '100': '#FFE0B2', '800': '#EF6C00', 'A200': '#FFAB40', '500': '#FF9800'}, 'cyan': {'200': '#80DEEA', '900': '#006064', '600': '#00ACC1', 'A100': '#84FFFF', '300': '#4DD0E1', 'A400': '#00E5FF', '700': '#0097A7', '50': '#E0F7FA', 'A700': '#00B8D4', '400': '#26C6DA', '100': '#B2EBF2', '800': '#00838F', 'A200': '#18FFFF', '500': '#00BCD4'}, 'white': {'500': '#ffffff'}, 'red': {'200': '#EF9A9A', '900': '#B71C1C', '600': '#E53935', 'A100': '#FF8A80', '300': '#E57373', 'A400': '#FF1744', '700': '#D32F2F', '50': '#FFEBEE', 'A700': '#D50000', '400': '#EF5350', '100': '#FFCDD2', '800': '#C62828', 'A200': '#FF5252', '500': '#F44336'}, 'lime': {'200': '#E6EE9C', '900': '#827717', '600': '#C0CA33', 'A100': '#F4FF81', '300': '#DCE775', 'A400': '#C6FF00', '700': '#A4B42B', '50': '#F9FBE7', 'A700': '#AEEA00', '400': '#D4E157', '100': '#F0F4C3', '800': '#9E9D24', 'A200': '#EEFF41', '500': '#CDDC39'}} 6 | 7 | 8 | def getColor(name='', variant='500', out_type='BGRf', conversion_rate=1.0 / 255.0): 9 | 10 | if name == '': 11 | name = random.choice(material_colors_map.keys()) 12 | 13 | color = material_colors_map[name][variant] 14 | 15 | if out_type == 'HEX': 16 | return color 17 | if out_type == 'RGB': 18 | color = color.lstrip("#") 19 | return tuple(int(color[i:i + 2], 16) for i in (0, 2, 4)) 20 | if out_type == 'RGBf': 21 | color = color.lstrip("#") 22 | col = tuple(int(color[i:i + 2], 16) for i in (0, 2, 4)) 23 | return np.array(col) * conversion_rate 24 | if out_type == 'BGR': 25 | color = color.lstrip("#") 26 | rgb = tuple(int(color[i:i + 2], 16) for i in (0, 2, 4)) 27 | return tuple(reversed(rgb)) 28 | if out_type == 'BGRf': 29 | color = color.lstrip("#") 30 | rgb = tuple(int(color[i:i + 2], 16) for i in (0, 2, 4)) 31 | rgb = tuple(reversed(rgb)) 32 | bgr = np.array(rgb) * conversion_rate 33 | return bgr 34 | 35 | 36 | def getPalette(palette_name="default", conversion_rate=1.0 / 255.0): 37 | colors = [] 38 | colors_names = ['red', 'green', 'blue', 'yellow', 'indigo', 'purple', 'teal', 'cyan', 'orange', 'white', 'lime', 'amber', 'pink', 'brown'] 39 | for c in colors_names: 40 | colors.append(getColor(name=c)) 41 | return colors 42 | 43 | 44 | def getRandomColors(): 45 | 46 | keys = material_colors_map.keys() 47 | random.shuffle(keys) 48 | print(keys) 49 | -------------------------------------------------------------------------------- /ariadne/matchers/direction_matcher.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | from scipy.stats import vonmises 4 | 5 | 6 | class DirectionMatcher(object): 7 | 8 | def __init__(self, ariadne): 9 | self.ariadne = ariadne 10 | 11 | def computeCost(self, n1, n2, direction): 12 | return 0.0 13 | 14 | 15 | class SimpleDirectionMatcher(DirectionMatcher): 16 | 17 | def __init__(self, ariadne): 18 | super(SimpleDirectionMatcher, self).__init__(ariadne) 19 | 20 | def computeCost(self, n1, n2, prev_direction): 21 | node1 = self.ariadne.graph.node(n1) 22 | node2 = self.ariadne.graph.node(n2) 23 | 
p1 = node1['centroid_point'] 24 | p2 = node2['centroid_point'] 25 | direction = p2 - p1 26 | direction = direction / np.linalg.norm(direction) 27 | comp_direction = prev_direction / np.linalg.norm(prev_direction) 28 | return 1.0 - (np.dot(direction, comp_direction) + 1.0) / 2.0 29 | 30 | 31 | class PathDirectionMatcher(object): 32 | 33 | def __init__(self, ariadne, path): 34 | self.ariadne = ariadne 35 | self.path = path 36 | 37 | def computeCost(self, n2, force_direction=None): 38 | return 0.0 39 | 40 | 41 | class VonMisesPathDirectionMatcher(PathDirectionMatcher): 42 | 43 | def __init__(self, ariadne, path, kappa=4, max_angle=math.pi * 0.5): 44 | super(VonMisesPathDirectionMatcher, self).__init__(ariadne, path) 45 | self.vonmises = vonmises(kappa) 46 | self.max_angle = max_angle 47 | 48 | def computeScore(self, path, force_direction=None): 49 | ####################################### 50 | # Single Node Path 51 | ####################################### 52 | if path.size() <= 1: 53 | return 1.0 54 | 55 | ####################################### 56 | # Single Edge Path 57 | ####################################### 58 | if path.size() == 2: 59 | if force_direction is None: 60 | return 1.0 61 | else: 62 | points = path.as2DPoints() 63 | p1 = np.array(points[0]) 64 | p2 = np.array(points[1]) 65 | direction = p2 - p1 66 | direction = direction / np.linalg.norm(direction) 67 | comp_direction = force_direction / \ 68 | np.linalg.norm(force_direction) 69 | angle = math.acos(np.dot(direction, comp_direction)) 70 | return self.vonmises.pdf(angle) 71 | 72 | ####################################### 73 | # Normal Path 74 | ####################################### 75 | directions = [] 76 | points = path.as2DPoints() 77 | for i in range(1, len(points)): 78 | p1 = np.array(points[i - 1]) 79 | p2 = np.array(points[i]) 80 | direction = p2 - p1 81 | direction = direction / np.linalg.norm(direction) 82 | directions.append(direction) 83 | 84 | thetas = [] 85 | for i in range(1, len(directions)): 86 | d1 = directions[i - 1] 87 | d2 = directions[i] 88 | a1 = math.atan2(d1[1], d1[0]) 89 | a2 = math.atan2(d2[1], d2[0]) 90 | angle = a1 - a2 91 | angle = math.acos(math.cos(angle)) 92 | thetas.append(angle) 93 | 94 | if math.fabs(thetas[-1]) > self.max_angle: 95 | return 0.0 96 | 97 | if len(thetas) == 1: 98 | return self.vonmises.pdf(thetas[0]) 99 | elif len(thetas) > 1: 100 | posterios = [] 101 | for i in range(1, len(thetas)): 102 | t1 = thetas[i - 1] 103 | t2 = thetas[i] 104 | posterios.append(self.vonmises.pdf(t1 - t2)) 105 | return np.prod(np.array(posterios).ravel())**(1.0 / (len(points) - 3.0)) 106 | 107 | def computePDF(self, n2, force_direction=None): 108 | # print("DIRECTION MATCHER COMPUTE PDF:", self.path.size()) 109 | if self.path.size() == 0: 110 | return 1.0 111 | 112 | node1 = self.ariadne.graph.node(self.path.last_node) 113 | node2 = self.ariadne.graph.node(n2) 114 | p1 = node1['centroid_point'] 115 | p2 = node2['centroid_point'] 116 | direction = p2 - p1 117 | direction = direction / np.linalg.norm(direction) 118 | 119 | if force_direction is not None: 120 | comp_direction = force_direction / np.linalg.norm(force_direction) 121 | angle = math.acos(np.dot(direction, comp_direction)) 122 | return self.vonmises.pdf(angle) 123 | else: 124 | if self.path.size() == 1: 125 | return 1.0 126 | 127 | if self.path.size() == 2: 128 | nodes = self.path.asList() 129 | node1 = self.ariadne.graph.node(nodes[0]) 130 | node2 = self.ariadne.graph.node(nodes[1]) 131 | node3 = self.ariadne.graph.node(n2) 132 | dir1 = 
node2['centroid_point'] - node1['centroid_point'] 133 | dir2 = node3['centroid_point'] - node2['centroid_point'] 134 | dir1 = dir1 / np.linalg.norm(dir1) 135 | dir2 = dir2 / np.linalg.norm(dir2) 136 | try: 137 | angle = math.acos(np.dot(dir1, dir2)) 138 | except: 139 | angle = math.pi 140 | # print("PDF", self.vonmises.pdf(angle)) 141 | return self.vonmises.pdf(angle) 142 | 143 | directions = [] 144 | points = self.path.as2DPoints() 145 | target_node = self.ariadne.graph.node(n2) 146 | points.append(target_node['centroid_point']) 147 | for i in range(1, len(points)): 148 | p1 = np.array(points[i - 1]) 149 | p2 = np.array(points[i]) 150 | direction = p2 - p1 151 | direction = direction / np.linalg.norm(direction) 152 | directions.append(direction) 153 | 154 | thetas = [] 155 | for i in range(1, len(directions)): 156 | d1 = directions[i - 1] 157 | d2 = directions[i] 158 | a1 = math.atan2(d1[1], d1[0]) 159 | a2 = math.atan2(d2[1], d2[0]) 160 | angle = a1 - a2 161 | thetas.append(angle) 162 | # print("DIRECTIONS", directions) 163 | # print("THETAS", thetas) 164 | posterios = [] 165 | if len(thetas) > 1: 166 | for i in range(1, len(thetas)): 167 | t1 = thetas[i - 1] 168 | t2 = thetas[i] 169 | posterios.append(self.vonmises.pdf(t1 - t2)) 170 | # print("PDF:", posterios) 171 | 172 | return np.prod(np.array(posterios).ravel())**(1.0 / (len(points) - 3.0)) 173 | -------------------------------------------------------------------------------- /ariadne/predictors/visual.py: -------------------------------------------------------------------------------- 1 | import math 2 | import cv2 3 | import numpy as np 4 | from scipy.stats import norm 5 | from scipy.stats import uniform 6 | 7 | 8 | class VisualPredictor(object): 9 | """ 10 | Base class for visual Predictors 11 | 12 | Parameters 13 | ---------- 14 | ariadne : Ariadne 15 | object associated with a source image 16 | 17 | """ 18 | 19 | def __init__(self, ariadne): 20 | self.ariadne = ariadne 21 | 22 | def computeScore(self, n1, n2, reference_value=None): 23 | return 1.0 24 | 25 | 26 | class VisualPredictorColor2DHistogram(VisualPredictor): 27 | """ 28 | 2D Histogram visual predictor 29 | 30 | Parameters 31 | ---------- 32 | ariadne : Ariadne 33 | object associated with a source image 34 | 35 | bins : Array 36 | Bins for each dimension, default: [32,32] 37 | 38 | normalization_sigma : float 39 | Sigma for normal distribution of histogram distances 40 | 41 | """ 42 | 43 | def __init__(self, ariadne, bins=[8, 8, 8], normalization_sigma=0.4, enable_cache=False): 44 | super(VisualPredictorColor2DHistogram, self).__init__(ariadne) 45 | 46 | self.img = (ariadne.image * 255.0).astype(np.uint8) 47 | #self.img = cv2.cvtColor(self.img, cv2.COLOR_RGB2BGR) 48 | self.hsv = cv2.cvtColor(self.img, cv2.COLOR_RGB2HSV) 49 | self.h, self.s, self.v = cv2.split(self.hsv) 50 | 51 | self.bins = bins 52 | self.normalization_sigma = normalization_sigma 53 | self.histogram_map = {} 54 | self.enable_cache = enable_cache 55 | 56 | # if precompute_histograms: 57 | # nodes = self.ariadne.graph.nodes() 58 | # for i, n in enumerate(nodes): 59 | # region = self.ariadne.graph.region(n) 60 | # histo = self.regionHistogram(region) 61 | # self.histogram_map[region] = histo 62 | # # print(histo) 63 | # print("Histogram percentage", 100.0 * 64 | # float(i) / float(len(nodes))) 65 | 66 | def regionHistogram(self, n): 67 | """ 68 | Computes histogram for the region with node id 'n' 69 | 70 | Parameters 71 | ---------- 72 | n : int 73 | node/region index 74 | 75 | """ 76 | if 
self.enable_cache: 77 | if n in self.histogram_map: 78 | return self.histogram_map[n] 79 | 80 | hist = cv2.calcHist( 81 | [self.hsv], 82 | [0, 1, 2], 83 | self.ariadne.graph.maskImage(n), 84 | self.bins, 85 | [0, 180, 0, 256, 0, 256] 86 | ) 87 | hist = hist / np.sum(hist.ravel()) 88 | 89 | if self.enable_cache: 90 | self.histogram_map[n] = hist 91 | return hist 92 | 93 | def histogramComparison(self, h1, h2): 94 | """ 95 | Comparison function between two histograms 96 | 97 | Parameters 98 | ---------- 99 | h1 : array 100 | first histogram 101 | h2 : array 102 | second histogram 103 | 104 | """ 105 | 106 | return cv2.compareHist(h1, h2, cv2.HISTCMP_INTERSECT) 107 | 108 | def computeNormalProbability(self, value): 109 | """ 110 | Computes an approximation of the probability in a normal distribution 111 | """ 112 | delta = 0.05 113 | p1 = norm.pdf(value + delta, scale=self.normalization_sigma) 114 | p2 = norm.pdf(value - delta, scale=self.normalization_sigma) 115 | return (delta / 2.0) * (p1 + p2) 116 | 117 | def computeScore(self, n1, n2, reference_value=None): 118 | """ 119 | Computes visual (match) score between two nodes/regions 120 | 121 | Parameters 122 | ---------- 123 | n1 : int 124 | first node/region 125 | n2 : array 126 | second node/region 127 | reference_value : float 128 | reference value to compute the normal probability for the target score 129 | """ 130 | 131 | h1 = self.regionHistogram(n1) 132 | h2 = self.regionHistogram(n2) 133 | score = self.histogramComparison(h1, h2) 134 | if reference_value is None: 135 | return score 136 | else: 137 | return self.computeNormalProbability(math.fabs(score - reference_value)) 138 | 139 | 140 | class VisualPredictorColor3DHistogram(VisualPredictor): 141 | """ 142 | 3D Histogram visual predictor 143 | 144 | Parameters 145 | ---------- 146 | ariadne : Ariadne 147 | object associated with a source image 148 | 149 | bins : Array 150 | Bins for each dimension, default: [32,32] 151 | 152 | normalization_sigma : float 153 | Sigma for normal distribution of histogram distances 154 | 155 | """ 156 | 157 | def __init__(self, ariadne, bins=[8, 8, 8], distribution_parameters=[0.5], enable_cache=False): 158 | super(VisualPredictorColor3DHistogram, self).__init__(ariadne) 159 | 160 | self.img = (ariadne.image * 255.0).astype(np.uint8) 161 | #self.hsv = cv2.cvtColor(self.img, cv2.COLOR_RGB2BGR) 162 | self.hsv = cv2.cvtColor(self.img, cv2.COLOR_RGB2HSV) 163 | self.h, self.s, self.v = cv2.split(self.hsv) 164 | 165 | self.bins = bins 166 | self.distribution_parameters = distribution_parameters 167 | self.histogram_map = {} 168 | self.enable_cache = enable_cache 169 | 170 | def regionHistogram(self, n): 171 | """ 172 | Computes histogram for the region with node id 'n' 173 | 174 | Parameters 175 | ---------- 176 | n : int 177 | node/region index 178 | 179 | """ 180 | if self.enable_cache: 181 | if n in self.histogram_map: 182 | return self.histogram_map[n] 183 | 184 | hist = cv2.calcHist( 185 | [self.hsv], 186 | [0, 1, 2], 187 | self.ariadne.graph.maskImage(n), 188 | self.bins, 189 | [0, 180, 0, 256, 0, 256] 190 | ) 191 | hist = hist / np.sum(hist.ravel()) 192 | 193 | if self.enable_cache: 194 | self.histogram_map[n] = hist 195 | return hist 196 | 197 | def histogramComparison(self, h1, h2): 198 | """ 199 | Comparison function between two histograms 200 | 201 | Parameters 202 | ---------- 203 | h1 : array 204 | first histogram 205 | h2 : array 206 | second histogram 207 | 208 | """ 209 | 210 | return cv2.compareHist(h1, h2, cv2.HISTCMP_INTERSECT) 211 | 212 | 
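Both histogram predictors in this file score region similarity with ```cv2.HISTCMP_INTERSECT``` on L1-normalized HSV histograms. The following self-contained sketch (not part of ```visual.py```; the solid-color patches and bin counts are made up for the example) shows how that intersection behaves: identical appearance scores close to 1, while a different hue scores near 0.

```
import cv2
import numpy as np

def patch_histogram(bgr_patch, bins=(8, 8, 8)):
    # Mirrors regionHistogram above, but over a whole patch instead of a
    # superpixel mask: 3D HSV histogram, normalized to sum to 1.
    hsv = cv2.cvtColor(bgr_patch, cv2.COLOR_BGR2HSV)
    hist = cv2.calcHist([hsv], [0, 1, 2], None, list(bins), [0, 180, 0, 256, 0, 256])
    return (hist / hist.sum()).astype(np.float32)

red = np.full((20, 20, 3), (0, 0, 255), dtype=np.uint8)   # solid red patch (BGR)
blue = np.full((20, 20, 3), (255, 0, 0), dtype=np.uint8)  # solid blue patch (BGR)

h_red, h_blue = patch_histogram(red), patch_histogram(blue)
print(cv2.compareHist(h_red, h_red, cv2.HISTCMP_INTERSECT))   # ~1.0: same appearance
print(cv2.compareHist(h_red, h_blue, cv2.HISTCMP_INTERSECT))  # ~0.0: different hue bins
```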
def computeNormalProbability(self, value): 213 | """ 214 | Computes the probability of histogram differences in a custom distribution 215 | """ 216 | c = self.distribution_parameters[0] 217 | return c / (math.log(1 + c) * (1 + c * (1.0 - value))) 218 | # return bradford.pdf(1.0 - value, c=self.distribution_parameters[0]) 219 | 220 | def computeScore(self, n1, n2, reference_value=None): 221 | """ 222 | Computes visual (match) score between two nodes/regions 223 | 224 | Parameters 225 | ---------- 226 | n1 : int 227 | first node/region 228 | n2 : array 229 | second node/region 230 | reference_value : float 231 | reference value to compute the normal probability for the target score 232 | """ 233 | 234 | h1 = self.regionHistogram(n1) 235 | h2 = self.regionHistogram(n2) 236 | score = self.histogramComparison(h1, h2) 237 | return self.computeNormalProbability(score) 238 | -------------------------------------------------------------------------------- /manual_path_finder.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import os 3 | import cv2 4 | import numpy as np 5 | import sys 6 | import time 7 | import configparser 8 | 9 | from ariadne.core import Ariadne, AriadnePathTip, AriadnePath, ImageSegmentator, AriadnePathFinder, AriadneMultiPathFinder 10 | import ariadne.predictors_config as predictors_config 11 | from ariadne.utils.interactivewindow import InteractiveWindow, InteractiveWindowKeys 12 | import ariadne.utils.colors as cl 13 | 14 | colors = cl.getPalette() 15 | 16 | ####################################### 17 | # Arguments 18 | ####################################### 19 | ap = argparse.ArgumentParser() 20 | ap.add_argument("--image_file", required=True, help="Target image file.") 21 | ap.add_argument("--configs_file", default='configs.ini', type=str, help='Configurations file.') 22 | ap.add_argument("--config_name", default="default", type=str, help='Configuration name') 23 | ap.add_argument("--debug", default=1, type=int, help='"1" if Debug Mode. 
"0" Otherwise.') 24 | args = vars(ap.parse_args()) 25 | 26 | ####################################### 27 | # Input Image 28 | ####################################### 29 | image = cv2.imread(args['image_file']) 30 | 31 | ####################################### 32 | # Configuration 33 | ####################################### 34 | config = configparser.ConfigParser() 35 | config_name = args['config_name'] 36 | config.read(args['configs_file']) 37 | print("ACTIVE CONFIG:", config_name) 38 | if not config.has_section(config_name): 39 | print("Config {} not found!".format(config_name)) 40 | sys.exit(0) 41 | 42 | ####################################### 43 | # Segmentator 44 | ####################################### 45 | segmentator = ImageSegmentator(segmentator_type='SLIC') 46 | segmentator.options_map['n_segments'] = int(config.get(config_name, 'num_segments')) 47 | segmentator.options_map['compactness'] = float(config.get(config_name, 'segmentator_compactness')) 48 | segmentator.options_map['sigma'] = float(config.get(config_name, 'segmentator_sigma')) 49 | 50 | ####################################### 51 | # Ariadne 52 | ####################################### 53 | ariadne = Ariadne(image_file=args['image_file'], segmentator=segmentator) 54 | 55 | 56 | clicked_points = [] 57 | clicked_nodes = [] 58 | 59 | 60 | ####################################### 61 | # Predictor 62 | ####################################### 63 | overall_predictor = predictors_config.buildPredictorFromConfig(ariadne, config, config_name) 64 | 65 | ####################################### 66 | # Multi Path Finder 67 | ####################################### 68 | multi_path_finder = AriadneMultiPathFinder( 69 | ariadne, predictor=overall_predictor) 70 | 71 | 72 | ####################################### 73 | # ACTIVATION 74 | ####################################### 75 | active = False 76 | 77 | ################################# 78 | # Options 79 | ################################# 80 | boundary_color = np.array([243., 150., 33.])/255. 
81 | 82 | end_reached = False 83 | 84 | 85 | def clickCallback(data): 86 | global output_image, clicked_points, active, end_reached 87 | 88 | ####################################### 89 | # Debug click to check Node Label 90 | ####################################### 91 | if len(clicked_points) == 2: 92 | n = ariadne.graph.nearestNode(data[1]) 93 | print("CLICKED NODE:", n) 94 | return 95 | 96 | ####################################### 97 | # Click over tips 98 | ####################################### 99 | clicked_points.append(data[1]) 100 | print("CLICKED POINTS", clicked_points) 101 | if len(clicked_points) == 1: 102 | return 103 | 104 | print("CLICKED POINTS", clicked_points) 105 | ####################################### 106 | # Get Current Clicked Node 107 | ####################################### 108 | n = ariadne.graph.nearestNode(clicked_points[0]) 109 | 110 | ####################################### 111 | # Starts Multi-Path Finder search 112 | ####################################### 113 | multi_path_finder.startSearchInNeighbourhood( 114 | n, depth=overall_predictor.start_depth) 115 | reaches_map = [0] * multi_path_finder.size() 116 | 117 | ####################################### 118 | # Search main loop 119 | ####################################### 120 | active = True 121 | while True: 122 | ####################################### 123 | # Debug Draw 124 | ####################################### 125 | if args['debug']: 126 | output_image = ariadne.graph.generateBoundaryImage(image, color=boundary_color) 127 | cv2.circle(output_image, tuple( 128 | clicked_points[0]), int(config.get(config_name, 'end_region_radius')), (0, 0, 1), 2) 129 | cv2.circle(output_image, tuple( 130 | clicked_points[1]), int(config.get(config_name, 'end_region_radius')), (0, 0, 1), 2) 131 | 132 | if active: 133 | print("ROUND", multi_path_finder.getIterations() + 1, "=" * 50) 134 | 135 | ####################################### 136 | # Mult-Path Finder Next step 137 | ####################################### 138 | multi_path_finder.nextStep() 139 | 140 | ####################################### 141 | # Fetch scores for each Path 142 | ####################################### 143 | scores_raw = multi_path_finder.getScores(single_components=True) 144 | scores = multi_path_finder.getScores(single_components=False) 145 | 146 | reaches_counter = 0 147 | for i, f in enumerate(scores): 148 | path_finder = multi_path_finder.path_finders[i] 149 | print("Path ", i, scores_raw[i], scores[i], 150 | "REACHED" if reaches_map[i] > 0 else "") 151 | 152 | ####################################### 153 | # Debug Draw of current Path 154 | ####################################### 155 | if args['debug']: 156 | color = colors[i % len(colors)] 157 | if path_finder.isOpen(): 158 | path_finder.path.draw( 159 | output_image, draw_numbers=False, color=color) 160 | 161 | last_point = path_finder.path.as2DPoints()[-1] 162 | cv2.putText(output_image, "{}".format(i), tuple( 163 | last_point + np.array([5, 0])), cv2.FONT_HERSHEY_PLAIN, 1.5, (0, 0, 0), 2) 164 | 165 | ####################################### 166 | # Close condition for current Path 167 | ####################################### 168 | if scores[i] < float(config.get(config_name, 'min_score')): 169 | path_finder.close() 170 | # print("PAth", i, "Has Reached MIN SCORE!") 171 | if path_finder.path.endsInRegion(clicked_points[1], int(config.get(config_name, 'end_region_radius'))): 172 | # print("PAth", i, "Has Reached Destiantion!") 173 | n2 = ariadne.graph.nearestNode(clicked_points[1]) 174 
| path_finder.path.addNode(n2) 175 | path_finder.close() 176 | reaches_map[i] = 1 177 | reaches_counter += 1 178 | if multi_path_finder.getIterations() > int(config.get(config_name, 'max_length')): 179 | multi_path_finder.close() 180 | 181 | c = 0 182 | step_time = 0 183 | if args['debug']: 184 | print("STOP") 185 | if not end_reached: 186 | c = window.showImg(output_image, step_time) 187 | 188 | if multi_path_finder.isFinished(): 189 | end_reached = True 190 | print("END REACHED!") 191 | 192 | # output_image = ariadne.graph.generateBoundaryImage(image) 193 | # cv2.circle(output_image, tuple( 194 | # clicked_points[0]), config.get(config_name, 'end_region_radius'), (0, 0, 1), 2) 195 | # cv2.circle(output_image, tuple( 196 | # clicked_points[1]), config.get(config_name, 'end_region_radius'), (0, 0, 1), 2) 197 | 198 | # for i, f in enumerate(multi_path_finder.path_finders): 199 | # f.path.draw(output_image, color=(0, 0, 1), draw_numbers=False) 200 | # last_point = f.path.as2DPoints()[-1] 201 | # cv2.putText(output_image, "{}".format(i), tuple( 202 | # last_point + np.array([5, 0])), cv2.FONT_HERSHEY_PLAIN, 1.5, (0, 0, 0), 2) 203 | 204 | output_image = image.copy() # ariadne.graph.generateBoundaryImage(image) 205 | 206 | best = multi_path_finder.getBestPathFinder() 207 | best.path.draw(output_image, color=(0, 255, 0), draw_numbers=False) 208 | window.showImg(output_image, 0) 209 | 210 | 211 | window = InteractiveWindow("stereo_matching", autoexit=True) 212 | window.registerCallback(clickCallback, event=InteractiveWindow.EVENT_MOUSEDOWN) 213 | 214 | output_image = ariadne.graph.generateBoundaryImage(image, color=boundary_color) 215 | window.showImg(image, time=0, disable_keys=False) 216 | 217 | while True: 218 | for point in clicked_points: 219 | cv2.circle(output_image, tuple(point.astype(int)), 5, (255, 0, 0), -1) 220 | 221 | if not end_reached: 222 | window.showImg(output_image, time=1, disable_keys=True) 223 | -------------------------------------------------------------------------------- /ariadne/predictors_config.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | from ariadne.predictors.visual import VisualPredictor, VisualPredictorColor2DHistogram 4 | from ariadne.predictors.overall import OverallPredictor, OverallScoreFunction 5 | from ariadne.predictors.visual import VisualPredictorColor2DHistogram, VisualPredictorColor3DHistogram 6 | from ariadne.predictors.distance import DistancePredictorMaxDistance 7 | from ariadne.predictors.curvature import * 8 | import sys 9 | 10 | 11 | def buildPredictorFromConfig(ariadne, config_parser, config_name): 12 | try: 13 | score_function = config_parser.get(config_name, 'predictors_function') 14 | histogram_bins = int(config_parser.get(config_name, 'predictors_visual_bins')) 15 | start_depth = int(config_parser.get(config_name, 'predictors_start_depth')) 16 | predictors_flags = config_parser.get(config_name, 'predictors_flags') 17 | neigh_level = int(config_parser.get(config_name, 'predictors_neighbours')) 18 | except: 19 | print("Error parsing configuration: ") 20 | sys.exit(0) 21 | 22 | overall_score_function = OverallScoreFunction( 23 | scores_components=[ 24 | ('s0', CurvatureVonMisesPredictor(ariadne)), 25 | ('s1', CurvatureSplinePredictor(ariadne)), 26 | ('s3', LengthPredictor(ariadne)), 27 | ], 28 | score_function=score_function 29 | ) 30 | 31 | visual_predictor = None 32 | curvature_predictor = None 33 | distance_predictor = None 34 | 35 | if 'V' in predictors_flags: 36 | visual_predictor = 
VisualPredictorColor3DHistogram(ariadne, bins=[histogram_bins, histogram_bins, histogram_bins], distribution_parameters=[ 37 | 0.5001], enable_cache=True) 38 | if 'C' in predictors_flags: 39 | curvature_predictor = CurvatureVonMisesLastPredictor( 40 | ariadne, kappa=1) 41 | 42 | if 'M' in predictors_flags: 43 | curvature_predictor = CurvatureVonMisesPredictor(ariadne) 44 | 45 | if 'D' in predictors_flags: 46 | distance_predictor = DistancePredictorMaxDistance( 47 | ariadne, max_distance=100, bradford_coefficient=0.5) 48 | 49 | overall_predictor = OverallPredictor( 50 | predictors={ 51 | 'visual': visual_predictor, 52 | 'curvature': curvature_predictor, 53 | 'distance': distance_predictor 54 | }, 55 | neighbourhood_levels=list(range(1, neigh_level + 1)), 56 | score_function=overall_score_function, 57 | start_depth=start_depth 58 | ) 59 | return overall_predictor 60 | 61 | 62 | def buildPredictor(ariadne, name="default"): 63 | 64 | if name == "default": 65 | overall_score_function = OverallScoreFunction( 66 | scores_components=[ 67 | ('s0', CurvatureVonMisesPredictor(ariadne)), 68 | ('s1', CurvatureSplinePredictor(ariadne)) 69 | ], 70 | score_function="s0*s1" 71 | ) 72 | 73 | overall_predictor = OverallPredictor( 74 | predictors={ 75 | # 'visual': VisualPredictorColor2DHistogram(ariadne, bins=[8, 8, 8], enable_cache=True), 76 | 'visual': VisualPredictorColor3DHistogram(ariadne, bins=[2, 2, 2], distribution_parameters=[0.5001], enable_cache=True), 77 | 'curvature': CurvatureVonMisesPredictor(ariadne), 78 | 'distance': DistancePredictorMaxDistance(ariadne, max_distance=100, bradford_coefficient=0.5) 79 | }, 80 | neighbourhood_levels=[1, 2, 3], 81 | score_function=overall_score_function 82 | ) 83 | return overall_predictor 84 | 85 | elif name == "simplecurve": 86 | overall_score_function = OverallScoreFunction( 87 | scores_components=[ 88 | ('s0', CurvatureVonMisesPredictor(ariadne)), 89 | ('s1', CurvatureSplinePredictor(ariadne)) 90 | ], 91 | score_function="s0*s1" 92 | ) 93 | 94 | overall_predictor = OverallPredictor( 95 | predictors={ 96 | # 'visual': VisualPredictorColor2DHistogram(ariadne, bins=[8, 8, 8], enable_cache=True), 97 | 'visual': VisualPredictorColor3DHistogram(ariadne, bins=[8, 8, 8], distribution_parameters=[0.5001], enable_cache=True), 98 | 'curvature': CurvatureVonMisesLastPredictor(ariadne, kappa=1), 99 | 'distance': DistancePredictorMaxDistance(ariadne, max_distance=100, bradford_coefficient=0.5) 100 | }, 101 | neighbourhood_levels=[1, 2, 3], 102 | score_function=overall_score_function, 103 | start_depth=2 104 | ) 105 | return overall_predictor 106 | 107 | elif name == "simplecurvenod": 108 | overall_score_function = OverallScoreFunction( 109 | scores_components=[ 110 | ('s0', CurvatureVonMisesPredictor(ariadne)), 111 | ('s1', CurvatureSplinePredictor(ariadne)) 112 | ], 113 | score_function="s0*s1" 114 | ) 115 | 116 | overall_predictor = OverallPredictor( 117 | predictors={ 118 | # 'visual': VisualPredictorColor2DHistogram(ariadne, bins=[8, 8, 8], enable_cache=True), 119 | 'visual': VisualPredictorColor3DHistogram(ariadne, bins=[8, 8, 8], distribution_parameters=[0.5001], enable_cache=True), 120 | 'curvature': CurvatureVonMisesLastPredictor(ariadne, kappa=1), 121 | 'distance': None 122 | }, 123 | neighbourhood_levels=[1, 2, 3], 124 | score_function=overall_score_function 125 | ) 126 | return overall_predictor 127 | 128 | elif name.startswith("dyn_"): # example: dyn_s0*s1@8@1@VCD@3 129 | try: 130 | parameters = name.split("_")[1] 131 | parameters = parameters.split("@") 132 
| score_function = parameters[0] 133 | histogram_bins = int(parameters[1]) 134 | start_depth = int(parameters[2]) 135 | predictors_flags = parameters[3] 136 | neigh_level = int(parameters[4]) 137 | except: 138 | print("Error parsing dynamic configuration: '{}'".format(name)) 139 | sys.exit(0) 140 | 141 | overall_score_function = OverallScoreFunction( 142 | scores_components=[ 143 | ('s0', CurvatureVonMisesPredictor(ariadne)), 144 | ('s1', CurvatureSplinePredictor(ariadne)), 145 | ('s3', LengthPredictor(ariadne)), 146 | ], 147 | score_function=score_function 148 | ) 149 | 150 | visual_predictor = None 151 | curvature_predictor = None 152 | distance_predictor = None 153 | 154 | if 'V' in predictors_flags: 155 | visual_predictor = VisualPredictorColor3DHistogram(ariadne, bins=[histogram_bins, histogram_bins, histogram_bins], distribution_parameters=[ 156 | 0.5001], enable_cache=True) 157 | if 'C' in predictors_flags: 158 | curvature_predictor = CurvatureVonMisesLastPredictor( 159 | ariadne, kappa=1) 160 | 161 | if 'M' in predictors_flags: 162 | curvature_predictor = CurvatureVonMisesPredictor(ariadne) 163 | 164 | if 'D' in predictors_flags: 165 | distance_predictor = DistancePredictorMaxDistance( 166 | ariadne, max_distance=100, bradford_coefficient=0.5) 167 | 168 | overall_predictor = OverallPredictor( 169 | predictors={ 170 | 'visual': visual_predictor, 171 | 'curvature': curvature_predictor, 172 | 'distance': distance_predictor 173 | }, 174 | neighbourhood_levels=list(range(1, neigh_level + 1)), 175 | score_function=overall_score_function, 176 | start_depth=start_depth 177 | ) 178 | return overall_predictor 179 | 180 | elif name == "roads": 181 | overall_score_function = OverallScoreFunction( 182 | scores_components=[ 183 | ('s0', CurvatureVonMisesPredictor(ariadne)), 184 | ('s1', CurvatureSplinePredictor(ariadne)) 185 | ], 186 | score_function="s0*s1" 187 | ) 188 | 189 | overall_predictor = OverallPredictor( 190 | predictors={ 191 | # 'visual': VisualPredictorColor2DHistogram(ariadne, bins=[8, 8, 8], enable_cache=True), 192 | 'visual': VisualPredictorColor3DHistogram(ariadne, bins=[8, 8, 8], distribution_parameters=[0.5001], enable_cache=True), 193 | 'curvature': CurvatureVonMisesLastPredictor(ariadne, kappa=0.1), 194 | 'distance': DistancePredictorMaxDistance(ariadne, max_distance=100, bradford_coefficient=0.5) 195 | }, 196 | neighbourhood_levels=[1, 2, 3], 197 | score_function=overall_score_function 198 | ) 199 | return overall_predictor 200 | 201 | elif name == "roads2": 202 | overall_score_function = OverallScoreFunction( 203 | scores_components=[ 204 | ('s0', CurvatureVonMisesPredictor(ariadne)), 205 | ('s1', CurvatureSplinePredictor(ariadne)) 206 | ], 207 | score_function="s1" 208 | ) 209 | 210 | overall_predictor = OverallPredictor( 211 | predictors={ 212 | # 'visual': VisualPredictorColor2DHistogram(ariadne, bins=[8, 8, 8], enable_cache=True), 213 | 'visual': VisualPredictorColor3DHistogram(ariadne, bins=[4, 4, 4], distribution_parameters=[0.5001], enable_cache=True), 214 | 'curvature': CurvatureVonMisesLastPredictor(ariadne, kappa=0.01), 215 | 'distance': DistancePredictorMaxDistance(ariadne, max_distance=100, bradford_coefficient=0.5) 216 | }, 217 | neighbourhood_levels=[1, 2, 4], 218 | score_function=overall_score_function 219 | ) 220 | return overall_predictor 221 | 222 | elif name == "rivers": 223 | overall_score_function = OverallScoreFunction( 224 | scores_components=[ 225 | ('s0', CurvatureVonMisesPredictor(ariadne)), 226 | ('s1', CurvatureSplinePredictor(ariadne)) 227 | 
], 228 | score_function="s1" 229 | ) 230 | 231 | overall_predictor = OverallPredictor( 232 | predictors={ 233 | # 'visual': VisualPredictorColor2DHistogram(ariadne, bins=[8, 8, 8], enable_cache=True), 234 | 'visual': VisualPredictorColor3DHistogram(ariadne, bins=[8, 8, 8], distribution_parameters=[0.5001], enable_cache=True), 235 | 'curvature': CurvatureVonMisesLastPredictor(ariadne, kappa=0.1), 236 | 'distance': DistancePredictorMaxDistance(ariadne, max_distance=100, bradford_coefficient=0.5) 237 | }, 238 | neighbourhood_levels=[1, 2, 4], 239 | score_function=overall_score_function 240 | ) 241 | return overall_predictor 242 | 243 | else: 244 | print("Predictor configuration '{}' does not exist!".format(name)) 245 | sys.exit(0) 246 | -------------------------------------------------------------------------------- /ariadne/matchers/visual_matchers.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | import skimage.color 4 | from scipy.stats import multivariate_normal 5 | from scipy.stats import norm 6 | import cv2 7 | 8 | class VisualMatcher(object): 9 | 10 | def __init__(self, ariadne): 11 | self.ariadne = ariadne 12 | 13 | def computeCost(self, n1, n2): 14 | return 0.0 15 | 16 | 17 | class SimpleColorMatcher(VisualMatcher): 18 | 19 | def __init__(self, ariadne): 20 | super(SimpleColorMatcher, self).__init__(ariadne) 21 | 22 | def computeCost(self, n1, n2): 23 | node1 = self.ariadne.graph.node(n1) 24 | node2 = self.ariadne.graph.node(n2) 25 | return np.linalg.norm(node1['mean color'] - node2['mean color']) 26 | 27 | 28 | class LabHistogramMatcher(VisualMatcher): 29 | 30 | def __init__(self, ariadne, bins=[8, 8, 8], precompute_histograms=False): 31 | super(LabHistogramMatcher, self).__init__(ariadne) 32 | self.bins = bins 33 | 34 | self.max_distance = math.sqrt(np.sum(np.array(bins).ravel())) 35 | self.histogram_map = {} 36 | if precompute_histograms: 37 | nodes = self.ariadne.graph.nodes() 38 | for i, n in enumerate(nodes): 39 | region = self.ariadne.graph.region(n) 40 | histo = self.regionHistogram(region) 41 | self.histogram_map[region] = histo 42 | print("Histogram percentage", 100.0 * 43 | float(i) / float(len(nodes))) 44 | 45 | def regionHistogram(self, region): 46 | if region in self.histogram_map: 47 | return self.histogram_map[region] 48 | 49 | colors_list = [] 50 | coords = region['coords'] 51 | for p1 in coords: 52 | color = self.ariadne.image[p1[0], p1[1], :] 53 | # colors.append(color) 54 | color_lab = skimage.color.rgb2lab(color.reshape((1, 1, 3))) 55 | colors_lab.append(color_lab) 56 | 57 | # colors = np.array(colors) 58 | colors_lab = np.array(colors_lab).reshape((len(coords), 3)) 59 | l = colors_lab[:, 0] 60 | a = colors_lab[:, 1] 61 | b = colors_lab[:, 2] 62 | hl, _ = np.histogram(l, self.bins[0], range=(0, 100.0)) 63 | ha, _ = np.histogram(a, self.bins[1], range=(-127., 128.)) 64 | hb, _ = np.histogram(b, self.bins[2], range=(-127., 128.)) 65 | 66 | hl = np.array(hl).astype(float) 67 | ha = np.array(ha).astype(float) 68 | hb = np.array(hb).astype(float) 69 | hl = hl / np.max(hl) 70 | ha = ha / np.max(ha) 71 | hb = hb / np.max(hb) 72 | histo = np.concatenate((hl, ha, hb)) 73 | self.histogram_map[region] = histo 74 | return histo 75 | 76 | def computeCost(self, n1, n2): 77 | node1 = self.ariadne.graph.node(n1) 78 | node2 = self.ariadne.graph.node(n2) 79 | r1 = self.ariadne.graph.region(n1) 80 | r2 = self.ariadne.graph.region(n2) 81 | 82 | h1 = self.regionHistogram(r1) 83 | h2 = 
self.regionHistogram(r2) 84 | return np.linalg.norm(h1 - h2) / self.max_distance 85 | 86 | # return np.linalg.norm(node1['mean color'] - node2['mean color']) 87 | 88 | 89 | class PathVisualMatcher(object): 90 | 91 | def __init__(self, ariadne, path): 92 | self.ariadne = ariadne 93 | self.path = path 94 | 95 | def computePDF(self, n1): 96 | return 0.0 97 | 98 | 99 | class PathLabHistogramMatcher(PathVisualMatcher): 100 | 101 | def __init__(self, ariadne, path, bins=[8, 8, 8], precompute_histograms=False): 102 | super(PathLabHistogramMatcher, self).__init__(ariadne, path) 103 | self.bins = bins 104 | self.max_distance = math.sqrt(np.sum(np.array(bins).ravel())) 105 | self.histogram_map = {} 106 | if precompute_histograms: 107 | nodes = self.ariadne.graph.nodes() 108 | for i, n in enumerate(nodes): 109 | region = self.ariadne.graph.region(n) 110 | histo = self.regionHistogram(region) 111 | self.histogram_map[region] = histo 112 | # print(histo) 113 | print("Histogram percentage", 100.0 * 114 | float(i) / float(len(nodes))) 115 | 116 | def regionHistogram(self, region): 117 | if region in self.histogram_map: 118 | return self.histogram_map[region] 119 | 120 | colors_lab = [] 121 | coords = region['coords'] 122 | for p1 in coords: 123 | color = self.ariadne.image[p1[0], p1[1], :] 124 | # colors.append(color) 125 | color_lab = skimage.color.rgb2lab(color.reshape((1, 1, 3))) 126 | colors_lab.append(color_lab) 127 | 128 | # colors = np.array(colors) 129 | colors_lab = np.array(colors_lab).reshape((len(coords), 3)) 130 | l = colors_lab[:, 0] 131 | a = colors_lab[:, 1] 132 | b = colors_lab[:, 2] 133 | hl, _ = np.histogram(l, self.bins[0], range=(0, 100.0)) 134 | ha, _ = np.histogram(a, self.bins[1], range=(-127., 128.)) 135 | hb, _ = np.histogram(b, self.bins[2], range=(-127., 128.)) 136 | 137 | # print("HISTOGRAMSSSSS", hl, ha, hb) 138 | 139 | hl = hl / float(len(coords)) 140 | hb = hb / float(len(coords)) 141 | ha = ha / float(len(coords)) 142 | 143 | hl = np.array(hl).astype(float) 144 | ha = np.array(ha).astype(float) 145 | hb = np.array(hb).astype(float) 146 | 147 | # print("HISTOGRAMSSSSS ** ", hl, ha, hb) 148 | histo = np.concatenate((hl, ha, hb)) 149 | self.histogram_map[region] = histo 150 | return histo 151 | 152 | def computePDF(self, n2): 153 | if self.path.size() == 0: 154 | return 1.0 155 | 156 | r1 = self.ariadne.graph.region(self.path.last_node) 157 | r2 = self.ariadne.graph.region(n2) 158 | 159 | h1 = self.regionHistogram(r1) 160 | h2 = self.regionHistogram(r2) 161 | mn = multivariate_normal(h1) 162 | return mn.pdf(h2) 163 | 164 | # return np.linalg.norm(node1['mean color'] - node2['mean color']) 165 | 166 | 167 | class ColorPathHistogramMatcher(PathVisualMatcher): 168 | DESCRIPTOR_CEILAB = "DESCRIPTOR_CEILAB" 169 | DESCRIPTOR_BALLARD = "DESCRIPTOR_BALLARD" 170 | 171 | def __init__(self, ariadne, path, bins=[8, 8, 8], precompute_histograms=False, descriptor="DESCRIPTOR_CEILAB",instogram_intersection=True,single_part_normalization=False): 172 | super(ColorPathHistogramMatcher, self).__init__(ariadne, path) 173 | self.bins = bins 174 | self.max_distance = math.sqrt(np.sum(np.array(bins).ravel())) 175 | self.histogram_map = {} 176 | self.descriptor = descriptor 177 | self.histogram_intersection = instogram_intersection 178 | self.single_part_normalization = single_part_normalization 179 | 180 | if precompute_histograms: 181 | nodes = self.ariadne.graph.nodes() 182 | for i, n in enumerate(nodes): 183 | region = self.ariadne.graph.region(n) 184 | histo = self.regionHistogram(region) 185 
| self.histogram_map[region] = histo 186 | # print(histo) 187 | print("Histogram percentage", 100.0 * 188 | float(i) / float(len(nodes))) 189 | 190 | def colorFeature(self, color_input): 191 | if self.descriptor == ColorPathHistogramMatcher.DESCRIPTOR_CEILAB: 192 | return skimage.color.rgb2lab(color_input.reshape((1, 1, 3))) 193 | elif self.descriptor == ColorPathHistogramMatcher.DESCRIPTOR_BALLARD: 194 | ballard = np.array([ 195 | color_input[0]+color_input[1]+color_input[2], 196 | color_input[0]-color_input[1], 197 | 2.0*color_input[2]-color_input[0]-color_input[1] 198 | ]) 199 | return ballard 200 | 201 | def colorFeatureRange(self): 202 | if self.descriptor == ColorPathHistogramMatcher.DESCRIPTOR_CEILAB: 203 | return [(0, 100.0),(-127., 128.),(-127., 128.)] 204 | elif self.descriptor == ColorPathHistogramMatcher.DESCRIPTOR_BALLARD: 205 | return [(0.0,3.0),(-1.0,1.0),(-2.0,2.0)] 206 | 207 | def regionHistogram(self, region): 208 | if region in self.histogram_map: 209 | return self.histogram_map[region] 210 | 211 | colors_list = [] 212 | coords = region['coords'] 213 | for p1 in coords: 214 | color = self.ariadne.image[p1[0], p1[1], :] 215 | # colors.append(color) 216 | #color_lab = skimage.color.rgb2lab(color.reshape((1, 1, 3))) 217 | colors_list.append(self.colorFeature(color)) 218 | 219 | # colors = np.array(colors) 220 | colors_list = np.array(colors_list).reshape((len(coords), 3)) 221 | l = colors_list[:, 0] 222 | a = colors_list[:, 1] 223 | b = colors_list[:, 2] 224 | 225 | ranges = self.colorFeatureRange() 226 | hl, _ = np.histogram(l, self.bins[0], range=ranges[0]) 227 | ha, _ = np.histogram(a, self.bins[1], range=ranges[1]) 228 | hb, _ = np.histogram(b, self.bins[2], range=ranges[2]) 229 | 230 | # print("HISTOGRAMSSSSS", hl, ha, hb) 231 | 232 | if self.single_part_normalization: 233 | hl = hl / float(len(coords)) 234 | hb = hb / float(len(coords)) 235 | ha = ha / float(len(coords)) 236 | 237 | hl = np.array(hl).astype(float) 238 | ha = np.array(ha).astype(float) 239 | hb = np.array(hb).astype(float) 240 | 241 | # print("HISTOGRAMSSSSS ** ", hl, ha, hb) 242 | histo = np.concatenate((hl, ha, hb)) 243 | self.histogram_map[region] = histo 244 | return histo 245 | 246 | 247 | def return_intersection(self,hist_1, hist_2): 248 | minima = np.minimum(hist_1, hist_2) 249 | intersection = np.true_divide(np.sum(minima), np.sum(hist_2)) 250 | return intersection 251 | 252 | def computePDF(self, n2): 253 | if self.path.size() == 0: 254 | return 1.0 255 | 256 | r1 = self.ariadne.graph.region(self.path.last_node) 257 | r2 = self.ariadne.graph.region(n2) 258 | 259 | h1 = self.regionHistogram(r1) 260 | h2 = self.regionHistogram(r2) 261 | mn = multivariate_normal(h1) 262 | if self.histogram_intersection: 263 | return self.return_intersection(h1,h2) 264 | else: 265 | return mn.pdf(h2) 266 | 267 | # return np.linalg.norm(node1['mean color'] - node2['mean color']) 268 | 269 | 270 | class Color2DHistogramMatcher(PathVisualMatcher): 271 | DESCRIPTOR_CEILAB = "DESCRIPTOR_CEILAB" 272 | DESCRIPTOR_BALLARD = "DESCRIPTOR_BALLARD" 273 | 274 | def __init__(self, ariadne, bins=[32,32], precompute_histograms=False, descriptor="DESCRIPTOR_CEILAB",instogram_intersection=True,single_part_normalization=False): 275 | super(Color2DHistogramMatcher, self).__init__(ariadne, None) 276 | 277 | self.img = (ariadne.image * 255.0).astype(np.uint8) 278 | self.img = cv2.cvtColor(self.img, cv2.COLOR_RGB2BGR) 279 | self.hsv = cv2.cvtColor(self.img, cv2.COLOR_RGB2HSV) 280 | self.h, self.s, self.v = cv2.split(self.hsv) 281 | 282 
| self.bins = bins 283 | self.max_distance = math.sqrt(np.sum(np.array(bins).ravel())) 284 | self.histogram_map = {} 285 | self.descriptor = descriptor 286 | self.histogram_intersection = instogram_intersection 287 | self.single_part_normalization = single_part_normalization 288 | 289 | # if precompute_histograms: 290 | # nodes = self.ariadne.graph.nodes() 291 | # for i, n in enumerate(nodes): 292 | # region = self.ariadne.graph.region(n) 293 | # histo = self.regionHistogram(region) 294 | # self.histogram_map[region] = histo 295 | # # print(histo) 296 | # print("Histogram percentage", 100.0 * 297 | # float(i) / float(len(nodes))) 298 | 299 | def regionHistogram(self, n): 300 | hist = cv2.calcHist( 301 | [self.hsv], 302 | [0, 1], 303 | self.ariadne.graph.maskImage(n), 304 | [32, 32], 305 | [0, 180, 0, 256] 306 | ) 307 | hist = hist / np.sum(hist.ravel()) 308 | return hist 309 | 310 | 311 | def histogramComparison(self,h1, h2): 312 | return cv2.compareHist(h1, h2, cv2.HISTCMP_INTERSECT) 313 | #return cv2.compareHist(h1, h2, cv2.HISTCMP_BHATTACHARYYA) 314 | 315 | 316 | def compare(self, n1,n2): 317 | h1 = self.regionHistogram(n1) 318 | h2 = self.regionHistogram(n2) 319 | return self.histogramComparison(h1,h2) 320 | 321 | def normalizeComparison(self,current_value,max_value): 322 | dx = math.fabs(current_value-max_value) 323 | 324 | delta = 0.05 325 | p1 = norm.pdf(dx+delta,scale=0.02) 326 | p2 = norm.pdf(dx-delta,scale=0.02) 327 | return (delta / 2.0) * (p1 + p2) 328 | #return math.exp(-((current_value - max_value)**2.0)/(2*(0.2**2))) 329 | 330 | -------------------------------------------------------------------------------- /ariadne/predictors/curvature.py: -------------------------------------------------------------------------------- 1 | import math 2 | import numpy as np 3 | from scipy.stats import vonmises 4 | from scipy.interpolate import splprep, splev, spalde 5 | import cv2 6 | 7 | 8 | class VonMisesBuffer(object): 9 | 10 | def __init__(self, k=3): 11 | self.vonmises_range = np.arange(0, math.pi * 2, 0.0001) 12 | self.vonmises_base = vonmises(k) 13 | self.vonmises_values = self.vonmises_base.pdf(self.vonmises_range) 14 | 15 | def pdf(self, x): 16 | 17 | i = int(math.fabs(x) * 10000) 18 | if i >= 0 and i < len(self.vonmises_values): 19 | return self.vonmises_values[i] 20 | return 0.0 21 | 22 | 23 | class CurvaturePredictor(object): 24 | """ 25 | Base class for curvature Predictors 26 | 27 | Parameters 28 | ---------- 29 | ariadne : Ariadne 30 | object associated with a source image 31 | 32 | """ 33 | 34 | def __init__(self, ariadne): 35 | self.ariadne = ariadne 36 | 37 | def computeScore(self, path, initial_direction=None): 38 | return 1.0 39 | 40 | 41 | class CurvatureVonMisesPredictor(CurvaturePredictor): 42 | """ 43 | Curvature predictor using VonMises distribution 44 | 45 | Parameters 46 | ---------- 47 | ariadne : Ariadne 48 | object associated with a source image 49 | 50 | kappa : float 51 | Kappa parameter for the VonMises distribution 52 | 53 | max_angle : float 54 | max allowe angle for two consecutive edges 55 | 56 | """ 57 | 58 | def __init__(self, ariadne, kappa=4, max_angle=math.pi * 0.5): 59 | super(CurvatureVonMisesPredictor, self).__init__(ariadne) 60 | self.vonmises = VonMisesBuffer(k=kappa) 61 | self.max_angle = max_angle 62 | 63 | def computeScore(self, path, initial_direction=None, debug=False): 64 | """ 65 | Computes the score for a given path. 
Considering also degenerate paths 66 | 67 | Parameters 68 | ---------- 69 | path : AriadnePath 70 | target path 71 | 72 | initial_direction : np.array 73 | initial direction used to compute the score of single-edge paths 74 | 75 | """ 76 | 77 | ####################################### 78 | # Single Node Path 79 | ####################################### 80 | if path.size() <= 1: 81 | return 1.0 82 | 83 | ####################################### 84 | # Single Edge Path 85 | ####################################### 86 | if path.size() == 2: 87 | if initial_direction is None: 88 | return 1.0 89 | else: 90 | points = path.as2DPoints() 91 | p1 = np.array(points[0]) 92 | p2 = np.array(points[1]) 93 | direction = p2 - p1 94 | direction = direction / np.linalg.norm(direction) 95 | comp_direction = initial_direction / \ 96 | np.linalg.norm(initial_direction) 97 | # print("COMPUTERING", np.dot(direction, comp_direction)) 98 | angle = math.acos(np.dot(direction, comp_direction)) 99 | return self.vonmises.pdf(angle) 100 | 101 | ####################################### 102 | # Normal Path 103 | ####################################### 104 | directions = [] 105 | points = path.as2DPoints() 106 | for i in range(1, len(points)): 107 | p1 = np.array(points[i - 1]) 108 | p2 = np.array(points[i]) 109 | direction = p2 - p1 110 | direction = direction / np.linalg.norm(direction) 111 | directions.append(direction) 112 | 113 | thetas = [] 114 | for i in range(1, len(directions)): 115 | d1 = directions[i - 1] 116 | d2 = directions[i] 117 | a1 = math.atan2(d1[1], d1[0]) 118 | a2 = math.atan2(d2[1], d2[0]) 119 | angle = a1 - a2 120 | angle = math.acos(math.cos(angle)) 121 | # try: 122 | # angle = math.acos(np.dot(d1, d2)) 123 | # except: 124 | # angle = 0 125 | thetas.append(angle) 126 | 127 | # if len(thetas) == 0: 128 | # return 0.0 129 | 130 | if math.fabs(thetas[-1]) > self.max_angle: 131 | return 0.0 132 | if debug: 133 | print("Thetas", thetas) 134 | # print("=" * 50) 135 | # print("Thetas", thetas) 136 | if len(thetas) == 1: 137 | return self.vonmises.pdf(thetas[0]) 138 | elif len(thetas) > 1: 139 | posterios = [] 140 | for i in range(1, len(thetas)): 141 | t1 = thetas[i - 1] 142 | t2 = thetas[i] 143 | posterios.append(self.vonmises.pdf(t1 - t2)) 144 | return np.prod(np.array(posterios).ravel())**(1.0 / (len(points) - 3.0)) 145 | 146 | 147 | class CurvatureVonMisesLastPredictor(CurvaturePredictor): 148 | """ 149 | Curvature predictor using VonMises distribution only for last edge 150 | 151 | Parameters 152 | ---------- 153 | ariadne : Ariadne 154 | object associated with a source image 155 | 156 | kappa : float 157 | Kappa parameter for the VonMises distribution 158 | 159 | max_angle : float 160 | max allowe angle for two consecutive edges 161 | 162 | """ 163 | 164 | def __init__(self, ariadne, kappa=4, max_angle=math.pi * 0.5): 165 | super(CurvatureVonMisesLastPredictor, self).__init__(ariadne) 166 | self.vonmises = VonMisesBuffer(k=kappa) 167 | self.max_angle = max_angle 168 | 169 | def computeScore(self, path, initial_direction=None, debug=False): 170 | """ 171 | Computes the score for a given path. 
Considering also degenerate paths 172 | 173 | Parameters 174 | ---------- 175 | path : AriadnePath 176 | target path 177 | 178 | initial_direction : np.array 179 | initial direction used to compute the score of single-edge paths 180 | 181 | """ 182 | 183 | ####################################### 184 | # Single Node Path 185 | ####################################### 186 | if path.size() <= 1: 187 | return 1.0 188 | 189 | ####################################### 190 | # Single Edge Path 191 | ####################################### 192 | if path.size() == 2: 193 | if initial_direction is None: 194 | return 1.0 195 | else: 196 | points = path.as2DPoints() 197 | p1 = np.array(points[0]) 198 | p2 = np.array(points[1]) 199 | direction = p2 - p1 200 | direction = direction / np.linalg.norm(direction) 201 | comp_direction = initial_direction / \ 202 | np.linalg.norm(initial_direction) 203 | # print("COMPUTERING", np.dot(direction, comp_direction)) 204 | angle = math.acos(np.dot(direction, comp_direction)) 205 | return self.vonmises.pdf(angle) 206 | 207 | ####################################### 208 | # Normal Path 209 | ####################################### 210 | directions = [] 211 | points = path.as2DPoints() 212 | for i in range(len(points) - 2, len(points)): 213 | p1 = np.array(points[i - 1]) 214 | p2 = np.array(points[i]) 215 | direction = p2 - p1 216 | direction = direction / np.linalg.norm(direction) 217 | directions.append(direction) 218 | 219 | try: 220 | angle = math.acos(np.dot(directions[0], directions[1])) 221 | except: 222 | angle = 0.0 223 | return self.vonmises.pdf(angle) 224 | 225 | 226 | class CurvatureSplinePredictor(CurvaturePredictor): 227 | """ 228 | Curvature predictor using B-Splines 229 | 230 | Parameters 231 | ---------- 232 | ariadne : Ariadne 233 | object associated with a source image 234 | 235 | k : int 236 | BSpline Degree 237 | 238 | smoothing : float 239 | BSpline smoothing factor 240 | 241 | periodic: int 242 | If non-zero, data points are considered periodic with period x[m-1] - x[0] 243 | 244 | """ 245 | 246 | def __init__(self, ariadne, k=3, smoothing=0.0, periodic=0): 247 | super(CurvatureSplinePredictor, self).__init__(ariadne) 248 | self.k = k 249 | self.smoothing = smoothing 250 | self.periodic = periodic 251 | # tck, u = splprep(pts.T, u=None, k=k, s=smoothin, per=periodic) 252 | 253 | def computeSpline(self, path): 254 | """ 255 | Curvature predictor using B-Splines 256 | 257 | Parameters 258 | ---------- 259 | path : AriadnePath 260 | target path 261 | 262 | """ 263 | 264 | points = path.as2DPoints() 265 | if len(points) > self.k: 266 | pts = np.array(points) 267 | tck, u = splprep(pts.T, 268 | u=None, 269 | k=self.k, 270 | s=self.smoothing, 271 | per=self.periodic 272 | ) 273 | return {'tck': tck, 'u': u} 274 | return None 275 | 276 | def generateSplineMaskImage(self, path, radius=1, steps=1000, ref_image=None): 277 | if ref_image is None: 278 | ref_image = self.ariadne.image 279 | 280 | mask = np.zeros( 281 | (ref_image.shape[0], ref_image.shape[1]), dtype=np.uint8) 282 | spline = self.computeSpline(path) 283 | if spline is None: 284 | return mask 285 | u = spline['u'] 286 | tck = spline['tck'] 287 | u_new = np.linspace(u.min(), u.max(), steps) 288 | x_new, y_new = splev(u_new, tck, der=0) 289 | for i, x in enumerate(x_new): 290 | cv2.circle(mask, (int(x_new[i]), int( 291 | y_new[i])), radius, (255, 255, 255), -1) 292 | return mask 293 | 294 | def computeCurvatures(self, spline): 295 | u = spline['u'] 296 | tck = spline['tck'] 297 | 298 | der = 
spalde(u, tck) 299 | 300 | curvatures = np.zeros((len(u), 1)) 301 | for i in range(0, len(u)): 302 | 303 | d1 = (der[0][i][1], der[1][i][1]) 304 | d2 = (der[0][i][2], der[1][i][2]) 305 | x1 = d1[0] 306 | y1 = d1[1] 307 | x2 = d2[0] 308 | y2 = d2[1] 309 | curvature = (x1 * y2 - y1 * x2) / \ 310 | ((x1 * x1 + y1 * y1)**(3.0 / 2.0)) 311 | curvatures[i] = curvature 312 | return curvatures 313 | 314 | def computeCurvature(self, spline, positive_curvatures=True, normalize=True): 315 | curvatures = self.computeCurvatures(spline) 316 | if positive_curvatures: 317 | curvatures = np.absolute(curvatures) 318 | summation = np.sum(curvatures) 319 | if normalize: 320 | summation = summation / float(len(curvatures)) 321 | return summation 322 | 323 | def computeScore(self, path, initial_direction=None): 324 | """ 325 | Computes the score for a given path. Considering also degenerate paths 326 | 327 | Parameters 328 | ---------- 329 | path : AriadnePath 330 | target path 331 | 332 | initial_direction : np.array 333 | initial direction used to compute the score of single-edge paths 334 | 335 | """ 336 | 337 | ####################################### 338 | # Path with k nodes 339 | ####################################### 340 | if path.size() <= self.k: 341 | return 1.0 342 | 343 | ####################################### 344 | # Normal Path 345 | ####################################### 346 | spline = self.computeSpline(path) 347 | if spline is None: 348 | return 1.0 349 | 350 | u = spline['u'] 351 | tck = spline['tck'] 352 | 353 | der = spalde(u, tck) 354 | 355 | total_curvature = 0.0 356 | total_curvature_pos = 0.0 357 | for i in range(0, len(u)): 358 | 359 | d1 = (der[0][i][1], der[1][i][1]) 360 | d2 = (der[0][i][2], der[1][i][2]) 361 | x1 = d1[0] 362 | y1 = d1[1] 363 | x2 = d2[0] 364 | y2 = d2[1] 365 | curvature = (x1 * y2 - y1 * x2) / \ 366 | ((x1 * x1 + y1 * y1)**(3.0 / 2.0)) 367 | total_curvature += curvature 368 | total_curvature_pos += math.fabs(curvature) 369 | 370 | normalized_curvature = total_curvature_pos / float(len(u)) 371 | final_curvature = min(1.0, 1.0 - normalized_curvature) 372 | return final_curvature 373 | 374 | 375 | class LengthPredictor(CurvaturePredictor): 376 | """ 377 | Length predictor 378 | 379 | Parameters 380 | ---------- 381 | ariadne : Ariadne 382 | object associated with a source image 383 | 384 | 385 | 386 | """ 387 | 388 | def __init__(self, ariadne, max_length=1000): 389 | super(LengthPredictor, self).__init__(ariadne) 390 | self.max_length = max_length 391 | 392 | def computeScore(self, path, initial_direction=None, debug=False): 393 | """ 394 | Computes the score for a given path. 
Considering also degenerate paths 395 | 396 | Parameters 397 | ---------- 398 | path : AriadnePath 399 | target path 400 | 401 | initial_direction : np.array 402 | initial direction used to compute the score of single-edge paths 403 | 404 | """ 405 | points = path.as2DPoints() 406 | length = 0.0 407 | for i in range(1, len(points)): 408 | pf1 = np.array(points[i]).astype(float) 409 | pf2 = np.array(points[i - 1]).astype(float) 410 | length += np.linalg.norm(pf1 - pf2) 411 | 412 | return 1.0+min(length / self.max_length, 1.0) 413 | -------------------------------------------------------------------------------- /ariadne/core.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- encoding: utf-8 -*- 3 | 4 | from skimage.segmentation import slic, felzenszwalb, quickshift, watershed 5 | from skimage.measure import regionprops 6 | from skimage import draw 7 | 8 | import skimage.future.graph as graph 9 | 10 | 11 | from skimage.segmentation import mark_boundaries 12 | 13 | 14 | from skimage.util import img_as_float 15 | from skimage.color import rgb2gray 16 | import skimage.color 17 | from skimage.filters import sobel 18 | 19 | from skimage import io 20 | 21 | import scipy.io 22 | 23 | import matplotlib.pyplot as plt 24 | import argparse 25 | import math 26 | import numpy as np 27 | import cv2 28 | import os 29 | 30 | import networkx as nx 31 | import itertools 32 | 33 | 34 | class ImageSegmentator(object): 35 | SEGMENTATOR_SLIC = "SLIC" 36 | SEGMENTATOR_SLIC_MONO = "SLIC_MONO" 37 | SEGMENTATOR_QUICKSHIFT = "QUICKSHIFT" 38 | SEGMENTATOR_FELZENSZWALB = "FELZENSZWALB" 39 | SEGMENTATOR_WATERSHED = "WATERSHED" 40 | 41 | def __init__(self, segmentator_type=None): 42 | if segmentator_type is None: 43 | segmentator_type = ImageSegmentator.SEGMENTATOR_SLIC 44 | self.segmentator_type = segmentator_type 45 | self.options_map = {} 46 | 47 | def segments(self, image, options=None): 48 | if options is None: 49 | options = self.options_map 50 | print("ACTIVE OPTIONS", options) 51 | if self.segmentator_type == ImageSegmentator.SEGMENTATOR_SLIC: 52 | return slic( 53 | image, 54 | sigma=options['sigma'], 55 | n_segments=options['n_segments'], 56 | convert2lab=True, 57 | multichannel=True, 58 | compactness=options['compactness'] 59 | ) 60 | elif self.segmentator_type == ImageSegmentator.SEGMENTATOR_SLIC_MONO: 61 | gray = skimage.color.rgb2grey(image) 62 | return slic( 63 | gray, 64 | sigma=options['sigma'], 65 | n_segments=options['n_segments'], 66 | convert2lab=False, 67 | multichannel=True, 68 | compactness=options['compactness'] 69 | ) 70 | elif self.segmentator_type == ImageSegmentator.SEGMENTATOR_QUICKSHIFT: 71 | return quickshift( 72 | image, 73 | kernel_size=3, 74 | max_dist=6, 75 | ratio=0.5 76 | ) 77 | elif self.segmentator_type == ImageSegmentator.SEGMENTATOR_FELZENSZWALB: 78 | return felzenszwalb(image, scale=100, sigma=0.5, min_size=50) 79 | 80 | if self.segmentator_type == ImageSegmentator.SEGMENTATOR_WATERSHED: 81 | gradient = sobel(rgb2gray(image)) 82 | return watershed(gradient, markers=options['n_segments'], compactness=0.001) 83 | 84 | 85 | class AriadneGraph(object): 86 | 87 | def __init__(self, image, segments): 88 | self.image = image 89 | self.segments = segments 90 | self.labels = self.segments + 1 91 | self.regions = regionprops(self.labels) 92 | self.graph = graph.rag_mean_color(self.image, self.labels) 93 | 94 | ####################################### 95 | # Centroid Mapping 96 | ####################################### 97 | 
for region in self.regions: 98 | self.graph.node[region['label']]['centroid'] = region['centroid'] 99 | self.graph.node[region['label']]['centroid_point'] = np.array([ 100 | region['centroid'][1], 101 | region['centroid'][0] 102 | ], dtype=float) 103 | 104 | def generateBoundaryImage(self, image=None, color=(1, 1, 0)): 105 | if image is None: 106 | image = self.image 107 | return mark_boundaries(image, self.labels, color=color) 108 | 109 | def generateEdgesImage(self, image, threshold=np.inf): 110 | if image is None: 111 | image = self.image 112 | for edge in self.graph.edges(): 113 | n1, n2 = edge 114 | 115 | r1, c1 = map(int, self.graph.node[n1]['centroid']) 116 | r2, c2 = map(int, self.graph.node[n2]['centroid']) 117 | 118 | p1 = np.array([c1, r1], dtype=float) 119 | p2 = np.array([c2, r2], dtype=float) 120 | 121 | line = draw.line(r1, c1, r2, c2) 122 | circle = draw.circle(r1, c1, 3) 123 | 124 | if self.graph[n1][n2]['weight'] < threshold: 125 | image[line] = 0, 0, 0 126 | image[circle] = 0, 0, 0 127 | return image 128 | 129 | def generateEdgesImageCV(self, image, threshold=np.inf): 130 | if image is None: 131 | image = self.image 132 | for edge in self.graph.edges(): 133 | n1, n2 = edge 134 | 135 | r1, c1 = map(int, self.graph.node[n1]['centroid']) 136 | r2, c2 = map(int, self.graph.node[n2]['centroid']) 137 | 138 | p1 = np.array([c1, r1], dtype=int) 139 | p2 = np.array([c2, r2], dtype=int) 140 | 141 | cv2.line(image, tuple(p1), tuple(p2), np.array([110, 122, 84.]), 3) 142 | cv2.circle(image, tuple(p1), 10, np.array([56, 38, 50.]), -1) 143 | 144 | return image 145 | 146 | def generateHNodesImage(self, image, nodes=[], radius=5, color=(255, 0, 0), thickness=-1, showNumbers=False): 147 | for i, n in enumerate(nodes): 148 | node = self.graph.node[n] 149 | p = node['centroid_point'].astype(int) 150 | cv2.circle( 151 | image, 152 | tuple(p), 153 | radius=radius, 154 | color=color, 155 | thickness=thickness 156 | ) 157 | if showNumbers: 158 | cv2.putText( 159 | image, 160 | "{}".format(i), 161 | tuple(p + np.array([5, 0])), 162 | cv2.FONT_HERSHEY_PLAIN, 163 | 1.5, 164 | color, 165 | 1 166 | ) 167 | return image 168 | 169 | def nearestNode(self, point, match_region_points=True): 170 | if match_region_points: 171 | ipoint = point.astype(int) 172 | label = self.labels[ipoint[1], ipoint[0]] 173 | return label 174 | else: 175 | min_dist = 10000000.0 176 | min_region = None 177 | for n in self.graph.nodes(): 178 | node = self.graph.node[n] 179 | region_center = np.array([ 180 | node['centroid'][1], 181 | node['centroid'][0] 182 | ], dtype=float) 183 | dist = np.linalg.norm(region_center - point) 184 | if dist < min_dist: 185 | min_dist = dist 186 | min_region = n 187 | return min_region 188 | 189 | def adjacentNodes(self, input_nodes, excluded_nodes=[]): 190 | node_map = set() 191 | for input_node in input_nodes: 192 | for n in self.graph.neighbors(input_node): 193 | if n not in excluded_nodes: 194 | node_map.add(n) 195 | return list(node_map) 196 | 197 | def getNeighbourhood(self, target_node): 198 | neigh = self.adjacentNodes([target_node]) 199 | neigh2 = self.adjacentNodes(neigh, excluded_nodes=neigh + [target_node]) 200 | return neigh + neigh2 201 | 202 | def deepNeighbourhood(self, target_node, level=2, forced_excluded_nodes=[], include_start_seed=False, as_simple_list=False): 203 | 204 | seed = [target_node] 205 | excluded_nodes = [] 206 | founds = [] 207 | if include_start_seed: 208 | founds = [[target_node]] 209 | 210 | excluded_nodes = [target_node] + forced_excluded_nodes 211 | while 
level > 0: 212 | neigh = self.adjacentNodes(seed, excluded_nodes=excluded_nodes) 213 | founds.append(neigh) 214 | seed = neigh 215 | excluded_nodes = [target_node] + forced_excluded_nodes 216 | for f in founds: 217 | excluded_nodes.extend(f) 218 | level -= 1 219 | if not as_simple_list: 220 | return founds 221 | else: 222 | return list(itertools.chain.from_iterable(founds)) 223 | 224 | def getBoundaryType(self, n): 225 | node = self.node(n) 226 | mask = self.maskImage(n) 227 | indices = np.array(np.nonzero(mask)).T 228 | rows = indices[:, 0] 229 | cols = indices[:, 1] 230 | 231 | bt = '' 232 | 233 | max_row = mask.shape[0] - 1 234 | max_col = mask.shape[1] - 1 235 | 236 | if 0 in rows: 237 | bt += 'N' 238 | if max_col in cols: 239 | bt += 'W' 240 | if max_row in rows: 241 | bt += 'S' 242 | if 0 in cols: 243 | bt += 'E' 244 | 245 | return bt 246 | 247 | def isBoundary(self, n): 248 | return len(self.getBoundaryType(n)) > 0 249 | 250 | def nodes(self): 251 | return self.graph.nodes() 252 | 253 | def node(self, n): 254 | return self.graph.node[n] 255 | 256 | def centroidPoint(self, n): 257 | node = self.node(n) 258 | return node['centroid_point'] 259 | 260 | def region(self, n): 261 | return self.regions[n - 1] 262 | 263 | def maskImage(self, n): 264 | mask = np.zeros(self.labels.shape, dtype=np.uint8) 265 | mask[self.labels == n] = 255 266 | return mask 267 | 268 | 269 | class AriadnePathTip(object): 270 | 271 | def __init__(self, position=None, orientation=None, points=None): 272 | self.position = np.array([0.0, 0.0]) 273 | self.orientation = 0.0 274 | if points is not None: 275 | p1 = np.array(points[0]) 276 | p2 = np.array(points[1]) 277 | direction = p1 - p2 278 | self.position = p1 279 | self.orientation = math.atan2(-direction[1], direction[0]) 280 | else: 281 | self.position = np.array(position) 282 | self.orientation = np.array(orientation) 283 | 284 | def getDirection(self): 285 | return np.array([ 286 | math.cos(self.orientation), 287 | -math.sin(self.orientation) 288 | ]) 289 | 290 | def draw(self, image, color=(255, 0, 255), thickness=4, size=30): 291 | direction = self.getDirection() 292 | p2 = self.position + direction * size 293 | cv2.arrowedLine(image, tuple(self.position), tuple(p2.astype(int)), color, thickness, tipLength=0.5) 294 | 295 | 296 | class AriadnePath(object): 297 | 298 | def __init__(self, ariadne=None, raw_points=np.array([])): 299 | self.ariadne = ariadne 300 | self.subgraph = nx.DiGraph() 301 | self.last_node = None 302 | self.first_node = None 303 | 304 | self.raw_points = [] 305 | if self.ariadne is None: 306 | if raw_points.ndim == 1: 307 | raw_points = raw_points.reshape((int(len(raw_points) / 2)), 2) 308 | for i in range(0, raw_points.shape[0]): 309 | self.raw_points.append(tuple(raw_points[i, :])) 310 | 311 | def clone(self): 312 | path = AriadnePath(self.ariadne) 313 | path.subgraph = nx.DiGraph() 314 | nodes = self.asList() 315 | for n in nodes: 316 | path.addNode(n) 317 | return path 318 | 319 | def size(self): 320 | if self.ariadne is None: 321 | return len(self.as2DPoints()) 322 | else: 323 | return len(self.subgraph.nodes()) 324 | 325 | def sizeInPixel(self): 326 | points = self.as2DPoints() 327 | sizepx = 0.0 328 | for i in range(1, len(points)): 329 | p1 = np.array(points[i - 1]) 330 | p2 = np.array(points[i]) 331 | sizepx += np.linalg.norm(p1 - p2) 332 | return sizepx 333 | 334 | def contains(self, n): 335 | return self.subgraph.has_node(n) 336 | 337 | def addNode(self, n): 338 | if self.subgraph.has_node(n) == True: 339 | return 340 | 
self.subgraph.add_node(n) 341 | if self.last_node is not None: 342 | self.subgraph.add_edge(self.last_node, n) 343 | if self.first_node is None: 344 | self.first_node = n 345 | self.last_node = n 346 | 347 | def searchSimilar(self, direction=None, predictor=None, max_level=3): 348 | if self.size() == 0: 349 | print("Impossible to search: Path si void!") 350 | return None 351 | 352 | if self.size() == 1 and direction is None: 353 | print("Impossible to search: Path has only one node and no initial DIRECTION provided!") 354 | return None 355 | 356 | nodes = self.asList() 357 | points = self.as2DPoints() 358 | 359 | target_node = nodes[-1] 360 | neighbourhood_list = self.ariadne.graph.deepNeighbourhood(target_node, level=max_level) 361 | neighbourhood = list(itertools.chain.from_iterable(neighbourhood_list)) 362 | 363 | pdfs = [] 364 | 365 | ####################################### 366 | # Max Computeation 367 | ####################################### 368 | max_compare = 0 369 | for n in neighbourhood: 370 | compare = predictor.computeScoreVisual(self.last_node, n) 371 | if compare > max_compare: 372 | max_compare = compare 373 | 374 | ####################################### 375 | # PDF Computation 376 | ####################################### 377 | for n in neighbourhood: 378 | if self.contains(n): 379 | pdfs.append(0.0) 380 | continue 381 | 382 | norm_visual = predictor.computeScoreVisual(self.last_node, n, None) 383 | 384 | new_path = self.clone() 385 | new_path.addNode(n) 386 | direction_pdf = predictor.computeScoreCurvature(new_path, initial_direction=direction) 387 | 388 | # self.removeNode(n) 389 | if direction is None: 390 | cumulative_pdf = norm_visual * direction_pdf 391 | else: 392 | cumulative_pdf = direction_pdf 393 | #print("Match", target_node, n, norm_visual, direction_pdf, "=", cumulative_pdf) 394 | pdfs.append(cumulative_pdf) 395 | 396 | ####################################### 397 | # Arg max 398 | ####################################### 399 | pdfs = np.array(pdfs) 400 | max_i = np.argmax(pdfs) 401 | #print("Max", pdfs[max_i], neighbourhood[max_i]) 402 | return neighbourhood[max_i] 403 | 404 | def searchSimilarV2(self, direction=None, direction_matcher=None, visual_matcher=None, debug_visual_matcher=None): 405 | if self.size() == 0: 406 | print("Impossible to search: Path si void!") 407 | return None 408 | 409 | if self.size() == 1 and direction is None: 410 | print("Impossible to search: Path has only one node and no initial DIRECTION provided!") 411 | return None 412 | 413 | nodes = self.asList() 414 | points = self.as2DPoints() 415 | 416 | target_node = nodes[-1] 417 | neighbourhood = self.ariadne.graph.getNeighbourhood(target_node) 418 | 419 | pdfs = [] 420 | 421 | ####################################### 422 | # Max Computeation 423 | ####################################### 424 | max_compare = 0 425 | for n in neighbourhood: 426 | compare = visual_matcher.compare(self.last_node, n) 427 | if compare > max_compare: 428 | max_compare = compare 429 | 430 | ####################################### 431 | # PDF Computation 432 | ####################################### 433 | for n in neighbourhood: 434 | if self.contains(n): 435 | pdfs.append(0.0) 436 | continue 437 | 438 | visual_pdf = visual_matcher.compare(self.last_node, n) 439 | norm_visual = visual_matcher.normalizeComparison(visual_pdf, max_compare) 440 | 441 | new_path = self.clone() 442 | new_path.addNode(n) 443 | direction_pdf = direction_matcher.computeScore(new_path, force_direction=direction) 444 | 445 | # 
self.removeNode(n) 446 | if direction is None: 447 | cumulative_pdf = norm_visual * direction_pdf 448 | else: 449 | cumulative_pdf = direction_pdf 450 | print("Match", target_node, n, visual_pdf, direction_pdf, "=", cumulative_pdf, "|||", norm_visual, norm_visual * direction_pdf) 451 | pdfs.append(cumulative_pdf) 452 | 453 | ####################################### 454 | # Arg max 455 | ####################################### 456 | pdfs = np.array(pdfs) 457 | max_i = np.argmax(pdfs) 458 | print("Max", pdfs[max_i], neighbourhood[max_i]) 459 | return neighbourhood[max_i] 460 | 461 | def removeNode(self, n): 462 | if self.subgraph.has_node(n) == False: 463 | return 464 | try: 465 | pred = self.subgraph.predecessors(n)[0] 466 | except: 467 | pred = None 468 | 469 | try: 470 | succ = self.subgraph.successors(n)[0] 471 | except: 472 | succ = None 473 | 474 | if pred is not None and succ is not None: 475 | self.subgraph.add_edge(pred, succ) 476 | 477 | if self.first_node == n: 478 | self.first_node = succ 479 | if self.last_node == n: 480 | self.last_node = pred 481 | 482 | self.subgraph.remove_node(n) 483 | 484 | def asList(self): 485 | if self.first_node is None: 486 | return [] 487 | 488 | path_nodes = [self.first_node] 489 | current_node = self.first_node 490 | try: 491 | next_node = next(self.subgraph.successors(self.first_node)) 492 | while next_node is not None: 493 | path_nodes.append(next_node) 494 | next_node = next(self.subgraph.successors(next_node)) 495 | except: 496 | pass 497 | 498 | return path_nodes 499 | 500 | def getTip(self): 501 | points = self.as2DPoints() 502 | tip = AriadnePathTip(points=points[0:2]) 503 | return tip 504 | 505 | def getTips(self, reverse_order=False): 506 | points = self.as2DPoints() 507 | if len(points) < 2: 508 | return None 509 | tip1 = AriadnePathTip(points=[points[0], points[1]]) 510 | tip2 = AriadnePathTip(points=[points[-1], points[-2]]) 511 | if reverse_order: 512 | return tip2, tip1 513 | else: 514 | return tip1, tip2 515 | 516 | def as2DPoints(self): 517 | if self.ariadne is None: 518 | return self.raw_points 519 | node_list = self.asList() 520 | points = [] 521 | for n in node_list: 522 | node = self.ariadne.graph.graph.node[n] 523 | point = tuple(node['centroid_point'].astype(int)) 524 | points.append(point) 525 | return points 526 | 527 | def endsInRegion(self, region_center, region_radius, squared_region=False): 528 | if self.size() == 0: 529 | return False 530 | 531 | points = self.as2DPoints() 532 | last_point = np.array(points[-1]) 533 | center = np.array(region_center) 534 | dist = np.linalg.norm(center - last_point) 535 | return dist <= region_radius 536 | 537 | def draw(self, img, color=(1, 0, 0), thickness=5, offset=np.array([0, 0]), dtype=int, draw_numbers=False, draw_tips=False): 538 | 539 | points = self.as2DPoints() 540 | for i in range(1, len(points)): 541 | # n1 = self.ariadne.graph.graph.node[e[0]] 542 | # n2 = self.ariadne.graph.graph.node[e[1]] 543 | p1 = points[i - 1] # n1['centroid_point'].astype(int) + offset 544 | p2 = points[i] # n2['centroid_point'].astype(int) + offset 545 | cv2.circle(img, tuple(p1), color=color, 546 | radius=thickness, thickness=-1) 547 | cv2.circle(img, tuple(p2), color=color, 548 | radius=thickness, thickness=-1) 549 | if draw_numbers: 550 | cv2.putText(img, "{}".format(i), tuple( 551 | p1 + np.array([5, 0])), cv2.FONT_HERSHEY_PLAIN, 1.5, color, 1) 552 | cv2.line(img, tuple(p1), tuple(p2), color=color, 553 | thickness=int(thickness * 0.6)) 554 | 555 | if draw_tips and self.size() >= 2: 556 | tips = 
self.getTips() 557 | tips[0].draw(img, color=(0, 0, 1)) 558 | tips[1].draw(img, color=(0, 1, 0)) 559 | 560 | def drawWithOffset(self, img, color=(1, 0, 0), thickness=5, offset=np.array([0, 0], dtype=int)): 561 | for i, e in enumerate(self.subgraph.edges()): 562 | n1 = self.ariadne.graph.graph.node[e[0]] 563 | n2 = self.ariadne.graph.graph.node[e[1]] 564 | p1 = n1['centroid_point'].astype(int) + offset 565 | p2 = n2['centroid_point'].astype(int) + offset 566 | cv2.circle(img, tuple(p1), color=color, 567 | radius=thickness, thickness=-1) 568 | cv2.putText(img, "{}".format(i), tuple( 569 | p1 + np.array([5, 0])), cv2.FONT_HERSHEY_PLAIN, 1.5, color, 1) 570 | cv2.line(img, tuple(p1), tuple(p2), color=color, 571 | thickness=int(thickness * 0.6)) 572 | 573 | @staticmethod 574 | def loadPathsFromTXT(filename): 575 | f = open(filename, "r") 576 | lines = f.readlines() 577 | paths = [] 578 | for l in lines: 579 | path_data = np.array(list(map(int, l.split()))) 580 | points_data = path_data[1:] 581 | path = AriadnePath(ariadne=None, raw_points=points_data) 582 | paths.append(path) 583 | return paths 584 | 585 | 586 | class AriadnePathFinder(object): 587 | 588 | def __init__(self, ariadne, predictor, name="PathFinder"): 589 | self.ariadne = ariadne 590 | self.predictor = predictor 591 | self.path = AriadnePath(ariadne) 592 | self.initial_direction = None 593 | self.initial_neighboring = None 594 | self.open = True 595 | self.name = name 596 | 597 | def isOpen(self): 598 | return self.open 599 | 600 | def close(self): 601 | self.open = False 602 | 603 | def open(self): 604 | self.open = True 605 | 606 | def initPathWithDirection(self, initial_node, initial_direction=None): 607 | self.path.addNode(initial_node) 608 | self.initial_direction = initial_direction 609 | found = self.searchNextNode(direction=initial_direction) 610 | if found is None: 611 | return False 612 | self.path.addNode(found) 613 | return True 614 | 615 | def initPathWithNeighboring(self, initial_node, neigh_node): 616 | self.path.addNode(initial_node) 617 | self.path.addNode(neigh_node) 618 | return initial_node != neigh_node 619 | 620 | def searchNextNode(self, direction=None, auto_add_node=True): 621 | if self.path.size() == 0: 622 | print("Unable to find next: Path si void!") 623 | return None 624 | 625 | if self.path.size() == 1 and direction is None: 626 | print("Unable to find next: Path has only one node and no initial DIRECTION provided!") 627 | return None 628 | 629 | nodes = self.path.asList() 630 | points = self.path.as2DPoints() 631 | 632 | target_node = nodes[-1] 633 | neighbourhood_list = self.ariadne.graph.deepNeighbourhood( 634 | target_node, 635 | # forced_excluded_nodes=nodes, 636 | level=self.predictor.getNeighbourhoodLevel(self.path.size()) 637 | ) 638 | neighbourhood = list(itertools.chain.from_iterable(neighbourhood_list)) 639 | 640 | ####################################### 641 | # Invalid Neighbourhood 642 | ####################################### 643 | if len(neighbourhood) == 0: 644 | return None 645 | 646 | pdfs = [] 647 | 648 | ####################################### 649 | # Visual Max Score 650 | ####################################### 651 | visual_max_score = 0 652 | for n in neighbourhood: 653 | visual_score = self.predictor.computeScoreVisual(self.path.last_node, n) 654 | if visual_score > visual_max_score: 655 | visual_max_score = visual_score 656 | 657 | ####################################### 658 | # PDF Computation 659 | ####################################### 660 | for n in neighbourhood: 661 | if 
self.path.contains(n): 662 | pdfs.append(0.0) 663 | continue 664 | 665 | norm_visual_pdf = self.predictor.computeScoreVisual(self.path.last_node, n, reference_value=None) 666 | 667 | new_path = self.path.clone() 668 | new_path.addNode(n) 669 | direction_pdf = self.predictor.computeScoreCurvature(new_path, initial_direction=direction) 670 | 671 | distance_pdf = self.predictor.computeScoreDistance(self.path.last_node, n) 672 | 673 | # if '3' in self.name: 674 | # self.predictor.curvature_predictor.computeScore(new_path, debug=True) 675 | 676 | if direction is None: 677 | cumulative_pdf = norm_visual_pdf * direction_pdf 678 | else: 679 | cumulative_pdf = direction_pdf 680 | 681 | cumulative_pdf = cumulative_pdf * distance_pdf 682 | # if '0' in self.name: 683 | # print("Match", target_node, n, norm_visual_pdf, direction_pdf,distance_pdf, "=", cumulative_pdf) 684 | pdfs.append(cumulative_pdf) 685 | 686 | #print(self.name, pdfs) 687 | ####################################### 688 | # Arg max 689 | ####################################### 690 | pdfs = np.array(pdfs) 691 | max_i = np.argmax(pdfs) 692 | 693 | #print("Max", pdfs[max_i], neighbourhood[max_i]) 694 | if auto_add_node: 695 | self.path.addNode(neighbourhood[max_i]) 696 | return neighbourhood[max_i] 697 | 698 | 699 | class AriadneMultiPathFinder(object): 700 | 701 | def __init__(self, ariadne, predictor): 702 | self.ariadne = ariadne 703 | self.predictor = predictor 704 | self.path_finders = [] 705 | self.iterations = 0 706 | 707 | def isFinished(self): 708 | close_counter = 0 709 | for f in self.path_finders: 710 | if not f.isOpen(): 711 | close_counter += 1 712 | return close_counter == self.size() 713 | 714 | def close(self): 715 | for f in self.path_finders: 716 | f.close() 717 | 718 | def size(self): 719 | return len(self.path_finders) 720 | 721 | def getIterations(self): 722 | return self.iterations 723 | 724 | def startSearchInNeighbourhood(self, start_node, depth=1): 725 | 726 | neighbourhood_raw = self.ariadne.graph.deepNeighbourhood(start_node, level=depth, as_simple_list=False) 727 | 728 | neighbourhood = neighbourhood_raw[-1] 729 | self.path_finders = [] 730 | for i, n2 in enumerate(neighbourhood): 731 | path_finder = AriadnePathFinder(self.ariadne, self.predictor, name="PathFinder_{}".format(i)) 732 | path_finder.initPathWithNeighboring(start_node, n2) 733 | self.path_finders.append(path_finder) 734 | self.iterations = 1 735 | 736 | def nextStep(self): 737 | for i, f in enumerate(self.path_finders): 738 | if f.isOpen(): 739 | f.searchNextNode() 740 | self.iterations += 1 741 | 742 | def getScores(self, single_components=False): 743 | scores = [] 744 | for i, f in enumerate(self.path_finders): 745 | if single_components: 746 | scores.append(self.predictor.score_function.computeScores(f.path)) 747 | else: 748 | scores.append(self.predictor.score_function.computeScore(f.path)) 749 | return scores 750 | 751 | def getBestPathFinder(self): 752 | scores = self.getScores() 753 | index = np.argmax(scores) 754 | return self.path_finders[index] 755 | 756 | 757 | class Ariadne(object): 758 | GAIN_DIRECTION = 2.5 759 | GAIN_COLOR = 1 760 | PROPERTIES_FILE_EXTENSION = "mat" 761 | IMAGE_FILE_EXTENSION = "jpg" 762 | 763 | def __init__(self, image=None, image_file=None, segments_data=None, threshold=0.4, segmentator=ImageSegmentator()): 764 | 765 | ####################################### 766 | # Image loading 767 | ####################################### 768 | if image == None and image_file == None: 769 | import sys 770 | print("Ariadne error! 
Neither Image or ImageFile passed") 771 | sys.exit(0) 772 | 773 | self.image_path = None 774 | if image == None and image_file is not None: 775 | image_file = os.path.abspath(image_file) 776 | self.image = img_as_float(io.imread(image_file)) 777 | self.image_path = image_file 778 | else: 779 | self.image = image 780 | self.threshold = threshold 781 | 782 | ####################################### 783 | # Objects 784 | ####################################### 785 | self.paths = [] 786 | 787 | ####################################### 788 | # Slic Superpixel Segmentation 789 | ####################################### 790 | if segments_data is None: 791 | self.segments = segmentator.segments(self.image) 792 | else: 793 | self.segments = segments_data 794 | 795 | ####################################### 796 | # Superpixels Graph 797 | ####################################### 798 | self.graph = AriadneGraph(self.image, self.segments) 799 | 800 | def saveSegmentation(self, filename): 801 | """ Save sagmentations to file """ 802 | np.savetxt(filename, self.segments) 803 | 804 | def clearPaths(self): 805 | self.paths = [] 806 | 807 | def createPath(self): 808 | path = AriadnePath(self) 809 | self.paths.append(path) 810 | return path 811 | 812 | def removePath(self, path): 813 | self.paths.remove(path) 814 | 815 | def saveToFile(self, filename): 816 | data = { 817 | "image_path": self.image_path, 818 | "segments": self.segments, 819 | "paths": [], 820 | "paths_points": [] 821 | } 822 | 823 | for p in self.paths: 824 | if len(p.asList()) > 0: 825 | data["paths"].append(p.asList()) 826 | data["paths_points"].append(p.as2DPoints()) 827 | 828 | scipy.io.savemat(filename, data, do_compression=True) 829 | 830 | @staticmethod 831 | def savePathsToTXT(paths, filename): 832 | paths_point = [] 833 | for path in paths: 834 | points = np.array(path.as2DPoints()).ravel() 835 | points = np.insert(points, 0, path.size()) 836 | paths_point.append(points) 837 | 838 | f = open(filename, "w") 839 | for path in paths_point: 840 | for p in path: 841 | f.write(str(p)) 842 | f.write(" ") 843 | f.write("\n") 844 | f.close() 845 | 846 | @staticmethod 847 | def loadFromFile(filename, is_image=False): 848 | image_filename = None 849 | if is_image: 850 | image_filename = filename 851 | filename = Ariadne.getImageProperties(filename) 852 | 853 | data = scipy.io.loadmat(filename) 854 | image_path = data["image_path"][0] 855 | 856 | if not os.path.exists(image_path): 857 | image_path = os.path.splitext(filename)[0] + "." + Ariadne.IMAGE_FILE_EXTENSION 858 | 859 | ar = Ariadne( 860 | image=None, 861 | image_file=image_path, 862 | segments_data=data["segments"] 863 | ) 864 | for p in data["paths"].ravel(): 865 | path = ar.createPath() 866 | for n in p.ravel(): 867 | path.addNode(n) 868 | return ar 869 | 870 | @staticmethod 871 | def hasPropertiesFile(filename): 872 | properties_file = Ariadne.getImageProperties(filename) 873 | return os.path.exists(properties_file) 874 | 875 | @staticmethod 876 | def getImageProperties(filename): 877 | properties_file = os.path.splitext(filename)[0] + "." + Ariadne.PROPERTIES_FILE_EXTENSION 878 | return properties_file 879 | --------------------------------------------------------------------------------