├── .gitignore ├── LICENSE ├── README.rst ├── main.py ├── profile ├── line.profile ├── profiling.py ├── profiling.py.lprof ├── ratslam-bkp.profile └── ratslam.profile ├── ratslam ├── __init__.py ├── _globals.py ├── experience_map.py ├── pose_cells.py ├── view_cells.py └── visual_odometry.py └── result_example.png /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | 3 | # C extensions 4 | *.so 5 | 6 | # Packages 7 | *.egg 8 | *.egg-info 9 | dist 10 | build 11 | eggs 12 | parts 13 | bin 14 | var 15 | sdist 16 | develop-eggs 17 | .installed.cfg 18 | lib 19 | lib64 20 | __pycache__ 21 | 22 | # Installer logs 23 | pip-log.txt 24 | 25 | # Unit test / coverage reports 26 | .coverage 27 | .tox 28 | nosetests.xml 29 | 30 | # Translations 31 | *.mo 32 | 33 | # Mr Developer 34 | .mr.developer.cfg 35 | .project 36 | .pydevproject 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013 Renato de Pontes Pereira 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of 6 | this software and associated documentation files (the "Software"), to deal in 7 | the Software without restriction, including without limitation the rights to 8 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software is furnished to do so, 10 | subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 17 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR 18 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 19 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 20 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ======================== 2 | RATSLAM (PYTHON VERSION) 3 | ======================== 4 | 5 | This is a full Ratslam implementation in python. This implementation is based 6 | on Milford's original implementation [1]_ in matlab, and Christine Lee's python 7 | implementation [2]_. The original data movies can also be found in [1]_. 8 | 9 | 10 | Dependences 11 | ----------- 12 | 13 | The only dependence for this package is Numpy [3]_, thus it does not handle how 14 | to open and manage the movie and image files. For this, I strongly recommend 15 | the use of OpenCV [4]_. 16 | 17 | 18 | Profiling 19 | --------- 20 | A profile utility is provided with this package in the 'profiling' directory, 21 | it is ready to use the RunSnakeRun [5]_ and Line Profiler [6]_. 22 | 23 | 24 | .. [1] https://wiki.qut.edu.au/display/cyphy/RatSLAM+MATLAB 25 | .. [2] https://github.com/coxlab/ratslam-python 26 | .. [3] http://www.numpy.org/ 27 | .. [4] http://opencv.org/ 28 | .. [5] http://www.vrplumber.com/programming/runsnakerun/ 29 | .. 
if __name__ == '__main__':
    # Change this line to open other movies
    data = r'D:\Bkp\ratslam\data\stlucia_testloop.avi'

    video = cv2.VideoCapture(data)
    slam = ratslam.Ratslam()

    loop = 0
    # Prime the capture; this first frame is discarded — the loop below
    # re-reads before every iteration.
    _, frame = video.read()
    while True:
        loop += 1

        # RUN A RATSLAM ITERATION ==================================
        _, frame = video.read()
        if frame is None: break

        # Ratslam consumes gray-scale frames only.
        img = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        slam.digest(img)
        # ==========================================================

        # Plot each 50 frames
        if loop%50 != 0:
            continue

        # PLOT THE CURRENT RESULTS =================================
        # OpenCV frames are BGR; matplotlib expects RGB, so swap channels.
        b, g, r = cv2.split(frame)
        rgb_frame = cv2.merge([r, g, b])

        plot.clf()

        # RAW IMAGE -------------------
        ax = plot.subplot(2, 2, 1)
        plot.title('RAW IMAGE')
        plot.imshow(rgb_frame)
        ax.get_xaxis().set_ticks([])
        ax.get_yaxis().set_ticks([])
        # -----------------------------

        # RAW ODOMETRY ----------------
        # slam.odometry is [xs, ys, ths]; plot the x/y path and mark the
        # latest position with a black dot.
        plot.subplot(2, 2, 2)
        plot.title('RAW ODOMETRY')
        plot.plot(slam.odometry[0], slam.odometry[1])
        plot.plot(slam.odometry[0][-1], slam.odometry[1][-1], 'ko')
        #------------------------------

        # POSE CELL ACTIVATION --------
        # Axis limits follow the pose cell grid dimensions (see
        # ratslam._globals: PC_DIM_XY=61, PC_DIM_TH=36).
        # NOTE(review): the color spec 'K' is uppercase; matplotlib's black
        # shorthand is lowercase 'k' — confirm it renders as intended.
        ax = plot.subplot(2, 2, 3, projection='3d')
        plot.title('POSE CELL ACTIVATION')
        x, y, th = slam.pc
        ax.plot(x, y, 'x')
        ax.plot3D([0, 60], [y[-1], y[-1]], [th[-1], th[-1]], 'K')
        ax.plot3D([x[-1], x[-1]], [0, 60], [th[-1], th[-1]], 'K')
        ax.plot3D([x[-1], x[-1]], [y[-1], y[-1]], [0, 36], 'K')
        ax.plot3D([x[-1]], [y[-1]], [th[-1]], 'mo')
        ax.grid()
        ax.axis([0, 60, 0, 60]);
        ax.set_zlim(0, 36)
        # -----------------------------

        # EXPERIENCE MAP --------------
        # Scatter every experience position and mark the current one.
        plot.subplot(2, 2, 4)
        plot.title('EXPERIENCE MAP')
        xs = []
        ys = []
        for exp in slam.experience_map.exps:
            xs.append(exp.x_m)
            ys.append(exp.y_m)

        plot.plot(xs, ys, 'bo')
        plot.plot(slam.experience_map.current_exp.x_m,
                  slam.experience_map.current_exp.y_m, 'ko')
        # -----------------------------

        plot.tight_layout()
        # plot.savefig('C:\\Users\\Renato\\Desktop\\results\\forgif\\' + '%04d.jpg'%loop)
        plot.pause(0.1)
        # ==========================================================

    print 'DONE!'
    print 'n_ templates:', len(slam.view_cells.cells)
    print 'n_ experiences:', len(slam.experience_map.exps)
    plot.show()
============================================================================= 6 | # Copyright (c) 2013 Renato de Pontes Pereira, renato.ppontes at gmail dot com 7 | # 8 | # Permission is hereby granted, free of charge, to any person obtaining a copy 9 | # of this software and associated documentation files (the "Software"), to deal 10 | # in the Software without restriction, including without limitation the rights 11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | # copies of the Software, and to permit persons to whom the Software is 13 | # furnished to do so, subject to the following conditions: 14 | # 15 | # The above copyright notice and this permission notice shall be included in 16 | # all copies or substantial portions of the Software. 17 | # 18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 24 | # SOFTWARE. 25 | # ============================================================================= 26 | 27 | import cv2 28 | import numpy as np 29 | import ratslam 30 | 31 | ''' 32 | Utility for profiling. 33 | 34 | Use it with RunSnakeRun [1]_ and Line Profiler [2]_. 35 | 36 | **RunSnakeRun:** 37 | 38 | - For RunSnakeRun, just run this file and open the ratslam.profile file into 39 | the program. I suggest you to backup the profile file in order to not 40 | override it when running the line profiler. 
def main():
    # Run the full RatSLAM pipeline over the test movie so the profiler
    # captures a representative workload. See the module docstring for how
    # to use the output with RunSnakeRun / Line Profiler.
    data = r'D:\Bkp\ratslam\data\stlucia_testloop.avi'

    video = cv2.VideoCapture(data)
    slam = ratslam.Ratslam()

    loop = 0
    # Prime the capture; the loop re-reads before each iteration.
    _, frame = video.read()
    while True:
        loop += 1
        _, frame = video.read()
        if frame is None: break

        img = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        slam.digest(img)

        # Uncomment to profile a single iteration only (break point
        # mentioned in the module docstring).
        # break

        # Progress report; the 1000 denominator assumes the test movie has
        # roughly 1000 frames — TODO confirm for other inputs.
        if loop%100 == 0: print '%0.2f%%'%(100*loop/1000.)

import cProfile
command = """main()"""
# Writes cumulative profiling stats to ratslam.profile (open it with
# RunSnakeRun). Back the file up before re-running if you want to keep it.
cProfile.runctx(command, globals(), locals(), filename="ratslam.profile" )
-------------------------------------------------------------------------------- /ratslam/__init__.py: -------------------------------------------------------------------------------- 1 | # ============================================================================= 2 | # Federal University of Rio Grande do Sul (UFRGS) 3 | # Connectionist Artificial Intelligence Laboratory (LIAC) 4 | # Renato de Pontes Pereira - rppereira@inf.ufrgs.br 5 | # ============================================================================= 6 | # Copyright (c) 2013 Renato de Pontes Pereira, renato.ppontes at gmail dot com 7 | # 8 | # Permission is hereby granted, free of charge, to any person obtaining a copy 9 | # of this software and associated documentation files (the "Software"), to deal 10 | # in the Software without restriction, including without limitation the rights 11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | # copies of the Software, and to permit persons to whom the Software is 13 | # furnished to do so, subject to the following conditions: 14 | # 15 | # The above copyright notice and this permission notice shall be included in 16 | # all copies or substantial portions of the Software. 17 | # 18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 24 | # SOFTWARE. 25 | # ============================================================================= 26 | 27 | ''' 28 | This is a full Ratslam implementation in python. 
class Ratslam(object):
    '''Top-level RatSLAM model.

    Wires together the four modules of the algorithm — visual odometry,
    view cells, pose cells and experience map — and keeps a running record
    of the odometry and of the pose cell activation so they can be plotted.
    '''

    def __init__(self):
        '''Creates the four RatSLAM modules and seeds the tracking lists.'''

        self.visual_odometry = VisualOdometry()
        self.view_cells = ViewCells()
        self.pose_cells = PoseCells()
        self.experience_map = ExperienceMap()

        # TRACKING -------------------------------
        # self.odometry holds [xs, ys, ths]; self.pc holds the pose cell
        # activation indices with the same layout.
        odo_x, odo_y, odo_th = self.visual_odometry.odometry
        self.odometry = [[odo_x], [odo_y], [odo_th]]

        act_x, act_y, act_th = self.pose_cells.active
        self.pc = [[act_x], [act_y], [act_th]]
        # ----------------------------------------

    def digest(self, img):
        '''Execute a step of ratslam algorithm for a given image.

        :param img: an gray-scale image as a 2D numpy array.
        '''

        # One full iteration: match the view, estimate motion, update the
        # pose cell network, then integrate into the experience map.
        act_x, act_y, act_th = self.pose_cells.active
        view_cell = self.view_cells(img, act_x, act_y, act_th)
        vtrans, vrot = self.visual_odometry(img)
        act_x, act_y, act_th = self.pose_cells(view_cell, vtrans, vrot)
        self.experience_map(view_cell, vtrans, vrot, act_x, act_y, act_th)

        # TRACKING -------------------------------
        for series, value in zip(self.odometry, self.visual_odometry.odometry):
            series.append(value)
        for series, value in zip(self.pc, (act_x, act_y, act_th)):
            series.append(value)
        # ----------------------------------------
def min_delta(d1, d2, max_):
    '''Return the smallest wrap-around distance between two indices.

    :param d1: first index.
    :param d2: second index.
    :param max_: size of the circular dimension.
    :return: min of the direct and the wrapped absolute difference.
    '''
    delta = np.min([np.abs(d1-d2), max_-np.abs(d1-d2)])
    return delta

def clip_rad_180(angle):
    '''Clip an angle (radians) into the interval (-pi, pi].'''
    while angle > np.pi:
        angle -= 2*np.pi
    while angle <= -np.pi:
        angle += 2*np.pi
    return angle

def clip_rad_360(angle):
    '''Clip an angle (radians) into the interval [0, 2*pi).'''
    while angle < 0:
        angle += 2*np.pi
    while angle >= 2*np.pi:
        angle -= 2*np.pi
    return angle

def signed_delta_rad(angle1, angle2):
    '''Return the signed shortest rotation (radians) from angle1 to angle2.

    :param angle1: source angle, in radians.
    :param angle2: target angle, in radians.
    :return: delta in (-pi, pi], positive for counter-clockwise rotation.
    '''
    # Renamed from `dir` to avoid shadowing the builtin.
    direction = clip_rad_180(angle2 - angle1)

    delta_angle = abs(clip_rad_360(angle1) - clip_rad_360(angle2))

    if (delta_angle < (2*np.pi-delta_angle)):
        if (direction > 0):
            angle = delta_angle
        else:
            angle = -delta_angle
    else:
        # The complementary arc is shorter: go around the other way.
        if (direction > 0):
            angle = 2*np.pi - delta_angle
        else:
            angle = -(2*np.pi-delta_angle)
    return angle


def create_pc_weights(dim, var):
    '''Create a normalized 3D Gaussian weight kernel for the pose cells.

    :param dim: side length of the cubic kernel.
    :param var: variance of the Gaussian.
    :return: a (dim, dim, dim) numpy array that sums to 1.
    '''
    dim_center = int(np.floor(dim/2.))

    weight = np.zeros([dim, dim, dim])
    # `range` instead of py2-only `xrange` (works on both Python 2 and 3).
    for x, y, z in itertools.product(range(dim), range(dim), range(dim)):
        dx = -(x-dim_center)**2
        dy = -(y-dim_center)**2
        dz = -(z-dim_center)**2
        weight[x, y, z] = 1.0/(var*np.sqrt(2*np.pi))*np.exp((dx+dy+dz)/(2.*var**2))

    # Normalize so injected energy is independent of dim/var.
    weight = weight/np.sum(weight)
    return weight

# @profile
def compare_segments(seg1, seg2, slen):
    '''Find the best relative shift between two 1D intensity profiles.

    Slides the segments over each other by up to ``slen`` positions in both
    directions and picks the offset with the smallest mean absolute
    difference.

    :param seg1: first segment, as a 1D numpy array.
    :param seg2: second segment, same size as seg1.
    :param slen: maximum shift to test; must be smaller than seg1.size.
    :return: tuple (minoffset, mindiff) — the signed best shift and the mean
        absolute difference at that shift.
    '''
    cwl = seg1.size

    mindiff = 1e10
    minoffset = 0

    # NOTE: removed dead `diffs = np.zeros(slen)` — it was allocated on
    # every call (a hot path, per profile/line.profile) and never used.
    for offset in range(slen+1):
        e = (cwl-offset)

        # Shift seg1 right relative to seg2.
        cdiff = np.abs(seg1[offset:cwl] - seg2[:e])
        cdiff = np.sum(cdiff)/e

        if cdiff < mindiff:
            mindiff = cdiff
            minoffset = offset

        # Shift seg1 left relative to seg2.
        cdiff = np.abs(seg1[:e] - seg2[offset:cwl])
        cdiff = np.sum(cdiff)/e

        if cdiff < mindiff:
            mindiff = cdiff
            minoffset = -offset

    return minoffset, mindiff
PC_AVG_XY_WRAP = range(PC_DIM_XY-PC_CELLS_TO_AVG, PC_DIM_XY) + range(PC_DIM_XY) + range(PC_CELLS_TO_AVG) 132 | PC_AVG_TH_WRAP = range(PC_DIM_TH-PC_CELLS_TO_AVG, PC_DIM_TH) + range(PC_DIM_TH) + range(PC_CELLS_TO_AVG) 133 | IMAGE_Y_SIZE = 640 134 | IMAGE_X_SIZE = 480 135 | IMAGE_VT_Y_RANGE = slice((480/2 - 80 - 40), (480/2 + 80 - 40)) 136 | IMAGE_VT_X_RANGE = slice((640/2 - 280 + 15), (640/2 + 280 + 15)) 137 | IMAGE_VTRANS_Y_RANGE = slice(270, 430) 138 | IMAGE_VROT_Y_RANGE = slice(75, 240) 139 | IMAGE_ODO_X_RANGE = slice(180+15, 460+15) 140 | VT_GLOBAL_DECAY = 0.1 141 | VT_ACTIVE_DECAY = 1.0 142 | VT_SHIFT_MATCH = 20 143 | VT_MATCH_THRESHOLD = 0.09 144 | EXP_DELTA_PC_THRESHOLD = 1.0 145 | EXP_CORRECTION = 0.5 146 | EXP_LOOPS = 100 147 | VTRANS_SCALE = 100 148 | VISUAL_ODO_SHIFT_MATCH = 140 149 | ODO_ROT_SCALING = np.pi/180./7. 150 | POSECELL_VTRANS_SCALING = 1./10. 151 | # ============================================================================= -------------------------------------------------------------------------------- /ratslam/experience_map.py: -------------------------------------------------------------------------------- 1 | # ============================================================================= 2 | # Federal University of Rio Grande do Sul (UFRGS) 3 | # Connectionist Artificial Intelligence Laboratory (LIAC) 4 | # Renato de Pontes Pereira - rppereira@inf.ufrgs.br 5 | # ============================================================================= 6 | # Copyright (c) 2013 Renato de Pontes Pereira, renato.ppontes at gmail dot com 7 | # 8 | # Permission is hereby granted, free of charge, to any person obtaining a copy 9 | # of this software and associated documentation files (the "Software"), to deal 10 | # in the Software without restriction, including without limitation the rights 11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | # copies of the Software, and to permit persons to whom the Software is 13 | # 
class Experience(object):
    '''A single node in the experience map.

    Each Experience records where it sits in the map (x_m, y_m, facing_rad)
    together with the pose cell indices and the view cell that were active
    when it was created, plus its outgoing links to other experiences.
    '''

    def __init__(self, x_pc, y_pc, th_pc, x_m, y_m, facing_rad, view_cell):
        '''Initializes the Experience.

        :param x_pc: index x of the current pose cell.
        :param y_pc: index y of the current pose cell.
        :param th_pc: index th of the current pose cell.
        :param x_m: the position on axis x in the experience map.
        :param y_m: the position on axis y in the experience map.
        :param facing_rad: the orientation of the experience, in radians.
        :param view_cell: the last most activated view cell.
        '''
        self.x_pc = x_pc
        self.y_pc = y_pc
        self.th_pc = th_pc
        self.x_m = x_m
        self.y_m = y_m
        self.facing_rad = facing_rad
        self.view_cell = view_cell
        self.links = []

    def link_to(self, target, accum_delta_x, accum_delta_y,
                accum_delta_facing):
        '''Creates a link between this experience and a target one.

        :param target: the target Experience object.
        :param accum_delta_x: accumulator in axis x computed by experience map.
        :param accum_delta_y: accumulator in axis y computed by experience map.
        :param accum_delta_facing: accumulator of orientation, computed by
                                   experience map.
        '''
        # Distance and bearing of the accumulated motion, expressed
        # relative to this experience's own facing direction.
        distance = np.sqrt(accum_delta_x**2 + accum_delta_y**2)
        bearing = np.arctan2(accum_delta_y, accum_delta_x)
        heading_rad = signed_delta_rad(self.facing_rad, bearing)
        facing_rad = signed_delta_rad(self.facing_rad, accum_delta_facing)
        self.links.append(
            ExperienceLink(self, target, facing_rad, distance, heading_rad))
class ExperienceLink(object):
    '''A directed connection between two experiences.'''

    def __init__(self, parent, target, facing_rad, d, heading_rad):
        '''Initializes the link.

        :param parent: the Experience object that owns this link. Kept for
                       signature compatibility; it is not stored on the link.
        :param target: the target Experience object.
        :param facing_rad: the angle (in radians) in the map context.
        :param d: the euclidean distance between the Experiences.
        :param heading_rad: the angle (in radians) between the Experiences.
        '''
        self.heading_rad = heading_rad
        self.d = d
        self.facing_rad = facing_rad
        self.target = target
154 | :param vtrans: the translation of the robot given by odometry. 155 | :param vrot: the rotation of the robot given by odometry. 156 | ''' 157 | 158 | #% integrate the delta x, y, facing 159 | self.accum_delta_facing = clip_rad_180(self.accum_delta_facing + vrot) 160 | self.accum_delta_x += vtrans*np.cos(self.accum_delta_facing) 161 | self.accum_delta_y += vtrans*np.sin(self.accum_delta_facing) 162 | 163 | if self.current_exp is None: 164 | delta_pc = 0 165 | else: 166 | delta_pc = np.sqrt( 167 | min_delta(self.current_exp.x_pc, x_pc, PC_DIM_XY)**2 + \ 168 | min_delta(self.current_exp.y_pc, y_pc, PC_DIM_XY)**2 + \ 169 | min_delta(self.current_exp.th_pc, th_pc, PC_DIM_TH)**2 170 | ) 171 | 172 | # if the vt is new or the pc x,y,th has changed enough create a new 173 | # experience 174 | adjust_map = False 175 | if len(view_cell.exps) == 0 or delta_pc > EXP_DELTA_PC_THRESHOLD: 176 | exp = self._create_exp(x_pc, y_pc, th_pc, view_cell) 177 | 178 | self.current_exp = exp 179 | self.accum_delta_x = 0 180 | self.accum_delta_y = 0 181 | self.accum_delta_facing = self.current_exp.facing_rad 182 | 183 | # if the vt has changed (but isn't new) search for the matching exp 184 | elif view_cell != self.current_exp.view_cell: 185 | 186 | # find the exp associated with the current vt and that is under the 187 | # threshold distance to the centre of pose cell activity 188 | # if multiple exps are under the threshold then don't match (to reduce 189 | # hash collisions) 190 | adjust_map = True 191 | matched_exp = None 192 | 193 | delta_pcs = [] 194 | n_candidate_matches = 0 195 | for (i, e) in enumerate(view_cell.exps): 196 | delta_pc = np.sqrt( 197 | min_delta(e.x_pc, x_pc, PC_DIM_XY)**2 + \ 198 | min_delta(e.y_pc, y_pc, PC_DIM_XY)**2 + \ 199 | min_delta(e.th_pc, th_pc, PC_DIM_TH)**2 200 | ) 201 | delta_pcs.append(delta_pc) 202 | 203 | if delta_pc < EXP_DELTA_PC_THRESHOLD: 204 | n_candidate_matches += 1 205 | 206 | if n_candidate_matches > 1: 207 | pass 208 | 209 | else: 210 | 
min_delta_id = np.argmin(delta_pcs) 211 | min_delta_val = delta_pcs[min_delta_id] 212 | 213 | if min_delta_val < EXP_DELTA_PC_THRESHOLD: 214 | matched_exp = view_cell.exps[min_delta_id] 215 | 216 | # see if the prev exp already has a link to the current exp 217 | link_exists = False 218 | for linked_exp in [l.target for l in self.current_exp.links]: 219 | if linked_exp == matched_exp: 220 | link_exists = True 221 | 222 | if not link_exists: 223 | self.current_exp.link_to(matched_exp, self.accum_delta_x, self.accum_delta_y, self.accum_delta_facing) 224 | 225 | if matched_exp is None: 226 | matched_exp = self._create_exp(x_pc, y_pc, th_pc, view_cell) 227 | 228 | self.current_exp = matched_exp 229 | self.accum_delta_x = 0 230 | self.accum_delta_y = 0 231 | self.accum_delta_facing = self.current_exp.facing_rad 232 | 233 | self.history.append(self.current_exp) 234 | 235 | if not adjust_map: 236 | return 237 | 238 | 239 | # Iteratively update the experience map with the new information 240 | for i in range(0, EXP_LOOPS): 241 | for e0 in self.exps: 242 | for l in e0.links: 243 | # e0 is the experience under consideration 244 | # e1 is an experience linked from e0 245 | # l is the link object which contains additoinal heading 246 | # info 247 | 248 | e1 = l.target 249 | 250 | # correction factor 251 | cf = EXP_CORRECTION 252 | 253 | # work out where exp0 thinks exp1 (x,y) should be based on 254 | # the stored link information 255 | lx = e0.x_m + l.d * np.cos(e0.facing_rad + l.heading_rad) 256 | ly = e0.y_m + l.d * np.sin(e0.facing_rad + l.heading_rad); 257 | 258 | # correct e0 and e1 (x,y) by equal but opposite amounts 259 | # a 0.5 correction parameter means that e0 and e1 will be 260 | # fully corrected based on e0's link information 261 | e0.x_m = e0.x_m + (e1.x_m - lx) * cf 262 | e0.y_m = e0.y_m + (e1.y_m - ly) * cf 263 | e1.x_m = e1.x_m - (e1.x_m - lx) * cf 264 | e1.y_m = e1.y_m - (e1.y_m - ly) * cf 265 | 266 | # determine the angle between where e0 thinks e1's facing 
267 | # should be based on the link information 268 | df = signed_delta_rad(e0.facing_rad + l.facing_rad, 269 | e1.facing_rad) 270 | 271 | # correct e0 and e1 facing by equal but opposite amounts 272 | # a 0.5 correction parameter means that e0 and e1 will be 273 | # fully corrected based on e0's link information 274 | e0.facing_rad = clip_rad_180(e0.facing_rad + df * cf) 275 | e1.facing_rad = clip_rad_180(e1.facing_rad - df * cf) 276 | 277 | return 278 | 279 | -------------------------------------------------------------------------------- /ratslam/pose_cells.py: -------------------------------------------------------------------------------- 1 | # ============================================================================= 2 | # Federal University of Rio Grande do Sul (UFRGS) 3 | # Connectionist Artificial Intelligence Laboratory (LIAC) 4 | # Renato de Pontes Pereira - rppereira@inf.ufrgs.br 5 | # ============================================================================= 6 | # Copyright (c) 2013 Renato de Pontes Pereira, renato.ppontes at gmail dot com 7 | # 8 | # Permission is hereby granted, free of charge, to any person obtaining a copy 9 | # of this software and associated documentation files (the "Software"), to deal 10 | # in the Software without restriction, including without limitation the rights 11 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | # copies of the Software, and to permit persons to whom the Software is 13 | # furnished to do so, subject to the following conditions: 14 | # 15 | # The above copyright notice and this permission notice shall be included in 16 | # all copies or substantial portions of the Software. 17 | # 18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
class PoseCells(object):
    '''Pose Cell module.

    The pose cells form a 3D continuous attractor network indexed by
    (x, y, th).  Each iteration injects energy at the active view cell's
    pose, spreads it by local excitation, sharpens it with local and global
    inhibition, normalizes, and then shifts the whole activity packet by
    path integration from the odometry (vtrans, vrot).
    '''

    def __init__(self):
        '''Initializes the Pose Cell module with one active cell at the centre.'''

        self.cells = np.zeros([PC_DIM_XY, PC_DIM_XY, PC_DIM_TH])
        # `//` keeps the indices integral on Python 3 as well (the original
        # `/` produced float indices there, which numpy rejects)
        self.active = a, b, c = [PC_DIM_XY//2, PC_DIM_XY//2, PC_DIM_TH//2]
        self.cells[a, b, c] = 1

    def compute_activity_matrix(self, xywrap, thwrap, wdim, pcw):
        '''Compute the activation of pose cells.

        :param xywrap: wrapped x/y index lookup handling the toroidal edges.
        :param thwrap: wrapped th index lookup.
        :param wdim: width of the weight kernel.
        :param pcw: 3D weight kernel (excitatory or inhibitory).
        :return: update matrix to be added to/subtracted from the network.
        '''

        # The goal is to return an update matrix that can be added/subtracted
        # from the posecell matrix
        pca_new = np.zeros([PC_DIM_XY, PC_DIM_XY, PC_DIM_TH])

        # only nonzero posecell values contribute energy
        indices = np.nonzero(self.cells)

        # builtin zip replaces itertools.izip so the module also runs on
        # Python 3 (identical iteration order and values)
        for i, j, k in zip(*indices):
            pca_new[np.ix_(xywrap[i:i+wdim],
                           xywrap[j:j+wdim],
                           thwrap[k:k+wdim])] += self.cells[i, j, k]*pcw

        return pca_new


    def get_pc_max(self, xywrap, thwrap):
        '''Find the x, y, th center of the activity in the network.

        Averages a small region around the most active cell, then decodes
        each axis with a population vector so the toroidal wrap-around is
        handled correctly.

        :return: a 3-tuple (x, y, th) of float indices of the activity centre.
        '''

        x, y, z = np.unravel_index(np.argmax(self.cells), self.cells.shape)

        z_posecells = np.zeros([PC_DIM_XY, PC_DIM_XY, PC_DIM_TH])

        # copy the activity in a (2*PC_CELLS_TO_AVG)^3 neighbourhood of the
        # peak into an otherwise empty network
        zval = self.cells[np.ix_(
            xywrap[x:x+PC_CELLS_TO_AVG*2],
            xywrap[y:y+PC_CELLS_TO_AVG*2],
            thwrap[z:z+PC_CELLS_TO_AVG*2]
        )]
        z_posecells[np.ix_(
            PC_AVG_XY_WRAP[x:x+PC_CELLS_TO_AVG*2],
            PC_AVG_XY_WRAP[y:y+PC_CELLS_TO_AVG*2],
            PC_AVG_TH_WRAP[z:z+PC_CELLS_TO_AVG*2]
        )] = zval

        # get the sums for each axis
        x_sums = np.sum(np.sum(z_posecells, 2), 1)
        y_sums = np.sum(np.sum(z_posecells, 2), 0)
        th_sums = np.sum(np.sum(z_posecells, 1), 0)

        # now find the (x, y, th) using population vector decoding to handle
        # the wrap around
        x = (np.arctan2(np.sum(PC_XY_SUM_SIN_LOOKUP*x_sums),
                        np.sum(PC_XY_SUM_COS_LOOKUP*x_sums)) * \
            PC_DIM_XY/(2*np.pi)) % (PC_DIM_XY)

        y = (np.arctan2(np.sum(PC_XY_SUM_SIN_LOOKUP*y_sums),
                        np.sum(PC_XY_SUM_COS_LOOKUP*y_sums)) * \
            PC_DIM_XY/(2*np.pi)) % (PC_DIM_XY)

        th = (np.arctan2(np.sum(PC_TH_SUM_SIN_LOOKUP*th_sums),
                         np.sum(PC_TH_SUM_COS_LOOKUP*th_sums)) * \
             PC_DIM_TH/(2*np.pi)) % (PC_DIM_TH)

        return (x, y, th)

    def __call__(self, view_cell, vtrans, vrot):
        '''Execute an interation of pose cells.

        :param view_cell: the last most activated view cell.
        :param vtrans: the translation of the robot given by odometry.
        :param vrot: the rotation of the robot given by odometry.
        :return: a 3D-tuple with the (x, y, th) index of most active pose cell.
        '''
        vtrans = vtrans*POSECELL_VTRANS_SCALING

        # if this isn't a new vt then add the energy at its associated posecell
        # location
        if not view_cell.first:
            # Upper bound is PC_DIM_*-1: the original clipped to PC_DIM_*
            # (a 1-based MATLAB leftover) which can index out of range on a
            # 0-based axis of size PC_DIM_*.
            # NOTE(review): the lower bound of 1 also looks like a 1-based
            # leftover (cell 0 can never be injected) — confirm intent.
            act_x = np.min([np.max([int(np.floor(view_cell.x_pc)), 1]), PC_DIM_XY - 1])
            act_y = np.min([np.max([int(np.floor(view_cell.y_pc)), 1]), PC_DIM_XY - 1])
            act_th = np.min([np.max([int(np.floor(view_cell.th_pc)), 1]), PC_DIM_TH - 1])

            # this decays the amount of energy that is injected at the vt's
            # posecell location
            # this is important as the posecell Posecells will errounously snap
            # for bad vt matches that occur over long periods (eg a bad matches that
            # occur while the agent is stationary). This means that multiple vt's
            # need to be recognised for a snap to happen
            energy = PC_VT_INJECT_ENERGY*(1./30.)*(30 - np.exp(1.2 * view_cell.decay))
            if energy > 0:
                self.cells[act_x, act_y, act_th] += energy

        # local excitation - PC_le = PC elements * PC weights
        self.cells = self.compute_activity_matrix(PC_E_XY_WRAP,
                                                  PC_E_TH_WRAP,
                                                  PC_W_E_DIM,
                                                  PC_W_EXCITE)

        # local inhibition - PC_li = PC_le - PC_le elements * PC weights
        self.cells = self.cells-self.compute_activity_matrix(PC_I_XY_WRAP,
                                                             PC_I_TH_WRAP,
                                                             PC_W_I_DIM,
                                                             PC_W_INHIB)

        # local global inhibition - PC_gi = PC_li elements - inhibition
        self.cells[self.cells < PC_GLOBAL_INHIB] = 0
        self.cells[self.cells >= PC_GLOBAL_INHIB] -= PC_GLOBAL_INHIB

        # normalization
        total = np.sum(self.cells)
        self.cells = self.cells/total

        # Path Integration
        # vtrans affects xy direction
        # shift in each th given by the th
        # (range replaces xrange for Python 3 compatibility)
        for dir_pc in range(PC_DIM_TH):
            direction = np.float64(dir_pc-1) * PC_C_SIZE_TH
            # N,E,S,W are straightforward
            if (direction == 0):
                self.cells[:,:,dir_pc] = \
                    self.cells[:,:,dir_pc] * (1.0 - vtrans) + \
                    np.roll(self.cells[:,:,dir_pc], 1, 1)*vtrans

            elif direction == np.pi/2:
                self.cells[:,:,dir_pc] = \
                    self.cells[:,:,dir_pc]*(1.0 - vtrans) + \
                    np.roll(self.cells[:,:,dir_pc], 1, 0)*vtrans

            elif direction == np.pi:
                self.cells[:,:,dir_pc] = \
                    self.cells[:,:,dir_pc]*(1.0 - vtrans) + \
                    np.roll(self.cells[:,:,dir_pc], -1, 1)*vtrans

            elif direction == 3*np.pi/2:
                self.cells[:,:,dir_pc] = \
                    self.cells[:,:,dir_pc]*(1.0 - vtrans) + \
                    np.roll(self.cells[:,:,dir_pc], -1, 0)*vtrans

            else:
                # rotate the layer into the first quadrant, shift there, then
                # rotate back
                pca90 = np.rot90(self.cells[:,:,dir_pc],
                                 int(np.floor(direction *2/np.pi)))
                dir90 = direction - int(np.floor(direction*2/np.pi)) * np.pi/2

                # extend the Posecells one unit in each direction (max supported at the moment)
                # work out the weight contribution to the NE cell from the SW, NW, SE cells
                # given vtrans and the direction
                # weight_sw = v * cos(th) * v * sin(th)
                # weight_se = (1 - v * cos(th)) * v * sin(th)
                # weight_nw = (1 - v * sin(th)) * v * sin(th)
                # weight_ne = 1 - weight_sw - weight_se - weight_nw
                # think in terms of NE divided into 4 rectangles with the sides
                # given by vtrans and the angle
                pca_new = np.zeros([PC_DIM_XY+2, PC_DIM_XY+2])
                pca_new[1:-1, 1:-1] = pca90

                weight_sw = (vtrans**2) *np.cos(dir90) * np.sin(dir90)
                weight_se = vtrans*np.sin(dir90) - \
                            (vtrans**2) * np.cos(dir90) * np.sin(dir90)
                weight_nw = vtrans*np.cos(dir90) - \
                            (vtrans**2) *np.cos(dir90) * np.sin(dir90)
                weight_ne = 1.0 - weight_sw - weight_se - weight_nw

                pca_new = pca_new*weight_ne + \
                    np.roll(pca_new, 1, 1) * weight_nw + \
                    np.roll(pca_new, 1, 0) * weight_se + \
                    np.roll(np.roll(pca_new, 1, 1), 1, 0) * weight_sw

                # fold the border energy back into the wrapped layer
                # NOTE(review): `pca90[1, 1:]` mirrors the historical port;
                # `pca90[0, 1:]` would be the symmetric choice — confirm
                # against the MATLAB original before changing.
                pca90 = pca_new[1:-1, 1:-1]
                pca90[1:, 0] = pca90[1:, 0] + pca_new[2:-1, -1]
                pca90[1, 1:] = pca90[1, 1:] + pca_new[-1, 2:-1]
                pca90[0, 0] = pca90[0, 0] + pca_new[-1, -1]

                #unrotate the pose cell xy layer
                self.cells[:,:,dir_pc] = np.rot90(pca90,
                                                  4 - int(np.floor(direction * 2/np.pi)))

        # Path Integration - Theta
        # Shift the pose cells +/- theta given by vrot, interpolating between
        # the two nearest whole-cell shifts
        if vrot != 0:
            weight = (np.abs(vrot)/PC_C_SIZE_TH)%1
            if weight == 0:
                weight = 1.0

            shift1 = int(np.sign(vrot) * int(np.floor(abs(vrot)/PC_C_SIZE_TH)))
            shift2 = int(np.sign(vrot) * int(np.ceil(abs(vrot)/PC_C_SIZE_TH)))
            self.cells = np.roll(self.cells, shift1, 2) * (1.0 - weight) + \
                         np.roll(self.cells, shift2, 2) * (weight)

        self.active = self.get_pc_max(PC_AVG_XY_WRAP, PC_AVG_TH_WRAP)
        return self.active
class ViewCell(object):
    '''A single view cell.

    A ViewCell object is used to store the information of a single view cell:
    its visual template, the pose-cell indices it was created at, and its
    activation decay.
    '''
    _ID = 0  # class-wide counter used to assign unique cell ids

    def __init__(self, template, x_pc, y_pc, th_pc):
        '''Initialize a ViewCell.

        :param template: a 1D numpy array with the cell template.
        :param x_pc: the x position relative to the pose cell.
        :param y_pc: the y position relative to the pose cell.
        :param th_pc: the th position relative to the pose cell.
        '''
        self.id = ViewCell._ID
        self.template = template
        self.x_pc = x_pc
        self.y_pc = y_pc
        self.th_pc = th_pc
        self.decay = VT_ACTIVE_DECAY
        self.first = True   # True until the cell is re-activated after another cell
        self.exps = []      # Experience nodes associated with this cell

        ViewCell._ID += 1

class ViewCells(object):
    '''View Cell module.

    Stores all view cells and matches each incoming image against them,
    creating a new cell when no stored template is similar enough.
    '''

    def __init__(self):
        '''Initializes the View Cell module.'''
        self.size = 0
        self.cells = []
        self.prev_cell = None

    def _create_template(self, img):
        '''Compute the sum of columns in a sub-image and normalize it.

        :param img: the full gray-scaled image as a 2D numpy array.
        :return: the view template as a 1D numpy array.
        '''
        subimg = img[IMAGE_VT_Y_RANGE, IMAGE_VT_X_RANGE]
        x_sums = np.sum(subimg, 0)
        return x_sums/np.sum(x_sums, dtype=np.float32)

    def _score(self, template):
        '''Compute the similarity of a given template with all view cells.

        Also applies the global decay to every cell (side effect).

        :param template: 1D numpy array.
        :return: list of similarity scores, one per stored cell
            (lower is more similar).
        '''
        scores = []
        for cell in self.cells:

            cell.decay -= VT_GLOBAL_DECAY
            if cell.decay < 0:
                cell.decay = 0

            _, s = compare_segments(template, cell.template, VT_SHIFT_MATCH)
            scores.append(s)

        return scores

    def create_cell(self, template, x_pc, y_pc, th_pc):
        '''Create a new View Cell and register it into the View Cell module

        :param template: 1D numpy array.
        :param x_pc: index x of the current pose cell.
        :param y_pc: index y of the current pose cell.
        :param th_pc: index th of the current pose cell.
        :return: the new View Cell.
        '''
        cell = ViewCell(template, x_pc, y_pc, th_pc)
        self.cells.append(cell)
        self.size += 1
        return cell

    def __call__(self, img, x_pc, y_pc, th_pc):
        '''Execute an interation of visual template.

        :param img: the full gray-scaled image as a 2D numpy array.
        :param x_pc: index x of the current pose cell.
        :param y_pc: index y of the current pose cell.
        :param th_pc: index th of the current pose cell.
        :return: the active view cell.
        '''
        template = self._create_template(img)
        scores = self._score(template)

        # TO REMOVE: debug aid exposing the cells currently under the match
        # threshold.
        # NOTE(review): the comparison below was reconstructed — the source
        # text was corrupted here (missing operator); confirm against the
        # upstream repository.
        if scores:
            self.activated_cells = np.array(self.cells)[
                np.array(scores)*template.size < VT_MATCH_THRESHOLD]

        # no stored cell, or even the best match is above the threshold:
        # this is a new scene, so create a new view cell for it
        if not scores or np.min(scores)*template.size > VT_MATCH_THRESHOLD:
            cell = self.create_cell(template, x_pc, y_pc, th_pc)
            self.prev_cell = cell
            return cell

        # otherwise re-activate the best-matching cell
        i = np.argmin(scores)
        cell = self.cells[i]
        cell.decay += VT_ACTIVE_DECAY

        # the cell is only "first" until it is re-entered from another cell;
        # pose_cells uses this flag to decide whether to inject energy
        if self.prev_cell != cell:
            cell.first = False

        self.prev_cell = cell
        return cell
class VisualOdometry(object):
    '''Visual Odometry Module.

    Estimates per-frame translation and rotation by comparing normalized
    column-intensity profiles of two image sub-windows against the profiles
    from the previous frame, and integrates them into a raw pose.
    '''

    def __init__(self):
        '''Initializes the visual odometry module.'''
        profile_len = IMAGE_ODO_X_RANGE.stop
        self.old_vtrans_template = np.zeros(profile_len)
        self.old_vrot_template = np.zeros(profile_len)
        # raw integrated odometry: [x, y, heading in radians]
        self.odometry = [0., 0., np.pi/2]

    def _create_template(self, subimg):
        '''Compute the sum of columns in subimg and normalize it.

        :param subimg: a sub-image as a 2D numpy array.
        :return: the view template as a 1D numpy array.
        '''
        column_sums = np.sum(subimg, 0)
        mean_intensity = np.sum(column_sums, dtype=np.float32)/column_sums.size
        return column_sums/mean_intensity

    def __call__(self, img):
        '''Execute an interation of visual odometry.

        :param img: the full gray-scaled image as a 2D numpy array.
        :return: the deslocation and rotation of the image from the previous
            frame as a 2D tuple of floats (vtrans, vrot).
        '''
        # --- translation: profile difference in the vtrans window ---
        vtrans_profile = self._create_template(
            img[IMAGE_VTRANS_Y_RANGE, IMAGE_ODO_X_RANGE])
        _, best_diff = compare_segments(
            vtrans_profile,
            self.old_vtrans_template,
            VISUAL_ODO_SHIFT_MATCH
        )
        vtrans = best_diff*VTRANS_SCALE

        # reject implausibly large jumps (e.g. scene cuts) as outliers
        if vtrans > 10:
            vtrans = 0

        self.old_vtrans_template = vtrans_profile

        # --- rotation: best horizontal shift in the vrot window ---
        vrot_profile = self._create_template(
            img[IMAGE_VROT_Y_RANGE, IMAGE_ODO_X_RANGE])
        best_offset, _ = compare_segments(
            vrot_profile,
            self.old_vrot_template,
            VISUAL_ODO_SHIFT_MATCH
        )
        # pixel shift -> degrees (assuming ~50 deg field of view) -> radians
        vrot = best_offset*(50./img.shape[1])*np.pi/180
        self.old_vrot_template = vrot_profile

        # integrate raw odometry: heading first, then displacement along it
        self.odometry[2] += vrot
        self.odometry[0] += vtrans*np.cos(self.odometry[2])
        self.odometry[1] += vtrans*np.sin(self.odometry[2])

        return vtrans, vrot