├── CMakeLists.txt ├── LICENSE ├── README.md ├── config └── jackal.rviz ├── csv ├── complex_out.csv └── long_out.csv ├── images ├── location_drawer.png └── range_drawer.png ├── launch ├── auto_mover.launch ├── live_plotting.launch ├── multi_full_jackal.launch └── multi_jackal_ukf.launch ├── package.xml ├── path └── route1.xml ├── requirements.txt ├── rosbag ├── 2021-02-15-11-48-56.bag ├── complex_2021-02-22-08-56-56.bag ├── long_complex_2021-02-25-10-39-09.bag └── output.bag └── src ├── __init__.py ├── actual_robot_position.py ├── csv_creator.py ├── csv_creator2.py ├── efk ├── __init__.py ├── fusion_ekf.py └── kalman_filter.py ├── efk_uwb_localization.py ├── jackal.py ├── jackal_motion.py ├── live_plotter.py ├── location_drawer.py ├── range_drawer.py ├── rsme_plotter.py ├── tag_ids.json ├── ukf ├── __init__.py ├── datapoint.py ├── fusion_ukf.py ├── measurement_predictor.py ├── state.py ├── state_predictor.py ├── state_updater.py └── util.py └── ukf_uwb_localization.py /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(uwb_localization) 3 | 4 | ## Compile as C++11, supported in ROS Kinetic and newer 5 | # add_compile_options(-std=c++11) 6 | 7 | ## Find catkin macros and libraries 8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) 9 | ## is used, also find other catkin packages 10 | find_package(catkin REQUIRED COMPONENTS 11 | rospy 12 | ) 13 | 14 | ## System dependencies are found with CMake's conventions 15 | # find_package(Boost REQUIRED COMPONENTS system) 16 | 17 | 18 | ## Uncomment this if the package has a setup.py. This macro ensures 19 | ## modules and global scripts declared therein get installed 20 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html 21 | # catkin_python_setup() 22 | 23 | ################################################ 24 | ## Declare ROS messages, services and actions ## 25 | ################################################ 26 | 27 | ## To declare and build messages, services or actions from within this 28 | ## package, follow these steps: 29 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in 30 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). 31 | ## * In the file package.xml: 32 | ## * add a build_depend tag for "message_generation" 33 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET 34 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in 35 | ## but can be declared for certainty nonetheless: 36 | ## * add a exec_depend tag for "message_runtime" 37 | ## * In this file (CMakeLists.txt): 38 | ## * add "message_generation" and every package in MSG_DEP_SET to 39 | ## find_package(catkin REQUIRED COMPONENTS ...) 40 | ## * add "message_runtime" and every package in MSG_DEP_SET to 41 | ## catkin_package(CATKIN_DEPENDS ...) 42 | ## * uncomment the add_*_files sections below as needed 43 | ## and list every .msg/.srv/.action file to be processed 44 | ## * uncomment the generate_messages entry below 45 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) 
46 | 47 | ## Generate messages in the 'msg' folder 48 | # add_message_files( 49 | # FILES 50 | # Message1.msg 51 | # Message2.msg 52 | # ) 53 | 54 | ## Generate services in the 'srv' folder 55 | # add_service_files( 56 | # FILES 57 | # Service1.srv 58 | # Service2.srv 59 | # ) 60 | 61 | ## Generate actions in the 'action' folder 62 | # add_action_files( 63 | # FILES 64 | # Action1.action 65 | # Action2.action 66 | # ) 67 | 68 | ## Generate added messages and services with any dependencies listed here 69 | # generate_messages( 70 | # DEPENDENCIES 71 | # std_msgs # Or other packages containing msgs 72 | # ) 73 | 74 | ################################################ 75 | ## Declare ROS dynamic reconfigure parameters ## 76 | ################################################ 77 | 78 | ## To declare and build dynamic reconfigure parameters within this 79 | ## package, follow these steps: 80 | ## * In the file package.xml: 81 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure" 82 | ## * In this file (CMakeLists.txt): 83 | ## * add "dynamic_reconfigure" to 84 | ## find_package(catkin REQUIRED COMPONENTS ...) 85 | ## * uncomment the "generate_dynamic_reconfigure_options" section below 86 | ## and list every .cfg file to be processed 87 | 88 | ## Generate dynamic reconfigure parameters in the 'cfg' folder 89 | # generate_dynamic_reconfigure_options( 90 | # cfg/DynReconf1.cfg 91 | # cfg/DynReconf2.cfg 92 | # ) 93 | 94 | ################################### 95 | ## catkin specific configuration ## 96 | ################################### 97 | ## The catkin_package macro generates cmake config files for your package 98 | ## Declare things to be passed to dependent projects 99 | ## INCLUDE_DIRS: uncomment this if your package contains header files 100 | ## LIBRARIES: libraries you create in this project that dependent projects also need 101 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need 102 | ## DEPENDS: system dependencies of this project that dependent projects also need 103 | catkin_package( 104 | # INCLUDE_DIRS include 105 | # LIBRARIES uwb_localization 106 | # CATKIN_DEPENDS rospy 107 | # DEPENDS system_lib 108 | ) 109 | 110 | ########### 111 | ## Build ## 112 | ########### 113 | 114 | ## Specify additional locations of header files 115 | ## Your package locations should be listed before other locations 116 | include_directories( 117 | # include 118 | ${catkin_INCLUDE_DIRS} 119 | ) 120 | 121 | ## Declare a C++ library 122 | # add_library(${PROJECT_NAME} 123 | # src/${PROJECT_NAME}/uwb_localization.cpp 124 | # ) 125 | 126 | ## Add cmake target dependencies of the library 127 | ## as an example, code may need to be generated before libraries 128 | ## either from message generation or dynamic reconfigure 129 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 130 | 131 | ## Declare a C++ executable 132 | ## With catkin_make all packages are built within a single CMake context 133 | ## The recommended prefix ensures that target names across packages don't collide 134 | # add_executable(${PROJECT_NAME}_node src/uwb_localization_node.cpp) 135 | 136 | ## Rename C++ executable without prefix 137 | ## The above recommended prefix causes long target names, the following renames the 138 | ## target back to the shorter version for ease of user use 139 | ## e.g. 
"rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" 140 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") 141 | 142 | ## Add cmake target dependencies of the executable 143 | ## same as for the library above 144 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 145 | 146 | ## Specify libraries to link a library or executable target against 147 | # target_link_libraries(${PROJECT_NAME}_node 148 | # ${catkin_LIBRARIES} 149 | # ) 150 | 151 | ############# 152 | ## Install ## 153 | ############# 154 | 155 | # all install targets should use catkin DESTINATION variables 156 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html 157 | 158 | ## Mark executable scripts (Python etc.) for installation 159 | ## in contrast to setup.py, you can choose the destination 160 | # catkin_install_python(PROGRAMS 161 | # scripts/my_python_script 162 | # DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} 163 | # ) 164 | 165 | ## Mark executables for installation 166 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html 167 | # install(TARGETS ${PROJECT_NAME}_node 168 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} 169 | # ) 170 | 171 | ## Mark libraries for installation 172 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html 173 | # install(TARGETS ${PROJECT_NAME} 174 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} 175 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} 176 | # RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} 177 | # ) 178 | 179 | ## Mark cpp header files for installation 180 | # install(DIRECTORY include/${PROJECT_NAME}/ 181 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} 182 | # FILES_MATCHING PATTERN "*.h" 183 | # PATTERN ".svn" EXCLUDE 184 | # ) 185 | 186 | ## Mark other files for installation (e.g. launch and bag files, etc.) 187 | # install(FILES 188 | # # myfile1 189 | # # myfile2 190 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} 191 | # ) 192 | 193 | ############# 194 | ## Testing ## 195 | ############# 196 | 197 | ## Add gtest based cpp test target and link libraries 198 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_uwb_localization.cpp) 199 | # if(TARGET ${PROJECT_NAME}-test) 200 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME}) 201 | # endif() 202 | 203 | ## Add folders to be run by python nosetests 204 | # catkin_add_nosetests(test) 205 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021, AUVSL 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | 2. Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | 3. Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 
19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Jackal Ad-Hoc Ultra-wideband localization 2 | 3 |

4 | [README badges: GitHub commit activity, Stars, Forks, Issues, GitHub] 16 |

17 | 18 | ## 1. Table of contents 19 | 20 | - [Jackal Ad-Hoc Ultra-wideband localization](#jackal-ad-hoc-ultra-wideband-localization) 21 | - [1. Table of contents](#1-table-of-contents) 22 | - [2. Project Description](#2-project-description) 23 | - [3. Installation](#3-installation) 24 | - [4. Files](#4-files) 25 | - [4.1. ukf_uwb_localization.py](#41-ukf_uwb_localizationpy) 26 | - [4.2. ekf_uwb_localization.py](#42-ekf_uwb_localizationpy) 27 | - [4.3. csv_creator.py](#43-csv_creatorpy) 28 | - [4.4. live_plotter.py](#44-live_plotterpy) 29 | - [4.5. location_drawer.py](#45-location_drawerpy) 30 | - [4.6. range_drawer.py](#46-range_drawerpy) 31 | - [4.7. actual_robot_position.py](#47-actual_robot_positionpy) 32 | 33 | ## 2. Project Description 34 | 35 | This project implements a localization algorithm in which a [Jackal](https://clearpathrobotics.com/jackal-small-unmanned-ground-vehicle/) equipped with [Decawave Ultra-wideband](https://www.decawave.com/product/mdek1001-deployment-kit/) sensors localizes itself using both mobile and stationary anchors in the world. 36 | 37 | ## 3. Installation 38 | 39 | This project uses [`ROS Melodic`](http://wiki.ros.org/melodic) as its ROS backend. 40 | 41 | To set up: 42 | 43 | 1. Either create a catkin workspace or navigate to its `src` folder 44 | 2. `git clone git@github.com:PulkitRustagi/UWB-Localization.git` 45 | 3. This project is also associated with [UWB-Jackal-World](https://github.com/AUVSL/UWB-Jackal-World) 46 | - Follow this page if you want to install the Gazebo environment for this project, though it is not necessary. 47 | 4. If you choose not to install the Gazebo environment, be sure to have the `gtec_rosmsgs` package installed: 48 | - Create a `gtec` folder in your catkin workspace's `src` 49 | - `git clone https://github.com/valentinbarral/rosmsgs` 50 | - This installs the custom UWB ranging messages 51 | - Navigate to your base `catkin_ws` folder 52 | - `rospack profile` 53 | - `catkin_make` 54 | - `source ~/catkin_ws/devel/setup.bash` 55 | - This step is very important. Be sure to run it every time you open a new terminal; if you do not, you will get errors about missing packages when trying to run the world. 56 | - To make things easier, if you only have a single ROS build and `catkin_ws`, you can run: 57 | - `echo "source ~/catkin_ws/devel/setup.bash" >> ~/.bashrc` 58 | - This way you will not have to run the source command every time. 59 | 60 | ## 4. Files 61 | 62 | ### 4.1. ukf_uwb_localization.py 63 | 64 | This runs an Unscented Kalman Filter based on a "constant turn rate and velocity magnitude" (CTRV) nonlinear process model. 65 | 66 | This is the program you should run for the more accurate localization estimates and is what this project focuses on. 67 | 68 | The CTRV model treats acceleration as noise and assumes that the velocity and turn rate are constant. This is better suited to mobile robots that turn following the bicycle model. 69 | 70 | This Unscented Kalman Filter publishes at a rate of 60 Hz and takes in the Jackal's odometry and the UWB ranges with their associated anchor point locations. 71 | 72 | For a better explanation of the process model and how this UKF works in general, see: 73 | - https://fevemania.github.io/blog/mathematic-formula-note-of-unscented-kalman-filter/ 74 | 75 | To use this: 76 | 77 | - `rosrun uwb_localization ukf_uwb_localization.py` 78 | 79 | 80 | ### 4.2. ekf_uwb_localization.py 81 | 82 | This runs a Kalman Filter with a linear process model for the x, y, and z dimensions. This model, however, is better suited to UAVs, which are unconstrained in their motion, and has thus been discontinued for this project. 83 | 84 | This Kalman Filter publishes at a rate of 60 Hz and takes in the Jackal's odometry and the UWB ranges with their associated anchor point locations. 85 | 86 | To use this: 87 | 88 | - `rosrun uwb_localization ekf_uwb_localization.py` 89 | 90 | ### 4.3. csv_creator.py 91 | 92 | This converts a rosbag recorded with the command: 93 | 94 | `rosbag record /ground_truth/state /gtec/toa/anchors /gtec/toa/ranging /odometry/filtered` 95 | 96 | into a CSV file that can be read and used offline. This can be useful if you want to remove ROS from the equation altogether (a minimal sketch of how to parse the resulting file offline is given at the end of this README). 97 | 98 | To use this: 99 | 100 | 1. `rosrun uwb_localization csv_creator.py` 101 | 2. Run the rosbag you want to convert (e.g. `rosbag play *.bag`) 102 | - Be careful, because csv_creator assumes that the anchors are stationary 103 | 104 | ### 4.4. live_plotter.py 105 | 106 | This is an interface to create live matplotlib plots. Use the `LivePlotter` class to create the live plots. 107 | Ignore the warnings or errors that appear when running this. 108 | 109 | ### 4.5. location_drawer.py 110 | 111 | This plots the robot's positions in real time in a matplotlib graph. 112 | 113 | ![Location Plotter Example](images/location_drawer.png) 114 | 115 | By default it plots: 116 | - The actual robot's position (`/ground_truth/state`) 117 | - The odometry position (`/odometry/filtered`) 118 | - The UWB localization output (`/jackal/uwb/odom`) 119 | 120 | To use: 121 | - `rosrun uwb_localization location_drawer.py` 122 | - Default parameters 123 | - `rosrun uwb_localization location_drawer.py /jackal/uwb/odom` 124 | - Custom odometry topic 125 | 126 | 127 | ### 4.6. range_drawer.py 128 | 129 | This plots in real time the range measurements from the world's anchor points to the tags in a matplotlib graph. 130 | 131 | For these plots it is normal for range measurements to come and go, since UWB sensors lose readings when the line of sight is obstructed. 132 | 133 | ![Range Plotter Example](images/range_drawer.png) 134 | 135 | To use: 136 | - `rosrun uwb_localization range_drawer.py` 137 | 138 | ### 4.7. actual_robot_position.py 139 | 140 | This is a deprecated program that used to output the robot's position based on its base_link tf.
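Below is a minimal sketch (not part of the package) of how the CSV produced by `csv_creator.py` could be parsed offline, as mentioned in section 4.3. It assumes the row layouts written by `csv_creator.py`: the first column is the `DataType` code written as an integer, the last column is the timestamp from `rospy.Time.now().to_nsec()`, UWB rows have 9 columns, and odometry/ground-truth rows have 8. The path `csv/long_out.csv` is only an example; `csv_creator2.py` writes a different column order and is not covered here.

```python
# Offline reader sketch for csv_creator.py output -- assumptions as noted above.
import csv


def read_recording(path):
    """Yield (type_code, payload, t_ns) for every row of a csv_creator.py file."""
    with open(path) as f:
        for row in csv.reader(f):
            type_code = int(float(row[0]))           # DataType code (UWB / ODOMETRY / GROUND_TRUTH), assumed numeric
            t_ns = int(row[-1])                      # nanosecond timestamp, kept as an integer
            payload = [float(v) for v in row[1:-1]]  # measurement fields in between
            yield type_code, payload, t_ns


if __name__ == "__main__":
    for type_code, payload, t_ns in read_recording("csv/long_out.csv"):
        if len(payload) == 7:
            # UWB row: range [m], anchor x/y/z, tag offset x/y/z
            rng, ax, ay, az, tx, ty, tz = payload
        else:
            # Odometry or ground-truth row: x, y, z, v, heading, yaw rate
            px, py, pz, v, theta, theta_yaw = payload
```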
141 | -------------------------------------------------------------------------------- /config/jackal.rviz: -------------------------------------------------------------------------------- 1 | Panels: 2 | - Class: rviz/Displays 3 | Help Height: 78 4 | Name: Displays 5 | Property Tree Widget: 6 | Expanded: 7 | - /Global Options1 8 | - /Odometry1/Covariance1/Position1 9 | - /Odometry1/Covariance1/Orientation1 10 | Splitter Ratio: 0.5 11 | Tree Height: 555 12 | - Class: rviz/Selection 13 | Name: Selection 14 | - Class: rviz/Tool Properties 15 | Expanded: 16 | - /2D Pose Estimate1 17 | - /2D Nav Goal1 18 | - /Publish Point1 19 | Name: Tool Properties 20 | Splitter Ratio: 0.5886790156364441 21 | - Class: rviz/Views 22 | Expanded: 23 | - /Current View1 24 | Name: Views 25 | Splitter Ratio: 0.5 26 | - Class: rviz/Time 27 | Experimental: false 28 | Name: Time 29 | SyncMode: 0 30 | SyncSource: "" 31 | Preferences: 32 | PromptSaveOnExit: true 33 | Toolbars: 34 | toolButtonStyle: 2 35 | Visualization Manager: 36 | Class: "" 37 | Displays: 38 | - Alpha: 0.5 39 | Cell Size: 1 40 | Class: rviz/Grid 41 | Color: 160; 160; 164 42 | Enabled: true 43 | Line Style: 44 | Line Width: 0.029999999329447746 45 | Value: Lines 46 | Name: Grid 47 | Normal Cell Count: 0 48 | Offset: 49 | X: 0 50 | Y: 0 51 | Z: 0 52 | Plane: XY 53 | Plane Cell Count: 10 54 | Reference Frame: 55 | Value: true 56 | - Alpha: 1 57 | Class: rviz/RobotModel 58 | Collision Enabled: false 59 | Enabled: true 60 | Links: 61 | All Links Enabled: true 62 | Expand Joint Details: false 63 | Expand Link Details: false 64 | Expand Tree: false 65 | Link Tree Style: Links in Alphabetic Order 66 | base_link: 67 | Alpha: 1 68 | Show Axes: false 69 | Show Trail: false 70 | chassis_link: 71 | Alpha: 1 72 | Show Axes: false 73 | Show Trail: false 74 | Value: true 75 | front_fender_link: 76 | Alpha: 1 77 | Show Axes: false 78 | Show Trail: false 79 | Value: true 80 | front_left_wheel_link: 81 | Alpha: 1 82 | Show Axes: false 83 | Show Trail: false 84 | Value: true 85 | front_mount: 86 | Alpha: 1 87 | Show Axes: false 88 | Show Trail: false 89 | front_right_wheel_link: 90 | Alpha: 1 91 | Show Axes: false 92 | Show Trail: false 93 | Value: true 94 | imu_link: 95 | Alpha: 1 96 | Show Axes: false 97 | Show Trail: false 98 | left_tag: 99 | Alpha: 1 100 | Show Axes: false 101 | Show Trail: false 102 | Value: true 103 | mid_mount: 104 | Alpha: 1 105 | Show Axes: false 106 | Show Trail: false 107 | navsat_link: 108 | Alpha: 1 109 | Show Axes: false 110 | Show Trail: false 111 | Value: true 112 | rear_fender_link: 113 | Alpha: 1 114 | Show Axes: false 115 | Show Trail: false 116 | Value: true 117 | rear_left_wheel_link: 118 | Alpha: 1 119 | Show Axes: false 120 | Show Trail: false 121 | Value: true 122 | rear_mount: 123 | Alpha: 1 124 | Show Axes: false 125 | Show Trail: false 126 | rear_right_wheel_link: 127 | Alpha: 1 128 | Show Axes: false 129 | Show Trail: false 130 | Value: true 131 | right_tag: 132 | Alpha: 1 133 | Show Axes: false 134 | Show Trail: false 135 | Value: true 136 | Name: RobotModel 137 | Robot Description: robot_description 138 | TF Prefix: "" 139 | Update Interval: 0 140 | Value: true 141 | Visual Enabled: true 142 | - Class: rviz/MarkerArray 143 | Enabled: true 144 | Marker Topic: /gtec/toa/anchors 145 | Name: MarkerArray 146 | Namespaces: 147 | "": true 148 | Queue Size: 1 149 | Value: true 150 | - Angle Tolerance: 0.10000000149011612 151 | Class: rviz/Odometry 152 | Covariance: 153 | Orientation: 154 | Alpha: 0.5 155 | Color: 255; 255; 127 156 
| Color Style: Unique 157 | Frame: Local 158 | Offset: 1 159 | Scale: 1 160 | Value: true 161 | Position: 162 | Alpha: 0.05000000074505806 163 | Color: 204; 51; 204 164 | Scale: 1 165 | Value: true 166 | Value: false 167 | Enabled: true 168 | Keep: 5 169 | Name: Odometry 170 | Position Tolerance: 0.10000000149011612 171 | Shape: 172 | Alpha: 1 173 | Axes Length: 1 174 | Axes Radius: 0.10000000149011612 175 | Color: 255; 25; 0 176 | Head Length: 0.30000001192092896 177 | Head Radius: 0.10000000149011612 178 | Shaft Length: 1 179 | Shaft Radius: 0.05000000074505806 180 | Value: Arrow 181 | Topic: /odometry/filtered 182 | Unreliable: false 183 | Value: true 184 | Enabled: true 185 | Global Options: 186 | Background Color: 48; 48; 48 187 | Default Light: true 188 | Fixed Frame: world 189 | Frame Rate: 30 190 | Name: root 191 | Tools: 192 | - Class: rviz/Interact 193 | Hide Inactive Objects: true 194 | - Class: rviz/MoveCamera 195 | - Class: rviz/Select 196 | - Class: rviz/FocusCamera 197 | - Class: rviz/Measure 198 | - Class: rviz/SetInitialPose 199 | Theta std deviation: 0.2617993950843811 200 | Topic: /initialpose 201 | X std deviation: 0.5 202 | Y std deviation: 0.5 203 | - Class: rviz/SetGoal 204 | Topic: /move_base_simple/goal 205 | - Class: rviz/PublishPoint 206 | Single click: true 207 | Topic: /clicked_point 208 | Value: true 209 | Views: 210 | Current: 211 | Class: rviz/Orbit 212 | Distance: 15.894097328186035 213 | Enable Stereo Rendering: 214 | Stereo Eye Separation: 0.05999999865889549 215 | Stereo Focal Distance: 1 216 | Swap Stereo Eyes: false 217 | Value: false 218 | Focal Point: 219 | X: 1.8004734516143799 220 | Y: -0.7027328610420227 221 | Z: 1.7531139850616455 222 | Focal Shape Fixed Size: true 223 | Focal Shape Size: 0.05000000074505806 224 | Invert Z Axis: false 225 | Name: Current View 226 | Near Clip Distance: 0.009999999776482582 227 | Pitch: 0.9803974628448486 228 | Target Frame: 229 | Value: Orbit (rviz) 230 | Yaw: 6.21858549118042 231 | Saved: ~ 232 | Window Geometry: 233 | Displays: 234 | collapsed: false 235 | Height: 846 236 | Hide Left Dock: false 237 | Hide Right Dock: true 238 | QMainWindow State: 
000000ff00000000fd000000040000000000000156000002b4fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003b000002b4000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f000002b4fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b000002b4000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b00000003efc0100000002fb0000000800540069006d00650100000000000004b00000024400fffffffb0000000800540069006d0065010000000000000450000000000000000000000354000002b400000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 239 | Selection: 240 | collapsed: false 241 | Time: 242 | collapsed: false 243 | Tool Properties: 244 | collapsed: false 245 | Views: 246 | collapsed: true 247 | Width: 1200 248 | X: 333 249 | Y: 60 250 | -------------------------------------------------------------------------------- /images/location_drawer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PulkitRustagi/UWB-Localization/b4407fa217e453237653971bff0bd5ff3d354e3a/images/location_drawer.png -------------------------------------------------------------------------------- /images/range_drawer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PulkitRustagi/UWB-Localization/b4407fa217e453237653971bff0bd5ff3d354e3a/images/range_drawer.png -------------------------------------------------------------------------------- /launch/auto_mover.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /launch/live_plotting.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /launch/multi_full_jackal.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /launch/multi_jackal_ukf.launch: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | uwb_localization 4 | 0.0.0 5 | The uwb_localization package 6 | 7 | 8 | 9 | 10 
| marius 11 | 12 | 13 | 14 | 15 | 16 | TODO 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | catkin 52 | rospy 53 | rospy 54 | rospy 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /path/route1.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | scipy==1.2.3 2 | rospkg_modules==1.3.0 3 | matplotlib==2.2.5 4 | numpy==1.16.6 5 | rospkg==1.3.0 -------------------------------------------------------------------------------- /rosbag/2021-02-15-11-48-56.bag: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PulkitRustagi/UWB-Localization/b4407fa217e453237653971bff0bd5ff3d354e3a/rosbag/2021-02-15-11-48-56.bag -------------------------------------------------------------------------------- /rosbag/complex_2021-02-22-08-56-56.bag: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PulkitRustagi/UWB-Localization/b4407fa217e453237653971bff0bd5ff3d354e3a/rosbag/complex_2021-02-22-08-56-56.bag -------------------------------------------------------------------------------- /rosbag/long_complex_2021-02-25-10-39-09.bag: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PulkitRustagi/UWB-Localization/b4407fa217e453237653971bff0bd5ff3d354e3a/rosbag/long_complex_2021-02-25-10-39-09.bag -------------------------------------------------------------------------------- /rosbag/output.bag: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PulkitRustagi/UWB-Localization/b4407fa217e453237653971bff0bd5ff3d354e3a/rosbag/output.bag -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | -------------------------------------------------------------------------------- /src/actual_robot_position.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | # coding=utf-8 3 | from __future__ import print_function 4 | 5 | import sys 6 | 7 | import rospy 8 | import tf 9 | from geometry_msgs.msg import Point 10 | 11 | if __name__ == "__main__": 12 | rospy.init_node("robot_position_publisher_node") 13 | 14 | if len(sys.argv) < 3: 15 | raise ValueError("Expecting arugments: world_link robot_base_link") 16 | 17 | else: 18 | world_link, robot_base_link = sys.argv[1], sys.argv[2] 19 | 20 | name = "/data_drawer/robot_pose" 21 | 22 | pub = rospy.Publisher(name, Point, queue_size=1) 23 | tf_listener = tf.TransformListener() 24 | 25 | rate = rospy.Rate(100) 26 | 27 | point = Point() 28 | 29 | while not rospy.is_shutdown(): 30 | try: 31 | (trans, rot) = tf_listener.lookupTransform(world_link, robot_base_link, rospy.Time(0)) 32 | 33 | point.x, point.y = trans[0], trans[1] 34 | 35 | pub.publish(point) 36 | except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException): 37 | print("Unable to find transform") 38 | 39 | rate.sleep() 40 | -------------------------------------------------------------------------------- /src/csv_creator.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding=utf-8 3 | 4 | import rospy 5 | from gtec_msgs.msg import Ranging 6 | from nav_msgs.msg import Odometry 7 | from tf.transformations import euler_from_quaternion 8 | from visualization_msgs.msg import MarkerArray 9 | 10 | from ukf.datapoint import DataType 11 | 12 | 13 | class Recorder(object): 14 | def __init__(self, out="out.csv"): 15 | self.data = [] 16 | self.out = out 17 | 18 | self.anchor_poses = dict() 19 | 20 | self.tag_offset = { 21 | 1: [0, 0.162, 0.184], 22 | 0: [0, -0.162, 0.184] 23 | } 24 | 25 | anchors = '/gtec/toa/anchors' 26 | toa_ranging = '/gtec/toa/ranging' 27 | 28 | self.anchors_sub = rospy.Subscriber(anchors, MarkerArray, callback=self.add_anchors) 29 | ranging_sub = rospy.Subscriber(toa_ranging, Ranging, callback=self.add_ranging) 30 | 31 | odometry = '/odometry/filtered' 32 | odometry = rospy.Subscriber(odometry, Odometry, callback=self.create_odometry_callback(DataType.ODOMETRY)) 33 | 34 | odometry = '/ground_truth/state' 35 | odometry = rospy.Subscriber(odometry, Odometry, callback=self.create_odometry_callback(DataType.GROUND_TRUTH)) 36 | 37 | def create_odometry_callback(self, id): 38 | 39 | def add_odometry(msg): 40 | t = self.get_time() 41 | 42 | px = msg.pose.pose.position.x 43 | py = msg.pose.pose.position.y 44 | pz = msg.pose.pose.position.z 45 | 46 | v = msg.twist.twist.linear.x 47 | theta = euler_from_quaternion(( 48 | msg.pose.pose.orientation.x, 49 | msg.pose.pose.orientation.y, 50 | msg.pose.pose.orientation.z, 51 | msg.pose.pose.orientation.w 52 | ))[2] 53 | 54 | theta_yaw = msg.twist.twist.angular.z 55 | 56 | self.data.append((id, px, py, pz, v, theta, theta_yaw, t)) 57 | 58 | return add_odometry 59 | 60 | def add_anchors(self, msg): 61 | # type: (MarkerArray) -> None 62 | 63 | for marker in msg.markers: 64 | self.anchor_poses[marker.id] = [marker.pose.position.x, marker.pose.position.y, marker.pose.position.z] 65 | 66 | def get_time(self): 67 | return rospy.Time.now().to_nsec() 68 | 69 | def add_ranging(self, msg): 70 | # type: (Ranging) -> None 71 | t = self.get_time() 72 | 73 | if msg.anchorId in self.anchor_poses: 74 | anchor_pose = self.anchor_poses[msg.anchorId] 75 | anchor_distance = msg.range / 1000. 
76 | 77 | tag = self.tag_offset[msg.tagId] 78 | 79 | self.data.append((DataType.UWB, anchor_distance, 80 | anchor_pose[0], anchor_pose[1], anchor_pose[2], 81 | tag[0], tag[1], tag[2], t)) 82 | 83 | def save(self): 84 | print("Saving", len(self.data), "datapoints") 85 | 86 | with open(self.out, "w") as file: 87 | file.writelines(",".join(map(str, d)) + '\n' for d in self.data) 88 | 89 | 90 | if __name__ == "__main__": 91 | rospy.init_node("csv_recorder", anonymous=True) 92 | 93 | rec = Recorder() 94 | 95 | rospy.spin() 96 | 97 | rec.save() 98 | -------------------------------------------------------------------------------- /src/csv_creator2.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding=utf-8 3 | 4 | import rospy 5 | from gtec_msgs.msg import Ranging 6 | from nav_msgs.msg import Odometry 7 | from sensor_msgs.msg import Imu 8 | from tf.transformations import euler_from_quaternion 9 | from visualization_msgs.msg import MarkerArray 10 | 11 | from ukf.datapoint import DataType 12 | 13 | 14 | class Recorder(object): 15 | def __init__(self, out="out.csv"): 16 | self.data = [] 17 | self.out = out 18 | 19 | self.anchor_poses = dict() 20 | 21 | self.tag_offset = { 22 | 1: [0, 0.162, 0.184], 23 | 0: [0, -0.162, 0.184], 24 | 25 | 3: [0, 0.162, 0.184], 26 | 2: [0, -0.162, 0.184] 27 | } 28 | 29 | anchors = '/gtec/toa/anchors' 30 | toa_ranging = '/gtec/toa/ranging' 31 | 32 | self.anchors_sub = rospy.Subscriber(anchors, MarkerArray, callback=self.add_anchors) 33 | ranging_sub = rospy.Subscriber(toa_ranging, Ranging, callback=self.add_ranging) 34 | 35 | id = 2 36 | 37 | odometry = '/Jackal{}/odometry/filtered'.format(id) 38 | odometry = rospy.Subscriber(odometry, Odometry, callback=self.create_odometry_callback(DataType.ODOMETRY)) 39 | 40 | odometry = '/Jackal{}/uwb/odom'.format(id) 41 | odometry = rospy.Subscriber(odometry, Odometry, callback=self.create_odometry_callback(DataType.ODOMETRY)) 42 | 43 | odometry = '/Jackal{}/ground_truth/state'.format(id) 44 | odometry = rospy.Subscriber(odometry, Odometry, callback=self.create_odometry_callback(DataType.GROUND_TRUTH)) 45 | 46 | imu = '/Jackal{}/imu/data'.format(id) 47 | imu = rospy.Subscriber(imu, Imu, callback=self.add_imu) 48 | 49 | print("Ready") 50 | 51 | def add_imu(self, msg): 52 | # type: (Imu) -> None 53 | t = self.get_time() 54 | 55 | orien = msg.orientation 56 | ang_vel = msg.angular_velocity 57 | lin_acc = msg.linear_acceleration 58 | 59 | self.data.append((DataType.IMU, t, orien.x, orien.y, orien.z, orien.w, ang_vel.x, ang_vel.y, ang_vel.z, 60 | lin_acc.x, lin_acc.y, lin_acc.z)) 61 | 62 | def create_odometry_callback(self, id): 63 | 64 | def add_odometry(msg): 65 | t = self.get_time() 66 | 67 | px = msg.pose.pose.position.x 68 | py = msg.pose.pose.position.y 69 | pz = msg.pose.pose.position.z 70 | 71 | v = msg.twist.twist.linear.x 72 | theta = euler_from_quaternion(( 73 | msg.pose.pose.orientation.x, 74 | msg.pose.pose.orientation.y, 75 | msg.pose.pose.orientation.z, 76 | msg.pose.pose.orientation.w 77 | ))[2] 78 | 79 | theta_yaw = msg.twist.twist.angular.z 80 | 81 | self.data.append((id, t, px, py, pz, v, theta, theta_yaw, msg.pose.pose.orientation.x, 82 | msg.pose.pose.orientation.y, 83 | msg.pose.pose.orientation.z, 84 | msg.pose.pose.orientation.w)) 85 | 86 | return add_odometry 87 | 88 | def add_anchors(self, msg): 89 | # type: (MarkerArray) -> None 90 | 91 | for marker in msg.markers: 92 | self.anchor_poses[marker.id] = [marker.pose.position.x, 
marker.pose.position.y, marker.pose.position.z] 93 | 94 | def get_time(self): 95 | return rospy.Time.now().to_nsec() 96 | 97 | def add_ranging(self, msg): 98 | # type: (Ranging) -> None 99 | t = self.get_time() 100 | 101 | if msg.anchorId in self.anchor_poses: 102 | anchor_pose = self.anchor_poses[msg.anchorId] 103 | anchor_distance = msg.range / 1000. 104 | 105 | tag = self.tag_offset[msg.tagId] 106 | 107 | self.data.append((DataType.UWB, t, anchor_distance, 108 | anchor_pose[0], anchor_pose[1], anchor_pose[2], 109 | tag[0], tag[1], tag[2])) 110 | 111 | def save(self): 112 | print("Saving", len(self.data), "datapoints") 113 | 114 | with open(self.out, "w") as file: 115 | file.writelines(",".join(map(str, d)) + '\n' for d in self.data) 116 | 117 | 118 | if __name__ == "__main__": 119 | rospy.init_node("csv_recorder2", anonymous=True) 120 | 121 | rec = Recorder() 122 | 123 | rospy.spin() 124 | 125 | rec.save() 126 | -------------------------------------------------------------------------------- /src/efk/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | -------------------------------------------------------------------------------- /src/efk/fusion_ekf.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import time 3 | 4 | import numpy as np 5 | 6 | from kalman_filter import KalmanFilter 7 | 8 | 9 | class FusionEKF(object): 10 | def __init__(self): 11 | self.kalman_filter = KalmanFilter() 12 | self.is_initialized = False 13 | self.previous_timestamp = 0 14 | 15 | self.noise_ax = 1 16 | self.noise_ay = 1 17 | self.noise_az = 1 18 | 19 | self.noise_vector = np.diag([self.noise_ax, self.noise_ay, self.noise_az]) 20 | 21 | self.kalman_filter.P = np.array([ 22 | [1, 0, 0, 0, 0, 0], 23 | [0, 1, 0, 0, 0, 0], 24 | [0, 0, 1, 0, 0, 0], 25 | [0, 0, 0, 1, 0, 0], 26 | [0, 0, 0, 0, 1, 0], 27 | [0, 0, 0, 0, 0, 1] 28 | ]) 29 | 30 | self.kalman_filter.x = np.zeros((6, 1)) 31 | 32 | # self.uwb_std = 23.5 / 1000 33 | self.uwb_std = 23.5 / 10 34 | 35 | self.R_UWB = np.array([ 36 | [self.uwb_std ** 2] 37 | ]) 38 | 39 | self.kalman_filter.R = self.R_UWB 40 | 41 | def process_measurement(self, anchor_pose, anchor_distance): 42 | if not self.is_initialized: 43 | self.is_initialized = True 44 | self.previous_timestamp = time.time() 45 | return 46 | 47 | current_time = time.time() 48 | dt = current_time - self.previous_timestamp 49 | self.previous_timestamp = current_time 50 | 51 | self.calulate_F_matrix(dt) 52 | self.calculate_Q_matrix(dt) 53 | 54 | self.kalman_filter.predict() 55 | 56 | self.kalman_filter.update(anchor_pose, anchor_distance) 57 | 58 | # print("X:", self.kalman_filter.x) 59 | # print("P:", self.kalman_filter.P) 60 | # print(self.kalman_filter.Q) 61 | 62 | def calulate_F_matrix(self, dt): 63 | self.kalman_filter.F = np.array([ 64 | [1, 0, 0, dt, 0, 0], 65 | [0, 1, 0, 0, dt, 0], 66 | [0, 0, 1, 0, 0, dt], 67 | [0, 0, 0, 1, 0, 0], 68 | [0, 0, 0, 0, 1, 0], 69 | [0, 0, 0, 0, 0, 1] 70 | ]) 71 | 72 | def calculate_Q_matrix(self, dt): 73 | a = [ 74 | [(dt ** 4) / 4, (dt ** 3) / 2], 75 | [(dt ** 3) / 2, dt ** 2] 76 | ] 77 | 78 | self.kalman_filter.Q = np.kron(a, self.noise_vector) 79 | 80 | 81 | if __name__ == '__main__': 82 | anchor_pose = np.array([10, 10, 10], dtype=float) 83 | anchor_pose2 = np.array([0, 10, 10], dtype=float) 84 | 85 | robot_pose = np.array([0, 0, 0], dtype=float) 86 | 87 | fusion = FusionEKF() 88 | 89 | s = time.time() 90 | 91 | for i in range(100): 92 | for a in 
[anchor_pose, anchor_pose2]: 93 | distance = np.linalg.norm(a - robot_pose) 94 | 95 | fusion.process_measurement(a, distance) 96 | 97 | c = time.time() 98 | 99 | robot_pose[0] += (c - s) * .1 100 | 101 | s = c 102 | 103 | print(robot_pose[0]) 104 | -------------------------------------------------------------------------------- /src/efk/kalman_filter.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | from __future__ import print_function 3 | 4 | import numpy as np 5 | 6 | 7 | class KalmanFilter(object): 8 | def __init__(self): 9 | self.threshold = 100 10 | self.x = None 11 | self.P = None 12 | self.F = None 13 | self.H = None 14 | self.R = None 15 | self.Q = None 16 | self.S = None 17 | self.K = None 18 | self.I = np.identity(6) 19 | 20 | def init(self, x, P, F, H, R, Q): 21 | self.x = x 22 | self.P = P 23 | self.F = F 24 | self.H = H 25 | self.R = R 26 | self.Q = Q 27 | 28 | def predict(self): 29 | self.x = np.matmul(self.F, self.x) 30 | self.P = np.matmul(np.matmul(self.F, self.P), self.F.transpose()) + self.Q 31 | 32 | def update(self, anchor_p, anchor_range): 33 | d = anchor_range 34 | 35 | p = self.x[:3] 36 | 37 | anchor_p = anchor_p.reshape((3, -1)) 38 | 39 | d_k = np.linalg.norm(p - anchor_p) 40 | 41 | self.H = np.zeros((1, 3 * 2)) 42 | self.H[:, :3] = np.transpose(p - anchor_p) / d_k 43 | 44 | H_transpose = np.transpose(self.H) 45 | 46 | self.S = np.matmul(np.matmul(self.H, self.P), H_transpose) + self.R 47 | 48 | distance_sub = d - d_k 49 | inverse_S = np.linalg.inv(self.S) 50 | 51 | Dm = np.sqrt(distance_sub * inverse_S * distance_sub) 52 | 53 | print(Dm) 54 | 55 | if np.all(Dm < self.threshold): 56 | self.K = np.matmul(np.matmul(self.P, H_transpose), inverse_S) 57 | 58 | self.x = self.x + self.K * distance_sub 59 | self.P = np.matmul(self.I - np.matmul(self.K, self.H), self.P) 60 | -------------------------------------------------------------------------------- /src/efk_uwb_localization.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | # coding=utf-8 3 | 4 | import numpy as np 5 | import rospy 6 | import tf 7 | from gtec_msgs.msg import Ranging 8 | from nav_msgs.msg import Odometry 9 | from visualization_msgs.msg import MarkerArray 10 | 11 | from efk.fusion_ekf import FusionEKF 12 | 13 | 14 | class UWBLocalization(object): 15 | """ 16 | A deprecated localization node using an EKF which estimates the position of the left and right tags seperately 17 | """ 18 | 19 | def __init__(self): 20 | """ 21 | Setups the EKF localization of the tags 22 | """ 23 | self.kalman_filter_tag_0 = FusionEKF() 24 | self.kalman_filter_tag_1 = FusionEKF() 25 | 26 | self.anchor_poses = dict() 27 | 28 | anchors = '/gtec/toa/anchors' 29 | toa_ranging = '/gtec/toa/ranging' 30 | 31 | anchors_sub = rospy.Subscriber(anchors, MarkerArray, callback=self.add_anchors) 32 | ranging_sub = rospy.Subscriber(toa_ranging, Ranging, callback=self.add_ranging) 33 | 34 | publish_tag_0 = '/jackal/uwb/pose/0' 35 | publish_tag_1 = '/jackal/uwb/pose/1' 36 | 37 | self.estimated_pose_tag_0 = rospy.Publisher(publish_tag_0, Odometry, queue_size=1) 38 | self.estimated_pose_tag_1 = rospy.Publisher(publish_tag_1, Odometry, queue_size=1) 39 | 40 | self.pose = Odometry() 41 | 42 | self.tf_listener = tf.TransformListener() 43 | 44 | def change_pose_ref(self, odom, tag, world='/world', base='/base_link'): 45 | """ 46 | Retrieves the tf tag for the robot and then translates the odom position 47 | @param odom: The odometry that needs to be translated 48 | @param tag: The name of the tag to use i.e. /right_tag 49 | @param world: The base reference frame 50 | @param base: the transformed reference frame 51 | @return: the translated reference frame 52 | """ 53 | try: 54 | (trans, rot) = self.tf_listener.lookupTransform(world, tag, rospy.Time(0)) 55 | 56 | pose = odom.pose.pose.position 57 | 58 | pose.x -= trans[0] 59 | pose.y -= trans[1] 60 | pose.z -= trans[2] 61 | 62 | (trans, rot) = self.tf_listener.lookupTransform(world, base, rospy.Time(0)) 63 | 64 | pose.x += trans[0] 65 | pose.y += trans[1] 66 | pose.z += trans[2] 67 | 68 | return True 69 | except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException): 70 | return False 71 | 72 | def add_anchors(self, msg): 73 | # type: (MarkerArray) -> None 74 | """ 75 | Function that handles the MarkerArray of anchor positions and updates the anchor pose dict 76 | @param msg: the MarkerArray topic message 77 | """ 78 | 79 | for marker in msg.markers: 80 | self.anchor_poses[marker.id] = np.array( 81 | [marker.pose.position.x, marker.pose.position.y, marker.pose.position.z]) 82 | 83 | def add_ranging(self, msg): 84 | # type: (Ranging) -> None 85 | """ 86 | Function that handles UWB range data and processes it through the Kalman filter 87 | @param msg: The Ranging topic message 88 | """ 89 | 90 | if msg.anchorId in self.anchor_poses: 91 | anchor_pose = self.anchor_poses[msg.anchorId] 92 | anchor_distance = msg.range / 1000. 
93 | 94 | if msg.tagId == 0: 95 | self.kalman_filter_tag_0.process_measurement(anchor_pose, anchor_distance) 96 | elif msg.tagId == 1: 97 | self.kalman_filter_tag_1.process_measurement(anchor_pose, anchor_distance) 98 | 99 | def run(self): 100 | """ 101 | The step function that publishes the position information of the tags 102 | @return: 103 | """ 104 | rate = rospy.Rate(60) 105 | 106 | while not rospy.is_shutdown(): 107 | self.pose.pose.pose.position.x = self.kalman_filter_tag_0.kalman_filter.x[0] 108 | self.pose.pose.pose.position.y = self.kalman_filter_tag_0.kalman_filter.x[1] 109 | self.pose.pose.pose.position.z = self.kalman_filter_tag_0.kalman_filter.x[2] 110 | 111 | self.pose.twist.twist.linear.x = self.kalman_filter_tag_0.kalman_filter.x[3] 112 | self.pose.twist.twist.linear.y = self.kalman_filter_tag_0.kalman_filter.x[4] 113 | self.pose.twist.twist.linear.z = self.kalman_filter_tag_0.kalman_filter.x[5] 114 | 115 | # r = self.change_pose_ref(self.pose, '/right_tag') 116 | 117 | # if r: 118 | self.estimated_pose_tag_0.publish(self.pose) 119 | 120 | self.pose.pose.pose.position.x = self.kalman_filter_tag_1.kalman_filter.x[0] 121 | self.pose.pose.pose.position.y = self.kalman_filter_tag_1.kalman_filter.x[1] 122 | self.pose.pose.pose.position.z = self.kalman_filter_tag_1.kalman_filter.x[2] 123 | 124 | self.pose.twist.twist.linear.x = self.kalman_filter_tag_1.kalman_filter.x[3] 125 | self.pose.twist.twist.linear.y = self.kalman_filter_tag_1.kalman_filter.x[4] 126 | self.pose.twist.twist.linear.z = self.kalman_filter_tag_1.kalman_filter.x[5] 127 | 128 | # r = self.change_pose_ref(self.pose, '/left_tag') 129 | 130 | # if r: 131 | self.estimated_pose_tag_1.publish(self.pose) 132 | 133 | rate.sleep() 134 | 135 | 136 | if __name__ == "__main__": 137 | rospy.init_node("uwb_localization_kalman") 138 | loc = UWBLocalization() 139 | loc.run() 140 | 141 | rospy.spin() 142 | -------------------------------------------------------------------------------- /src/jackal.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding=utf-8 3 | 4 | import json 5 | import os 6 | 7 | import numpy as np 8 | import rospkg 9 | import rospy 10 | from gtec_msgs.msg import Ranging 11 | from nav_msgs.msg import Odometry 12 | from scipy.optimize import least_squares 13 | from tf.transformations import euler_from_quaternion 14 | from visualization_msgs.msg import MarkerArray 15 | 16 | from jackal_motion import JackalMotion 17 | from ukf_uwb_localization import UKFUWBLocalization, get_tag_ids, get_time 18 | 19 | 20 | class Jackal(object): 21 | localized_param_key = "is_localized" 22 | jackal_publish_path = 'uwb/odom' 23 | num_known_anchor_tolerance = 3 24 | num_datapoint_num_tolerance = 50 25 | 26 | def get_tags(self, tags_file="tag_ids.json"): 27 | """ 28 | Returns the recorded associated robots with each anchor id and tag id for each of the UWB sensors. 
29 | @param tags_file: the json file to read to load the data 30 | @return: Returns the base json file as a dict, the inverted tag dict that has the tag id as the key and the 31 | robot as value, the inverted anchor dict that has the anchor id as the key and the robot as value 32 | """ 33 | rospack = rospkg.RosPack() 34 | package_location = rospack.get_path('uwb_localization') 35 | 36 | tags_file = os.path.join(package_location, 'src', tags_file) 37 | 38 | with open(tags_file, 'r') as f: 39 | tag_data = json.load(f) 40 | 41 | tag_to_robot = dict() 42 | anchor_to_robot = dict() 43 | 44 | for key, values in tag_data.items(): 45 | 46 | if values['right_tag'] not in tag_to_robot or tag_to_robot[values['right_tag']] == '/': 47 | tag_to_robot[values['right_tag']] = key 48 | 49 | if values['left_tag'] not in tag_to_robot or tag_to_robot[values['left_tag']] == '/': 50 | tag_to_robot[values['left_tag']] = key 51 | 52 | if values['anchor'] not in anchor_to_robot or anchor_to_robot[values['anchor']] == '/': 53 | anchor_to_robot[values['anchor']] = key 54 | 55 | return tag_data, tag_to_robot, anchor_to_robot 56 | 57 | def __init__(self, timeout_duration=5, auto_position_checker_dt=-1): 58 | """ 59 | The constructor for the Jackal robot pose estimator 60 | @param timeout_duration: During the initial pose estimation if there is not enough data to estimate the pose, 61 | the timeout time before the robot only uses relative positioning 62 | @param auto_position_checker_dt: In the case that there is a huge gap between the actual and current position 63 | estimation, this parameter informs you when to rerun the initial position estimation code with all the current 64 | UWB and odometery data, in order to reset the initial estimation. 65 | """ 66 | p = [1.0001, 11.0, 14.0001, 20.9001, 1.0001, 0.0001, 0.0001, 3.9001, 4.9001, 1.0, 0, 0.0001, 0.0001, 0.0001, 67 | 2.0001, 0.0001, 0.0001] 68 | 69 | self.ns = rospy.get_namespace() 70 | 71 | if self.ns == '/': 72 | self.ns = "/Jackal1/" 73 | self.ranging_data = [] 74 | self.odometry_data = None 75 | self.odom_times = None 76 | self.right_tag, self.left_tag, self.anchor = get_tag_ids(self.ns) 77 | 78 | self.start_time = None 79 | self.delta_t = rospy.Duration(timeout_duration) 80 | self.time_override = False 81 | 82 | self.auto_position_checker_dt = auto_position_checker_dt 83 | self.checker_start_time = None 84 | 85 | self.x_initial = 0 86 | self.y_initial = 0 87 | self.theta_initial = 0 88 | self.trilateration_error = 0 89 | 90 | _, self.tag_to_robot, self.anchor_to_robot = self.get_tags() 91 | 92 | self.other_odometry = dict() 93 | 94 | print("Namespace:", self.ns) 95 | 96 | self.is_localized = False 97 | rospy.set_param(Jackal.localized_param_key, self.is_localized) 98 | 99 | self.loc = UKFUWBLocalization(p[0], p[1:7], accel_std=p[7], yaw_accel_std=p[8], alpha=p[9], beta=p[10], 100 | namespace=self.ns, right_tag=self.right_tag, left_tag=self.left_tag) 101 | 102 | self.d = np.linalg.norm(self.loc.tag_offset[self.right_tag] - self.loc.tag_offset[self.left_tag]) 103 | 104 | print(self.d) 105 | 106 | rospy.set_param("left_id", self.left_tag) 107 | rospy.set_param("right_id", self.right_tag) 108 | rospy.set_param("anchor", self.anchor) 109 | 110 | anchors = '/gtec/toa/anchors' 111 | toa_ranging = '/gtec/toa/ranging' 112 | 113 | if self.ns is not None: 114 | odometry = self.ns + 'odometry/filtered' 115 | else: 116 | odometry = '/odometry/filtered' 117 | 118 | self.anchor_poses = dict() 119 | 120 | ranging_sub = rospy.Subscriber(toa_ranging, Ranging, 
callback=self.add_ranging) 121 | 122 | anchors_sub = rospy.Subscriber(anchors, MarkerArray, callback=self.add_anchors) 123 | 124 | odometry_sub = rospy.Subscriber(odometry, Odometry, callback=self.add_odometry) 125 | 126 | self.motion = JackalMotion(self.ns) 127 | 128 | def create_other_robot_odometry_recorder(self, robot_name): 129 | """ 130 | This creates and setups up an odometry listener for a robot using the odometry/filtered topic 131 | @param robot_name: The robot whose odometry needs to be listened to in the format i.e "robot/" 132 | """ 133 | 134 | def add_odometry(msg): 135 | # type: (Odometry) -> None 136 | """ 137 | Function that handles odometry data 138 | @param msg: The Odometry topic message 139 | """ 140 | 141 | t = get_time() 142 | 143 | px = msg.pose.pose.position.x 144 | py = msg.pose.pose.position.y 145 | pz = msg.pose.pose.position.z 146 | 147 | v = msg.twist.twist.linear.x 148 | theta = euler_from_quaternion(( 149 | msg.pose.pose.orientation.x, 150 | msg.pose.pose.orientation.y, 151 | msg.pose.pose.orientation.z, 152 | msg.pose.pose.orientation.w 153 | ))[2] 154 | 155 | theta_yaw = msg.twist.twist.angular.z 156 | 157 | if self.other_odometry[robot_name]['data'] is None: 158 | self.odometry_data[robot_name]['data'] = np.array((px, py, pz, v, theta, theta_yaw)) 159 | else: 160 | self.odometry_data[robot_name]['data'] = np.vstack( 161 | (self.odometry_data[robot_name]['data'], (px, py, pz, v, theta, theta_yaw))) 162 | if self.odometry_data[robot_name]['time'] is None: 163 | self.odometry_data[robot_name]['time'] = np.array([t], dtype=np.uint64) 164 | else: 165 | self.odometry_data[robot_name]['time'] = np.append(self.odometry_data[robot_name]['time'], t) 166 | 167 | self.odometry_data[robot_name]['data'] = None 168 | self.odometry_data[robot_name]['time'] = None 169 | 170 | self.other_odometry[robot_name]['sub'] = rospy.Subscriber(robot_name + "odometry/filtered", Odometry, 171 | callback=add_odometry) 172 | 173 | def add_anchors(self, msg): 174 | # type: (MarkerArray) -> None 175 | """ 176 | Function that handles the MarkerArray of anchor positions and updates the anchor pose dict 177 | @param msg: the MarkerArray topic message 178 | """ 179 | 180 | for marker in msg.markers: 181 | if marker.id not in self.anchor_to_robot: 182 | self.anchor_poses[marker.id] = np.array( 183 | [marker.pose.position.x, marker.pose.position.y, marker.pose.position.z]) 184 | 185 | def add_ranging(self, msg): 186 | # type: (Ranging) -> None 187 | """ 188 | Function that handles UWB range data and makes sure that only the data of localized robots gets added to the 189 | list of valid sensor data to use 190 | @param msg: The Ranging topic message 191 | """ 192 | t = get_time() 193 | 194 | if self.tag_to_robot[msg.tagId] == self.ns: 195 | is_mobile = msg.anchorId in self.anchor_to_robot 196 | 197 | if is_mobile: 198 | robot = self.anchor_to_robot[msg.anchorId] 199 | 200 | localized = self.check_if_localized(robot) 201 | 202 | if localized: 203 | pose = self.get_current_robot_pose(robot) 204 | else: 205 | pose = None 206 | else: 207 | localized = True 208 | 209 | if msg.anchorId not in self.anchor_poses: 210 | return 211 | 212 | pose = self.anchor_poses[msg.anchorId] 213 | 214 | self.ranging_data.append( 215 | { 216 | "time": t, 217 | "anchorID": msg.anchorId, 218 | "tagID": msg.tagId, 219 | "range": msg.range / 1000., 220 | 'localized': localized, 221 | 'pose': pose 222 | } 223 | ) 224 | 225 | def add_odometry(self, msg): 226 | # type: (Odometry) -> None 227 | """ 228 | Function that handles 
odometry data and adds it to the data list, also using the estimated initial x,y,theta 229 | translates the data 230 | @param msg: The Odometry topic message 231 | """ 232 | 233 | t = get_time() 234 | 235 | px = msg.pose.pose.position.x + self.x_initial 236 | py = msg.pose.pose.position.y + self.y_initial 237 | pz = msg.pose.pose.position.z 238 | 239 | v = msg.twist.twist.linear.x 240 | theta = euler_from_quaternion(( 241 | msg.pose.pose.orientation.x, 242 | msg.pose.pose.orientation.y, 243 | msg.pose.pose.orientation.z, 244 | msg.pose.pose.orientation.w 245 | ))[2] 246 | 247 | theta_yaw = msg.twist.twist.angular.z + self.theta_initial 248 | 249 | # print(t, px, py, pz, v, theta, theta_yaw) 250 | # print(self.odometry_data.dtype) 251 | # print(len(self.odom_times), self.odom_times.dtype) 252 | 253 | if self.odometry_data is None: 254 | self.odometry_data = np.array((px, py, pz, v, theta, theta_yaw)) 255 | else: 256 | self.odometry_data = np.vstack((self.odometry_data, (px, py, pz, v, theta, theta_yaw))) 257 | if self.odom_times is None: 258 | self.odom_times = np.array([t], dtype=np.uint64) 259 | else: 260 | self.odom_times = np.append(self.odom_times, t) 261 | 262 | def get_current_robot_pose(self, robot_name): 263 | """ 264 | Gets the estimated current position of a positioned robot using uwb/odom topic 265 | @param robot_name: The robot's to extract the estimated position. If the robot does not exists then this will 266 | wait for the message indefinitely 267 | @return: The robots estimated x, y, z 268 | """ 269 | path_name = robot_name + Jackal.jackal_publish_path 270 | 271 | msg = rospy.wait_for_message(path_name, Odometry) 272 | 273 | return np.array([msg.pose.pose.position.x, msg.pose.pose.position.y, msg.pose.pose.position.z]) 274 | 275 | def explore_recorded_data(self): 276 | """ 277 | Separates the ododmetry and range data as well as the associated robot into a dict of localized and unlocalized 278 | nodes 279 | @return: A dict of the localize and unlocalized data to use 280 | """ 281 | data = { 282 | "localized": { 283 | "data": [], 284 | 'robots': [] 285 | }, 286 | "unlocalized": { 287 | 'data': [], 288 | "robots": [] 289 | } 290 | } 291 | 292 | for range_data in self.ranging_data: 293 | if range_data['localized']: 294 | container = data['localized'] 295 | else: 296 | container = data['unlocalized'] 297 | 298 | container['data'].append(range_data) 299 | container['robots'].append(range_data['anchorID']) 300 | 301 | return data 302 | 303 | def check_if_localized(self, robot_name): 304 | """ 305 | Checks, using a robot's rosparam key, if the robot is localized or not 306 | @param robot_name: The name of the robot to check if it has been localized or not 307 | @return: If it has been localized returns True, false otherwise 308 | """ 309 | parameter_name = robot_name + Jackal.localized_param_key 310 | 311 | return rospy.has_param(parameter_name) and rospy.get_param(parameter_name) 312 | 313 | def stop(self): 314 | """ 315 | Do not use 316 | Sends a stop signal to the Jackal 317 | """ 318 | self.motion.v = 0 319 | 320 | def spread_message(self, robots): 321 | """ 322 | Do not use as it not implemented 323 | Sends a wave of messages asking the neighboring robots to send its localization information such as its 324 | UWB and odometry data 325 | @param robots: The robots to ask the data from 326 | """ 327 | for robot in robots: 328 | # rospy.Publisher() 329 | pass 330 | 331 | def step(self): 332 | """ 333 | This is the most important function as it handles the localization of the 
Jackal at each timestep. 334 | The process starts as the robot unlocalized and then once there is enough information for it to trilaterate it 335 | will, thus transforming its odometry data from a local reference field to a global field. Once that is done it 336 | toggles its tag and defines itself as being localized and uses an UKF to continue refining the measurement noise. 337 | """ 338 | 339 | if self.is_localized or self.time_override: 340 | self.loc.step() 341 | self.start_time = None 342 | 343 | if self.time_override: 344 | recoreded_data = self.explore_recorded_data() 345 | 346 | total_knowns = len(set(recoreded_data['localized']['robots'])) 347 | # print(total_knowns) 348 | 349 | total_unique_poses = 0 350 | 351 | if len(recoreded_data['localized']['data']) > 0: 352 | total_unique_poses = \ 353 | np.unique(np.array([np.round(d['pose'], 1) for d in recoreded_data['localized']['data']]), 354 | axis=0).shape[0] 355 | 356 | if total_knowns >= Jackal.num_known_anchor_tolerance or ( 357 | total_unique_poses - total_knowns) * 2 >= Jackal.num_known_anchor_tolerance: 358 | total_data_points = len(recoreded_data['localized']['data']) 359 | 360 | if total_data_points > Jackal.num_datapoint_num_tolerance: 361 | self.time_override = False 362 | elif self.auto_position_checker_dt > 0: 363 | t = rospy.get_rostime().secs 364 | 365 | # print(t, self.checker_start_time, self.auto_position_checker_dt ) 366 | 367 | if self.checker_start_time is None: 368 | self.checker_start_time = t 369 | elif (t - self.checker_start_time) > self.auto_position_checker_dt: 370 | self.checker_start_time = t 371 | 372 | data = self.explore_recorded_data()['localized']['data'] 373 | 374 | sub = np.array((self.x_initial, self.y_initial, 0, 0, self.theta_initial, 0)) 375 | 376 | print("Caculating") 377 | self.odometry_data -= sub 378 | result, cost = self.trilaterate_position(data, (self.x_initial, self.y_initial, self.theta_initial)) 379 | self.odometry_data += sub 380 | 381 | print("Current", np.array((self.x_initial, self.y_initial, self.theta_initial)), "Current Error", 382 | self.trilateration_error, "Result", result, "Error", cost) 383 | 384 | if cost < self.trilateration_error: 385 | self.odometry_data -= np.array((self.x_initial, self.y_initial, 0, 0, self.theta_initial, 0)) 386 | 387 | self.trilateration_error = cost 388 | 389 | self.x_initial = result[0] 390 | self.y_initial = result[1] 391 | self.theta_initial = result[4] 392 | 393 | self.odometry_data += result 394 | 395 | latest_pose = self.odometry_data[-1] 396 | 397 | self.setup_ukf(latest_pose) 398 | 399 | 400 | else: 401 | if self.start_time is None: 402 | self.start_time = rospy.Time.now() 403 | else: 404 | self.time_override = (rospy.Time.now() - self.start_time) > self.delta_t 405 | 406 | if self.time_override: 407 | print("Time overriden") 408 | self.setup_ukf(np.zeros(6)) 409 | 410 | recoreded_data = self.explore_recorded_data() 411 | 412 | total_knowns = len(set(recoreded_data['localized']['robots'])) 413 | 414 | total_unique_poses = 0 415 | 416 | if len(recoreded_data['localized']['data']) > 0: 417 | total_unique_poses = np.unique( 418 | np.array([np.round(d['pose'], 1) for d in recoreded_data['localized']['data']]), axis=0) 419 | # print(total_unique_poses.shape[0]) 420 | total_unique_poses = total_unique_poses.shape[0] 421 | 422 | # print(total_knowns, total_unique_poses) 423 | 424 | if total_knowns >= Jackal.num_known_anchor_tolerance or ( 425 | total_unique_poses - total_knowns) * 2 >= Jackal.num_known_anchor_tolerance: 426 | total_data_points 
= len(recoreded_data['localized']['data']) 427 | # print(total_data_points) 428 | 429 | if total_data_points > Jackal.num_datapoint_num_tolerance: 430 | pose, self.trilateration_error = self.trilaterate_position(recoreded_data['localized']['data']) 431 | 432 | self.x_initial = pose[0] 433 | self.y_initial = pose[1] 434 | self.theta_initial = pose[4] 435 | 436 | self.is_localized = True 437 | 438 | self.odometry_data += pose 439 | 440 | latest_pose = self.odometry_data[-1] 441 | 442 | self.setup_ukf(latest_pose) 443 | 444 | print(pose) 445 | print("Robot has been localized now moving to using robot UKF to localize") 446 | else: 447 | self.spread_message(set(recoreded_data['unlocalized']['robots'])) 448 | # Since unable to localize in world reference frame continue to use odometery for only relative motion 449 | pass 450 | 451 | # self.motion.step() 452 | rospy.set_param(self.ns + Jackal.localized_param_key, self.is_localized) 453 | 454 | def setup_ukf(self, initial_x): 455 | """ 456 | Setups the initial UFK parameters such as the initial position, heading and covariance matrix 457 | @param initial_x: The initial state variable 458 | """ 459 | self.loc.set_initial(self.x_initial, self.y_initial, self.theta_initial) 460 | self.loc.clear_data() 461 | self.loc.intialize(initial_x, np.identity(6)) 462 | 463 | def find_closest_odometry(self, range_data): 464 | """ 465 | Goes through each UWB range measurement and then associates it with the closest odometry data through 466 | interpolation 467 | @param range_data: The UWB range measurements 468 | @return: The list of the interpolated odometry measurements 469 | """ 470 | closest = np.zeros((len(range_data), 4)) 471 | 472 | for i, datapoint in enumerate(range_data): 473 | t = datapoint['time'] 474 | 475 | start, end = self.find_closest_sorted(t) 476 | interpol = self.odom_interpolation(start, end, t) 477 | 478 | # print start, end, t, self.odom_times[start], self.odom_times[end], interpol[[1, 2, 3, 5]] 479 | 480 | closest[i] = interpol[[0, 1, 2, 4]] 481 | # print start, end, interpol 482 | # print closest 483 | 484 | return closest 485 | 486 | def odom_interpolation(self, start, end, t): 487 | """ 488 | Interpolates between the odometry measurements at index start and end for time t, for the position, velcoity 489 | and yaw rate estimates normal linear interpolation is used; however, for the yaw since it is bound by -180 and 490 | 180 angular interpolation is used 491 | @param start: The start index of the odometry for the interpolation 492 | @param end: The end index of the odometry for the interpolation 493 | @param t: The current time that you want to interpolate to 494 | @return: The interpolated odometry measurement 495 | """ 496 | odom_initial = self.odometry_data[start] 497 | odom_final = self.odometry_data[end] 498 | 499 | dt = (self.odom_times[end] - self.odom_times[start]) 500 | 501 | if dt != 0: 502 | percent = (t - self.odom_times[start]) / dt 503 | else: 504 | return odom_initial 505 | 506 | # print start, end, self.odom_times[end], self.odom_times[start],percent 507 | 508 | blend = (1 - percent) * odom_initial + percent * odom_final 509 | 510 | angle_start = odom_initial[4] % (2 * np.pi) 511 | angle_end = odom_final[4] % (2 * np.pi) 512 | 513 | rotation = ((angle_start - angle_end) + np.pi) % (2 * np.pi) - np.pi 514 | blend[4] = (odom_initial[4] + rotation * percent) % (2 * np.pi) 515 | 516 | return blend 517 | 518 | def find_closest_sorted(self, t): 519 | """ 520 | Finds the closest odometry datapoint given the time t in order to 
help interpolate 521 | @param t: The time to find the closest value 522 | @return: The odom index before time t, the odom index after time t 523 | """ 524 | # print times.shape 525 | 526 | idx = np.searchsorted(self.odom_times, t, side='left') 527 | 528 | if idx != 0 and (idx >= len(self.odom_times) - 1 or self.odom_times[idx] > t): 529 | if idx == len(self.odom_times): 530 | idx -= 1 531 | 532 | return idx - 1, idx 533 | return idx, idx + 1 534 | 535 | def trilaterate_position(self, range_data, initial_pose=(0, 0, 0)): 536 | """ 537 | The function that handles the trilateration function for the initial position and heading 538 | @param range_data: The UWB range data 539 | @param initial_pose: The initial position estimate to use for the Non linnear least sqaures 540 | @return: The estimated initial position estimate of the robot 541 | """ 542 | if len(self.odometry_data) > len(range_data): 543 | odometry = self.find_closest_odometry(range_data) 544 | else: 545 | odometry = np.zeros((len(range_data), 4)) 546 | 547 | # import json 548 | 549 | # np.save("/home/marius/catkin_ws/src/uwb_localization/odometry", odometry) 550 | 551 | # print(odometry) 552 | # print(self.odom_times) 553 | # print(self.odometry_data) 554 | 555 | # new_d = range_data 556 | 557 | # for i in range(len(new_d)): 558 | # new_d[i]['pose'] = new_d[i]['pose'].tolist() 559 | 560 | # json.dump( new_d, open("/home/marius/catkin_ws/src/uwb_localization/range_data.json", 'w'), ) 561 | # print("range_data.json") 562 | 563 | res = least_squares(self.trilateration_function, initial_pose, args=(range_data, odometry)) 564 | 565 | if res.cost > 50: 566 | local_mininum, error = self.check_for_local_mininum(res, range_data, odometry) 567 | else: 568 | local_mininum = res.x 569 | error = self.rmse(res.fun) 570 | 571 | return np.array([local_mininum[0], local_mininum[1], 0, 0, local_mininum[2], 0]), error 572 | 573 | def rmse(self, residuals): 574 | """ 575 | Function that calcualtes the RMSE of residuals 576 | @param residuals: The residuals to calcualte the RMSE from 577 | @return: the final RMSE value 578 | """ 579 | return np.sqrt(np.mean((residuals) ** 2)) 580 | 581 | def check_for_local_mininum(self, current_min_sol, range_data, odometry): 582 | """ 583 | In the case that the Non linear least sqaured runs into a local minimum, this function reruns the caclulation 584 | but with a set of much different initial position estimates and only keeps the one with the lowest error 585 | @param current_min_sol: The current solution to the non linear least sqaures problem 586 | @param range_data: The UWB data 587 | @param odometry: The odometry range data 588 | @return: The new best estimate, the error of the state 589 | """ 590 | x, y, z = current_min_sol.x 591 | 592 | scores = [[current_min_sol.cost, current_min_sol.x, self.rmse(current_min_sol.fun)]] 593 | 594 | for x_scale in [2, -2]: 595 | for y_scale in [2, -2]: 596 | res = least_squares(self.trilateration_function, [x * x_scale, y * y_scale, z], 597 | args=(range_data, odometry)) 598 | scores.append([res.cost, res.x, self.rmse(res.fun)]) 599 | 600 | min_result = min(scores, key=lambda key: key[0]) 601 | 602 | return min_result[1], min_result[2] 603 | 604 | def trilateration_function(self, input_x, distances, odometry_data): 605 | """ 606 | Calculates the residuals for the current input, distances and odometry inorder to calculates the Non Linear 607 | Least Squares 608 | @param input_x: The input of the function, initial x, y, and theta parameters 609 | @param distances: The UWB range 
measurements 610 | @param odometry_data: The associated odometry to the range measurements 611 | @return: An array for the residuals calculated as the difference between the range measurements and the range 612 | from the offset odometry data 613 | """ 614 | # x[0] = x_start 615 | # x[1] = y_start 616 | # x[2] = theta 617 | 618 | _, _, theta = input_x 619 | 620 | xy = input_x[:2] 621 | 622 | residuals = [ 623 | # (x1 - x2) ** 2 + (y1 - y2) ** 2 - self.d ** 2, 624 | ] 625 | 626 | for i, distance in enumerate(distances): 627 | anchor = distance['pose'] 628 | tagID = distance['tagID'] 629 | distance = distance['range'] 630 | 631 | odometry = odometry_data[i] 632 | # 0 = x, 1 = y, 2 = z, 3 = theta 633 | xy_odom = odometry[:2] 634 | theta_odom = odometry[3] 635 | 636 | z = self.loc.tag_offset[tagID][2] + odometry[2] 637 | 638 | xy_tag = self.loc.tag_offset[tagID][:2] 639 | 640 | xy_world = self.rotate(xy_odom, theta) + xy 641 | xy_tag = self.rotate(xy_tag, theta + theta_odom) + xy_world 642 | 643 | x, y = xy_tag 644 | 645 | residuals.append((x - anchor[0]) ** 2 + (y - anchor[1]) ** 2 + (z - anchor[2]) ** 2 - distance ** 2) 646 | 647 | return residuals 648 | 649 | def rotate(self, xy, rot_angle): 650 | """ 651 | Rotates a 2D point by a certain angle 652 | @param xy: The 2D point 653 | @param rot_angle: the angle to rotate by 654 | @return: The rotated 2D point 655 | """ 656 | c = np.cos(rot_angle) 657 | s = np.sin(rot_angle) 658 | 659 | result = np.matmul(np.array([[c, -s], 660 | [s, c]]), xy) 661 | 662 | return result 663 | 664 | 665 | if __name__ == "__main__": 666 | rospy.init_node("full_jackal", anonymous=True) 667 | 668 | jackal = Jackal() 669 | 670 | rate = rospy.Rate(60) 671 | 672 | while not rospy.is_shutdown(): 673 | jackal.step() 674 | 675 | rate.sleep() 676 | 677 | print("End") 678 | -------------------------------------------------------------------------------- /src/jackal_motion.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding=utf-8 3 | 4 | import rospy 5 | from geometry_msgs.msg import Twist 6 | 7 | 8 | class JackalMotion(object): 9 | """ 10 | Class meant to publish to the Jackal motion commands 11 | """ 12 | 13 | def __init__(self, namespace): 14 | """ 15 | Setups publisher helper. 
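
The residuals built in trilateration_function above are squared-range differences that scipy.optimize.least_squares drives towards zero. A minimal, self-contained sketch of the same idea, with synthetic anchors and ranges, the tag height held fixed, and the odometry and tag offsets left out:

import numpy as np
from scipy.optimize import least_squares

anchors = np.array([[0.0, 0.0, 0.5],
                    [4.0, 0.0, 0.5],
                    [0.0, 4.0, 0.5],
                    [4.0, 4.0, 0.5]])
true_pos = np.array([1.5, 2.0, 0.3])
ranges = np.linalg.norm(anchors - true_pos, axis=1) + np.random.normal(0, 0.05, len(anchors))

def residuals(xy):
    # Same form as trilateration_function: (x - ax)^2 + (y - ay)^2 + (z - az)^2 - d^2,
    # with the tag height held at its true value for simplicity.
    x, y = xy
    return ((x - anchors[:, 0]) ** 2 + (y - anchors[:, 1]) ** 2
            + (true_pos[2] - anchors[:, 2]) ** 2 - ranges ** 2)

res = least_squares(residuals, x0=[0.0, 0.0])
print(res.x, np.sqrt(np.mean(res.fun ** 2)))   # estimate and its RMSE, as in rmse()

trilaterate_position does the same thing with the interpolated odometry and tag offsets folded into each residual, and falls back to check_for_local_mininum when res.cost stays above 50.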
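
A hypothetical driver loop for this helper, assuming a running ROS master and this package on the Python path; the node name and namespace below are made up (the namespace matches an entry in tag_ids.json):

import rospy
from jackal_motion import JackalMotion

rospy.init_node("motion_sketch")          # node name is an assumption
motion = JackalMotion("/Jackal1/")        # "/" also works for a single robot

motion.v = 0.2   # m/s forward
motion.w = 0.1   # rad/s yaw rate

rate = rospy.Rate(10)
while not rospy.is_shutdown():
    motion.step()   # republishes the Twist on <ns>jackal_velocity_controller/cmd_vel
    rate.sleep()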
Publishes to jackal_velocity_controller/cmd_vel 16 | @param namespace: the robot you want to publish to .i.e Jackal1/ 17 | """ 18 | self.v = 0 19 | self.w = 0 20 | 21 | self.cmd = Twist() 22 | 23 | self.vel_pub = rospy.Publisher(namespace + 'jackal_velocity_controller/cmd_vel', Twist, queue_size=1) 24 | 25 | def step(self): 26 | """ 27 | Step function that publishes the velocity commands 28 | """ 29 | self.cmd.linear.x = self.v 30 | self.cmd.angular.z = self.w 31 | 32 | self.vel_pub.publish(self.cmd) 33 | -------------------------------------------------------------------------------- /src/live_plotter.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | from __future__ import print_function 3 | 4 | import matplotlib.pyplot as plt 5 | from matplotlib.animation import FuncAnimation 6 | 7 | 8 | class LivePlotter(object): 9 | """ 10 | A helper class for creating live plotting matplotlib windows 11 | """ 12 | 13 | def __init__(self, update_interval=1000, alpha=1.0, window_name=None, time_removal=None, lengend_loc='best', 14 | font_size=None): 15 | """ 16 | Setups the live plotting window 17 | @param update_interval: How fast the animated window updates its values 18 | @param alpha: The alpha value of lines added if there are many to make things a little easier to view 19 | @param window_name: The matplotlib window name 20 | @param time_removal: How long before you want to remove the legend, None to keep it indefinitely 21 | @param lengend_loc: The location of the legend as defined by matplotlib parameter choices 22 | @param font_size: The font size of the legend as defined by matplotlib parameter choices 23 | """ 24 | self.fig = plt.figure() 25 | self.ax = self.fig.add_subplot(1, 1, 1) 26 | self.fig.tight_layout() 27 | if window_name is not None: 28 | self.fig.canvas.set_window_title(window_name) 29 | 30 | self.update_interval = update_interval 31 | 32 | self.alpha = alpha 33 | self.data = dict() 34 | self.objects = dict() 35 | self.ani = None 36 | self.time_removal = time_removal 37 | self.legend_loc = lengend_loc 38 | self.font_size = font_size 39 | 40 | def add_data_point(self, object_name, x, y): 41 | """ 42 | Adds data to the live plotting window 43 | @param object_name: The line object to add the data to. 
If it does not exist a new object will be created 44 | otherwise the data will just be appended to 45 | @param x: the x data to add 46 | @param y: the y data to add 47 | """ 48 | if object_name not in self.data: 49 | self.data[object_name] = { 50 | "x": [], 51 | "y": [] 52 | } 53 | 54 | line, = self.ax.plot([], [], label=object_name, alpha=self.alpha) 55 | 56 | self.objects[object_name] = line 57 | 58 | self.data[object_name]["x"].append(x) 59 | self.data[object_name]["y"].append(y) 60 | 61 | self.objects[object_name].set_xdata(self.data[object_name]['x']) 62 | self.objects[object_name].set_ydata(self.data[object_name]['y']) 63 | 64 | def func_animate(self, i): 65 | """ 66 | The animation function called at each interval step 67 | @param i: the current index of the animation 68 | @return: the objects to draw 69 | """ 70 | try: 71 | if self.time_removal is None or i < self.time_removal: 72 | # if self.time_removal is not None: 73 | # print(i) 74 | 75 | self.ax.legend(loc=self.legend_loc, fontsize=self.font_size) 76 | else: 77 | self.ax.legend_ = None 78 | self.ax.relim() 79 | self.ax.autoscale_view() 80 | except ValueError: 81 | print("Error graphing") 82 | 83 | return self.objects.values() 84 | 85 | def show(self): 86 | """ 87 | Shows and starts the live plotter func loop 88 | """ 89 | self.ani = FuncAnimation(self.fig, self.func_animate, interval=self.update_interval) 90 | 91 | plt.show() 92 | 93 | 94 | if __name__ == '__main__': 95 | plotter = LivePlotter() 96 | plotter.show() 97 | -------------------------------------------------------------------------------- /src/location_drawer.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding=utf-8 3 | 4 | import matplotlib 5 | 6 | matplotlib.use('Qt5Agg') 7 | 8 | import rospy 9 | from nav_msgs.msg import Odometry 10 | from live_plotter import LivePlotter 11 | from geometry_msgs.msg import Point 12 | import sys 13 | 14 | 15 | class PositionPlotter(object): 16 | """ 17 | A class to help plot the position of Odometry data 18 | """ 19 | 20 | def __init__(self, robot_name='jackal', position_links=None): 21 | """ 22 | Setups the PositionPLotter class 23 | @param robot_name: the name of the robot 24 | @param position_links: the Subscriber topics to plot in the window. 
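
A minimal offline use of LivePlotter, assuming only that live_plotter.py is importable; the data is synthetic. In the ROS nodes of this package the same add_data_point call happens inside subscriber callbacks while show() runs the matplotlib event loop:

import numpy as np
from live_plotter import LivePlotter

plotter = LivePlotter(update_interval=500)

# Pre-load two labelled lines; each label gets its own Line2D on first use.
for t in np.linspace(0, 10, 200):
    plotter.add_data_point("sin", t, np.sin(t))
    plotter.add_data_point("cos", t, np.cos(t))

plotter.show()   # blocks: FuncAnimation re-draws, autoscales and shows the legend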
If it is None then the topics are searched 25 | all topics ending with "uwb/odom" (UKF position estimate), "odometry/filtered" ( odometry positioning ), 26 | "ground_truth/state" ( ground truth state ) 27 | """ 28 | if position_links is None: 29 | position_links = [] 30 | 31 | topic_names = rospy.get_published_topics() 32 | 33 | for (topic, _) in topic_names: 34 | if topic.endswith('uwb/odom') or topic.endswith('odometry/filtered') or topic.endswith( 35 | 'ground_truth/state'): 36 | position_links.append(topic) 37 | 38 | print(position_links) 39 | 40 | # self.live_plotter = LivePlotter(alpha=0.5, window_name="Location Drawer", time_removal=750, font_size='x-small') 41 | self.live_plotter = LivePlotter(alpha=0.5, window_name="Location Drawer") 42 | self.live_plotter.ax.set_aspect("equal") 43 | 44 | self.robot_name = robot_name 45 | 46 | self.robot_position_topic = '/data_drawer/robot_pose' 47 | 48 | self.robot_position_sub = rospy.Subscriber(self.robot_position_topic, Point, self.add_robot_pose) 49 | 50 | self.subscribers = dict() 51 | 52 | for position_link in position_links: 53 | self.subscribers[position_link] = rospy.Subscriber(position_link, Odometry, 54 | self.create_position_subscriber_func(position_link)) 55 | 56 | def create_position_subscriber_func(self, name): 57 | """ 58 | Helper factory function to create an odometry and live plotter data adder 59 | @param name: Name of the topic name 60 | @return: return the custom pose addition software 61 | """ 62 | 63 | def add_pose(msg): 64 | # type: (Odometry) -> None 65 | """ 66 | Extracts the position x, y values from the Odometry msg and adds it to the live plotter 67 | @param msg: the Odometry pose to process 68 | """ 69 | 70 | x = msg.pose.pose.position.x 71 | y = msg.pose.pose.position.y 72 | 73 | self.live_plotter.add_data_point(name, x, y) 74 | 75 | return add_pose 76 | 77 | def add_robot_pose(self, msg): 78 | """ 79 | Deprecated 80 | Added robot position 81 | @param msg: the Odometry pose 82 | """ 83 | self.live_plotter.add_data_point(self.robot_name, msg.x, msg.y) 84 | 85 | def run(self): 86 | """ 87 | Run the plotter 88 | """ 89 | self.live_plotter.show() 90 | 91 | 92 | if __name__ == "__main__": 93 | rospy.init_node("location_drawer_node") 94 | 95 | myargv = rospy.myargv(argv=sys.argv)[1:] 96 | 97 | if len(myargv) == 0: 98 | myargv = None 99 | 100 | data_plotter = PositionPlotter(position_links=myargv) 101 | data_plotter.run() 102 | rospy.spin() 103 | -------------------------------------------------------------------------------- /src/range_drawer.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding=utf-8 3 | 4 | import matplotlib 5 | 6 | matplotlib.use('Qt5Agg') 7 | 8 | import rospy 9 | from gtec_msgs.msg import Ranging 10 | from live_plotter import LivePlotter 11 | import sys 12 | 13 | 14 | class RangePlotter(object): 15 | """ 16 | A class to help plot the UWB range data 17 | """ 18 | 19 | def __init__(self, tags=None, n=-1): 20 | """ 21 | Setups the RangePLotter class 22 | @param tags: The tags to plot in the range plotter. 
If it is None, plot everything 23 | @param n: The n latest range measurements 24 | """ 25 | toa_ranging = '/gtec/toa/ranging' 26 | 27 | # self.live_plotter = LivePlotter(window_name="Range Drawer", lengend_loc="upper left", font_size='xx-small') 28 | self.live_plotter = LivePlotter(window_name="Range Drawer") 29 | 30 | self.n = n 31 | 32 | if tags is None: 33 | self.tags = None 34 | else: 35 | self.tags = set(map(int, tags)) 36 | 37 | ranging_sub = rospy.Subscriber(toa_ranging, Ranging, callback=self.add_ranging) 38 | 39 | def add_ranging(self, msg): 40 | # type: (Ranging) -> None 41 | """ 42 | Adds the range data, if it is part of the accepted tag ids, to the live plotter 43 | @param msg: the Ranging msg 44 | """ 45 | 46 | anchor_id = msg.anchorId 47 | tag_id = msg.tagId 48 | 49 | if self.tags is None or tag_id in self.tags: 50 | label = "tag{}_anchor{}".format(tag_id, anchor_id) 51 | 52 | time = rospy.Time.now().to_sec() 53 | 54 | self.live_plotter.add_data_point(label, time, msg.range) 55 | 56 | def run(self): 57 | """ 58 | Runs the live plotter 59 | """ 60 | self.live_plotter.show() 61 | 62 | 63 | if __name__ == "__main__": 64 | rospy.init_node("data_drawer_node") 65 | 66 | myargv = rospy.myargv(argv=sys.argv)[1:] 67 | 68 | print(myargv) 69 | 70 | if len(myargv) == 0: 71 | myargv = None 72 | 73 | data_plotter = RangePlotter(myargv) 74 | data_plotter.run() 75 | -------------------------------------------------------------------------------- /src/rsme_plotter.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # coding=utf-8 3 | 4 | import matplotlib 5 | 6 | matplotlib.use('Qt5Agg') 7 | 8 | import rospy 9 | from nav_msgs.msg import Odometry 10 | from live_plotter import LivePlotter 11 | import sys 12 | from ukf_uwb_localization import get_time 13 | from tf.transformations import euler_from_quaternion 14 | 15 | class RSMEPlotter(object): 16 | """ 17 | Do not use as it is not finished. 
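
Since the class is unfinished, the following is only a hedged sketch of the RMSE computation it is meant to display: match each "actual" sample to the first "target" sample at or after its timestamp and take the RMSE of the planar position error. The arrays mimic the [t, x, y, ...] rows built by add_actual and add_target; the numbers are synthetic.

import numpy as np

actual = np.array([[0.0, 0.0, 0.0], [1.0, 1.0, 0.1], [2.0, 2.1, 0.2]])
target = np.array([[0.1, 0.0, 0.0], [1.1, 1.0, 0.0], [2.2, 2.0, 0.2]])

idx = np.searchsorted(target[:, 0], actual[:, 0])   # first target at or after each actual time
idx = np.clip(idx, 0, len(target) - 1)              # clamp the last match to the final sample
err = actual[:, 1:3] - target[idx, 1:3]             # x, y error per matched pair
rmse = np.sqrt(np.mean(np.sum(err ** 2, axis=1)))
print(rmse)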
18 | Class is meant to plot the RSME of the robot estimate position in live 19 | """ 20 | 21 | def __init__(self, target, actual): 22 | """ 23 | Setups the RMSE live plotter 24 | @param target: the target Odometry topic 25 | @param actual: the actual Odometry topic 26 | """ 27 | self.live_plotter = LivePlotter(alpha=0.5, window_name="RMSE Drawer") 28 | # self.live_plotter.ax.set_aspect("equal") 29 | 30 | self.subscribers = { 31 | 'actual': [], 32 | 'target': [] 33 | } 34 | 35 | self.actual_position_sub = rospy.Subscriber(actual, Odometry, self.add_actual) 36 | self.target_position_sub = rospy.Subscriber(target, Odometry, self.add_target) 37 | 38 | def get_odometry(self, msg): 39 | """ 40 | Process Odometery topic data 41 | @param msg: the Odometry topic message 42 | @return: A list with the x, y, z, v, theta, theta yaw 43 | """ 44 | px = msg.pose.pose.position.x 45 | py = msg.pose.pose.position.y 46 | pz = msg.pose.pose.position.z 47 | 48 | v = msg.twist.twist.linear.x 49 | theta = euler_from_quaternion(( 50 | msg.pose.pose.orientation.x, 51 | msg.pose.pose.orientation.y, 52 | msg.pose.pose.orientation.z, 53 | msg.pose.pose.orientation.w 54 | ))[2] 55 | 56 | theta_yaw = msg.twist.twist.angular.z 57 | 58 | return [px, py, pz, v, theta, theta_yaw] 59 | 60 | def add_actual(self, msg): 61 | """ 62 | Add the expected position data 63 | @param msg: the Odometry topic data 64 | """ 65 | t = get_time() 66 | 67 | data = self.get_odometry(msg) 68 | 69 | o = [t] 70 | o.extend(data) 71 | 72 | self.subscribers['actual'].append(o) 73 | 74 | def add_target(self, msg): 75 | """ 76 | Add the ground truth odometry data 77 | @param msg: the Odometry topic data 78 | """ 79 | t = get_time() 80 | 81 | data = self.get_odometry(msg) 82 | 83 | o = [t] 84 | o.extend(data) 85 | 86 | self.subscribers['target'].append(o) 87 | 88 | def run(self): 89 | """ 90 | Started live plotter 91 | """ 92 | self.live_plotter.show() 93 | 94 | 95 | if __name__ == "__main__": 96 | rospy.init_node("rsme_drawer_node") 97 | 98 | myargv = rospy.myargv(argv=sys.argv)[1:] 99 | 100 | if len(myargv) != 2: 101 | raise AttributeError("RSME need two arguments: actual target") 102 | 103 | data_plotter = RSMEPlotter(myargv[0], myargv[1]) 104 | data_plotter.run() 105 | rospy.spin() 106 | -------------------------------------------------------------------------------- /src/tag_ids.json: -------------------------------------------------------------------------------- 1 | { 2 | "/": { 3 | "right_tag": 0, 4 | "left_tag": 1, 5 | "anchor": 10 6 | }, 7 | "/Jackal1/": { 8 | "right_tag": 0, 9 | "left_tag": 1, 10 | "anchor": 10 11 | }, 12 | "/Jackal2/": { 13 | "right_tag": 2, 14 | "left_tag": 3, 15 | "anchor": 11 16 | } 17 | } -------------------------------------------------------------------------------- /src/ukf/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | -------------------------------------------------------------------------------- /src/ukf/datapoint.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | class DataType(object): 3 | """ 4 | A Enum type class meant to represent the sensor id 5 | """ 6 | LIDAR = 1 7 | ODOMETRY = 2 8 | UWB = 3 9 | IMU = 4 10 | GROUND_TRUTH = 0 11 | 12 | 13 | class DataPoint(object): 14 | """ 15 | A class meant to represent a data point to pass into the position estimation system 16 | """ 17 | 18 | def __init__(self, data_type, measurement_data, timestamp, extra=None): 19 | """ 20 | Setups the 
DataPoint object 21 | @param data_type: the sensor DataType id 22 | @param measurement_data: the primary sensor data, ie.e for UWB the range measurement 23 | @param timestamp: the timestamp that the datapoint arrived 24 | @param extra: a variable for extra data to use, i.e. the anchor position for UWB sensors 25 | """ 26 | self.extra = extra 27 | self.data_type = data_type 28 | self.measurement_data = measurement_data 29 | self.timestamp = timestamp 30 | 31 | def __repr__(self): 32 | """ 33 | Generates a user readable string representation of the object 34 | @return: 35 | """ 36 | return str(self.data_type) + ":" + str(self.timestamp) + ": " + str(self.measurement_data) 37 | -------------------------------------------------------------------------------- /src/ukf/fusion_ukf.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import numpy as np 3 | 4 | from ukf.measurement_predictor import MeasurementPredictor 5 | from ukf.state_predictor import StatePredictor 6 | from ukf.state_updater import StateUpdater 7 | 8 | 9 | class FusionUKF(object): 10 | """ 11 | A class that implements an Unscented Kalman model following a CTRV motion model 12 | """ 13 | 14 | def __init__(self, sensor_std, speed_noise_std=.9, yaw_rate_noise_std=.6, alpha=1, beta=0, k=None): 15 | """ 16 | Setups the UKF constants such as the sigma point weights 17 | @param sensor_std: a dictionary with the sensor noise values. Each point in the dictorary is defined using its 18 | DataType id as its key, the number values the state contains (nz) and then a list of all the sensor noise 19 | values (std). An example is as follows 20 | 21 | >>> sensor_std = { 22 | ... DataType.UWB: { 23 | ... 'std': [1], 24 | ... 'nz': 1 25 | ... }, 26 | ... DataType.ODOMETRY: { 27 | ... 'std': [1, 1, 1, 1, 1, 1], 28 | ... 'nz': 6 29 | ... }, 30 | ... } 31 | 32 | @param speed_noise_std: the acceleration noise parameter 33 | @param yaw_rate_noise_std: the yaw rate noise parameter 34 | @param alpha: a parameter that influences the weight distribution for the sigma points. Acts more like a scale 35 | to the weights 36 | @param beta: a different parameter that influences the weight distribution for the sigma points. Acts more like 37 | a bias/shift to the weights 38 | @param k: a parameter that influences the weight distribution for the sigma points. Acts like the x values of a 39 | function. 
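
A short sketch of feeding this filter, assuming the ukf package is on the Python path; the anchor position, tag offset and timestamps below are illustrative only. The first DataPoint must be ODOMETRY so update() can initialize the state, and timestamps are in nanoseconds because process() divides dt by 1e9:

import numpy as np
from ukf.datapoint import DataPoint, DataType
from ukf.fusion_ukf import FusionUKF

sensor_std = {
    DataType.UWB: {'std': [0.15], 'nz': 1},
    DataType.ODOMETRY: {'std': [1, 1, 1, 1, 1, 1], 'nz': 6},
}
ukf = FusionUKF(sensor_std)

t0 = 0
odom = DataPoint(DataType.ODOMETRY, np.array([0., 0., 0., 0.2, 0., 0.]), t0)
uwb = DataPoint(DataType.UWB, 3.1, t0 + int(0.05e9),
                extra={'anchor': np.array([3., 1., 0.5]),
                       'sensor_offset': np.array([0., 0.162, 0.184])})

ukf.update(odom)   # first call only initializes x and P from the odometry state
ukf.update(uwb)    # predict + update with the range measurement
print(ukf.x)       # [x, y, z, v, yaw, yaw_rate]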
By default it is defined as 3 - augmented state vector size 40 | """ 41 | # ODOMETRY: beta=0.3 42 | # UWB: beta=0.3 43 | 44 | self.initialized = False 45 | self.timestamp = None 46 | 47 | # Number of total states X, Y, Z, velocity, yaw, yaw rate 48 | self.NX = 6 49 | 50 | # Settings values ----------------------------------- 51 | self.N_AUGMENTED = self.NX + 2 52 | 53 | if k is None: 54 | self.K = 3 - self.N_AUGMENTED 55 | else: 56 | self.K = k 57 | 58 | self.N_SIGMA = self.N_AUGMENTED * 2 + 1 59 | self.ALPHA = alpha 60 | self.LAMBDA = (self.ALPHA ** 2) * (self.NX + self.K) - self.NX 61 | self.SCALE = np.sqrt(self.LAMBDA + self.N_AUGMENTED) 62 | self.W = 0.5 / (self.LAMBDA + self.N_AUGMENTED) 63 | self.W0_M = self.LAMBDA / (self.LAMBDA + self.N_AUGMENTED) 64 | self.W0_C = self.LAMBDA / (self.LAMBDA + self.N_AUGMENTED) + (1 - alpha ** 2 + beta) 65 | 66 | self.WEIGHTS_M = np.full(self.N_SIGMA, self.W) 67 | self.WEIGHTS_M[0] = self.W0_M 68 | 69 | self.WEIGHTS_C = np.full(self.N_SIGMA, self.W) 70 | self.WEIGHTS_C[0] = self.W0_C 71 | # ----------------------------------- 72 | 73 | # Uncertainty Settings ----------------------------------- 74 | self.SPEED_NOISE_STD = speed_noise_std 75 | self.YAW_RATE_NOISE_STD = yaw_rate_noise_std 76 | 77 | self.SPEED_NOISE_VAR = self.SPEED_NOISE_STD ** 2 78 | self.YAW_RATE_NOISE_VAR = self.YAW_RATE_NOISE_STD ** 2 79 | # ----------------------------------- 80 | 81 | # Measurement Uncertainty Settings ----------------------------------- 82 | self.sensor_std = sensor_std 83 | 84 | # self.UWB_RANGE_NOISE = 0.257 # Meters 85 | # self.UWB_RANGE_NOISE = 0.15 # Meters 86 | # self.UWB_RANGE_VAR = self.UWB_RANGE_NOISE ** 2 87 | # ----------------------------------- 88 | 89 | self.x = np.zeros(self.NX) 90 | self.P = np.eye(self.NX) 91 | self.nis = 0 92 | 93 | self.state_predictor = StatePredictor(self.NX, self.N_SIGMA, self.N_AUGMENTED, self.SPEED_NOISE_VAR, 94 | self.YAW_RATE_NOISE_VAR, self.SCALE, self.WEIGHTS_M, self.WEIGHTS_C) 95 | 96 | self.measurement_predictor = MeasurementPredictor(sensor_std, self.N_SIGMA, self.WEIGHTS_M, self.WEIGHTS_C) 97 | 98 | self.state_updater = StateUpdater(self.NX, self.N_SIGMA, self.WEIGHTS_M, self.WEIGHTS_C) 99 | 100 | def initialize(self, x, initial_p, timestamp): 101 | """ 102 | Initializes the UKF's initial state and covariance vectors 103 | @param x: The initial state vector 104 | @param initial_p: the initial covariance vector 105 | @param timestamp: the initial time 106 | """ 107 | self.x[:x.size] = x 108 | self.P = initial_p 109 | self.initialized = True 110 | self.timestamp = timestamp 111 | 112 | def update(self, data): 113 | """ 114 | Process the measurement data into the Kalman Filter. If it is the first datapoint and the UKF has not 115 | initalized then the datapoint must be an odometry measurement. 116 | @param data: the measurement data in the form DataPoint. 117 | """ 118 | if self.initialized: 119 | self.process(data) 120 | else: 121 | self.initialize(data.measurement_data, np.eye(self.NX), data.timestamp) 122 | 123 | def process(self, data): 124 | """ 125 | Goes through the update and predict step of the Kalman Filter using the measurement data. 
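
The weight bookkeeping above, pulled out so the numbers can be checked by hand (defaults alpha=1, beta=0, k=3-N_AUGMENTED assumed). The mean weights sum to one, with a negative weight on the central sigma point:

import numpy as np

NX = 6
N_AUGMENTED = NX + 2
N_SIGMA = 2 * N_AUGMENTED + 1
alpha, beta = 1.0, 0.0
K = 3 - N_AUGMENTED

LAMBDA = (alpha ** 2) * (NX + K) - NX
SCALE = np.sqrt(LAMBDA + N_AUGMENTED)

W = 0.5 / (LAMBDA + N_AUGMENTED)
W0_M = LAMBDA / (LAMBDA + N_AUGMENTED)

weights_m = np.full(N_SIGMA, W)
weights_m[0] = W0_M

print(LAMBDA, SCALE)    # -5 and sqrt(3): sigma points sit SCALE * L away from the mean
print(weights_m.sum())  # 1.0 (up to float rounding)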
126 | @param data: the measurement data as a DataPoint 127 | """ 128 | dt = (data.timestamp - self.timestamp) / 1e9 # seconds 129 | # dt = (data.timestamp - self.timestamp) # seconds 130 | 131 | if dt < 0: 132 | print("Error went back in time") 133 | elif dt > 1: 134 | print("Dt between time periods is way too large") 135 | 136 | # STATE PREDICTION 137 | # get predicted state and covariance of predicted state, predicted sigma points in state space 138 | self.state_predictor.process(self.x, self.P, dt) 139 | self.x = self.state_predictor.x 140 | self.P = self.state_predictor.P 141 | sigma_x = self.state_predictor.sigma 142 | 143 | # MEASUREMENT PREDICTION 144 | # get predicted measurement, covariance of predicted measurement, predicted sigma points in measurement space 145 | self.measurement_predictor.process(sigma_x, data) 146 | predicted_z = self.measurement_predictor.z 147 | S = self.measurement_predictor.S 148 | sigma_z = self.measurement_predictor.sigma_z 149 | 150 | # STATE UPDATE 151 | # updated the state and covariance of state... also get the nis 152 | self.state_updater.process(self.x, predicted_z, data.measurement_data, S, self.P, sigma_x, sigma_z, 153 | data.data_type) 154 | self.x = self.state_updater.x 155 | self.P = self.state_updater.P 156 | self.nis = self.state_updater.nis 157 | 158 | self.timestamp = data.timestamp 159 | -------------------------------------------------------------------------------- /src/ukf/measurement_predictor.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import numpy as np 3 | 4 | from ukf.datapoint import DataType 5 | from ukf.state import UKFState 6 | from ukf.util import angle_diff 7 | 8 | 9 | class MeasurementPredictor(object): 10 | """ 11 | Class that helps calculating the estimated measurement vector 12 | """ 13 | 14 | def __init__(self, sensor_std, N_SIGMA, WEIGHTS_M, WEIGHTS_C): 15 | """ 16 | Setups MeasurementPredictor. Creates the R matrices given the sensor_std parameter as a diagonal matrix where 17 | the values are squared 18 | @param sensor_std: a dictionary with the sensor noise values. Each point in the dictorary is defined using its 19 | DataType id as its key, the number values the state contains (nz) and then a list of all the sensor noise 20 | values (std). An example is as follows 21 | 22 | >>> sensor_std = { 23 | ... DataType.UWB: { 24 | ... 'std': [1], 25 | ... 'nz': 1 26 | ... }, 27 | ... DataType.ODOMETRY: { 28 | ... 'std': [1, 1, 1, 1, 1, 1], 29 | ... 'nz': 6 30 | ... }, 31 | ... 
} 32 | 33 | @param N_SIGMA: the number of sigma points 34 | @param WEIGHTS_M: the weight distribution for the vector state calculations 35 | @param WEIGHTS_C: the weight distribution for the covariance matrix calculations 36 | """ 37 | self.WEIGHTS_C = WEIGHTS_C 38 | self.sensor_std = sensor_std 39 | 40 | self.compute_R_matrix() 41 | 42 | self.WEIGHTS_M = WEIGHTS_M 43 | self.N_SIGMA = N_SIGMA 44 | 45 | self.z = None 46 | self.S = None 47 | self.sigma_z = None 48 | self.current_type = None 49 | 50 | self.R = None 51 | self.nz = None 52 | self.anchor_pos = None 53 | self.sensor_offset = None 54 | 55 | def initialize(self, data): 56 | """ 57 | Initializes the parameters for the current sensor data type, such as the correct R matrices and the extra data 58 | information for the UWB sensors 59 | @param data: the current sensor measurement 60 | """ 61 | sensor_type = data.data_type 62 | 63 | self.current_type = sensor_type 64 | 65 | self.R = self.sensor_std[sensor_type]["R"] 66 | self.nz = self.sensor_std[sensor_type]['nz'] 67 | 68 | if sensor_type == DataType.UWB: 69 | self.anchor_pos = data.extra['anchor'] 70 | self.sensor_offset = data.extra['sensor_offset'] 71 | 72 | def rotation_matrix(self, angle): 73 | """ 74 | Defines a 3D rotation matrix rotating the x and y by a certain angle/ angles. 75 | @param angle: the angle/angles to rotate by 76 | @return: a 3D rotation matrix as a numpy array of shape (3, 3, angle.size) 77 | """ 78 | output = np.zeros((3, 3, angle.size)) 79 | 80 | s = np.sin(angle) 81 | c = np.cos(angle) 82 | 83 | output[0, 0, :] = c 84 | output[1, 0, :] = s 85 | output[0, 1, :] = -s 86 | output[1, 1, :] = c 87 | output[2, 2, :] = 1 88 | 89 | return output 90 | 91 | def compute_sigma_z(self, sigma_x): 92 | """ 93 | Calculates, for each of the sigma points, each of their associated estimated measurement value. 
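
What the UWB branch of compute_sigma_z does with the tag offset, shown on toy yaw values: one 3x3 rotation per sigma point, contracted against the body-frame offset with einsum. The 0.162/0.184 offset is the default left-tag offset used elsewhere in this package:

import numpy as np

yaws = np.array([0.0, np.pi / 2, np.pi])       # one yaw per sigma point (toy values)
offset = np.array([0.0, 0.162, 0.184])         # body-frame tag offset from base_link

rot = np.zeros((3, 3, yaws.size))
rot[0, 0, :] = np.cos(yaws)
rot[1, 0, :] = np.sin(yaws)
rot[0, 1, :] = -np.sin(yaws)
rot[1, 1, :] = np.cos(yaws)
rot[2, 2, :] = 1

offsets = np.einsum('ijn,j->in', rot, offset)  # shape (3, n): offset rotated per column
print(np.round(offsets, 3))
# yaw=0 keeps (0, 0.162, 0.184); yaw=pi/2 rotates it to (-0.162, 0, 0.184)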
94 | @param sigma_x: the predicted state sigma points 95 | @return: a list of the calculated measurement values 96 | """ 97 | sigma = np.zeros((self.nz, self.N_SIGMA)) 98 | 99 | if self.current_type == DataType.LIDAR: 100 | sigma[UKFState.X] = sigma_x[UKFState.X] # px 101 | sigma[UKFState.Y] = sigma_x[UKFState.Y] # py 102 | elif self.current_type == DataType.UWB: 103 | sensor_pose = sigma_x[:UKFState.Z + 1] 104 | 105 | if self.sensor_offset is not None: 106 | angles = sigma_x[UKFState.YAW] 107 | rot = self.rotation_matrix(angles) 108 | 109 | offsets = np.einsum('ijn,j->in', rot, self.sensor_offset) 110 | 111 | sensor_pose = sensor_pose + offsets 112 | 113 | distances = np.linalg.norm(sensor_pose - self.anchor_pos.reshape((-1, 1)), axis=0) 114 | sigma[0] = distances 115 | elif self.current_type == DataType.ODOMETRY: 116 | sigma[UKFState.X] = sigma_x[UKFState.X] # px 117 | sigma[UKFState.Y] = sigma_x[UKFState.Y] # py 118 | sigma[UKFState.Z] = sigma_x[UKFState.Z] # pz 119 | sigma[UKFState.V] = sigma_x[UKFState.V] # v 120 | sigma[UKFState.YAW] = sigma_x[UKFState.YAW] # theta 121 | sigma[UKFState.YAW_RATE] = sigma_x[UKFState.YAW_RATE] # theta_yaw 122 | elif self.current_type == DataType.IMU: 123 | sigma[0] = sigma_x[UKFState.YAW] # theta 124 | 125 | return sigma 126 | 127 | def compute_z(self, sigma): 128 | """ 129 | Calculates the predicted sensor measurement by taking the weighted average of the sigma point's measurement 130 | estimations 131 | @param sigma: the sigma point's associated measurement estimation 132 | @return: the predicted sensor measurement 133 | """ 134 | return np.dot(sigma, self.WEIGHTS_M) 135 | 136 | def compute_S(self, sigma, z): 137 | """ 138 | Computes the sensor noise covariance matrix. 139 | @param sigma: the sigma point measurement estimates 140 | @param z: the predicted sensor measurement 141 | @return: the sensor noise covariance matrix 142 | """ 143 | sub = np.subtract(sigma.T, z).T 144 | 145 | if self.current_type == DataType.ODOMETRY: 146 | sub[UKFState.YAW] = angle_diff(sigma[UKFState.YAW], z[UKFState.YAW]) 147 | elif self.current_type == DataType.IMU: 148 | sub = angle_diff(sigma, z) 149 | 150 | return (np.matmul(self.WEIGHTS_C * sub, sub.T)) + self.R 151 | 152 | def process(self, sigma_x, data): 153 | """ 154 | Calculates the estimated sensor measurement and sensor covariance based on the predicted sigma points 155 | @param sigma_x: the predicted sigma point matrix 156 | @param data: the sensor's measurement data 157 | """ 158 | self.initialize(data) 159 | self.sigma_z = self.compute_sigma_z(sigma_x) 160 | self.z = self.compute_z(self.sigma_z) 161 | self.S = self.compute_S(self.sigma_z, self.z) 162 | 163 | def compute_R_matrix(self): 164 | """ 165 | Creates and caches the sensor noise matrix R given the sensor_std[DataType]['std'] array. 
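
compute_z and compute_S on a tiny hand-made example: a one-dimensional range measurement, three sigma points and uniform weights (the real weights come from FusionUKF); R is built the same way as in compute_R_matrix:

import numpy as np

sigma_z = np.array([[2.9, 3.0, 3.1]])      # (nz=1, N_SIGMA=3), toy values
weights_m = np.array([1 / 3., 1 / 3., 1 / 3.])
weights_c = weights_m
R = np.diag(np.power([0.15], 2))           # same construction as compute_R_matrix

z = np.dot(sigma_z, weights_m)             # predicted measurement, here 3.0
sub = np.subtract(sigma_z.T, z).T          # deviation of each sigma point
S = np.matmul(weights_c * sub, sub.T) + R  # innovation covariance

print(z, S)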
166 | """ 167 | for value in self.sensor_std: 168 | self.sensor_std[value]["R"] = np.diag(np.power(self.sensor_std[value]['std'], 2)) 169 | -------------------------------------------------------------------------------- /src/ukf/state.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | class UKFState(object): 3 | """ 4 | A Enum like object meant to represent what each index of the state vector represents 5 | """ 6 | X = 0 7 | Y = 1 8 | Z = 2 9 | V = 3 10 | YAW = 4 11 | YAW_RATE = 5 12 | LIN_ACCEL = 6 13 | YAW_ACCEL = 7 14 | -------------------------------------------------------------------------------- /src/ukf/state_predictor.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import numpy as np 3 | 4 | from ukf.state import UKFState 5 | from ukf.util import angle_diff 6 | 7 | 8 | class StatePredictor(object): 9 | """ 10 | A class that handles the state prediction step of the UKF following a CTRV model 11 | """ 12 | 13 | def __init__(self, NX, N_SIGMA, N_AUGMENTED, VAR_SPEED_NOISE, VAR_YAW_RATE_NOISE, SCALE, WEIGHTS_M, WEIGHTS_C): 14 | """ 15 | Setups the State Predictor class 16 | @param NX: the number of states, 6 for the CTRV model. x, y, z, v, theta, theta rate 17 | @param N_SIGMA: the number of sigma points to generate 18 | @param N_AUGMENTED: the number of augmented states, 8 for the CTRV model. x, y, z, v, theta, theta rate, 19 | linear acceleration, yaw rate acceleration 20 | @param VAR_SPEED_NOISE: the expect linear acceleration defined as a Gaussian distribution 21 | @param VAR_YAW_RATE_NOISE: the expected angular acceleration defined as a Gaussian distribution 22 | @param SCALE: the sigma point scale 23 | @param WEIGHTS_M: the weights for the state calculation 24 | @param WEIGHTS_C: the weights for the covariance matrix calculation 25 | """ 26 | self.WEIGHTS_M = WEIGHTS_M 27 | self.WEIGHTS_C = WEIGHTS_C 28 | self.NX = NX 29 | self.SCALE = SCALE 30 | self.N_SIGMA = N_SIGMA 31 | self.N_AUGMENTED = N_AUGMENTED 32 | self.VAR_SPEED_NOISE = VAR_SPEED_NOISE 33 | self.VAR_YAW_RATE_NOISE = VAR_YAW_RATE_NOISE 34 | 35 | self.sigma = np.zeros((NX, N_SIGMA)) 36 | self.x = np.zeros(NX) 37 | self.P = np.zeros((NX, N_SIGMA)) 38 | 39 | self.YAW_RATE_THRESHOLD = 0.001 40 | 41 | def compute_augmented_sigma(self, x, P): 42 | """ 43 | Creates the augmented sigma point matrix 44 | @param x: the current state vector 45 | @param P: the current covariance matrix 46 | @return: a matrix with the size N_AUGMENTED x N_SIGMA_POINTS 47 | """ 48 | augmented_x = np.zeros(self.N_AUGMENTED) 49 | augmented_P = np.zeros((self.N_AUGMENTED, self.N_AUGMENTED)) 50 | 51 | augmented_x[:self.NX] = x 52 | augmented_P[:self.NX, :self.NX] = P 53 | augmented_P[self.NX, self.NX] = self.VAR_SPEED_NOISE 54 | augmented_P[self.NX + 1, self.NX + 1] = self.VAR_YAW_RATE_NOISE 55 | 56 | L = np.linalg.cholesky(augmented_P) 57 | augmented_sigma = np.repeat(augmented_x[None], self.N_SIGMA, axis=0).T 58 | 59 | scaled_L = self.SCALE * L 60 | 61 | augmented_sigma[:, 1:self.N_AUGMENTED + 1] += scaled_L 62 | augmented_sigma[:, self.N_AUGMENTED + 1:] -= scaled_L 63 | 64 | return augmented_sigma 65 | 66 | def predict_sigma(self, augmented_sigma, dt): 67 | """ 68 | Coverts the current augmented sigma matrix to the next predicted state given dt time. 
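
The augmented sigma-point construction from compute_augmented_sigma, run on an identity covariance so the column layout is easy to see; SCALE = sqrt(3) corresponds to the default alpha and k, and the state values are arbitrary:

import numpy as np

NX, N_AUGMENTED = 6, 8
N_SIGMA = 2 * N_AUGMENTED + 1
SCALE = np.sqrt(3.0)
VAR_SPEED_NOISE, VAR_YAW_RATE_NOISE = 0.9 ** 2, 0.6 ** 2

x = np.array([1.0, 2.0, 0.0, 0.2, 0.1, 0.0])
P = np.eye(NX)

aug_x = np.zeros(N_AUGMENTED)
aug_P = np.zeros((N_AUGMENTED, N_AUGMENTED))
aug_x[:NX] = x
aug_P[:NX, :NX] = P
aug_P[NX, NX] = VAR_SPEED_NOISE
aug_P[NX + 1, NX + 1] = VAR_YAW_RATE_NOISE

L = np.linalg.cholesky(aug_P)
sigma = np.repeat(aug_x[None], N_SIGMA, axis=0).T
sigma[:, 1:N_AUGMENTED + 1] += SCALE * L       # columns 1..8 add the scaled factor
sigma[:, N_AUGMENTED + 1:] -= SCALE * L        # columns 9..16 subtract it

print(sigma.shape)   # (8, 17): column 0 is the mean
print(sigma[0, :3])  # [1.0, 1.0 + sqrt(3), 1.0] -- only column 1 perturbs x here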
69 | @param augmented_sigma: the current sigma point state matrix 70 | @param dt: the time that has elapsed 71 | @return: the sigma point matrix given dt time has passed 72 | """ 73 | predicted_sigma = np.zeros((self.NX, self.N_SIGMA)) 74 | 75 | px = augmented_sigma[UKFState.X] 76 | py = augmented_sigma[UKFState.Y] 77 | pz = augmented_sigma[UKFState.Z] 78 | speed = augmented_sigma[UKFState.V] 79 | yaw = augmented_sigma[UKFState.YAW] 80 | yaw_rate = augmented_sigma[UKFState.YAW_RATE] 81 | speed_noise = augmented_sigma[UKFState.LIN_ACCEL] 82 | yaw_rate_noise = augmented_sigma[UKFState.YAW_ACCEL] 83 | 84 | # PREDICT NEXT STEP USING CTRV Model 85 | 86 | cos_yaw = np.cos(yaw) 87 | sin_yaw = np.sin(yaw) 88 | dt_2 = dt * dt 89 | 90 | # Acceleration noise 91 | p_noise = 0.5 * speed_noise * dt_2 92 | y_noise = 0.5 * yaw_rate_noise * dt_2 93 | 94 | # Velocity change 95 | d_yaw = yaw_rate * dt 96 | d_speed = speed * dt 97 | 98 | # Predicted speed = constant speed + acceleration noise 99 | p_speed = speed + speed_noise * dt 100 | 101 | # Predicted yaw 102 | p_yaw = yaw + d_yaw + y_noise 103 | 104 | # Predicted yaw rate 105 | p_yaw_rate = yaw_rate + yaw_rate_noise * dt 106 | 107 | mask = abs(yaw_rate) <= self.YAW_RATE_THRESHOLD 108 | mask_n = np.logical_not(mask) 109 | 110 | p_px = np.empty(self.N_SIGMA) 111 | p_py = np.empty(self.N_SIGMA) 112 | 113 | p_px[mask] = px[mask] + d_speed[mask] * cos_yaw[mask] + p_noise[mask] * cos_yaw[mask] 114 | p_py[mask] = py[mask] + d_speed[mask] * sin_yaw[mask] + p_noise[mask] * sin_yaw[mask] 115 | 116 | k = speed[mask_n] / yaw_rate[mask_n] 117 | theta = yaw[mask_n] + d_yaw[mask_n] 118 | p_px[mask_n] = px[mask_n] + k * (np.sin(theta) - sin_yaw[mask_n]) + p_noise[mask_n] * cos_yaw[mask_n] 119 | p_py[mask_n] = py[mask_n] + k * (cos_yaw[mask_n] - np.cos(theta)) + p_noise[mask_n] * sin_yaw[mask_n] 120 | 121 | predicted_sigma[UKFState.X] = p_px 122 | predicted_sigma[UKFState.Y] = p_py 123 | predicted_sigma[UKFState.Z] = pz 124 | predicted_sigma[UKFState.V] = p_speed 125 | predicted_sigma[UKFState.YAW] = p_yaw 126 | predicted_sigma[UKFState.YAW_RATE] = p_yaw_rate 127 | 128 | # ------------------ 129 | 130 | return predicted_sigma 131 | 132 | def predict_x(self, predicted_sigma): 133 | """ 134 | Calculates the predicted state vector using the predicted sigma points and their associated weight distribution 135 | @param predicted_sigma: the predicted state sigma point matrix 136 | @return: the predicted state vector in the shape N_X 137 | """ 138 | return np.dot(predicted_sigma, self.WEIGHTS_M) 139 | 140 | def predict_P(self, predicted_sigma, predicted_x): 141 | """ 142 | Calculates the predicted covariance matrix using the predicted sigma points and their associated weight 143 | distribution. 
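
The two CTRV branches of predict_sigma, reduced to a single state with the process noise dropped: straight-line motion when the yaw rate is near zero, otherwise motion along a circular arc:

import numpy as np

def ctrv_step(px, py, v, yaw, yaw_rate, dt, threshold=0.001):
    if abs(yaw_rate) <= threshold:
        # Near-zero yaw rate: drive straight along the current heading
        return px + v * dt * np.cos(yaw), py + v * dt * np.sin(yaw)
    # Otherwise follow a circular arc of radius v / yaw_rate
    k = v / yaw_rate
    theta = yaw + yaw_rate * dt
    return px + k * (np.sin(theta) - np.sin(yaw)), py + k * (np.cos(yaw) - np.cos(theta))

print(ctrv_step(0.0, 0.0, 1.0, 0.0, 0.0, 1.0))   # (1.0, 0.0): straight ahead
print(ctrv_step(0.0, 0.0, 1.0, 0.0, 0.5, 1.0))   # (~0.96, ~0.24): curves to the left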
144 | @param predicted_sigma: the predicted state sigma point matrix 145 | @param predicted_x: the predicted state vector 146 | @return: returns the predicted covariance state matrix in the shape N_X x N_X 147 | """ 148 | sub = np.subtract(predicted_sigma.T, predicted_x).T 149 | 150 | sub[UKFState.YAW] = angle_diff(predicted_sigma[UKFState.YAW], predicted_x[UKFState.YAW]) 151 | 152 | return np.matmul(self.WEIGHTS_C * sub, sub.T) 153 | 154 | def process(self, x, P, dt): 155 | """ 156 | Processes the current state and estimates its new positions in dt time 157 | @param x: the current state vector 158 | @param P: the current covariance matrix 159 | @param dt: the elapsed time 160 | """ 161 | augmented_sigma = self.compute_augmented_sigma(x, P) 162 | self.sigma = self.predict_sigma(augmented_sigma, dt) 163 | self.x = self.predict_x(self.sigma) 164 | 165 | self.P = self.predict_P(self.sigma, self.x) 166 | -------------------------------------------------------------------------------- /src/ukf/state_updater.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import numpy as np 3 | 4 | from ukf.datapoint import DataType 5 | from ukf.state import UKFState 6 | from ukf.util import normalize, angle_diff 7 | 8 | 9 | class StateUpdater(object): 10 | """ 11 | Class that updates the current state vector using the predicted state and the actual measurement sensor value 12 | """ 13 | 14 | def __init__(self, NX, N_SIGMA, WEIGHTS_M, WEIGHTS_C): 15 | """ 16 | Setups the State Updater 17 | @param NX: the number of values the state vector has, 6 for a CTRV model. x, y, z, v, theta, theta rate 18 | @param N_SIGMA: the number of sigma points 19 | @param WEIGHTS_M: the weight distribution for the state calculation 20 | @param WEIGHTS_C: the weight distribution for the covariance matrix calculation 21 | """ 22 | self.WEIGHTS_C = WEIGHTS_C 23 | self.N_SIGMA = N_SIGMA 24 | self.WEIGHTS_M = WEIGHTS_M 25 | self.NX = NX 26 | self.x = None 27 | self.P = None 28 | self.nis = None 29 | 30 | def compute_Tc(self, predicted_x, predicted_z, sigma_x, sigma_z, data_type): 31 | """ 32 | Calculates the cross-correlation between sigma points in state space and measurement space 33 | @param predicted_x: the predicted state vector 34 | @param predicted_z: the predicted measurement vector 35 | @param sigma_x: the predicted sigma points 36 | @param sigma_z: the predicted measurement vector 37 | @param data_type: the datatype of the current measurement 38 | @return: the cross-correlation matrix 39 | """ 40 | dx = np.subtract(sigma_x.T, predicted_x).T 41 | 42 | # normalize(dx, UKFState.YAW) 43 | dx[UKFState.YAW] = angle_diff(sigma_x[UKFState.YAW], predicted_x[UKFState.YAW]) 44 | 45 | dz = np.subtract(sigma_z.T, predicted_z) 46 | 47 | if data_type == DataType.ODOMETRY: 48 | dz[:, UKFState.YAW] = angle_diff(sigma_z[UKFState.YAW], predicted_z[UKFState.YAW]) 49 | elif data_type == DataType.IMU: 50 | dz[:, 0] = angle_diff(sigma_z[0], predicted_z[0]) 51 | 52 | # normalize(dz, UKFState.YAW) 53 | 54 | return np.matmul(self.WEIGHTS_C * dx, dz) 55 | 56 | def update(self, z, S, Tc, predicted_z, predicted_x, predicted_P, data_type): 57 | """ 58 | Updates the current position estimate given the actual sensor measurement 59 | @param z: the actual sensor value 60 | @param S: the predicted measurement covariance 61 | @param Tc: the cross-correlation between sigma points in state space and measurement space 62 | @param predicted_z: the predicted measurement vector 63 | @param predicted_x: the 
predicted state vector 64 | @param predicted_P: the predicted covariance matrix 65 | @param data_type: the current type of sensor 66 | """ 67 | Si = np.linalg.inv(S) 68 | K = np.matmul(Tc, Si) 69 | 70 | dz = z - predicted_z 71 | 72 | if data_type == DataType.ODOMETRY: 73 | dz[UKFState.YAW] = angle_diff(np.atleast_1d(z[UKFState.YAW]), np.atleast_1d(predicted_z[UKFState.YAW])) 74 | elif data_type == DataType.IMU: 75 | dz[0] = angle_diff(np.atleast_1d(z[0]), np.atleast_1d(predicted_z[0])) 76 | 77 | # print(z[UKFState.YAW], predicted_z[UKFState.YAW], dz[UKFState.YAW]) 78 | 79 | # Dm = np.sqrt(np.matmul(np.matmul(dz, Si), dz)) 80 | 81 | self.x = predicted_x + np.matmul(K, dz) 82 | self.P = predicted_P - np.matmul(K, np.matmul(S, K.transpose())) 83 | self.nis = np.matmul(dz.transpose(), np.matmul(Si, dz)) 84 | 85 | def process(self, predicted_x, predicted_z, z, S, predicted_P, sigma_x, sigma_z, data_type): 86 | """ 87 | Process the current data to update the state vector and covariance matrix 88 | @param predicted_x: the predicted state vector 89 | @param predicted_z: the predicted measurement vector 90 | @param z: the actual sensor value 91 | @param S: the predicted measurement covariance 92 | @param predicted_P: the predicted covariance matrix 93 | @param sigma_x: the predicted state sigma points 94 | @param sigma_z: the predicted covariance matrix 95 | @param data_type: the current type of sensor 96 | """ 97 | Tc = self.compute_Tc(predicted_x, predicted_z, sigma_x, sigma_z, data_type) 98 | self.update(z, S, Tc, predicted_z, predicted_x, predicted_P, data_type) 99 | 100 | normalize(self.x, UKFState.YAW) 101 | -------------------------------------------------------------------------------- /src/ukf/util.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import numpy as np 3 | 4 | 5 | def normalize(d, index): 6 | """ 7 | In place normalizes angle values to be between -pi to pi, at a specific index at array 8 | @param d: The array to inplace normalize the angle values 9 | @param index: the index that the angle values are located 10 | """ 11 | d[index] = (d[index] + np.pi) % (2 * np.pi) - np.pi 12 | 13 | 14 | def angle_diff(start, end): 15 | """ 16 | Calculates the bounded angle difference between the start and end value 17 | @param start: the starting angle, in radians 18 | @param end: the ending angle, in radians 19 | @return: the bounded angle difference between -180 and 180, in radians 20 | """ 21 | diff = (end - start + np.pi) % (2 * np.pi) - np.pi 22 | 23 | diff[diff < -np.pi] += 2 * np.pi 24 | 25 | return diff 26 | 27 | # d[index] %= 2 * np.pi 28 | # mask = np.abs(d[index]) > np.pi 29 | # d[index, mask] -= (np.pi * 2) 30 | -------------------------------------------------------------------------------- /src/ukf_uwb_localization.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python 2 | # coding=utf-8 3 | from __future__ import print_function 4 | 5 | import json 6 | import os 7 | 8 | import numpy as np 9 | import rospkg 10 | import rospy 11 | import tf 12 | from gtec_msgs.msg import Ranging 13 | from nav_msgs.msg import Odometry 14 | from scipy.optimize import least_squares 15 | from tf.transformations import euler_from_quaternion, quaternion_from_euler 16 | from visualization_msgs.msg import MarkerArray 17 | 18 | from ukf.datapoint import DataType, DataPoint 19 | from ukf.fusion_ukf import FusionUKF 20 | 21 | 22 | def get_time(): 23 | return rospy.Time.now().to_nsec() 24 | 25 | 26 | def get_anchors(tags_file="tag_ids.json"): 27 | rospack = rospkg.RosPack() 28 | package_location = rospack.get_path('uwb_localization') 29 | 30 | tags_file = os.path.join(package_location, 'src', tags_file) 31 | 32 | with open(tags_file, 'r') as f: 33 | tag_data = json.load(f) 34 | 35 | anchor_to_robot = dict() 36 | 37 | for key, values in tag_data.items(): 38 | if values['anchor'] not in anchor_to_robot or anchor_to_robot[values['anchor']] == '/': 39 | anchor_to_robot[values['anchor']] = key 40 | 41 | return anchor_to_robot 42 | 43 | 44 | class UKFUWBLocalization(object): 45 | def __init__(self, uwb_std=1, odometry_std=(1, 1, 1, 1, 1, 1), accel_std=1, yaw_accel_std=1, alpha=1, beta=0, 46 | namespace=None, right_tag=0, left_tag=1, x_initial=0, y_initial=0, theta_initial=0): 47 | if namespace is None: 48 | namespace = '/' 49 | 50 | sensor_std = { 51 | DataType.UWB: { 52 | 'std': [uwb_std], 53 | 'nz': 1 54 | }, 55 | DataType.ODOMETRY: { 56 | 'std': odometry_std, 57 | 'nz': 6 58 | } 59 | } 60 | 61 | self.namespace = namespace 62 | self.right_tag = right_tag 63 | self.left_tag = left_tag 64 | self.anchor_to_robot = get_anchors() 65 | 66 | self.ukf = FusionUKF(sensor_std, accel_std, yaw_accel_std, alpha, beta) 67 | 68 | self.anchor_poses = dict() 69 | self.tag_offset = self.retrieve_tag_offsets( 70 | {namespace + "left_tag": left_tag, namespace + "right_tag": right_tag}, namespace=namespace, 71 | right_tag=right_tag, left_tag=left_tag) 72 | 73 | # right: 0 74 | # left: 1 75 | # self.tag_offset = { 76 | # 0:np.array([0, -0.162, 0.184]), 77 | # 1:np.array([0, 0.162, 0.184]) 78 | # } 79 | # print(self.tag_offset) 80 | 81 | anchors = '/gtec/toa/anchors' 82 | toa_ranging = '/gtec/toa/ranging' 83 | 84 | if namespace is None: 85 | publish_odom = '/jackal/uwb/odom' 86 | odometry = '/odometry/filtered' 87 | else: 88 | publish_odom = namespace + 'uwb/odom' 89 | odometry = namespace + 'odometry/filtered' 90 | 91 | anchors_sub = rospy.Subscriber(anchors, MarkerArray, callback=self.add_anchors) 92 | ranging_sub = rospy.Subscriber(toa_ranging, Ranging, callback=self.add_ranging) 93 | odometry = rospy.Subscriber(odometry, Odometry, callback=self.add_odometry) 94 | 95 | self.estimated_pose = rospy.Publisher(publish_odom, Odometry, queue_size=1) 96 | self.odom = Odometry() 97 | 98 | self.sensor_data = [] 99 | 100 | self.initialized = False 101 | self.start_translation = np.array([x_initial, y_initial]) 102 | self.start_rotation = theta_initial 103 | 104 | self.cache_data = [] 105 | 106 | def retrieve_tag_offsets(self, tags, base_link='base_link', namespace=None, right_tag=0, left_tag=1): 107 | transforms = dict() 108 | 109 | listener = tf.TransformListener() 110 | 111 | rate = rospy.Rate(10.0) 112 | 113 | # right: 0 114 | # left: 1 115 | default = { 116 | right_tag: np.array([0, -0.162, 0.184]), 117 | left_tag: np.array([0, 0.162, 0.184]) 118 | } 119 | 120 | if namespace is not None: 121 | 
base_link = namespace + base_link 122 | 123 | for tag in tags: 124 | timeout = 5 125 | 126 | while not rospy.is_shutdown(): 127 | try: 128 | (trans, rot) = listener.lookupTransform(base_link, tag, rospy.Time(0)) 129 | transforms[tags[tag]] = np.array([trans[0], trans[1], trans[2]]) 130 | break 131 | 132 | except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException): 133 | if timeout <= 0: 134 | transforms[tags[tag]] = default[tags[tag]] 135 | break 136 | timeout -= 1 137 | 138 | rate.sleep() 139 | 140 | return transforms 141 | 142 | def add_odometry(self, msg): 143 | t = get_time() 144 | 145 | px = msg.pose.pose.position.x 146 | py = msg.pose.pose.position.y 147 | pz = msg.pose.pose.position.z 148 | 149 | v = msg.twist.twist.linear.x 150 | theta = euler_from_quaternion(( 151 | msg.pose.pose.orientation.x, 152 | msg.pose.pose.orientation.y, 153 | msg.pose.pose.orientation.z, 154 | msg.pose.pose.orientation.w 155 | ))[2] 156 | 157 | theta_yaw = msg.twist.twist.angular.z 158 | 159 | theta_yaw += self.start_rotation 160 | px += self.start_translation[0] 161 | py += self.start_translation[1] 162 | 163 | data = DataPoint(DataType.ODOMETRY, np.array([px, py, pz, v, theta, theta_yaw]), t) 164 | 165 | self.sensor_data.append(data) 166 | 167 | def add_anchors(self, msg): 168 | # type: (MarkerArray) -> None 169 | 170 | for marker in msg.markers: 171 | self.anchor_poses[marker.id] = np.array( 172 | [marker.pose.position.x, marker.pose.position.y, marker.pose.position.z]) 173 | 174 | def is_localized(self, robot_name): 175 | parameter_name = robot_name + "is_localized" 176 | 177 | if rospy.has_param(parameter_name): 178 | return rospy.get_param(parameter_name) 179 | else: 180 | return True 181 | 182 | def add_ranging(self, msg): 183 | # type: (Ranging) -> None 184 | t = get_time() 185 | 186 | if not self.is_localized(self.namespace): 187 | return 188 | 189 | if msg.anchorId in self.anchor_poses: 190 | if msg.anchorId in self.anchor_to_robot: 191 | if not self.is_localized(self.anchor_to_robot[msg.anchorId]): 192 | return 193 | 194 | if msg.tagId in self.tag_offset: 195 | anchor_pose = self.anchor_poses[msg.anchorId] 196 | anchor_distance = msg.range / 1000. 
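                # Ranging.range arrives in millimetres, hence the division by 1000 above
                # to get metres. The `extra` dict attached to this DataPoint carries the
                # anchor's world position and this tag's body-frame offset, which
                # MeasurementPredictor uses to predict the expected range from the state.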
    def intialize(self, x, P):
        # Initialize the UKF with state x and covariance P at the current time.
        t = get_time()

        self.ukf.initialize(x, P, t)
        self.initialized = True

    def process_ukf_data(self):
        # Feed every queued measurement to the UKF, then publish the fused
        # state as an Odometry message.
        for data in self.sensor_data:
            self.ukf.update(data)

        self.clear_data()

        x, y, z, v, yaw, yaw_rate = self.ukf.x

        self.odom.pose.pose.position.x = x
        self.odom.pose.pose.position.y = y
        self.odom.pose.pose.position.z = z
        self.odom.twist.twist.linear.x = v
        self.odom.twist.twist.angular.z = yaw_rate

        angles = quaternion_from_euler(0, 0, yaw)

        self.odom.pose.pose.orientation.x = angles[0]
        self.odom.pose.pose.orientation.y = angles[1]
        self.odom.pose.pose.orientation.z = angles[2]
        self.odom.pose.pose.orientation.w = angles[3]

        self.estimated_pose.publish(self.odom)

    def clear_data(self):
        del self.sensor_data[:]

    def set_initial(self, x_initial, y_initial, theta_initial):
        self.start_translation = np.array([x_initial, y_initial])
        self.start_rotation = theta_initial

    def step(self, initial_P=None):
        # Single non-blocking update: attempt self-initialization from the
        # accumulated UWB ranges until the filter has a state, then process
        # queued measurements.
        if not self.initialized:
            d = np.linalg.norm(self.tag_offset[self.right_tag] - self.tag_offset[self.left_tag])

            self.process_initial_data(self.cache_data, d, initial_P)
        else:
            self.process_ukf_data()

    def run(self, initial_P=None):
        if not self.initialized:
            self.initialize_pose(initial_P)

        rate = rospy.Rate(60)

        while not rospy.is_shutdown():
            self.process_ukf_data()

            rate.sleep()

    def func(self, x, d, distances):
        # Residuals for the initial two-tag multilateration.
        # x = [left_x, left_y, right_x, right_y]; the first residual keeps the
        # tags at their known separation d, the rest are squared-range
        # residuals between a tag and the anchor that measured it.
        x1, y1, x2, y2 = x

        residuals = [
            (x1 - x2) ** 2 + (y1 - y2) ** 2 - d ** 2,
        ]

        for measurement in distances:
            anchor = measurement['anchor']
            tag = measurement['tag']
            dist = measurement['dist']

            z = tag[2]

            if np.all(tag == self.tag_offset[self.left_tag]):
                tx, ty = x1, y1
            else:
                tx, ty = x2, y2

            residuals.append((tx - anchor[0]) ** 2 + (ty - anchor[1]) ** 2 + (z - anchor[2]) ** 2 - dist ** 2)

        return residuals

    def process_initial_data(self, uwb, d, initial_P=None):
        # Collect UWB ranges until there are enough to solve for both tag
        # positions, then derive the initial position (midpoint of the tags)
        # and heading (from the orientation of the tag baseline).
        for s in self.sensor_data:
            if s.data_type == DataType.UWB:
                uwb.append({
                    'anchor': s.extra['anchor'],
                    'tag': s.extra['sensor_offset'],
                    'dist': s.measurement_data
                })

        if len(uwb) > 3:
            res = least_squares(self.func, [0, 0, 0, 0], args=(d, uwb))

            # print(res)

            left = res.x[0:2]
            right = res.x[2:4]

            center = (left + right) / 2
            v_ab = left - right
            theta = np.arccos(v_ab[1] / np.linalg.norm(v_ab))

            print(center, v_ab, theta, np.degrees(theta))

            del self.sensor_data[:]

            self.start_translation = center
            self.start_rotation = theta

            if initial_P is None:
                initial_P = np.identity(6)

            self.intialize(np.array([center[0], center[1], 0, 0, theta, 0]), initial_P)

    def initialize_pose(self, initial_P=None):
        # Try to self-localize from UWB ranges at 1 Hz; after five attempts,
        # fall back to the most recent odometry measurement (or zeros).
        rate = rospy.Rate(1)  # one attempt per second

        d = np.linalg.norm(self.tag_offset[self.right_tag] - self.tag_offset[self.left_tag])

        uwb = []

        i = 0

        while not rospy.is_shutdown() and not self.initialized:
            if i > 4:
                x = np.zeros(6)

                for data in reversed(self.sensor_data):
                    if data.data_type == DataType.ODOMETRY:
                        x = data.measurement_data
                        break

                if initial_P is None:
                    initial_P = np.identity(6)  # same default as process_initial_data

                self.intialize(x, initial_P)

                break

            self.process_initial_data(uwb, d, initial_P)

            i += 1
            rate.sleep()


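# tag_ids.json (read by get_anchors above and get_tag_ids below) maps each
# robot namespace to the ids of its UWB hardware; the layout is roughly as
# follows (ids here are illustrative, not the ones shipped in the repository):
#   {"/": {"right_tag": 0, "left_tag": 1, "anchor": 2}}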
def get_tag_ids(ns, tags_file='tag_ids.json'):
    # Return the (right_tag, left_tag, anchor) ids configured for namespace ns.
    rospack = rospkg.RosPack()
    package_location = rospack.get_path('uwb_localization')

    tags_file = os.path.join(package_location, 'src', tags_file)

    with open(tags_file, 'r') as f:
        tag_data = json.load(f)

    print(tag_data)
    right_tag = tag_data[ns]['right_tag']
    left_tag = tag_data[ns]['left_tag']
    anchor = tag_data[ns]['anchor']
    print(right_tag, left_tag, anchor)

    return right_tag, left_tag, anchor


if __name__ == "__main__":
    rospy.init_node("ukf_uwb_localization_kalman")

    ns = rospy.get_namespace()

    print("Namespace:", ns)

    right_tag, left_tag, _ = get_tag_ids(ns)

    initial_pose = rospy.wait_for_message(ns + 'ground_truth/state', Odometry)
    x, y, z = initial_pose.pose.pose.position.x, initial_pose.pose.pose.position.y, initial_pose.pose.pose.position.z
    v = 0.2
    theta = euler_from_quaternion((
        initial_pose.pose.pose.orientation.x,
        initial_pose.pose.pose.orientation.y,
        initial_pose.pose.pose.orientation.z,
        initial_pose.pose.pose.orientation.w
    ))[2]

    print("Actual Initial", x, y, v, theta)

    # Tuned filter parameters: p[0] = uwb_std, p[1:7] = odometry_std,
    # p[7] = accel_std, p[8] = yaw_accel_std, p[9] = alpha, p[10] = beta
    # (the remaining entries are unused here).
    p = [1.0001, 11.0, 14.0001, 20.9001, 1.0001, 0.0001, 0.0001, 3.9001, 4.9001, 1.0, 0, 0.0001, 0.0001, 0.0001, 2.0001,
         0.0001, 0.0001]

    loc = UKFUWBLocalization(p[0], p[1:7], accel_std=p[7], yaw_accel_std=p[8], alpha=p[9], beta=p[10], namespace=ns,
                             right_tag=right_tag, left_tag=left_tag)
    # loc.intialize(np.array([x, y, z, v, theta]),
    #               np.identity(6))

    loc.run()

    rospy.spin()
--------------------------------------------------------------------------------
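For reference, a minimal standalone sketch of the cold-start multilateration performed by UKFUWBLocalization.func and process_initial_data, using made-up anchor positions and noise-free synthetic ranges (no ROS required); it illustrates the geometry only and is not part of the package:

import numpy as np
from scipy.optimize import least_squares

# Hypothetical anchor positions (x, y, z) in the world frame.
anchors = np.array([[0.0, 0.0, 2.0], [5.0, 0.0, 2.0], [0.0, 5.0, 2.0], [5.0, 5.0, 2.0]])

# Ground-truth tag positions, used only to synthesize the ranges below.
left_true = np.array([2.0, 2.162, 0.184])
right_true = np.array([2.0, 1.838, 0.184])
d = np.linalg.norm(left_true - right_true)  # known left-right tag separation

ranges = [(a, tag, np.linalg.norm(tag - a)) for tag in (left_true, right_true) for a in anchors]


def residuals(x):
    # x = [left_x, left_y, right_x, right_y], as in UKFUWBLocalization.func.
    x1, y1, x2, y2 = x
    res = [(x1 - x2) ** 2 + (y1 - y2) ** 2 - d ** 2]  # keep the tags at separation d
    for anchor, tag, dist in ranges:
        tx, ty = (x1, y1) if np.allclose(tag, left_true) else (x2, y2)
        res.append((tx - anchor[0]) ** 2 + (ty - anchor[1]) ** 2 + (tag[2] - anchor[2]) ** 2 - dist ** 2)
    return res


sol = least_squares(residuals, [0, 0, 0, 0]).x
left, right = sol[:2], sol[2:]
center = (left + right) / 2  # initial (x, y) estimate
v_ab = left - right
theta = np.arccos(v_ab[1] / np.linalg.norm(v_ab))  # initial yaw, as in process_initial_data
print(center, theta)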