├── 0-setup
├── Docker Cheat Sheet.md
└── README.md
├── 1-ros_2_introduction
├── README.md
├── ROS 2 cheatsheet.md
└── images
│ ├── andino_robot.png
│ ├── gazebo_logo_white_bg.svg
│ ├── gazebo_teleop.png
│ ├── ros2_tf_frames.png
│ ├── ros2_topic_echo.png
│ ├── ros2_topic_info.png
│ ├── ros2_topic_list.png
│ ├── ros_logo_white_bg.svg
│ ├── rviz_add_camera.png
│ ├── rviz_add_camera_2.png
│ ├── rviz_coordinate_frame.png
│ ├── rviz_fixed_frame.png
│ ├── rviz_logo.png
│ ├── rviz_odom_frame.png
│ └── tf_tree.png
├── 2-slam_and_navigation_demo
├── README.md
└── images
│ ├── mapping_demo.png
│ ├── nav2_demo_with_pose_estimate.png
│ ├── nav2_goal.png
│ ├── nav2_logo.png
│ ├── pose_estimate.png
│ └── subscribe_to_map_topic.png
├── 3-create_ros_2_package
├── README.md
└── images
│ ├── build_warning.png
│ ├── odometry_node.png
│ ├── package_structure.png
│ ├── package_xml.png
│ ├── ros2_run.png
│ ├── setup_py.png
│ ├── turtle_nest_1.png
│ ├── turtle_nest_2.png
│ └── turtle_nest_3.png
├── 4-robot_odometry
├── README.md
└── images
│ ├── andino_odometry.png
│ ├── joint_states.png
│ └── odometry_visualization.png
├── 5-path_planning
├── README.md
└── images
│ ├── bt.png
│ ├── global_costmap.png
│ ├── global_plan.png
│ ├── hello_world.png
│ ├── local_costmap.png
│ ├── local_plan.png
│ ├── nav2_bringup.png
│ ├── nav2_planner_params.png
│ ├── no_plan.png
│ ├── service_not_available.png
│ ├── static_map.png
│ ├── straight_plan.png
│ └── straight_plan_through_obstacle.png
├── README.md
├── docker
├── Dockerfile
├── andino_requirements.txt
├── docker-compose.yaml
└── ros_entrypoint.sh
├── images
├── andino_sim_screenshot.png
├── autonomous_navigation.gif
├── course_banner.png
├── docker_compose_up.png
├── gazebo_rviz.png
├── henki_robotics_logo.png
└── uef_logo.jpg
├── lecture_slides
├── theme_1_robotics_in_society
│ └── theme_1_robotics_in_society_theoretical.pdf
├── theme_2_robotics_applications
│ ├── Theme 2 - Introducing ROS 2, Docker Containerization.pptx.pdf
│ └── theme_2_robotics_applications_theoretical.pdf
├── theme_3_robot_control_theory
│ ├── Theme 3 - Robot control theory. The ROS perspective.pdf
│ └── theme_3_robot_control_theory_theoretical.pdf
├── theme_4_navigation
│ ├── Theme 4 - SLAM Navigation. How do we do them in ROS.pdf
│ └── theme_4_navigation_theoretical.pdf
├── theme_5_robotics_ai
│ ├── Theme 5 - Robotics AI. Practical aspects with ROS.pdf
│ └── theme_5_robotics_ai_theoretical.pdf
└── theme_6_xr_applications_robotics
│ └── theme_6_xr_applications_robotics.pdf
└── packages
├── create_plan_msgs
├── CMakeLists.txt
├── package.xml
└── srv
│ └── CreatePlan.srv
└── custom_nav2_planner
├── CMakeLists.txt
├── global_planner_plugin.xml
├── include
└── custom_nav2_planner
│ └── custom_nav2_planner.hpp
├── package.xml
└── src
└── custom_nav2_planner.cpp
/0-setup/Docker Cheat Sheet.md:
--------------------------------------------------------------------------------
1 | # Docker Cheat Sheet
2 |
3 | List of all the used Docker commands during the exercises.
4 |
5 | **Note:** "docker compose" -commands need to be run in the same location where the `docker-compose.yaml` file is:
6 |
7 | cd robotics_essentials_ros2/docker/
8 |
9 | ### Re-build a container
10 | docker compose up --build
11 |
12 | ### Run a container
13 | docker compose up
14 |
15 | ### Run a container in detached mode
16 | docker compose up -d
17 |
18 | ### Open a new terminal inside the Docker container
19 | docker exec -it robotics_essentials_ros2 bash
20 |
21 | ### List all the running Docker containers
22 | docker ps
23 |
24 | ### Stop a running container
25 | docker stop robotics_essentials_ros2
26 |
27 | ### Remove a stopped container
28 | docker rm robotics_essentials_ros2
29 |
30 |
--------------------------------------------------------------------------------
/0-setup/README.md:
--------------------------------------------------------------------------------
1 | # Exercises 0 - Setup
2 |
3 | ## Prerequisites
4 | - Ubuntu
5 | - Intel or AMD processor (with x64 processor architecture). This is the reason why the Docker container won't work directly for example on Apple Silicon, Raspberry or Jetson platforms.
6 | - A computer with sufficient processing power to run Gazebo simulation smoothly.
7 | - [Docker](https://docs.docker.com/engine/install/ubuntu/). Follow the link for the official tutorial
8 | and the latest installation instructions. Optionally, you can follow the commands below, but they might get out of date.
9 | - Git (`sudo apt install git`)
10 |
11 |
12 | Docker installation instructions
13 |
14 | ### Installing Docker
15 | ### Firstly follow these steps:
16 | 1. Set up Docker's `apt` repository by running these commands (one-by-one) in a new terminal:
17 | ```
18 | # Add Docker's official GPG key:
19 | sudo apt-get update
20 | sudo apt-get install ca-certificates curl
21 | sudo install -m 0755 -d /etc/apt/keyrings
22 | sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
23 | sudo chmod a+r /etc/apt/keyrings/docker.asc
24 |
25 | # Add the repository to Apt sources:
26 | echo \
27 | "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
28 | $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
29 | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
30 | sudo apt-get update
31 | ```
32 | 2. Install the Docker packages by running the following command in the same terminal:
33 | ```
34 | sudo apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
35 | ```
36 | 3. Verify your installation by running this command:
37 | ```
38 | sudo docker run hello-world
39 | ```
40 |
41 | ### Follow these next steps to be able to run `docker` commands without "sudo"
42 | 1. Create a `docker` user group.
43 | ```
44 | sudo groupadd docker
45 | ```
46 | 2. Add your user to the `docker` group.
47 | ```
48 | sudo usermod -aG docker $USER
49 | ```
50 | 3. Log out and log back in to your system, or run the following command:
51 | ```
52 | newgrp docker
53 | ```
54 | 4. Test that you can run `docker` commands without `sudo`.
55 | ```
56 | docker run hello-world
57 | ```
58 |
59 | ### (Optional) If you are running Docker on a Linux machine equipped with an Nvidia GPU and the proper installed drivers
60 | #### Install the Nvidia Container Toolkit by following these steps:
61 | 1. Configure the production repository:
62 | ```
63 | curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | sudo gpg --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
64 | && curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \
65 | sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \
66 | sudo tee /etc/apt/sources.list.d/nvidia-container-toolkit.list
67 | ```
68 | Optionally, configure the repository to use experimental packages:
69 | ```
70 | sed -i -e '/experimental/ s/^#//g' /etc/apt/sources.list.d/nvidia-container-toolkit.list
71 | ```
72 | 2. Update the packages list from the repository:
73 | ```
74 | sudo apt-get update
75 | ```
76 | 3. Install the NVIDIA Container Toolkit packages:
77 | ```
78 | sudo apt-get install -y nvidia-container-toolkit
79 | ```
80 |
81 | #### Configure the Nvidia Container Toolkit for Docker:
82 | 1. Configure the container runtime by using the nvidia-ctk command:
83 | ```
84 | sudo nvidia-ctk runtime configure --runtime=docker
85 | ```
86 | 2. Restart the Docker daemon:
87 | ```
88 | sudo systemctl restart docker
89 | ```
90 |
91 |
92 |
93 |
94 | All the practical examples in this course are containerized with Docker, so no
95 | ROS 2 or Gazebo simulation installations are required!
96 |
97 | ## Installation
98 |
99 | 1. Clone the repository:
100 |
101 | ```commandline
102 | git clone https://github.com/henki-robotics/robotics_essentials_ros2.git
103 | ```
104 |
105 | 1. Create a new workspace for your exercises. This will be automatically mounted and available from the Docker container
106 | ```commandline
107 | mkdir -p $HOME/exercises_ws/src
108 | ```
109 |
110 | ## Launching the Docker container
111 |
112 | Before anything run the following command in a new terminal. This will give display permissions to your containers:
113 |
114 | ```commandline
115 | xhost +
116 | ```
117 |
118 | > **Note:** This has to be run every time you restart your computer
119 |
120 | 1. Use docker compose to build and run the Docker container, which includes the ROS 2, Gazebo and simulated Andino installation.
121 | ```commandline
122 | cd robotics_essentials_ros2/docker/
123 | docker compose up
124 | ```
125 |
126 | Wait until the container has been successfully launched
127 |
128 |
129 | 1. Open a new terminal with CTRL+ALT+T (or by using right click on Desktop -> "Open in Terminal") and run this command to start a new terminal inside the Docker container:
130 | ```commandline
131 | docker exec -it robotics_essentials_ros2 bash
132 | ```
133 |
134 | The `docker compose up` command launches the `robotics_essentials_ros2` Docker container, which essentially acts as a virtual environment.
135 | It is a completely isolated environment, that includes all the software dependencies you need, including even the Ubuntu operating system.
136 |
137 | The `docker exec` command on the other hand opens a new terminal inside this Docker container, allowing you to interact with it.
138 |
139 | Refer to this section whenever you need to relaunch your Docker container, or start a new terminal during the upcoming exercises.
140 |
141 | ## Run the simulation
142 |
143 | 1. Verify that everything is running correctly, by starting the simulation with:
144 | ```commandline
145 | ros2 launch andino_gz andino_gz.launch.py
146 | ```
147 |
148 |
149 | Note: If Gazebo doesn't open up in a few minutes, follow the instructions under this dropdown.
150 |
151 | On the first startup, Gazebo will download some assets, so make sure you have a good internet connection.
152 | You might get multiple `[ros_gz_sim]: Requesting list of world names` messages logged in the console.
153 | During this time, Gazebo might freeze, but pressing "Wait" repeatedly should eventually solve the issue.
154 |
155 | Some people have reported this issue being persistent. In this case, you can try to follow the proposed solution
156 | in the [Gazebo GitHub issue #38](https://github.com/gazebosim/gz-sim/issues/38), to disable the firewall for
157 | the duration of the installation using `sudo ufw disable` -command. This has previously helped to solve the issue.
158 |
159 |
160 | 2. Press the play button in simulation to start it.
161 |
162 |
163 |
164 |
165 | You've now successfully installed everything you need for the upcoming exercises!
166 |
167 | Next exercises: [Exercises 1: ROS 2 Introduction](/1-ros_2_introduction/README.md).
168 |
--------------------------------------------------------------------------------
/1-ros_2_introduction/README.md:
--------------------------------------------------------------------------------
1 | # Exercises 1 - ROS 2 introduction
2 |
3 | In the first set of exercises, we will showcase a demo of running a simulated robot, and control it using ROS 2.
4 | You will also learn the very basics of ROS 2 topics and transforms.
5 |
6 |
7 | * [Basic Concepts](#basic-concepts)
8 | * [ROS 2](#ros-2)
9 | * [Gazebo](#gazebo)
10 | * [Rviz](#rviz)
11 | * [Andino](#andino)
12 | * [Launching the Andino robot in a Gazebo simulation](#launching-the-andino-robot-in-a-gazebo-simulation)
13 | * [Control the robot in Gazebo](#control-the-robot-in-gazebo)
14 | * [ROS 2 Topics](#ros-2-topics)
15 | * [Subscribe to a topic](#subscribe-to-a-topic)
16 | * [Publish to a topic](#publish-to-a-topic)
17 | * [RViz](#rviz-1)
18 | * [Subscribe to a new data source: Camera](#subscribe-to-a-new-data-source-camera)
19 | * [TFs - The coordinate transforms](#tfs---the-coordinate-transforms)
20 | * [Commonly used coordinate frames](#commonly-used-coordinate-frames)
21 | * [TF frames in RViz](#tf-frames-in-rviz-)
22 | * [Changing the Fixed Frame](#changing-the-fixed-frame)
23 | * [Visualizing the TF-tree](#visualizing-the-tf-tree)
24 | * [Summary](#summary)
25 |
26 |
27 |
28 |
29 |
30 | ## Basic Concepts
31 | ### ROS 2
32 |
33 |
34 | [ROS 2 (Robot Operating System 2)](https://www.ros.org/) is an open-source framework for building robot software.
35 | It provides a set of tools, libraries, and conventions, including a middleware for internal communication.
36 | It is designed to support real-time performance and multi-robot systems.
37 |
38 |
39 | ### Gazebo
40 |
41 |
42 |
43 | [Gazebo](https://gazebosim.org/home) is a powerful open-source robotics simulator that allows developers to test and validate robot designs in complex environments, offering realistic physics and sensor models.
44 |
45 | ### Rviz
46 |
47 |
48 |
49 | [RViz](https://github.com/ros2/rviz) is a 3D visualization tool for ROS that enables developers to visualize sensor data, robot models, and environment maps, aiding in debugging and monitoring robot behavior.
50 |
51 | ### Andino
52 |
53 |
54 |
55 | Andino is a fully open-source, educational low-cost robot developed by [Ekumen](https://github.com/Ekumen-OS/andino).
56 | It uses ROS 2 to implement its functionalities and has fully functional [Gazebo simulations](https://github.com/Ekumen-OS/andino_gz) of it available.
57 |
58 | ## Launching the Andino robot in a Gazebo simulation
59 |
60 | If you didn't yet, follow the instructions in [Exercises 0 - Setup](/0-setup/README.md) to setup and launch an Andino Gazebo simulation with RViz.
61 |
62 | Here is a quick summary of all the required steps for launching the simulation:
63 |
64 | cd robotics_essentials_ros2/docker/
65 | docker compose up -d
66 | docker exec -it robotics_essentials_ros2 bash
67 | ros2 launch andino_gz andino_gz.launch.py
68 |
69 |
70 | ### Control the robot in Gazebo
71 |
72 | **Exercise 1:**
73 | Open the teleop panel and give commands to move the robot around. Try out all the teleoperation menus, and experiment with all the ways in which you can control Andino. On startup, the Gazebo simulation will most likely be paused. Make sure you firstly press the play button in the bottom left of the Gazebo UI to start the simulation and see the robot moving once you send teleoperation commands.
74 |
75 |
76 |
77 |
78 | ## ROS 2 Topics
79 |
80 | ROS 2 topics are a core communication mechanism in ROS 2 that enable data exchange in a publish/subscribe model.
81 | Publishers send messages to a named topic, while subscribers listen to that topic to receive relevant data.
82 |
83 | ### Subscribe to a topic
84 | By subscribing to a topic, you can read sensor data (lidar, camera) and other useful data (map, odometry) from your robot.
85 |
86 | Open a new terminal inside the Docker container and run the following commands ([How to open terminal in Docker container](/0-setup/Docker%20Cheat%20Sheet.md)):
87 |
88 | 1. List all the available ROS 2 topics
89 | ```commandline
90 | ros2 topic list
91 | ```
92 |
93 |
94 |
95 | 1. Read the sensor data from the laser scanner (Press CTRL+C to stop after a while).
96 | ```commandline
97 | ros2 topic echo /scan
98 | ```
99 |
100 |
101 | 1. Get more info about the /scan topic to learn the message type
102 | ```commandline
103 | ros2 topic info /scan
104 | ```
105 |
106 |
107 | ### Publish to a topic
108 | 1. Move the robot by publishing to cmd_vel topic
109 |
110 | ```commandline
111 | ros2 topic pub /cmd_vel geometry_msgs/msg/Twist "{linear: {x: 0.2}}"
112 | ```
113 |
114 | 1. Send a 0-velocity command to stop the robot
115 |
116 | ```commandline
117 | ros2 topic pub /cmd_vel geometry_msgs/msg/Twist "{linear: {x: 0.0}}"
118 | ```
119 |
120 | **Exercise 2:**
121 |
122 | Publish a message to rotate the robot in its place. First, check what is the message type of the `/cmd_vel` topic using `ros2 topic info` command, and then check the possible message contents with `ros2 interface show <message_type>`
123 |
124 |
125 |
126 |
127 | Solution:
128 |
129 | - `ros2 topic info /cmd_vel` shows us that the message type is `geometry_msgs/msg/Twist`
130 | - `ros2 interface show geometry_msgs/msg/Twist` shows us that we have the "Angular" `Vector3` field that has the `z` field available, to make the robot rotate.
131 | - We can rotate the robot with a command:
132 |
133 | ```
134 | ros2 topic pub /cmd_vel geometry_msgs/msg/Twist "{angular: {z: 0.5}}"
135 | ```
136 |
137 |
138 |
139 | ## RViz
140 |
141 | RViz is a useful visualization tool that allows us to display data from ROS 2 topics.
142 | With these examples, you will learn how to do that.
143 |
144 | ### Subscribe to a new data source: Camera
145 |
146 | Our robot is constantly publishing images from the simulated camera. Let's see what those images look like!
147 |
148 | 1. Click "Add" -button from the bottom left corner of RViz
149 |
150 | 1. Choose to create visualization "By topic"
151 |
152 | 1. Choose `Camera` under the `/image_raw` and Press ok.
153 |
154 |
155 |
156 | **Tip:** Set overlay alpha to 1 to hide the artifacts on top of the image:
157 |
158 |
159 |
160 |
161 | ## TFs - The coordinate transforms
162 |
163 | In ROS 2, transforms are used to describe the spatial relationships between different coordinate frames in a robotic system.
164 | You can think about TFs as the coordinate frames that sit in the most important locations in your robot and in the environment.
165 | They sit at the center of the robot, at the center of sensors, at joints, and there are also the "Map" and "Odometry" frames.
166 | They allow you to convert positions and orientations from one frame to another, and basically keep track of how each part of
167 | your robot moves in relation to the other parts of the robot. This is crucial for tasks like navigation, sensor fusion, and
168 | manipulation.
169 |
170 | The main component for handling transforms in ROS 2 is the tf2 library. It provides:
171 | * Coordinate Frames: Each sensor or part of a robot has its own coordinate frame (e.g., the robot's base, sensors, end effectors).
172 | * Transformations: These include translations (movement along axes) and rotations (changes in orientation) between frames.
173 |
174 | By using transforms, robots can effectively understand their position in the world and how their sensors and motors are located in relation to their body.
175 |
176 | The relationship between these coordinate frames is determined with tf-tree.
177 | It essentially tells with a tree-like structure what is the child-frame's position in relation to the parent frame.
178 |
179 | ### Commonly used coordinate frames
180 |
181 |
182 |
183 | _Image source: [wiki.ros.org](https://wiki.ros.org/hector_slam/Tutorials/SettingUpForYourRobot)_
184 |
185 | **map**
186 |
187 | The map frame provides a global reference point for the robot's environment, allowing it to understand its position within a larger context.
188 | Typically, the coordinates in the map frame present the robot's coordinates on a 2D map.
189 |
190 | **odom**
191 |
192 | The odom frame represents the robot's position based on its odometry data.
193 | It tracks the robot's movement from its starting point, being subject to drift and inaccuracies.
194 |
195 | **base_footprint**
196 |
197 | The base_footprint frame is a 2D representation of the robot's footprint on the ground, typically used for planning and movement calculations without considering the robot's height.
198 |
199 | **base_link**
200 |
201 | The base_link frame represents the robot's main body and is used as a reference for other components, such as sensors and arms.
202 |
203 | **laser_link**
204 |
205 | The laser_link frame denotes the position of a laser sensor on the robot.
206 | It is essential for interpreting the data collected by the laser for tasks like mapping and obstacle detection, providing a reference for where the sensor is located in relation to other frames.
207 |
208 |
209 | ### TF frames in RViz
210 |
211 | When working with RViz, you will need to use the "Fixed Frame" to determine from which frame's perspective you are
212 | visualizing the data.
213 | This is an important feature to know about, as sometimes the data you are looking to visualize might not be available if you are visualizing a wrong frame.
214 |
215 | #### Changing the Fixed Frame
216 |
217 | 1. On Andino, the default frame is set to "base_footprint".
218 | This means that RViz coordinate origin (0, 0), is set to the robot's footprint.
219 | Move the robot around with teleoperation using Gazebo. You can see that the robot is always located in the center of the grid that RViz visualizes.
220 |
221 |
222 |
223 | 1. Change the "Fixed Frame" under "Global Options" from "base_footprint" to "odom" to use odometry as the coordinate frame instead of the robot base_footprint frame.
224 |
225 |
226 |
227 | 1. Drive the robot around. You will see the robot moving in relation to "odom" frame.
228 |
229 |
230 |
231 | #### Visualizing the TF-tree
232 |
233 | Sometimes it might be useful to check the robot's tf-tree for debugging purposes.
234 | You can do it by opening the "Tree" option under the TF menu.
235 |
236 | > **Tip:** To ensure the odom frame appears correctly at the top of the tree, you may need to press the reset button on the bottom left of Rviz.
237 | > The odom frame tracks the robot's movement in the environment, making it the logical parent frame for accurately tracking the motion of all other frames.
238 |
239 |
240 |
241 |
242 | ## Summary
243 |
244 | By the end of these exercises, you have now learned
245 | - What are ROS 2, Gazebo, and Rviz
246 | - How to launch Andino simulation and control the robot from Gazebo
247 | - What ROS 2 topics are
248 | - How to publish to a topic
249 | - How to subscribe to a topic
250 | - What tf-frames are
251 |
252 | Next exercises: [Exercises 2: SLAM and Navigation Demo](/2-slam_and_navigation_demo/README.md)
253 |
--------------------------------------------------------------------------------
/1-ros_2_introduction/ROS 2 cheatsheet.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/ROS 2 cheatsheet.md
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/andino_robot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/andino_robot.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/gazebo_logo_white_bg.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/gazebo_teleop.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/gazebo_teleop.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/ros2_tf_frames.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/ros2_tf_frames.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/ros2_topic_echo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/ros2_topic_echo.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/ros2_topic_info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/ros2_topic_info.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/ros2_topic_list.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/ros2_topic_list.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/ros_logo_white_bg.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/rviz_add_camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/rviz_add_camera.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/rviz_add_camera_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/rviz_add_camera_2.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/rviz_coordinate_frame.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/rviz_coordinate_frame.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/rviz_fixed_frame.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/rviz_fixed_frame.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/rviz_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/rviz_logo.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/rviz_odom_frame.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/rviz_odom_frame.png
--------------------------------------------------------------------------------
/1-ros_2_introduction/images/tf_tree.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/1-ros_2_introduction/images/tf_tree.png
--------------------------------------------------------------------------------
/2-slam_and_navigation_demo/README.md:
--------------------------------------------------------------------------------
1 | # Exercises 2 - SLAM and Navigation Demo
2 | Now it is time to use the robot to build a new 2D map of the simulated environment and autonomously navigate on it.
3 |
4 |
5 | * [Basic Concepts](#basic-concepts)
6 | * [Slam-toolbox](#slam-toolbox)
7 | * [Nav2](#nav2)
8 | * [AMCL](#amcl)
9 | * [Mapping demo](#mapping-demo)
10 | * [Autonomous navigation demo](#autonomous-navigation-demo)
11 | * [ROS 2 Services](#ros-2-services)
12 | * [Summary](#summary)
13 |
14 |
15 | ## Basic Concepts
16 |
17 | ### Slam-toolbox
18 | [Slam-toolbox](https://github.com/SteveMacenski/slam_toolbox/tree/ros2) is an advanced 2D SLAM (Simultaneous Localization and Mapping) solution for ROS 2.
19 | It provides tools for online (real-time) and offline mapping, lifelong mapping, loop closure, and localization.
20 | Slam-toolbox is designed for both real-time applications and mapping large-scale environments efficiently.
21 |
22 | ### Nav2
23 |
24 |
25 |
26 | [Nav2](https://github.com/ros-navigation/navigation2), or the Navigation2 stack, is a framework for ROS 2 that enables robots to navigate autonomously.
27 | It handles path planning, path following, obstacle avoidance, and localization in complex environments.
28 | Nav2 is modular, allowing developers to customize or replace components to suit specific robotic needs.
29 |
30 | ### AMCL
31 | [AMCL](https://github.com/ros-navigation/navigation2/tree/main/nav2_amcl) (Adaptive Monte Carlo Localization) is a probabilistic localization system used in ROS 2, and is part of the Nav2 stack.
32 | It enables robots to determine their position within a known map using particle filters.
33 | AMCL uses sensor data, typically from a LiDAR, to refine the estimated pose of the robot as it navigates.
34 |
35 |
36 | ## Mapping demo
37 | Next, we will run Andino in simulation and map the environment with it.
38 |
39 | 1. Run the simulation inside the Docker container ([How to run Docker container](/0-setup/Docker%20Cheat%20Sheet.md))
40 |
41 | ```commandline
42 | ros2 launch andino_gz andino_gz.launch.py
43 | ```
44 |
45 | 1. Open a new terminal inside the Docker container and run slam-toolbox ([How to open terminal in Docker container](/0-setup/Docker%20Cheat%20Sheet.md))
46 |
47 | ```commandline
48 | ros2 launch andino_gz slam_toolbox_online_async.launch.py
49 | ```
50 | 1. In RViz, subscribe to /map topic to view the map that is being built.
51 |
52 |
53 |
54 | 1. Use Gazebo teleop to drive the robot around in the simulation and map the area.
55 |
56 |
57 |
58 | **Tip:** You can right-click the robot in simulation and choose "Follow" -camera mode to better keep track of your robot.
59 |
60 | 1. Once you are satisfied with the map, open a new terminal inside the Docker container and run this command to save it:
61 | ```commandline
62 | ros2 run nav2_map_server map_saver_cli --free 0.15 --fmt png -f $HOME/andino_map
63 | ```
64 |
65 | ## Autonomous navigation demo
66 |
67 | Before proceeding, stop the previous simulation and slam-toolbox runs with CTRL-C. Make sure you check all your open
68 | terminals to stop everything you had running before.
69 |
70 | 1. Open a new terminal inside the Docker container and launch the Andino simulation with Nav2
71 | ```commandline
72 | ros2 launch andino_gz andino_gz.launch.py nav2:=True
73 | ```
74 |
75 | 1. **Optional:** Navigation will start with a default Andino map. If you wish to switch to the map you just mapped,
76 | open a new terminal inside the container and call a ROS service:
77 | ```commandline
78 | ros2 service call /map_server/load_map nav2_msgs/srv/LoadMap "{map_url: $HOME/andino_map.yaml}"
79 | ```
80 |
81 | 1. Give a "2D Pose Estimate" for AMCL in RViz, so the robot has an idea of where it starts from.
82 |
83 |
84 |
85 | After this, you will see the robot's location on the map
86 |
87 |
88 |
89 | 1. Give a "Nav2 Goal" from RViz to start the autonomous navigation to a desired location on map. You will see the robot planning a path to the given location and start driving towards the goal
90 |
91 |
92 |
93 |
94 | **Exercise 1:**
95 |
96 | Play with Nav2 by giving different goals for the robot.
97 | * Pay attention to what obstacles the robot is able to see with its laser scanner.
98 | * Pay attention to the planned paths that the robot makes. Does it always follow them precisely?
99 | * Are the planned paths always feasible? Does the robot get stuck?
100 |
101 |
102 | ## ROS 2 Services
103 |
104 | If you changed Nav2 to use your 2D map, you might have noticed that you just called a ROS 2 service.
105 | In addition to ROS 2 topics, services are a complementary communication method.
106 | Instead of simply publishing and subscribing, services work using a request-response model.
107 |
108 | Services can be called with a specific type of service request.
109 | The request is processed and a response received as a result.
110 | For example in the Nav2 demo, we called the `/map_server/load_map` service to
111 | load a new map, with a request of `"{map_url: /home/user/andino_map.yaml}"`, which has the service type `nav2_msgs/srv/LoadMap`.
112 |
113 | You can list all the available services with:
114 | ```commandline
115 | ros2 service list
116 | ```
117 |
118 | You can also include their service types with:
119 | ```commandline
120 | ros2 service list -t
121 | ```
122 |
123 | and call any service with the following template:
124 |
125 | ```commandline
126 | ros2 service call <service_name> <service_type> "<request>"
127 | ```
128 |
129 | ## Summary
130 |
131 | By the end of these exercises, you have now learned
132 | - How to create a 2D map using slam-toolbox
133 | - How to make the robot autonomously navigate using Nav2
134 | - What ROS 2 services are
135 |
136 | Next exercises: [Exercises 3: 3-create_ros_2_package](/3-create_ros_2_package/README.md)
--------------------------------------------------------------------------------
/2-slam_and_navigation_demo/images/mapping_demo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/2-slam_and_navigation_demo/images/mapping_demo.png
--------------------------------------------------------------------------------
/2-slam_and_navigation_demo/images/nav2_demo_with_pose_estimate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/2-slam_and_navigation_demo/images/nav2_demo_with_pose_estimate.png
--------------------------------------------------------------------------------
/2-slam_and_navigation_demo/images/nav2_goal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/2-slam_and_navigation_demo/images/nav2_goal.png
--------------------------------------------------------------------------------
/2-slam_and_navigation_demo/images/nav2_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/2-slam_and_navigation_demo/images/nav2_logo.png
--------------------------------------------------------------------------------
/2-slam_and_navigation_demo/images/pose_estimate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/2-slam_and_navigation_demo/images/pose_estimate.png
--------------------------------------------------------------------------------
/2-slam_and_navigation_demo/images/subscribe_to_map_topic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/2-slam_and_navigation_demo/images/subscribe_to_map_topic.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/README.md:
--------------------------------------------------------------------------------
1 | # Exercises 3 - Create ROS 2 package
2 | In the upcoming exercise session, you will start writing some Python code.
3 | ROS 2 organizes code neatly in **packages** to separate different
4 | functionalities, and to make them easily installable and shareable.
5 |
6 | You can follow this tutorial to easily create a new package using the
7 | [Turtle Nest](https://github.com/Jannkar/turtle_nest) GUI tool, or learn how to create packages manually from the command line by following
8 | [the official ROS 2 documentation](https://docs.ros.org/en/humble/Tutorials/Beginner-Client-Libraries/Creating-Your-First-ROS2-Package.html).
9 |
10 | ## Create a new package
11 | 1. Launch the Docker container and open a new terminal inside it ([How to run Docker container](/0-setup/Docker%20Cheat%20Sheet.md))
12 | 2. Run the command `turtle-nest` to bring up the guided package creation
13 |
14 | Use the following settings to create the package:
15 |
16 | **Package name:** `ros2_exercises`
17 |
18 | **Destination:** Path to exercises_ws, for example: `/home/user/exercises_ws/src`
19 |
20 | -> Next
21 |
22 | **Package Type:** `Python`. Uncheck the C++ option
23 |
24 | **Python Node Name:** `odometry_publisher` (Will be used for the next exercises)
25 |
26 | -> Next
27 |
28 | **Maintainer Name:** ``
29 |
30 | **Package License:** `No license`
31 |
32 | -> Create Package
33 |
34 |
35 | Other fields can be left empty / unchecked, as they are not needed in these exercises.
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 | ## Build and source the package
45 |
46 | Next, you will need to compile your code by building the package and then source it for ROS 2 to be able to find it.
47 | Run the following commands:
48 |
49 | cd /home/user/exercises_ws/
50 | colcon build --symlink-install
51 | source install/setup.bash
52 |
53 | This will build and source all the packages inside your exercises_ws -workspace.
54 | We use the `--symlink-install` to make our Python code symbolically linked, so that we don't have to rebuild every time we modify our code.
55 |
56 | **NOTE**: You need to do this build and source step every time you relaunch your Docker container!
57 |
58 | After successfully building, you should see the following message. You can ignore the produced warning message.
59 |
60 |
61 |
62 | ## What does our new package include?
63 |
64 | Let's check what our newly created package contains.
65 |
66 | We created our package in the `exercises_ws` folder.
67 | This folder is mounted inside our container in the `volumes` section of our [docker-compose.yaml](/docker/docker-compose.yaml) file, meaning that its contents are synced between our Ubuntu host and inside the Docker container at the `$HOME/exercises_ws`.
68 | All the code we write there will be automatically synchronized inside the container.
69 |
70 | Open the exercises_ws in your favorite IDE to take a closer look at what the new package contains.
71 | For this course, the examples are mainly done with [PyCharm (Community Edition)](https://www.jetbrains.com/pycharm/download/?section=linux),
72 | which is freely available.
73 |
74 |
75 |
76 | The most important parts of the package are the following:
77 |
78 | ### ros2_exercises -folder
79 | Has the exact same name as your main package.
80 | This is the main place to store all your ROS 2 nodes.
81 |
82 | ### ros2_exercises/odometry_publisher.py
83 | The source code for the Odometry Publisher Node that was automatically created during the package creation process.
84 |
85 | It imports the necessary things, such as rclpy for writing ROS 2 Python code.
86 | A new class, OdometryPublisher, is automatically created; it inherits from the ROS 2 Node class to get access to features such as publishing and subscribing.
87 | In the main function, we initialize rclpy and the new node, as well as spin it in an infinite loop, which can be interrupted with CTRL-C.
88 | ROS 2 nodes "spin" to keep checking for new messages or requests so they can respond as soon as something happens.
89 |
90 |
91 |
92 | ### setup.py
93 | Python-related installation file, that holds for example information about the existing Nodes.
94 | If you add new Nodes for your package, you need to declare them here.
95 |
96 |
97 |
98 | ### package.xml
99 | Contains metadata for your package, such as maintainer name and package description.
100 | Also, all the necessary package dependencies should be added here, such as existing core ROS packages or
101 | dependencies to your other ROS packages.
102 |
103 |
104 |
105 | ## Run your newly created node
106 | As the final step, let's test that your newly created ROS 2 node works correctly.
107 | Run:
108 |
109 | ros2 run ros2_exercises odometry_publisher
110 |
111 | This will print a message from your new `OdometryPublisher` Node:
112 |
113 |
114 |
115 | ## Summary
116 |
117 | In this tutorial, you learned:
118 | - How to create a new ROS 2 Python package using the Turtle Nest tool.
119 | - How to build and source your ROS 2 packages.
120 | - What files and folders a usual ROS 2 Python package contains.
121 | - What ROS 2 Python Nodes look like.
122 | - What the `setup.py` and `package.xml` files are and what they look like.
123 | - How to run your new ROS 2 Node.
124 |
125 | Next exercises: [Exercises 4: Robot Odometry](/4-robot_odometry/README.md)
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/build_warning.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/build_warning.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/odometry_node.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/odometry_node.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/package_structure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/package_structure.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/package_xml.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/package_xml.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/ros2_run.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/ros2_run.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/setup_py.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/setup_py.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/turtle_nest_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/turtle_nest_1.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/turtle_nest_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/turtle_nest_2.png
--------------------------------------------------------------------------------
/3-create_ros_2_package/images/turtle_nest_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/3-create_ros_2_package/images/turtle_nest_3.png
--------------------------------------------------------------------------------
/4-robot_odometry/README.md:
--------------------------------------------------------------------------------
1 | # Exercise 4 - Robot Odometry
2 |
3 | * [Exercise 4 - Robot Odometry](#exercise-4---robot-odometry)
4 | * [Common Types of Odometry Data](#common-types-of-odometry-data)
5 | * [Exercise: Calculate Odometry from Wheel Encoders](#exercise-calculate-odometry-from-wheel-encoders)
6 | * [Getting started](#getting-started-)
7 | * [Instructions](#instructions)
8 | * [Visualize the Driven Path](#visualize-the-driven-path)
9 | * [Test the Odometry](#test-the-odometry)
10 | * [Solution](#solution)
11 | * [Summary](#summary)
12 |
13 |
14 |
15 | **Odometry** is a technique used to estimate a robot’s position and orientation over time.
16 | It is one of the essential things in mobile robotics, as good odometry data is often the foundation for localization, mapping and navigation.
17 | Problems in these features likely boil down to bad odometry data, which is
18 | why it is important to understand how odometry is calculated and the basic assumptions it relies on.
19 |
20 |
21 |
22 | _How far has Andino travelled?
23 | We can answer this question by knowing how much each wheel has rotated and using this information to calculate the odometry._
24 |
25 | ### Common Types of Odometry Data
26 |
27 | Odometry can be estimated using data from the robot’s sensors.
28 |
29 | - **Wheel Odometry:** Uses data from wheel encoders, or other types of sensors that can keep track of the position and measure the wheel rotations. By calculating how much each wheel has rotated, the robot can estimate how far it has moved.
30 | - **Visual Odometry:** Uses cameras or depth sensors to estimate movement by analyzing changes in images as the robot moves.
31 | - **Inertial Odometry:** Utilizes inertial measurement units (IMUs) that provide data on acceleration and rotational velocity to estimate position changes.
32 | - **Sensor Fusion Odometry:** Combines multiple sources (e.g., wheel encoders, IMUs, cameras) for more accurate and reliable odometry.
33 |
34 | ## Exercise: Calculate Odometry from Wheel Encoders
35 |
36 | In this practical exercise, we’ll learn how to calculate odometry using data from simulated wheel encoders.
37 | In simulation, Andino uses Gazebo's built-in `OdometryPublisher` plugin to publish odometry data, but here we’ll learn to do the same calculation ourselves using Python.
38 |
39 | Andino publishes the robot's left and right wheel positions and velocities on the `/joint_states` topic. We’ll use this information to estimate the robot’s position and velocity over time, just as you would on a real robot.
40 |
41 |
42 |
43 |
44 | _JointState message from Andino_
45 |
46 | ### Getting started
47 | To get started, copy-paste the following code below into your `OdometryPublisher` Node you created during the previous exercises.
48 | The node is located in
49 | `/home/user/exercises_ws/src/ros2_exercises/ros2_exercises/odometry_publisher.py`.
50 |
51 | We will use this code as the starting point to publish our odometry data.
52 |
53 | ```
54 | #!/usr/bin/env python3
55 | import math
56 | import matplotlib.pyplot as plt
57 |
58 | import rclpy
59 | from rclpy.node import Node
60 | from tf_transformations import quaternion_from_euler
61 |
62 | from geometry_msgs.msg import Quaternion
63 | from nav_msgs.msg import Odometry
64 | from sensor_msgs.msg import JointState
65 |
66 |
67 | class OdometryPublisher(Node):
68 |
69 | def __init__(self):
70 | super().__init__("odometry_publisher")
71 | # Use these wheel parameters for odometry calculation on Andino
72 | self.wheel_separation = 0.137
73 | self.wheel_radius = 0.033
74 |
75 | # Initializing the robot position and time for robot odometry calculation
76 | self.x = 0.0 # Robot's position on x-axis
77 | self.y = 0.0 # Robot's position on y-axis
78 | self.theta = 0.0 # Robot's orientation angle
79 | self.last_time = self.get_clock().now()
80 |
81 | # TODO: Subscribe to /joint_states topic to listen to data from left and right wheels.
82 | # The message type is JointState and quality of service can be set to 10.
83 | # Use self.joint_states_callback as the subscription callback.
84 |
85 | # TODO: Create odometry publisher. Message type is Odometry, topic can be set to
86 | # /robot_odometry (not to clash with the existing Andino odometry topic) and qos to 10.
87 |
88 | def joint_states_callback(self, joint_states):
89 | # TODO: Read left and right wheel velocities from the JointState message
90 |
91 | # TODO: Get the elapsed time since last odometry calculation, delta time (dt), in seconds.
92 | # Save current time to self.last_time
93 |
94 | # TODO: The wheel velocities we read from the joint_states message are angular
95 | # joint velocities (rad/s). Convert them to linear velocities for each wheel by multiplying
96 | # them with a wheel radius. Then calculate the robot's linear and angular velocities
97 | # with the following formulas:
98 | # linear velocity = (vel right + vel left) / 2.0
99 | # angular velocity = (vel right - vel left) / wheel separation
100 |
101 | # TODO: Now that we know how much time has elapsed since the last calculation,
102 | # what was robot's previous orientation angle (theta) and with what speed the
103 | # robot has moved, we can calculate the new position for the robot. Find out how to
104 | # calculate this for x-axis, y-axis and robot's orientation theta, and
105 | # update the values in self.x, self.y and self.theta.
106 |
107 | # TODO: Create new Odometry message and populate stamp and frame IDs. The parent frame
108 | # ID is "odom" and child frame ID is "base_link".
109 |
110 | # TODO: Add the updated robot's position and orientation to the Odometry message.
111 | # Be careful, the Odometry message accepts the orientation in Quaternion notation!
112 |
113 | # TODO: Add the updated linear and angular velocities in the Odometry message
114 |
115 | # TODO: Publish the odometry message
116 |
117 |
118 | def main(args=None):
119 | rclpy.init(args=args)
120 |
121 | odometry_publisher = OdometryPublisher()
122 |
123 | try:
124 | rclpy.spin(odometry_publisher)
125 | except KeyboardInterrupt:
126 | pass
127 |
128 | odometry_publisher.destroy_node()
129 | rclpy.try_shutdown()
130 |
131 |
132 | if __name__ == '__main__':
133 | main()
134 |
135 | ```
136 |
137 | ### Instructions
138 |
139 | The Python code has different sections denoted with TODO-comments that you should complete.
140 | - Fill in the missing code pieces
141 | - Between each of the steps, we recommend to print out the values and drive the robot in simulation to see how they are changing over time.
142 | This will give you a better understanding of the whole solution.
143 | - For a better learning experience, we do not recommend using generative AI like chatGPT to finish this task.
144 | - You can find the solution to this exercise below, but try to first solve it by your own.
145 | Learning to code is most effective through the process of trial, error, and overcoming challenges.
146 |
147 | To test your solution, run first the Andino simulation in a new terminal:
148 |
149 | ros2 launch andino_gz andino_gz.launch.py
150 |
151 | In another terminal, make sure you've built and sourced your node:
152 |
153 | cd /home/user/exercises_ws/
154 | colcon build --symlink-install
155 | source install/setup.bash
156 |
157 | And then run the node with:
158 |
159 | ros2 run ros2_exercises odometry_publisher --ros-args -p use_sim_time:=True
160 |
161 | You can start and stop your node during development, without restarting the whole simulation.
162 | You also don't have to rebuild the package after every change you make, thanks to `--symlink-install`.
163 | Rebuild is only needed if you stop the whole Docker container.
164 |
165 | ### Visualize the Driven Path
166 |
167 | Once you are done with your solution, you can visualize the calculated odometry by drawing the robot positions.
168 | This will help you to verify your solution and also get a better understanding how odometry data works,
169 | and what are the possible drawbacks of it.
170 |
171 | To visualize the driven path using matplotlib, add the following class to your code:
172 |
173 | ```
174 | class PathVisualizer:
175 | def __init__(self):
176 | plt.ion()
177 | self.fig, self.ax = plt.subplots()
178 |
179 | # Lock the aspect ratio and set equal scaling for x and y
180 | self.ax.set_aspect('equal', adjustable='box')
181 |
182 | self.path_x, self.path_y = [], []
183 |
184 | def visualize(self, x, y):
185 | # Store position history
186 | self.path_x.append(x)
187 | self.path_y.append(y)
188 |
189 | # Plot path
190 | self.ax.clear()
191 | self.ax.plot(self.path_x, self.path_y, 'b-', label='Path')
192 | self.ax.plot(x, y, 'ro', label='Current Position') # Mark current position
193 | self.ax.set_xlabel('X Position (m)')
194 | self.ax.set_ylabel('Y Position (m)')
195 | self.ax.set_title('Traversed Path')
196 | self.ax.legend()
197 | plt.draw()
198 | plt.pause(0.001) # Short pause to update plot
199 |
200 | ```
201 |
202 | Initialize it in `OdometryPublisher`'s init -function:
203 |
204 |
205 | self.path_visualizer = PathVisualizer()
206 |
207 |
208 | And update it in the end of the `joint_states_callback` -function:
209 |
210 | self.path_visualizer.visualize(odom_msg.pose.pose.position.x, odom_msg.pose.pose.position.y)
211 |
212 | ### Test the Odometry
213 |
214 | Drive the robot around in the simulation while visualizing the odometry data.
215 |
216 | - Does the odometry data correspond precisely to the driven path?
217 | - Would the data be as precise on a real robot?
218 | - Are you able to find situations where according to odometry data the robot is moving,
219 | while in simulation it isn't? If so, what could you do to improve the odometry estimation?
220 |
221 |
222 |
223 |
224 | ### Solution
225 | Before checking the full solution, make sure you've tried solving the exercise on your own.
226 |
227 |
228 | Click to open the solution
229 |
230 |
231 | ```
232 | #!/usr/bin/env python3
233 | import math
234 | import matplotlib.pyplot as plt
235 |
236 | import rclpy
237 | from rclpy.node import Node
238 | from tf_transformations import quaternion_from_euler
239 |
240 | from geometry_msgs.msg import Quaternion
241 | from nav_msgs.msg import Odometry
242 | from sensor_msgs.msg import JointState
243 |
244 |
245 | class OdometryPublisher(Node):
246 |
247 | def __init__(self):
248 | super().__init__("odometry_publisher")
249 | # Use these wheel parameters for odometry calculation on Andino
250 | self.wheel_separation = 0.137
251 | self.wheel_radius = 0.033
252 |
253 | # Initializing the robot position and time for robot odometry calculation
254 | self.x = 0.0 # Robot's position on x-axis
255 | self.y = 0.0 # Robot's position on y-axis
256 | self.theta = 0.0 # Robot's orientation angle
257 | self.last_time = self.get_clock().now()
258 |
259 | # Subscribe to /joint_states topic to listen to data from left and right wheels.
260 | # The message type is JointState and quality of service can be set to 10.
261 | # Use self.joint_states_callback as the subscription callback.
262 | self.joint_subscription = self.create_subscription(
263 | JointState, '/joint_states', self.joint_states_callback, 10
264 | )
265 |
266 | # Create odometry publisher. Message type is Odometry, topic can be set to
267 | # /robot_odometry (not to clash with the existing Andino odometry topic) and qos to 10.
268 | self.odom_publisher = self.create_publisher(Odometry, "/robot_odometry", 10)
269 |
270 | self.path_visualizer = PathVisualizer()
271 |
272 | def joint_states_callback(self, joint_states):
273 | # Read left and right wheel velocities from the JointState message
274 | joint_names = joint_states.name
275 | left_wheel_index = joint_names.index("left_wheel_joint")
276 | right_wheel_index = joint_names.index("right_wheel_joint")
277 | left_wheel_vel = joint_states.velocity[left_wheel_index]
278 | right_wheel_vel = joint_states.velocity[right_wheel_index]
279 |
280 | # Get the elapsed time since last odometry calculation, delta time (dt), in seconds.
281 | # Save current time to self.last_time
282 | current_time = self.get_clock().now()
283 | dt = (current_time - self.last_time).nanoseconds / 1e9
284 | self.last_time = current_time
285 |
286 | # The wheel velocities we read from the joint_states message are angular
287 | # joint velocities (rad/s). Convert them to linear velocities for each wheel by multiplying
288 | # them with a wheel radius. Then calculate the robot's linear and angular velocities
289 | # with the following formulas:
290 | # linear velocity = (vel right + vel left) / 2.0
291 | # angular velocity = (vel right - vel left) / wheel separation
292 | v_left = left_wheel_vel * self.wheel_radius
293 | v_right = right_wheel_vel * self.wheel_radius
294 | linear_velocity = (v_right + v_left) / 2.0
295 | angular_velocity = (v_right - v_left) / self.wheel_separation
296 |
297 | # Now that we know how much time has elapsed since the last calculation,
298 | # what was robot's previous orientation angle (theta) and with what speed the
299 | # robot has moved, we can calculate the new position for the robot. Find out how to
300 | # calculate this for x-axis, y-axis and robot's orientation theta, and
301 | # update the values in self.x, self.y and self.theta.
302 | delta_x = linear_velocity * math.cos(self.theta) * dt
303 | delta_y = linear_velocity * math.sin(self.theta) * dt
304 | delta_theta = angular_velocity * dt
305 |
306 | self.x += delta_x
307 | self.y += delta_y
308 | self.theta += delta_theta
309 |
310 | # Create new Odometry message and populate stamp and frame IDs. The parent frame
311 | # ID is "odom" and child frame ID is "base_link".
312 | odom_msg = Odometry()
313 | odom_msg.header.stamp = current_time.to_msg()
314 | odom_msg.header.frame_id = "odom"
315 | odom_msg.child_frame_id = "base_link"
316 |
317 | # Add the updated robot's position and orientation to the Odometry message
318 | # Be careful, the Odometry message accepts the orientation in Quaternion notation!
319 | odom_msg.pose.pose.position.x = self.x
320 | odom_msg.pose.pose.position.y = self.y
321 | odom_msg.pose.pose.position.z = 0.0
322 | odom_quat = quaternion_from_euler(0, 0, self.theta)
323 | odom_msg.pose.pose.orientation = Quaternion(
324 | x=odom_quat[0], y=odom_quat[1], z=odom_quat[2], w=odom_quat[3]
325 | )
326 |
327 | # Add the updated linear and angular velocities in the Odometry message
328 | odom_msg.twist.twist.linear.x = linear_velocity
329 | odom_msg.twist.twist.linear.y = 0.0
330 | odom_msg.twist.twist.angular.z = angular_velocity
331 |
332 | # Publish the odometry message
333 | self.odom_publisher.publish(odom_msg)
334 |
335 | self.path_visualizer.visualize(odom_msg.pose.pose.position.x, odom_msg.pose.pose.position.y)
336 |
337 |
338 | class PathVisualizer:
339 | def __init__(self):
340 | plt.ion()
341 | self.fig, self.ax = plt.subplots()
342 |
343 | # Lock the aspect ratio and set equal scaling for x and y
344 | self.ax.set_aspect('equal', adjustable='box')
345 |
346 | self.path_x, self.path_y = [], []
347 |
348 | def visualize(self, x, y):
349 | # Store position history
350 | self.path_x.append(x)
351 | self.path_y.append(y)
352 |
353 | # Plot path
354 | self.ax.clear()
355 | self.ax.plot(self.path_x, self.path_y, 'b-', label='Path')
356 | self.ax.plot(x, y, 'ro', label='Current Position') # Mark current position
357 | self.ax.set_xlabel('X Position (m)')
358 | self.ax.set_ylabel('Y Position (m)')
359 | self.ax.set_title('Traversed Path')
360 | self.ax.legend()
361 | plt.draw()
362 | plt.pause(0.001) # Short pause to update plot
363 |
364 |
365 | def main(args=None):
366 | rclpy.init(args=args)
367 |
368 | odometry_publisher = OdometryPublisher()
369 |
370 | try:
371 | rclpy.spin(odometry_publisher)
372 | except KeyboardInterrupt:
373 | pass
374 |
375 | odometry_publisher.destroy_node()
376 | rclpy.try_shutdown()
377 |
378 |
379 | if __name__ == '__main__':
380 | main()
381 |
382 | ```
383 |
384 |
385 | ## Summary
386 |
387 | By the end of these exercises, you have now learned
388 |
389 | - What robot odometry is
390 | - How to calculate robot's odometry data from wheel velocities
391 | - How to use ROS 2 to subscribe and publish data in Python code
392 |
393 | Next exercises: [Exercises 5: Path Planning](/5-path_planning/README.md)
--------------------------------------------------------------------------------
/4-robot_odometry/images/andino_odometry.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/4-robot_odometry/images/andino_odometry.png
--------------------------------------------------------------------------------
/4-robot_odometry/images/joint_states.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/4-robot_odometry/images/joint_states.png
--------------------------------------------------------------------------------
/4-robot_odometry/images/odometry_visualization.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/4-robot_odometry/images/odometry_visualization.png
--------------------------------------------------------------------------------
/5-path_planning/README.md:
--------------------------------------------------------------------------------
1 | # Exercise 5 - Path Planning
2 |
3 | Path planning is a common problem in robotics that focuses on finding the best route for a robot to move from a start point to a goal, avoiding obstacles along the way.
4 | In ROS 2, this is typically achieved using the `Nav2` stack, which combines sensor data, maps, and algorithms to calculate and execute paths dynamically.
5 | Path planning is a core concept for autonomous navigation, enabling robots to operate in complex environments.
6 |
7 | In this exercise, you will get an overview of what path planning is and how it is implemented in ROS 2.
8 | You will implement a "Hello World" of the path planning, by creating the simplest form of planner: the **Straight Line Planner**.
9 |
10 | The **Straight Line Planner** will plan the path between two points, without considering any obstacles on the way.
11 | This will give you an understanding of how new path planning algorithms can be implemented using ROS 2,
12 | enabling you to take things further on your own.
13 |
14 |
15 | * [Exercise 5 - Path Planning](#exercise-5---path-planning)
16 | * [Basic Concepts](#basic-concepts)
17 | * [Disclaimer on Docker](#disclaimer-on-docker)
18 | * [Nav2 on Andino](#nav2-on-andino)
19 | * [Straight Line Planner](#straight-line-planner)
20 | * [Change the Planner](#change-the-planner)
21 | * [Verify the planner changes](#verify-the-planner-changes)
22 | * [Create New Python Package](#create-new-python-package)
  23 |   * [Implementing the Straight Line Planner Node](#implementing-the-straight-line-planner-node)
24 | * [Solution](#solution)
25 | * [Next steps](#next-steps)
26 | * [Summary](#summary)
27 |
28 |
29 | ## Basic Concepts
30 |
31 | **Static Map**
32 |
33 |
34 |
35 | A 2D grid-based representation of the environment, where each cell is marked as free, occupied, or unknown. It is used for global navigation planning and obstacle avoidance.
36 |
37 | **Global Costmap**
38 |
39 |
40 |
41 | The global costmap represents the entire known environment, highlighting obstacles and free space. It helps the planner find a viable path at a high level.
42 |
43 | **Local Costmap**
44 |
45 |
46 |
47 | The local costmap focuses on the area immediately surrounding the robot. It dynamically updates with sensor data to handle unexpected obstacles and ensure safe navigation.
48 |
49 | **Path Planner**
50 |
51 |
52 |
53 | The path planner computes a path from the robot’s start point to its goal while considering the global costmap. In this exercise, you'll implement a basic straight-line planner to understand the fundamentals of path planning algorithms.
54 | The produced path is often referred to as the "Global Plan".
55 |
56 | **Controller**
57 |
58 |
59 |
60 | The controller translates the planned path into low-level motion commands, such as velocity and direction, to guide the robot in real-time.
61 |
62 | **Behavior Trees**
63 |
64 | Behavior trees define how the robot should react in different situations by structuring its actions and decisions hierarchically. They enable complex behaviors like re-planning when obstacles block the path.
65 |
66 |
67 |
68 |
69 | ## Disclaimer on Docker
70 | By this point, we expect you to be familiar with how to start a Docker container using `docker compose up`,
71 | and how to open a new terminal inside it using `exec`.
72 | From now on, the instructions won't always explicitly tell you to run the commands, or to open a new terminal, inside the Docker container.
73 | All the commands are expected to be run inside the Docker container, unless otherwise specified.
74 |
75 | You can revise these commands from the [Docker Cheatsheet](/0-setup/Docker%20Cheat%20Sheet.md).
76 |
77 |
78 | ## Disclaimer on ROS 2 workspaces
79 | Inside our `robotics_essentials_ros2` container we have two ROS workspaces. In the `exercises_ws` we are adding our
80 | custom packages and code. In the `ros2_ws` we are keeping all of our external dependencies, like the `andino_gz` package.
81 |
82 |
83 | ## Nav2 on Andino
84 |
85 | As you have seen in previous demos, Nav2 has been integrated with Andino already.
86 | In general, it is simple to take Nav2 and set it up with an existing robot.
87 | Two main steps are needed to do this:
88 | - Launch file that brings up Nav2 stack
89 | - Parameter file that configures all the Nav2 parameters
90 |
91 | Let's take a quick look at how these are set up on Andino.
92 |
93 |
94 | 1. You can view the content of the launch file in the terminal with:
95 |
96 | ```
97 | cat $HOME/ros2_ws/src/andino_gz/andino_gz/launch/andino_gz.launch.py
98 | ```
99 |
105 |
106 | Another way is to check the source code directly in the [Andino GitHub repository](https://github.com/Ekumen-OS/andino_gz/blob/humble/andino_gz/launch/andino_gz.launch.py). You can either check the
107 | code straight in your browser, or clone the package and open it in your favourite IDE.
108 |
109 |
110 |
111 | This part invokes a Nav2 launch file called [bringup_launch.py](https://github.com/ros-navigation/navigation2/blob/main/nav2_bringup/launch/bringup_launch.py), that further
112 | starts all the nodes needed for navigation; planner, controller, behavior trees, etc.
113 |
114 |
115 | 2. The second part of the Nav2 setup is the parameter file. You can view it in your terminal with:
116 |
117 | ```
118 | cat $HOME/ros2_ws/src/andino_gz/andino_gz/config/nav2_params.yaml
119 | ```
120 |
121 | This file includes all the parameters that we pass to the Nav2 nodes, for example amcl, controller_server and planner_server.
122 | Take a moment to view what kind of parameters these Nodes take in.
123 |
124 | ## Straight Line Planner
125 |
126 | Let's replace the existing path planner with our custom implementation.
127 |
128 | Nav2, by default, supports adding new path planners by implementing a plugin with C++.
129 | To keep this course's programming language consistent, we are using Python instead.
130 | We have implemented for you a new C++ Nav2 plugin ([take a look at it here](/packages/custom_nav2_planner/src/custom_nav2_planner.cpp)) that requests a new plan by calling a ROS 2 service.
131 | This approach allows us to implement our path planner in Python by simply creating a service that:
132 |
133 | - Waits for new service requests
134 | - Gets a robot's starting pose and goal pose in a service request
135 | - Responds with a plan
136 |
137 | We also have access to the global costmap, which normally would be used for path planning to plan around the obstacles,
138 | but for simplicity we omit using it, and instead create a plan just between two points.
139 |
140 | With all this information available, we are ready to start calculating new paths for our robot.
141 |
142 | If you are interested in checking the original C++ implementation of the Straight Line Planner, you can find it in the [Nav2 documentation](https://docs.nav2.org/plugin_tutorials/docs/writing_new_nav2planner_plugin.html).
143 |
144 | ### Change the Planner
145 |
146 | Let's get started by manually changing the planner for Andino in the Nav2 parameter file that is found inside the
147 | `andino_gz` package.
148 |
149 |
150 | 1. Open Andino's parameter file with your favorite text editor.
151 | We will use `nano` as an example:
152 |
153 | ```
154 | nano $HOME/ros2_ws/src/andino_gz/andino_gz/config/nav2_params.yaml
155 | ```
156 |
157 | 1. Find the place where `planner_server` parameters are set.
158 | Comment out the existing `planner_server` parameters and replace them with the following:
159 | ```
160 | planner_server:
161 | ros__parameters:
162 | plugins: ["GridBased"]
163 | use_sim_time: True
164 | GridBased:
165 | plugin: "custom_nav2_planner/CustomPlanner"
166 | ```
167 | The end result should look like this:
168 |
169 |
170 |
171 | This replaces the originally used `NavfnPlanner` with our [CustomPlanner](/packages/custom_nav2_planner/src/custom_nav2_planner.cpp) implementation.
172 |
173 | 1. Save the file by pressing CTRL+O, then press ENTER.
174 | 1. Exit with CTRL+X
175 | 1. To apply the changes, finally build and source the `ros2_ws`:
176 | ```
177 | cd $HOME/ros2_ws/
178 | colcon build
179 | source install/setup.bash
180 | ```
181 |
182 |
188 |
189 | ### Verify the planner changes
190 |
191 | Next, we can verify that our planner has been indeed changed.
192 |
193 | 1. Launch your simulation with
194 |
195 | ```commandline
196 | ros2 launch andino_gz andino_gz.launch.py nav2:=True
197 | ```
198 |
199 | 1. Start the simulation, give a pose estimate for the robot, and try to give a Nav2 goal to autonomously navigate
200 |
201 | You should see that your robot does not plan any path, since our custom **Straight Line Planner** is not yet implemented.
202 |
203 |
204 |
205 |
206 | You should also see logs like this:
207 |
208 |
209 |
210 |
211 | Exercise 1:
212 |
213 | - Interestingly, a few seconds after sending the goal, you will notice that your robot will start moving a little.
214 | Why is that?
215 |
216 |
217 | Click to open the solution
218 |
219 | Nav2 has a behavior server running to have recovery behaviors if a plan could not be calculated.
220 | This is useful, for example, if the robot is stuck in a position from which a path cannot be calculated, or if the goal is not reachable.
221 |
222 | During recovery behaviors, the robot will for example try to spin and back up a little, to try to get unstuck.
223 |
224 |
225 | ### Creating a new Python ROS 2 Package
226 |
227 | To add our custom code for the new path planner, let's first create a new Python ROS 2 package.
228 | You can stop your simulation launch with CTRL+C and follow these steps.
229 |
230 | 1. Run Turtle Nest:
231 | ```
232 | turtle-nest
233 | ```
234 | 2. Create a new package with the following options:
235 | - **Package Name**: `path_planner_example`
236 | - **Destination**: `/home/user/exercises_ws/src`
237 | - Note: exercises_ws, not ros2_ws
238 | - **Package type**: `Python`, uncheck C++
239 | - **Python Node Name**: `path_planner_node`
240 | - **License**: No Licence
241 |
242 | The rest of the fields can be left empty.
243 | 3. Once the package has been created, build and source it:
244 | ```commandline
245 | cd $HOME/exercises_ws/
246 | colcon build --symlink-install
247 | source install/setup.bash
248 | ```
249 | 4. Verify that everything went smoothly by running the node:
250 |
251 | ```
252 | ros2 run path_planner_example path_planner_node --ros-args -p use_sim_time:=True
253 | ```
254 |
255 | You should see a Hello World message from your Node:
256 |
257 |
258 |
259 |
260 | ### Implementing the **Straight Line Planner** Node
261 |
262 | Now, it is time to start implementing the actual code for the planner.
263 |
264 | Open your Node in your favorite IDE.
265 | You will find it on your host machine, outside the container in
266 | `$HOME/exercises_ws/src/path_planner_example/path_planner_example/path_planner_node.py`.
267 |
268 | 1. Copy-paste the code below into your new node as a starting point for your planner:
269 |
270 | ```
271 | #!/usr/bin/env python3
272 | import rclpy
273 | from rclpy.node import Node
274 |
275 | from nav_msgs.msg import Path
276 | from geometry_msgs.msg import PoseStamped
277 | from create_plan_msgs.srv import CreatePlan
278 | from nav2_simple_commander.robot_navigator import BasicNavigator
279 |
280 |
281 | class PathPlannerNode(Node):
282 |
283 | def __init__(self):
284 | super().__init__("path_planner_node")
285 | # self.basic_navigator = BasicNavigator() # Can be uncommented to get Global Costmap in create_plan_cb
286 |
287 | # Creating a new service "create_plan", which is called by our Nav2 C++ planner plugin
288 | # to receive a plan from us.
289 | self.srv = self.create_service(CreatePlan, 'create_plan', self.create_plan_cb)
290 |
291 | def create_plan_cb(self, request, response):
292 | # Getting all the information to plan the path
293 | goal_pose = request.goal
294 | start_pose = request.start
295 | time_now = self.get_clock().now().to_msg()
296 | # global_costmap = self.basic_navigator.getGlobalCostmap() # Can be uncommented to get Global CostMap
297 |
298 | print("----")
299 | print(f"Starting pose: ({start_pose.pose.position.x}, {start_pose.pose.position.y})")
300 |         print(f"Goal pose: ({goal_pose.pose.position.x}, {goal_pose.pose.position.y})")
301 |
302 | response.path = create_straight_plan(start_pose, goal_pose, time_now)
303 | return response
304 |
305 | def create_straight_plan(start, goal, time_now):
306 | """
307 | Creates a straight plan between start and goal points.
308 | Does not use the global costmap to plan around obstacles, as normal planners would.
309 | """
310 | path = Path()
311 |
312 | # TODO Set the frame_id and stamp for the Path header. Use frame_id from the goal header,
313 | # and time_now for the current time.
314 |
315 | # Let's create a straight plan between our start and goal poses.
316 | # It is not enough if we provide only the start and end positions as a path.
317 |     # For the controller to follow the path correctly, we also need to provide
318 |     # points along this straight path at small intervals. There is a function
319 | # "interpolate_coordinates" implemented for you that does this. It only needs
320 | # the coordinates in a tuple format, for example:
321 | # interpolate_coordinates((0, 0), (0, 0.5))
322 | # This will give you coordinates between these two points with 0.1 interval:
323 | # [(0.0, 0.0), (0.0, 0.1), (0.0, 0.2), (0.0, 0.3), (0.0, 0.4), (0.0, 0.5)]
324 | # TODO Interpolate the coordinates between start and goal positions
325 |
326 | # TODO Loop through these interpolated coordinates and create a new PoseStamped()
327 | # message for each of them. You can set the same stamp and frame_id as for the Path().
328 | # Finally, add all of these points into the path.poses -array.
329 |
330 | return path
331 |
332 | def interpolate_coordinates(start, end, increment=0.1):
333 | """
334 | Interpolate coordinates between two points with a fixed increment.
335 | This method calculates the coordinates of the points on the straight-line path that we are computing.
336 |
337 | Args:
338 | start (tuple): Starting coordinate (x1, y1).
339 | end (tuple): Ending coordinate (x2, y2).
340 | increment (float): Distance between interpolated points.
341 |
342 | Returns:
343 | list: List of interpolated points as (x, y) tuples.
344 | """
345 | x1, y1 = start
346 | x2, y2 = end
347 |
348 | # Calculate total distance using the Euclidean formula
349 | dx = x2 - x1
350 | dy = y2 - y1
351 | distance = (dx ** 2 + dy ** 2) ** 0.5
352 |
353 | # Calculate the number of steps
354 | num_steps = int(distance / increment)
355 |
356 | # Generate interpolated points
357 | points = []
358 | for i in range(num_steps + 1): # +1 to include the end point
359 | t = i / num_steps # Normalized step (0.0 to 1.0)
360 | x = x1 + t * dx # Linear interpolation for x
361 | y = y1 + t * dy # Linear interpolation for y
362 | points.append((x, y))
363 |
364 | return points
365 |
366 |
367 | def main(args=None):
368 | rclpy.init(args=args)
369 | path_planner_node = PathPlannerNode()
370 |
371 | try:
372 | rclpy.spin(path_planner_node)
373 | except KeyboardInterrupt:
374 | pass
375 |
376 | path_planner_node.destroy_node()
377 | rclpy.try_shutdown()
378 |
379 |
380 | if __name__ == '__main__':
381 | main()
382 |
383 | ```
384 |
385 | Our newly created ROS node consists of 3 main parts:
386 | - **create_plan_cb():** This is our service callback.
387 | When Nav2 requests for a new path, this function will be called with a request containing the start position and goal position.
388 |
389 | - **create_straight_plan():** This is our main function to do the path planning in, and the only place you need to modify.
390 | Without any changes, it returns an empty path.
391 |
392 | - **interpolate_coordinates():** This function calculates intermediate points on a line between start and goal positions,
393 | with a 0.1 meters interval by default.
394 | You do not need to fully understand the contents of this function, only the way how it can be used.
395 | 1. Run the simulation in one terminal, and the new PathPlannerNode in another one with the following commands:
396 | ```
397 | ros2 launch andino_gz andino_gz.launch.py nav2:=True
398 | ```
399 | ```
400 | ros2 run path_planner_example path_planner_node --ros-args -p use_sim_time:=True
401 | ```
402 | 1. Start the simulation, give a pose estimate and a new Nav2 goal.
403 | You will see your node now printing start and goal positions with a certain interval.
404 | Nav2 keeps requesting replanning of the path periodically by default.
405 | 1. Go through the main parts of code and read the comments to understand what is currently implemented
406 | 1. Start following TODO's to fill up the parts that are missing to produce a full path
407 |
408 |
409 |
410 | ### Solution
411 | The final solution should be able to create a straight path that the robot follows.
412 |
413 |
414 |
415 |
416 |
417 | You can find the full solution below.
418 | Before checking it, make sure you've first tried solving the exercise on your own.
419 |
420 |
421 | Click to open the solution
422 |
423 | ```
424 | #!/usr/bin/env python3
425 | import rclpy
426 | from rclpy.node import Node
427 |
428 | from nav_msgs.msg import Path
429 | from geometry_msgs.msg import PoseStamped
430 | from create_plan_msgs.srv import CreatePlan
431 | from nav2_simple_commander.robot_navigator import BasicNavigator
432 |
433 |
434 | class PathPlannerNode(Node):
435 |
436 | def __init__(self):
437 | super().__init__("path_planner_node")
438 | # self.basic_navigator = BasicNavigator() # Can be uncommented to get Global Costmap in create_plan_cb
439 |
440 | # Creating a new service "create_plan", which is called by our Nav2 C++ planner plugin
441 | # to receive a plan from us.
442 | self.srv = self.create_service(CreatePlan, 'create_plan', self.create_plan_cb)
443 |
444 | def create_plan_cb(self, request, response):
445 | # Getting all the information to plan the path
446 | goal_pose = request.goal
447 | start_pose = request.start
448 | time_now = self.get_clock().now().to_msg()
449 | # global_costmap = self.basic_navigator.getGlobalCostmap() # Can be uncommented to get Global CostMap
450 |
451 | response.path = create_straight_plan(start_pose, goal_pose, time_now)
452 | return response
453 |
454 | def create_straight_plan(start, goal, time_now):
455 | """
456 | Creates a straight plan between start and goal points.
457 | Does not use the global costmap to plan around obstacles, as normal planners would.
458 | """
459 | path = Path()
460 |
461 | # Set the frame_id and stamp for the Path header. Use frame_id from the goal header,
462 | # and time_now for the current time.
463 | path.header.frame_id = goal.header.frame_id
464 | path.header.stamp = time_now
465 |
466 | # Let's create a straight plan between our start and goal poses.
467 | # It is not enough if we provide only the start and end positions as a path.
468 |     # For the controller to follow the path correctly, we also need to provide
469 |     # points along this straight path at small intervals. There is a function
470 | # "interpolate_coordinates" implemented for you that does this. It only needs
471 | # the coordinates in a tuple format, for example:
472 | # interpolate_coordinates((0, 0), (0, 0.5))
473 | # This will give you coordinates between these two points with 0.1 interval:
474 | # [(0.0, 0.0), (0.0, 0.1), (0.0, 0.2), (0.0, 0.3), (0.0, 0.4), (0.0, 0.5)]
475 | # Interpolate the coordinates between start and goal positions
476 | interpolated_coordinates = interpolate_coordinates(
477 | (start.pose.position.x, start.pose.position.y),
478 | (goal.pose.position.x, goal.pose.position.y),
479 | )
480 |
481 | # Loop through these interpolated coordinates and create a new PoseStamped()
482 | # message for each of them. You can set the same stamp and frame_id as for the Path().
483 | # Finally, add all of these points into the path.poses -array.
484 | for point in interpolated_coordinates:
485 | pose = PoseStamped()
486 | pose.pose.position.x = point[0]
487 | pose.pose.position.y = point[1]
488 | pose.header.stamp = time_now
489 | pose.header.frame_id = goal.header.frame_id
490 | path.poses.append(pose)
491 |
492 | return path
493 |
494 | def interpolate_coordinates(start, end, increment=0.1):
495 | """
496 | Interpolate coordinates between two points with a fixed increment.
497 | This method calculates the coordinates of the points on the straight-line path that we are computing.
498 |
499 | Args:
500 | start (tuple): Starting coordinate (x1, y1).
501 | end (tuple): Ending coordinate (x2, y2).
502 | increment (float): Distance between interpolated points.
503 |
504 | Returns:
505 | list: List of interpolated points as (x, y) tuples.
506 | """
507 | x1, y1 = start
508 | x2, y2 = end
509 |
510 | # Calculate total distance using the Euclidean formula
511 | dx = x2 - x1
512 | dy = y2 - y1
513 | distance = (dx ** 2 + dy ** 2) ** 0.5
514 |
515 | # Calculate the number of steps
516 | num_steps = int(distance / increment)
517 |
518 | # Generate interpolated points
519 | points = []
520 | for i in range(num_steps + 1): # +1 to include the end point
521 | t = i / num_steps # Normalized step (0.0 to 1.0)
522 | x = x1 + t * dx # Linear interpolation for x
523 | y = y1 + t * dy # Linear interpolation for y
524 | points.append((x, y))
525 |
526 | return points
527 |
528 |
529 | def main(args=None):
530 | rclpy.init(args=args)
531 | path_planner_node = PathPlannerNode()
532 |
533 | try:
534 | rclpy.spin(path_planner_node)
535 | except KeyboardInterrupt:
536 | pass
537 |
538 | path_planner_node.destroy_node()
539 | rclpy.try_shutdown()
540 |
541 |
542 | if __name__ == '__main__':
543 | main()
544 |
545 | ```
546 |
547 |
548 |
549 | ## Next steps
550 |
551 | If you wish, you can try to improve the path planning by also using the global costmap
552 | to plan around obstacles in the environment. You can for example implement some of the popular
553 | path planning algorithms, such as **Dijkstra** or **A***. Good luck!
554 |
555 | > **Note:** Make sure that your path planning algorithm is fast and efficient.
556 | > Otherwise the path planning might time out if the path calculation takes too long.
557 |
558 | ## Summary
559 |
560 | By the end of these exercises, you have now learned:
561 | - The basic navigation concepts
562 | - How to modify Nav2 parameters
563 | - How path planners work and are created in ROS 2
564 |
565 | Congratulations! You've finished all the available exercises! 🎉
--------------------------------------------------------------------------------
/5-path_planning/images/bt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/bt.png
--------------------------------------------------------------------------------
/5-path_planning/images/global_costmap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/global_costmap.png
--------------------------------------------------------------------------------
/5-path_planning/images/global_plan.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/global_plan.png
--------------------------------------------------------------------------------
/5-path_planning/images/hello_world.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/hello_world.png
--------------------------------------------------------------------------------
/5-path_planning/images/local_costmap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/local_costmap.png
--------------------------------------------------------------------------------
/5-path_planning/images/local_plan.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/local_plan.png
--------------------------------------------------------------------------------
/5-path_planning/images/nav2_bringup.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/nav2_bringup.png
--------------------------------------------------------------------------------
/5-path_planning/images/nav2_planner_params.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/nav2_planner_params.png
--------------------------------------------------------------------------------
/5-path_planning/images/no_plan.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/no_plan.png
--------------------------------------------------------------------------------
/5-path_planning/images/service_not_available.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/service_not_available.png
--------------------------------------------------------------------------------
/5-path_planning/images/static_map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/static_map.png
--------------------------------------------------------------------------------
/5-path_planning/images/straight_plan.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/straight_plan.png
--------------------------------------------------------------------------------
/5-path_planning/images/straight_plan_through_obstacle.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/5-path_planning/images/straight_plan_through_obstacle.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Robotics & ROS 2 Essentials
4 |
5 | Welcome to an open-source repository that contains materials and exercises needed
6 | to learn the essential robotics skills using ROS 2.
7 |
8 | This material is implemented for the 2024 iteration of the Master's level "Robotics & XR" course taught by
9 | [Dr. Ilkka Jormanainen](https://www.linkedin.com/in/ilkka-jormanainen-5954441/) at the [University of Eastern Finland](https://www.uef.fi/en), in collaboration with [Henki Robotics](https://henkirobotics.com/).
10 |
11 | [](https://www.uef.fi/en)
12 | [](https://henkirobotics.com/)
13 |
14 | ## Course overview
15 |
16 | This course is designed to give you hands-on experience with the basics of robotics using ROS 2 and Gazebo simulation.
17 | The exercises focus on the [Andino robot](https://github.com/Ekumen-OS/andino_gz/tree/humble) from Ekumen and are structured to gradually introduce you to ROS 2 and Docker.
18 |
19 |
20 | No prior experience with ROS 2 or Docker is needed, and since everything runs through Docker, you won’t need to install ROS 2 on your system beforehand.
21 | Along the way, you’ll learn essential concepts like autonomous navigation and mapping for mobile robots.
22 | All the practical coding exercises are done in Python.
23 |
24 |
25 |
26 | ## Exercises
27 |
28 | This repository contains a set of exercises to learn the core concepts taught over the course through practical demos and coding exercises.
29 | Start from the exercises "0 - Setup" and follow the exercises one by one to learn the basics of robotics and ROS 2!
30 |
31 | 0. [Setup](0-setup)
32 | - Setup the exercises
33 | - Run the Gazebo simulation
34 | - Learn to use Docker
35 | 1. [ROS 2 Introduction](1-ros_2_introduction)
36 | - ROS 2 introduction
37 | - Gazebo and Rviz
38 | - ROS 2 topics; publish and subscribe
39 | - Transformations and tf-frames
40 | 2. [SLAM and Navigation Demo](2-slam_and_navigation_demo)
41 | - Create a map using slam-toolbox
42 | - Navigate autonomously using Nav2
43 | - ROS 2 services
44 | 3. [Create your first ROS 2 package](3-create_ros_2_package)
45 | - ROS 2 packages - how to create your own package
46 | - Building and sourcing
47 | - ROS 2 Nodes
48 | 4. [Robot Odometry](4-robot_odometry)
49 | - Calculate and publish your robot's odometry using wheel velocities
50 | - Robot odometry and how to calculate it
51 | - Publish and subscribe to topics from Python code
52 | 5. [Path Planning](5-path_planning)
53 | - Basic navigation concepts
54 | - Modify Nav2 parameters
55 | - Custom path planning using Nav2
56 |
57 | ## Lecture slides
58 |
59 | These slides accompany the practical exercises included in this repository and are meant to be covered at the same time.
60 |
61 | 1. [Theme 1: Robotics in society](lecture_slides/theme_1_robotics_in_society)
62 | 2. [Theme 2: Robotics applications](lecture_slides/theme_2_robotics_applications)
63 | 3. [Theme 3: Robot control theory](lecture_slides/theme_3_robot_control_theory)
64 | 4. [Theme 4: Navigation](lecture_slides/theme_4_navigation)
65 | 5. [Theme 5: Robotics & AI](lecture_slides/theme_5_robotics_ai)
66 | 6. [Theme 6: XR applications in Robotics](lecture_slides/theme_6_xr_applications_robotics)
67 |
68 |
69 | ## Contribute
70 | Did you encounter a problem during your exercises, or did you find a mistake?
71 | Report issues in the "Issues" section in GitHub, and we will help you.
72 |
73 | If you've found a fix to an existing issue, you can open a new Pull Request to fix it and contribute to this course.
74 | We highly appreciate all the contributions!
75 |
--------------------------------------------------------------------------------
/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM osrf/ros:humble-desktop-full
2 |
3 | # Setup user for GUI to work without xhost
4 | ARG UNAME=user
5 | ARG UID=1000
6 | ARG GID=1000
7 | RUN groupadd -g ${GID} -o ${UNAME}
8 | RUN useradd -m -u ${UID} -g ${GID} -o -s /bin/bash ${UNAME}
9 | RUN usermod -aG sudo ${UNAME}
10 | RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
USER ${UNAME}

# Create a custom ros2 overlay workspace and workspace for exercises
ENV ROS2_WS=/home/user/ros2_ws
RUN mkdir -p ${ROS2_WS}/src
ENV EXERCISES_WS=/home/user/exercises_ws
RUN mkdir -p ${EXERCISES_WS}/src

# Disable terminal interaction for apt
ENV DEBIAN_FRONTEND=noninteractive

# Make default shell in Dockerfile bash instead of sh
SHELL ["/bin/bash", "-c"]

# Copy andino requirement files and install dependencies in a single layer
COPY docker/andino_requirements.txt .
RUN sudo apt-get update && \
    sudo apt-get install --no-install-recommends -y $(cat andino_requirements.txt) && \
    sudo rm andino_requirements.txt

# Install Gazebo Fortress.
# NOTE: apt-key is deprecated; store the OSRF key in a dedicated keyring and
# reference it with [signed-by=...] instead of adding it to the global trust.
RUN sudo mkdir -p /usr/share/keyrings && \
    wget -qO - http://packages.osrfoundation.org/gazebo.key | gpg --dearmor | \
        sudo tee /usr/share/keyrings/gazebo-archive-keyring.gpg > /dev/null && \
    echo "deb [signed-by=/usr/share/keyrings/gazebo-archive-keyring.gpg] http://packages.osrfoundation.org/gazebo/ubuntu-stable $(lsb_release -cs) main" | \
        sudo tee /etc/apt/sources.list.d/gazebo-stable.list > /dev/null && \
    sudo apt-get update && \
    sudo apt-get install -y ignition-fortress && \
    sudo apt-get clean && \
    sudo rm -rf /var/lib/apt/lists/*

# Install dependencies.
# Use ${ROS_DISTRO} consistently instead of hard-coding "humble".
RUN sudo apt-get update && \
    sudo apt-get install -y --no-install-recommends \
    ros-$ROS_DISTRO-navigation2 \
    ros-$ROS_DISTRO-turtle-nest \
    ros-$ROS_DISTRO-tf-transformations \
    && sudo apt-get clean && \
    sudo rm -rf /var/lib/apt/lists/*

# Install andino_gz and other packages with their dependencies
COPY ./packages/ ${ROS2_WS}/src
RUN cd ${ROS2_WS}/src && \
    git clone https://github.com/Ekumen-OS/andino_gz.git -b 0.1.1 && \
    sudo apt-get update && \
    . /opt/ros/${ROS_DISTRO}/setup.bash && \
    cd .. && \
    rosdep update && rosdep install --from-paths src --ignore-src -r -y --rosdistro ${ROS_DISTRO} && \
    sudo apt-get clean && \
    sudo rm -rf /var/lib/apt/lists/* && \
    colcon build --symlink-install

# Build exercises_ws
RUN cd ${EXERCISES_WS}/src && \
    sudo apt-get update && \
    . /opt/ros/${ROS_DISTRO}/setup.bash && \
    cd .. && \
    rosdep update && rosdep install --from-paths src --ignore-src -r -y --rosdistro ${ROS_DISTRO} && \
    sudo apt-get clean && \
    sudo rm -rf /var/lib/apt/lists/* && \
    colcon build --symlink-install

# Source ROS workspaces automatically when a new terminal is opened.
# sudo removed: the >> redirection always ran as the current user anyway, and
# ~/.bashrc belongs to that user. Order: underlay first, then overlays,
# matching ros_entrypoint.sh.
RUN echo ". /opt/ros/${ROS_DISTRO}/setup.bash" >> ~/.bashrc && \
    echo ". ${ROS2_WS}/install/setup.bash" >> ~/.bashrc && \
    echo ". ${EXERCISES_WS}/install/setup.bash" >> ~/.bashrc

WORKDIR /home/user/exercises_ws

# Source ROS in the main terminal
COPY docker/ros_entrypoint.sh /ros_entrypoint.sh

ENTRYPOINT ["/ros_entrypoint.sh"]

CMD ["bash"]
--------------------------------------------------------------------------------
/docker/andino_requirements.txt:
--------------------------------------------------------------------------------
1 | apt-utils
2 | bash-completion
3 | build-essential
4 | curl
5 | debian-archive-keyring
6 | debian-keyring
7 | evtest
8 | gdb
9 | git
10 | gnupg2
11 | gpg-agent
12 | joystick
13 | jstest-gtk
14 | locales
15 | lsb-release
16 | mercurial
17 | nano
18 | openssh-server
19 | python3
20 | python3-pip
21 | python3-setuptools
22 | ros-humble-andino-description
23 | ros-humble-andino-slam
24 | ros-humble-nav2-bringup
25 | ros-humble-nav2-common
26 | software-properties-common
27 | sudo
28 | tmux
29 | wget
30 | xterm
--------------------------------------------------------------------------------
/docker/docker-compose.yaml:
--------------------------------------------------------------------------------
services:
  robotics_essentials_ros2:
    image: robotics_essentials_ros2
    build:
      context: ..  # repo root as build context so ./packages/ can be COPY'd
      dockerfile: docker/Dockerfile
    container_name: robotics_essentials_ros2
    stop_signal: SIGINT  # give ROS nodes a chance to shut down cleanly
    network_mode: host   # share the host network so DDS discovery works out of the box
    privileged: true     # broad device access (input devices, GPU); convenient for a course container
    stdin_open: true
    #runtime: nvidia
    tty: true
    user: user
    volumes:
      - $HOME/exercises_ws/src:/home/user/exercises_ws/src  # keep exercise sources on the host
      - /tmp/.X11-unix:/tmp/.X11-unix                       # X11 socket for GUI apps (RViz, Gazebo)
    environment:
      #- NVIDIA_VISIBLE_DEVICES=all # Makes all NVIDIA devices visible to the container
      #- NVIDIA_DRIVER_CAPABILITIES=all # Grants all NVIDIA driver capabilities to the container
      #- __NV_PRIME_RENDER_OFFLOAD=1 # Enables NVIDIA PRIME render offload
      #- __GLX_VENDOR_LIBRARY_NAME=nvidia # Specifies the GLX vendor library to use (NVIDIA)
      - DISPLAY             # forward the host display for GUI apps
      - QT_X11_NO_MITSHM=1  # work around Qt MIT-SHM issues over X11
      - ROS_DOMAIN_ID=42    # isolate DDS traffic from other ROS 2 systems on the same network
    command: bash
27 |
--------------------------------------------------------------------------------
/docker/ros_entrypoint.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Container entrypoint: source the ROS 2 underlay and both overlay workspaces,
# then exec the requested command so it replaces this shell (becomes PID 1 and
# receives signals directly).
set -e

. "/opt/ros/${ROS_DISTRO}/setup.bash"
. "${ROS2_WS}/install/setup.bash"
. "${EXERCISES_WS}/install/setup.bash"

exec "$@"
--------------------------------------------------------------------------------
/images/andino_sim_screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/images/andino_sim_screenshot.png
--------------------------------------------------------------------------------
/images/autonomous_navigation.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/images/autonomous_navigation.gif
--------------------------------------------------------------------------------
/images/course_banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/images/course_banner.png
--------------------------------------------------------------------------------
/images/docker_compose_up.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/images/docker_compose_up.png
--------------------------------------------------------------------------------
/images/gazebo_rviz.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/images/gazebo_rviz.png
--------------------------------------------------------------------------------
/images/henki_robotics_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/images/henki_robotics_logo.png
--------------------------------------------------------------------------------
/images/uef_logo.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/images/uef_logo.jpg
--------------------------------------------------------------------------------
/lecture_slides/theme_1_robotics_in_society/theme_1_robotics_in_society_theoretical.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_1_robotics_in_society/theme_1_robotics_in_society_theoretical.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_2_robotics_applications/Theme 2 - Introducing ROS 2, Docker Containerization.pptx.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_2_robotics_applications/Theme 2 - Introducing ROS 2, Docker Containerization.pptx.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_2_robotics_applications/theme_2_robotics_applications_theoretical.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_2_robotics_applications/theme_2_robotics_applications_theoretical.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_3_robot_control_theory/Theme 3 - Robot control theory. The ROS perspective.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_3_robot_control_theory/Theme 3 - Robot control theory. The ROS perspective.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_3_robot_control_theory/theme_3_robot_control_theory_theoretical.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_3_robot_control_theory/theme_3_robot_control_theory_theoretical.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_4_navigation/Theme 4 - SLAM Navigation. How do we do them in ROS.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_4_navigation/Theme 4 - SLAM Navigation. How do we do them in ROS.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_4_navigation/theme_4_navigation_theoretical.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_4_navigation/theme_4_navigation_theoretical.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_5_robotics_ai/Theme 5 - Robotics AI. Practical aspects with ROS.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_5_robotics_ai/Theme 5 - Robotics AI. Practical aspects with ROS.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_5_robotics_ai/theme_5_robotics_ai_theoretical.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_5_robotics_ai/theme_5_robotics_ai_theoretical.pdf
--------------------------------------------------------------------------------
/lecture_slides/theme_6_xr_applications_robotics/theme_6_xr_applications_robotics.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/henki-robotics/robotics_essentials_ros2/5c9d8dd0f825750057b6a1dd9a18c1520104d7cb/lecture_slides/theme_6_xr_applications_robotics/theme_6_xr_applications_robotics.pdf
--------------------------------------------------------------------------------
/packages/create_plan_msgs/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 3.8)
project(create_plan_msgs)

# Find dependencies.
# ament_cmake must be found explicitly: ament_package() below comes from it.
find_package(ament_cmake REQUIRED)
find_package(geometry_msgs REQUIRED)
find_package(nav_msgs REQUIRED)
find_package(rosidl_default_generators REQUIRED)

# Generate the CreatePlan service (start/goal PoseStamped in, nav_msgs/Path out)
rosidl_generate_interfaces(${PROJECT_NAME}
  srv/CreatePlan.srv
  DEPENDENCIES geometry_msgs nav_msgs
)

# Downstream packages need the generated runtime typesupport
ament_export_dependencies(rosidl_default_runtime)

ament_package()
--------------------------------------------------------------------------------
/packages/create_plan_msgs/package.xml:
--------------------------------------------------------------------------------
<?xml version="1.0"?>
<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
<package format="3">
  <name>create_plan_msgs</name>
  <version>0.0.0</version>
  <description>Interfaces for calculating global plan</description>
  <!-- NOTE(review): maintainer/author e-mail addresses were lost when the XML
       markup was stripped from this dump; restore the real addresses. -->
  <maintainer email="todo@todo.todo">Janne Karttunen</maintainer>
  <author email="todo@todo.todo">Janne Karttunen</author>

  <license>Apache-2.0</license>

  <depend>geometry_msgs</depend>
  <depend>nav_msgs</depend>

  <buildtool_depend>ament_cmake</buildtool_depend>
  <build_depend>rosidl_default_generators</build_depend>
  <exec_depend>rosidl_default_runtime</exec_depend>

  <member_of_group>rosidl_interface_packages</member_of_group>

  <export>
    <build_type>ament_cmake</build_type>
  </export>
</package>
--------------------------------------------------------------------------------
/packages/create_plan_msgs/srv/CreatePlan.srv:
--------------------------------------------------------------------------------
1 | geometry_msgs/PoseStamped start
2 | geometry_msgs/PoseStamped goal
3 | ---
4 | nav_msgs/Path path
--------------------------------------------------------------------------------
/packages/custom_nav2_planner/CMakeLists.txt:
--------------------------------------------------------------------------------
# Build configuration for the custom_nav2_planner plugin library.
# The planner is built as a shared library and registered with pluginlib via
# global_planner_plugin.xml so the Nav2 planner server can load it at runtime.
cmake_minimum_required(VERSION 3.5)
project(custom_nav2_planner)

# Default to C99
set(CMAKE_C_STANDARD 99)


# Default to C++14
set(CMAKE_CXX_STANDARD 14)

# find dependencies
find_package(ament_cmake REQUIRED)
find_package(rclcpp REQUIRED)
find_package(rclcpp_action REQUIRED)
find_package(rclcpp_lifecycle REQUIRED)
find_package(std_msgs REQUIRED)
find_package(visualization_msgs REQUIRED)
find_package(nav2_util REQUIRED)
find_package(nav2_msgs REQUIRED)
find_package(nav_msgs REQUIRED)
find_package(geometry_msgs REQUIRED)
find_package(builtin_interfaces REQUIRED)
find_package(tf2_ros REQUIRED)
find_package(nav2_costmap_2d REQUIRED)
find_package(nav2_core REQUIRED)
find_package(pluginlib REQUIRED)
find_package(create_plan_msgs REQUIRED)

include_directories(
  include
)

# The "_plugin" suffix must match the library path declared in
# global_planner_plugin.xml.
set(library_name ${PROJECT_NAME}_plugin)

set(dependencies
  rclcpp
  rclcpp_action
  rclcpp_lifecycle
  std_msgs
  visualization_msgs
  nav2_util
  nav2_msgs
  nav_msgs
  geometry_msgs
  builtin_interfaces
  tf2_ros
  nav2_costmap_2d
  nav2_core
  pluginlib
  create_plan_msgs
)

# Plugins must be shared libraries so pluginlib can dlopen them.
add_library(${library_name} SHARED
  src/custom_nav2_planner.cpp
)

ament_target_dependencies(${library_name}
  ${dependencies}
)

target_compile_definitions(${library_name} PUBLIC "PLUGINLIB__DISABLE_BOOST_FUNCTIONS")


# Register the plugin description so the planner server can discover it.
pluginlib_export_plugin_description_file(nav2_core global_planner_plugin.xml)

install(TARGETS ${library_name}
  ARCHIVE DESTINATION lib
  LIBRARY DESTINATION lib
  RUNTIME DESTINATION lib/${PROJECT_NAME}
)

install(DIRECTORY include/
  DESTINATION include/
)

install(FILES global_planner_plugin.xml
  DESTINATION share/${PROJECT_NAME}
)

if(BUILD_TESTING)
  find_package(ament_lint_auto REQUIRED)
  # the following line skips the linter which checks for copyrights
  # uncomment the line when a copyright and license is not present in all source files
  #set(ament_cmake_copyright_FOUND TRUE)
  # the following line skips cpplint (only works in a git repo)
  # uncomment the line when this package is not in a git repo
  #set(ament_cmake_cpplint_FOUND TRUE)
  ament_lint_auto_find_test_dependencies()
endif()


ament_export_include_directories(include)
ament_export_libraries(${library_name})
ament_export_dependencies(${dependencies})
ament_package()
--------------------------------------------------------------------------------
/packages/custom_nav2_planner/global_planner_plugin.xml:
--------------------------------------------------------------------------------
<library path="custom_nav2_planner_plugin">
  <class name="custom_nav2_planner/CustomPlanner" type="custom_nav2_planner::CustomPlanner" base_class_type="nav2_core::GlobalPlanner">
    <description>This is an example custom planner plugin that calls a service to get a plan.</description>
  </class>
</library>
--------------------------------------------------------------------------------
/packages/custom_nav2_planner/include/custom_nav2_planner/custom_nav2_planner.hpp:
--------------------------------------------------------------------------------
1 | /*
2 | This piece of work has been modified from the nav2 straight_line_planner.hpp.
3 | Original licence below:
4 | */
5 | /*********************************************************************
6 | *
7 | * Software License Agreement (BSD License)
8 | *
9 | * Copyright (c) 2020 Shivang Patel
10 | * All rights reserved.
11 | *
12 | * Redistribution and use in source and binary forms, with or without
13 | * modification, are permitted provided that the following conditions
14 | * are met:
15 | *
16 | * * Redistributions of source code must retain the above copyright
17 | * notice, this list of conditions and the following disclaimer.
18 | * * Redistributions in binary form must reproduce the above
19 | * copyright notice, this list of conditions and the following
20 | * disclaimer in the documentation and/or other materials provided
21 | * with the distribution.
22 | * * Neither the name of Willow Garage, Inc. nor the names of its
23 | * contributors may be used to endorse or promote products derived
24 | * from this software without specific prior written permission.
25 | *
26 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
29 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
30 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
31 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
32 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
33 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
34 | * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
35 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
36 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
37 | * POSSIBILITY OF SUCH DAMAGE.
38 | *
39 | * Author: Shivang Patel
40 | *
41 | * Reference tutorial:
42 | * https://navigation.ros.org/tutorials/docs/writing_new_nav2planner_plugin.html
43 | *********************************************************************/
44 |
45 | #ifndef CUSTOM_NAV2_PLANNER__CUSTOM_NAV2_PLANNER_HPP_
46 | #define CUSTOM_NAV2_PLANNER__CUSTOM_NAV2_PLANNER_HPP_
47 |
48 | #include
49 | #include
50 | #include
51 |
52 | #include "rclcpp/rclcpp.hpp"
53 | #include "geometry_msgs/msg/point.hpp"
54 | #include "geometry_msgs/msg/pose_stamped.hpp"
55 |
56 | #include "nav2_core/global_planner.hpp"
57 | #include "nav_msgs/msg/path.hpp"
58 | #include "nav2_util/robot_utils.hpp"
59 | #include "nav2_util/lifecycle_node.hpp"
60 | #include "nav2_costmap_2d/costmap_2d_ros.hpp"
61 |
62 | namespace custom_nav2_planner
63 | {
64 |
65 | class CustomPlanner : public nav2_core::GlobalPlanner
66 | {
67 | public:
68 | CustomPlanner() = default;
69 | ~CustomPlanner() = default;
70 |
71 | // plugin configure
72 | void configure(
73 | const rclcpp_lifecycle::LifecycleNode::WeakPtr & parent,
74 | std::string name, std::shared_ptr tf,
75 | std::shared_ptr costmap_ros) override;
76 |
77 | // plugin cleanup
78 | void cleanup() override;
79 |
80 | // plugin activate
81 | void activate() override;
82 |
83 | // plugin deactivate
84 | void deactivate() override;
85 |
86 | // This method creates a new path for a given start and goal pose
87 | nav_msgs::msg::Path createPlan(
88 | const geometry_msgs::msg::PoseStamped & start,
89 | const geometry_msgs::msg::PoseStamped & goal) override;
90 |
91 | private:
92 | // TF buffer
93 | std::shared_ptr tf_;
94 |
95 | // node ptr
96 | nav2_util::LifecycleNode::SharedPtr node_;
97 |
98 | // Global Costmap
99 | nav2_costmap_2d::Costmap2D * costmap_;
100 |
101 | // The global frame of the costmap
102 | std::string global_frame_, name_;
103 | };
104 |
105 | } // namespace custom_nav2_planner
106 |
107 | #endif // CUSTOM_NAV2_PLANNER__CUSTOM_NAV2_PLANNER_HPP_
108 |
--------------------------------------------------------------------------------
/packages/custom_nav2_planner/package.xml:
--------------------------------------------------------------------------------
custom_nav2_planner
1.0.0
Custom Nav2 planner plugin
Janne Karttunen
BSD-3-Clause

ament_cmake

rclcpp
rclcpp_action
rclcpp_lifecycle
std_msgs
visualization_msgs
nav2_util
nav2_msgs
nav_msgs
geometry_msgs
builtin_interfaces
tf2_ros
nav2_costmap_2d
nav2_core
pluginlib
create_plan_msgs

ament_lint_auto
ament_lint_common


ament_cmake
--------------------------------------------------------------------------------
/packages/custom_nav2_planner/src/custom_nav2_planner.cpp:
--------------------------------------------------------------------------------
1 | /*
2 | This piece of work has been modified from the nav2 straight_line_planner.cpp.
3 | Original licence below:
4 | */
5 |
6 | /*********************************************************************
7 | *
8 | * Software License Agreement (BSD License)
9 | *
10 | * Copyright (c) 2020 Shivang Patel
11 | * All rights reserved.
12 | *
13 | * Redistribution and use in source and binary forms, with or without
14 | * modification, are permitted provided that the following conditions
15 | * are met:
16 | *
17 | * * Redistributions of source code must retain the above copyright
18 | * notice, this list of conditions and the following disclaimer.
19 | * * Redistributions in binary form must reproduce the above
20 | * copyright notice, this list of conditions and the following
21 | * disclaimer in the documentation and/or other materials provided
22 | * with the distribution.
23 | * * Neither the name of Willow Garage, Inc. nor the names of its
24 | * contributors may be used to endorse or promote products derived
25 | * from this software without specific prior written permission.
26 | *
27 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
28 | * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
29 | * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
30 | * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
31 | * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
32 | * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
33 | * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
34 | * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
35 | * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
36 | * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
37 | * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
38 | * POSSIBILITY OF SUCH DAMAGE.
39 | *
40 | * Author: Shivang Patel
41 | *
42 | * Reference tutorial:
43 | * https://navigation.ros.org/tutorials/docs/writing_new_nav2planner_plugin.html
44 | *********************************************************************/
45 |
#include <chrono>
#include <cmath>
#include <memory>
#include <string>
#include "nav2_util/node_utils.hpp"

#include "custom_nav2_planner/custom_nav2_planner.hpp"
#include "create_plan_msgs/srv/create_plan.hpp"
54 |
55 | using namespace std::chrono_literals;
56 |
57 | namespace custom_nav2_planner
58 | {
59 |
60 | void CustomPlanner::configure(
61 | const rclcpp_lifecycle::LifecycleNode::WeakPtr & parent,
62 | std::string name, std::shared_ptr tf,
63 | std::shared_ptr costmap_ros)
64 | {
65 | node_ = parent.lock();
66 | name_ = name;
67 | tf_ = tf;
68 | costmap_ = costmap_ros->getCostmap();
69 | global_frame_ = costmap_ros->getGlobalFrameID();
70 | }
71 |
72 | void CustomPlanner::cleanup()
73 | {
74 | RCLCPP_INFO(
75 | node_->get_logger(), "CleaningUp plugin %s of type CustomPlanner",
76 | name_.c_str());
77 | }
78 |
79 | void CustomPlanner::activate()
80 | {
81 | RCLCPP_INFO(
82 | node_->get_logger(), "Activating plugin %s of type CustomPlanner",
83 | name_.c_str());
84 | }
85 |
86 | void CustomPlanner::deactivate()
87 | {
88 | RCLCPP_INFO(
89 | node_->get_logger(), "Deactivating plugin %s of type CustomPlanner",
90 | name_.c_str());
91 | }
92 |
93 | nav_msgs::msg::Path CustomPlanner::createPlan(
94 | const geometry_msgs::msg::PoseStamped & start,
95 | const geometry_msgs::msg::PoseStamped & goal)
96 | {
97 | nav_msgs::msg::Path global_path;
98 |
99 | // Checking if the goal and start state is in the global frame
100 | if (start.header.frame_id != global_frame_) {
101 | RCLCPP_ERROR(
102 | node_->get_logger(), "Planner will only accept start position from %s frame",
103 | global_frame_.c_str());
104 | return global_path;
105 | }
106 |
107 | if (goal.header.frame_id != global_frame_) {
108 | RCLCPP_INFO(
109 | node_->get_logger(), "Planner will only accept goal position from %s frame",
110 | global_frame_.c_str());
111 | return global_path;
112 | }
113 |
114 | // Create new client to request a plan from "create_plan" service
115 | rclcpp::Client::SharedPtr client =
116 | node_->create_client("create_plan");
117 |
118 | auto request = std::make_shared();
119 | request->start = start;
120 | request->goal = goal;
121 |
122 | global_path.poses.clear();
123 | global_path.header.stamp = node_->now();
124 | global_path.header.frame_id = global_frame_;
125 |
126 | if (!client->wait_for_service(1s)) {
127 | RCLCPP_WARN(rclcpp::get_logger("rclcpp"), "create_plan service not available.");
128 | return global_path;
129 | }
130 |
131 | auto future = client->async_send_request(request);
132 |
133 | // Wait for the result.
134 | std::future_status status;
135 | auto timeout_duration = std::chrono::seconds(1);
136 | auto start_time = std::chrono::steady_clock::now();
137 |
138 | do {
139 | status = future.wait_for(std::chrono::milliseconds(100));
140 | if (status == std::future_status::ready) {
141 | global_path.poses = future.get()->path.poses;
142 | RCLCPP_INFO(rclcpp::get_logger("rclcpp"), "Got path as a service result");
143 | break;
144 | }
145 | // Check if the timeout has been reached
146 | if (std::chrono::steady_clock::now() - start_time > timeout_duration) {
147 | RCLCPP_ERROR(rclcpp::get_logger("rclcpp"), "Timeout reached while waiting for service result");
148 | return global_path;
149 | }
150 | } while (rclcpp::ok());
151 |
152 | return global_path;
153 | }
154 |
155 | } // namespace custom_nav2_planner
156 |
// Register CustomPlanner with pluginlib so the Nav2 planner server can load
// it at runtime through the nav2_core::GlobalPlanner base class.
#include "pluginlib/class_list_macros.hpp"
PLUGINLIB_EXPORT_CLASS(custom_nav2_planner::CustomPlanner, nav2_core::GlobalPlanner)
159 |
--------------------------------------------------------------------------------