├── .gitignore
├── .pre-commit-config.yaml
├── .pylintrc
├── README.md
├── ROS-notes
├── .gitignore
├── 0.ROS1-Cheat-Sheet.md
├── 1.ROS-essentials-Nov21-Lessons-1-77.md
├── 2.ROS-essentials-Motion-Nov21-L82-112.md
├── 3.ROS-essentials-Perception-Dec21-L114.138.md
├── 4.ROS-essentials-rosserial-Dec21.md
├── 5.ROS-navigation-Intro-Jan22-L001-035.md
├── 6.ROS-navigation-TF-Jan22-L036-051.md
├── 7.ROS-navigation-maps-Apr22-L052-062.md
├── 8.ROS-navigation-tuning-Jul22-L063-070.md
├── 9.ROS-navigation-reactive-Aug22-L071-076.md
├── ROS_Assignment_1.md
├── ROS_Assignment_2.md
├── ROS_Assignment_4.md
├── ROS_Assignment_5.md
├── ROS_Assignment_6.md
├── ROS_Assignment_7.md
├── ROS_ETH_Zurich_L1.md
├── ROS_ETH_Zurich_L2.md
├── ROS_ETH_Zurich_L3.md
├── assets
│ ├── images
│ │ ├── 3D_frames.png
│ │ ├── 3D_rotation_matrix.png
│ │ ├── 3D_transformation_matrix.png
│ │ ├── BUG0_plotjuggler.gif
│ │ ├── Kinect_on_ROS.gif
│ │ ├── Kinect_on_ROS_2.gif
│ │ ├── ROS_computation_graph.png
│ │ ├── SLAM_fun.gif
│ │ ├── Screenshot_2021-11-03_22-24-02.png
│ │ ├── Screenshot_2021-11-12_01-07-54.png
│ │ ├── Screenshot_2021-11-17_21-32-15.png
│ │ ├── Screenshot_2021-11-17_23-57-19.png
│ │ ├── Screenshot_2021-11-26_01-29-39.png
│ │ ├── Screenshot_2021-11-26_01-31-33.png
│ │ ├── Screenshot_2021-12-20_22-14-47.png
│ │ ├── Screenshot_2021-12-20_23-14-06.png
│ │ ├── Screenshot_2022-01-11_00-17-46-arrows.png
│ │ ├── Screenshot_2022-01-11_00-32-39.png
│ │ ├── Screenshot_2022-01-11_000545.png
│ │ ├── Screenshot_2022-01-11_000614.png
│ │ ├── Screenshot_2022-01-11_001149.png
│ │ ├── bouncy_robot.gif
│ │ ├── bug1_1st_turn.gif
│ │ ├── drone.png
│ │ ├── euler_angles.png
│ │ ├── frame_tree_minipupper.png
│ │ ├── frames.png
│ │ ├── frames_minipupper.gif
│ │ ├── gazebo_and_navstack.png
│ │ ├── gazebo_and_rviz.png
│ │ ├── gazebo_only.png
│ │ ├── husky.gif
│ │ ├── icon-drone.png
│ │ ├── mymap.png
│ │ ├── pure_rotation.png
│ │ ├── pure_translation.png
│ │ ├── rqt_reconfigure.png
│ │ ├── static_transform.png
│ │ ├── transformation_matrix.png
│ │ └── trigonometry-obstacle-angle.jpg
│ ├── sources
│ │ ├── clean.zip
│ │ ├── drawings.odp
│ │ ├── frames.pdf
│ │ ├── mymap.pgm
│ │ ├── mymap.yaml
│ │ ├── mymap2.pgm
│ │ ├── mymap2.yaml
│ │ ├── pedestrian.png
│ │ ├── robot-car.png
│ │ ├── trigo.odp
│ │ ├── udemy-ROS-essentials-diploma-Jan22.jpg
│ │ ├── udemy-ROS-essentials-diploma-Jan22.pdf
│ │ ├── udemy-ROS-navigation-diploma-Aug22.jpg
│ │ └── udemy-ROS-navigation-diploma-Aug22.pdf
│ └── videos
│ │ ├── Kinect_in_ROS.mp4
│ │ ├── LaserScan_from_Kinect.mp4
│ │ └── SLAM_fun.mp4
├── bagfiles
│ ├── .gitignore
│ └── BUG0_layout.xml
├── ball-detection
│ ├── .gitignore
│ ├── ball_detection.py
│ ├── oranges
│ │ ├── index.jpeg
│ │ ├── orange2.jpeg
│ │ ├── orange3.jpeg
│ │ ├── orange5.jpeg
│ │ ├── oranges.md
│ │ ├── oranges4.jpeg
│ │ ├── oranges6.jpeg
│ │ ├── oranges7.jpeg
│ │ ├── oranges8.jpeg
│ │ └── oranges9.jpeg
│ ├── tennis_ball_listener.py
│ ├── tennis_ball_publisher.py
│ └── tennis_ball_usb_cam_tracker.py
└── course-materials
│ ├── 001-course-overview.pdf
│ ├── 003-ros-overview-slides.key.pdf
│ ├── 018-install-ros.pdf
│ ├── 034-ROScheatsheet.pdf
│ ├── 034-anis-koubaa-ros-course.pdf
│ ├── 034-ros-file-system-notes.pdf
│ ├── 083-ros-motion.pdf
│ ├── 111-Turtlebot3.pdf
│ ├── 114-opencv-ros.pdf
│ ├── 131-laser-scanners.pdf
│ ├── 137-2018-08-13-16-42-15.bag
│ ├── 140-Arduino.tar.gz
│ ├── 145-Arduino.tar.gz
│ ├── Course_02-02-L020-2D-transformations.pdf
│ ├── Course_02-03_L028_3D-transformations.pdf
│ ├── Course_02-04_L036_tf.pdf
│ ├── Course_02-05_L052_map-based-navigation.pdf
│ ├── Course_02-06-L063-navigation-stack-tuning.pdf
│ ├── Course_02-L063-navguide.pdf
│ ├── ETH_Zurich_Exercises_1.pdf
│ ├── ETH_Zurich_Exercises_2.pdf
│ ├── ETH_Zurich_Exercises_3.pdf
│ ├── ETH_Zurich_Slides_1.pdf
│ ├── ETH_Zurich_Slides_2.pdf
│ └── ETH_Zurich_Slides_3.pdf
├── poetry.lock
├── pyproject.toml
└── src
├── .gitignore
├── mhered
├── launch
│ ├── bug.launch
│ ├── clean_py.launch
│ ├── cleaner.launch
│ ├── cleaner_param.launch
│ ├── follower.launch
│ ├── my_turtle.launch
│ ├── robot.launch
│ └── static_transform_publisher.launch
├── msg
│ └── IoTSensor.msg
├── src
│ ├── add2ints_client.py
│ ├── add2ints_server.py
│ ├── bug_0.py
│ ├── bug_1.py
│ ├── bug_1_pid.py
│ ├── bug_2.py
│ ├── bug_student1.py
│ ├── bug_student2.py
│ ├── bug_student3.py
│ ├── clean.py
│ ├── cleaner_robot.py
│ ├── frame_a_to_frame_b_broadcaster.py
│ ├── frame_a_to_frame_b_listener.py
│ ├── images
│ │ ├── copies
│ │ │ ├── copy-flower.jpg
│ │ │ ├── copy-shapes.png
│ │ │ ├── copy-tomato.jpg
│ │ │ ├── copy-tree.jpg
│ │ │ └── tomato-copy.jpg
│ │ ├── flower.jpg
│ │ ├── shapes.png
│ │ ├── tomato.jpg
│ │ └── tree.jpg
│ ├── iot_publisher.py
│ ├── iot_subscriber.py
│ ├── readvideo.py
│ ├── robot_bouncy.py
│ ├── robot_instructor.py
│ ├── robot_lib.py
│ ├── robot_marauder.py
│ ├── robot_student1.py
│ ├── robot_student2.py
│ ├── sandbox.py
│ ├── scan_subscriber.py
│ ├── testcv2.py
│ ├── tf_broadcaster.py
│ ├── tf_listener.py
│ ├── turtlebot2.py
│ └── utils.py
└── srv
│ └── AddTwoInts.srv
├── my_husky.launch
├── ros_best_practices
├── ros_course_part2
├── README.md
└── src
│ ├── topic01_quaternion
│ ├── tf_orientation_tb3_robot.py
│ └── tf_rotation_conversions.py
│ ├── topic02_tf_tutorials
│ ├── launch
│ │ ├── rosrun tf static_transform_publisher command example
│ │ └── static_transform_publisher.launch
│ ├── transform_publisher
│ │ ├── frame_a_to_frame_b_broadcaster.cpp
│ │ ├── frame_a_to_frame_b_broadcaster.py
│ │ └── frame_a_to_frame_b_listener.py
│ ├── turtlebot
│ │ └── turtlebot_move.py
│ └── turtlesim
│ │ ├── broadcast_transform.py
│ │ ├── launch
│ │ ├── start_demo.launch
│ │ └── tf_turtlesim_broadcast.launch
│ │ ├── turtle_tf_broadcaster.py
│ │ └── turtle_tf_listener.py
│ └── topic03_map_navigation
│ ├── navigate_goal.cpp
│ ├── navigate_goal.py
│ └── tb3map
│ ├── tb3_house_map.pgm
│ └── tb3_house_map.yaml
├── ros_service_assignment
├── src
│ ├── area_rect_client.py
│ └── area_rect_server.py
└── srv
│ └── RectangleAreaService.srv
└── teleop_twist_keyboard
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | build/
3 | devel/
4 | logs/
5 | .catkin_workspace
6 |
7 | .catkin_tools
8 |
9 | __pycache__/
10 |
11 |
12 | **/CMakeLists.txt
13 | **/package.xml
14 |
15 | # this is for Mini Pupper
16 | /src/champ/
17 | /src/champ_teleop/
18 | /src/robots/
19 | /src/yocs_velocity_smoother/
20 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # See https://pre-commit.com for more information
2 | # See https://pre-commit.com/hooks.html for more hooks
3 | repos:
4 | - repo: https://github.com/pre-commit/pre-commit-hooks
5 | rev: v4.0.1
6 | hooks:
7 | - id: check-toml
8 | - id: check-yaml
9 | - id: end-of-file-fixer
10 | - id: mixed-line-ending
11 |
12 | - repo: https://github.com/psf/black
13 | rev: 22.3.0
14 | hooks:
15 | - id: black
16 |
17 | - repo: https://github.com/PyCQA/isort
18 | rev: 5.10.1
19 | hooks:
20 | - id: isort
21 | args: ["--profile", "black"]
22 |
23 | - repo: https://github.com/PyCQA/flake8
24 | rev: 4.0.1
25 | hooks:
26 | - id: flake8
27 | additional_dependencies: [mccabe]
28 | args: ["--max-line-length", "88", "--max-complexity", "10"]
29 |
30 | - repo: https://github.com/PyCQA/pylint/
31 | rev: v2.14.5
32 | hooks:
33 | - id: pylint
34 | exclude: tests/ # Prevent files in tests/ to be passed in to pylint.
35 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Notes on ROS
2 |
3 | This repo contains my notes, assignments and learning material to get started with ROS.
4 |
5 | [ROS 1 cheat sheet](./ROS-notes/0.ROS1-Cheat-Sheet.md)
6 |
7 | 06/09/22: Added pre-commit hooks to improve code quality. Reformatting is currently WIP. ~~Until the needed reformatting is complete it is possible to commit skipping linting checks with: `$ SKIP=flake8,pylint git commit ...`~~ (08/09/22) some checks of variable names and use of globals are temporarily disabled with `.pylintrc`:
8 |
9 | * `C0103`, uppercase naming style for constants
10 | * `W0602`, global-variable-not-assigned
11 | * `W0603`, global-statement
12 | * `W0604`, global-at-module-level
13 |
14 | ## ROS for Beginners I: Basics, Motion, and OpenCV
15 |
16 | [ROS for Beginners: Basics, Motion, and OpenCV](https://www.udemy.com/course/ros-essentials/)
17 |
18 |
19 |
20 | Udemy ROS for Beginners course by [Anis Koubaa](https://www.udemy.com/user/anis-koubaa)
21 |
22 | (Purchased on Udemy on 15.07.21 with username spam)
23 |
24 | [Part 1](./ROS-notes/1.ROS-essentials-Nov21-Lessons-1-77.md): ROS basics (Lessons #001-#081)
25 |
26 | * [Assignment 1](./ROS-notes/ROS_Assignment_1.md)
27 | * [Assignment 2](./ROS-notes/ROS_Assignment_2.md)
28 | * Assignment 3
29 | * [Assignment 4](./ROS-notes/ROS_Assignment_4.md)
30 |
31 | [Part 2](./ROS-notes/2.ROS-essentials-Motion-Nov21-L82-112.md): Motion with ROS (Lessons #082-#113)
32 |
33 | * [Assignment 5](./ROS-notes/ROS_Assignment_5.md)
34 |
35 | [Part 3](./ROS-notes/3.ROS-essentials-Perception-Dec21-L114.138.md): Perception: Computer vision with OpenCV and laser scanners (L#114-#139)
36 |
37 | * [Assignment 6](./ROS-notes/ROS_Assignment_6.md)
38 | * [Assignment 7](./ROS-notes/ROS_Assignment_7.md): **bouncy** and **marauder** obstacle avoidance with Turtlebot3
39 |
40 | [Part 4](./ROS-notes/4.ROS-essentials-rosserial-Dec21.md): Arduino bots and sensors with ROS (Lessons #140-#150)
41 |
42 | ### TO DO on ROS for Beginners I as of 23.08.22
43 |
44 | - [ ] Lessons #27 and #28 on virtual machines
45 | - [ ] Lesson #108 on ROS network config
46 | - [ ] Order code in `Arduino` folder
47 | - [ ] order code in `catkin_ws/src` folder
48 |
49 | ## ROS for Beginners II: Localization, Navigation and SLAM
50 | [ROS for Beginners II: Localization, Navigation and SLAM](https://www.udemy.com/course/ros-navigation/)
51 |
52 |
53 |
54 | Udemy ROS for Beginners course by [Anis Koubaa](https://www.udemy.com/user/anis-koubaa)
55 |
56 | (Purchased on Udemy on 15.07.21 with username spam)
57 |
58 | [Part 5](./ROS-notes/5.ROS-navigation-Intro-Jan22-L001-035.md): Introduction (Lessons #001 to #035 and **Assignment 1** Quiz)
59 |
60 | [Part 6](./ROS-notes/6.ROS-navigation-TF-Jan22-L036-051.md): TF (Lessons #036 to #051 + MiniPupper example)
61 |
62 | [Part 7](./ROS-notes/7.ROS-navigation-maps-Apr22-L052-062.md): maps (Lessons #052 to #062)
63 |
64 | [Part 8](./ROS-notes/8.ROS-navigation-tuning-Jul22-L063-070.md): navigation parameter tuning (Lessons #063 to #070)
65 |
66 | [Part 9](./ROS-notes/9.ROS-navigation-reactive-Aug22-L071-076.md): reactive navigation (Lessons #071 to #076 and **Assignment 2: BUG 0**)
67 |
68 | ## ETH Zurich Programming for Robotics (ROS) Course
69 | [2021 Programming for Robotics (ROS) Course homepage](https://rsl.ethz.ch/education-students/lectures/ros.html): ETH Zurich Course 4 lessons + case studies. Videos + course material (lessons, exercises etc) available to download. I originally discovered this course through this Youtube playlist from 2017: [2017 Programming for Robotics (ROS) Course Youtube playlist](https://www.youtube.com/playlist?list=PLE-BQwvVGf8HOvwXPgtDfWoxd4Cc6ghiP).
70 |
71 | [Lesson #1](./ROS-notes/ROS_ETH_Zurich_L1.md)
72 |
73 | [Lesson #2](./ROS-notes/ROS_ETH_Zurich_L2.md)
74 |
75 | [Lesson #3](./ROS-notes/ROS_ETH_Zurich_L3.md)
76 |
77 | ## Articulated Robotics
78 |
79 | [Articulated Robotics](https://articulatedrobotics.xyz/) is a phenomenal series of blog and video tutorials by Josh Newans including a step by step guide to building your own ROS2-based mobile robot with camera and LIDAR. My notes on Articulated Robotics have moved to the [manolobot](https://github.com/mhered/manolobot) repository where I keep my own project inspired in these tutorials.
80 |
81 | ## Other ROS resources to check:
82 |
83 | * [ROS robotics learning website](https://www.rosroboticslearning.com/) , [youtube video of DIY Jetson nano+ RPLidar robot](https://www.youtube.com/watch?v=Uz_i_sjVhIM) and [github repo](https://github.com/bandasaikrishna/Autonomous_Mobile_Robot)
84 | * [JdeRobot robotics education website](https://jderobot.github.io/projects/robotics_education/) and [Robotics-Academy exercises](https://jderobot.github.io/RoboticsAcademy/exercises/)
85 | * [Exploring ROS using a 2-wheeled robot](https://www.youtube.com/playlist?list=PLK0b4e05LnzY2I4sXWTOA4_82cMh6tL-5) series of 13 videos 10-30' long each by The Construct covering URDF modelling, laser scans, obstacle avoidance, motion planning, wall follower, Bug0, 1 & 2, gmapping.
86 |
--------------------------------------------------------------------------------
/ROS-notes/.gitignore:
--------------------------------------------------------------------------------
1 | # bagfiles/
2 | .catkin_workspace
3 |
--------------------------------------------------------------------------------
/ROS-notes/0.ROS1-Cheat-Sheet.md:
--------------------------------------------------------------------------------
1 | # ROS 1 Cheat sheet
2 |
3 | ## Getting info about message in a topic
4 |
5 | With the topic running use rostopic pub and TAB to autocomplete the message structure:
6 |
7 | ```bash
8 | $ rostopic pub + DOUBLE TAB
9 | ```
10 |
--------------------------------------------------------------------------------
/ROS-notes/4.ROS-essentials-rosserial-Dec21.md:
--------------------------------------------------------------------------------
1 | Date: 27.12
2 |
3 | ### (#145-150) Ultrasound sensor with `rosserial`
4 |
5 | [HC-SR04 specsheet](https://cdn.sparkfun.com/datasheets/Sensors/Proximity/HCSR04.pdf):
6 |
7 | Working Voltage DC 5 V
8 | Working Current 15mA
9 | Working Frequency 40Hz
10 | Range 2cm - 4m
11 | MeasuringAngle 15 degree
12 | Trigger Input Signal 10uS TTL pulse
13 | Echo Output Signal Input TTL lever signal and the range in
14 | proportion
15 | Dimension 45x20x15mm
16 |
17 | Formula:
18 |
19 | uS / 58 = centimeters, or
20 | range = high level time * velocity (340M/S) / 2
21 |
22 | use over 60ms measurement cycle to prevent trigger signal to the echo signal
23 |
24 | [sensor_msgs/Range ROS message](http://docs.ros.org/en/noetic/api/sensor_msgs/html/msg/Range.html) definition:
25 |
26 | ```
27 | # Single range reading from an active ranger that emits energy and reports
28 | # one range reading that is valid along an arc at the distance measured.
29 | # This message is not appropriate for laser scanners. See the LaserScan
30 | # message if you are working with a laser scanner.
31 |
32 | # This message also can represent a fixed-distance (binary) ranger. This
33 | # sensor will have min_range===max_range===distance of detection.
34 | # These sensors follow REP 117 and will output -Inf if the object is detected
35 | # and +Inf if the object is outside of the detection range.
36 |
37 | Header header # timestamp in the header is the time the ranger
38 | # returned the distance reading
39 |
40 | # Radiation type enums
41 | # If you want a value added to this list, send an email to the ros-users list
42 | uint8 ULTRASOUND=0
43 | uint8 INFRARED=1
44 |
45 | uint8 radiation_type # the type of radiation used by the sensor
46 | # (sound, IR, etc) [enum]
47 |
48 | float32 field_of_view # the size of the arc that the distance reading is
49 | # valid for [rad]
50 | # the object causing the range reading may have
51 | # been anywhere within -field_of_view/2 and
52 | # field_of_view/2 at the measured range.
53 | # 0 angle corresponds to the x-axis of the sensor.
54 |
55 | float32 min_range # minimum range value [m]
56 | float32 max_range # maximum range value [m]
57 | # Fixed distance rangers require min_range==max_range
58 |
59 | float32 range # range data [m]
60 | # (Note: values < range_min or > range_max
61 | # should be discarded)
62 | # Fixed distance rangers only output -Inf or +Inf.
63 | # -Inf represents a detection within fixed distance.
64 | # (Detection too close to the sensor to quantify)
65 | # +Inf represents no detection within the fixed distance.
66 | # (Object out of range)
67 | ```
68 | Note `header` is of type [std_msgs/Header](http://docs.ros.org/en/noetic/api/std_msgs/html/msg/Header.html)
69 |
70 |
71 | ```
72 | header.seq #uint32 sequential number
73 | header.stamp.secs # timestamp part1: int with # of secs since epoc start
74 | header.stamp.nsecs # timestamp part2: int with # of millisecs (to be added to secs)
75 | # timestamp can be obtained with function .now()
76 | header.frame_id # string with frame ID
77 |
78 | radiation_type = 0 # ULTRASOUND
79 | field_of_view = 0.2618 # 15 degrees from datasheet. He claims it is irrelevant.
80 | min_range = 0.02 # 2cm
81 | max_range = 4 # 4m (he uses 1.5m?)
82 | range = # float with Range data
83 | ```
84 |
85 | Arduino code:
86 |
87 | ```c++
88 | #include <ros.h>
89 | #include <ros/time.h>
90 | #include <sensor_msgs/Range.h>
91 |
92 | // create the Arduino node
93 | ros::NodeHandle nh;
94 |
95 | // create a ROS message of type sensor_msg/Range
96 | sensor_msgs::Range range_msg;
97 |
98 | // init publisher: Receives a topic name and reference to the sensor message
99 | ros::Publisher pub_range("/ultrasound_range", &range_msg);
100 |
101 | // needed for header
102 | char frame_id[] = "/ultrasound";
103 |
104 | // input and output pins, these constants won't change
105 | const int trigPin = 9;
106 | const int echoPin = 8;
107 |
108 | // needed for range calculation
109 | float flight_time, distance_cm;
110 |
111 | void setup() {
112 | // Runs once
113 |
114 | // initialize node
115 | nh.initNode();
116 | // advertise the publisher to master
117 | nh.advertise(pub_range);
118 |
119 | // define static parameters of ROS message:
120 | range_msg.radiation_type = sensor_msgs::Range::ULTRASOUND;
121 | range_msg.header.frame_id = frame_id;
122 | range_msg.field_of_view = 0.1; // fake
123 | range_msg.min_range = 0.02; // 2 cm
124 | range_msg.max_range = 1.50; // 1.5 m
125 |
126 | Serial.begin(9600);
127 | pinMode(trigPin, OUTPUT);
128 | pinMode(echoPin, INPUT);
129 |
130 | }
131 |
132 | void loop() {
133 | // Main code, runs repeatedly
134 |
135 | // update range measurement
136 | digitalWrite(trigPin,HIGH);
137 | delayMicroseconds(10);
138 | digitalWrite(trigPin,LOW);
139 |
140 | flight_time = pulseIn(echoPin, HIGH);
141 | distance_cm = 0.017 * flight_time;
142 | range_msg.range = distance_cm;
143 |
144 | // update time stamp
145 | range_msg.header.stamp = nh.now();
146 |
147 | // publish ROS message
148 | pub_range.publish(&range_msg);
149 | nh.spinOnce();
150 |
151 | delay(500);
152 |
153 | }
154 | ```
155 |
156 |
157 |
158 | Run the server to start listening to the serial port where Arduino is connected:
159 |
160 | ```bash
161 | $ roscore
162 | $ rosrun rosserial_python serial_node.py /dev/ttyUSB0 _baud:=9600
163 | ```
164 |
165 | **NOTE: explicitly set baud rate to avoid following error:**
166 |
167 | ```
168 | [ERROR] [1640657586.544482]: Unable to sync with device; possible link problem or link software version mismatch such as hydro rosserial_python with groovy Arduino
169 | ```
170 |
171 | Check topic is available:
172 |
173 | ```
174 | $ rostopic list
175 | $ rostopic info /ultrasound_range
176 | $ rostopic echo /ultrasound_range
177 | ```
178 |
--------------------------------------------------------------------------------
/ROS-notes/5.ROS-navigation-Intro-Jan22-L001-035.md:
--------------------------------------------------------------------------------
1 | ---
2 | typora-copy-images-to: assets/images
3 | ---
4 |
5 | ### (#003) Installation
6 |
7 | Check that the navigation stack and SLAM packages are installed or install them manually with
8 |
9 | ```bash
10 | $ sudo apt-get install ros-noetic-navigation
11 | $ sudo apt-get install ros-noetic-slam-gmapping
12 | ```
13 |
14 | ### (#004-006) Installation and overview of Turtlebot3
15 |
16 | Same as lessons #111-113 of ROS1 course and [Turtlebot.pdf](../course-materials/111-Turtlebot3.pdf), notes [here](./2.ROS-essentials-Motion-Nov21-L82-112.md).
17 |
18 | #### Example 1 - manually move a robot w/ keyboard in a maze world visualizing in RVIZ (lidar + camera):
19 |
20 | Terminal 1: `roslaunch turtlebot3_gazebo turtlebot3_world.launch` (or simply: `$ tb3maze` using the `~/.bashrc` shortcut)
21 |
22 | Terminal 2:`$ roslaunch turtlebot3_gazebo turtlebot3_gazebo_rviz.launch`
23 |
24 | Terminal 3: `roslaunch turtlebot3_teleop turtlebot3_teleop_key.launch` (or simply `$ tb3teleop`).
25 |
26 | Increase size of scan points from 0.01 to 0.04
27 |
28 | ```bash
29 | $ rosnode list
30 | /gazebo
31 | /gazebo_gui
32 | /robot_state_publisher
33 | /rosout
34 | /rviz
35 | /turtlebot3_teleop_keyboard
36 | ```
37 |
38 | #### Example 2: SLAM for MAP building:
39 |
40 | Terminal 1:`$ roslaunch turtlebot3_slam turtlebot3_slam.launch slam_methods:=gmapping`
41 |
42 | Terminal 2: `roslaunch turtlebot3_teleop turtlebot3_teleop_key.launch` (or simply `$ tb3teleop`).
43 |
44 | ### (#008) Install course github repo
45 |
46 | ```bash
47 | $ cd ~/catkin_ws/src/
48 | $ git clone https://github.com/aniskoubaa/ros_course_part2.git
49 | cd ~/catkin_ws && catkin_make
50 | ```
51 |
52 | ### (#009-014) Launch navigation demo
53 |
54 | 1. Note on path setting for map files: In the file `tb3_house_map.yaml` the first line must point to the full path of the map file `tb3_house_map.pgm`, e.g.
55 |
56 | ````yaml
57 | image: /home/mhered/catkin_ws/src/ros_course_part2/src/topic03_map_navigation/tb3map/tb3_house_map.pgm
58 | ````
59 |
60 | 2) Launch gazebo
61 |
62 | ```bash
63 | $ roslaunch turtlebot3_gazebo turtlebot3_house.launch
64 | ```
65 |
66 | Or using the alias: `$ tb3house`
67 |
68 | 3) Select waffle robot if not selected
69 |
70 | ```bash
71 | $ export TURTLEBOT3_MODEL=waffle
72 | ```
73 |
74 | With the alias simply `$ waffle`
75 |
76 | 4) Run the navigation stack with the map file for **RVIZ** pointing to the full path of the `tb3_house_map.yaml` file:
77 |
78 | ```
79 | $ roslaunch turtlebot3_navigation turtlebot3_navigation.launch map_file:=/home/mhered/catkin_ws/src/ros_course_part2/src/topic03_map_navigation/tb3map/tb3_house_map.yaml
80 | ```
81 |
82 | ### (#15) Initial robot location
83 |
84 | Comparing Gazebo (left image, the "reality" in this case) with RVIZ (right image, the robot perception) there is a mismatch. This is because the robot needs an estimate of its initial pose, otherwise does not know how to match the map with the environment. The robot believes that its location is on the green arrow, when it is on the red arrow. The top yellow arrow shows the shift needed to correct this error. Note that the laserscan information (the green dots represent where the laserscan hits the walls) also appear shifted from the map also by the same amount (bottom yellow arrow) .
85 |
86 | 
87 |
88 | Tell the robot its current location and orientation clicking on **2D Pose Estimate**. After repositioning the map, check that both the robot location and the laserscan match with the map, see below:
89 |
90 | 
91 |
92 | 5. See [RVIZ documentation](http://wiki.ros.org/rviz/UserGuide#The_different_camera_types) to understand how to manipulate the different camera views (use left, middle and right mouse clicks and scroll). I found orthogonal view frustrating at the beginning because it is always looking down. Note as well that the mouse-clicks are slightly different in Gazebo.
93 |
94 |
95 |
96 | MP4 video (with sound) available at [assets/videos/SLAM_fun.mp4](./assets/videos/SLAM_fun.mp4)
97 |
98 | ### (#016-017) Frames
99 |
100 | To visualize in **RVIZ** the different reference frames click **Add** -> **TF**:
101 |
102 | * `map` Fixed global frame related to the map of the environment, origin of the grid: (0, 0) is at the centre.
103 |
104 | * `odom` odometry reference frame. Note: `odom`frame is shifted vs `map` frame!
105 |
106 | * Several frames attached to the robot.
107 |
108 | Topics:
109 |
110 | `/amcl_pose` global pose of the robot in `map` ref frame
111 |
112 | `/odom`: pose and twist of the robot based in odometry information. Expressed in `odom` frame, related to the odometry of the robot. It represents a relative location rather than a global location. Topic contains a timestamp, `frame_id` is reference frame , pose, twist.
113 |
114 |
115 |
116 | ```
117 | $ rostopic echo /odom
118 | ...
119 | ---
120 | header:
121 | seq: 169794
122 | stamp:
123 | secs: 5659
124 | nsecs: 834000000
125 | frame_id: "odom"
126 | child_frame_id: "base_footprint"
127 | pose:
128 | pose:
129 | position:
130 | x: 3.05850913877631
131 | y: 1.8567955432643994
132 | z: -0.0010082927066695398
133 | orientation:
134 | x: -0.0006763989802751724
135 | y: 0.00144164541567888
136 | z: 0.4239865358476224
137 | w: 0.9056670920171759
138 | covariance: [1e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1e-05, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1000000000000.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1000000000000.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1000000000000.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.001]
139 | twist:
140 | twist:
141 | linear:
142 | x: 3.3582185509467775e-07
143 | y: -9.832493355169816e-07
144 | z: 0.0
145 | angular:
146 | x: 0.0
147 | y: 0.0
148 | z: -9.519550506846002e-06
149 | covariance: [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
150 | ---
151 | ...
152 | ```
153 |
154 | ---
155 |
156 | ### (#018) Quaternions
157 |
158 | Default ROS orientation representation is quaternion (x,y,z,w), not as easy to interpret as roll, pitch, yaw
159 |
160 | ### (#019) Navigation
161 |
162 | 1. Click **2D Nav Goal** to define target pose on map.
163 | 2. Static global planner: first ROS plans a **static path** avoiding static obstacles
164 | 3. Dynamic local planner: then ROS executes the path with the local path planner, which also avoids dynamic obstacles
165 |
166 | Configuration of navigation parameters is key to a good performance.
167 |
168 | Recap of map-based robot navigation:
169 |
170 | * First, localize the robot in its correct initial location using **2D Pose Estimate**.
171 |
172 | * Send a goal location to the robot using **2D Nav Goal** and the robot will head towards the destination location. The navigation stack of ROS is responsible for the planning and the execution of the path.
173 |
174 | ### (#021-027) 2D Pose and Transformations
175 |
176 | 2D Pose = Position x,y + orientation theta with reference to a frame of coordinates F
177 |
178 | The robot can locate itself (e.g. with GPS or odometry) and locate other objects with respect to itself through sensors (e.g LIDAR or a camera). Then what is the location of the person in the world frame?
179 |
180 | 
181 |
182 | * Location and orientation of car frame (x, y, phi)^w_c in the world frame F^world
183 | * Location of person (x, y) in the car frame F^car
184 |
185 | For pure translation (x_t, y_t), the 2D Frame transformation is:
186 |
187 | 
188 |
189 | For pure rotation (phi_t), the 2D Frame transformation is:
190 |
191 | 
192 |
193 | The general transformation matrix, using homogeneous coordinates (x,y,1):
194 |
195 | 
196 |
197 | ### (#028-035) 3D Pose and Transformations
198 |
199 | Right-hand rule to assign sign to Z-axis.
200 |
201 | 
202 |
203 | * Rotation around the X-axis **Rx**: roll or bank
204 |
205 | * Rotation around the Y-axis **Ry**: pitch or attitude
206 |
207 | * Rotation around the Z-axis **Rz**: yaw or heading
208 |
209 | The 3D transformation matrix can be obtained from a 3D Rotation matrix **R** and a 3D Translation Vector **t**:
210 |
211 | 
212 |
213 | 
214 |
215 | * The 3D Rotation matrix **R** can be obtained as the product of the three unitary rotations:
216 |
217 | 
218 |
219 |
220 |
221 | 3 methods to represent a 3D rotation.
222 |
223 | * **Euler** angles or Three-angle representation. Easy to interpret. Two basic types:
224 |
225 | * Euler rotations: may involve repetition (but not successive) of rotations about one axis (XYX, XZX, YXY, YZY, ZXZ, ZYZ). Consequence of Euler theorem.
226 | * Cardan rotations: particular case where the 3 axes are involved (XYZ, XZY, YXZ, YZX, ZXY, ZYX)
227 |
228 | * Rotation about **arbitrary vector**: for any 3D rotation there exists an axis around which the rotation occurs. Rodrigues Formula allows one to determine the 3D rotation matrix from the 3D vector **u** describing the axis and the angle rotated theta
229 |
230 | * **Quaternions** describe 3D rotation with a vector (x,y,z) and a scalar w. Notation in ROS is (x, y, z, w)
231 |
232 | q = x · **i** + y · **j** + z · **k** + w , where **i**, **j**, **k** are unit vectors
233 |
234 | It is possible to obtain any one representation from any other (Euler angles, arbitrary vector, quaternion, and 3D rotation matrix). Quaternions are often preferred in many applications because they are simpler, more compact, numerically more stable, more efficient and avoid gimbal lock. Drawback: not intuitive.
235 |
--------------------------------------------------------------------------------
/ROS-notes/8.ROS-navigation-tuning-Jul22-L063-070.md:
--------------------------------------------------------------------------------
1 | ## Section 8 (#63-70): Systematic tuning of navigation stack parameters to optimize performance
2 |
3 | Recommended source: [ROS navigation tuning guide by Kaiyu Zheng](../course-materials/Course_02-L063-navguide.pdf), original here: https://kaiyuzheng.me/documents/navguide.pdf
4 |
5 | Local path planner produces twist messages.
6 |
7 | Need to define min and max velocities in the yaml config file, e.g.
8 |
9 | ```bash
10 | $ roscd turtlebot3_navigation
11 | $ cd param
12 | $ nano dwa_local_planner_params_waffle.yaml
13 | ```
14 |
15 | max velocities:
16 |
17 | - translation: move forward then echo odom
18 | - rotation: make the robot turn in place then echo odom
19 |
20 | max accels:
21 |
22 | * measure time to reach max speed using time stamps then divide to calculate accel
23 | * measure time to reach max rotational speed using time stamps then divide to calculate rotation accel
24 |
25 | min velocities: negative so the robot can backtrack to become unstuck
26 |
27 | y-velocity: 0 for non-holonomic robots
28 |
29 | Global path planners in ROS must implement the interface `nav_core::BaseGlobalPlanner`
30 |
31 | * `initialize` method
32 |
33 | * 2 `makePlan` methods
34 |
35 | * destructor
36 |
37 | Tutorial to write a path planner: http://wiki.ros.org/navigation/Tutorials/Writing%20A%20Global%20Path%20Planner%20As%20Plugin%20in%20ROS
38 |
39 | 3 built-in global planners in ROS:
40 |
41 | * `carrot_planner` simplest but inappropriate for complex environments. Tries to reach goal even if it is unreachable.
42 | * `navfn` implements Dijkstra or A* algorithms
43 | * `global_planner` replaces `navfn` but has more options
44 |
45 | Local path planners in ROS must implement the interface `nav_core::BaseLocalPlanner`
46 |
47 | * `initialize` method executed once
48 |
49 | * `isGoalReached` method
50 |
51 | * `computeVelocityCommands` method
52 |
53 | * `setPlan` method
54 |
55 | * destructor
56 |
57 | ### DWA (Dynamic Window Approach) local path planner
58 |
59 | `dwa_local_planner`package.
60 |
61 | DWA algorithm:
62 |
63 | 1. generate a random set of sample twists (pairs of linear and angular velocities)
64 | 2. for each sample twist simulate the resulting short-term trajectory (circle arcs)
65 | 3. evaluate each trajectory using an objective function that considers e.g. speed and proximity to obstacles, the goal, and the global path
66 | 4. discard illegal trajectories (e.g. colliding with obstacles)
67 | 5. select the twist resulting in the highest scoring trajectory and send it to the robot
68 | 6. repeat until robot reaches goal or gets stuck
69 |
70 | Requires cost maps: *global costmap* contains only static obstacles, the *local costmap* contains also the dynamic ones detected by the laserscan.
71 |
72 | ### Tuning DWA parameters:
73 |
74 | `sim_time` needs a balance: longer simulation times may improve performance but require heavier computation and use more battery. Typical 3<`sim_time`<5.
75 |
76 | objective function to minimize:
77 |
78 | ````
79 | cost = p_dist * distance from endpoint to global path + g_dist * distance from endpoint to goal + occdist * max cost along trajectory
80 | ````
81 |
82 | `path_distance_bias` weight that controls how close to global path should local planner stay
83 |
84 | `goal_distance_bias` weight that controls how much to prioritize distance to goal regardless of the path
85 |
86 | `occdist_scale` how much should robot avoid obstacles. Too high makes robot indecisive and stuck
87 |
88 | ### Demo
89 |
90 | 1. #### Launch the gazebo simulation environment for Turtlebot3 waffle in a house scenario:
91 |
92 | ```bash
93 | (Terminal 1) $ roslaunch turtlebot3_gazebo turtlebot3_house.launch
94 | ```
95 |
96 | 2. #### Launch the Turtlebot3 navigation stack with the map created:
97 |
98 | ```bash
99 | (Terminal 2) $ roslaunch turtlebot3_navigation turtlebot3_navigation.launch map_file:=/home/mhered/catkin_ws/ROS-notes/assets/sources/mymap2.yaml
100 | ```
101 |
102 | 3. #### Position the robot in the map with 2D Pose Estimate
103 |
104 | 4. #### Play with parameters and send goal locations to observe behaviour
105 |
106 | Parameters can be edited directly in the yaml config file e.g. `dwa_local_planner_params_waffle.yaml` or dynamically with `rqt_reconfigure` that allows to edit live the parameter server:
107 |
108 | ```bash
109 | (Terminal 3) $ rosrun rqt_reconfigure rqt_reconfigure
110 | ```
111 |
112 | #### 
113 |
114 | ###
115 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_Assignment_1.md:
--------------------------------------------------------------------------------
1 | # Assignment 1
2 |
3 | What is the first command you must run in ROS? `roscore`
4 |
5 | What is the command to run the Turtlesim simulator? `rosrun turtlesim turtlesim_node`
6 |
7 | What is the command to find the list of all ROS nodes? `rosnode list`
8 |
9 | What is the command to find the list of all ROS topics? `rostopic list`
10 |
11 | What is the topic that tells about the position of the turtle? `turtle1/pose`
12 |
13 | What is the topic that sends command to the turtle to make it move? `turtle1/cmd_vel`
14 |
15 | What is the command that tells you information about the topic about velocity? `rostopic info turtle1/cmd_vel`
16 |
17 | What is the node used to publish velocity commands to the turtle? `/teleop_turtle`
18 |
19 | What is the node used to subscribe to velocity commands to the turtle? `/turtlesim`
20 |
21 | What is the command that allows to see the type of message for velocity topic? `rostopic info /turtle1/cmd_vel`
22 |
23 | What is the content of the velocity message? Explain its content.
24 |
25 | ````
26 | $ rosmsg show geometry_msgs/Twist
27 | geometry_msgs/Vector3 linear
28 | float64 x
29 | float64 y
30 | float64 z
31 | geometry_msgs/Vector3 angular
32 | float64 x
33 | float64 y
34 | float64 z
35 | ````
36 |
37 | What is the content of the position message? Explain its content
38 |
39 | ```
40 | $ rosmsg show turtlesim/Pose
41 | float32 x
42 | float32 y
43 | float32 theta
44 | float32 linear_velocity
45 | float32 angular_velocity
46 | ```
47 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_Assignment_2.md:
--------------------------------------------------------------------------------
1 | # Assignment 2
2 |
3 | Questions for this assignment
4 |
5 | 1. Find the topic name of the pose (position and orientation) of turtlesim and its message type. Display the content of message of the pose.
6 |
7 | Name: `/turtle1/pose`
8 |
9 | Type: `turtlesim/Pose`
10 |
11 | Content:
12 |
13 | ```
14 | float32 x
15 | float32 y
16 | float32 theta
17 | float32 linear_velocity
18 | float32 angular_velocity
19 | ```
20 |
21 | 2. Find the topic name of the velocity command of turtlesim and its message type. Display the content of message of the velocity command. Remember that velocity command is the topic that makes the robot move.
22 |
23 | Name: `/turtle1/cmd_vel`
24 |
25 | Type: `geometry_msgs/Twist`
26 |
27 | Content:
28 |
29 | ```
30 | geometry_msgs/Vector3 linear
31 | float64 x
32 | float64 y
33 | float64 z
34 | geometry_msgs/Vector3 angular
35 | float64 x
36 | float64 y
37 | float64 z
38 | ```
39 |
40 | Write a simple ROS program called turtlesim_pose.py, which subscribes to the topic of the pose, and then prints the position of the robot in the callback function. We provide you a program with missing code that you need to complete.
41 |
42 | ```python
43 | #!/usr/bin/env python
44 |
45 | import rospy
46 |
47 | # task 1. import the Pose type from the module turtlesim
48 | from turtlesim.msg import Pose
49 |
50 |
51 | def pose_callback(message):
52 | # task 4. display the x, y, and theta received from the message
53 | print("Pose callback")
54 | print(f"x = {message.x:10.2}")
55 | print(f"y = {message.y:10.2}")
56 | print(f"yaw = {message.theta:10.2}")
57 |
58 |
59 | if __name__ == '__main__':
60 | try:
61 | rospy.init_node('turtlesim_motion_pose', anonymous=True)
62 |
63 | # task 2. subscribe to the topic of the pose of the Turtlesim
64 |
65 | rospy.Subscriber('/turtle1/pose', Pose, pose_callback)
66 |
67 | # task 3. spin
68 | rospy.spin()
69 |
70 | except rospy.ROSInterruptException:
71 | rospy.loginfo("Node terminated.")
72 | ```
73 |
74 |
75 |
76 | Note: need to run `chmod +x /home/mhered/catkin_ws/src/ros_essentials_cpp/src/my_assignment/my_turtle_pose.py` to make python file executable!
77 |
78 | complete the previous code to add a publisher to the velocity and make the robot move for a certain distance.
79 |
80 | Complete the missing code in this following program
81 |
82 | ```python
83 | #!/usr/bin/env python
84 |
85 | import rospy
86 |
87 | from geometry_msgs.msg import Twist
88 | from turtlesim.msg import Pose
89 |
90 | import math
91 | import time
92 |
93 | from std_srvs.srv import Empty
94 |
95 | x=0
96 | y=0
97 | z=0
98 | yaw=0
99 |
100 | def pose_callback(message):
101 | global x
102 | global y, z, yaw
103 | x = message.x
104 | y = message.y
105 | yaw = message.theta
106 | # print(f"Pose (x, y, yaw): ({x:10.3}, {y:10.3}, {yaw:10.3})")
107 |
108 | def move_turtle(speed, distance):
109 | # declare a Twist message to send velocity commands
110 | velocity_message = Twist()
111 |
112 | # get current location from the global variable before entering the loop
113 | x0 = x
114 | y0 = y
115 | # z0=z
116 | # yaw0=yaw
117 |
118 | # task 1. assign the x coordinate of linear velocity to the speed.
119 | velocity_message.linear.x = speed
120 | velocity_message.linear.y = 0.0
121 | velocity_message.linear.z = 0.0
122 | velocity_message.angular.x = 0.0
123 | velocity_message.angular.y = 0.0
124 | velocity_message.angular.z = 0.0
125 |
126 |
127 | distance_moved = 0.0
128 | loop_rate = rospy.Rate(10) # we publish the velocity at 10 Hz (10 times per second)
129 |
130 | # task 2. create a publisher for the velocity message on the appropriate topic.
131 |
132 | # what am I missing??
133 |
134 | while True:
135 | rospy.loginfo("Turtlesim moves forward")
136 |
137 | # task 3. publish the velocity message
138 | velocity_publisher.publish(velocity_message)
139 | loop_rate.sleep()
140 |
141 | # rospy.Duration(1.0)
142 | # measure the distance moved
143 | distance_moved = distance_moved + abs(0.5 * math.sqrt(((x-x0) ** 2) + ((y-y0) ** 2)))
144 | print(distance_moved)
145 | if not (distance_moved < distance):
146 | rospy.loginfo("reached")
147 | break
148 |
149 | # task 4. publish a velocity message zero to make the robot stop after the distance is reached**
150 | velocity_message.linear.x = 0.0
151 | velocity_message.linear.y = 0.0
152 | velocity_message.linear.z = 0.0
153 | velocity_message.angular.x = 0.0
154 | velocity_message.angular.y = 0.0
155 | velocity_message.angular.z = 0.0
156 |
157 | velocity_publisher.publish(velocity_message)
158 |
159 | if __name__ == '__main__':
160 | try:
161 | rospy.init_node('my_turtle_pose_node', anonymous=True)
162 |
163 | position_topic = "/turtle1/pose"
164 | pose_subscriber = rospy.Subscriber(position_topic, Pose, pose_callback)
165 |
166 | # task 5. declare velocity publisher
167 | velocity_topic = '/turtle1/cmd_vel'
168 | velocity_publisher = rospy.Publisher(velocity_topic, Twist, queue_size=10)
169 |
170 | time.sleep(1)
171 | print('move: ')
172 |
173 | # call to the function
174 | move_turtle(1.0, 5.0)
175 |
176 | time.sleep(2)
177 | print('start reset: ')
178 | rospy.wait_for_service('reset')
179 | reset_turtle = rospy.ServiceProxy('reset', Empty)
180 | reset_turtle()
181 | print('end reset: ')
182 | rospy.spin()
183 | except rospy.ROSInterruptException:
184 | rospy.loginfo("node terminated.")
185 |
186 | ```
187 |
188 |
189 |
190 | Note: need to run `chmod +x /home/mhered/catkin_ws/src/ros_essentials_cpp/src/my_assignment/my_turtle_pose2.py` to make python file executable!
191 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_Assignment_4.md:
--------------------------------------------------------------------------------
1 | # Assignment 4
2 |
3 | In this exercise, you will develop a new ROS service by applying the concepts that you have learned in this section. The objective is to create a ROS service where a client sends two float numbers, width and height, to the server, then the server will respond with the area of the rectangle. First, create a new ROS package, and call it `ros_service_assignment`. You first need to define a service file called `RectangleAreaService.srv`. The request part should contain two float values: the width and the height. Use float32 as a type. Refer to this ROS page for more information about available types in ROS: http://wiki.ros.org/msg The response part should contain the area of the rectangle, which is width*height. Write a Client and Server that implements this application using Python or C++. Test your application and make sure that it works.
4 |
5 | Questions for this assignment
6 |
7 | **Note: please use {...} to format the instruction for better readability and make it easier for me to review.**
8 |
9 | 1. What is the command used to create a ROS package called ros_service_assignment?Make sure to clarify the path where to create it.
10 |
11 | from `~/catkin_ws/src/` issue:
12 |
13 | `$ catkin_create_pkg ros_service_assignment std_msgs rospy roscpp`
14 |
15 | 2. What is the name of the folder when to create the service file?Provide the absolute path to the file (from the root).
16 |
17 | `/home/mhered/catkin_ws/src/ros_service_assignment/srv`
18 |
19 | 3. What is the content of the service file RectangleAreaService.srv?
20 |
21 | ```
22 | # Service definition file
23 | # request message
24 | float32 height
25 | float32 width
26 | ---
27 | # response message
28 | float32 area
29 | ```
30 |
31 | 4. What are the changes you need to do in the CMakeLists.txt. Copy/paste the whole CMakeLists.txt.
32 |
33 | add dependencies:
34 |
35 | ```
36 | ```
37 |
38 |
39 |
40 | 5. What are the changes you need to do the package.xml? Copy/paste the whole package.xml.
41 |
42 | add dependencies to `message_generation` and `message_runtime`:
43 |
44 | ```
45 | ```
46 |
47 |
48 |
49 | 6. What is the command to build the new service and generate executable files?
50 |
51 | from `~/catkin_ws`issue:
52 |
53 | $ catkin_make
54 |
55 | 7. How to make sure that service files are now created?
56 |
57 | ```
58 | $ rossrv show ros_service_assignment/RectangleAreaService
59 |
60 | float32 height
61 | float32 width
62 | ---
63 | float32 area
64 | ```
65 |
66 | 8. **Note: please use {...} to format the instruction for better readability and make it easier for me to review.** Write the server application (C++ or Python)
67 |
68 | ```python
69 | #!/usr/bin/env python
70 |
71 | import rospy
72 | import time
73 |
74 | # import service definition
75 | from ros_service_assignment.srv import RectangleAreaService, RectangleAreaServiceRequest, RectangleAreaServiceResponse
76 |
77 | def handle_area_rect(request):
78 | area = request.width * request.height
79 | print(f"Server will return: {request.width} + {request.height} = {area}")
80 | time.sleep(2) # 2 seconds
81 | return RectangleAreaServiceResponse(area)
82 |
83 | def area_rect_server():
84 |
85 | rospy.init_node('area_rect_server')
86 |
87 | # create service listening to incoming requests
88 | # given service name, type and handle function
89 | my_service = rospy.Service('area_rect', RectangleAreaService, handle_area_rect)
90 |
91 | print('Server is ready to compute areas.')
92 | # start service to wait for incoming requests
93 | rospy.spin()
94 |
95 | if __name__ == "__main__":
96 | area_rect_server()
97 | ```
98 |
99 |
100 |
101 | 9. **Note: please use {...} to format the instruction for better readability and make it easier for me to review. **Write the client application (C++ or Python)
102 |
103 | ```python
104 | #!/usr/bin/env python
105 |
106 |
107 | import sys
108 | import rospy
109 |
110 | # import service definition
111 | from ros_service_assignment.srv import RectangleAreaService, RectangleAreaServiceRequest, RectangleAreaServiceResponse
112 |
113 |
114 | def area_rect_client(x, y):
115 |
116 | # keep client waiting until service is alive
117 | rospy.wait_for_service('area_rect')
118 |
119 | try:
120 | # create a client object, the service name must be same defined in the server
121 | my_request = rospy.ServiceProxy('area_rect', RectangleAreaService)
122 |
123 | # what is this?
124 | response = my_request(width, height)
125 | return response.area
126 |
127 | except rospy.ServiceException(e):
128 | print(f"Service call failed: {e}")
129 |
130 | if __name__ == "__main__":
131 | if len(sys.argv) == 3:
132 | width = int(sys.argv[1])
133 | height = int(sys.argv[2])
134 | else:
135 | print("%s [width heigth]"%sys.argv[0])
136 | sys.exit(1)
137 |
138 | print(f"Client will request {width} + {height}...")
139 | result = area_rect_client(width, height)
140 | print(f"Server returned {width} + {height} = {result}")
141 |
142 | ```
143 |
144 |
145 |
146 | 10. **Note: please use {...} to format the instruction for better readability and make it easier for me to review.**What are the commands to test that the application works fine.
147 |
148 | Terminal 1:
149 |
150 | `$ roscore`
151 |
152 | Terminal 2:
153 |
154 | ```bash
155 | $ rosrun ros_service_assignment area_rect_server.py
156 | Server is ready to compute areas.
157 | ```
158 |
159 | Terminal 3:
160 |
161 | ```bash
162 | $ rosrun ros_service_assignment area_rect_client.py 2 3
163 | Client will request 2 + 3...
164 | Server returned 2 + 3 = 6.0
165 | ```
166 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_Assignment_5.md:
--------------------------------------------------------------------------------
1 | # Assignment 5
2 |
3 | Develop a Python script that implements the turtlesim cleaning application discussed in the lecture.
4 |
5 | Include methods `move`, `rotate` etc with their appropriate parameters.
6 |
7 | Present a menu with two options:
8 |
9 | 1- spiral cleaning
10 |
11 | 2- grid cleaning
12 |
13 | Submit a zip folder containing:
14 |
15 | 1. `clean.py`: Python script of the cleaning application
16 | 2. `clean_py.launch`, a launch file that starts the `turtlesim` node and the `clean.py` script both together
17 |
18 | Submission: [ZIP file](./assets/sources/clean.zip)
19 |
20 | ## Potential Improvements
21 |
22 | - [ ] command to abort cleaning and resume
23 | - [ ] rename parameters to specify angles are in degrees
24 |
25 |
26 |
27 | Remember convention:
28 |
29 | clockwise = to the right = positive angle
30 |
31 | anticlockwise = to the left = negative angle
32 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_Assignment_6.md:
--------------------------------------------------------------------------------
1 | # Assignment 6
2 |
3 | #### **Task 1. Using OpenCV (Without ROS)**
4 |
5 | In the first task, you will read a video file using just OpenCV functionalities, (without using ROS). The video file is located on [GitHub repository in this link](https://github.com/aniskoubaa/ros_essentials_cpp/tree/master/src/topic03_perception/video) and is called [`tennis-ball-video.mp4`](https://github.com/aniskoubaa/ros_essentials_cpp/blob/master/src/topic03_perception/video/tennis-ball-video.mp4). So, the algorithm, will consists in the following steps:
6 |
7 | - read the video file using OpenCV
8 | - For each frame read
9 | - apply the ball detection algorithm to detect the ball(s) in the image
10 | - display the result of the ball detection
11 | - continue processing the video until a key is pressed.
12 |
13 | To make your task easier, start from the code in the file [ball_detection.py](https://github.com/aniskoubaa/ros_essentials_cpp/blob/master/src/topic03_perception/ball_detection.py), which you use as a template and starting point. Make necessary modification so that instead of reading a single image, you read a video stream and get the images from it, one by one. If you do not know how to read a video file, refer to the lecture `OpenCV: Video Streams Input (Python)` in OpenCV section or to the code [read_video.py](https://github.com/aniskoubaa/ros_essentials_cpp/blob/master/src/topic03_perception/read_video.py).
14 |
15 | Here is a screenshot of a sample output
16 |
17 | 
18 |
19 | #### **Task 2. Using OpenCV + ROS and Reading from a Video File**
20 |
21 | In task 2, you will do the same thing as Task 1, but we will have two different files for the tracking application.
22 |
23 | 1. The first file is an Image publisher node file, which will read the video stream frame by frame, and publishes them through a specific topic.
24 | 2. The second file is an Image subscriber node file, which will subscribe to the image topic of tennis ball, extracts the image from ROS topic, converts it to an OpenCV image using `cv_bridge`, and applies the ball detection algorithm. Review the lecture about CvBridge: Bridging OpenCV and ROS or see the code [image_pub_sub.py](https://github.com/aniskoubaa/ros_essentials_cpp/blob/master/src/topic03_perception/image_pub_sub.py).
25 |
26 | Here is the description of the two files.
27 |
28 | **Publisher ROS Node**
29 |
30 | - Create a new node called `tennis_ball_publisher.py` (or cpp)
31 | - You need to create a new ROS topic called `tennis_ball_image` of type `sensor_msgs/Image`.
32 | - After you read a frame from the video stream using OpenCV, you need to publish every frame on the topic `tennis_ball_image`
33 | - Loop the video forever and keep publishing the frames until the node is stopped.
34 |
35 | **Subscriber ROS Node**
36 |
37 | - Create a new node called `tennis_ball_listener.py` (or cpp). Use the code the code [image_pub_sub.py](https://github.com/aniskoubaa/ros_essentials_cpp/blob/master/src/topic03_perception/image_pub_sub.py) as a reference.
38 | - You need to subscribe to a new ROS topic called `tennis_ball_image` of type `sensor_msgs/Image`.
39 | - In the image callback, you need to convert the received frame into an OpenCV format, then apply the ball_detection algorithm on the received frame. Display every frame after detecting the ball, its contour and surrounding circle.
40 |
41 | #### **Task 3. Using OpenCV + ROS and Reading from a USB Camera**
42 |
43 | In task 3, you will do the same thing as Task 2 but the stream will be read from the USB camera.
44 |
45 | You will create only one file of a ROS node called `tennis_ball_usb_cam_tracker.py`. The node should do the following:
46 |
47 | - Subscribe to the topic of video stream of type `sensor_msgs/Image` coming from the camera. How to find it? To find the topic name, you need first to start the usb_camera in ROS as follow.
48 | Open a terminal and write the command
49 |
50 | ```
51 | > roscore
52 | > rosrun usb_cam usb_cam_node _pixel_format:=yuyv
53 | ```
54 |
55 | in another terminal you can run `rostopic list` to see all the topics coming from the usb_cam. Choose the topic containing a raw image and use it in your subscriber. Define a callback function for this subscriber called `image_callback` where you will process every incoming frame.
56 |
57 | - In the image callback, you need to convert the received frame into an OpenCV format, then apply the ball_detection algorithm on the received frame. Display every frame after detecting the ball, its contour and surrounding circle.
58 |
59 | - To test your code you need a yellow ball. If you do not have any yellow ball, you can get any image of a tennis ball on your mobile phone, then show it to your USB camera and make it move to see how your application is able to track it.
60 |
61 | Congratulations, you are now able to process any image coming from a ROS node either from a USB camera or any other video streaming sources.
62 |
63 | #### Questions for this assignment
64 |
65 | **Task 1 Code: Using OpenCV (Without ROS)**
66 |
67 | Attach a screenshot of the output of your work
68 |
69 | **Task 2. Publisher Node**
70 |
71 | **Note: please use {...} to format the instruction for better readability and make it easier for me to review.**
72 |
73 | **Task 2. Subscriber Node**
74 |
75 | Attach a screenshot of the output of your work
76 |
77 | **Note: please use {...} to format the instruction for better readability and make it easier for me to review.**
78 |
79 | **Task 3. Ball Tracking with USB Camera Stream**
80 |
81 | Attach a screenshot of the output of your work
82 |
83 | **Note: please use {...} to format the instruction for better readability and make it easier for me to review.**
84 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_Assignment_7.md:
--------------------------------------------------------------------------------
1 | # Assignment 7
2 |
3 | ## The Move-Stop-Rotate Behavior for Obstacle Avoidance
4 |
5 | We will first start with a very simple program to make the robot move around
6 |
7 | 1. First, make sure to install Turtlebot 3 Robot, as shown in the Section "Getting Started with Turtlebot3". It is recommended to use the latest version of ROS, which is ROS Noetic, although it should work on any version.
8 | 2. Start the Turtlebot3 Robot in the maze environment `roslaunch turtlebot3_gazebo turtlebot3_world.launch`
9 | 3. Start RVIZ for Visualization `roslaunch turtlebot3_gazebo turtlebot3_gazebo_rviz.launch`
10 | 4. Observe the laser scanner in red dots in RVIZ. You can check the thickness of the laser scanner by changing the Size (m) attribute in rviz to 0.04 instead of 0.01. This will provide better visualization of the laser scanner topic.
11 | 5. Run the `teleop` node `roslaunch turtlebot3_teleop turtlebot3_teleop_key.launch`. Make the robot move and observe how the laser scanner layout is changing in `rviz`.
12 | 6. Now, develop a ROS program in Python AND in C++ to make the robot move in a straight line, and stop when the closest obstacle gets closer than 0.6 meters. Use a laser beam around the center of [-5,+5] degrees.
13 | 7. Then, develop another function that makes the robot rotate until the straight distance to obstacles in the same beam aperture is greater than 3 meters.
14 | 8. Make a loop to repeat these two functions and observe if the robot succeeds in moving without hitting the obstacle forever or not.
15 | 9. Try the same program in the house environment `roslaunch turtlebot3_gazebo turtlebot3_house.launch`. What are your observations?
16 |
17 | ## A PID Controller
18 |
19 | The motion behavior in the previous solution is not smooth as there are several motions interrupted by a stop.
20 |
21 | The objective of this second assignment is to develop a Proportional controller that regulates the angular speed and the linear speed such as the robot moves smoothly without hitting obstacles.
22 |
23 | - The linear velocity can be adjusted to be proportional to the distance (similar to what we did in Go-to-Goal Behavior)
24 |
25 | - The angular velocity can be adjusted based on the distance to the obstacles on the left side and the distance to obstacles on the right side.
26 |
27 | - If the distance to the left side obstacle is much closer than the right side obstacles, then make the robot rotate smoothly to the right.
28 |
29 | - If the distance to the right side obstacle is much closer than the left side obstacles, then make the robot rotate smoothly to the left.
30 |
31 | - Test the code on Turtlebot3 in the maze environment and house environment.
32 |
33 | - Test it on a real robot if you have one (any kind of real robot).
34 |
35 |
36 |
37 | ## 15/08/22
38 |
39 | * create `bumper.launch` to launch environment: `turtlebot3`, `rviz`, `teleop`, and `scan_subscriber`
40 | * fix `scan_subscriber.py` : sort data to plot `/scan` in -180 180 degree range and fix bug with shallow copy of `clean_ydata`
41 |
42 | ## 16/08/22
43 |
44 | * add `bumper_add.py` to implement the Move-Stop-Rotate Behavior for Obstacle Avoidance
45 | * subscribe to `/odom` to get robot pose
46 | * subscribe to `/scan` to get distance to object in beam +/-5 degrees, dealing with NaN - `global fwd_clearance`
47 | * publish `cmd_vel` commands to move the robot `global x, y, yaw`
48 | * Loop:
49 | * behavior 1 - move straight to goal until obstacle closer than 0.6m
50 | * behavior 2 - rotate until direction with clearance >3m found
51 |
52 | ## 17/08/22
53 |
54 | * test and fine-tune `bumper_app.py` in **maze** and **house** environments
55 |
56 | 
57 |
58 | * Add `smooth_bumper_app.py` with first implementation of the PID controller based on go-to-goal behavior
59 |
60 | ## 18/08/22
61 |
62 | * Update and rename `bumper_app.py` to [`robot_bouncy.py`](../src/mhered/src/robot_bouncy.py)
63 | * Update and rename `smooth_bumper_app.py` to [`robot_marauder.py`](../src/mhered/src/robot_marauder.py)
64 | * Eliminate `smooth_bumper.launch`
65 | * Rename `bumper.launch` to [`robot.launch`](../src/mhered/launch/robot.launch) and modify to take parameters:
66 | * `robot_behavior:= , marauder, instructor, student1, student2`
67 | * `world:= , house`
68 | * `open_rviz:= true , `
69 | * `plot_laser:= true , `
70 |
71 | ```bash
72 | $ roslaunch mhered robot.launch robot_behavior:=instructor plot_laser:=true
73 | ```
74 |
75 | * update this .md file
76 |
77 | * test alternative implementations:
78 | * instructor solution: [robot_instructor.py](../src/mhered/src/robot_marauder.py) - ok but got stuck
79 | * PID implementation by student 1 ([Mohammed Zia Ahmed Khan](https://www.udemy.com/user/mohammed-zia-ahmed-khan-2/)) : [robot_student1.py](../src/mhered/src/robot_student1.py) - behaves as a slow bouncy, quite good, no collision
80 | * PID implementation by student 2 ([Wellington Noberto](https://www.udemy.com/user/wellington-noberto/)) : [robot_student2.py](../src/mhered/src/robot_student2.py) - ok but skids and got stuck
81 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_ETH_Zurich_L1.md:
--------------------------------------------------------------------------------
1 | ---
2 | typora-copy-images-to: assets/images
3 | ---
4 |
5 | # Lesson #1
6 |
7 | [Video (Youtube) L1](https://www.youtube.com/watch?v=aL7zLnaEdAg) | [Slides L1](./course-materials/ETH_Zurich_Slides_1.pdf)
8 |
9 | ## What is ROS?
10 |
11 | ROS (Robot Operating System) is the de facto standard for robot programming, is a middleware between your OS and your robot programming, usually made of multiple programs running potentially in different computers.
12 |
13 | 4 elements:
14 |
15 | 1. Plumbing: ROS allows multiple individual programs (aka processes, nodes) running potentially in different computers to communicate and provides device drivers.
16 | 2. Basic tools to reuse: simulation, visualization, GUI, data logging
17 | 3. Off-the-shelf capabilities (control, planning, perception, mapping, manipulation)
18 | 4. Ecosystem: software organized in packages, docs, tutorials.
19 |
20 | ## ROS philosophy
21 |
22 | 1. Peer to peer: individual programs communicate p2p over a defined API (ROS topics, services)
23 | 2. Distributed: comms run over wireless so nodes can be distributed in different machines
24 | 3. Multilingual: client libraries for C++, Python, MATLAB, Java, etc. and you can mix and match
25 | 4. Lightweight: existing standalone libraries in thin ROS wrapper
26 | 5. Free and open source
27 |
28 | ## ROS elements
29 |
30 | ### ROS workspace environment
31 |
32 | Workspace environment defines context for current workspace
33 |
34 | Load default workspace with
35 |
36 | ```bash
37 | $ source /opt/ros/noetic/setup.bash
38 | ```
39 |
40 | Overlay your catkin workspace with
41 |
42 | ```bash
43 | $ cd ~/catkin_ws
44 | $ source devel/setup.bash
45 | ```
46 |
47 | check current workspace with
48 |
49 | ```bash
50 | $ echo $ROS_PACKAGE_PATH
51 | ```
52 |
53 | See setup
54 |
55 | ```bash
56 | $ cat ~/.bashrc
57 | ```
58 |
59 | ### ROS master
60 |
61 | [ROS master](https://wiki.ros.org/Master) is a special node that manages communication between nodes, every node registers at startup. Launched - among other elements - with:
62 |
63 | ```bash
64 | $ roscore
65 | ```
66 |
67 | ### ROS nodes
68 |
69 | [ROS nodes](https://wiki.ros.org/Nodes) are the single-purpose individual programs that make your robot programming. They are managed individually and organized in packages.
70 |
71 | Execute a node:
72 |
73 | ```bash
74 | $ rosrun pkg_name node_name
75 | ```
76 |
77 | List active nodes:
78 |
79 | ```bash
80 | $ rosnode list
81 | ```
82 |
83 | Show info on a node:
84 |
85 | ```bash
86 | $ rosnode info node_name
87 | ```
88 |
89 | ### ROS topics
90 |
91 | Once nodes register to master they can communicate over [ROS topics](https://wiki.ros.org/rostopic). Topics are names for streams of messages.
92 |
93 | Nodes can publish or subscribe (listen) to topics - typically 1 to many
94 |
95 | List active topics:
96 |
97 | ```bash
98 | $ rostopic list
99 | ```
100 |
101 | Subscribe to topic and show contents on the console
102 |
103 | ```bash
104 | $ rostopic echo /topic
105 | ```
106 |
107 | Analyze the frequency:
108 |
109 | ```bash
110 | $ rostopic hz /topic
111 | ```
112 |
113 | Show info on a topic:
114 |
115 | ```bash
116 | $ rostopic info /topic
117 | ```
118 |
119 | ### ROS messages
120 |
121 | [ROS messages](https://wiki.ros.org/Messages) structure and format is defined in a text file with `.msg` extension. Contains arrays of type `int`, `float`, `string` etc and can be nested (messages can contain other messages)
122 |
123 | See the type of a topic
124 |
125 | ```bash
126 | $ rostopic type /topic
127 | ```
128 |
129 | Manually publish a message to a topic (TAB for autocomplete)
130 |
131 | ```bash
132 | $ rostopic pub /topic type args
133 | ```
134 |
135 | ## The `catkin build` system
136 |
137 | Command to generate executables, libraries and interfaces: if you see in a tutorial `catkin_make` use instead `catkin build`. See [here](https://robotics.stackexchange.com/questions/16604/ros-catkin-make-vs-catkin-build) why `catkin build` is better than `catkin_make` and [here](https://catkin-tools.readthedocs.io/en/latest/) the official documentation of the `catkin_tools` package.
138 |
139 | Note: Build packages from your catkin workspace and **then** always re-source to update the environment and ensure the system is aware of the changes:
140 |
141 | ```bash
142 | $ cd ~/catkin_ws
143 | $ catkin build pkg_name
144 | $ source devel/setup.bash
145 | ```
146 |
147 | This creates `src` folder for source code, `build` for cache and `devel` for built targets. Do not touch the last two. You can wipe them to start anew with:
148 |
149 | ```bash
150 | $ catkin clean
151 | ```
152 |
153 | Note: Initially `catkin build` didn't work in my install but following [these instructions](https://stackoverflow.com/a/66142177/15472802) I installed a missing package:
154 |
155 | ```bash
156 | $ sudo apt install python3-catkin-tools python3-osrf-pycommon
157 | ```
158 |
Then I deleted the `devel` and `build` folders, ran the build again, re-sourced the environment, et voilà.
160 |
161 | ## The good way of getting code from internet
162 |
This approach is cleaner later on, when you have multiple `catkin_ws` workspaces:
164 |
165 | 1. clone the repo to a `~/git` folder
166 |
167 | ```bash
168 | $ cd ~/git
169 | $ git clone https://github.com/ethz-asl/ros_best_practices
170 | ```
171 |
172 | 2. Symlink it to `catkin_ws`
173 |
174 | ```bash
175 | $ ln -s ~/git/ros_best_practices/ ~/catkin_ws/src/
176 | ```
177 |
178 | 3. Build it and re-source environment
179 |
180 | ```bash
181 | $ catkin build ros_package_template
182 | $ source devel/setup.bash
183 | ```
184 |
185 | 4. execute it:
186 |
187 | ```bash
188 | $ roslaunch ros_package_template ros_package_template.launch
189 | ```
190 |
191 | ## ROS launch
192 |
Tool to launch several nodes at the same time. Launches `roscore` if it is not yet running. Launches are described in XML `.launch` files. If the file is in a package, `roslaunch` will find it for you — you don't need to be in the file's directory.
194 |
195 | ```bash
196 | $ roslaunch pkg_name file_name.launch
197 | ```
198 |
199 | Take a look at the file:
200 |
```xml
<launch>
  <node name="node_name" pkg="pkg_name" type="node_executable" output="screen" />
</launch>
```
207 |
- `<launch>`: root element of the file

- `<node>`: a node to launch
  - `name`: name of the instance of the node, free to choose
  - `pkg`: package
  - `type`: node executable
  - `output`: where to write log messages, `screen` to console, `log` to file
- `<arg>`: arguments
  - `name`: name
  - `default`: default value if not passed
  - Are passed at launch with: `$ roslaunch launchfile.launch arg_name:=value`
  - Value can be accessed in the launch file with `$(arg arg_name)`
- `<group if="...">`: conditional block
- `<include file="$(find pkg_name)/launch/file.launch" />`: nest launch files
  - `$(find pkg_name)`: don't use absolute paths but paths relative to packages; use this to find the system path to a package
- `<arg name="arg_name" value="value" />`: pass down arguments to a nested launch file

Note: self-closing tags use the syntax `<tag ... />`
226 |
227 | ## Gazebo simulator
228 |
Simulates 3D rigid body dynamics, sensors (including noise), 3D visualization, interactively dropping elements, etc. Objects are organized in a tree. Can start, pause, and accelerate the simulation. Has a gallery of robots and environments. It's a standalone program but has a ROS interface.
230 |
231 | Run it with :
232 |
233 | ```bash
234 | $ rosrun gazebo_ros gazebo
235 | ```
236 |
237 | End of Lesson #1
238 |
239 | # Exercise #1
240 |
241 | [Exercise Sheet](./course-materials/ETH_Zurich_Exercises_1.pdf)
242 |
243 | 1. set up the Husky simulation:
244 |
245 | http://wiki.ros.org/husky_gazebo/Tutorials/Simulating%20Husky
246 |
247 | 2. launch it and inspect it
248 |
249 | ```bash
250 | $ rosnode list
251 | $ rostopic list
252 | $ rostopic echo /topic
253 | $ rostopic hz /topic
254 | $ rqt_graph
255 | ```
256 |
257 | 3. command a desired velocity:
258 |
259 | ```bash
260 | $ rostopic pub /cmd_vel geometry_msgs/Twist "linear:
261 | x: 5.0
262 | y: 0.0
263 | z: 0.0
264 | angular:
265 | x: 0.0
266 | y: 0.0
267 | z: 0.0"
268 | ```
269 |
270 | 4. find online `teleop_twist_keyboard`, clone it, compile from source (40%)
271 |
272 | http://wiki.ros.org/teleop_twist_keyboard
273 |
274 | ```bash
275 | $ cd ~/git
276 | $ git clone https://github.com/ros-teleop/teleop_twist_keyboard.git # clone
277 | $ ln -s ~/git/teleop_twist_keyboard/ ~/catkin_ws/src/ # symlink
278 | $ catkin build teleop_twist_keyboard # compile from source
279 | $ source devel/setup.bash # source environment
280 | $ rosrun teleop_twist_keyboard teleop_twist_keyboard.py # execute
281 | Moving around:
282 | u i o
283 | j k l
284 | m , .
285 | $ roscd teleop_twist_keyboard # check
286 | $ pwd
287 | ~/catkin_ws/src/teleop_twist_keyboard # ok!
288 |
289 | ```
290 |
291 | 5. write a launch file to launch simulation in a different world with teleop node and drive Husky (60%)
292 |
293 | Launch file should replicate:
294 |
295 | ```bash
296 | $ roslaunch husky_gazebo husky_empty_world.launch world_name:=worlds/willowgarage.world
297 | $ rosrun teleop_twist_keyboard teleop_twist_keyboard.py
298 | ```
299 |
300 | `my_husky.launch`:
301 |
```xml
<launch>
  <include file="$(find husky_gazebo)/launch/husky_empty_world.launch">
    <arg name="world_name" value="worlds/willowgarage.world" />
  </include>
  <node name="teleop" pkg="teleop_twist_keyboard" type="teleop_twist_keyboard.py" output="screen" />
</launch>
```
310 |
311 | Executed with:
312 |
313 | ```bash
314 | $ roslaunch src/my_husky.launch
315 | ```
316 |
317 | 
318 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_ETH_Zurich_L2.md:
--------------------------------------------------------------------------------
1 | # Lesson #2
2 |
3 | [Video (Youtube) L2](https://youtu.be/v9RgXcosuww) | [Slides L2](./course-materials/ETH_Zurich_Slides_2.pdf)
4 |
5 | * ROS packages
6 | * Working with Eclipse IDE for C++
7 | * ROS C++ library `roscpp` basics
8 | * Implementing subscribers and publishers
9 | * parameter server
10 | * RVIZ
11 |
12 | ## ROS packages
13 |
ROS software is organized into [ROS packages](https://wiki.ros.org/Packages), which contain source code, launch files, config files, message definitions, data and docs.
15 |
16 |
17 |
18 | ## Working with Eclipse IDE for C++
19 |
20 |
21 |
22 | ## ROS C++ library `roscpp` basics
23 |
24 |
25 |
26 | ## Implementing subscribers and publishers
27 |
28 |
29 |
30 | ## The parameter server
31 |
32 |
33 |
34 | ## RVIZ
35 |
36 |
37 |
38 | # Exercise #2
39 |
40 | [Exercise Sheet](./course-materials/ETH_Zurich_Exercises_2.pdf)
41 |
42 | Create a subscriber to the `/scan` topic in C++
43 |
44 | use parameters for `scan_subscriber_topic_name` and `scan_subscriber_queue_size`
45 |
--------------------------------------------------------------------------------
/ROS-notes/ROS_ETH_Zurich_L3.md:
--------------------------------------------------------------------------------
1 | # Lesson #3
2 |
3 | [Video (Youtube) L3](https://www.youtube.com/watch?v=WcArOQuJRDI) | [Slides](./course-materials/ETH_Zurich_Slides_3.pdf)
4 |
5 | # Exercise #3
6 |
7 | [Exercise Sheet](./course-materials/ETH_Zurich_Exercises_3.pdf)
8 |
--------------------------------------------------------------------------------
/ROS-notes/assets/images/3D_frames.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/3D_frames.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/3D_rotation_matrix.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/3D_rotation_matrix.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/3D_transformation_matrix.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/3D_transformation_matrix.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/BUG0_plotjuggler.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/BUG0_plotjuggler.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Kinect_on_ROS.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Kinect_on_ROS.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Kinect_on_ROS_2.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Kinect_on_ROS_2.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/ROS_computation_graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/ROS_computation_graph.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/SLAM_fun.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/SLAM_fun.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-11-03_22-24-02.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-11-03_22-24-02.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-11-12_01-07-54.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-11-12_01-07-54.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-11-17_21-32-15.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-11-17_21-32-15.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-11-17_23-57-19.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-11-17_23-57-19.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-11-26_01-29-39.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-11-26_01-29-39.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-11-26_01-31-33.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-11-26_01-31-33.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-12-20_22-14-47.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-12-20_22-14-47.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2021-12-20_23-14-06.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2021-12-20_23-14-06.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2022-01-11_00-17-46-arrows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2022-01-11_00-17-46-arrows.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2022-01-11_00-32-39.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2022-01-11_00-32-39.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2022-01-11_000545.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2022-01-11_000545.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2022-01-11_000614.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2022-01-11_000614.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/Screenshot_2022-01-11_001149.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/Screenshot_2022-01-11_001149.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/bouncy_robot.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/bouncy_robot.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/bug1_1st_turn.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/bug1_1st_turn.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/drone.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/drone.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/euler_angles.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/euler_angles.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/frame_tree_minipupper.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/frame_tree_minipupper.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/frames.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/frames.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/frames_minipupper.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/frames_minipupper.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/gazebo_and_navstack.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/gazebo_and_navstack.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/gazebo_and_rviz.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/gazebo_and_rviz.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/gazebo_only.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/gazebo_only.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/husky.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/husky.gif
--------------------------------------------------------------------------------
/ROS-notes/assets/images/icon-drone.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/icon-drone.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/mymap.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/mymap.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/pure_rotation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/pure_rotation.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/pure_translation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/pure_translation.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/rqt_reconfigure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/rqt_reconfigure.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/static_transform.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/static_transform.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/transformation_matrix.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/transformation_matrix.png
--------------------------------------------------------------------------------
/ROS-notes/assets/images/trigonometry-obstacle-angle.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/images/trigonometry-obstacle-angle.jpg
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/clean.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/clean.zip
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/drawings.odp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/drawings.odp
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/frames.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/frames.pdf
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/mymap.pgm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/mymap.pgm
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/mymap.yaml:
--------------------------------------------------------------------------------
1 | image: ./mymap.pgm
2 | resolution: 0.050000
3 | origin: [-10.000000, -10.000000, 0.000000]
4 | negate: 0
5 | occupied_thresh: 0.65
6 | free_thresh: 0.196
7 |
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/mymap2.pgm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/mymap2.pgm
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/mymap2.yaml:
--------------------------------------------------------------------------------
1 | image: ./mymap2.pgm
2 | resolution: 0.050000
3 | origin: [-10.000000, -10.000000, 0.000000]
4 | negate: 0
5 | occupied_thresh: 0.65
6 | free_thresh: 0.196
7 |
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/pedestrian.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/pedestrian.png
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/robot-car.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/robot-car.png
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/trigo.odp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/trigo.odp
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/udemy-ROS-essentials-diploma-Jan22.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/udemy-ROS-essentials-diploma-Jan22.jpg
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/udemy-ROS-essentials-diploma-Jan22.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/udemy-ROS-essentials-diploma-Jan22.pdf
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/udemy-ROS-navigation-diploma-Aug22.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/udemy-ROS-navigation-diploma-Aug22.jpg
--------------------------------------------------------------------------------
/ROS-notes/assets/sources/udemy-ROS-navigation-diploma-Aug22.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/sources/udemy-ROS-navigation-diploma-Aug22.pdf
--------------------------------------------------------------------------------
/ROS-notes/assets/videos/Kinect_in_ROS.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/videos/Kinect_in_ROS.mp4
--------------------------------------------------------------------------------
/ROS-notes/assets/videos/LaserScan_from_Kinect.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/videos/LaserScan_from_Kinect.mp4
--------------------------------------------------------------------------------
/ROS-notes/assets/videos/SLAM_fun.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/assets/videos/SLAM_fun.mp4
--------------------------------------------------------------------------------
/ROS-notes/bagfiles/.gitignore:
--------------------------------------------------------------------------------
1 | *.bag
2 |
--------------------------------------------------------------------------------
/ROS-notes/bagfiles/BUG0_layout.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | tennis-ball-video.mp4
3 |
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/ball_detection.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import cv2
4 | import numpy as np
5 |
6 | # global variables
7 | global color_lower_bound, color_upper_bound
8 | global is_bkg_black
9 |
10 | is_bkg_black = False
11 |
12 | # tennis ball in video
13 | color_lower_bound = (30, 150, 100)
14 | color_upper_bound = (50, 255, 255)
15 |
16 | # tennis ball in my camera
17 | # color_lower_bound = (20, 90, 150)
18 | # color_upper_bound = (30, 150, 255)
19 |
20 | # Kinga bottle cap
21 | # color_lower_bound = (100, 80, 80)
22 | # color_upper_bound = (110, 255, 255)
23 |
24 | # Oranges
25 | # color_lower_bound = (0, 120, 120)
26 | # color_upper_bound = (23, 255, 255)
27 |
28 |
def filter_color(rgb_image, color_lower_bound, color_upper_bound):
    """Return a binary mask of the pixels inside an HSV color range.

    :param rgb_image: input frame in BGR channel order (as read by OpenCV)
    :param color_lower_bound: (H, S, V) lower bound of the color range
    :param color_upper_bound: (H, S, V) upper bound of the color range
    :return: single-channel mask, 255 where the pixel is in range, 0 elsewhere
    """
    # work in HSV space, where a color range is easier to express than in BGR
    hsv_image = cv2.cvtColor(rgb_image, cv2.COLOR_BGR2HSV)
    return cv2.inRange(hsv_image, color_lower_bound, color_upper_bound)
37 |
38 |
def find_contours(mask):
    """Return the external contours found in a binary mask.

    :param mask: single-channel binary image
    :return: list of contours as returned by ``cv2.findContours``
    """
    # RETR_EXTERNAL keeps only the outermost contours;
    # CHAIN_APPROX_SIMPLE compresses straight segments to their endpoints
    found, _hierarchy = cv2.findContours(
        mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
    )
    return found
44 |
45 |
def draw_ball_contours(rgb_image, contours, is_bkg_black, min_area=100):
    """Draw ball detections onto an image.

    For every contour whose area exceeds ``min_area``, draws:
    - the contour outline in green,
    - the contour centroid as a pink dot,
    - the minimum enclosing circle in yellow.

    :param rgb_image: BGR frame the contours were detected in
    :param contours: contours as returned by find_contours()
    :param is_bkg_black: if True, draw on a black canvas of the same shape;
                         otherwise draw directly on ``rgb_image`` (which is
                         then modified in place)
    :param min_area: minimum contour area (in px^2) to draw, default 100
    :return: the annotated image
    """
    if is_bkg_black:
        contours_image = np.zeros(rgb_image.shape, "uint8")
    else:
        contours_image = rgb_image

    for c in contours:
        area = cv2.contourArea(c)

        if area > min_area:
            # add contour in green
            cv2.drawContours(contours_image, [c], -1, (150, 250, 150), 2)

            # add contour centroid in pink
            cx, cy = get_contour_centroid(c)
            cv2.circle(contours_image, (cx, cy), 5, (150, 150, 255), -1)

            # add min enclosing circle in yellow, centered on its own
            # center (x, y) -- bug fix: it was previously drawn centered
            # on the centroid (cx, cy), discarding the computed (x, y)
            ((x, y), radius) = cv2.minEnclosingCircle(c)
            cv2.circle(
                contours_image, (int(x), int(y)), int(radius), (0, 255, 255), 2
            )

    return contours_image
68 |
69 |
def get_contour_centroid(contour):
    """Return the (x, y) centroid of a contour computed from its moments.

    Returns (-1, -1) for degenerate contours with zero area (m00 == 0),
    avoiding a division by zero.
    """
    moments = cv2.moments(contour)
    if moments["m00"] == 0:
        return -1, -1
    centroid_x = int(moments["m10"] / moments["m00"])
    centroid_y = int(moments["m01"] / moments["m00"])
    return centroid_x, centroid_y
78 |
79 |
def process_frame(rgb_frame, flip_image):
    """Run the full ball-detection pipeline on one BGR frame.

    Blurs the frame, masks the globally configured HSV color range, finds
    contours and draws the detections. Optionally mirrors the result.

    :param rgb_frame: BGR frame to process
    :param flip_image: if True, mirror the annotated frame horizontally
    :return: the annotated (and possibly mirrored) frame
    """
    global color_lower_bound, color_upper_bound
    global is_bkg_black

    # smooth the frame to reduce color noise before masking
    blurred = cv2.GaussianBlur(rgb_frame, (5, 5), 0)
    # keep only pixels in the configured HSV color range
    mask = filter_color(blurred, color_lower_bound, color_upper_bound)
    # detect contours on the mask, draw them on the *unblurred* frame
    annotated = draw_ball_contours(rgb_frame, find_contours(mask), is_bkg_black)

    # mirror for display (natural when using a webcam)
    return cv2.flip(annotated, 1) if flip_image else annotated
98 |
99 |
def main():
    """Open the video source, run ball detection on each frame and display it.

    Set ``video_source`` to a filename to process a video file (looped
    forever), or to 0 to use the live camera. Stops when any key is pressed
    or when a frame cannot be read (fix: the original ignored the ``ret``
    flag of ``read()`` and would crash on a None frame if the source failed).
    """

    frame_wait_ms = 50  # delay between displayed frames (~20 fps playback)

    # Set video_source = filename
    # or = 0 to use the live camera as source

    # video_source = 0
    video_source = "tennis-ball-video.mp4"

    video_capture = cv2.VideoCapture(video_source)

    frame_counter = 0
    while True:
        ret, rgb_frame = video_capture.read()

        if not ret:
            # source unavailable (missing file, unplugged camera) or frame
            # unreadable: stop cleanly instead of crashing on a None frame
            break

        if video_source == 0:
            pass
        else:
            # If video source is a file, loop indefinitely
            frame_counter += 1
            # When the last frame is reached
            # Reset the capture and the frame_counter
            if frame_counter == video_capture.get(cv2.CAP_PROP_FRAME_COUNT):
                frame_counter = 0
                video_capture.set(cv2.CAP_PROP_POS_FRAMES, frame_counter)

        # mirror only the live camera feed, not video files
        flip_image = video_source == 0
        processed_frame = process_frame(rgb_frame, flip_image)
        cv2.imshow("Frame", processed_frame)

        # stop if any key is pressed
        if cv2.waitKey(frame_wait_ms) > -1:  # & 0xFF == ord('q'):
            break

    video_capture.release()
    cv2.destroyAllWindows()
137 |
138 |
139 | if __name__ == "__main__":
140 | main()
141 |
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/index.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/index.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/orange2.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/orange2.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/orange3.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/orange3.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/orange5.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/orange5.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/oranges.md:
--------------------------------------------------------------------------------
1 | index.jpeg
2 |
3 | H 31-48
4 |
5 | S 50-100
6 |
7 | V 90-100
8 |
9 | orange2.jpeg
10 |
11 | H 30-36
12 |
13 | S 55-100
14 |
15 | V 80-100
16 |
17 | orange3.jpeg
18 |
19 | H 25-34
20 |
21 | S 75-100
22 |
23 | V 90-100
24 |
25 | oranges7.jpg
26 |
27 | H 19-41
28 |
29 | S 90-100
30 |
31 | V 65-100
32 |
33 | oranges7.jpeg (listed twice — presumably a second set of readings for the same image; TODO confirm which file this refers to)
34 |
35 | H 19-42
36 |
37 | S 40-100
38 |
39 | V 55-100
40 |
41 |
42 |
43 | Note:
44 |
45 | | Range | HSV picker pinetools (https://pinetools.com/image-color-picker) | ROS |
46 | | ----- | ------------------------------------------------------------ | ----- |
47 | | H | 0-360 | 0-180 |
48 | | S | 0-100 | 0-255 |
49 | | V | 0-100 | 0-255 |
50 |
51 | Summary:
52 |
53 | H 0-45 --> 0-23
54 |
55 | S 50-100 --> 120-255
56 |
57 | V 50-100 --> 120-255
58 |
59 |
60 |
61 | Tennis ball:
62 |
63 | H 40-60 --> 20-30
64 |
65 | S 35-60 --> 90-150
66 |
67 | V 60-100 --> 150-255
68 |
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/oranges4.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/oranges4.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/oranges6.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/oranges6.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/oranges7.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/oranges7.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/oranges8.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/oranges8.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/oranges/oranges9.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/ball-detection/oranges/oranges9.jpeg
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/tennis_ball_listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import cv2
4 | import rospy
5 | from ball_detection import process_frame
6 | from cv_bridge import CvBridge, CvBridgeError
7 | from sensor_msgs.msg import Image
8 |
9 |
def image_callback(ros_image):
    """Convert an incoming ROS Image message to an OpenCV frame, run the
    ball-detection pipeline on it and display the result.

    Args:
        ros_image (sensor_msgs.msg.Image): frame received from the topic.
    """
    print("received frame")
    global bridge, frame_wait_ms

    # convert ros_image into an opencv-compatible image frame
    try:
        rgb_frame = bridge.imgmsg_to_cv2(ros_image, "bgr8")
    except CvBridgeError as e:
        # Bail out: without this return, rgb_frame is undefined below and
        # the callback would die with a NameError on top of the
        # conversion error.
        print("Error in CVbridge: ", e)
        return

    # 1. process frame
    # NOTE(review): other callers pass a second flip flag; confirm
    # process_frame's second parameter has a default value.
    processed_frame = process_frame(rgb_frame)

    # 2. display processed_frame
    cv2.imshow("Frame", processed_frame)

    # 3. wait so OpenCV can refresh the window
    cv2.waitKey(frame_wait_ms)
28 |
29 |
def main():
    """Run the image_converter node: subscribe to the tennis-ball image
    topic and hand every frame to image_callback until shutdown."""
    rospy.init_node("image_converter", anonymous=True)

    # bridge used by the callback to translate ROS images to OpenCV
    global bridge
    bridge = CvBridge()

    # delay (ms) the callback passes to cv2.waitKey
    global frame_wait_ms
    frame_wait_ms = 1

    # Camera topic. Alternatives:
    #   turtlebot3 waffle: "/camera/rgb/image_raw/compressed"
    #   usb cam:           "/usb_cam/image_raw"
    # for task2:
    image_topic = "/tennis_ball_image"

    image_sub = rospy.Subscriber(image_topic, Image, image_callback)

    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down")
    cv2.destroyAllWindows()


if __name__ == "__main__":
    main()
58 |
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/tennis_ball_publisher.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import cv2
4 | import rospy
5 | from cv_bridge import CvBridge, CvBridgeError
6 | from sensor_msgs.msg import Image
7 |
8 |
def publisher():
    """Publish frames from the video source on the "tennis_ball_image"
    topic at 50 Hz until the node is shut down.

    Relies on module-level `video_capture` (an opened cv2.VideoCapture)
    and `bridge` (a CvBridge instance).
    """
    global video_capture
    global bridge

    # create a new publisher:
    # specify topic name, type of message & queue size
    pub = rospy.Publisher("tennis_ball_image", Image, queue_size=10)

    # initialize the node
    # anonymous=True flag so that rospy will choose a unique name
    rospy.init_node("Video_Publisher_node", anonymous=True)

    # set the loop rate
    rate = rospy.Rate(50)  # 50Hz

    # keep publishing until a Ctrl-C is pressed
    i = 0
    while not rospy.is_shutdown():
        ret, rgb_frame = video_capture.read()
        if not ret:
            # Failed read (camera gone / end of file): skip the cycle.
            # Converting None would raise a TypeError, which the
            # CvBridgeError handler below would NOT catch.
            rospy.logwarn("Frame capture failed, skipping publication")
            rate.sleep()
            continue
        try:
            sensor_msg = bridge.cv2_to_imgmsg(rgb_frame, "bgr8")
        except CvBridgeError as e:
            # Skip the frame instead of re-publishing the previous
            # (stale) message, which the original code did.
            print(e)
            rate.sleep()
            continue
        rospy.loginfo(f"Publication #{i}\n")
        pub.publish(sensor_msg)
        rate.sleep()
        i += 1
40 |
41 |
if __name__ == "__main__":
    # NOTE: the original declared `global video_source, video_capture`
    # and `global bridge` here; `global` is a no-op at module scope, so
    # those statements were dropped. The assignments below already bind
    # module-level names that publisher() accesses via `global`.

    # Set video_source = 0 for the live camera, or to a filename
    video_source = 0
    # video_source = 'tennis-ball-video.mp4'
    video_capture = cv2.VideoCapture(video_source)

    # create a bridge for converting OpenCV images to ROS messages
    bridge = CvBridge()

    try:
        publisher()
    except rospy.ROSInterruptException:
        pass

    # always release the capture device and close any OpenCV windows
    video_capture.release()
    cv2.destroyAllWindows()
60 |
--------------------------------------------------------------------------------
/ROS-notes/ball-detection/tennis_ball_usb_cam_tracker.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import cv2
4 | import rospy
5 | from ball_detection import process_frame
6 | from cv_bridge import CvBridge, CvBridgeError
7 | from sensor_msgs.msg import Image
8 |
9 |
def image_callback(ros_image):
    """Convert a ROS Image message to OpenCV, run ball detection on it
    (mirrored, since the source is a webcam) and display the result.

    Args:
        ros_image (sensor_msgs.msg.Image): frame received from the topic.
    """
    global bridge

    # convert ros_image into an opencv-compatible image frame
    try:
        rgb_frame = bridge.imgmsg_to_cv2(ros_image, "bgr8")
    except CvBridgeError as e:
        # Bail out: rgb_frame is undefined after a conversion failure,
        # so continuing would raise a NameError.
        print(e)
        return

    # 1. process frame (True = flip horizontally for the live camera)
    processed_frame = process_frame(rgb_frame, True)

    # 2. display processed_frame
    cv2.imshow("Frame", processed_frame)

    # 3. wait so OpenCV can refresh the window
    cv2.waitKey(frame_wait_ms)
27 |
28 |
def main():
    """Run the image_converter node: subscribe to the usb camera feed
    and hand every frame to image_callback until shutdown."""
    rospy.init_node("image_converter", anonymous=True)

    # bridge used by the callback for ROS <-> OpenCV conversion
    global bridge
    bridge = CvBridge()

    # delay (ms) the callback passes to cv2.waitKey
    global frame_wait_ms
    frame_wait_ms = 1

    # Camera topic. Alternative:
    #   turtlebot3 waffle: "/camera/rgb/image_raw/compressed"
    # for usb cam:
    image_topic = "/usb_cam/image_raw"

    image_sub = rospy.Subscriber(image_topic, Image, image_callback)

    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down")
    cv2.destroyAllWindows()


if __name__ == "__main__":
    main()
55 |
--------------------------------------------------------------------------------
/ROS-notes/course-materials/001-course-overview.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/001-course-overview.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/003-ros-overview-slides.key.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/003-ros-overview-slides.key.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/018-install-ros.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/018-install-ros.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/034-ROScheatsheet.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/034-ROScheatsheet.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/034-anis-koubaa-ros-course.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/034-anis-koubaa-ros-course.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/034-ros-file-system-notes.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/034-ros-file-system-notes.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/083-ros-motion.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/083-ros-motion.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/111-Turtlebot3.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/111-Turtlebot3.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/114-opencv-ros.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/114-opencv-ros.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/131-laser-scanners.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/131-laser-scanners.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/137-2018-08-13-16-42-15.bag:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/137-2018-08-13-16-42-15.bag
--------------------------------------------------------------------------------
/ROS-notes/course-materials/140-Arduino.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/140-Arduino.tar.gz
--------------------------------------------------------------------------------
/ROS-notes/course-materials/145-Arduino.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/145-Arduino.tar.gz
--------------------------------------------------------------------------------
/ROS-notes/course-materials/Course_02-02-L020-2D-transformations.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/Course_02-02-L020-2D-transformations.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/Course_02-03_L028_3D-transformations.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/Course_02-03_L028_3D-transformations.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/Course_02-04_L036_tf.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/Course_02-04_L036_tf.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/Course_02-05_L052_map-based-navigation.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/Course_02-05_L052_map-based-navigation.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/Course_02-06-L063-navigation-stack-tuning.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/Course_02-06-L063-navigation-stack-tuning.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/Course_02-L063-navguide.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/Course_02-L063-navguide.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/ETH_Zurich_Exercises_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/ETH_Zurich_Exercises_1.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/ETH_Zurich_Exercises_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/ETH_Zurich_Exercises_2.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/ETH_Zurich_Exercises_3.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/ETH_Zurich_Exercises_3.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/ETH_Zurich_Slides_1.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/ETH_Zurich_Slides_1.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/ETH_Zurich_Slides_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/ETH_Zurich_Slides_2.pdf
--------------------------------------------------------------------------------
/ROS-notes/course-materials/ETH_Zurich_Slides_3.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/ROS-notes/course-materials/ETH_Zurich_Slides_3.pdf
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "ROS-notes"
3 | version = "0.1.0"
4 | description = "My notes and exercises to learn ROS"
5 | authors = ["Manuel Heredia "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.8"
9 | numpy = "^1.23.2"
10 |
11 | [tool.poetry.dev-dependencies]
12 | pre-commit = "^2.20.0"
13 |
14 | [build-system]
15 | requires = ["poetry-core>=1.0.0"]
16 | build-backend = "poetry.core.masonry.api"
17 |
--------------------------------------------------------------------------------
/src/.gitignore:
--------------------------------------------------------------------------------
1 | freenect_stack/
2 | turtlebot3/
3 | turtlebot3_msgs/
4 | turtlebot3_simulations/
5 | ros_essentials_cpp/
6 | ros_basics_tutorials/
7 | CMakeLists.txt
8 |
--------------------------------------------------------------------------------
/src/mhered/launch/bug.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 | -->
30 |
31 |
32 |
33 |
34 |
35 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/mhered/launch/clean_py.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
10 |
11 |
--------------------------------------------------------------------------------
/src/mhered/launch/cleaner.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/src/mhered/launch/cleaner_param.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/src/mhered/launch/follower.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
15 |
16 |
17 |
18 |
19 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/src/mhered/launch/my_turtle.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/src/mhered/launch/robot.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 | -->
30 |
31 |
32 |
33 |
34 |
35 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/src/mhered/launch/static_transform_publisher.launch:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
--------------------------------------------------------------------------------
/src/mhered/msg/IoTSensor.msg:
--------------------------------------------------------------------------------
1 | int32 id
2 | string name
3 | float32 temp
4 | float32 humidity
5 |
--------------------------------------------------------------------------------
/src/mhered/src/add2ints_client.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 |
4 | import sys
5 |
6 | import rospy
7 |
8 | # import service definition
9 | from mhered.srv import AddTwoInts
10 |
11 | # from mhered.srv import AddTwoIntsRequest, AddTwoIntsResponse
12 |
13 |
def add_two_ints_client(x, y):
    """Request x + y from the "add_two_ints" service and return the sum.

    Args:
        x (int): first addend.
        y (int): second addend.

    Returns:
        int: the sum returned by the server, or None if the call failed.
    """
    # keep client waiting until service is alive
    rospy.wait_for_service("add_two_ints")

    try:
        # create a callable proxy for the service;
        # the service name must be the same defined in the server
        my_request = rospy.ServiceProxy("add_two_ints", AddTwoInts)

        # calling the proxy sends the request and blocks for the response
        response = my_request(x, y)
        return response.sum

    # Fixed: the original read `except rospy.ServiceException(e):`,
    # which is not valid exception-binding syntax — it would raise a
    # NameError (e undefined) instead of handling the service error.
    except rospy.ServiceException as e:
        print(f"Service call failed: {e}")
30 |
31 |
if __name__ == "__main__":
    # expect exactly two integer command-line arguments: x and y
    if len(sys.argv) == 3:
        x = int(sys.argv[1])
        y = int(sys.argv[2])
    else:
        # wrong arity: print usage and exit with a failure status
        print("%s [x y]" % sys.argv[0])
        sys.exit(1)

    print(f"Client will request {x} + {y}...")
    result = add_two_ints_client(x, y)
    print(f"Server returned {x} + {y} = {result}")
43 |
--------------------------------------------------------------------------------
/src/mhered/src/add2ints_server.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 |
4 | import rospy
5 |
6 | # import service definition
7 | from mhered.srv import AddTwoInts, AddTwoIntsRequest, AddTwoIntsResponse
8 |
9 |
def handle_add_two_ints(request):
    """Service handler: add the two integers of an AddTwoInts request.

    Args:
        request (AddTwoIntsRequest): carries the addends `a` and `b`.

    Returns:
        AddTwoIntsResponse: response carrying the sum.
    """
    # named `total` to avoid shadowing the builtin sum()
    total = request.a + request.b
    print(f"Server will return: {request.a} + {request.b} = {total}")
    # time.sleep(2)  # 2 seconds
    return AddTwoIntsResponse(total)
15 |
16 |
def add_two_ints_server():
    """Start the add_two_ints service node and serve requests forever."""
    rospy.init_node("add_two_ints_server")

    # Create the service from its name, type and handler function;
    # keeping the returned object alive keeps the service registered.
    service = rospy.Service("add_two_ints", AddTwoInts, handle_add_two_ints)

    print("Server is ready to add integers.")
    # block here, handing each incoming request to the handler
    rospy.spin()


if __name__ == "__main__":
    add_two_ints_server()
32 |
--------------------------------------------------------------------------------
/src/mhered/src/bug_student1.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Student 1: Tony Tao
3 |
4 | import math
5 |
6 | import rospy
7 | import tf
8 | from geometry_msgs.msg import Twist
9 | from sensor_msgs.msg import LaserScan
10 |
11 |
def go_straight(xdist, ydist):
    """Build a Twist that steers toward the point (xdist, ydist) given in
    the robot frame: turn toward its bearing while moving forward with a
    speed proportional to the remaining distance."""
    bearing = math.atan2(ydist, xdist)
    distance = math.sqrt(xdist**2 + ydist**2)

    cmd = Twist()
    cmd.linear.x = 0.1 * distance  # forward gain 0.1
    cmd.angular.z = 1 * bearing  # turn gain 1
    print(cmd.linear.x, cmd.angular.z)
    return cmd
20 |
21 |
def follow_wall(ranges, wall_thresh):
    """One wall-following step: rotate clockwise while a wall is within
    wall_thresh straight ahead, otherwise drive forward."""
    cmd = Twist()
    if detect_walls2(ranges, wall_thresh):
        cmd.angular.z = -0.6  # wall ahead: turn away
    else:
        cmd.linear.x = 0.2  # path clear: advance
    return cmd
30 |
31 |
def laserCallback(laserScan):
    """Bug-0 step run on every laser scan: look up the goal pose in the
    robot frame, then either stop (goal reached), drive straight at the
    goal, or follow the wall blocking the way.

    Args:
        laserScan (sensor_msgs.msg.LaserScan): latest scan.
    """
    global listener
    global tb3_velpub

    scans = laserScan.ranges
    wall_thresh = 0.3  # in meters
    goal_thresh = 0.05  # distance at which the goal counts as reached

    try:
        (trans, rot) = listener.lookupTransform(
            "base_footprint", "goal_frame", rospy.Time(0)
        )
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        # Fixed: without this return, trans/rot would be undefined below
        # and the callback would crash with a NameError right after
        # printing the failure message.
        print("lookup failed")
        return

    quaternion = rot
    rpy = tf.transformations.euler_from_quaternion(quaternion)
    print("translation vector: (", trans[0], ",", trans[1], ",", trans[2], ")")
    print("rotation angles: roll=", rpy[0], " pitch=", rpy[1], " yaw=", rpy[2])
    xdist = trans[0]
    ydist = trans[1]
    yaw = rpy[2]
    cmd = Twist()
    wall_detected = detect_walls(scans, xdist, ydist, yaw, wall_thresh)
    print(wall_detected)

    # case 1: reached goal -> publish the all-zero Twist to stop
    if math.sqrt(xdist**2 + ydist**2) < goal_thresh:
        tb3_velpub.publish(cmd)
        print("reached goal!!")

    # case 2: no obstacle detected -> head straight for the goal
    elif not wall_detected:
        cmd = go_straight(xdist, ydist)
        tb3_velpub.publish(cmd)
        print("driving straight")

    # case 3: obstacle detected -> follow the wall
    else:
        cmd = follow_wall(scans, wall_thresh)
        tb3_velpub.publish(cmd)
        print("following wall")
    print()
75 |
76 |
def get_minimum(lst):
    """Return the smallest non-NaN value in lst.

    Raises ValueError when lst is empty or contains only NaNs, the same
    as min() on an empty sequence.
    """
    return min(value for value in lst if not math.isnan(value))
83 |
84 |
def detect_walls(scan_range, robotx, roboty, yaw, wall_thresh):
    """Check whether a wall lies within wall_thresh of the robot in the
    direction of the goal (robotx, roboty), given the robot's yaw.

    Also broadcasts a "look_frame" tf frame oriented toward the goal so
    the look direction can be inspected in rviz.

    Args:
        scan_range: laser range readings; the code indexes it by degree,
            so it assumes one reading per degree — TODO confirm against
            the scanner's angular resolution.
        robotx, roboty: goal position relative to the robot.
        yaw: robot yaw in radians.
        wall_thresh: detection distance threshold in meters.

    Returns:
        True if the closest reading in the inspected beam is below
        wall_thresh, else False.
    """
    # remap negative yaw — NOTE(review): intent unclear (maps yaw into
    # (-2*pi, -pi]); verify this matches the scan's angular origin
    if yaw < 0:
        yaw = math.pi + abs(yaw)
        yaw = -yaw
    range = 10  # half-width of the inspected beam (shadows builtin range)
    length = len(scan_range)

    # angle to look (goal direction), in degrees, used as a scan index
    lookangle = int(math.degrees(math.atan2(roboty, robotx) + yaw))

    # check look angle: broadcast a frame pointing at the goal so the
    # look direction can be verified visually
    lookangleBroadCaster = tf.TransformBroadcaster()
    rot_quaternion = tf.transformations.quaternion_from_euler(
        0, 0, math.atan2(roboty, robotx)
    )
    trans = (0, 0, 0)
    curr_time = rospy.Time.now()
    lookangleBroadCaster.sendTransform(
        trans, rot_quaternion, curr_time, "look_frame", "base_footprint"
    )

    # take a window of +/- `range` readings around lookangle, wrapping
    # around either end of the scan array
    midIndex = lookangle
    if midIndex - range < 0:
        beam = (
            scan_range[(midIndex - range) % length : length + 1]
            + scan_range[0 : midIndex + range + 1]
        )
    elif midIndex + range >= length:
        beam = (
            scan_range[0 : midIndex + range - length]
            + scan_range[midIndex - range : length + 1]
        )
    else:
        beam = scan_range[midIndex - range : midIndex + range + 1]
    min_dist = get_minimum(beam)
    print(min_dist)
    if min_dist < wall_thresh:
        return True
    return False
125 |
126 |
def detect_walls2(scan_range, wall_thresh):
    """Check the readings straight ahead (a +/- `window` band around scan
    index 0, wrapping past the end of the array) for a wall closer than
    wall_thresh. Returns True when one is found."""
    window = 50
    length = len(scan_range)
    # stitch together the wrap-around beam centred on index 0
    beam = scan_range[length - window : length + 1] + scan_range[0:window]
    closest = get_minimum(beam)
    print(closest)
    return closest < wall_thresh
136 |
137 |
def main():
    """Start the bug0 node: wait for the goal transform to become
    available, then delegate all motion decisions to laserCallback."""
    rospy.init_node("bug0")

    global listener
    global tb3_velpub

    listener = tf.TransformListener()
    # allow tf up to 10 s to provide the map -> goal_frame transform
    listener.waitForTransform("map", "goal_frame", rospy.Time(), rospy.Duration(10.0))

    tb3_velpub = rospy.Publisher("cmd_vel", Twist, queue_size=10)
    rospy.Subscriber("scan", LaserScan, laserCallback)
    rospy.spin()


if __name__ == "__main__":
    main()
154 |
--------------------------------------------------------------------------------
/src/mhered/src/bug_student2.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python3
2 | # Student 2: Sellaoui Ramzi
3 |
4 | import math
5 |
6 | # import ros stuff
7 | import rospy
8 | from gazebo_msgs.msg import ModelState
9 | from gazebo_msgs.srv import SetModelState
10 |
11 | # import ros message
12 | from geometry_msgs.msg import Point
13 | from nav_msgs.msg import Odometry
14 | from sensor_msgs.msg import LaserScan
15 |
16 | # import ros service
17 | from std_srvs.srv import *
18 | from tf import transformations
19 |
# ---- module state shared between the callbacks and main() ----

# service proxies for the two behaviors, filled in by main()
srv_client_go_to_point_ = None
srv_client_wall_follower_ = None
yaw_ = 0  # current robot yaw in radians, updated by clbk_odom
yaw_error_allowed_ = 5 * (math.pi / 180)  # 5 degrees
position_ = Point()  # current robot position, updated by clbk_odom
# start pose, read from the parameter server with defaults.
# NOTE(review): rospy.get_param runs here at import time, before
# rospy.init_node in main() — confirm the master is reachable then.
initial_position_ = Point()
initial_position_.x = rospy.get_param("initial_x", -0.7)
initial_position_.y = rospy.get_param("initial_y", 0.0)
initial_position_.z = 0
# goal pose
desired_position_ = Point()
desired_position_.x = rospy.get_param("des_pos_x", 2.0)
desired_position_.y = rospy.get_param("des_pos_y", 0.4)
desired_position_.z = 0
regions_ = None  # latest laser readings by sector, set by clbk_laser
state_desc_ = ["Go to point", "wall following"]
state_ = 0
# 0 - go to point
# 1 - wall following
38 |
39 | # callbacks
40 |
41 |
def clbk_odom(msg):
    """Odometry callback: cache the robot's position and yaw in the
    module-level position_ / yaw_ variables.

    Args:
        msg (nav_msgs.msg.Odometry): current odometry estimate.
    """
    global position_, yaw_

    # position
    position_ = msg.pose.pose.position

    # yaw: extracted from the orientation quaternion
    quaternion = (
        msg.pose.pose.orientation.x,
        msg.pose.pose.orientation.y,
        msg.pose.pose.orientation.z,
        msg.pose.pose.orientation.w,
    )
    euler = transformations.euler_from_quaternion(quaternion)
    yaw_ = euler[2]
57 |
58 |
def clbk_laser(msg):
    """Laser callback: summarize the scan into five sectors (clamping
    each sector's minimum at 10) and store the result in the
    module-level regions_ dict.

    The active index ranges assume a 360-sample scan; the dead string
    literal below preserves the variant for a 720-sample scanner.
    """
    global regions_
    """
    regions_ = {
        'right': min(min(msg.ranges[0:143]), 10),
        'fright': min(min(msg.ranges[144:287]), 10),
        'front': min(min(msg.ranges[288:431]), 10),
        'fleft': min(min(msg.ranges[432:575]), 10),
        'left': min(min(msg.ranges[576:719]), 10),
    }
    """
    regions_ = {
        "right": min(min(msg.ranges[0:71]), 10),
        "fright": min(min(msg.ranges[72:143]), 10),
        "front": min(min(msg.ranges[144:235]), 10),
        "fleft": min(min(msg.ranges[236:287]), 10),
        "left": min(min(msg.ranges[288:359]), 10),
    }
77 |
78 |
def change_state(state):
    """Switch between the two bug-0 behaviors by toggling the behavior
    services: state 0 enables go-to-point, state 1 enables wall
    following. Exactly one behavior is active afterwards."""
    global state_, state_desc_
    global srv_client_wall_follower_, srv_client_go_to_point_

    state_ = state
    rospy.loginfo("state changed: %s" % state_desc_[state])

    go_to_point_active = state_ == 0
    srv_client_go_to_point_(go_to_point_active)
    srv_client_wall_follower_(not go_to_point_active)
91 |
92 |
def normalize_angle(angle):
    """Wrap angle into (-pi, pi] by subtracting one signed full turn
    when its magnitude exceeds pi; in-range angles pass through."""
    if math.fabs(angle) <= math.pi:
        return angle
    # 2*pi with the sign of `angle`
    return angle - (2 * math.pi * angle) / (math.fabs(angle))
97 |
98 |
def main():
    """Bug-0 supervisor: drive toward the goal, switching to wall
    following when an obstacle appears in front, and back to go-to-point
    when the direction toward the goal is clear again."""
    global regions_, position_, desired_position_, state_, yaw_, yaw_error_allowed_
    global srv_client_go_to_point_, srv_client_wall_follower_

    rospy.init_node("bug0")

    sub_laser = rospy.Subscriber("/scan", LaserScan, clbk_laser)
    sub_odom = rospy.Subscriber("/odom", Odometry, clbk_odom)

    rospy.wait_for_service("/go_to_point_switch")
    rospy.wait_for_service("/wall_follower_switch")
    rospy.wait_for_service("/gazebo/set_model_state")

    srv_client_go_to_point_ = rospy.ServiceProxy("/go_to_point_switch", SetBool)
    srv_client_wall_follower_ = rospy.ServiceProxy("/wall_follower_switch", SetBool)
    srv_client_set_model_state = rospy.ServiceProxy(
        "/gazebo/set_model_state", SetModelState
    )

    # teleport the robot to the configured start pose
    model_state = ModelState()
    model_state.model_name = "m2wr"
    model_state.pose.position.x = initial_position_.x
    model_state.pose.position.y = initial_position_.y
    resp = srv_client_set_model_state(model_state)

    # initialize going to the point
    change_state(0)

    rate = rospy.Rate(20)
    while not rospy.is_shutdown():
        # No scan received yet: wait one cycle. Fixed `== None` to the
        # idiomatic `is None`, and added rate.sleep() so the warm-up
        # loop no longer busy-spins the CPU.
        if regions_ is None:
            rate.sleep()
            continue

        if state_ == 0:
            # obstacle close in front: start following the wall
            if regions_["front"] > 0.15 and regions_["front"] < 1:
                change_state(1)

        elif state_ == 1:
            desired_yaw = math.atan2(
                desired_position_.y - position_.y, desired_position_.x - position_.x
            )
            err_yaw = normalize_angle(desired_yaw - yaw_)

            # goal less than 30 degrees off and the front is clear
            if (
                math.fabs(err_yaw) < (math.pi / 6)
                and regions_["front"] > 1.5
                and regions_["fright"] > 1
                and regions_["fleft"] > 1
            ):
                print("less than 30")
                change_state(0)

            # goal between 30 and 90 degrees to the left, left clear
            if (
                err_yaw > 0
                and math.fabs(err_yaw) > (math.pi / 6)
                and math.fabs(err_yaw) < (math.pi / 2)
                and regions_["left"] > 1.5
                and regions_["fleft"] > 1
            ):
                print("between 30 and 90 - to the left")
                change_state(0)

            # goal between 30 and 90 degrees to the right, right clear
            if (
                err_yaw < 0
                and math.fabs(err_yaw) > (math.pi / 6)
                and math.fabs(err_yaw) < (math.pi / 2)
                and regions_["right"] > 1.5
                and regions_["fright"] > 1
            ):
                print("between 30 and 90 - to the right")
                change_state(0)

        rate.sleep()


if __name__ == "__main__":
    main()
179 |
--------------------------------------------------------------------------------
/src/mhered/src/bug_student3.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python3
2 | # Student 3: Rizky Andhika Akbar
3 |
4 | # import ros stuff
5 | import rospy
6 | from geometry_msgs.msg import Twist
7 | from sensor_msgs.msg import LaserScan
8 |
# /cmd_vel publisher handle, set in main()
pub_ = None
# latest minimum laser distance per angular sector, updated by clbk_laser()
regions_ = {
    "right": 0,
    "fright": 0,
    "front": 0,
    "fleft": 0,
    "left": 0,
}
# current wall-follower FSM state; values are the keys of state_dict_
state_ = 0
state_dict_ = {
    0: "find the wall",
    1: "turn left",
    2: "follow the wall",
}
23 |
24 |
def clbk_laser(msg):
    """Laser callback: split the scan into 5 angular sectors, capped at 10 m."""
    global regions_
    # half-open [start, end) index ranges into msg.ranges for each sector
    sector_bounds = {
        "right": (0, 143),
        "fright": (144, 287),
        "front": (288, 431),
        "fleft": (432, 575),
        "left": (576, 713),
    }
    regions_ = {
        name: min(min(msg.ranges[lo:hi]), 10)
        for name, (lo, hi) in sector_bounds.items()
    }

    take_action()
36 |
37 |
def change_state(state):
    """Update the wall-follower FSM state, logging only actual transitions.

    BUGFIX: compare with `!=` instead of `is not` — identity comparison on
    integers only works by accident of CPython's small-int interning.
    """
    global state_, state_dict_
    if state != state_:
        rospy.loginfo("Wall follower - [%s] - %s" % (state, state_dict_[state]))
        state_ = state
43 |
44 |
def take_action():
    """Choose the wall-follower FSM state from the latest laser regions.

    Anything within `d` in front triggers a left turn; a wall only at the
    front-right is followed; otherwise the robot searches for a wall.
    Removed the unused `msg`/`linear_x`/`angular_z` locals and the dead
    `state_description` strings (they were assigned but never used).
    """
    global regions_
    regions = regions_

    # obstacle threshold [m]
    d = 1.5

    if regions["front"] > d and regions["fleft"] > d and regions["fright"] > d:
        # case 1 - nothing ahead
        change_state(0)
    elif regions["front"] < d and regions["fleft"] > d and regions["fright"] > d:
        # case 2 - front
        change_state(1)
    elif regions["front"] > d and regions["fleft"] > d and regions["fright"] < d:
        # case 3 - fright: wall found, follow it
        change_state(2)
    elif regions["front"] > d and regions["fleft"] < d and regions["fright"] > d:
        # case 4 - fleft
        change_state(0)
    elif regions["front"] < d and regions["fleft"] > d and regions["fright"] < d:
        # case 5 - front and fright
        change_state(1)
    elif regions["front"] < d and regions["fleft"] < d and regions["fright"] > d:
        # case 6 - front and fleft
        change_state(1)
    elif regions["front"] < d and regions["fleft"] < d and regions["fright"] < d:
        # case 7 - front and fleft and fright
        change_state(1)
    elif regions["front"] > d and regions["fleft"] < d and regions["fright"] < d:
        # case 8 - fleft and fright
        change_state(0)
    else:
        # unknown case (only reachable when a reading equals d exactly)
        rospy.loginfo(regions)
83 |
84 |
def find_wall():
    """Drive forward while arcing to the right to approach a wall."""
    cmd = Twist()
    cmd.linear.x = 0.2
    cmd.angular.z = -0.3
    return cmd
90 |
91 |
def turn_left():
    """Rotate in place counter-clockwise (no forward motion)."""
    cmd = Twist()
    cmd.angular.z = 0.3
    return cmd
96 |
97 |
def follow_the_wall():
    """Drive straight ahead along the wall.

    Removed the unused `global regions_` declaration — this function never
    reads or writes that variable.
    """
    msg = Twist()
    msg.linear.x = 0.5
    return msg
104 |
105 |
def main():
    """Wall-follower node: publish velocity commands at 20 Hz based on state_.

    The FSM state is updated asynchronously by clbk_laser()/take_action();
    this loop only translates the current state into a Twist command.
    Removed a stray `pass` statement left in the state-2 branch.
    """
    global pub_

    rospy.init_node("reading_laser")

    pub_ = rospy.Publisher("/cmd_vel", Twist, queue_size=1)

    sub = rospy.Subscriber("/m2wr/laser/scan", LaserScan, clbk_laser)

    rate = rospy.Rate(20)
    while not rospy.is_shutdown():
        msg = Twist()
        if state_ == 0:
            msg = find_wall()
        elif state_ == 1:
            msg = turn_left()
        elif state_ == 2:
            msg = follow_the_wall()
        else:
            rospy.loginfo("Unknown state!")

        pub_.publish(msg)

        rate.sleep()


if __name__ == "__main__":
    main()
135 |
--------------------------------------------------------------------------------
/src/mhered/src/cleaner_robot.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import math
4 | import time
5 |
6 | import rospy
7 | from geometry_msgs.msg import Twist
8 | from std_srvs.srv import Empty
9 | from turtlesim.msg import Pose
10 |
# turtle pose (x [m], y [m], yaw [rad]), continuously updated by pose_callback()
x = 0.0
y = 0.0
yaw = 0.0
14 |
15 |
def pose_callback(pose_message):
    """Cache the turtle's pose (x, y, theta) in the module-level globals."""
    global x, y, yaw
    x, y, yaw = pose_message.x, pose_message.y, pose_message.theta
22 |
23 |
def move(velocity_publisher, speed, distance, is_forward):
    """Straight motion method

    Drive in a straight line until odometry reports `distance` traveled.

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        speed: linear speed magnitude; direction is taken from is_forward.
        distance: distance to travel before stopping [m].
        is_forward: True to drive forward, False to drive backward.
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    # get current location from the global variable before entering the loop
    global x, y
    # save initial coordinates
    x0 = x
    y0 = y

    if is_forward:
        velocity_message.linear.x = abs(speed)
    else:
        velocity_message.linear.x = -abs(speed)

    distance_moved = 0.0
    # we publish the velocity at 50 Hz (50 times per second)
    loop_rate = rospy.Rate(50)

    rospy.loginfo("Straight motion")
    while True:

        velocity_publisher.publish(velocity_message)
        loop_rate.sleep()

        # Euclidean distance from the starting point
        distance_moved = abs(math.sqrt(((x - x0) ** 2) + ((y - y0) ** 2)))
        print(
            f"Distance moved: {distance_moved:10.4}"
            + f" Pose: ({x:8.4}, {y:8.4}, {yaw:8.4})"
        )
        if not (distance_moved < distance):
            rospy.loginfo("Distance reached")
            break

    # stop the robot after the distance is reached
    velocity_message.linear.x = 0.0
    velocity_publisher.publish(velocity_message)
63 |
64 |
def rotate(velocity_publisher, omega_degrees, angle_degrees, is_clockwise):
    """Rotation in place method

    Rotate in place by `angle_degrees` at `omega_degrees` per second.
    The rotated angle is estimated open-loop (elapsed time multiplied by
    the commanded rate), not from odometry.

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        omega_degrees: angular speed magnitude [deg/s].
        angle_degrees: angle to rotate before stopping [deg].
        is_clockwise: True for clockwise rotation (negative z).
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    omega = math.radians(omega_degrees)

    if is_clockwise:
        velocity_message.angular.z = -abs(omega)
    else:
        velocity_message.angular.z = abs(omega)

    # we publish the velocity at 50 Hz (50 times per second)
    loop_rate = rospy.Rate(50)

    rospy.loginfo("Rotation in place")

    # get initial timestamp
    t0 = rospy.Time.now().to_sec()

    while True:

        velocity_publisher.publish(velocity_message)

        # get current timestamp and integrate the commanded rate
        t1 = rospy.Time.now().to_sec()
        curr_yaw_degrees = (t1 - t0) * omega_degrees
        loop_rate.sleep()

        print(
            f"Angle rotated: {curr_yaw_degrees:10.4}"
            + f" Pose: ({x:8.4}, {y:8.4}, {yaw:8.4})"
        )
        if not (curr_yaw_degrees < angle_degrees):
            rospy.loginfo("Angle reached")
            break

    # stop the robot after the angle is reached
    velocity_message.angular.z = 0.0
    velocity_publisher.publish(velocity_message)
106 |
107 |
def set_yaw(velocity_publisher, orientation_degrees):
    """Set absolute orientation method

    Rotate in place to an absolute heading, turning through the smallest
    angle difference. Removed the unused `velocity_message` local (this
    function delegates all publishing to rotate()).

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        orientation_degrees: target absolute heading [deg].
    """

    # get current yaw from the global variable (updated by pose_callback())
    global yaw

    yaw_degrees = math.degrees(yaw)

    # subtract angles, wrapping result to [-180, 180]
    angle_degrees = ((orientation_degrees - yaw_degrees + 180) % 360) - 180

    # rotate towards smallest angle difference
    is_clockwise = angle_degrees < 0

    rotate(velocity_publisher, 15, abs(angle_degrees), is_clockwise)
    rospy.loginfo(f"Orientation set to {orientation_degrees}")
130 |
131 |
def go_to(velocity_publisher, goal):
    """Go to goal method

    Proportional controller on distance and heading toward goal = (x, y).

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        goal: (x_goal, y_goal) target coordinates.
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    # use current location from the global variable
    # (constantly updated by pose_callback())
    global x, y, yaw

    x_goal = goal[0]
    y_goal = goal[1]

    THRESHOLD = 0.1  # stop when closer than this [m]
    K_DISTANCE = 0.6  # proportional gain on distance
    K_ANGLE = 15  # proportional gain on heading error

    # we publish the velocity at 50 Hz
    loop_rate = rospy.Rate(50)

    rospy.loginfo(f"Go to goal: {goal}")

    while True:

        distance_to_goal = abs(math.sqrt(((x_goal - x) ** 2) + ((y_goal - y) ** 2)))
        angle_to_goal = math.atan2(y_goal - y, x_goal - x)

        # BUGFIX: wrap the heading error to [-pi, pi]; the raw difference
        # can reach +/-2*pi near the wrap-around, which made the angular
        # command spin the turtle the long way around.
        yaw_error = math.atan2(
            math.sin(angle_to_goal - yaw), math.cos(angle_to_goal - yaw)
        )

        print(
            f"Distance to goal: {distance_to_goal:10.4}"
            + f" Pose: ({x:8.4}, {y:8.4}, {yaw:8.4})"
        )

        if distance_to_goal < THRESHOLD:
            rospy.loginfo("Goal reached")
            break

        velocity_message.linear.x = K_DISTANCE * distance_to_goal
        velocity_message.angular.z = K_ANGLE * yaw_error

        velocity_publisher.publish(velocity_message)
        loop_rate.sleep()

    # stop the robot after the distance is reached
    velocity_message.linear.x = 0.0
    velocity_message.angular.z = 0.0
    velocity_publisher.publish(velocity_message)
179 |
180 |
def spiral(velocity_publisher, omega, d_vel):
    """Spiral method

    Trace an outward spiral: constant angular velocity while the linear
    velocity grows by `d_vel` every tick, until the turtle nears an edge
    of the arena.

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        omega: constant angular velocity.
        d_vel: linear velocity increment applied each iteration.
    """

    # use current location from the global variable
    # (constantly updated by pose_callback())
    global x, y

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    # publish at 50 Hz
    loop_rate = rospy.Rate(50)

    rospy.loginfo("Spiral")

    # keep clear of the turtlesim walls
    while 0.5 < x < 10.5 and 0.5 < y < 10.5:

        velocity_message.linear.x += d_vel
        velocity_message.angular.z = omega

        print(
            f"Linear speed: {velocity_message.linear.x:10.4}"
            + f" Pose: ({x:8.4}, {y:8.4}, {yaw:8.4})"
        )

        velocity_publisher.publish(velocity_message)
        loop_rate.sleep()

    rospy.loginfo("Edge reached")

    # stop the robot after condition is fulfilled
    velocity_message.linear.x = 0.0
    velocity_message.angular.z = 0.0
    velocity_publisher.publish(velocity_message)
218 |
219 |
def cleaner_app(velocity_publisher, WAIT, LIN_SPEED, ROT_SPEED):
    """Cleaner robot method

    Boustrophedon ("lawn-mower") coverage of the turtlesim arena: drive to
    the bottom-left corner, sweep the field in 10-unit lanes separated by
    1 unit, then return to the starting pose.

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        WAIT: pause between motion primitives [s].
        LIN_SPEED: linear speed for straight segments.
        ROT_SPEED: angular speed for in-place rotations [deg/s].
    """

    # save initial position
    global x, y, yaw
    x0, y0, yaw0 = x, y, yaw

    # move to the bottom-left corner and face "up" (+90 deg)
    go_to(velocity_publisher=velocity_publisher, goal=(0.5, 0.5))
    time.sleep(WAIT)

    set_yaw(velocity_publisher=velocity_publisher, orientation_degrees=90)
    time.sleep(WAIT)

    # each iteration covers two 10-unit lanes (up, shift 1, down, shift 1)
    for i in range(6):

        # lane 1: straight run
        move(
            velocity_publisher=velocity_publisher,
            speed=LIN_SPEED,
            distance=10.0,
            is_forward=True,
        )
        time.sleep(WAIT)

        # turn 90 deg clockwise toward the next lane
        rotate(
            velocity_publisher=velocity_publisher,
            omega_degrees=ROT_SPEED,
            angle_degrees=90,
            is_clockwise=True,
        )
        time.sleep(WAIT)

        # shift over by one lane width
        move(
            velocity_publisher=velocity_publisher,
            speed=LIN_SPEED,
            distance=1.0,
            is_forward=True,
        )
        time.sleep(WAIT)

        # turn 90 deg clockwise to face back down the field
        rotate(
            velocity_publisher=velocity_publisher,
            omega_degrees=ROT_SPEED,
            angle_degrees=90,
            is_clockwise=True,
        )
        time.sleep(WAIT)

        # lane 2: straight run in the opposite direction
        move(
            velocity_publisher=velocity_publisher,
            speed=LIN_SPEED,
            distance=10.0,
            is_forward=True,
        )

        time.sleep(WAIT)

        # turn 90 deg counter-clockwise toward the next lane
        rotate(
            velocity_publisher=velocity_publisher,
            omega_degrees=ROT_SPEED,
            angle_degrees=90,
            is_clockwise=False,
        )

        time.sleep(WAIT)

        # shift over by one lane width
        move(
            velocity_publisher=velocity_publisher,
            speed=LIN_SPEED,
            distance=1.0,
            is_forward=True,
        )

        time.sleep(WAIT)

        # turn 90 deg counter-clockwise to face up the field again
        rotate(
            velocity_publisher=velocity_publisher,
            omega_degrees=ROT_SPEED,
            angle_degrees=90,
            is_clockwise=False,
        )

    # return to initial position
    go_to(velocity_publisher=velocity_publisher, goal=(x0, y0))
    time.sleep(WAIT)

    # restore the initial heading as well
    set_yaw(
        velocity_publisher=velocity_publisher, orientation_degrees=math.degrees(yaw0)
    )
308 |
309 |
if __name__ == "__main__":
    try:
        rospy.init_node("my_turtle_pose_node", anonymous=True)

        # declare velocity publisher
        cmd_vel_topic = "/turtle1/cmd_vel"
        velocity_publisher = rospy.Publisher(cmd_vel_topic, Twist, queue_size=10)

        # declare pose subscriber
        pose_topic = "/turtle1/pose"
        pose_subscriber = rospy.Subscriber(pose_topic, Pose, pose_callback)

        # give the subscriber time to receive the first pose update
        time.sleep(1)

        # get ROS parameters (or default)
        WAIT = rospy.get_param("WAIT", 0.5)
        LIN_SPEED = rospy.get_param("LIN_SPEED", 4.0)
        ROT_SPEED = rospy.get_param("ROT_SPEED", 30.0)

        # call the function
        cleaner_app(velocity_publisher, WAIT, LIN_SPEED, ROT_SPEED)

        # reset the turtlesim simulation once cleaning is done
        print("start reset: ")
        time.sleep(5)
        rospy.wait_for_service("reset")
        reset_turtle = rospy.ServiceProxy("reset", Empty)
        reset_turtle()
        print("end reset: ")

        rospy.spin()
    except rospy.ROSInterruptException:
        rospy.loginfo("node terminated.")
342 |
--------------------------------------------------------------------------------
/src/mhered/src/frame_a_to_frame_b_broadcaster.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import time
4 |
5 | import rospy
6 | import tf
7 |
if __name__ == "__main__":
    # init the node
    rospy.init_node("frame_a_frame_b_broadcaster_node")

    # give roscore and any listeners time to come up
    time.sleep(2)

    # create a transformation broadcaster (publisher)
    transform_broadcaster = tf.TransformBroadcaster()

    while not rospy.is_shutdown():

        # create a quaternion
        # NOTE(review): the transform is constant, so this could be hoisted
        # out of the loop; tf still requires the periodic re-broadcast below
        rotation_quaternion = tf.transformations.quaternion_from_euler(
            0.2, 0.3, 0.1
        )  # Roll Pitch Yaw

        # translation vector
        translation_vector = (1.0, 2.0, 3.0)

        # time
        current_time = rospy.Time.now()

        # broadcast frame_b as a child of frame_a
        transform_broadcaster.sendTransform(
            translation_vector, rotation_quaternion, current_time, "frame_b", "frame_a"
        )  # child frame, parent frame
        time.sleep(0.5)  # frequency: we publish every .5 sec i.e. 2Hz

    # only reached after shutdown, so this returns immediately
    rospy.spin()
36 |
--------------------------------------------------------------------------------
/src/mhered/src/frame_a_to_frame_b_listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import rospy
4 | import tf
5 |
if __name__ == "__main__":
    rospy.init_node("frame_a_frame_b_listener_node")

    listener = tf.TransformListener()
    rate = rospy.Rate(1.0)
    # block until the transform is available (up to 4 s)
    listener.waitForTransform("/frame_a", "/frame_b", rospy.Time(), rospy.Duration(4.0))

    while not rospy.is_shutdown():
        try:
            # rospy.Time(0) requests the latest available transform
            (trans, rot) = listener.lookupTransform(
                "/frame_a", "/frame_b", rospy.Time(0)
            )
        except (
            tf.LookupException,
            tf.ConnectivityException,
            tf.ExtrapolationException,
        ):
            # NOTE(review): this `continue` skips rate.sleep(), so repeated
            # lookup failures busy-loop the CPU
            continue

        quaternion = rot
        rpy = tf.transformations.euler_from_quaternion(quaternion)
        print("transformation between frame_a and frame_b detected")
        print(f"translation vector XYZ: ({trans[0]:.5}, {trans[1]:.5}, {trans[2]:.5})")
        print(f"rotation angles RPY: ({rpy[0]:.5}, {rpy[1]:.5}, {rpy[2]:.5})")

        rate.sleep()
32 |
--------------------------------------------------------------------------------
/src/mhered/src/images/copies/copy-flower.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/copies/copy-flower.jpg
--------------------------------------------------------------------------------
/src/mhered/src/images/copies/copy-shapes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/copies/copy-shapes.png
--------------------------------------------------------------------------------
/src/mhered/src/images/copies/copy-tomato.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/copies/copy-tomato.jpg
--------------------------------------------------------------------------------
/src/mhered/src/images/copies/copy-tree.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/copies/copy-tree.jpg
--------------------------------------------------------------------------------
/src/mhered/src/images/copies/tomato-copy.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/copies/tomato-copy.jpg
--------------------------------------------------------------------------------
/src/mhered/src/images/flower.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/flower.jpg
--------------------------------------------------------------------------------
/src/mhered/src/images/shapes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/shapes.png
--------------------------------------------------------------------------------
/src/mhered/src/images/tomato.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/tomato.jpg
--------------------------------------------------------------------------------
/src/mhered/src/images/tree.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/mhered/src/images/tree.jpg
--------------------------------------------------------------------------------
/src/mhered/src/iot_publisher.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from random import uniform
4 |
5 | import rospy
6 |
7 | from mhered.msg import IoTSensor
8 |
9 |
def publisher():
    """Publish simulated IoT sensor readings on iot_sensor_topic at 1 Hz."""
    # create the publisher: topic name, message type, queue size
    pub = rospy.Publisher("iot_sensor_topic", IoTSensor, queue_size=10)

    # anonymous=True lets rospy append a unique suffix to the node name
    rospy.init_node("IoT_Publisher_node", anonymous=True)

    rate = rospy.Rate(1)  # 1 Hz

    # template message, reused for every publication
    sensor_msg = IoTSensor()
    sensor_msg.id = 1
    sensor_msg.name = "sensor #1"

    count = 0
    while not rospy.is_shutdown():
        # jitter the nominal readings to simulate a real sensor
        sensor_msg.temp = 25.4 + uniform(-1, 1)
        sensor_msg.humidity = 33 + uniform(-3, 3)

        rospy.loginfo(f"Publication #{count}\n {sensor_msg}")
        pub.publish(sensor_msg)
        rate.sleep()
        count += 1
37 |
38 |
if __name__ == "__main__":
    try:
        publisher()
    except rospy.ROSInterruptException:
        # normal shutdown (Ctrl-C / node kill) — nothing to clean up
        pass
44 |
--------------------------------------------------------------------------------
/src/mhered/src/iot_subscriber.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import rospy
4 |
5 | from mhered.msg import IoTSensor
6 |
7 |
def sensor_callback(message):
    """Log each IoTSensor message received on iot_sensor_topic.

    Renamed the local `id` to `sensor_id` — the original shadowed the
    `id` builtin.
    """

    sensor_id = message.id
    name = message.name
    temp = message.temp
    humidity = message.humidity

    formatted_msg = f"I heard: {sensor_id} {name} {temp:.5} {humidity:.5} "

    # get_caller_id(): Get fully resolved name of local node
    rospy.loginfo(rospy.get_caller_id() + "\n" + formatted_msg)
19 |
20 |
def subscriber():
    """Subscribe to iot_sensor_topic and log each message until shutdown."""

    rospy.init_node("IoT_Subscriber_node", anonymous=True)

    rospy.Subscriber("iot_sensor_topic", IoTSensor, sensor_callback)

    # spin() simply keeps python from exiting until this node is stopped
    rospy.spin()
29 |
30 |
31 | if __name__ == "__main__":
32 | subscriber()
33 |
--------------------------------------------------------------------------------
/src/mhered/src/readvideo.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import cv2
4 |
# Show a live grayscale, half-size camera feed with a rectangle overlay.
video_capture = cv2.VideoCapture(0)

# fail fast if the camera could not be opened — the original crashed later
# inside cvtColor with a cryptic error when frame was None
if not video_capture.isOpened():
    raise RuntimeError("Cannot open video capture device 0")

while True:
    ret, frame = video_capture.read()
    # stop cleanly if a frame could not be grabbed (device unplugged, EOF)
    if not ret:
        break
    frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    frame = cv2.resize(frame, (0, 0), fx=0.5, fy=0.5)
    cv2.rectangle(frame, (10, 10), (211, 211), (255, 0, 0), 5)
    cv2.imshow("Frame", frame)
    # quit on 'q'
    if cv2.waitKey(10) & 0xFF == ord("q"):
        break

video_capture.release()
cv2.destroyAllWindows()
18 |
--------------------------------------------------------------------------------
/src/mhered/src/robot_bouncy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | **Bouncy Robot**
5 |
6 | Assignment 2 Part 1: Move-Stop-Rotate Behavior for Obstacle Avoidance
7 |
8 | Loop:
9 | Behavior 1: Move the robot in a straight line until a laser beam of [-5,+5] deg
10 | around the centre line detects an obstacle closer than 0.6 m
11 |
12 | Behavior 2: Rotate the robot until the same laser beam detects no obstacles
13 | closer than 3m
14 |
15 | The robot should move forever without hitting obstacles.
16 | Tested on Turtlebot3 in the maze and house environments.
17 |
18 | """
19 |
20 |
21 | import math
22 | import time
23 |
24 | import numpy as np
25 | import rospy
26 | from geometry_msgs.msg import Twist
27 | from nav_msgs.msg import Odometry
28 | from sensor_msgs.msg import LaserScan
29 | from tf.transformations import euler_from_quaternion
30 |
# use current location from the global variables
# (constantly updated by odom_callback())
# NOTE(review): `global` at module scope is a no-op in Python; the
# statements below are kept as documentation of which names are shared
global x, y, yaw
x = 0.0
y = 0.0
yaw = 0.0

# use clearances from the global variables
# (constantly updated by scan_callback())
global fwd_clearance
fwd_clearance = 0.0

# get global params (assigned from the ROS parameter server in __main__)
global LASER_RANGE, BEAM_ANGLE
global ANGLE_TOL, SAFETY_DIST, MIN_CLEARANCE
global WAIT, LIN_SPEED, ROT_SPEED, RATE
47 |
48 |
def odom_callback(odom_message):
    """
    Constantly extract robot pose from /odom nav_msgs/Odometry
    """

    # update global variables
    global x, y, yaw

    pose = odom_message.pose.pose
    x, y = pose.position.x, pose.position.y
    q = pose.orientation
    # yaw is the third Euler angle (roll and pitch are discarded)
    _, _, yaw = euler_from_quaternion([q.x, q.y, q.z, q.w])
62 |
63 |
def scan_callback(message):
    """
    Constantly update global fwd_clearance from /scan sensor_msgs/LaserScan
    """

    global fwd_clearance

    # parameters
    global BEAM_ANGLE, LASER_RANGE

    ranges = np.asarray(message.ranges)
    beam_angles = np.degrees(
        message.angle_min + message.angle_increment * np.arange(len(ranges))
    )

    # map angles in [180, 360) to [-180, 0)
    beam_angles = np.where(beam_angles > 180, beam_angles - 360, beam_angles)

    # replace Inf/NaN readings with the nominal laser range
    # (copy so the original message data is untouched)
    clean_ranges = ranges.copy()
    clean_ranges[~np.isfinite(clean_ranges)] = LASER_RANGE

    # minimum distance within the +/-BEAM_ANGLE cone straight ahead
    # (sorting by angle, as the original did, is unnecessary for a minimum)
    in_beam = np.abs(beam_angles) < BEAM_ANGLE
    fwd_clearance = clean_ranges[in_beam].min()
    print(f"\nCLEARANCE FWD: {fwd_clearance:8.4}\n")
97 |
98 |
def move_fwd(velocity_publisher, speed):
    """Straight motion method

    Drive forward at `speed` until the forward clearance (kept current by
    scan_callback()) drops below SAFETY_DIST, then stop.

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        speed: forward linear speed [m/s].
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    # get current location from the global variable before entering the loop
    global x, y, fwd_clearance
    global SAFETY_DIST, RATE
    # save initial coordinates
    x0 = x
    y0 = y

    velocity_message.linear.x = speed

    distance_moved = 0.0
    # we publish the velocity at RATE Hz (RATE times per second)
    loop_rate = rospy.Rate(RATE)

    rospy.loginfo("Straight motion")
    while True:

        velocity_publisher.publish(velocity_message)
        loop_rate.sleep()

        # odometry-based distance from the starting point (logging only)
        distance_moved = abs(math.sqrt(((x - x0) ** 2) + ((y - y0) ** 2)))
        print(
            f"** Moving fwd: {distance_moved:6.4}m Pose: {x:6.4}m, {y:6.4}m, {math.degrees(yaw):6.4}deg"
        )
        if fwd_clearance < SAFETY_DIST:
            rospy.loginfo("** Obstacle reached")
            break

    rospy.loginfo("** Stopping")
    # stop the robot when obstacle is reached
    velocity_message.linear.x = 0.0
    velocity_publisher.publish(velocity_message)
136 |
137 |
def rotate_in_place(velocity_publisher, omega_degrees):
    """Rotation in place method

    Rotate at `omega_degrees` per second until the forward clearance
    (kept current by scan_callback()) exceeds MIN_CLEARANCE. The rotated
    angle printed below is an open-loop estimate (elapsed time times the
    commanded rate), used for logging only.

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
        omega_degrees: angular speed [deg/s]; sign sets the direction.
    """

    # use clearances from the global variables
    # (constantly updated by scan_callback())
    global fwd_clearance

    global RATE, MIN_CLEARANCE
    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    omega = math.radians(omega_degrees)

    # publish velocity message to rotate
    velocity_message.angular.z = omega
    # at RATE Hz (RATE times per second)
    loop_rate = rospy.Rate(RATE)

    # get initial timestamp
    t0 = rospy.Time.now().to_sec()

    while True:

        velocity_publisher.publish(velocity_message)

        # get current timestamp and integrate the commanded rate
        t1 = rospy.Time.now().to_sec()
        curr_yaw_degrees = (t1 - t0) * omega_degrees
        loop_rate.sleep()

        print(
            f"** Rotating: {curr_yaw_degrees:6.4}deg Pose: ({x:6.4}m, {y:6.4}m, {math.degrees(yaw):6.4}deg)"
        )
        if fwd_clearance > MIN_CLEARANCE:
            rospy.loginfo("** Found clearance")
            break

    # stop the robot after the angle is reached
    velocity_message.angular.z = 0.0
    velocity_publisher.publish(velocity_message)
    rospy.loginfo("** Stopping rotation")
179 |
180 |
def bouncy_robot(velocity_publisher):
    """Bouncy robot

    Endless loop alternating two behaviors:
    1. drive forward until the forward clearance drops below SAFETY_DIST;
    2. rotate in place until the clearance exceeds MIN_CLEARANCE.

    Args:
        velocity_publisher: rospy publisher on a geometry_msgs/Twist topic.
    """

    # use current location from the global variables
    # (constantly updated by odom_callback())
    global x, y, yaw

    # use clearances from the global variables
    # (constantly updated by scan_callback())
    global fwd_clearance

    # get global params
    global LASER_RANGE, BEAM_ANGLE
    global ANGLE_TOL, SAFETY_DIST, MIN_CLEARANCE
    global WAIT, LIN_SPEED, ROT_SPEED, RATE

    while True:
        # Behavior 1: move fwd until blocked by obstacle
        print("** BEHAVIOR 1: MOVING FORWARD\n\n")
        move_fwd(velocity_publisher=velocity_publisher, speed=LIN_SPEED)
        print("** REACHED OBSTACLE\n\n")
        time.sleep(WAIT)

        # Behavior 2: rotate until clearance found
        print("** BEHAVIOR 2: ROTATING\n\n")
        rotate_in_place(velocity_publisher=velocity_publisher, omega_degrees=ROT_SPEED)
        print("** FOUND DIRECTION CLEAR FROM OBSTACLES\n\n")
        time.sleep(WAIT)
209 |
210 |
if __name__ == "__main__":
    try:

        # declare the node
        rospy.init_node("bouncy_node", anonymous=True)

        # declare velocity publisher
        cmd_vel_topic = "/cmd_vel"
        velocity_publisher = rospy.Publisher(cmd_vel_topic, Twist, queue_size=10)

        # declare /odom subscriber
        odom_topic = "/odom"
        odom_subscriber = rospy.Subscriber(odom_topic, Odometry, odom_callback)

        # declare /scan subscriber
        scan_topic = "/scan"
        scan_subscriber = rospy.Subscriber(scan_topic, LaserScan, scan_callback)

        # give the subscribers time to receive their first messages
        time.sleep(2.0)

        # get ROS parameters (or default)
        # NOTE(review): `global` here is at module scope and has no effect
        global LASER_RANGE, BEAM_ANGLE
        global ANGLE_TOL, SAFETY_DIST, MIN_CLEARANCE
        global WAIT, LIN_SPEED, ROT_SPEED, RATE

        LASER_RANGE = rospy.get_param("LASER_RANGE", 5.0)  # m
        BEAM_ANGLE = rospy.get_param("BEAM_ANGLE", 5.0)  # degrees

        ANGLE_TOL = rospy.get_param("ANGLE_TOL", 1.0)  # degrees
        SAFETY_DIST = rospy.get_param("SAFETY_DIST", 0.6)  # m
        MIN_CLEARANCE = rospy.get_param("MIN_CLEARANCE", 3.0)  # m

        WAIT = rospy.get_param("WAIT", 0.5)  # s
        LIN_SPEED = rospy.get_param("LIN_SPEED", 0.4)  # m/s
        ROT_SPEED = rospy.get_param("ROT_SPEED", 15.0)  # degrees/s
        RATE = rospy.get_param("RATE", 10.0)  # Hz

        # launch the bouncy robot app
        bouncy_robot(velocity_publisher)

        rospy.spin()

    except rospy.ROSInterruptException:
        rospy.loginfo("Node terminated")
255 |
--------------------------------------------------------------------------------
/src/mhered/src/robot_instructor.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import time
4 |
5 | import rospy
6 | from geometry_msgs.msg import Twist
7 | from sensor_msgs.msg import LaserScan
8 |
9 | vels = Twist()
10 |
11 |
def scan_callback(a):
    """Laser callback: delegate each incoming LaserScan message to move()."""
    move(a)
14 |
15 |
def move(s):
    """Drive forward; stop and rotate when an obstacle is within 0.6 m ahead.

    Args:
        s: sensor_msgs/LaserScan; indices 0..5 and -5..-1 cover the beams
           around the robot's heading (roughly [-5, +5] degrees).
    """
    loop_rate = rospy.Rate(10)
    vels.linear.x = 0.4
    vels.angular.z = 0

    # forward beams: first six and last five readings of the scan
    front_beams = list(s.ranges[0:6]) + list(s.ranges[-5:])

    # stop moving if obstacles detected closer than 0.6 m ahead
    # (replaces the original's 11 hand-written comparisons)
    if any(r < 0.6 for r in front_beams):
        vels.linear.x = 0
        # publish vels to /cmd_vel
        pub.publish(vels)

    # if stopped, rotate
    if vels.linear.x == 0:
        rotate(s)
        # publish vels to /cmd_vel
        pub.publish(vels)

    # publish vels to /cmd_vel
    loop_rate.sleep()
    pub.publish(vels)
48 |
49 |
def rotate(b):
    """Pick a rotation direction based on which beams are blocked within 3 m.

    Args:
        b: sensor_msgs/LaserScan; beams 0..5 and -5..-1 straddle the
           robot's heading.
    """
    loop_rate = rospy.Rate(10)

    # replaces the original's hand-written comparison chains
    low_beams_blocked = any(r < 3 for r in b.ranges[0:6])
    high_beams_blocked = any(r < 3 for r in b.ranges[-5:])

    # stop rotating only once clearance > 3 m ahead
    if low_beams_blocked:
        vels.angular.z = 1
    elif high_beams_blocked:
        vels.angular.z = -1
    else:
        pub.publish(vels)

    pub.publish(vels)
    loop_rate.sleep()
77 |
78 |
if __name__ == "__main__":
    # launch node
    rospy.init_node("scan_node12", anonymous=True)
    # declare publisher
    pub = rospy.Publisher("/cmd_vel", Twist, queue_size=100)
    # declare subscriber to /scan
    rospy.Subscriber("scan", LaserScan, scan_callback)
    # give publisher/subscriber time to register before callbacks drive
    time.sleep(2)
    rospy.spin()
88 |
--------------------------------------------------------------------------------
/src/mhered/src/robot_lib.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Library of robot reusable behaviours:
4 | * rotate(vel_publisher, omega_degrees, angle_degrees, is_clockwise, rate=50)
5 | * ...
6 |
7 | """
8 | import math
9 |
10 | import rospy
11 | from geometry_msgs.msg import Twist
12 |
13 |
def rotate(vel_publisher, omega_degrees, angle_degrees, is_clockwise, rate=50):
    """Rotation in place method

    Open-loop rotation: commands a constant angular speed and estimates the
    angle travelled from elapsed wall time only (no odometry feedback).

    :param vel_publisher: rospy Publisher for geometry_msgs/Twist commands
    :param omega_degrees: angular speed magnitude, degrees/s
    :param angle_degrees: target angle to rotate, degrees
    :param is_clockwise: True rotates clockwise (negative z)
    :param rate: publishing frequency, Hz (default 50)
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    omega = math.radians(omega_degrees)

    if is_clockwise:
        velocity_message.angular.z = -abs(omega)
    else:
        velocity_message.angular.z = abs(omega)

    # publish the velocity at RATE Hz (RATE times per second)
    loop_rate = rospy.Rate(rate)

    rospy.loginfo("Rotation in place")

    # get initial timestamp
    t_0 = rospy.Time.now().to_sec()

    while True:

        vel_publisher.publish(velocity_message)

        # get current timestamp and estimate yaw travelled from elapsed time
        t_1 = rospy.Time.now().to_sec()
        curr_yaw_degrees = (t_1 - t_0) * omega_degrees
        loop_rate.sleep()

        rospy.loginfo(
            f"Angle rotated: {curr_yaw_degrees:8.4}deg "
            # + f"Pose: ({x:8.4}m, {y:8.4}m, {math.degrees(yaw):8.4}deg)"
        )

        if curr_yaw_degrees >= angle_degrees:
            rospy.loginfo("Angle reached")
            break

    # stop the robot after the angle is reached
    velocity_message.angular.z = 0.0
    vel_publisher.publish(velocity_message)
56 |
--------------------------------------------------------------------------------
/src/mhered/src/robot_marauder.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """
4 | **Marauder Robot**
5 |
6 | Assignment 2 Part 2: PID controller
7 |
8 | Proportional controller of angular and linear speed such as the robot moves
9 | smoothly without hitting obstacles.
10 |
11 | * Make linear velocity proportional to the fwd distance to obstacles - similar
12 | to Go-to-Goal Behavior
13 |
14 | * Make angular velocity proportional to the distance to obstacles on the left
15 | and right sides: rotate smoothly to the right if obstacles on the left are
16 | much closer than on the right (or to the left in the opposite case)
17 |
18 | The robot should move forever without hitting obstacles.
19 | Tested on Turtlebot3 in the maze and house environments.
20 |
21 | """
22 |
23 |
24 | import time
25 |
26 | import numpy as np
27 | import rospy
28 | from geometry_msgs.msg import Twist
29 | from nav_msgs.msg import Odometry
30 | from sensor_msgs.msg import LaserScan
31 | from tf.transformations import euler_from_quaternion
32 |
# use current location from the global variables
# (constantly updated by odom_callback())
global x, y, yaw, fwd_clearance
x = 0.0  # robot x position (odom frame)
y = 0.0  # robot y position (odom frame)
yaw = 0.0  # robot heading, rad (from euler_from_quaternion)
fwd_clearance = 0.0  # min range within +-BEAM_ANGLE of the heading
left_clearance = 0.0  # min range over shifted angles in (-100, -30) deg
right_clearance = 0.0  # min range over shifted angles in (30, 100) deg

# get global params
# (note: `global` at module level is a no-op; kept to list the parameters)
global LASER_RANGE, BEAM_ANGLE
global ANGLE_TOL, SAFETY_DIST, MIN_CLEARANCE
global WAIT, LIN_SPEED, ROT_SPEED, RATE
47 |
48 |
def odom_callback(odom_message):
    """
    Constantly extract robot pose from /odom nav_msgs/Odometry

    Updates module globals x, y (position) and yaw (rad), converting the
    orientation quaternion to Euler angles and keeping only the yaw.
    """

    # update global variables
    global x, y, yaw

    x = odom_message.pose.pose.position.x
    y = odom_message.pose.pose.position.y
    q = odom_message.pose.pose.orientation
    # euler_from_quaternion expects [x, y, z, w]; roll and pitch discarded
    q_as_list = [q.x, q.y, q.z, q.w]
    (_, _, yaw) = euler_from_quaternion(q_as_list)
62 |
63 |
def scan_callback(message):
    """
    Constantly update global fwd_clearance from /scan sensor_msgs/LaserScan

    Also updates left_clearance and right_clearance.  Non-finite readings
    (Inf/NaN) are replaced by LASER_RANGE before taking minima, and an empty
    angular sector now falls back to LASER_RANGE instead of raising
    ValueError from min() on an empty list.
    """

    global fwd_clearance, right_clearance, left_clearance

    # parameters
    global BEAM_ANGLE, LASER_RANGE

    ranges = np.asarray(message.ranges)
    # beam angles in degrees, reconstructed from the scan metadata
    angles = np.degrees(
        message.angle_min + message.angle_increment * np.arange(len(ranges))
    )

    # shift angles >180 from [180 - 360) to [-180, 0)
    angles_shifted = np.where(angles > 180, angles - 360, angles)

    # sort by angle, descending (matches the original ordering)
    sort_indices = angles_shifted.argsort()
    angles_sorted = angles_shifted[sort_indices[::-1]]
    ranges_sorted = ranges[sort_indices[::-1]]

    # remove Inf and NaN values
    # deep copy to avoid modifying ranges_sorted
    clean_ranges_sorted = ranges_sorted.copy()
    clean_ranges_sorted[~np.isfinite(clean_ranges_sorted)] = LASER_RANGE

    # slice the beam at -BEAM_ANGLE +BEAM_ANGLE
    fwd_ranges = [
        r for (a, r) in zip(angles_sorted, clean_ranges_sorted) if abs(a) < BEAM_ANGLE
    ]
    # default= guards against an empty sector (e.g. narrow-FOV scanners)
    fwd_clearance = min(fwd_ranges, default=LASER_RANGE)

    right_ranges = [
        r for (a, r) in zip(angles_sorted, clean_ranges_sorted) if 30 < a < 100
    ]
    right_clearance = min(right_ranges, default=LASER_RANGE)

    left_ranges = [
        r for (a, r) in zip(angles_sorted, clean_ranges_sorted) if -100 < a < -30
    ]
    left_clearance = min(left_ranges, default=LASER_RANGE)

    print(
        f"\nCLEARANCE FWD: {fwd_clearance:8.4}"
        f" LEFT: {left_clearance:8.4}"
        f" RIGHT: {right_clearance:8.4}\n"
    )
114 |
115 |
def marauder_robot(velocity_publisher):
    """Marauder robot based on Go to goal method

    Proportional controller loop (runs forever, at RATE Hz):
    * linear speed proportional to forward clearance beyond SAFETY_DIST
    * angular speed proportional to the right/left clearance difference,
      applied only when that difference exceeds THRESHOLD (dead band)
    """

    # use current location from the global variables
    # (constantly updated by odom_callback())
    global x, y, yaw
    # use current clearances from the global variables
    # (constantly updated by scan_callback())
    global fwd_clearance, left_clearance, right_clearance

    # get global params
    global LASER_RANGE, BEAM_ANGLE
    global WAIT, RATE
    global THRESHOLD, K_DISTANCE, K_ANGLE, SAFETY_DIST

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    # publish the velocity at RATE Hz (RATE times per second)
    loop_rate = rospy.Rate(RATE)

    while True:
        # default: stop (too close ahead and balanced sides)
        vel_lin = vel_ang = 0
        if fwd_clearance > SAFETY_DIST:
            vel_lin = K_DISTANCE * (fwd_clearance - SAFETY_DIST)

        # steer toward the side with more room, with a THRESHOLD dead band
        if abs(right_clearance - left_clearance) > THRESHOLD:
            vel_ang = K_ANGLE * (right_clearance - left_clearance)

        velocity_message.linear.x = vel_lin
        velocity_message.angular.z = vel_ang

        # THRESHOLD here to avoid fluctuations?
        velocity_publisher.publish(velocity_message)
        loop_rate.sleep()
151 |
152 |
if __name__ == "__main__":
    try:

        # declare the node
        rospy.init_node("marauder_node", anonymous=True)

        # declare velocity publisher
        cmd_vel_topic = "/cmd_vel"
        velocity_publisher = rospy.Publisher(cmd_vel_topic, Twist, queue_size=10)

        # declare /odom subscriber
        odom_topic = "/odom"
        odom_subscriber = rospy.Subscriber(odom_topic, Odometry, odom_callback)

        # declare /scan subscriber
        scan_topic = "/scan"
        scan_subscriber = rospy.Subscriber(scan_topic, LaserScan, scan_callback)

        # let publishers/subscribers register before the control loop starts
        time.sleep(2.0)

        # get ROS parameters (or default)
        global LASER_RANGE, BEAM_ANGLE
        global WAIT, RATE
        global THRESHOLD, K_DISTANCE, K_ANGLE, SAFETY_DIST

        LASER_RANGE = rospy.get_param("LASER_RANGE", 5.0)  # m
        BEAM_ANGLE = rospy.get_param("BEAM_ANGLE", 5.0)  # degrees

        WAIT = rospy.get_param("WAIT", 0.5)  # s
        RATE = rospy.get_param("RATE", 10.0)  # Hz

        THRESHOLD = rospy.get_param("THRESHOLD", 0.15)  # m/s
        K_DISTANCE = rospy.get_param("K_DISTANCE", 0.25)  # 1/s
        K_ANGLE = rospy.get_param("K_ANGLE", 10.0)  # 1/s
        SAFETY_DIST = rospy.get_param("SAFETY_DIST", 0.6)  # m

        # launch the marauder robot app
        marauder_robot(velocity_publisher)

        # not reached while marauder_robot loops forever
        rospy.spin()

    except rospy.ROSInterruptException:
        rospy.loginfo("Node terminated")
196 |
--------------------------------------------------------------------------------
/src/mhered/src/robot_student1.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import math
4 | import time
5 | from math import pi
6 |
7 | import numpy as np
8 | import rospy
9 | from geometry_msgs.msg import Twist
10 | from nav_msgs.msg import Odometry
11 | from sensor_msgs.msg import LaserScan
12 | from std_msgs.msg import String
13 |
# topics
laserDataTopic = "/scan"
velCmdTopic = "/cmd_vel"
odomTopic = "/odom"

# simulation variables
half_front_vision_range = 15  # beams kept on each side of the front ray
LIN_VEL = 0.2  # base forward speed
LIN_VEL_accel = 300  # unused here — TODO confirm whether still needed
ANG_VEL = pi / 12  # base turning speed (rad/s)
min_dist = 0.6  # range reading that triggers stop-and-turn
delimiting_sensor_range = 3  # clearance required to stop turning
kpLinear = 0.15  # proportional gain for linear speed
kpAngular = 0.1  # proportional gain for angular speed
28 |
29 |
class cTurtle:
    """Obstacle-avoiding robot: drive straight, stop near walls, turn away.

    Subscribes to /scan (front-sector ranges) and /odom (speed estimate),
    publishes geometry_msgs/Twist commands on /cmd_vel.
    """

    def __init__(self) -> None:
        # latest front-facing slice of the scan (filled by scanCB)
        self.front_vision = []
        # estimated forward speed from odometry (filled by odomCB)
        self.Vx = 0
        # index of the central ray in front_vision; -1 until first scan
        self.center_ray_index = -1
        self.process_time = 0.000043

        # configuration of velocity messages
        # moving message
        self.vel_msg = Twist()
        self.vel_msg.linear.x = LIN_VEL
        self.vel_msg.linear.y = 0
        self.vel_msg.linear.z = 0
        self.vel_msg.angular.x = 0
        self.vel_msg.angular.y = 0
        self.vel_msg.angular.z = 0

        # stopping message
        self.stop_msg = Twist()
        self.stop_msg.linear.x = 0
        self.stop_msg.linear.y = 0
        self.stop_msg.linear.z = 0
        self.stop_msg.angular.x = 0
        self.stop_msg.angular.y = 0
        self.stop_msg.angular.z = 0

        # left rotation message
        self.rot_left_msg = Twist()
        self.rot_left_msg.linear.x = 0
        self.rot_left_msg.linear.y = 0
        self.rot_left_msg.linear.z = 0
        self.rot_left_msg.angular.x = 0
        self.rot_left_msg.angular.y = 0
        self.rot_left_msg.angular.z = ANG_VEL

        # right rotation message
        self.rot_right_msg = Twist()
        self.rot_right_msg.linear.x = 0
        self.rot_right_msg.linear.y = 0
        self.rot_right_msg.linear.z = 0
        self.rot_right_msg.angular.x = 0
        self.rot_right_msg.angular.y = 0
        self.rot_right_msg.angular.z = -ANG_VEL

        rospy.init_node("somethingnotrequired", anonymous=True)
        self.rate = rospy.Rate(20)
        self.sub = rospy.Subscriber(laserDataTopic, LaserScan, self.scanCB)
        self.pub = rospy.Publisher(velCmdTopic, Twist, queue_size=0)
        self.od_sub = rospy.Subscriber(odomTopic, Odometry, self.odomCB)

    def odomCB(self, msg):
        """Estimate forward speed by sampling the pose twice, one tick apart.

        NOTE(review): both samples read the SAME message object, so the
        displacement is usually zero — confirm whether a stored previous
        pose was intended instead.
        """
        x1 = msg.pose.pose.position.x
        y1 = msg.pose.pose.position.y
        t0 = time.time()
        self.rate.sleep()
        x2 = msg.pose.pose.position.x
        y2 = msg.pose.pose.position.y
        t1 = time.time()
        # bug fix: the original used `==` (a no-op comparison), so self.Vx
        # was never updated from its initial 0
        self.Vx = abs(math.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)) / (t1 - t0)

    def scanCB(self, message):
        """Keep the front sector: last and first N readings joined together."""
        self.front_vision = (
            message.ranges[-half_front_vision_range:]
            + message.ranges[:half_front_vision_range]
        )

    def move_straight(self):
        """Drive forward, speed proportional to the closest front reading."""
        print(
            "moving straight\t[%.2f,\t%.2f,\t%.2f]"
            % (
                self.front_vision[0],
                self.front_vision[self.center_ray_index],
                self.front_vision[-1],
            )
        )
        if np.isinf(self.front_vision[self.center_ray_index]):
            # nothing ahead: assume a fixed 3.5 clearance
            self.vel_msg.linear.x = math.sqrt(3.5 - min_dist) * kpLinear + 0.05
        else:
            # speed from the smallest of the three reference rays
            self.vel_msg.linear.x = (
                math.sqrt(
                    np.min(
                        [
                            self.front_vision[-1],
                            self.front_vision[self.center_ray_index],
                            self.front_vision[0],
                        ]
                    )
                    - min_dist
                )
                * kpLinear
                + 0.06
            )
        self.pub.publish(self.vel_msg)

    def stop_moving(self):
        """Publish a zero-velocity command."""
        print("stopping")
        self.pub.publish(self.stop_msg)

    def turn(self):
        """Rotate away from the nearer edge ray until it clears."""
        if self.front_vision[-1] < self.front_vision[0]:
            # edge ray at index -1 is closer: rotate with rot_right_msg
            while self.front_vision[-1] < delimiting_sensor_range:
                self.rot_right_msg.angular.z = -(
                    (delimiting_sensor_range - self.front_vision[-1]) * kpAngular + 0.05
                )
                print(
                    "turning right\t[%.2f,\t%.2f,\t%.2f]"
                    % (
                        self.front_vision[0],
                        self.front_vision[self.center_ray_index],
                        self.front_vision[-1],
                    )
                )
                self.pub.publish(self.rot_right_msg)
                self.rate.sleep()
        elif self.front_vision[-1] > self.front_vision[0]:
            # edge ray at index 0 is closer: rotate with rot_left_msg
            while self.front_vision[0] < delimiting_sensor_range:
                # bug fix: the original wrote the proportional speed into
                # rot_right_msg but published rot_left_msg, so the left turn
                # always ran at the constant default rate
                self.rot_left_msg.angular.z = (
                    delimiting_sensor_range - self.front_vision[0]
                ) * kpAngular + 0.05
                print(
                    "turning left\t[%.2f,\t%.2f,\t%.2f]"
                    % (
                        self.front_vision[0],
                        self.front_vision[self.center_ray_index],
                        self.front_vision[-1],
                    )
                )
                self.pub.publish(self.rot_left_msg)
                self.rate.sleep()

    def core(self):
        """One control step: stop-and-turn near obstacles, else go straight."""
        self.start = time.process_time()
        if len(self.front_vision):
            self.center_ray_index = int(len(self.front_vision) / 2)
            if (
                self.front_vision[self.center_ray_index] <= min_dist
                or self.front_vision[-1] <= min_dist
                or self.front_vision[0] <= min_dist
            ):
                # brake until odometry reports (near) standstill, then turn
                while self.Vx > 0.02:
                    self.stop_moving()
                    self.rate.sleep()
                self.turn()
            else:
                self.move_straight()
        else:
            # no scan received yet
            self.stop_moving()
            print("NULL")
        self.rate.sleep()
183 |
184 |
if __name__ == "__main__":
    help_ = cTurtle()
    try:
        # run the control loop forever
        while 1:
            help_.core()
    except rospy.ROSInterruptException:
        # flood stop commands so the robot actually halts before exiting
        for x in range(15):
            help_.stop_moving()
        print("\n\nNode Terminated\n")
    rospy.spin()
195 |
--------------------------------------------------------------------------------
/src/mhered/src/robot_student2.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import math
4 |
5 | import rospy
6 | from geometry_msgs.msg import Twist
7 | from sensor_msgs.msg import LaserScan
8 |
# latest minimum forward range from the laser (updated by scan_callback);
# 99 acts as "no reading yet"
distance = 99
10 |
11 |
def move(velocity_publisher):
    """Drive straight at 0.3 until the forward clearance drops to DIST.

    Uses the module-global `distance` (updated by scan_callback), then
    publishes a zero linear velocity to stop.

    :param velocity_publisher: rospy Publisher of Twist on cmd_vel
    """
    DIST = 0.6
    global distance

    velocity_message = Twist()

    loop_rate = rospy.Rate(10)
    while distance > DIST:

        linear_speed = 0.3
        velocity_message.linear.x = linear_speed
        velocity_publisher.publish(velocity_message)
        # honour the 10 Hz rate (the original created loop_rate but never
        # slept, busy-looping at full CPU)
        loop_rate.sleep()

    velocity_message.linear.x = 0
    velocity_publisher.publish(velocity_message)
27 |
28 |
def rotate(velocity_publisher):
    """Spin in place at 1.5 until forward clearance exceeds MAX_DIST.

    Uses the module-global `distance` (updated by scan_callback), then
    publishes a zero angular velocity to stop the rotation.

    :param velocity_publisher: rospy Publisher of Twist on cmd_vel
    """
    MAX_DIST = 2
    global distance

    velocity_message = Twist()

    loop_rate = rospy.Rate(10)
    while distance < MAX_DIST:

        linear_speed = 0
        angular_speed = 1.5
        velocity_message.linear.x = linear_speed
        velocity_message.angular.z = angular_speed
        velocity_publisher.publish(velocity_message)
        # honour the 10 Hz rate (the original never slept in this loop)
        loop_rate.sleep()

    # bug fix: the original zeroed angular.x (which it never set) instead of
    # angular.z, so the robot kept spinning after the loop exited
    velocity_message.angular.z = 0
    velocity_publisher.publish(velocity_message)
46 |
47 |
def autonomous_robot(velocity_publisher):
    """Endless proportional controller: fast when clear, turning when close.

    linear.x grows with the forward clearance, angular.z grows with its
    inverse, so the robot slows and turns as it approaches obstacles.

    :param velocity_publisher: rospy Publisher of Twist on cmd_vel
    """
    global distance

    velocity_message = Twist()

    loop_rate = rospy.Rate(10)
    while True:
        K_linear = 0.3
        K_angular = 0.5

        # guard against a zero reading to avoid ZeroDivisionError
        clearance = distance if distance > 0 else 1e-6
        linear_speed = clearance * K_linear
        angular_speed = 1 / clearance * K_angular

        velocity_message.linear.x = linear_speed
        velocity_message.angular.z = angular_speed
        velocity_publisher.publish(velocity_message)
        # honour the 10 Hz rate (the original busy-looped without sleeping)
        loop_rate.sleep()
64 |
65 |
def scan_callback(scan_data):
    """Subscriber callback for /scan: refresh the global forward distance."""
    global distance
    # forward sector: 10 beams on either side of the heading
    MIN_ANGLE = -10
    MAX_ANGLE = 10

    distance = get_distance(scan_data.ranges, MIN_ANGLE, MAX_ANGLE)
    print("minumum distance: ", distance)
74 |
75 |
def get_distance(ranges, min_angle, max_angle):
    """Return the minimum finite range within the [min_angle, max_angle) beams.

    Beam indices address `ranges` directly: non-negative indices count from
    the start of the sequence, negative ones from the end (wrapping around,
    as for a laser whose index 0 points forward).

    :param ranges: sequence of range readings (list or tuple, may contain NaN)
    :param min_angle: first beam index, may be negative
    :param max_angle: one past the last beam index
    :return: smallest NaN-free reading in the sector
    :raises ValueError: if the sector contains no finite reading
    """
    if min_angle < 0:
        # bug fix: the original used `ranges[min_angle:0]`, which is always
        # an empty slice, so the wrap-around (rear) part was silently dropped
        sector = list(ranges[0:max_angle]) + list(ranges[min_angle:])
    else:
        sector = list(ranges[min_angle:max_angle])

    # discard NaN readings before taking the minimum
    finite_readings = [r for r in sector if not math.isnan(r)]

    return min(finite_readings)
85 |
86 |
if __name__ == "__main__":

    # init new a node and give it a name
    rospy.init_node("scan_node", anonymous=True)
    # subscribe to the topic /scan.
    rospy.Subscriber("scan", LaserScan, scan_callback)
    velocity_publisher = rospy.Publisher("cmd_vel", Twist, queue_size=10)

    # time.sleep(2)
    # run the proportional controller forever
    autonomous_robot(velocity_publisher)

    rospy.spin()
99 |
--------------------------------------------------------------------------------
/src/mhered/src/sandbox.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import math
4 |
5 |
def phi(L1, L2, alpha):
    """Angle of the wall (degrees) from two range readings.

    L1 and L2 are two distances measured with an angular separation of
    alpha (radians); the third side L3 closes the triangle via the law of
    cosines.  By convention, when the wall is on the left, phi > 0 means
    the wall tilts inwards (counter-clockwise).
    """
    # close the triangle (law of cosines)
    L3 = math.sqrt(L1**2 + L2**2 - 2 * L1 * L2 * math.cos(alpha))

    # rounding dodges tiny numerical overshoots outside acos's domain
    cos_phi = L2 * math.sin(alpha) / L3
    magnitude = math.degrees(math.acos(round(cos_phi, 8)))

    # sign convention: positive (ccw, inwards) when L2 exceeds L1/cos(alpha)
    return magnitude if L2 > L1 / math.cos(alpha) else -magnitude
19 |
20 |
if __name__ == "__main__":

    # regression cases: (r, r_alpha, alpha) -> expected phi in degrees
    test_cases = [
        {
            "r": 1.0,
            "r_alpha": 0.5 * math.sqrt(2),
            "alpha": math.radians(45),
            "phi_result": -45.0,
        },
        {
            "r": 0.6,
            "r_alpha": 0.6 / math.cos(math.radians(30.0)),
            "alpha": math.radians(30.0),
            "phi_result": 0.0,
        },
        {
            "r": 2.0,
            "r_alpha": 2.0 * math.cos(math.radians(30.0)),
            "alpha": math.radians(30.0),
            "phi_result": -30.0,
        },
    ]

    for item in test_cases:
        phi_i = phi(item["r"], item["r_alpha"], item["alpha"])
        print(f"phi={repr(phi_i)}")
        should_be_msg = f"should be {item['phi_result']}"
        assert round(phi_i, 5) == item["phi_result"], should_be_msg

    # exploratory values below (printed, not asserted)

    # at end of wall? -> rotate 78deg
    print(phi(0.6, 5, math.radians(10)))

    # keep straight
    print(phi(0.6, 0.60926, math.radians(10)))

    # 5deg inwards
    print(phi(0.6, 0.6188, math.radians(10)))

    # 5 deg outwards
    print(phi(0.6, 0.6, math.radians(10)))

    # 10deg inwards
    print(phi(0.6, 0.629, math.radians(10)))

    # 10 deg outwards
    print(phi(0.6, 0.5906, math.radians(10)))
68 |
--------------------------------------------------------------------------------
/src/mhered/src/scan_subscriber.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import datetime
4 |
5 | import matplotlib.pyplot as plt
6 | import numpy as np
7 | import rospy
8 | from sensor_msgs.msg import LaserScan
9 |
10 | """
11 | definition of sensor_msgs.msg/LaserScan:
12 |
13 | std_msgs/Header header
14 | uint32 seq
15 | time stamp
16 | string frame_id
17 | float32 angle_min
18 | float32 angle_max
19 | float32 angle_increment
20 | float32 time_increment
21 | float32 scan_time
22 | float32 range_min
23 | float32 range_max
24 | float32[] ranges
25 | float32[] intensities
26 | """
27 |
28 |
def scan_callback(message):
    """Plot one LaserScan as range-vs-angle on the module-global line/axes.

    :param message: sensor_msgs/LaserScan
    """
    # reconstruct the message time (secs + nanosecs) as an ISO string
    # (formatted_msg is currently unused; the loginfo call is commented out)
    timestamp = message.header.stamp
    secs = timestamp.secs
    nsecs = timestamp.nsecs
    time_in_secs = secs + (nsecs / 1000000000)
    formatted_time = datetime.datetime.fromtimestamp(time_in_secs).isoformat()
    formatted_msg = f"Scan received on {formatted_time}"
    # get_caller_id(): Get fully resolved name of local node
    # rospy.loginfo(rospy.get_caller_id() + "\n" + formatted_msg)

    global axes, line

    ranges = np.asarray(message.ranges)
    # beam angles in degrees from the scan metadata
    angles = (
        180
        / np.pi
        * (message.angle_min + message.angle_increment * np.asarray(range(len(ranges))))
    )

    # this to shift angles >180 from [180 - 360) to [-180, 0)
    # angles = np.array(angles)

    angles_shifted = np.where(angles > 180, angles - 360, angles)

    # sort both arrays by angle (descending)
    angles_indeces = angles_shifted.argsort()
    angles_sorted = angles_shifted[angles_indeces[::-1]]
    ranges_sorted = ranges[angles_indeces[::-1]]

    xdata = angles_sorted
    ydata = ranges_sorted

    # update plot
    line.set_data(xdata, ydata)

    # adjust axes limits to dataset + margin (in percentage)
    margin = 0.05
    axes.set_xlim(min(xdata) * (1 - margin), max(xdata) * (1 + margin))
    # remove Inf and NaN values from ydata to compute axes limits
    # deep copy to avoid modifying ydata
    clean_ydata = ydata.copy()
    clean_ydata[~np.isfinite(clean_ydata)] = message.range_max
    axes.set_ylim(min(clean_ydata) * (1 - margin), max(clean_ydata) * (1 + margin))
71 |
72 |
def scan_subscriber():
    """Init the node, subscribe to /scan, and block on the matplotlib GUI."""
    # init new node
    rospy.init_node("scan_subscriber_node", anonymous=True)
    # subscribe to topic /scan
    rospy.Subscriber("scan", LaserScan, scan_callback)

    # spin() keeps python from exiting until this node is stopped
    # rospy.spin()

    # replaced by this as seen here:
    # https://stackoverflow.com/questions/35145555/python-real-time-plotting-ros-data
    plt.show(block=True)
85 |
86 |
if __name__ == "__main__":

    # turn on interactive mode
    plt.ion()
    # create a figure and axes
    global axes, line
    fig, axes = plt.subplots()
    # empty red line; data is filled in later by scan_callback
    (line,) = axes.plot([], "r-")
    plt.show()

    scan_subscriber()
98 |
--------------------------------------------------------------------------------
/src/mhered/src/testcv2.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import cv2
4 | import numpy as np
5 |
# image location and window title
image_name = "shapes.png"
title = image_name + " Image"
image_path = "/home/mhered/catkin_ws/src/mhered/src/images/"

# read image
img = cv2.imread(image_path + image_name)

# create a window
cv2.namedWindow(title, cv2.WINDOW_NORMAL)

# show image
cv2.imshow(title, img)

# wait for key press then close
cv2.waitKey(0)
cv2.destroyAllWindows()

# create a copy in copies folder (folder must exist)
copy_name = "copy-" + image_name
copy_full_path = image_path + "copies/" + copy_name

# print("Writing image ", copy_full_path)
cv2.imwrite(copy_full_path, img)

img_copy = cv2.imread(copy_full_path)

title_copy = copy_name + " Image"

# create a window
cv2.namedWindow(title_copy, cv2.WINDOW_NORMAL)

# show image (bug fix: display the re-read copy, not the original image —
# the original showed `img`, so the round-trip was never verified visually)
cv2.imshow(title_copy, img_copy)

# wait for key press
cv2.waitKey(0)

# read image as BGR and show the three channels side by side
img_color = cv2.imread(image_path + image_name, cv2.IMREAD_COLOR)
blue, green, red = cv2.split(img_color)
bgr_channels_img = np.concatenate((blue, green, red), axis=1)
cv2.imshow("BGR channels", bgr_channels_img)

# convert to HSV and show the three channels side by side
img_hsv = cv2.cvtColor(img_color, cv2.COLOR_BGR2HSV)
h, s, v = cv2.split(img_hsv)
hsv_channels_img = np.concatenate((h, s, v), axis=1)
cv2.imshow("HSV channels", hsv_channels_img)

# wait for key press then close
cv2.waitKey(0)
cv2.destroyAllWindows()
59 |
--------------------------------------------------------------------------------
/src/mhered/src/tf_broadcaster.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python3
2 |
3 | import rospy
4 | import tf
5 | from turtlesim.msg import Pose
6 |
7 |
def pose_callback(pose_msg, turtlename):
    """Re-publish a turtlesim Pose as the tf transform <turtlename>_frame -> world.

    :param pose_msg: turtlesim/Pose with fields x, y, theta
    :param turtlename: turtle name, used to build the child frame id
    """
    # create a transform broadcaster
    # NOTE(review): constructed anew on every callback; a single module-level
    # broadcaster would likely suffice — confirm before changing
    transform_broadcaster = tf.TransformBroadcaster()

    # orientation quaternion from the planar heading theta of the Pose msg
    rotation_quaternion = tf.transformations.quaternion_from_euler(0, 0, pose_msg.theta)

    # assemble translation vector from 2D Pose msg (z = 0, planar robot)
    translation_vector = [pose_msg.x, pose_msg.y, 0]

    # obtain time stamp
    time_stamp = rospy.Time.now()

    # publish transform between turtle frame and world
    # args: (translation, quaternion, time stamp, child frame, parent frame)
    transform_broadcaster.sendTransform(
        translation_vector,
        rotation_quaternion,
        time_stamp,
        turtlename + "_frame",
        "world",
    )
31 |
32 |
if __name__ == "__main__":

    # init code
    rospy.init_node("turtle_tf_broadcaster")

    # get param from launch file (private ~turtle parameter)
    turtlename = rospy.get_param("~turtle")

    # subscribe to the Pose topic; turtlename is passed as callback arg
    rospy.Subscriber("/%s/pose" % turtlename, Pose, pose_callback, turtlename)
    rospy.spin()
44 |
--------------------------------------------------------------------------------
/src/mhered/src/tf_listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import math
4 |
5 | import geometry_msgs.msg
6 | import rospy
7 | import tf
8 | import turtlesim.msg
9 | import turtlesim.srv
10 |
if __name__ == "__main__":

    # init node
    rospy.init_node("turtle_tf_listener")

    # create a new transform listener
    transform_listener = tf.TransformListener()

    # spawn the follower turtlesim at (4, 2) with heading 0
    rospy.wait_for_service("spawn")
    spawner = rospy.ServiceProxy("spawn", turtlesim.srv.Spawn)
    spawner(4, 2, 0, "turtle_follower")

    turtle_follower_vel = rospy.Publisher(
        "turtle_follower/cmd_vel", geometry_msgs.msg.Twist, queue_size=1
    )

    rate = rospy.Rate(10.0)

    while not rospy.is_shutdown():
        try:
            # latest available transform of turtle1 relative to the follower
            (translation, rotation) = transform_listener.lookupTransform(
                "/turtle_follower_frame", "/turtle1_frame", rospy.Time(0)
            )
        except (
            tf.LookupException,
            tf.ConnectivityException,
            tf.ExtrapolationException,
        ):
            # transform not available yet: retry on the next iteration
            # NOTE(review): this path skips rate.sleep(), so it busy-loops
            # until tf data appears — confirm before changing
            continue

        # relative coordinates of follower in turtle1_frame
        x = translation[0]
        y = translation[1]

        # proportional controller: turn toward the target, speed with distance
        angular_vel = 4 * math.atan2(y, x)
        linear_vel = 0.5 * math.sqrt(x * x + y * y)

        cmd = geometry_msgs.msg.Twist()
        cmd.linear.x = linear_vel
        cmd.angular.z = angular_vel

        turtle_follower_vel.publish(cmd)

        rate.sleep()
56 |
--------------------------------------------------------------------------------
/src/mhered/src/turtlebot2.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import math
4 | import time
5 |
6 | import rospy
7 | from geometry_msgs.msg import Twist
8 | from std_srvs.srv import Empty
9 | from turtlesim.msg import Pose
10 |
# from clean import move, rotate, pose_callback

# current turtle pose, constantly refreshed by pose_callback()
x = 0.0
y = 0.0
yaw = 0.0
16 |
17 |
def pose_callback(pose_message):
    """Pose callback method

    Mirrors the turtle's pose into the module globals x, y, yaw.

    :param pose_message: turtlesim/Pose with fields x, y, theta
    """
    global x, y, yaw
    x = pose_message.x
    y = pose_message.y
    yaw = pose_message.theta
24 |
25 |
def move(velocity_publisher, speed, distance, is_forward):
    """Straight motion method

    Drives straight until the distance from the start point (from the pose
    globals) exceeds `distance`, then publishes a zero velocity.

    :param velocity_publisher: rospy Publisher on the turtle's cmd_vel
    :param speed: linear speed magnitude
    :param distance: distance to travel before stopping
    :param is_forward: True drives forward, False backwards
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    # get current location from the global variable before entering the loop
    global x, y
    # save initial coordinates
    x0 = x
    y0 = y

    if is_forward:
        velocity_message.linear.x = abs(speed)
    else:
        velocity_message.linear.x = -abs(speed)

    distance_moved = 0.0
    loop_rate = rospy.Rate(50)  # we publish the velocity at 50 Hz (50 times per second)

    print(velocity_message)
    rospy.loginfo("Straight motion")

    while True:

        velocity_publisher.publish(velocity_message)
        print(velocity_message)
        loop_rate.sleep()

        # straight-line distance from the start point; x, y are updated in
        # the background by pose_callback()
        distance_moved = abs(math.sqrt((x - x0) ** 2 + (y - y0) ** 2))
        print(
            f"Distance moved: {distance_moved:10.4} Pose: ({x:8.4}, {y:8.4}, {yaw:8.4})"
        )
        if distance_moved > distance:
            rospy.loginfo("Distance reached")
            break

    # stop the robot after the distance is reached
    velocity_message.linear.x = 0.0
    velocity_publisher.publish(velocity_message)
66 |
67 |
def rotate(velocity_publisher, omega_degrees, angle_degrees, is_clockwise):
    """Rotation in place method

    Open-loop: commands a constant angular speed and estimates the angle
    travelled from elapsed time (no odometry feedback).

    :param velocity_publisher: rospy Publisher on the turtle's cmd_vel
    :param omega_degrees: angular speed magnitude, degrees/s
    :param angle_degrees: target angle, degrees
    :param is_clockwise: True rotates clockwise (negative z)
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    omega = math.radians(omega_degrees)

    if is_clockwise:
        velocity_message.angular.z = -abs(omega)
    else:
        velocity_message.angular.z = abs(omega)

    loop_rate = rospy.Rate(50)  # we publish the velocity at 50 Hz (50 times per second)

    rospy.loginfo("Rotation in place")

    # get initial timestamp
    t0 = rospy.Time.now().to_sec()

    while True:

        velocity_publisher.publish(velocity_message)

        # get current timestamp and estimate the yaw travelled so far
        t1 = rospy.Time.now().to_sec()
        curr_yaw_degrees = (t1 - t0) * omega_degrees
        loop_rate.sleep()

        print(
            f"Angle rotated: {curr_yaw_degrees:10.4} Pose: ({x:8.4}, {y:8.4}, {yaw:8.4})"
        )
        if not (curr_yaw_degrees < angle_degrees):
            rospy.loginfo("Angle reached")
            break

    # stop the robot after the angle is reached
    velocity_message.angular.z = 0.0
    velocity_publisher.publish(velocity_message)
107 |
108 |
def set_yaw(velocity_publisher, orientation_degrees):
    """Set absolute orientation method

    Rotates the turtle (at 15 deg/s, via rotate()) to the requested absolute
    heading, choosing the shortest direction of rotation.

    :param velocity_publisher: rospy Publisher on the turtle's cmd_vel
    :param orientation_degrees: target absolute heading, degrees
    """

    # get current heading from the global variable (updated by pose_callback)
    global yaw

    yaw_degrees = math.degrees(yaw)

    # subtract angles, wrapping result to [-180, 180]
    angle_degrees = ((orientation_degrees - yaw_degrees + 180) % 360) - 180

    # rotate towards smallest angle difference
    is_clockwise = angle_degrees < 0

    rotate(velocity_publisher, 15, abs(angle_degrees), is_clockwise)
    rospy.loginfo(f"Orientation set to {orientation_degrees}")
131 |
132 |
def go_to(velocity_publisher, goal):
    """Go to goal method

    Proportional controller on distance and heading error; loops until the
    turtle is within THRESHOLD of the goal, then stops.

    :param velocity_publisher: rospy Publisher on the turtle's cmd_vel
    :param goal: (x_goal, y_goal) tuple in turtlesim coordinates
    """

    # declare a Twist message to send velocity commands
    velocity_message = Twist()

    # use current location from the global variable (constantly updated by pose_callback())
    global x, y, yaw

    x_goal = goal[0]
    y_goal = goal[1]

    # stop distance and proportional gains
    THRESHOLD = 0.1
    K_DISTANCE = 0.6
    K_ANGLE = 15

    loop_rate = rospy.Rate(50)  # we publish the velocity at 50 Hz (50 times per second)

    rospy.loginfo(f"Go to goal: {goal}")

    while True:

        distance_to_goal = abs(math.sqrt(((x_goal - x) ** 2) + ((y_goal - y) ** 2)))
        angle_to_goal = math.atan2(y_goal - y, x_goal - x)

        print(
            f"Distance to goal: {distance_to_goal:10.4} Pose: ({x:8.4}, {y:8.4}, {yaw:8.4})"
        )

        if distance_to_goal < THRESHOLD:
            rospy.loginfo("Goal reached")
            break

        # NOTE(review): the heading error is not wrapped to [-pi, pi], so
        # the turn can take the long way round — confirm before changing
        velocity_message.linear.x = K_DISTANCE * distance_to_goal
        velocity_message.angular.z = K_ANGLE * (angle_to_goal - yaw)

        velocity_publisher.publish(velocity_message)
        loop_rate.sleep()

    # stop the robot after the distance is reached
    velocity_message.linear.x = 0.0
    velocity_message.angular.z = 0.0
    velocity_publisher.publish(velocity_message)
176 |
177 |
if __name__ == "__main__":
    try:
        rospy.init_node("my_turtle_pose_node", anonymous=True)

        # declare velocity publisher
        cmd_vel_topic = "/turtle1/cmd_vel"
        velocity_publisher = rospy.Publisher(cmd_vel_topic, Twist, queue_size=10)

        # declare pose subscriber
        pose_topic = "/turtle1/pose"
        pose_subscriber = rospy.Subscriber(pose_topic, Pose, pose_callback)

        time.sleep(1.0)  # needed otherwise move() is skipped
        # funny rotate() and go_to() dont need it
        # demo sequence: straight, go-to-goal, rotate in place
        move(velocity_publisher, 0.3, 1.0, True)
        time.sleep(1.0)
        go_to(velocity_publisher, (5, 5))
        time.sleep(1.0)
        rotate(velocity_publisher, 90, 90, True)

        rospy.spin()

    except rospy.ROSInterruptException:
        rospy.loginfo("node terminated.")
202 |
--------------------------------------------------------------------------------
/src/mhered/src/utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """
3 | Auxiliary functions for bug0, bug1, bug2 robot algorithms
4 | get_t_from_stamp()
5 | robot_coordinates()
6 | get_clearance()
7 | wrap_to_180()
8 | find_nearest()
9 | """
10 |
11 |
12 | import math
13 | import time
14 |
15 | import numpy as np # pylint: disable=E0401
16 |
17 |
def get_t_from_stamp(stamp):
    """
    Convert a Header timestamp (fields stamp.secs / stamp.nsecs)
    into a single time value expressed in seconds.
    """
    whole, frac = stamp.secs, stamp.nsecs
    # 1 s = 10**9 nanoseconds
    return whole + frac / 1e9
29 |
30 |
def robot_coordinates(x_goal, y_goal, x, y, yaw):
    """
    Return the relative distance and bearing (in degrees, wrapped to
    [-180, 180)) of a goal (x_goal, y_goal) as seen from a robot at
    pose (x, y, yaw).
    """
    dx = x_goal - x
    dy = y_goal - y

    distance = math.sqrt(dx**2 + dy**2)
    # bearing in the robot frame = global direction minus robot heading
    bearing = math.atan2(dy, dx) - yaw

    return distance, wrap_to_180(math.degrees(bearing))
46 |
47 |
def get_clearance(angles, ranges, beam_dir, beam_aperture):
    """
    Take an array of ranges measured at specified angles and return the
    clearance (mean range) inside a beam centred on ``beam_dir`` with
    half-width ``beam_aperture``.

    Note: angles, beam_dir, beam_aperture must all be in consistent units
    (i.e. all in degrees or all in radians)

    Returns NaN when no measurement falls inside the beam.
    """

    max_angle = beam_dir + beam_aperture
    min_angle = beam_dir - beam_aperture

    ranges_of_interest = [
        r for (a, r) in zip(angles, ranges) if min_angle < a < max_angle
    ]

    # np.mean([]) returns nan but also emits a RuntimeWarning;
    # make the empty-beam case explicit and silent instead
    if not ranges_of_interest:
        return float("nan")

    clearance = np.mean(ranges_of_interest)

    return clearance
66 |
67 |
def wrap_to_180(angle):
    """
    Wrap an angle in degrees into the interval [-180, 180).

    Works on scalars as well as numpy arrays (elementwise).
    """
    # shift by 180, wrap into [0, 360), shift back
    return (angle + 180) % 360 - 180
74 |
75 |
def find_nearest(array, value):
    """
    Given a monotonically increasing `array` and a `value`, return an
    index j such that `value` lies between array[j] and array[j+1].
    j = -1 or j = len(array) signal that `value` is out of range below
    or above respectively.
    Source: https://stackoverflow.com/a/41856629/15472802
    """

    last = len(array) - 1

    # out of range below ...
    if value < array[0]:
        return -1
    # ... and above
    if value > array[last]:
        return last + 1

    # exact hit on either end
    if value == array[0]:
        return 0
    if value == array[last]:
        return last

    # binary search: shrink [lo, hi] until the bounds are adjacent,
    # keeping array[lo] <= value < array[hi] as the invariant
    lo, hi = 0, last
    while hi - lo > 1:
        mid = (lo + hi) // 2
        if value >= array[mid]:
            lo = mid
        else:
            hi = mid
    return lo
113 |
114 |
def main():
    """
    Ad-hoc smoke tests for find_nearest() and wrap_to_180() (WIP)
    """
    ANGLE = -183.7

    angles = np.asarray(range(-180, 180, 1))
    ranges = angles / 10.0

    # time a single lookup
    # NOTE(review): ANGLE is below the array range, so find_nearest()
    # returns -1 and angles[ind] silently prints the LAST element —
    # confirm whether that is the intended test case
    t0 = time.time()
    ind = find_nearest(angles, ANGLE)
    dt = time.time() - t0
    print(
        f"dt={dt*1e9:.1f}nsecs - "
        + f"find_nearest() - range at {angles[ind]:.2f}:"
        + f" {ranges[ind]:.2f}"
    )

    # (input angle, expected wrapped angle) pairs
    test_cases = [
        (181.0, -179.0),
        (0.0, 0.0),
        (360.0, 0.0),
        (-181.0, 179.0),
        (-7.0 * 360.0 + 27.0, 27.0),
        (11.0 * 360.0 - 53.0, -53.0),
        (
            np.asarray(range(0, 360, 30)),
            np.asarray(
                [
                    0.0,
                    30.0,
                    60.0,
                    90.0,
                    120.0,
                    150.0,
                    -180.0,
                    -150.0,
                    -120.0,
                    -90.0,
                    -60.0,
                    -30.0,
                ]
            ),
        ),
    ]

    for raw_angle, expected in test_cases:
        wrapped = wrap_to_180(raw_angle)
        print(f"angle = {raw_angle} --> wrapped = {repr(wrapped)}")
        assert (np.round(wrapped, 5) == expected).all(), f"should be {expected}"

    print(wrap_to_180(np.asarray(range(0, 360, 30))))


if __name__ == "__main__":
    main()
192 |
--------------------------------------------------------------------------------
/src/mhered/srv/AddTwoInts.srv:
--------------------------------------------------------------------------------
1 | int64 a
2 | int64 b
3 | ---
4 | int64 sum
5 |
--------------------------------------------------------------------------------
/src/my_husky.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/src/ros_best_practices:
--------------------------------------------------------------------------------
1 | /home/mhered/git/ros_best_practices/
--------------------------------------------------------------------------------
/src/ros_course_part2/README.md:
--------------------------------------------------------------------------------
1 | # ros_course_part2
2 | This is the repository of the Udemy course on navigation
3 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic01_quaternion/tf_orientation_tb3_robot.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # this is a simple program that subscribes to the odom topic and gets the position and orientation of the robot
4 | import math
5 |
6 | import rospy
7 | import tf
8 | from nav_msgs.msg import Odometry
9 |
10 |
def odomPoseCallback(odom_msg):
    """
    Callback for the odom topic: prints the robot's position, velocity
    and orientation (both as a quaternion and as roll-pitch-yaw).
    """
    pose = odom_msg.pose.pose
    twist = odom_msg.twist.twist

    print("odom pose callback")
    # position of the robot
    print("x = ", pose.position.x)
    print("y = ", pose.position.y)
    # velocity of the robot
    print("vx = ", twist.linear.x)
    print("vz = ", twist.angular.z)

    # orientation of the robot as a quaternion
    q = pose.orientation
    print("qx=", q.x)
    print("qy=", q.y)
    print("qz=", q.z)
    print("qw=", q.w)

    # convert the quaternion to roll-pitch-yaw and unpack the angles
    roll, pitch, yaw = tf.transformations.euler_from_quaternion(
        (q.x, q.y, q.z, q.w)
    )

    # print the roll, pitch and yaw in degrees
    print(math.degrees(roll), " ", math.degrees(pitch), " ", math.degrees(yaw))
    print("the orientation of the robot is: ", math.degrees(yaw))
48 |
49 |
if __name__ == "__main__":
    try:
        # init the node
        rospy.init_node("turtlesim_motion_pose", anonymous=True)

        # hand every /odom message to the callback above
        pose_subscriber = rospy.Subscriber("/odom", Odometry, odomPoseCallback)

        # keep the node alive until shutdown
        rospy.spin()

    except rospy.ROSInterruptException:
        rospy.loginfo("node terminated.")
63 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic01_quaternion/tf_rotation_conversions.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | # this script converts between Roll-Pitch-Yaw angles and Quaternions
4 |
5 | import math
6 |
7 | import rospy
8 | import tf
9 | from geometry_msgs.msg import Twist
10 | from nav_msgs.msg import Odometry
11 |
12 | print("-----------------------------------------")
13 | print("Roll-Pitch-Yaw Conversion to Quaternions")
14 | print("-----------------------------------------")
15 | # we define some random angles roll, pitch and yaw in radians
16 | roll = math.radians(30)
17 | pitch = math.radians(42)
18 | yaw = math.radians(58)
19 |
20 | # we print these three angles
21 | print(
22 | "roll = ",
23 | math.degrees(roll),
24 | "pitch = ",
25 | math.degrees(pitch),
26 | "yaw = ",
27 | math.degrees(yaw),
28 | )
29 |
30 | # convert the roll-pitch-yaw angles to a quaternion using ROS TF Library
31 | quaternion = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
32 | print("-----------------------------------------")
33 | print('The resulting quaternion using "quaternion_from_euler" function: ')
34 | for i in range(4):
35 | print(quaternion[i])
36 |
37 |
38 | # quaternion[0] = -3.88256895463e-06
39 | # quaternion[1] = 0.0015896463485
40 | # quaternion[2] = 0.001397167245
41 | # quaternion[3] = 0.999997760464
42 |
43 | # perform the opposite converion
44 | rpy = tf.transformations.euler_from_quaternion(quaternion)
45 | roll_from_quaternion = rpy[0]
46 | pitch_from_quaternion = rpy[1]
47 | yaw_from_quaternion = rpy[2]
48 |
49 | print("-----------------------------------------")
50 | print('convert back to roll-pitch-yaw using "euler_from_quaternion" function: ')
51 | print(
52 | "roll = ",
53 | math.degrees(roll_from_quaternion),
54 | "pitch = ",
55 | math.degrees(pitch_from_quaternion),
56 | "yaw = ",
57 | math.degrees(yaw_from_quaternion),
58 | )
59 | print("we get the same initial roll-picth-roll values")
60 | print("-----------------------------------------")
61 | # define a new quaternion
62 | print("define a new quaternion manually as a list")
63 | q = (-3.88256895463e-06, 0.0015896463485, 0.001397167245, 0.0)
64 |
65 | # perform the opposite converion
66 | rpy = tf.transformations.euler_from_quaternion(q)
67 | roll_from_quaternion = rpy[0]
68 | pitch_from_quaternion = rpy[1]
69 | yaw_from_quaternion = rpy[2]
70 | print("-----------------------------------------")
71 | print('convert back to roll-pitch-yaw using "euler_from_quaternion" function: ')
72 | print(
73 | "roll = ",
74 | math.degrees(roll_from_quaternion),
75 | "pitch = ",
76 | math.degrees(pitch_from_quaternion),
77 | "yaw = ",
78 | math.degrees(yaw_from_quaternion),
79 | )
80 | print("-----------------------------------------")
81 | print("you can change these values in the file to make other converions")
82 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/launch/rosrun tf static_transform_publisher command example:
--------------------------------------------------------------------------------
1 | rosrun tf static_transform_publisher 1 2 3 0.1 0.2 0.3 frame_a frame_b 10
2 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/launch/static_transform_publisher.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/transform_publisher/frame_a_to_frame_b_broadcaster.cpp:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 |
4 | int main(int argc, char** argv){
5 | ros::init(argc, argv, "frame_a_to_frame_b_broadcaster");
6 | ros::NodeHandle node;
7 |
8 | tf::TransformBroadcaster br;
9 | tf::Transform transform;
10 |
11 | ros::Rate rate(2.0);
12 | while (node.ok()){
13 | transform.setOrigin( tf::Vector3(1.0, 2.0, 3.0) );
14 | transform.setRotation( tf::Quaternion(0, 0, 0, 1) );
15 | br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "frame_b", "frame_a"));
16 | rate.sleep();
17 | }
18 | return 0;
19 | };
20 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/transform_publisher/frame_a_to_frame_b_broadcaster.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import time
4 |
5 | import rospy
6 | import tf
7 |
if __name__ == "__main__":
    # init the node
    rospy.init_node("frame_a_frame_b_broadcaster_node")

    # give roscore / tf listeners a moment to come up
    time.sleep(2)

    # create a transformation broadcaster (publisher)
    transform_broadcaster = tf.TransformBroadcaster()

    # the transform is constant, so build it ONCE outside the loop
    # (the original recomputed the quaternion on every iteration)
    rotation_quaternion = tf.transformations.quaternion_from_euler(
        0.2, 0.3, 0.1
    )  # Roll Pitch Yaw
    translation_vector = (1.0, 2.0, 3.0)

    while not rospy.is_shutdown():

        # re-stamp the same transform with the current time and publish
        current_time = rospy.Time.now()

        transform_broadcaster.sendTransform(
            translation_vector, rotation_quaternion, current_time, "frame_b", "frame_a"
        )  # child frame, parent frame
        time.sleep(0.5)  # frequency: we publish every .5 sec i.e. 2Hz

    # returns immediately here since the loop only exits on shutdown
    rospy.spin()
36 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/transform_publisher/frame_a_to_frame_b_listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import math
3 |
4 | import geometry_msgs.msg
5 | import rospy
6 | import tf
7 | import turtlesim.srv
8 |
if __name__ == "__main__":
    rospy.init_node("frame_a_frame_b_listener_node")

    # listener that buffers incoming /tf messages
    listener = tf.TransformListener()
    rate = rospy.Rate(1.0)

    # block until the transform is first available (up to 4 s)
    listener.waitForTransform("/frame_a", "/frame_b", rospy.Time(), rospy.Duration(4.0))

    while not rospy.is_shutdown():
        try:
            # latest available transform from frame_a to frame_b
            (trans, rot) = listener.lookupTransform(
                "/frame_a", "/frame_b", rospy.Time(0)
            )
        except (
            tf.LookupException,
            tf.ConnectivityException,
            tf.ExtrapolationException,
        ):
            # transform not available this cycle: try again
            continue

        # convert the rotation quaternion to roll-pitch-yaw for display
        rpy = tf.transformations.euler_from_quaternion(rot)
        print("transformation between frame_a and frame_b detected")
        print("translation vector: (", trans[0], ",", trans[1], ",", trans[2], ")")
        print("rotation angles: roll=", rpy[0], " pitch=", rpy[1], " yaw=", rpy[2])

        rate.sleep()
35 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/turtlesim/broadcast_transform.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import math
3 | import time
4 |
5 | import rospy
6 | import tf
7 |
if __name__ == "__main__":
    # init the node
    rospy.init_node("turtle_tf_broadcaster")

    # give roscore / tf listeners a moment to come up
    time.sleep(2)
    transform_broadcaster = tf.TransformBroadcaster()

    # the transform is constant, so build it once outside the loop
    # convert 90 degrees to quaternion
    rotation_quaternion = tf.transformations.quaternion_from_euler(
        0, 0, math.radians(90)
    )
    # translation vector
    translation_vector = (1.2, 1.3, 0)

    while not rospy.is_shutdown():

        # re-stamp and publish the same transform:
        # "robot_frame" is the child of "world"
        current_time = rospy.Time.now()

        transform_broadcaster.sendTransform(
            translation_vector,
            rotation_quaternion,
            current_time,
            "robot_frame",
            "world",
        )
        time.sleep(0.5)

    # NOTE: the file originally contained a second, near-identical
    # `if __name__ == "__main__":` block below this one; it was dead
    # duplicate code (a second init_node call raises, and its
    # `while True` loop ran past shutdown), so it has been removed.
    rospy.spin()
70 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/turtlesim/launch/start_demo.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/turtlesim/launch/tf_turtlesim_broadcast.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/turtlesim/turtle_tf_broadcaster.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | import tf
5 | from turtlesim.msg import Pose
6 |
7 |
# single broadcaster shared by all callbacks: constructing a new
# TransformBroadcaster per message registers a fresh /tf publisher every
# time, which leaks publishers and can drop messages while they connect
_transform_broadcaster = None


def pose_callback(msg, turtlename):
    """
    Pose callback: re-publish the turtle's pose (x, y, theta) as a tf
    transform from parent frame "world" to child "<turtlename>_frame".
    """
    global _transform_broadcaster
    if _transform_broadcaster is None:
        _transform_broadcaster = tf.TransformBroadcaster()

    # turtlesim poses are planar: yaw only, z = 0
    rotation_quaternion = tf.transformations.quaternion_from_euler(0, 0, msg.theta)
    # translation vector
    translation_vector = (msg.x, msg.y, 0)
    # stamp with the current time
    current_time = rospy.Time.now()

    _transform_broadcaster.sendTransform(
        translation_vector,
        rotation_quaternion,
        current_time,
        turtlename + "_frame",
        "world",
    )
25 |
26 |
if __name__ == "__main__":
    # init the node
    rospy.init_node("turtle_tf_broadcaster")

    # which turtle to track, read from a private node parameter
    turtlename = rospy.get_param("~turtle")

    # forward every Pose on /<turtle>/pose to the broadcaster callback
    rospy.Subscriber("/%s/pose" % turtlename, Pose, pose_callback, turtlename)
    rospy.spin()
35 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic02_tf_tutorials/turtlesim/turtle_tf_listener.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import math
3 |
4 | import geometry_msgs.msg
5 | import roslib
6 |
7 | # roslib.load_manifest('learning_tf')
8 | import rospy
9 | import tf
10 | import turtlesim.srv
11 |
if __name__ == "__main__":
    # init node
    rospy.init_node("turtle_tf_listener")

    # listener that buffers the /tf frames broadcast for each turtle
    transform_listener = tf.TransformListener()

    # spawn a second turtle at (4, 2) via the turtlesim Spawn service
    rospy.wait_for_service("spawn")
    spawner = rospy.ServiceProxy("spawn", turtlesim.srv.Spawn)
    spawner(4, 2, 0, "turtle2")

    # velocity commands for the follower turtle
    turtle_follower_velocity = rospy.Publisher(
        "turtle2/cmd_vel", geometry_msgs.msg.Twist, queue_size=1
    )

    rate = rospy.Rate(10.0)
    while not rospy.is_shutdown():
        try:
            # pose of turtle1 expressed in the follower's frame
            (translation, rotation) = transform_listener.lookupTransform(
                "/turtle2_frame", "/turtle1_frame", rospy.Time(0)
            )
        except (
            tf.LookupException,
            tf.ConnectivityException,
            tf.ExtrapolationException,
        ):
            # frames not available yet: try again next cycle
            continue

        dx = translation[0]
        dy = translation[1]

        # proportional controller: turn towards turtle1 ...
        angular = 4 * math.atan2(dy, dx)
        # ... and drive forward proportionally to the distance
        linear = 0.5 * math.sqrt(dx**2 + dy**2)

        cmd = geometry_msgs.msg.Twist()
        cmd.linear.x = linear
        cmd.angular.z = angular
        turtle_follower_velocity.publish(cmd)

        rate.sleep()
58 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic03_map_navigation/navigate_goal.cpp:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 | #include
4 |
5 | typedef actionlib::SimpleActionClient MoveBaseClient;
6 |
7 | int main(int argc, char** argv){
8 | ros::init(argc, argv, "simple_navigation_goals");
9 |
10 | //tell the action client that we want to spin a thread by default
11 | MoveBaseClient ac("move_base", true);
12 |
13 | //wait for the action server to come up
14 | while(!ac.waitForServer(ros::Duration(5.0))){
15 | ROS_INFO("Waiting for the move_base action server to come up");
16 | }
17 |
18 | move_base_msgs::MoveBaseGoal goal;
19 |
20 | //we'll send a goal to the robot to move 1 meter forward
21 | goal.target_pose.header.frame_id = "base_link";
22 | goal.target_pose.header.stamp = ros::Time::now();
23 |
24 | goal.target_pose.pose.position.x = 1.0;
25 | goal.target_pose.pose.orientation.w = 1.0;
26 |
27 | ROS_INFO("Sending goal");
28 | ac.sendGoal(goal);
29 |
30 | ac.waitForResult();
31 |
32 | if(ac.getState() == actionlib::SimpleClientGoalState::SUCCEEDED)
33 | ROS_INFO("Hooray, the base moved 1 meter forward");
34 | else
35 | ROS_INFO("The base failed to move forward 1 meter for some reason");
36 |
37 | return 0;
38 | }
39 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic03_map_navigation/navigate_goal.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from math import degrees, radians
3 |
4 | import actionlib
5 | import rospy
6 | from actionlib_msgs.msg import *
7 | from geometry_msgs.msg import Point
8 | from move_base_msgs.msg import MoveBaseAction, MoveBaseGoal
9 |
10 |
def move_to_goal(xGoal, yGoal):
    """
    Send a navigation goal (xGoal, yGoal) in the map frame to move_base
    and wait up to 60 s for the robot to reach it.

    Returns True if the goal was reached, False otherwise.
    """
    # action client used to send goal requests to the move_base server
    ac = actionlib.SimpleActionClient("move_base", MoveBaseAction)

    # block until the move_base action server is available
    while not ac.wait_for_server(rospy.Duration.from_sec(5.0)):
        rospy.loginfo("Waiting for the move_base action server to come up")

    # build the goal: target position in the map frame, identity orientation
    goal = MoveBaseGoal()
    goal.target_pose.header.frame_id = "map"
    goal.target_pose.header.stamp = rospy.Time.now()
    goal.target_pose.pose.position = Point(xGoal, yGoal, 0)
    goal.target_pose.pose.orientation.x = 0.0
    goal.target_pose.pose.orientation.y = 0.0
    goal.target_pose.pose.orientation.z = 0.0
    goal.target_pose.pose.orientation.w = 1.0

    rospy.loginfo("Sending goal location ...")
    ac.send_goal(goal)

    # give the robot up to 60 s to get there
    ac.wait_for_result(rospy.Duration(60))

    if ac.get_state() == GoalStatus.SUCCEEDED:
        rospy.loginfo("You have reached the destination")
        return True

    rospy.loginfo("The robot failed to reach the destination")
    return False
47 |
48 |
if __name__ == "__main__":
    rospy.init_node("map_navigation", anonymous=False)
    # hard-coded target picked from the demo map
    x_goal, y_goal = -2.02880191803, 4.02200937271
    print("start go to goal")
    move_to_goal(x_goal, y_goal)
    rospy.spin()
56 |
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic03_map_navigation/tb3map/tb3_house_map.pgm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mhered/ROS-notes/09ffa6c01ff763ca22140eee25c73ab659d51238/src/ros_course_part2/src/topic03_map_navigation/tb3map/tb3_house_map.pgm
--------------------------------------------------------------------------------
/src/ros_course_part2/src/topic03_map_navigation/tb3map/tb3_house_map.yaml:
--------------------------------------------------------------------------------
1 | image: /home/mhered/catkin_ws/src/ros_course_part2/src/topic03_map_navigation/tb3map/tb3_house_map.pgm
2 | resolution: 0.050000
3 | origin: [-10.000000, -10.000000, 0.000000]
4 | negate: 0
5 | occupied_thresh: 0.65
6 | free_thresh: 0.196
7 |
--------------------------------------------------------------------------------
/src/ros_service_assignment/src/area_rect_client.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 |
4 | import sys
5 |
6 | import rospy
7 |
8 | # import service definition
9 | from ros_service_assignment.srv import (
10 | RectangleAreaService,
11 | RectangleAreaServiceRequest,
12 | RectangleAreaServiceResponse,
13 | )
14 |
15 |
def area_rect_client(x, y):
    """
    Request the area of an x-by-y rectangle from the "area_rect" service.

    :param x: rectangle width
    :param y: rectangle height
    :return: the computed area, or None if the service call failed
    """
    # keep client waiting until service is alive
    rospy.wait_for_service("area_rect")

    try:
        # create a client object; the service name must match the one
        # defined in the server
        my_request = rospy.ServiceProxy("area_rect", RectangleAreaService)

        # call the service with THIS function's arguments (the original
        # accidentally used the module-level globals width/height)
        response = my_request(x, y)
        return response.area

    # the original `except rospy.ServiceException(e):` was invalid
    # syntax for catching-and-binding; `as e` is the correct form
    except rospy.ServiceException as e:
        print(f"Service call failed: {e}")
31 |
32 |
if __name__ == "__main__":
    # parse the two required command-line arguments
    if len(sys.argv) == 3:
        width = int(sys.argv[1])
        height = int(sys.argv[2])
    else:
        # usage message (fixed "heigth" typo)
        print("%s [width height]" % sys.argv[0])
        sys.exit(1)

    # the service computes an area (a product), not a sum:
    # the original messages misleadingly used "+"
    print(f"Client will request {width} x {height}...")
    result = area_rect_client(width, height)
    print(f"Server returned {width} x {height} = {result}")
44 |
--------------------------------------------------------------------------------
/src/ros_service_assignment/src/area_rect_server.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 |
4 | import time
5 |
6 | import rospy
7 |
8 | # import service definition
9 | from ros_service_assignment.srv import (
10 | RectangleAreaService,
11 | RectangleAreaServiceRequest,
12 | RectangleAreaServiceResponse,
13 | )
14 |
15 |
def handle_area_rect(request):
    """
    Service handler: compute the area of the rectangle described by the
    request (width x height) and return it wrapped in the response type.
    """
    area = request.width * request.height
    # the service multiplies; the original message misleadingly used "+"
    print(f"Server will return: {request.width} * {request.height} = {area}")
    time.sleep(2)  # simulate a slow computation (2 seconds)
    return RectangleAreaServiceResponse(area)
21 |
22 |
def area_rect_server():
    """
    Start the "area_rect" service node and serve requests until shutdown.
    """
    rospy.init_node("area_rect_server")

    # register the service: name, type, and the handler invoked
    # for every incoming request
    my_service = rospy.Service("area_rect", RectangleAreaService, handle_area_rect)

    print("Server is ready to compute areas.")
    # block here, dispatching incoming requests
    rospy.spin()


if __name__ == "__main__":
    area_rect_server()
38 |
--------------------------------------------------------------------------------
/src/ros_service_assignment/srv/RectangleAreaService.srv:
--------------------------------------------------------------------------------
1 | # Service definition file
2 | # request message
3 |
4 | float32 height
5 | float32 width
6 | ---
7 | # response message
8 | float32 area
9 |
--------------------------------------------------------------------------------
/src/teleop_twist_keyboard:
--------------------------------------------------------------------------------
1 | /home/mhered/git/teleop_twist_keyboard/
--------------------------------------------------------------------------------