├── .ci.rosinstall
├── .github
└── workflows
│ └── industrial_ci.yml
├── .gitignore
├── CMakeLists.txt
├── LICENSE
├── README.en.md
├── README.md
├── config
├── joy_dualshock3.yml
├── joy_dualshock4.yml
└── joy_f710.yml
├── launch
├── direction_control.launch
├── line_follower.launch
├── mouse_with_lidar.launch
├── object_tracking.launch
├── slam_gmapping.launch
└── teleop.launch
├── package.xml
├── rviz
└── slam.rviz
└── scripts
├── camera.bash
├── direction_control.py
├── joystick_control.py
├── line_follower.py
└── object_tracking.py
/.ci.rosinstall:
--------------------------------------------------------------------------------
1 | - git:
2 | local-name: raspimouse_ros_2
3 | uri: https://github.com/ryuichiueda/raspimouse_ros_2
4 | version: master
5 | - git:
6 | local-name: rt_usb_9axisimu_driver
7 | uri: https://github.com/rt-net/rt_usb_9axisimu_driver
8 | version: master
9 |
--------------------------------------------------------------------------------
/.github/workflows/industrial_ci.yml:
--------------------------------------------------------------------------------
1 | name: industrial_ci
2 |
3 | on:
4 | push:
5 | paths-ignore:
6 | - '**.md'
7 | pull_request:
8 | paths-ignore:
9 | - '**.md'
10 | schedule:
11 | - cron: "0 2 * * 0" # Weekly on Sundays at 02:00
12 |
13 | jobs:
14 | industrial_ci:
15 | continue-on-error: ${{ matrix.experimental }}
16 | strategy:
17 | matrix:
18 | env:
19 | - { ROS_DISTRO: noetic, ROS_REPO: main }
20 | experimental: [false]
21 | include:
22 | - env: { ROS_DISTRO: noetic, ROS_REPO: testing }
23 | experimental: true
24 | env:
25 | UPSTREAM_WORKSPACE: .ci.rosinstall
26 | runs-on: ubuntu-latest
27 | steps:
28 | - uses: actions/checkout@v2
29 | - uses: "ros-industrial/industrial_ci@master"
30 | env: ${{ matrix.env }}
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | bin/
3 | lib/
4 | msg_gen/
5 | srv_gen/
6 | msg/*Action.msg
7 | msg/*ActionFeedback.msg
8 | msg/*ActionGoal.msg
9 | msg/*ActionResult.msg
10 | msg/*Feedback.msg
11 | msg/*Goal.msg
12 | msg/*Result.msg
13 | msg/_*.py
14 |
15 | # Generated by dynamic reconfigure
16 | *.cfgc
17 | /cfg/cpp/
18 | /cfg/*.py
19 |
20 | # Ignore generated docs
21 | *.dox
22 | *.wikidoc
23 |
24 | # eclipse stuff
25 | .project
26 | .cproject
27 |
28 | # qcreator stuff
29 | CMakeLists.txt.user
30 |
31 | srv/_*.py
32 | *.pcd
33 | *.pyc
34 | qtcreator-*
35 | *.user
36 |
37 | /planning/cfg
38 | /planning/docs
39 | /planning/src
40 |
41 | *~
42 |
43 | # Emacs
44 | .#*
45 |
46 | # Catkin custom files
47 | CATKIN_IGNORE
48 |
49 | # SketchUp backup files
50 | *~.skp
51 |
52 | # Swap files
53 | *.swp
54 |
55 | # MacOS metadata files
56 | .DS_store
57 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(raspimouse_ros_examples)
3 |
4 | find_package(catkin REQUIRED COMPONENTS
5 | roslint
6 | )
7 |
8 | catkin_package()
9 |
10 | include_directories(
11 | ${catkin_INCLUDE_DIRS}
12 | )
13 |
14 | file(GLOB python_scripts scripts/*.py)
15 | catkin_install_python(
16 | PROGRAMS ${python_scripts}
17 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
18 | )
19 |
20 | roslint_python()
21 | roslint_add_test()
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/README.en.md:
--------------------------------------------------------------------------------
1 | [English](README.en.md) | [日本語](README.md)
2 |
3 | # raspimouse_ros_examples
4 |
5 | [](https://github.com/rt-net/raspimouse_ros_examples/actions?query=workflow%3Aindustrial_ci+branch%3Amaster)
6 |
7 | ROS examples for Raspberry Pi Mouse.
8 |
9 | The navigation samples are [here](https://github.com/rt-net/raspimouse_slam_navigation_ros).
10 | The ROS 2 examples are [here](https://github.com/rt-net/raspimouse_ros2_examples).
11 |
12 |
13 |
14 | ## Requirements
15 |
16 | - Raspberry Pi Mouse
17 | - https://rt-net.jp/products/raspberrypimousev3/
18 | - Linux OS
19 | - Ubuntu server 16.04
20 | - Ubuntu server 18.04
21 | - Ubuntu server 20.04
22 | - https://ubuntu.com/download/raspberry-pi
23 | - Device Driver
24 | - [rt-net/RaspberryPiMouse](https://github.com/rt-net/RaspberryPiMouse)
25 | - ROS
26 | - [Kinetic Kame](http://wiki.ros.org/kinetic/Installation/Ubuntu)
27 | - [Melodic Morenia](http://wiki.ros.org/melodic/Installation/Ubuntu)
28 | - [Noetic Ninjemys](http://wiki.ros.org/noetic/Installation/Ubuntu)
29 | - Raspberry Pi Mouse ROS package
30 | - https://github.com/ryuichiueda/raspimouse_ros_2
31 | - Remote Computer (Optional)
32 | - ROS
33 | - [Kinetic Kame](http://wiki.ros.org/kinetic/Installation/Ubuntu)
34 | - [Melodic Morenia](http://wiki.ros.org/melodic/Installation/Ubuntu)
35 | - [Noetic Ninjemys](http://wiki.ros.org/noetic/Installation/Ubuntu)
36 | - Raspberry Pi Mouse ROS package
37 | - https://github.com/ryuichiueda/raspimouse_ros_2
38 |
39 | ## Installation
40 |
41 | ```sh
42 | cd ~/catkin_ws/src
43 | # Clone ROS packages
44 | git clone https://github.com/ryuichiueda/raspimouse_ros_2
45 | git clone -b $ROS_DISTRO-devel https://github.com/rt-net/raspimouse_ros_examples
46 | # For direction control example
47 | git clone https://github.com/rt-net/rt_usb_9axisimu_driver
48 |
49 | # Install dependencies
50 | rosdep install -r -y --from-paths . --ignore-src
51 |
52 | # make & install
53 | cd ~/catkin_ws && catkin_make
54 | source devel/setup.bash
55 | ```
56 |
57 | ## License
58 |
59 | This repository is licensed under the Apache 2.0, see [LICENSE](./LICENSE) for details.
60 |
61 | ## How To Use Examples
62 |
63 | - [keyboard_control](#keyboard_control)
64 | - [joystick_control](#joystick_control)
65 | - [object_tracking](#object_tracking)
66 | - [line_follower](#line_follower)
67 | - [SLAM](#slam)
68 | - [direction_control](#direction_control)
69 |
70 | ---
71 |
72 | ### keyboard_control
73 |
74 | This is an example to use [teleop_twist_keyboard](http://wiki.ros.org/teleop_twist_keyboard) package to send velocity command for Raspberry Pi Mouse.
75 |
76 | #### Requirements
77 |
78 | - Keyboard
79 |
80 | #### How to use
81 |
82 | Launch nodes with the following command:
83 |
84 | ```sh
85 | roslaunch raspimouse_ros_examples teleop.launch key:=true
86 |
87 | # Control from remote computer
88 | roslaunch raspimouse_ros_examples teleop.launch key:=true mouse:=false
89 | ```
90 |
91 | Then, call `/motor_on` service to enable motor control with the following command:
92 |
93 | ```sh
94 | rosservice call /motor_on
95 | ```
96 | [back to example list](#how-to-use-examples)
97 |
98 | ---
99 |
100 | ### joystick_control
101 |
102 | This is an example to use joystick controller to control a Raspberry Pi Mouse.
103 |
104 | #### Requirements
105 |
106 | - Joystick Controller
107 | - [Logicool Wireless Gamepad F710](https://gaming.logicool.co.jp/ja-jp/products/gamepads/f710-wireless-gamepad.html#940-0001440)
108 | - [SONY DUALSHOCK 3](https://www.jp.playstation.com/ps3/peripheral/cechzc2j.html)
109 |
110 | #### How to use
111 |
112 | Launch nodes with the following command:
113 |
114 | ```sh
115 | roslaunch raspimouse_ros_examples teleop.launch joy:=true
116 |
117 | # Use DUALSHOCK 3
118 | roslaunch raspimouse_ros_examples teleop.launch joy:=true joyconfig:="dualshock3"
119 |
120 | # Control from remote computer
121 | roslaunch raspimouse_ros_examples teleop.launch joy:=true mouse:=false
122 | ```
123 |
124 | This picture shows the default key configuration.
125 |
126 | 
127 |
128 | #### Configure
129 |
130 | Key assignments can be edited with key numbers in [./config/joy_f710.yml](./config/joy_f710.yml) or [./config/joy_dualshock3.yml](./config/joy_dualshock3.yml).
131 |
132 | ```yaml
133 | button_shutdown_1 : 8
134 | button_shutdown_2 : 9
135 |
136 | button_motor_off : 8
137 | button_motor_on : 9
138 |
139 | button_cmd_enable : 4
140 | ```
141 |
142 | #### Videos
143 |
144 | [](https://youtu.be/GswxdB8Ia0Y)
145 |
146 | [back to example list](#how-to-use-examples)
147 |
148 | ---
149 |
150 | ### object_tracking
151 |
152 |
153 |
154 | This is an example to use RGB camera images and OpenCV library for object tracking.
155 |
156 | #### Requirements
157 |
158 | - Web camera
159 | - [Logicool HD WEBCAM C310N](https://www.logicool.co.jp/ja-jp/product/hd-webcam-c310n)
160 | - Camera mount
161 | - [Raspberry Pi Mouse Option kit No.4 \[Webcam mount\]](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3584&language=en)
162 | - Orange ball(Optional)
163 | - [Soft Ball (Orange)](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1307&products_id=3701&language=en)
164 | - Software
165 | - python
166 | - opencv
167 | - numpy
168 | - v4l-utils
169 |
170 | #### Installation
171 |
172 | Install a camera mount and a web camera to Raspberry Pi Mouse, then connect the camera to the Raspberry Pi.
173 |
174 | Next, install the v4l-utils package with the following command:
175 |
176 | ```sh
177 | sudo apt install v4l-utils
178 | ```
179 | #### How to use
180 |
181 | Turn off the camera's automatic adjustment features (auto focus, auto white balance, etc.) with the following command:
182 |
183 | ```sh
184 | rosrun raspimouse_ros_examples camera.bash
185 | ```
186 |
187 | Then, launch nodes with the following command:
188 |
189 | ```sh
190 | roslaunch raspimouse_ros_examples object_tracking.launch
191 | ```
192 | This sample publishes `binary` and `object` topics for the object detection image.
193 | These images can be viewed with [RViz](http://wiki.ros.org/ja/rviz)
194 | or [rqt_image_view](http://wiki.ros.org/rqt_image_view).
195 |
196 |
197 |
198 | #### Configure
199 |
200 | Edit [`./scripts/object_tracking.py`](./scripts/object_tracking.py) to change a color of tracking target.
201 |
202 | ```python
203 | def detect_ball(self):
204 | # ~~~ omitted ~~~
205 | min_hsv, max_hsv = self.set_color_orange()
206 | # min_hsv, max_hsv = self.set_color_green()
207 | # min_hsv, max_hsv = self.set_color_blue()
208 | ```
209 |
210 | If object tracking is unstable, please edit the following lines.
211 |
212 | ```python
213 | def set_color_orange(self):
214 | # [H(0~180), S(0~255), V(0~255)]
215 | # min_hsv_orange = np.array([15, 200, 80])
216 | min_hsv_orange = np.array([15, 150, 40])
217 | max_hsv_orange = np.array([20, 255, 255])
218 | return min_hsv_orange, max_hsv_orange
219 | ```
220 |
221 | #### Videos
222 |
223 | [](https://youtu.be/U6_BuvrjyFc)
224 |
225 | [back to example list](#how-to-use-examples)
226 |
227 | ---
228 |
229 | ### line_follower
230 |
231 |
232 |
233 | This is an example for line following.
234 |
235 | #### Requirements
236 |
237 | - Line following sensor
238 | - [Raspberry Pi Mouse Option kit No.3 \[Line follower\]](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3591&language=en)
239 | - Field and lines for following (Optional)
240 |
241 | #### Installation
242 |
243 | Install a line following sensor unit to Raspberry Pi Mouse.
244 |
245 | #### How to use
246 |
247 | Launch nodes with the following command:
248 |
249 | ```sh
250 | roslaunch raspimouse_ros_examples line_follower.launch
251 |
252 | # Control from remote computer
253 | roslaunch raspimouse_ros_examples line_follower.launch mouse:=false
254 | ```
255 |
256 | Next, place Raspberry Pi Mouse on a field and press SW2 to sample sensor values on the field.
257 |
258 |
259 |
260 | Then, place Raspberry Pi Mouse to detect a line and press SW1 to sample sensor values on the line.
261 |
262 |
263 |
264 | Last, place Raspberry Pi Mouse on the line and press SW0 to start line following.
265 |
266 |
267 |
268 | Press SW0 again to stop the following.
269 |
270 | #### Configure
271 |
272 | Edit [`./scripts/line_follower.py`](./scripts/line_follower.py) to change a velocity command.
273 |
274 | ```python
275 | def _publish_cmdvel_for_line_following(self):
276 | VEL_LINER_X = 0.08 # m/s
277 | VEL_ANGULAR_Z = 0.8 # rad/s
278 | LOW_VEL_ANGULAR_Z = 0.5 # rad/s
279 |
280 | cmd_vel = Twist()
281 | ```
282 |
283 | #### Videos
284 |
285 | [](https://youtu.be/oPm0sW2V_tY)
286 |
287 | [back to example list](#how-to-use-examples)
288 |
289 | ---
290 |
291 | ### SLAM
292 |
293 |
294 |
295 | This is an example to use LiDAR for SLAM (Simultaneous Localization And Mapping).
296 |
297 | #### Requirements
298 |
299 | - LiDAR
300 | - [URG-04LX-UG01](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1348_1296&products_id=2816&language=en)
301 |
302 | - [LDS-01](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1348_5&products_id=3676&language=en)
303 | - [LiDAR Mount](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3867&language=en)
304 | - Joystick Controller (Optional)
305 |
306 | This sample does not support RPLIDAR because its package [rplidar_ros](https://github.com/Slamtec/rplidar_ros) has not released a ROS Noetic version yet.
307 |
308 | #### Installation
309 |
310 | Install a LiDAR to the Raspberry Pi Mouse.
311 |
312 | - URG-04LX-UG01
313 | -
314 |
316 | - LDS-01
317 | -
318 |
319 | #### How to use
320 |
321 | Launch nodes on Raspberry Pi Mouse with the following command:
322 |
323 | ```sh
324 | # URG
325 | roslaunch raspimouse_ros_examples mouse_with_lidar.launch urg:=true port:=/dev/ttyACM0
326 |
327 | # LDS
328 | roslaunch raspimouse_ros_examples mouse_with_lidar.launch lds:=true port:=/dev/ttyUSB0
329 | ```
330 |
331 | Next, launch `teleop.launch` to control Raspberry Pi Mouse with the following command:
332 |
333 | ```sh
334 | # joystick control
335 | roslaunch raspimouse_ros_examples teleop.launch mouse:=false joy:=true joyconfig:=dualshock3
336 | ```
337 |
338 | Then, launch the SLAM packages (running them on a remote computer is recommended) with the following command:
339 |
340 | ```sh
341 | # URG
342 | roslaunch raspimouse_ros_examples slam_gmapping.launch urg:=true
343 |
344 | # LDS
345 | roslaunch raspimouse_ros_examples slam_gmapping.launch lds:=true
346 | ```
347 |
348 | After moving Raspberry Pi Mouse and making a map, run a node to save the map with the following command:
349 |
350 | ```sh
351 | mkdir ~/maps
352 | rosrun map_server map_saver -f ~/maps/mymap
353 | ```
354 |
355 | #### Configure
356 |
357 | Edit [./launch/slam_gmapping.launch](./launch/slam_gmapping.launch) to configure parameters of [gmapping](http://wiki.ros.org/gmapping) package.
358 |
359 | ```xml
360 |
361 |
362 |
363 |
364 |
365 |
366 |
367 |
368 |
369 |
370 |
371 |
372 | ```
373 |
374 | #### Videos
375 |
376 | [](https://youtu.be/gWozU47UqVE)
377 |
378 | [](https://youtu.be/hV68UqAntfo)
379 |
380 | [back to example list](#how-to-use-examples)
381 |
382 | ---
383 |
384 | ### direction_control
385 |
386 |
387 |
388 | This is an example to use an IMU sensor for direction control.
389 |
390 | #### Requirements
391 |
392 | - [USB output 9 degrees IMU sensor module](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1348_1&products_id=3416&language=en)
393 | - [LiDAR Mount](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3867)
394 | - RT-USB-9axisIMU ROS Package.
395 | - https://github.com/rt-net/rt_usb_9axisimu_driver
396 |
397 | #### Installation
398 |
399 | Install the IMU sensor module to the LiDAR mount.
400 |
401 |
402 |
403 | Install the LiDAR mount to the Raspberry Pi Mouse.
404 |
405 |
406 |
407 | #### How to use
408 |
409 | Launch nodes on Raspberry Pi Mouse with the following command:
410 |
411 | ```sh
412 | roslaunch raspimouse_ros_examples direction_control.launch
413 | ```
414 |
415 | Then, press SW0 ~ SW2 to change the control mode as following,
416 |
417 | - SW0: Calibrate the gyroscope bias and reset a heading angle of Raspberry Pi Mouse to 0 rad.
418 | - SW1: Start a direction control to keep the heading angle to 0 rad.
419 | - Press SW0 ~ SW2 or tilt the body to sideways to finish the control.
420 | - SW2: Start a direction control to change the heading angle to `-π ~ π rad`.
421 | - Press SW0 ~ SW2 or tilt the body to sideways to finish the control.
422 |
423 | #### Configure
424 |
425 | Edit [`./scripts/direction_control.py`](./scripts/direction_control.py)
426 | to configure gains of a PID controller for the direction control.
427 |
428 | ```python
429 | class DirectionController(object):
430 | # ---
431 | def __init__(self):
432 | # ---
433 | # for angle control
434 | self._omega_pid_controller = PIDController(10, 0, 20)
435 | ```
436 |
437 | #### Videos
438 |
439 | [](https://youtu.be/LDpC2wqIoU4)
440 |
441 | [back to example list](#how-to-use-examples)
442 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [English](README.en.md) | [日本語](README.md)
2 |
3 | # raspimouse_ros_examples
4 |
5 | [](https://github.com/rt-net/raspimouse_ros_examples/actions?query=workflow%3Aindustrial_ci+branch%3Amaster)
6 |
7 | Raspberry Pi MouseのROSサンプルコード集です。
8 |
9 | ナビゲーションのサンプルは[こちら](https://github.com/rt-net/raspimouse_slam_navigation_ros)。
10 | ROS 2のサンプルコード集は[こちら](https://github.com/rt-net/raspimouse_ros2_examples)。
11 |
12 |
13 |
14 | ## Requirements
15 |
16 | - Raspberry Pi Mouse
17 | - https://rt-net.jp/products/raspberrypimousev3/
18 | - Linux OS
19 | - Ubuntu server 16.04
20 | - Ubuntu server 18.04
21 | - Ubuntu server 20.04
22 | - https://ubuntu.com/download/raspberry-pi
23 | - Device Driver
24 | - [rt-net/RaspberryPiMouse](https://github.com/rt-net/RaspberryPiMouse)
25 | - ROS
26 | - [Kinetic Kame](http://wiki.ros.org/kinetic/Installation/Ubuntu)
27 | - [Melodic Morenia](http://wiki.ros.org/melodic/Installation/Ubuntu)
28 | - [Noetic Ninjemys](http://wiki.ros.org/noetic/Installation/Ubuntu)
29 | - Raspberry Pi Mouse ROS package
30 | - https://github.com/ryuichiueda/raspimouse_ros_2
31 | - Remote Computer (Optional)
32 | - ROS
33 | - [Kinetic Kame](http://wiki.ros.org/kinetic/Installation/Ubuntu)
34 | - [Melodic Morenia](http://wiki.ros.org/melodic/Installation/Ubuntu)
35 | - [Noetic Ninjemys](http://wiki.ros.org/noetic/Installation/Ubuntu)
36 | - Raspberry Pi Mouse ROS package
37 | - https://github.com/ryuichiueda/raspimouse_ros_2
38 |
39 | ## Installation
40 |
41 | ```sh
42 | cd ~/catkin_ws/src
43 | # Clone ROS packages
44 | git clone https://github.com/ryuichiueda/raspimouse_ros_2
45 | git clone -b $ROS_DISTRO-devel https://github.com/rt-net/raspimouse_ros_examples
46 | # For direction control example
47 | git clone https://github.com/rt-net/rt_usb_9axisimu_driver
48 |
49 | # Install dependencies
50 | rosdep install -r -y --from-paths . --ignore-src
51 |
52 | # make & install
53 | cd ~/catkin_ws && catkin_make
54 | source devel/setup.bash
55 | ```
56 |
57 | ## License
58 |
59 | このリポジトリはApache 2.0ライセンスの元、公開されています。
60 | ライセンスについては[LICENSE](./LICENSE)を参照ください。
61 |
62 | ## How To Use Examples
63 |
64 | - [keyboard_control](#keyboard_control)
65 | - [joystick_control](#joystick_control)
66 | - [object_tracking](#object_tracking)
67 | - [line_follower](#line_follower)
68 | - [SLAM](#slam)
69 | - [direction_control](#direction_control)
70 |
71 | ---
72 |
73 | ### keyboard_control
74 |
75 | [teleop_twist_keyboard](http://wiki.ros.org/teleop_twist_keyboard)を使ってRaspberryPiMouseを動かします。
76 |
77 | #### Requirements
78 |
79 | - Keyboard
80 |
81 | #### How to use
82 |
83 | 次のコマンドでノードを起動します。
84 |
85 | ```sh
86 | roslaunch raspimouse_ros_examples teleop.launch key:=true
87 |
88 | # Control from remote computer
89 | roslaunch raspimouse_ros_examples teleop.launch key:=true mouse:=false
90 | ```
91 |
92 | ノードが起動したら`/motor_on`サービスをコールします。
93 |
94 | ```sh
95 | rosservice call /motor_on
96 | ```
97 | [back to example list](#how-to-use-examples)
98 |
99 | ---
100 |
101 | ### joystick_control
102 |
103 | ジョイスティックコントローラでRaspberryPiMouseを動かすコード例です。
104 |
105 | #### Requirements
106 |
107 | - Joystick Controller
108 | - [Logicool Wireless Gamepad F710](https://gaming.logicool.co.jp/ja-jp/products/gamepads/f710-wireless-gamepad.html#940-0001440)
109 | - [SONY DUALSHOCK 3](https://www.jp.playstation.com/ps3/peripheral/cechzc2j.html)
110 |
111 | #### How to use
112 |
113 | 次のコマンドでノードを起動します。
114 |
115 | ```sh
116 | roslaunch raspimouse_ros_examples teleop.launch joy:=true
117 |
118 | # Use DUALSHOCK 3
119 | roslaunch raspimouse_ros_examples teleop.launch joy:=true joyconfig:="dualshock3"
120 |
121 | # Control from remote computer
122 | roslaunch raspimouse_ros_examples teleop.launch joy:=true mouse:=false
123 | ```
124 |
125 | デフォルトのキー割り当てはこちらです。
126 |
127 | 
128 |
129 | #### Configure
130 |
131 | [./config/joy_f710.yml](./config/joy_f710.yml)、[./config/joy_dualshock3.yml](./config/joy_dualshock3.yml)
132 | のキー番号を編集することで、キー割り当てを変更できます。
133 |
134 | ```yaml
135 | button_shutdown_1 : 8
136 | button_shutdown_2 : 9
137 |
138 | button_motor_off : 8
139 | button_motor_on : 9
140 |
141 | button_cmd_enable : 4
142 | ```
143 |
144 | #### Videos
145 |
146 | [](https://youtu.be/GswxdB8Ia0Y)
147 |
148 | [back to example list](#how-to-use-examples)
149 |
150 | ---
151 |
152 | ### object_tracking
153 |
154 |
155 |
156 | 色情報をもとにオレンジ色のボールの追跡を行うコード例です。
157 | USB接続のWebカメラとOpenCVを使ってボール追跡をします。
158 |
159 | #### Requirements
160 |
161 | - Webカメラ
162 | - [Logicool HD WEBCAM C310N](https://www.logicool.co.jp/ja-jp/product/hd-webcam-c310n)
163 | - カメラマウント
164 | - [Raspberry Pi Mouse オプションキット No.4 \[Webカメラマウント\]](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3584)
165 | - ボール(Optional)
166 | - [ソフトボール(オレンジ)](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1307&products_id=3701)
167 | - Software
168 | - python
169 | - opencv
170 | - numpy
171 | - v4l-utils
172 |
173 | #### Installation
174 |
175 | Raspberry Pi Mouseにカメラマウントを取り付け、WebカメラをRaspberry Piに接続します。
176 |
177 | 次のコマンドで、カメラ制御用のパッケージ(v4l-utils)をインストールします。
178 |
179 | ```sh
180 | sudo apt install v4l-utils
181 | ```
182 | #### How to use
183 |
184 | 次のスクリプトを実行して、カメラの自動調節機能(自動露光、オートホワイトバランス等)を切ります。
185 |
186 | ```sh
187 | rosrun raspimouse_ros_examples camera.bash
188 | ```
189 |
190 | 次のコマンドでノードを起動します。
191 |
192 | ```sh
193 | roslaunch raspimouse_ros_examples object_tracking.launch
194 | ```
195 |
196 | 物体検出画像は`binary`と`object`というトピックとして発行されます。
197 | これらの画像は[RViz](http://wiki.ros.org/ja/rviz)
198 | や[rqt_image_view](http://wiki.ros.org/rqt_image_view)
199 | で表示できます。
200 |
201 |
202 |
203 | #### Configure
204 |
205 | 追跡対象の色を変更するには
206 | [`./scripts/object_tracking.py`](./scripts/object_tracking.py)を編集します。
207 |
208 | ```python
209 | def detect_ball(self):
210 | # ~~~ 省略 ~~~
211 | min_hsv, max_hsv = self.set_color_orange()
212 | # min_hsv, max_hsv = self.set_color_green()
213 | # min_hsv, max_hsv = self.set_color_blue()
214 | ```
215 |
216 | 反応が悪い時にはカメラの露光や関数内のパラメータを調整して下さい。
217 |
218 | ```python
219 | def set_color_orange(self):
220 | # [H(0~180), S(0~255), V(0~255)]
221 | # min_hsv_orange = np.array([15, 200, 80])
222 | min_hsv_orange = np.array([15, 150, 40])
223 | max_hsv_orange = np.array([20, 255, 255])
224 | return min_hsv_orange, max_hsv_orange
225 | ```
226 |
227 | #### Videos
228 |
229 | [](https://youtu.be/U6_BuvrjyFc)
230 |
231 | [back to example list](#how-to-use-examples)
232 |
233 | ---
234 |
235 | ### line_follower
236 |
237 |
238 |
239 | ライントレースのコード例です。
240 |
241 | #### Requirements
242 |
243 | - ライントレースセンサ
244 | - [Raspberry Pi Mouse オプションキット No.3 \[ライントレース\]](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3591)
245 | - フィールドとライン (Optional)
246 |
247 | #### Installation
248 |
249 | Raspberry Pi Mouseにライントレースセンサを取り付けます。
250 |
251 |
252 | #### How to use
253 |
254 | 次のコマンドでノードを起動します。
255 |
256 | ```sh
257 | roslaunch raspimouse_ros_examples line_follower.launch
258 |
259 | # Control from remote computer
260 | roslaunch raspimouse_ros_examples line_follower.launch mouse:=false
261 | ```
262 |
263 | Raspberry Pi Mouseをフィールドに置き、SW2を押してフィールド上のセンサ値をサンプリングします。
264 |
265 |
266 |
267 | 次に、センサとラインが重なるようにRaspberry Pi Mouseを置き、SW1を押してライン上のセンサ値をサンプリングします。
268 |
269 |
270 |
271 | 最後に、ライン上にRaspberry Pi Mouseを置き、SW0を押してライントレースを開始します。
272 |
273 |
274 |
275 | もう一度SW0を押すとライントレースを停止します。
276 |
277 | #### Configure
278 |
279 | 走行速度を変更するには[`./scripts/line_follower.py`](./scripts/line_follower.py)を編集します。
280 |
281 | ```python
282 | def _publish_cmdvel_for_line_following(self):
283 | VEL_LINER_X = 0.08 # m/s
284 | VEL_ANGULAR_Z = 0.8 # rad/s
285 | LOW_VEL_ANGULAR_Z = 0.5 # rad/s
286 |
287 | cmd_vel = Twist()
288 | ```
289 |
290 | #### Videos
291 |
292 | [](https://youtu.be/oPm0sW2V_tY)
293 |
294 | [back to example list](#how-to-use-examples)
295 |
296 | ---
297 |
298 | ### SLAM
299 |
300 |
301 |
302 | LiDARを使ってSLAM(自己位置推定と地図作成)を行うサンプルです。
303 |
304 | #### Requirements
305 |
306 | - LiDAR
307 | - [URG-04LX-UG01](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1348_1296&products_id=2816)
308 |
309 | - [LDS-01](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1348_5&products_id=3676)
310 | - [LiDAR Mount](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3867)
311 | - Joystick Controller (Optional)
312 |
313 | RPLIDARについては、パッケージ[rplidar_ros](https://github.com/Slamtec/rplidar_ros)
314 | がROS Noetic向けにリリースされていないため動作確認していません。
315 |
316 | #### Installation
317 |
318 | Raspberry Pi MouseにLiDARを取り付けます。
319 |
320 | - URG-04LX-UG01
321 | -
322 |
324 | - LDS-01
325 | -
326 |
327 | #### How to use
328 |
329 | Raspberry Pi Mouse上で次のコマンドでノードを起動します。
330 |
331 | ```sh
332 | # URG
333 | roslaunch raspimouse_ros_examples mouse_with_lidar.launch urg:=true port:=/dev/ttyACM0
334 |
335 | # LDS
336 | roslaunch raspimouse_ros_examples mouse_with_lidar.launch lds:=true port:=/dev/ttyUSB0
337 |
338 | ```
339 |
340 | Raspberry Pi Mouseを動かすため`teleop.launch`を起動します
341 |
342 | ```sh
343 | # joystick control
344 | roslaunch raspimouse_ros_examples teleop.launch mouse:=false joy:=true joyconfig:=dualshock3
345 | ```
346 |
347 | 次のコマンドでSLAMパッケージを起動します。(Remote computerでの実行推奨)
348 |
349 | ```sh
350 | # URG
351 | roslaunch raspimouse_ros_examples slam_gmapping.launch urg:=true
352 |
353 | # LDS
354 | roslaunch raspimouse_ros_examples slam_gmapping.launch lds:=true
355 | ```
356 |
357 | Raspberry Pi Mouseを動かして地図を作成します。
358 |
359 | 次のコマンドで作成した地図を保存します。
360 |
361 | ```sh
362 | mkdir ~/maps
363 | rosrun map_server map_saver -f ~/maps/mymap
364 | ```
365 |
366 | #### Configure
367 |
368 | [./launch/slam_gmapping.launch](./launch/slam_gmapping.launch)で[gmapping](http://wiki.ros.org/gmapping)パッケージのパラメータを調整します。
369 |
370 | ```xml
371 |
372 |
373 |
374 |
375 |
376 |
377 |
378 |
379 |
380 |
381 |
382 |
383 | ```
384 |
385 | #### Videos
386 |
387 | [](https://youtu.be/gWozU47UqVE)
388 |
389 |
390 |
391 | [back to example list](#how-to-use-examples)
392 |
393 | ---
394 |
395 | ### direction_control
396 |
397 |
398 |
399 | IMUセンサを使用した角度制御のコード例です。
400 |
401 | #### Requirements
402 |
403 | - [USB出力9軸IMUセンサモジュール](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1348_1&products_id=3416&language=ja)
404 | - [LiDAR Mount](https://www.rt-shop.jp/index.php?main_page=product_info&cPath=1299_1395&products_id=3867)
405 | - RT-USB-9axisIMU ROS Package.
406 | - https://github.com/rt-net/rt_usb_9axisimu_driver
407 |
408 | #### Installation
409 |
410 | LiDAR MountにIMUセンサモジュールを取り付けます。
411 |
412 |
413 |
414 | Raspberry Pi Mouse にLiDAR Mountを取り付けます。
415 |
416 |
417 |
418 | #### How to use
419 |
420 | 次のコマンドでノードを起動します。
421 |
422 | ```sh
423 | roslaunch raspimouse_ros_examples direction_control.launch
424 | ```
425 |
426 | SW0 ~ SW2を押して動作モードを切り替えます。
427 |
428 | - SW0: ジャイロセンサのバイアスをキャリブレーションし、ラズパイマウスの方位角を`0 rad`にリセットします
429 | - SW1: 方位角を`0 rad`に維持する角度制御を開始します
430 | - SW0 ~ SW2を押すか、ラズパイマウス本体を横に傾けると終了します
431 | - SW2: 方位角を`-π ~ π rad`に変化させる角度制御を開始します
432 | - SW0 ~ SW2を押すか、ラズパイマウス本体を横に傾けると終了します
433 |
434 | #### Configure
435 |
436 | 角度制御に使うPID制御器のゲインを変更するには[`./scripts/direction_control.py`](./scripts/direction_control.py)を編集します。
437 |
438 | ```python
439 | class DirectionController(object):
440 | # ---
441 | def __init__(self):
442 | # ---
443 | # for angle control
444 | self._omega_pid_controller = PIDController(10, 0, 20)
445 | ```
446 |
447 | #### Videos
448 |
449 | [](https://youtu.be/LDpC2wqIoU4)
450 |
451 | [back to example list](#how-to-use-examples)
452 |
--------------------------------------------------------------------------------
/config/joy_dualshock3.yml:
--------------------------------------------------------------------------------
1 | button_shutdown_1 : 8
2 | button_shutdown_2 : 9
3 |
4 | button_motor_off : 8
5 | button_motor_on : 9
6 |
7 | button_cmd_enable : 4
8 | axis_cmd_linear_x : 1
9 | axis_cmd_angular_z : 3
10 |
11 | analog_d_pad : false
12 | d_pad_up : 13
13 | d_pad_down : 14
14 | d_pad_left : 15
15 | d_pad_right : 16
16 | # for analog_d_pad
17 | d_pad_up_is_positive : true
18 | d_pad_right_is_positive : false
19 |
20 | button_buzzer_enable : 5
21 | dpad_buzzer0 : "up"
22 | dpad_buzzer1 : "right"
23 | dpad_buzzer2 : "down"
24 | dpad_buzzer3 : "left"
25 | button_buzzer4 : 2
26 | button_buzzer5 : 1
27 | button_buzzer6 : 0
28 | button_buzzer7 : 3
29 |
30 | button_sensor_sound_en : 7
31 | button_config_enable : 6
32 |
--------------------------------------------------------------------------------
/config/joy_dualshock4.yml:
--------------------------------------------------------------------------------
1 | button_shutdown_1 : 8
2 | button_shutdown_2 : 9
3 |
4 | button_motor_off : 8
5 | button_motor_on : 9
6 |
7 | button_cmd_enable : 4
8 | axis_cmd_linear_x : 1
9 | axis_cmd_angular_z : 3
10 |
11 | analog_d_pad : true
12 | d_pad_up : 7
13 | d_pad_down : 7
14 | d_pad_left : 6
15 | d_pad_right : 6
16 | # for analog_d_pad
17 | d_pad_up_is_positive : true
18 | d_pad_right_is_positive : false
19 |
20 | button_buzzer_enable : 5
21 | dpad_buzzer0 : "up"
22 | dpad_buzzer1 : "right"
23 | dpad_buzzer2 : "down"
24 | dpad_buzzer3 : "left"
25 | button_buzzer4 : 2
26 | button_buzzer5 : 1
27 | button_buzzer6 : 0
28 | button_buzzer7 : 3
29 |
30 | button_sensor_sound_en : 7
31 | button_config_enable : 6
32 |
--------------------------------------------------------------------------------
/config/joy_f710.yml:
--------------------------------------------------------------------------------
1 | button_shutdown_1 : 8
2 | button_shutdown_2 : 9
3 |
4 | button_motor_off : 8
5 | button_motor_on : 9
6 |
7 | button_cmd_enable : 4
8 | axis_cmd_linear_x : 1
9 | axis_cmd_angular_z : 2
10 |
11 | analog_d_pad : true
12 | d_pad_up : 5
13 | d_pad_down : 5
14 | d_pad_left : 4
15 | d_pad_right : 4
16 | # for analog_d_pad
17 | d_pad_up_is_positive : true
18 | d_pad_right_is_positive : false
19 |
20 | button_buzzer_enable : 5
21 | dpad_buzzer0 : "up"
22 | dpad_buzzer1 : "right"
23 | dpad_buzzer2 : "down"
24 | dpad_buzzer3 : "left"
25 | button_buzzer4 : 3
26 | button_buzzer5 : 2
27 | button_buzzer6 : 1
28 | button_buzzer7 : 0
29 |
30 | button_sensor_sound_en : 7
31 | button_config_enable : 6
32 |
--------------------------------------------------------------------------------
/launch/direction_control.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/launch/line_follower.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/launch/mouse_with_lidar.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/launch/object_tracking.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/launch/slam_gmapping.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
58 |
59 |
60 |
--------------------------------------------------------------------------------
/launch/teleop.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | raspimouse_ros_examples
4 | 0.0.1
5 | The raspimouse_ros_examples package
6 | ShotaAk
7 | tentenNT
8 | Apache License 2.0
9 |
10 | catkin
11 | roslint
12 | rospy
13 | std_srvs
14 | sensor_msgs
15 | std_msgs
16 | geometry_msgs
17 | raspimouse_ros_2
18 | cv_bridge
19 | cv_camera
20 | joy
21 | teleop_twist_keyboard
22 | gmapping
23 | urg_node
24 | tf
25 | map_server
26 | hls_lfcd_lds_driver
27 | rt_usb_9axisimu_driver
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/rviz/slam.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 78
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Odometry1/Shape1
9 | Splitter Ratio: 0.5
10 | Tree Height: 759
11 | - Class: rviz/Selection
12 | Name: Selection
13 | - Class: rviz/Tool Properties
14 | Expanded:
15 | - /2D Pose Estimate1
16 | - /2D Nav Goal1
17 | - /Publish Point1
18 | Name: Tool Properties
19 | Splitter Ratio: 0.5886790156364441
20 | - Class: rviz/Views
21 | Expanded:
22 | - /Current View1
23 | Name: Views
24 | Splitter Ratio: 0.5
25 | - Class: rviz/Time
26 | Experimental: false
27 | Name: Time
28 | SyncMode: 0
29 | SyncSource: LaserScan
30 | Preferences:
31 | PromptSaveOnExit: true
32 | Toolbars:
33 | toolButtonStyle: 2
34 | Visualization Manager:
35 | Class: ""
36 | Displays:
37 | - Alpha: 0.5
38 | Cell Size: 1
39 | Class: rviz/Grid
40 | Color: 160; 160; 164
41 | Enabled: true
42 | Line Style:
43 | Line Width: 0.029999999329447746
44 | Value: Lines
45 | Name: Grid
46 | Normal Cell Count: 0
47 | Offset:
48 | X: 0
49 | Y: 0
50 | Z: 0
51 | Plane: XY
52 | Plane Cell Count: 10
53 | Reference Frame:
54 | Value: true
55 | - Alpha: 1
56 | Autocompute Intensity Bounds: true
57 | Autocompute Value Bounds:
58 | Max Value: 10
59 | Min Value: -10
60 | Value: true
61 | Axis: Z
62 | Channel Name: intensity
63 | Class: rviz/LaserScan
64 | Color: 255; 255; 255
65 | Color Transformer: Intensity
66 | Decay Time: 0
67 | Enabled: true
68 | Invert Rainbow: false
69 | Max Color: 255; 255; 255
70 | Max Intensity: 4096
71 | Min Color: 0; 0; 0
72 | Min Intensity: 0
73 | Name: LaserScan
74 | Position Transformer: XYZ
75 | Queue Size: 10
76 | Selectable: true
77 | Size (Pixels): 3
78 | Size (m): 0.009999999776482582
79 | Style: Flat Squares
80 | Topic: /scan
81 | Unreliable: false
82 | Use Fixed Frame: true
83 | Use rainbow: true
84 | Value: true
85 | - Alpha: 0.699999988079071
86 | Class: rviz/Map
87 | Color Scheme: map
88 | Draw Behind: false
89 | Enabled: true
90 | Name: Map
91 | Topic: /map
92 | Unreliable: false
93 | Use Timestamp: false
94 | Value: true
95 | - Angle Tolerance: 0.10000000149011612
96 | Class: rviz/Odometry
97 | Covariance:
98 | Orientation:
99 | Alpha: 0.5
100 | Color: 255; 255; 127
101 | Color Style: Unique
102 | Frame: Local
103 | Offset: 1
104 | Scale: 1
105 | Value: true
106 | Position:
107 | Alpha: 0.30000001192092896
108 | Color: 204; 51; 204
109 | Scale: 1
110 | Value: true
111 | Value: false
112 | Enabled: true
113 | Keep: 10
114 | Name: Odometry
115 | Position Tolerance: 0.10000000149011612
116 | Shape:
117 | Alpha: 0.5
118 | Axes Length: 1
119 | Axes Radius: 0.10000000149011612
120 | Color: 255; 25; 0
121 | Head Length: 0.20000000298023224
122 | Head Radius: 0.10000000149011612
123 | Shaft Length: 0.009999999776482582
124 | Shaft Radius: 0.05000000074505806
125 | Value: Arrow
126 | Topic: /odom
127 | Unreliable: false
128 | Value: true
129 | - Class: rviz/TF
130 | Enabled: true
131 | Frame Timeout: 15
132 | Frames:
133 | All Enabled: true
134 | base_link:
135 | Value: true
136 | laser:
137 | Value: true
138 | map:
139 | Value: true
140 | odom:
141 | Value: true
142 | Marker Scale: 1
143 | Name: TF
144 | Show Arrows: true
145 | Show Axes: true
146 | Show Names: true
147 | Tree:
148 | map:
149 | odom:
150 | base_link:
151 | laser:
152 | {}
153 | Update Interval: 0
154 | Value: true
155 | Enabled: true
156 | Global Options:
157 | Background Color: 48; 48; 48
158 | Default Light: true
159 | Fixed Frame: map
160 | Frame Rate: 30
161 | Name: root
162 | Tools:
163 | - Class: rviz/Interact
164 | Hide Inactive Objects: true
165 | - Class: rviz/MoveCamera
166 | - Class: rviz/Select
167 | - Class: rviz/FocusCamera
168 | - Class: rviz/Measure
169 | - Class: rviz/SetInitialPose
170 | Theta std deviation: 0.2617993950843811
171 | Topic: /initialpose
172 | X std deviation: 0.5
173 | Y std deviation: 0.5
174 | - Class: rviz/SetGoal
175 | Topic: /move_base_simple/goal
176 | - Class: rviz/PublishPoint
177 | Single click: true
178 | Topic: /clicked_point
179 | Value: true
180 | Views:
181 | Current:
182 | Class: rviz/Orbit
183 | Distance: 9.60938835144043
184 | Enable Stereo Rendering:
185 | Stereo Eye Separation: 0.05999999865889549
186 | Stereo Focal Distance: 1
187 | Swap Stereo Eyes: false
188 | Value: false
189 | Focal Point:
190 | X: -0.0008559883572161198
191 | Y: 0.002144112717360258
192 | Z: 0.3886895477771759
193 | Focal Shape Fixed Size: true
194 | Focal Shape Size: 0.05000000074505806
195 | Invert Z Axis: false
196 | Name: Current View
197 | Near Clip Distance: 0.009999999776482582
198 | Pitch: 1.0097967386245728
199 | Target Frame:
200 | Value: Orbit (rviz)
201 | Yaw: 3.5385818481445312
202 | Saved: ~
203 | Window Geometry:
204 | Displays:
205 | collapsed: false
206 | Height: 1025
207 | Hide Left Dock: false
208 | Hide Right Dock: false
209 | QMainWindow State: 000000ff00000000fd00000004000000000000015600000363fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d00000363000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f00000363fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003d00000363000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073d0000003efc0100000002fb0000000800540069006d006501000000000000073d000002eb00fffffffb0000000800540069006d00650100000000000004500000000000000000000004cc0000036300000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
210 | Selection:
211 | collapsed: false
212 | Time:
213 | collapsed: false
214 | Tool Properties:
215 | collapsed: false
216 | Views:
217 | collapsed: false
218 | Width: 1920
219 | X: 1920
220 | Y: 0
221 |
--------------------------------------------------------------------------------
/scripts/camera.bash:
--------------------------------------------------------------------------------
#!/bin/bash
# Turn off the auto exposure function of the camera
# corresponding to /dev/video0.
# Adjust the focus and exposure if necessary.
# This script requires v4l2-ctl:
#   sudo apt install v4l-utils
#
# Bug fix: the shebang must be the very first line of the script;
# previously it sat below the comments and was treated as a plain comment.

# Show all current controls of the camera for reference.
v4l2-ctl -d /dev/video0 --all
# Disable automatic white balance and fix the color temperature.
v4l2-ctl -d /dev/video0 --set-ctrl white_balance_temperature_auto=0
v4l2-ctl -d /dev/video0 --set-ctrl white_balance_temperature=4000
#v4l2-ctl -d /dev/video0 --set-ctrl focus_auto=0
#v4l2-ctl -d /dev/video0 --set-ctrl focus_absolute=0
#v4l2-ctl --set-ctrl exposure_auto=1
#v4l2-ctl --set-ctrl exposure_absolute=500
15 |
--------------------------------------------------------------------------------
/scripts/direction_control.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: UTF-8
3 |
4 | # Copyright 2020 RT Corporation
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 |
18 | import rospy
19 | import math
20 | import numpy
21 | from sensor_msgs.msg import Imu
22 | from std_msgs.msg import UInt16
23 | from geometry_msgs.msg import Twist
24 | from geometry_msgs.msg import Vector3
25 | from std_srvs.srv import Trigger
26 | from raspimouse_ros_2.msg import ButtonValues
27 |
28 |
class PIDController(object):
    """Incremental (velocity-form) PID controller.

    Each call to update() adds the increment
        p*(e - e1) + i*e + d*(e - 2*e1 + e2)
    to an accumulated output, where e, e1, e2 are the current and two
    previous errors.
    """

    def __init__(self, p_gain, i_gain, d_gain):
        # Controller gains.
        self._p_gain = p_gain
        self._i_gain = i_gain
        self._d_gain = d_gain
        # Error history (one and two steps back) and accumulated output.
        self._error_1 = 0.0
        self._error_2 = 0.0
        self._output = 0.0

    def update(self, current, target):
        """Advance one control step and return the accumulated output."""
        error = target - current

        p_term = self._p_gain * (error - self._error_1)
        i_term = self._i_gain * error
        d_term = self._d_gain * (error - 2 * self._error_1 + self._error_2)
        self._output += p_term + i_term + d_term

        # Shift the error history.
        self._error_2, self._error_1 = self._error_1, error

        return self._output
53 |
54 |
class DirectionController(object):
    """Heading-angle controller for the Raspberry Pi Mouse using a gyro.

    Integrates the gyro's z angular velocity into a heading angle and
    publishes `cmd_vel` through a PID controller to either hold the
    heading at 0 rad or sweep it back and forth. Modes are switched with
    the robot's on-board buttons (front=SW0, mid=SW1, rear=SW2).
    """

    # Operation modes selected by the on-board buttons.
    _MODE_NONE = 0
    _MODE_CALIBRATION = 1
    _MODE_KEEP_ZERO_RADIAN = 2
    _MODE_ROTATION = 3

    def __init__(self):
        self._mouse_buttons = ButtonValues()
        self._imu_data_raw = Imu()

        # for acceleration low pass filter
        self._filtered_acc = Vector3()
        self._prev_acc = Vector3()

        # for self.update()
        self._current_mode = self._MODE_NONE
        self._has_motor_enabled = False

        # for angle control
        self._omega_pid_controller = PIDController(10, 0, 20)
        self._target_angle = 0.0
        self._increase_target_angle = True

        # for heading_angle calculation
        self._heading_angle = 0.0
        self._omega_bias = 0.0
        self._prev_imu_timestamp = rospy.Time()

        self._sub_buttons = rospy.Subscriber(
            'buttons', ButtonValues, self._callback_buttons, queue_size=1)
        self._sub_imu = rospy.Subscriber(
            'imu/data_raw', Imu, self._callback_imu, queue_size=1)

        self._pub_cmdvel = rospy.Publisher('cmd_vel', Twist, queue_size=1)
        self._pub_buzzer = rospy.Publisher('buzzer', UInt16, queue_size=1)

        try:
            rospy.wait_for_service("motor_on", timeout=5)
            rospy.wait_for_service("motor_off", timeout=5)
        except rospy.exceptions.ROSException as e:
            rospy.logerr("Service not found")
            # Bug fix: `Exception.message` does not exist in Python 3
            # (ROS Noetic), so `e.message` raised AttributeError here.
            rospy.signal_shutdown(str(e))
        else:
            rospy.on_shutdown(self._motor_off)

    def _callback_buttons(self, msg):
        # Cache the latest button states for the polling loops below.
        self._mouse_buttons = msg

    def _callback_imu(self, msg):
        # Cache the raw IMU sample and refresh the derived quantities.
        self._imu_data_raw = msg

        self._calculate_heading_angle(
            self._imu_data_raw.angular_velocity.z,
            self._imu_data_raw.header.stamp
        )

        self._filter_acceleration(self._imu_data_raw.linear_acceleration)

    def _motor_on(self):
        # Enable the motor driver via the raspimouse service.
        rospy.ServiceProxy("motor_on", Trigger).call()
        rospy.loginfo("motor_on")

    def _motor_off(self):
        # Disable the motor driver via the raspimouse service.
        rospy.ServiceProxy("motor_off", Trigger).call()
        rospy.loginfo("motor_off")

    def _calculate_heading_angle(self, omega_ref, timestamp):
        # Gyro scale factor (1.0 = trust the sensor reading as-is).
        ALPHA = 1.0

        # First sample: initialize the previous timestamp so the first
        # integration step spans zero time.
        if not self._prev_imu_timestamp:
            self._prev_imu_timestamp = timestamp

        # Integrate the bias-corrected angular velocity into the heading.
        omega = ALPHA * (omega_ref - self._omega_bias)
        diff_timestamp = timestamp - self._prev_imu_timestamp

        self._heading_angle += omega * diff_timestamp.to_sec()
        self._prev_imu_timestamp = timestamp

    def _filter_acceleration(self, acc):
        ALPHA = 0.1

        # Simple first-order low pass filter to suppress accelerometer
        # noise. NOTE: after the first call _prev_acc and _filtered_acc
        # alias the same Vector3, but each assignment reads the previous
        # value before overwriting it, so this still behaves as a
        # standard exponential moving average.
        self._filtered_acc.x = ALPHA * acc.x + (1.0 - ALPHA) * self._prev_acc.x
        self._filtered_acc.y = ALPHA * acc.y + (1.0 - ALPHA) * self._prev_acc.y
        self._filtered_acc.z = ALPHA * acc.z + (1.0 - ALPHA) * self._prev_acc.z
        self._prev_acc = self._filtered_acc

    def _gyro_calibration(self):
        """Estimate the gyro bias from stationary samples and reset the
        heading angle to 0 rad."""
        SAMPLE_NUM = 100
        WAIT_TIME = 0.01
        rospy.loginfo("Gyro Calibration")

        # Multisampling: only record a value when a new IMU message
        # (new header.seq) has arrived, to avoid duplicating samples.
        samples = []
        prev_imu_seq = 0
        for i in range(SAMPLE_NUM):
            if prev_imu_seq != self._imu_data_raw.header.seq:
                samples.append(self._imu_data_raw.angular_velocity.z)
                prev_imu_seq = self._imu_data_raw.header.seq
            rospy.sleep(WAIT_TIME)

        if samples:
            self._omega_bias = numpy.mean(samples)
        else:
            rospy.logwarn("No imu_data received.")

        # Reset variables for heading angle calculation
        self._heading_angle = 0.0
        self._prev_imu_timestamp = self._imu_data_raw.header.stamp

    def _angle_control(self, target_angle=0.0):
        # NOTE: PIDController.update(current, target) is intentionally
        # called with swapped arguments (target first), which negates the
        # PID output; SIGN = -1.0 compensates, so the published command is
        # effectively -K * (heading - target), steering toward the target.
        SIGN = -1.0

        cmdvel = Twist()
        cmdvel.angular.z = SIGN * self._omega_pid_controller.update(target_angle, self._heading_angle)

        self._pub_cmdvel.publish(cmdvel)

    def _keep_zero_radian(self):
        # Hold the heading at 0 rad.
        self._angle_control(0.0)

    def _rotation(self, start_angle=-math.pi*0.5, end_angle=math.pi*0.5):
        """Sweep the target angle back and forth between start_angle and
        end_angle, advancing 2 degrees per call."""
        ADD_ANGLE = math.radians(2)

        if start_angle > end_angle:
            rospy.logwarn("Set start_angle < end_angle.")
            return

        if self._increase_target_angle:
            self._target_angle += ADD_ANGLE
        else:
            self._target_angle -= ADD_ANGLE

        # Reverse direction when a sweep limit is reached.
        if self._target_angle >= end_angle:
            self._target_angle = end_angle
            self._increase_target_angle = False
        elif self._target_angle <= start_angle:
            self._target_angle = start_angle
            self._increase_target_angle = True

        self._angle_control(self._target_angle)

    def _beep_buzzer(self, freq, beep_time=0):
        # Sound the buzzer at `freq` Hz for `beep_time` seconds.
        self._pub_buzzer.publish(freq)
        rospy.sleep(beep_time)
        self._pub_buzzer.publish(0)

    def _beep_success(self):
        # Two short high-pitched beeps.
        self._beep_buzzer(1000, 0.1)
        rospy.sleep(0.1)
        self._beep_buzzer(1000, 0.1)

    def _beep_failure(self):
        # Four short low-pitched beeps.
        for i in range(4):
            self._beep_buzzer(500, 0.1)
            rospy.sleep(0.1)

    def _suggest_mode_from_buttons(self):
        """Return the mode requested by the pressed button, or _MODE_NONE.

        Blocks until the pressed button is released so a single press is
        not counted twice.
        """
        suggest = self._MODE_NONE
        if self._mouse_buttons.front:
            suggest = self._MODE_CALIBRATION

            # Wait for button release. Sleep instead of busy-spinning so
            # the subscriber callback can update the state without the
            # loop pegging a CPU core.
            while self._mouse_buttons.front:
                rospy.sleep(0.01)

        elif self._mouse_buttons.mid:
            suggest = self._MODE_KEEP_ZERO_RADIAN

            # wait for release of the button
            while self._mouse_buttons.mid:
                rospy.sleep(0.01)

        elif self._mouse_buttons.rear:
            suggest = self._MODE_ROTATION

            # wait for release of the button
            while self._mouse_buttons.rear:
                rospy.sleep(0.01)

        return suggest

    def _has_stop_signal(self):
        """Return True when any button is pressed or the body is tilted."""
        output = False

        # Any button has been pressed; wait for release before returning.
        if self._mouse_buttons.front or self._mouse_buttons.mid or self._mouse_buttons.rear:
            output = True
            while self._mouse_buttons.front or self._mouse_buttons.mid or self._mouse_buttons.rear:
                rospy.sleep(0.01)

        # The body has been rotated: the filtered z acceleration becomes
        # positive (presumably gravity flips sign relative to the IMU's
        # mounting orientation -- verify against the actual hardware).
        if self._filtered_acc.z > 0.0:
            output = True

        return output

    def _select_mode(self):
        """Poll the buttons and switch to the requested mode, if any."""
        suggest = self._suggest_mode_from_buttons()

        if suggest == self._MODE_CALIBRATION:
            rospy.loginfo("Calibration")
        elif suggest == self._MODE_KEEP_ZERO_RADIAN:
            rospy.loginfo("Keep zero radian")
        elif suggest == self._MODE_ROTATION:
            rospy.loginfo("Rotation")

        if suggest != self._MODE_NONE:
            self._current_mode = suggest
            self._beep_success()
            rospy.sleep(1.0)

    def update(self):
        """Run one iteration of the control loop (call periodically)."""
        if self._current_mode == self._MODE_NONE:
            self._select_mode()

        elif self._current_mode == self._MODE_CALIBRATION:
            # Let the robot settle before sampling the gyro.
            rospy.sleep(1)
            self._gyro_calibration()
            self._current_mode = self._MODE_NONE
            self._beep_success()

        else:
            # Angle-control modes need the motors enabled.
            if not self._has_motor_enabled:
                self._motor_on()
                self._has_motor_enabled = True

            if self._current_mode == self._MODE_KEEP_ZERO_RADIAN:
                self._keep_zero_radian()

            elif self._current_mode == self._MODE_ROTATION:
                self._rotation()

            # Any button press or a tilted body stops the control.
            if self._has_stop_signal():
                self._motor_off()
                self._has_motor_enabled = False
                self._target_angle = 0.0
                self._beep_failure()
                self._current_mode = self._MODE_NONE
296 |
def main():
    """Run the direction_control node, updating the controller at 60 Hz."""
    rospy.init_node('direction_control')

    controller = DirectionController()

    rate = rospy.Rate(60)
    while not rospy.is_shutdown():
        controller.update()
        rate.sleep()


if __name__ == '__main__':
    main()
310 |
--------------------------------------------------------------------------------
/scripts/joystick_control.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: UTF-8
3 |
4 | # Copyright 2020 RT Corporation
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 |
18 | import rospy
19 | import math
20 | from sensor_msgs.msg import Joy
21 | from std_msgs.msg import UInt16
22 | from geometry_msgs.msg import Twist
23 | from std_srvs.srv import Trigger
24 | from raspimouse_ros_2.msg import LightSensorValues
25 | from raspimouse_ros_2.msg import ButtonValues
26 | from raspimouse_ros_2.msg import LedValues
27 |
28 |
class JoyWrapper(object):
    """Gamepad teleoperation node for the Raspberry Pi Mouse.

    Maps sensor_msgs/Joy inputs to robot outputs: velocity commands
    (``cmd_vel``), buzzer notes (``buzzer``), LED indicators (``leds``),
    the ``motor_on``/``motor_off`` services, and an on-robot velocity-scale
    configuration mode.  Button/axis assignments come from private ROS
    parameters (see the config/joy_*.yml files).
    """

    def __init__(self):
        # Pressing both shutdown buttons simultaneously stops the node.
        self._BUTTON_SHUTDOWN_1 = rospy.get_param('~button_shutdown_1')
        self._BUTTON_SHUTDOWN_2 = rospy.get_param('~button_shutdown_2')

        self._BUTTON_MOTOR_ON = rospy.get_param('~button_motor_on')
        self._BUTTON_MOTOR_OFF = rospy.get_param('~button_motor_off')

        # Dead-man button plus the two axes used for velocity commands.
        self._BUTTON_CMD_ENABLE = rospy.get_param('~button_cmd_enable')
        self._AXIS_CMD_LINEAR_X = rospy.get_param('~axis_cmd_linear_x')
        self._AXIS_CMD_ANGULAR_Z = rospy.get_param('~axis_cmd_angular_z')

        # D-pad layout: some controllers report the d-pad as analog axes,
        # others as digital buttons (see _joy_dpad()).
        self._ANALOG_D_PAD = rospy.get_param('~analog_d_pad')
        self._D_PAD_UP = rospy.get_param('~d_pad_up')
        self._D_PAD_DOWN = rospy.get_param('~d_pad_down')
        self._D_PAD_LEFT = rospy.get_param('~d_pad_left')
        self._D_PAD_RIGHT = rospy.get_param('~d_pad_right')
        self._D_UP_IS_POSITIVE = rospy.get_param('~d_pad_up_is_positive')
        self._D_RIGHT_IS_POSITIVE = rospy.get_param('~d_pad_right_is_positive')

        # Buzzer mode: 8 inputs (4 d-pad + 4 buttons) mapped to a scale.
        self._BUTTON_BUZZER_ENABLE = rospy.get_param('~button_buzzer_enable')
        self._DPAD_BUZZER0 = rospy.get_param('~dpad_buzzer0')
        self._DPAD_BUZZER1 = rospy.get_param('~dpad_buzzer1')
        self._DPAD_BUZZER2 = rospy.get_param('~dpad_buzzer2')
        self._DPAD_BUZZER3 = rospy.get_param('~dpad_buzzer3')
        self._BUTTON_BUZZER4 = rospy.get_param('~button_buzzer4')
        self._BUTTON_BUZZER5 = rospy.get_param('~button_buzzer5')
        self._BUTTON_BUZZER6 = rospy.get_param('~button_buzzer6')
        self._BUTTON_BUZZER7 = rospy.get_param('~button_buzzer7')

        self._BUTTON_SENSOR_SOUND_EN = rospy.get_param('~button_sensor_sound_en')
        self._BUTTON_CONFIG_ENABLE = rospy.get_param('~button_config_enable')

        # for _joy_velocity_config()
        self._MAX_VEL_LINEAR_X = 2.0  # m/s
        self._MAX_VEL_ANGULAR_Z = 2.0 * math.pi  # rad/s
        self._DEFAULT_VEL_LINEAR_X = 0.5  # m/s
        self._DEFAULT_VEL_ANGULAR_Z = 1.0 * math.pi  # rad/s

        self._joy_msg = None
        self._lightsensor = LightSensorValues()
        self._mouse_buttons = ButtonValues()
        # The *_has_value flags let each mode publish one final zero
        # message when its enable button is released, instead of
        # publishing continuously while the mode is inactive.
        self._cmdvel_has_value = False
        self._buzzer_has_value = False
        self._sensor_sound_has_value = False
        self._vel_linear_x = self._DEFAULT_VEL_LINEAR_X
        self._vel_angular_z = self._DEFAULT_VEL_ANGULAR_Z

        self._pub_cmdvel = rospy.Publisher('cmd_vel', Twist, queue_size=1)
        self._pub_buzzer = rospy.Publisher('buzzer', UInt16, queue_size=1)
        self._pub_leds = rospy.Publisher('leds', LedValues, queue_size=1)
        self._sub_joy = rospy.Subscriber('joy', Joy, self._callback_joy, queue_size=1)
        self._sub_lightsensor = rospy.Subscriber(
            'lightsensors', LightSensorValues, self._callback_lightsensor, queue_size=1)
        self._sub_buttons = rospy.Subscriber(
            'buttons', ButtonValues, self._callback_buttons, queue_size=1)

        try:
            rospy.wait_for_service("motor_on", timeout=5)
            rospy.wait_for_service("motor_off", timeout=5)
        except rospy.exceptions.ROSException as e:
            rospy.logerr("Service not found")
            # Fix: ROSException has no ``message`` attribute on Python 3
            # (ROS Noetic), so ``e.message`` raised AttributeError here.
            rospy.signal_shutdown(str(e))
        else:
            rospy.on_shutdown(self._motor_off)
            self._motor_on()

    def _callback_joy(self, msg):
        # Latest gamepad state; consumed by update().
        self._joy_msg = msg

    def _callback_lightsensor(self, msg):
        self._lightsensor = msg

    def _callback_buttons(self, msg):
        # On-board switch states (front/mid/rear) of the Raspberry Pi Mouse.
        self._mouse_buttons = msg

    def _motor_on(self):
        """Enable the motors via the motor_on service."""
        rospy.ServiceProxy("motor_on", Trigger).call()
        rospy.loginfo("motor_on")

    def _motor_off(self):
        """Disable the motors via the motor_off service."""
        rospy.ServiceProxy("motor_off", Trigger).call()
        rospy.loginfo("motor_off")

    def _joy_dpad(self, joy_msg, target_pad, positive_on):
        """Return truthy while the given d-pad direction is pressed.

        The d-pad of the F710 controller is reported as analog axes while
        the Dualshock3's is reported as digital buttons, so both layouts
        are supported (selected by the ~analog_d_pad parameter).
        """
        if self._ANALOG_D_PAD:
            if positive_on:
                return joy_msg.axes[target_pad] > 0
            else:
                return joy_msg.axes[target_pad] < 0
        else:
            return joy_msg.buttons[target_pad]

    def _dpad_up(self, joy_msg):
        positive_on = self._D_UP_IS_POSITIVE
        return self._joy_dpad(joy_msg, self._D_PAD_UP, positive_on)

    def _dpad_down(self, joy_msg):
        positive_on = not self._D_UP_IS_POSITIVE
        return self._joy_dpad(joy_msg, self._D_PAD_DOWN, positive_on)

    def _dpad_left(self, joy_msg):
        positive_on = not self._D_RIGHT_IS_POSITIVE
        return self._joy_dpad(joy_msg, self._D_PAD_LEFT, positive_on)

    def _dpad_right(self, joy_msg):
        positive_on = self._D_RIGHT_IS_POSITIVE
        return self._joy_dpad(joy_msg, self._D_PAD_RIGHT, positive_on)

    def _dpad(self, joy_msg, target):
        """Dispatch a d-pad query by direction name ("up"/"down"/"left"/"right")."""
        if target == "up":
            return self._dpad_up(joy_msg)
        elif target == "down":
            return self._dpad_down(joy_msg)
        elif target == "left":
            return self._dpad_left(joy_msg)
        elif target == "right":
            return self._dpad_right(joy_msg)
        else:
            return False

    def _joy_shutdown(self, joy_msg):
        """Stop all outputs and shut the node down when both shutdown buttons are held."""
        if joy_msg.buttons[self._BUTTON_SHUTDOWN_1] and\
           joy_msg.buttons[self._BUTTON_SHUTDOWN_2]:

            # Publish zero-valued messages so the robot stops cleanly.
            self._pub_cmdvel.publish(Twist())
            self._pub_buzzer.publish(UInt16())
            self._pub_leds.publish(LedValues())
            self._motor_off()
            rospy.signal_shutdown('finish')

    def _joy_motor_onoff(self, joy_msg):
        """Switch the motors on/off with the assigned buttons."""
        if joy_msg.buttons[self._BUTTON_MOTOR_ON]:
            self._motor_on()

        if joy_msg.buttons[self._BUTTON_MOTOR_OFF]:
            self._motor_off()

    def _joy_cmdvel(self, joy_msg):
        """Publish cmd_vel scaled by the configured velocities while the
        dead-man button is held; publish a single zero Twist on release."""
        cmdvel = Twist()
        if joy_msg.buttons[self._BUTTON_CMD_ENABLE]:
            cmdvel.linear.x = self._vel_linear_x * joy_msg.axes[self._AXIS_CMD_LINEAR_X]
            cmdvel.angular.z = self._vel_angular_z * joy_msg.axes[self._AXIS_CMD_ANGULAR_Z]
            rospy.loginfo(cmdvel)
            self._pub_cmdvel.publish(cmdvel)

            self._cmdvel_has_value = True
        else:
            if self._cmdvel_has_value:
                self._pub_cmdvel.publish(cmdvel)
                self._cmdvel_has_value = False

    def _joy_buzzer_freq(self, joy_msg):
        """Play one note of a musical scale while the buzzer-enable button
        is held; publish a single 0 Hz message on release."""
        freq = UInt16()
        buttons = [
            self._dpad(joy_msg, self._DPAD_BUZZER0),
            self._dpad(joy_msg, self._DPAD_BUZZER1),
            self._dpad(joy_msg, self._DPAD_BUZZER2),
            self._dpad(joy_msg, self._DPAD_BUZZER3),
            joy_msg.buttons[self._BUTTON_BUZZER4],
            joy_msg.buttons[self._BUTTON_BUZZER5],
            joy_msg.buttons[self._BUTTON_BUZZER6],
            joy_msg.buttons[self._BUTTON_BUZZER7],
        ]
        # buzzer frequency Hz
        SCALES = [
            523, 587, 659, 699,
            784, 880, 987, 1046
        ]

        if joy_msg.buttons[self._BUTTON_BUZZER_ENABLE]:
            # First pressed input wins.
            for i, button in enumerate(buttons):
                if button:
                    freq.data = SCALES[i]
                    break
            self._pub_buzzer.publish(freq)
            rospy.loginfo(freq)

            self._buzzer_has_value = True
        else:
            if self._buzzer_has_value:
                self._pub_buzzer.publish(freq)
                self._buzzer_has_value = False

    def _joy_lightsensor_sound(self, joy_msg):
        """Sonify the light sensors: buzzer frequency is the sum of the
        (clamped non-negative) four sensor readings while enabled."""
        freq = UInt16()
        if joy_msg.buttons[self._BUTTON_SENSOR_SOUND_EN]:
            rospy.loginfo(self._lightsensor)
            freq.data += self._positive(self._lightsensor.left_side)
            freq.data += self._positive(self._lightsensor.left_forward)
            freq.data += self._positive(self._lightsensor.right_forward)
            freq.data += self._positive(self._lightsensor.right_side)

            self._pub_buzzer.publish(freq)
            self._sensor_sound_has_value = True
        else:
            if self._sensor_sound_has_value:
                self._pub_buzzer.publish(freq)
                self._sensor_sound_has_value = False

    def _positive(self, value):
        """Clamp a sensor reading to be non-negative."""
        if value < 0:
            return 0
        else:
            return value

    def _joy_velocity_config(self, joy_msg):
        """Adjust the velocity scales with the robot's on-board switches
        while the config-enable button is held.

        front switch: increase, rear switch: decrease, mid switch: reset.
        Each change is acknowledged with a distinct buzzer beep.
        """
        ADD_VEL_LINEAR_X = 0.1  # m/s
        ADD_VEL_ANGULAR_Z = 0.1 * math.pi  # rad/s
        BUZZER_FREQ_ADD = 880  # Hz
        BUZZER_FREQ_SUB = 440  # Hz
        BUZZER_FREQ_RESET = 660  # Hz
        BUZZER_BEEP_TIME = 0.2  # sec
        RELEASE_POLL_TIME = 0.01  # sec

        if joy_msg.buttons[self._BUTTON_CONFIG_ENABLE]:
            if self._mouse_buttons.front:
                self._vel_linear_x = self._config_velocity(
                    self._vel_linear_x, ADD_VEL_LINEAR_X,
                    0, self._MAX_VEL_LINEAR_X)
                self._vel_angular_z = self._config_velocity(
                    self._vel_angular_z, ADD_VEL_ANGULAR_Z,
                    0, self._MAX_VEL_ANGULAR_Z)

                self._beep_buzzer(BUZZER_FREQ_ADD, BUZZER_BEEP_TIME)
                # Wait for the switch to be released.  Fix: sleep instead
                # of a `pass` busy-wait that pegged a CPU core; the flag
                # is updated by a subscriber callback in another thread.
                while self._mouse_buttons.front:
                    rospy.sleep(RELEASE_POLL_TIME)
            elif self._mouse_buttons.rear:
                self._vel_linear_x = self._config_velocity(
                    self._vel_linear_x, -ADD_VEL_LINEAR_X,
                    0, self._MAX_VEL_LINEAR_X)
                self._vel_angular_z = self._config_velocity(
                    self._vel_angular_z, -ADD_VEL_ANGULAR_Z,
                    0, self._MAX_VEL_ANGULAR_Z)

                self._beep_buzzer(BUZZER_FREQ_SUB, BUZZER_BEEP_TIME)
                # wait for release the button
                while self._mouse_buttons.rear:
                    rospy.sleep(RELEASE_POLL_TIME)
            elif self._mouse_buttons.mid:
                self._vel_linear_x = self._DEFAULT_VEL_LINEAR_X
                self._vel_angular_z = self._DEFAULT_VEL_ANGULAR_Z

                self._beep_buzzer(BUZZER_FREQ_RESET, BUZZER_BEEP_TIME)
                # wait for release the button
                while self._mouse_buttons.mid:
                    rospy.sleep(RELEASE_POLL_TIME)

            rospy.loginfo(
                "linear_x:" + str(self._vel_linear_x) +
                ", angular_z:" + str(self._vel_angular_z)
            )

    def _config_velocity(self, current, add, lowerlimit, upperlimit):
        """Return current + add clamped to [lowerlimit, upperlimit]."""
        output = current + add

        if output < lowerlimit:
            output = lowerlimit
        if output > upperlimit:
            output = upperlimit

        return output

    def _beep_buzzer(self, freq, beep_time=0):
        """Sound the buzzer at freq Hz for beep_time seconds, then stop."""
        self._pub_buzzer.publish(freq)
        rospy.sleep(beep_time)
        self._pub_buzzer.publish(0)

    def _joy_leds(self, joy_msg):
        """Mirror the four mode-enable buttons on the robot's LEDs."""
        led_values = LedValues()

        if joy_msg.buttons[self._BUTTON_CMD_ENABLE]:
            led_values.right_side = True

        if joy_msg.buttons[self._BUTTON_BUZZER_ENABLE]:
            led_values.right_forward = True

        if joy_msg.buttons[self._BUTTON_SENSOR_SOUND_EN]:
            led_values.left_forward = True

        if joy_msg.buttons[self._BUTTON_CONFIG_ENABLE]:
            led_values.left_side = True

        self._pub_leds.publish(led_values)

    def update(self):
        """Process the latest joystick message once; no-op until one arrives."""
        if self._joy_msg is None:
            return

        self._joy_motor_onoff(self._joy_msg)
        self._joy_cmdvel(self._joy_msg)
        self._joy_buzzer_freq(self._joy_msg)
        self._joy_lightsensor_sound(self._joy_msg)
        self._joy_velocity_config(self._joy_msg)
        self._joy_leds(self._joy_msg)

        # Checked last so a shutdown request stops all outputs first.
        self._joy_shutdown(self._joy_msg)
329 |
330 |
def main():
    """Run the joystick_control node, polling the gamepad at 60 Hz."""
    rospy.init_node('joystick_control')

    wrapper = JoyWrapper()

    rate = rospy.Rate(60)
    while not rospy.is_shutdown():
        wrapper.update()
        rate.sleep()


if __name__ == '__main__':
    main()
345 |
--------------------------------------------------------------------------------
/scripts/line_follower.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: UTF-8
3 |
4 | # Copyright 2020 RT Corporation
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 |
18 | import rospy
19 | import math
20 | from std_msgs.msg import UInt16
21 | from geometry_msgs.msg import Twist
22 | from std_srvs.srv import Trigger
23 | from raspimouse_ros_2.msg import LightSensorValues
24 | from raspimouse_ros_2.msg import ButtonValues
25 | from raspimouse_ros_2.msg import LedValues
26 |
27 |
class LineFollower(object):
    """Line-following node for the Raspberry Pi Mouse.

    The user samples the line and the field brightness with the robot's
    on-board switches, thresholds are derived from the two samples, and
    the robot then steers to keep the line under its sensors.

    Switch mapping (see update()):
      front (SW0): start/stop following, mid (SW1): sample the line,
      rear (SW2): sample the field.
    """

    def __init__(self):
        # Template dict: one entry per line-following sensor.
        self._SENSORS = {"left": 0, "mid_left": 0, "mid_right": 0, "right": 0}
        self._sensor_line_values = dict(self._SENSORS)   # readings on the line
        self._sensor_field_values = dict(self._SENSORS)  # readings on the field
        self._line_thresholds = dict(self._SENSORS)      # detection thresholds
        self._line_is_detected_by_sensor = dict(self._SENSORS)
        self._present_sensor_values = dict(self._SENSORS)

        self._line_values_are_sampled = False
        self._field_values_are_sampled = False
        self._can_publish_cmdvel = False

        self._mouse_buttons = ButtonValues()

        self._pub_cmdvel = rospy.Publisher('cmd_vel', Twist, queue_size=1)
        self._pub_buzzer = rospy.Publisher('buzzer', UInt16, queue_size=1)
        self._pub_leds = rospy.Publisher('leds', LedValues, queue_size=1)

        self._sub_lightsensor = rospy.Subscriber(
            'lightsensors', LightSensorValues, self._callback_lightsensor, queue_size=1)
        self._sub_buttons = rospy.Subscriber(
            'buttons', ButtonValues, self._callback_buttons, queue_size=1)

        try:
            rospy.wait_for_service("motor_on", timeout=5)
            rospy.wait_for_service("motor_off", timeout=5)
        except rospy.exceptions.ROSException as e:
            rospy.logerr("Service not found")
            # Fix: ROSException has no ``message`` attribute on Python 3
            # (ROS Noetic), so ``e.message`` raised AttributeError here.
            rospy.signal_shutdown(str(e))
        else:
            rospy.on_shutdown(self._on_shutdown)

    def _on_shutdown(self):
        """Turn off the LEDs and motors when the node exits."""
        self._pub_leds.publish(LedValues())
        self._motor_off()

    def _motor_on(self):
        rospy.ServiceProxy("motor_on", Trigger).call()
        rospy.loginfo("motor_on")

    def _motor_off(self):
        rospy.ServiceProxy("motor_off", Trigger).call()
        rospy.loginfo("motor_off")

    def _callback_buttons(self, msg):
        self._mouse_buttons = msg

    def _callback_lightsensor(self, msg):
        # The orders of the front distance sensors and of the
        # line-following sensors are not the same.
        self._present_sensor_values["left"] = msg.right_forward
        self._present_sensor_values["mid_left"] = msg.right_side
        self._present_sensor_values["mid_right"] = msg.left_side
        self._present_sensor_values["right"] = msg.left_forward

        if self._sampling_is_done():
            self._update_line_detection()

    def _update_line_detection(self):
        """Refresh the per-sensor line-detected flags from the thresholds."""
        for key in self._SENSORS:
            is_positive = self._present_sensor_values[key] > self._line_thresholds[key]

            # A sensor sees the line when its reading is on the same side
            # of the threshold as the sampled line brightness.
            if self._line_is_bright() == is_positive:
                self._line_is_detected_by_sensor[key] = True
            else:
                self._line_is_detected_by_sensor[key] = False

    def _beep_buzzer(self, freq, beep_time=0):
        """Sound the buzzer at freq Hz for beep_time seconds, then stop."""
        self._pub_buzzer.publish(freq)
        rospy.sleep(beep_time)
        self._pub_buzzer.publish(0)

    def _beep_start(self):
        # One long beep: sampling is starting.
        self._beep_buzzer(1000, 0.5)

    def _beep_success(self):
        # Two short beeps: operation succeeded.
        self._beep_buzzer(1000, 0.1)
        rospy.sleep(0.1)
        self._beep_buzzer(1000, 0.1)

    def _beep_failure(self):
        # Four low beeps: operation refused/failed.
        for _ in range(4):
            self._beep_buzzer(500, 0.1)
            rospy.sleep(0.1)

    def _sampling_is_done(self):
        """Return True when both line and field values have been sampled."""
        if self._line_values_are_sampled and self._field_values_are_sampled:
            return True
        else:
            return False

    def _median(self, sensor1, sensor2):
        """Return the midpoint of two sensor readings."""
        diff = math.fabs(sensor1 - sensor2)
        if sensor1 < sensor2:
            return sensor1 + diff * 0.5
        else:
            return sensor2 + diff * 0.5

    def _line_is_bright(self):
        """Return True when the sampled line is brighter than the field."""
        SAMPLE = "right"
        if self._sensor_line_values[SAMPLE] > self._sensor_field_values[SAMPLE]:
            return True
        else:
            return False

    def _set_line_thresholds(self):
        """Set each sensor's threshold to the midpoint of its line and
        field samples; no-op until both samplings are done."""
        if not self._sampling_is_done():
            return

        for key in self._SENSORS:
            self._line_thresholds[key] = self._median(
                self._sensor_line_values[key],
                self._sensor_field_values[key])

        rospy.loginfo("thresholds:" + str(self._line_thresholds))

    def _get_multisampled_sensor_values(self):
        """Average NUM_OF_SAMPLES readings per sensor to reduce noise."""
        NUM_OF_SAMPLES = 10
        WAIT_TIME = 0.1  # sec

        # Multisampling
        sensor_values = dict(self._SENSORS)
        for _ in range(NUM_OF_SAMPLES):
            for key in self._SENSORS:
                sensor_values[key] += self._present_sensor_values[key]
            rospy.sleep(WAIT_TIME)

        for key in self._SENSORS:
            sensor_values[key] /= NUM_OF_SAMPLES

        return sensor_values

    def _line_sampling(self):
        """Sample the sensors while the robot sits on the line."""
        self._beep_start()
        self._sensor_line_values = self._get_multisampled_sensor_values()
        self._beep_success()

        rospy.loginfo(self._sensor_line_values)
        self._line_values_are_sampled = True
        self._set_line_thresholds()

    def _field_sampling(self):
        """Sample the sensors while the robot sits on the field (off the line)."""
        self._beep_start()
        self._sensor_field_values = self._get_multisampled_sensor_values()
        self._beep_success()

        rospy.loginfo(self._sensor_field_values)
        self._field_values_are_sampled = True
        self._set_line_thresholds()

    def _indicate_line_detections(self):
        """Mirror the per-sensor detection flags on the robot's LEDs."""
        led_values = LedValues()

        led_values.right_side = self._line_is_detected_by_sensor["right"]
        led_values.right_forward = self._line_is_detected_by_sensor["mid_right"]
        led_values.left_forward = self._line_is_detected_by_sensor["mid_left"]
        led_values.left_side = self._line_is_detected_by_sensor["left"]
        self._pub_leds.publish(led_values)

    def _publish_cmdvel_for_line_following(self):
        """Drive forward while steering toward whichever sensors see the line.

        Stops (zero Twist) when all sensors or no sensor detects the line.
        """
        VEL_LINER_X = 0.08  # m/s
        VEL_ANGULAR_Z = 0.8  # rad/s
        LOW_VEL_ANGULAR_Z = 0.5  # rad/s

        cmd_vel = Twist()
        if not all(self._line_is_detected_by_sensor.values()) and\
                any(self._line_is_detected_by_sensor.values()):
            cmd_vel.linear.x = VEL_LINER_X

            # Outer sensors steer harder than inner ones.
            if self._line_is_detected_by_sensor["left"]:
                cmd_vel.angular.z += VEL_ANGULAR_Z

            if self._line_is_detected_by_sensor["right"]:
                cmd_vel.angular.z -= VEL_ANGULAR_Z

            if self._line_is_detected_by_sensor["mid_left"]:
                cmd_vel.angular.z += LOW_VEL_ANGULAR_Z

            if self._line_is_detected_by_sensor["mid_right"]:
                cmd_vel.angular.z -= LOW_VEL_ANGULAR_Z

        self._pub_cmdvel.publish(cmd_vel)

    def update(self):
        """Handle the on-board switches and, when enabled, follow the line."""
        if self._mouse_buttons.front:  # SW0 of Raspberry Pi Mouse
            if self._sampling_is_done() and self._can_publish_cmdvel is False:
                rospy.loginfo("start following")
                self._motor_on()
                self._beep_success()
                self._can_publish_cmdvel = True
            else:
                rospy.loginfo("stop following")
                self._motor_off()
                self._beep_failure()
                self._can_publish_cmdvel = False

        elif self._mouse_buttons.mid:  # SW1
            rospy.loginfo("line sampling:")
            self._line_sampling()

        elif self._mouse_buttons.rear:  # SW2
            rospy.loginfo("field sampling:")
            self._field_sampling()

        if self._can_publish_cmdvel:
            self._publish_cmdvel_for_line_following()

        self._indicate_line_detections()
236 |
237 |
def main():
    """Run the line_follower node, updating the controller at 60 Hz."""
    rospy.init_node('line_follower')

    follower = LineFollower()

    rate = rospy.Rate(60)
    while not rospy.is_shutdown():
        follower.update()
        rate.sleep()


if __name__ == '__main__':
    main()
252 |
--------------------------------------------------------------------------------
/scripts/object_tracking.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 |
4 | # Copyright 2020 RT Corporation
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 |
18 | import rospy
19 | import cv2
20 | import math
21 | import numpy as np
22 | import copy
23 | from sensor_msgs.msg import Image
24 | from cv_bridge import CvBridge, CvBridgeError
25 | from geometry_msgs.msg import Twist
26 | from std_srvs.srv import Trigger
27 |
28 |
class ObjectTracker():
    """Color-blob tracking node.

    Detects the largest orange region in the camera image, then drives the
    robot to keep the blob centered and at roughly its initial apparent
    size (forward when it shrinks, backward when it grows).
    """

    def __init__(self):
        self._cv_bridge = CvBridge()
        self._captured_image = None
        self._object_pixels = 0  # Maximum area detected in the current image[pixel]
        self._object_pixels_default = 0  # Maximum area detected from the first image[pixel]
        self._point_of_centroid = None

        self._pub_binary_image = rospy.Publisher("binary", Image, queue_size=1)
        # Fix: attribute renamed from the typo ``_pub_pbject_image``.
        self._pub_object_image = rospy.Publisher("object", Image, queue_size=1)
        self._pub_cmdvel = rospy.Publisher("/cmd_vel", Twist, queue_size=1)

        self._sub_image = rospy.Subscriber("/cv_camera/image_raw", Image, self._image_callback)

        rospy.wait_for_service("/motor_on")
        rospy.wait_for_service("/motor_off")
        rospy.on_shutdown(rospy.ServiceProxy("/motor_off", Trigger).call)
        rospy.ServiceProxy("/motor_on", Trigger).call()

    def _image_callback(self, img):
        """Store the latest camera frame as a BGR OpenCV image."""
        try:
            self._captured_image = self._cv_bridge.imgmsg_to_cv2(img, "bgr8")
        except CvBridgeError as e:
            rospy.logerr(e)

    def _pixels(self, cv_image):
        """Return the total number of pixels in the image."""
        return cv_image.shape[0] * cv_image.shape[1]

    def _object_is_detected(self):
        # Lower limit of the ratio of the detected area to the screen.
        # Object tracking is not performed below this ratio.
        LOWER_LIMIT = 0.01

        if self._captured_image is not None:
            object_per_image = self._object_pixels / self._pixels(self._captured_image)
            return object_per_image > LOWER_LIMIT
        else:
            return False

    def _object_pixels_ratio(self):
        """Return the detected-area change vs. the first detection,
        normalized by the image size (0 when no image has arrived)."""
        if self._captured_image is not None:
            diff_pixels = self._object_pixels - self._object_pixels_default
            return diff_pixels / self._pixels(self._captured_image)
        else:
            return 0

    def _object_is_bigger_than_default(self):
        return self._object_pixels_ratio() > 0.01

    def _object_is_smaller_than_default(self):
        return self._object_pixels_ratio() < -0.01

    def _set_color_orange(self):
        # [H(0~180), S(0~255), V(0~255)]
        min_hsv_orange = np.array([15, 200, 80])
        max_hsv_orange = np.array([20, 255, 255])
        return min_hsv_orange, max_hsv_orange

    def _set_color_green(self):
        min_hsv_green = np.array([60, 60, 40])
        max_hsv_green = np.array([70, 255, 255])
        return min_hsv_green, max_hsv_green

    def _set_color_blue(self):
        min_hsv_blue = np.array([105, 90, 40])
        max_hsv_blue = np.array([120, 255, 255])
        return min_hsv_blue, max_hsv_blue

    def _extract_object_in_binary(self, cv_image):
        """Return a binary mask of the tracked color, or None without input."""
        if cv_image is None:
            return None

        min_hsv, max_hsv = self._set_color_orange()
        # min_hsv, max_hsv = self._set_color_green()
        # min_hsv, max_hsv = self._set_color_blue()

        hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)
        binary = cv2.inRange(hsv, min_hsv, max_hsv)
        # Morphology: close small holes in the mask.
        kernel = np.ones((5, 5), np.uint8)
        binary = cv2.morphologyEx(binary, cv2.MORPH_CLOSE, kernel, iterations=2)
        return binary

    def _calibrate_object_pixels_default(self):
        # Remember the first non-zero detection as the reference size.
        if self._object_pixels_default == 0 and self._object_pixels != 0:
            self._object_pixels_default = self._object_pixels

    def _extract_biggest_contour(self, binary_img):
        """Return the largest contour in the mask, or False if none found."""
        biggest_contour_index = False
        biggest_contour_area = 0
        contours, _ = cv2.findContours(
            binary_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        for i, cnt in enumerate(contours):
            area = cv2.contourArea(cnt)
            if biggest_contour_area < area:
                biggest_contour_area = area
                biggest_contour_index = i

        if biggest_contour_index is False:
            return False
        else:
            return contours[biggest_contour_index]

    def _calculate_centroid_point(self, contour):
        """Return the contour's centroid (x, y), or False when not detected."""
        point = False
        if self._object_is_detected():
            M = cv2.moments(contour)
            # Guard against a degenerate contour with zero area.
            if M['m00'] != 0:
                centroid_x = int(M['m10'] / M['m00'])
                centroid_y = int(M['m01'] / M['m00'])
                point = (centroid_x, centroid_y)

        return point

    def _draw_contour(self, input_image, contour):
        return cv2.drawContours(input_image, [contour], 0, (0, 255, 0), 5)

    def _draw_centroid(self, input_image, point_centroid):
        return cv2.circle(input_image, point_centroid, 15, (255, 0, 0), thickness=-1)

    def _monitor(self, img, pub):
        """Publish a mono8 or bgr8 image for debugging; ignore other shapes."""
        if img.ndim == 2:
            pub.publish(self._cv_bridge.cv2_to_imgmsg(img, "mono8"))
        elif img.ndim == 3:
            pub.publish(self._cv_bridge.cv2_to_imgmsg(img, "bgr8"))
        else:
            pass

    def _rotation_velocity(self):
        """Return an angular velocity proportional to the centroid's
        horizontal offset from the image center (0 when undetected)."""
        VELOCITY = 0.25 * math.pi
        if not self._object_is_detected() or self._point_of_centroid is None:
            return 0.0

        half_width = self._captured_image.shape[1] / 2.0
        pos_x_rate = (half_width - self._point_of_centroid[0]) / half_width
        rot_vel = pos_x_rate * VELOCITY
        return rot_vel

    def image_processing(self):
        """Detect the tracked object in the latest frame and publish
        the binary mask and an annotated image."""
        object_image = copy.deepcopy(self._captured_image)
        object_binary_img = self._extract_object_in_binary(self._captured_image)

        if object_binary_img is not None:
            biggest_contour = self._extract_biggest_contour(object_binary_img)
            if biggest_contour is not False:
                self._object_pixels = cv2.contourArea(biggest_contour)
                self._calibrate_object_pixels_default()

                object_image = self._draw_contour(
                    object_image, biggest_contour)

                point = self._calculate_centroid_point(biggest_contour)
                if point is not False:
                    self._point_of_centroid = point
                    object_image = self._draw_centroid(object_image, point)

            self._monitor(object_binary_img, self._pub_binary_image)
            self._monitor(object_image, self._pub_object_image)

    def control(self):
        """Publish a cmd_vel that keeps the object centered and at its
        initial apparent size."""
        cmd_vel = Twist()
        if self._object_is_detected():
            # Move backward and forward by difference from default area
            if self._object_is_smaller_than_default():
                cmd_vel.linear.x = 0.1
                print("forward")
            elif self._object_is_bigger_than_default():
                cmd_vel.linear.x = -0.1
                print("backward")
            else:
                cmd_vel.linear.x = 0
                print("stay")
            cmd_vel.angular.z = self._rotation_velocity()
        self._pub_cmdvel.publish(cmd_vel)
203 |
204 |
if __name__ == '__main__':
    rospy.init_node('object_tracking')
    # Give the camera driver time to start publishing before subscribing.
    rospy.sleep(3.)
    tracker = ObjectTracker()

    loop_rate = rospy.Rate(60)
    # One initial sleep so the first frame can arrive before processing.
    loop_rate.sleep()
    while not rospy.is_shutdown():
        tracker.image_processing()
        tracker.control()
        loop_rate.sleep()
216 |
--------------------------------------------------------------------------------