├── .gitignore
├── .travis.yml
├── CMakeLists.txt
├── Dockerfile
├── LICENSE
├── OAK_Install.sh
├── README.md
├── cfg
└── gravl_cfg.cfg
├── config
├── default_behaviors.yaml
├── hind_brain_config.yaml
└── tractor_groundtruth.yaml
├── gravl.rosinstall
├── images
├── 170607_RhinoAndTrailer.JPG
├── 170721_RosSerialSetupPic.png
├── 170721_RosSerialSetupPic2.png
├── 170808_IonmcDocs_1.jpg
├── 170808_RoboClawDocs_1.jpeg
├── 170808_SteeringPotDocs_1.jpeg
├── 170808_VelocityPotDocs_1 (1).jpeg
├── 171004_LidarDocImage_1.png
├── 171031ackermann_drive_code.png
├── 171216_tractordemo_3.jpg
├── Capture.PNG
├── DownLidarConfig.png
├── FrontLidarConfig.png
├── LidarCodeVar.jpg
├── bestgabor.png
├── lidar_connector.png
├── snowcat_wiringdiagram_full.png
└── worstgabor.png
├── launch
├── README.md
├── base_station.launch
├── bringup.launch
├── bringup_min.launch
├── demos.launch
├── ekf.launch
├── hindbrain.launch
├── ir.launch
├── lidar.launch
├── lidarfollow.launch
├── localization.launch
├── mainstate.launch
├── rtk.launch
├── safety.launch
├── teleop.launch
├── telewaypoint.launch
└── waypoint.launch
├── msg
├── Hemisphere.msg
├── ImuSafety.msg
├── RoboClawStats.msg
└── TwistLabeled.msg
├── package.xml
├── params
└── ekf_params.yaml
├── rviz
├── demo.rviz
└── settings.rviz
├── scripts
├── BackOnTrack.py
├── ConvertToAckermann.py
├── LidarCodeBasic.py
├── ObstacleDetector2D.py
├── PointFollower.py
├── README.md
├── RecordPoints.py
├── TractorOdom.py
├── circle_drive.py
├── gps.py
├── gps_navigation.py
├── gps_navigation_node.py
├── p2p_output.txt
├── road_recognition.py
├── test_imu.py
└── test_pointgrey.py
├── setup.sh
├── src
├── CMakeLists.txt
├── Camera
│ ├── Camera.cpp
│ └── Camera.h
├── DriveState
│ ├── DriveState.cpp
│ └── DriveState.h
├── Hemisphere
│ ├── Hemisphere.cpp
│ └── Hemisphere.h
├── HindBrain
│ ├── .gitignore
│ ├── Estop.cpp
│ ├── Estop.h
│ ├── HindBrain.h
│ ├── HindBrain.ino
│ ├── Makefile
│ ├── SoftSwitch.cpp
│ ├── SoftSwitch.h
│ └── lib
├── ImuBase
│ ├── ImuBase.cpp
│ └── ImuBase.h
├── README.md
├── Watchdog
│ ├── Watchdog.cpp
│ └── Watchdog.h
├── arm_drone.cpp
├── imu_safety
│ ├── ImuSafety.cpp
│ └── ImuSafety.h
├── offb_node.cpp
├── road_detection.cpp
└── snowcatv2
│ ├── motion.cpp
│ ├── motion.h
│ ├── odometry.cpp
│ ├── odometry.h
│ └── snowcatv2.ino
├── teensy_patch
├── core_pins.h
└── pins_teensy.c
└── udev_rules
├── 49-teensy.rules
└── 99-piksi.rules
/.gitignore:
--------------------------------------------------------------------------------
1 | # Prerequisites
2 | *.d
3 |
4 | # Compiled Object files
5 | *.slo
6 | *.lo
7 | *.o
8 | *.obj
9 |
10 | # Precompiled Headers
11 | *.gch
12 | *.pch
13 |
14 | # Compiled Dynamic libraries
15 | *.so
16 | *.dylib
17 | *.dll
18 |
19 | # Fortran module files
20 | *.mod
21 | *.smod
22 |
23 | # Compiled Static libraries
24 | *.lai
25 | *.la
26 | *.a
27 | *.lib
28 |
29 | # Executables
30 | *.exe
31 | *.out
32 | *.app
33 | .pioenvs
34 | .piolibdeps
35 | .clang_complete
36 | .gcc-flags.json
37 | *.ini
38 |
39 | *.clang_complete
40 | *.json
41 | /src/tractor/.pioenvs
42 |
43 | # Vim swap files
44 | *.sw*
45 |
46 | # CMake build artifacts
47 | scripts/CMakeCache.txt
48 | *.cmake
49 | scripts/Makefile
50 | scripts/CMakeFiles/
51 |
52 | # Ros Bag files and folder
53 | *.bag
54 | bags/
55 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: required
2 |
3 | services:
4 | - docker
5 |
6 | script:
7 | docker build -t gravl .
8 |
9 | notifications:
10 | slack:
11 | secure: WWDVIbPCm6qus8Tk1F3Q1f7ItLHcmFqQMPeCZC0JDIHRyozreBntOWaEu65WIqb51AJNj2hWJ9omX25hkkFDGlXmZqseJJZvKHIGY2v75HNyRgh4Ae/cUFoHs9EDOKwk/a9vlhw91yCyslpBOHhn3atHDWke0VQBjM3wgtcPFQ5vS9LEkeRPeWr1e8FHvwuq/lc+8de+2ka4mJAMnGEu4GUf+erwTWXjt+nknx/1P8HULdbPDzvnggUjACEuBks5AtwLPq2gx9xcnObn2NluIvXVqWAOoT4KJ2/ouvMAUm5oyXSL7ls4Gg0Fufj3Z1pZ6DJWDeH9gKhx84kiG/B28Kc1j3+Gu9MTbihsqia7NTejjVEqWjsmIVPdMWPXqO02qGyOaBAQ5M+/wHxI+/2uo+lVwgrIurfCaXYQInVtpLpgw9woDE1bd9qo5LVBUrJMn2eWjXfdG1OT8JGSj302EsBDxCm544XZIkUcnngi95QTM2kpfL0di/AfT1SJc6vdaTgoKoKHJO9EK0Jq1b6ZBNtTHcsDmCAWZRc/yxL590968B8mMmfg0bgomhgwGeu4sxzb/0DVGko6jt2HH2v9O3pO62lK5faGOKbYu/Gcq59XqtdJL9uwYCqaNGVr2GXdJA6TO76Qzh5gnT+UDMe9v50bO3kUlSsy/vnszKntx0Y=
12 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
cmake_minimum_required(VERSION 2.8.11)
project(gravl)

# Compile as C++11, supported in ROS Kinetic and newer.
# Append to -- rather than clobber -- any flags supplied by the user/toolchain.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")

# Find catkin macros/libraries and the catkin components this package uses.
find_package(catkin REQUIRED COMPONENTS
  roscpp
  rospy
  std_msgs
  sensor_msgs
  geometry_msgs
  image_transport
  dynamic_reconfigure
  ackermann_msgs
  message_generation
  mavros_msgs
  nav_msgs
  tf
  tf2
  tf2_geometry_msgs
  cv_bridge
  state_controller
  gps_common
)

# System dependencies are found with CMake's conventions.
find_package(Boost REQUIRED COMPONENTS system)

# OpenCV dependencies.
find_package(OpenCV 3.2 REQUIRED)

# QT dependencies (optional: the whole GUI toolchain is only configured when
# Qt5Widgets has been found; the find_package call is currently disabled).
#find_package(Qt5Widgets)
if(Qt5Widgets_FOUND)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${Qt5Widgets_EXECUTABLE_COMPILE_FLAGS}")
  set(CMAKE_INCLUDE_CURRENT_DIR ON)
  set(CMAKE_AUTOMOC ON)
  set(CMAKE_AUTORCC ON)
  set(CMAKE_AUTOUIC ON)

  find_package(Qt5Qml)
  find_package(Qt5Quick)
  find_package(Qt5Positioning)
endif()

################################################
## Declare ROS messages, services and actions ##
################################################

# Generate messages in the 'msg' folder.
# NOTE(review): TwistLabeled.msg lives in msg/ as well; it was previously
# missing from this list, so its headers were never generated by this package.
add_message_files(
  FILES
  Hemisphere.msg
  RoboClawStats.msg
  ImuSafety.msg
  TwistLabeled.msg
)

# Generate added messages with the packages their fields depend on.
generate_messages(
  DEPENDENCIES
  std_msgs
  geometry_msgs
  sensor_msgs
)

################################################
## Declare ROS dynamic reconfigure parameters ##
################################################

# Generate dynamic reconfigure parameters in the 'cfg' folder.
generate_dynamic_reconfigure_options(
  cfg/gravl_cfg.cfg
)

###################################
## catkin specific configuration ##
###################################
# Declare what dependent packages need from this one.
# NOTE(review): previously INCLUDE_DIRS pointed at an absolute system Boost
# path, DEPENDS carried the template placeholder 'system_lib' plus a bare
# 'lrt' (not a CMake package), and message_runtime -- a catkin package --
# was listed under DEPENDS instead of CATKIN_DEPENDS.
catkin_package(
  CATKIN_DEPENDS message_runtime roscpp rospy std_msgs geometry_msgs sensor_msgs
  DEPENDS Boost
)

###########
## Build ##
###########

# Header search paths; package-local paths would go before system ones.
include_directories(
  ${catkin_INCLUDE_DIRS}
  ${OpenCV_INCLUDE_DIRS}
  ${Boost_INCLUDE_DIRS}
  ${Qt5Widgets_INCLUDE_DIRS}
  ${QtQml_INCLUDE_DIRS}
)

# Declare the C++ executables (target names kept identical to preserve the
# interface other launch files / packages rely on).
add_executable(Camera src/Camera/Camera.cpp)
add_executable(DriveState src/DriveState/DriveState.cpp)
add_executable(Hemisphere src/Hemisphere/Hemisphere.cpp)
add_executable(idk src/arm_drone.cpp)
add_executable(road_detection src/road_detection.cpp)
add_executable(imu_safety src/imu_safety/ImuSafety.cpp)
add_executable(imu_base src/ImuBase/ImuBase.cpp)
add_executable(Watchdog src/Watchdog/Watchdog.cpp)

# Specify libraries to link each executable target against.
# NOTE(review): road_detection now also links ${catkin_LIBRARIES}; 'rt' is
# named directly instead of the raw '-lrt' linker flag.
target_link_libraries(Camera ${OpenCV_LIBS} ${catkin_LIBRARIES})
target_link_libraries(DriveState ${catkin_LIBRARIES})
target_link_libraries(Hemisphere ${catkin_LIBRARIES} ${Boost_LIBRARIES} rt)
target_link_libraries(idk ${catkin_LIBRARIES})
target_link_libraries(road_detection ${OpenCV_LIBS} ${catkin_LIBRARIES})
target_link_libraries(imu_safety ${catkin_LIBRARIES})
target_link_libraries(imu_base ${catkin_LIBRARIES})
target_link_libraries(Watchdog ${catkin_LIBRARIES})

# Make sure generated message / dynamic-reconfigure headers exist before any
# target that could include them is compiled (previously only Hemisphere and
# imu_safety declared this, so parallel builds could race the generators).
foreach(node Camera DriveState Hemisphere idk road_detection imu_safety imu_base Watchdog)
  add_dependencies(${node} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
endforeach()

# Optional Qt GPS map GUI -- kept disabled until the Qt toolchain above is
# re-enabled.
#if(Qt5Widgets_FOUND)
#  qt5_add_resources(QTEST_RESOURCES src/QT/qml.qrc)
#  add_executable(gps_map src/QT/main.cpp src/QT/tractor_controller.cpp ${QTEST_RESOURCES})
#  add_dependencies(gps_map ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS})
#  target_link_libraries(gps_map
#    Qt5::Widgets
#    Qt5::Qml
#    Qt5::Quick
#    Qt5::Positioning
#    ${catkin_LIBRARIES})
#endif()

#############
## Install ##
#############

# All install targets should use catkin DESTINATION variables.
# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html
# (No install rules are declared yet.)

#############
## Testing ##
#############

## Add gtest based cpp test target and link libraries
# catkin_add_gtest(${PROJECT_NAME}-test test/test_tractor.cpp)
# if(TARGET ${PROJECT_NAME}-test)
#   target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
# endif()

## Add folders to be run by python nosetests
# catkin_add_nosetests(test)
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG ROS_DISTRO=kinetic
2 |
3 | FROM ros:$ROS_DISTRO
4 |
5 | ARG DOCKER_USER=kubo
6 | ARG LIBSBP_V=2.3.10
7 |
8 | MAINTAINER Kawin Nikomborirak concavemail@gmail.com
9 |
10 | RUN bash -c \
11 | 'useradd -lmG video $DOCKER_USER \
12 | && mkdir -p /home/$DOCKER_USER/catkin_ws/src/gravl'
13 |
14 | COPY . /home/$DOCKER_USER/catkin_ws/src/gravl/
15 | COPY gravl.rosinstall /home/$DOCKER_USER/catkin_ws/src/.rosinstall
16 |
17 | RUN bash -c \
18 | 'apt-get update \
19 | && apt-get upgrade -y \
20 | && apt-get install -y wget \
21 | && apt-get install -y sudo \
22 | && cd /home/$DOCKER_USER/catkin_ws \
23 | # && wget -O libsbp.tar.gz https://github.com/swift-nav/libsbp/archive/v$LIBSBP_V.tar.gz \
24 | # && tar xvf libsbp.tar.gz \
25 | # && cd libsbp-$LIBSBP_V/c \
26 | # && mkdir build \
27 | # && cd build \
28 | # && cmake .. \
29 | # && make \
30 | # && make install \
31 | && cd /home/$DOCKER_USER/catkin_ws/src/gravl \
32 | && ./setup.sh \
33 | && cd /home/$DOCKER_USER/catkin_ws \
34 | && wstool update -t src \
35 | && rosdep update \
36 | && source /opt/ros/$ROS_DISTRO/setup.bash \
37 | && rosdep install -iry --from-paths src \
38 | # Need to setup package separately
39 | && cd /home/$DOCKER_USER/catkin_ws/src/ethz_piksi_ros/piksi_multi_rtk_ros \
40 | && source install/install_piksi_multi.sh \
41 | && cd /home/$DOCKER_USER/catkin_ws \
42 | && catkin_make -j1 \
43 | && source /home/$DOCKER_USER/catkin_ws/devel/setup.bash \
44 | && echo "source ~/catkin_ws/devel/setup.bash" >> /home/$DOCKER_USER/.bashrc \
45 | && chown -R $DOCKER_USER /home/$DOCKER_USER'
46 |
47 | WORKDIR /home/$DOCKER_USER/catkin_ws
48 | USER $DOCKER_USER
49 |
50 | WORKDIR /home/$DOCKER_USER
51 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/OAK_Install.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Clone the Arduino libraries this project's hindbrain code depends on into
# the user's Arduino libraries folder.
# NOTE(review): the original shebang was '#/bin/bash' (missing '!'), so the
# script would be run by whatever shell invoked it rather than bash.

# Abort if the Arduino libraries folder does not exist, instead of cloning
# into the current directory.
cd ~/Arduino/libraries || exit 1

git clone https://github.com/olinrobotics/oak.git
git clone https://github.com/adafruit/Adafruit_VL53L0X.git
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Overview
2 |
3 | This ROS package contains base code to run GRAVL's autonomous Kubota tractor.
4 | For current documentation see the [wiki](https://github.com/olinrobotics/Tractor/wiki).
5 |
6 | # Build Status
7 | [](https://travis-ci.org/olinrobotics/gravl)
8 |
9 |
10 | # Quick setup
11 |
12 | - Install [ROS](http://wiki.ros.org/)
13 | - Navigate to your catkin workspace source directory: `cd <catkin_ws>/src`
14 | - Clone this project: `git clone https://github.com/olinrobotics/gravl.git`
15 | - Navigate to the gravl directory: `cd gravl`
16 | - Install dependencies pt1: `sudo ./setup.sh`
17 | - Navigate back to the catkin workspace root: `cd ..`
18 | - Install dependencies: `rosdep install -iry --from-paths src`
19 | - Build the platform: `catkin_make`
20 | - Run various routines outlined in the [wiki](https://github.com/olinrobotics/Tractor/wiki)
21 |
22 | # Dependencies
23 | - [State Controller](https://github.com/olinrobotics/state_controller) - For switching between tractor behaviors and states
24 |
25 | # Usage
26 | - [Documentation](https://github.com/olinrobotics/gravl/wiki/Kubo:-Overview) - To run the tractor, see instructions here
27 |
--------------------------------------------------------------------------------
/cfg/gravl_cfg.cfg:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | PACKAGE = "gravl"
3 |
4 | from dynamic_reconfigure.parameter_generator_catkin import *
5 |
6 | gen = ParameterGenerator()
7 |
8 | gen.add("low_filter", int_t, 0, "The lower threshold for the flir filter", 0, 0, 255)
9 | gen.add("high_filter", int_t, 0, "The higher threshold for the flir filter", 255, 0, 255)
10 | gen.add("filter", bool_t, 0, "Filter the image?", True)
11 |
12 | exit(gen.generate(PACKAGE, "gravl", "gravl_cfg"))
--------------------------------------------------------------------------------
/config/default_behaviors.yaml:
--------------------------------------------------------------------------------
1 | # Tractor behavior parameter definitions
2 | #
3 | # Ex: behaviors/[name_of_behavior]: '[priority]'
4 | # - Lower numbers = higher priority
5 | # - Lower number commands override higher number commands
6 |
7 | behaviors/safety: '0'
8 | behaviors/teleop: '1'
9 | behaviors/p2p: '2'
10 | # behaviors/example: '3'
11 |
--------------------------------------------------------------------------------
/config/hind_brain_config.yaml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/config/hind_brain_config.yaml
--------------------------------------------------------------------------------
/config/tractor_groundtruth.yaml:
--------------------------------------------------------------------------------
1 | frequency: 20
2 | sensor_timeout: 0.5
3 | two_d_mode: false
4 | print_diagnostics: true
5 | publish_tf: true
6 | publish_acceleration: true
7 |
8 | map_frame: map
9 | odom_frame: earth
10 | base_link_frame: base_link
11 | world_frame: earth
12 |
--------------------------------------------------------------------------------
/gravl.rosinstall:
--------------------------------------------------------------------------------
1 | - git:
2 | local-name: ethz_piksi_ros
3 | uri: https://github.com/olinrobotics/ethz_piksi_ros.git
4 | - git:
5 | local-name: state_controller
6 | uri: https://github.com/olinrobotics/state_controller
7 | - git:
8 | local-name: hokuyo_node
9 | uri: https://github.com/ros-drivers/hokuyo_node
10 | - git:
11 | local-name: pointgrey_camera_driver
12 | uri: https://github.com/ros-drivers/pointgrey_camera_driver.git
13 | - git:
14 | local-name: gps_common
15 | uri: https://github.com/olinrobotics/gps_common.git
16 |
--------------------------------------------------------------------------------
/images/170607_RhinoAndTrailer.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/170607_RhinoAndTrailer.JPG
--------------------------------------------------------------------------------
/images/170721_RosSerialSetupPic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/170721_RosSerialSetupPic.png
--------------------------------------------------------------------------------
/images/170721_RosSerialSetupPic2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/170721_RosSerialSetupPic2.png
--------------------------------------------------------------------------------
/images/170808_IonmcDocs_1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/170808_IonmcDocs_1.jpg
--------------------------------------------------------------------------------
/images/170808_RoboClawDocs_1.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/170808_RoboClawDocs_1.jpeg
--------------------------------------------------------------------------------
/images/170808_SteeringPotDocs_1.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/170808_SteeringPotDocs_1.jpeg
--------------------------------------------------------------------------------
/images/170808_VelocityPotDocs_1 (1).jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/170808_VelocityPotDocs_1 (1).jpeg
--------------------------------------------------------------------------------
/images/171004_LidarDocImage_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/171004_LidarDocImage_1.png
--------------------------------------------------------------------------------
/images/171031ackermann_drive_code.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/171031ackermann_drive_code.png
--------------------------------------------------------------------------------
/images/171216_tractordemo_3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/171216_tractordemo_3.jpg
--------------------------------------------------------------------------------
/images/Capture.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/Capture.PNG
--------------------------------------------------------------------------------
/images/DownLidarConfig.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/DownLidarConfig.png
--------------------------------------------------------------------------------
/images/FrontLidarConfig.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/FrontLidarConfig.png
--------------------------------------------------------------------------------
/images/LidarCodeVar.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/LidarCodeVar.jpg
--------------------------------------------------------------------------------
/images/bestgabor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/bestgabor.png
--------------------------------------------------------------------------------
/images/lidar_connector.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/lidar_connector.png
--------------------------------------------------------------------------------
/images/snowcat_wiringdiagram_full.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/snowcat_wiringdiagram_full.png
--------------------------------------------------------------------------------
/images/worstgabor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/olinrobotics/gravl/d09bca91dc435f0c1f829a7cecade7988076f679/images/worstgabor.png
--------------------------------------------------------------------------------
/launch/README.md:
--------------------------------------------------------------------------------
1 | # Overview
2 | Folder for GRAVL launch files
3 |
4 | ## Launch Files
5 | ### `bringup_min.launch`
6 | Launches minimal tractor functionality, no map-level reference
7 | - localization
8 | - serial (teensy)
9 | - teleoperation via joystick
10 | - main front lidar
11 | - State controller
12 |
13 | ### `bringup.launch`
14 | Launches tractor with full environment reference, mapping capability
15 | - all functionality in bringup_min.launch
16 | - rtk (GPS)
17 |
18 | ### `hindbrain.launch`
19 | Launches hindbrain ROS node
20 | - serial (teensy)
21 |
22 | ### `ir.launch`
23 | Launches usb_cam1 node for infrared camera, contains parameters for calibrating
24 | camera
25 |
26 | ### `lidar.launch`
27 | Launches hokuyo node for main lidar
28 |
29 | ### `lidarfollow.launch`
30 | Launches minimal tractor and lidarfollow behavior
31 |
32 |
33 | ### `localization.launch`
34 | Launches static and kinetic tf frames for Kubo localization
35 | - robot_localization (ekf_localization_node)
36 | - temp_tf_broadcaster
37 | - map -> world
38 | - lidar -> base
39 | - lidar2 -> base
40 | - IMU -> base
41 | - RTK GPS -> base
42 |
43 | ### `rtk.launch`
44 | Launches swiftnav RTK GPS
45 |
46 | ### `safety.launch`
47 | Launches tf localization, imu, and safety handling node
48 |
49 | ### `teleop.launch`
50 | Launches nodes for reading and publishing joystick commands
51 | - joystick (joy_node)
52 | - joystick_teleop
53 | - Default controller: gamepad
54 |
55 | ### `telewaypoint.launch`
56 | Launches gps waypoint navigator along with fully gps-referenced tractor
57 |
58 | ### `waypoint.launch`
59 | Launches gps waypoint converter and RTK GPS
63 |
--------------------------------------------------------------------------------
/launch/base_station.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/launch/bringup.launch:
--------------------------------------------------------------------------------
1 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/launch/bringup_min.launch:
--------------------------------------------------------------------------------
1 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/launch/demos.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
45 |
--------------------------------------------------------------------------------
/launch/ekf.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/launch/hindbrain.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/launch/ir.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/launch/lidar.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/launch/lidarfollow.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/launch/localization.launch:
--------------------------------------------------------------------------------
1 |
2 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/launch/mainstate.launch:
--------------------------------------------------------------------------------
1 |
7 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/launch/rtk.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/launch/safety.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/launch/teleop.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/launch/telewaypoint.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
7 |
8 |
--------------------------------------------------------------------------------
/launch/waypoint.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/msg/Hemisphere.msg:
--------------------------------------------------------------------------------
1 | std_msgs/Header header
2 | float64 direction
3 |
--------------------------------------------------------------------------------
/msg/ImuSafety.msg:
--------------------------------------------------------------------------------
1 | bool danger
2 | float64 theta
3 |
--------------------------------------------------------------------------------
/msg/RoboClawStats.msg:
--------------------------------------------------------------------------------
1 | std_msgs/Header header
2 | uint16 main_voltage
3 | uint8 logic_voltage
4 | int16 motor1_current
5 | int16 motor2_current
6 | uint16 error
7 |
--------------------------------------------------------------------------------
/msg/TwistLabeled.msg:
--------------------------------------------------------------------------------
1 | std_msgs/Header header
2 | string label
3 | geometry_msgs/Twist twist
4 |
--------------------------------------------------------------------------------
/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | gravl
4 | 0.0.0
5 | Olin's ground robotic autonomous vehicle lab repo
6 |
7 | connor
8 |
9 | MIT
10 |
11 | catkin
12 |
13 | ackermann_msgs
14 | cv_bridge
15 | dynamic_reconfigure
16 | geometry_msgs
17 | image_transport
18 | mavros_msgs
19 | message_generation
20 | nav_msgs
21 | sensor_msgs
22 | std_msgs
23 | tf
24 | tf2_ros
25 | tf2_geometry_msgs
26 | rospy
27 |
28 | roscpp
29 |
30 | state_controller
31 | message_runtime
32 | joy
33 | phidgets_imu
34 | nmea_navsat_driver
35 | rosserial
36 | urg_node
37 | robot_localization
38 | pointgrey_camera_driver
39 | ethz_piksi_ros
40 | hokuyo_node
41 | gps_common
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/params/ekf_params.yaml:
--------------------------------------------------------------------------------
1 |
2 | # The frequency, in Hz, at which the filter will output a position estimate. Note that the filter will not begin
3 | # computation until it receives at least one message from one of the inputs. It will then run continuously at the
4 | # frequency specified here, regardless of whether it receives more measurements. Defaults to 30 if unspecified.
5 | frequency: 30
6 |
7 | # The period, in seconds, after which we consider a sensor to have timed out. In this event, we carry out a predict
8 | # cycle on the EKF without correcting it. This parameter can be thought of as the minimum frequency with which the
9 | # filter will generate new output. Defaults to 1 / frequency if not specified.
10 | sensor_timeout: 0.1
11 |
12 | # ekf_localization_node and ukf_localization_node both use a 3D omnidirectional motion model. If this parameter is
13 | # set to true, no 3D information will be used in your state estimate. Use this if you are operating in a planar
14 | # environment and want to ignore the effect of small variations in the ground plane that might otherwise be detected
15 | # by, for example, an IMU. Defaults to false if unspecified.
16 | two_d_mode: false
17 |
18 | # Use this parameter to provide an offset to the transform generated by ekf_localization_node. This can be used for
19 | # future dating the transform, which is required for interaction with some other packages. Defaults to 0.0 if
20 | # unspecified.
21 | transform_time_offset: 0.0
22 |
23 | # Use this parameter to specify how long the tf listener should wait for a transform to become available.
24 | # Defaults to 0.0 if unspecified.
25 | transform_timeout: 0.0
26 |
27 | # If you're having trouble, try setting this to true, and then echo the /diagnostics_agg topic to see if the node is
28 | # unhappy with any settings or data.
29 | print_diagnostics: true
30 |
31 | # Debug settings. Not for the faint of heart. Outputs a ludicrous amount of information to the file specified by
32 | # debug_out_file. I hope you like matrices! Please note that setting this to true will have strongly deleterious
33 | # effects on the performance of the node. Defaults to false if unspecified.
34 | debug: false
35 |
36 | # Defaults to "robot_localization_debug.txt" if unspecified. Please specify the full path.
37 | debug_out_file: /path/to/debug/file.txt
38 |
39 | # Whether to broadcast the transformation over the /tf topic. Defaults to true if unspecified.
40 | publish_tf: true
41 |
42 | # Whether to publish the acceleration state. Defaults to false if unspecified.
43 | publish_acceleration: false
44 |
45 | # REP-105 (http://www.ros.org/reps/rep-0105.html) specifies four principal coordinate frames: base_link, odom, map, and
46 | # earth. base_link is the coordinate frame that is affixed to the robot. Both odom and map are world-fixed frames.
47 | # The robot's position in the odom frame will drift over time, but is accurate in the short term and should be
48 | # continuous. The odom frame is therefore the best frame for executing local motion plans. The map frame, like the odom
49 | # frame, is a world-fixed coordinate frame, and while it contains the most globally accurate position estimate for your
50 | # robot, it is subject to discrete jumps, e.g., due to the fusion of GPS data or a correction from a map-based
51 | # localization node. The earth frame is used to relate multiple map frames by giving them a common reference frame.
52 | # ekf_localization_node and ukf_localization_node are not concerned with the earth frame.
53 | # Here is how to use the following settings:
54 | # 1. Set the map_frame, odom_frame, and base_link frames to the appropriate frame names for your system.
55 | # 1a. If your system does not have a map_frame, just remove it, and make sure "world_frame" is set to the value of
56 | # odom_frame.
57 | # 2. If you are fusing continuous position data such as wheel encoder odometry, visual odometry, or IMU data, set
58 | # "world_frame" to your odom_frame value. This is the default behavior for robot_localization's state estimation nodes.
59 | # 3. If you are fusing global absolute position data that is subject to discrete jumps (e.g., GPS or position updates
60 | # from landmark observations) then:
61 | # 3a. Set your "world_frame" to your map_frame value
62 | # 3b. MAKE SURE something else is generating the odom->base_link transform. Note that this can even be another state
63 | # estimation node from robot_localization! However, that instance should *not* fuse the global data.
64 | map_frame: map # Defaults to "map" if unspecified
65 | odom_frame: odom # Defaults to "odom" if unspecified
66 | base_link_frame: base_link # Defaults to "base_link" if unspecified
67 | world_frame: odom # Defaults to the value of odom_frame if unspecified
68 |
69 | # The filter accepts an arbitrary number of inputs from each input message type (nav_msgs/Odometry,
70 | # geometry_msgs/PoseWithCovarianceStamped, geometry_msgs/TwistWithCovarianceStamped,
71 | # sensor_msgs/Imu). To add an input, simply append the next number in the sequence to its "base" name, e.g., odom0,
72 | # odom1, twist0, twist1, imu0, imu1, imu2, etc. The value should be the topic name. These parameters obviously have no
73 | # default values, and must be specified.
74 | odom0: /gps_odom
75 |
76 | # Each sensor reading updates some or all of the filter's state. These options give you greater control over which
77 | # values from each measurement are fed to the filter. For example, if you have an odometry message as input, but only
78 | # want to use its Z position value, then set the entire vector to false, except for the third entry. The order of the
79 | # values is x, y, z, roll, pitch, yaw, vx, vy, vz, vroll, vpitch, vyaw, ax, ay, az. Note that some message types
80 | # do not provide some of the state variables estimated by the filter. For example, a TwistWithCovarianceStamped message
81 | # has no pose information, so the first six values would be meaningless in that case. Each vector defaults to all false
82 | # if unspecified, effectively making this parameter required for each sensor.
83 | odom0_config: [true, true, true,
84 | false, false, false,
85 | false, false, false,
86 | false, false, false,
87 | false, false, false]
88 |
89 | # If you have high-frequency data or are running with a low frequency parameter value, then you may want to increase
90 | # the size of the subscription queue so that more measurements are fused.
91 | odom0_queue_size: 2
92 |
93 | # [ADVANCED] Large messages in ROS can exhibit strange behavior when they arrive at a high frequency. This is a result
94 | # of Nagle's algorithm. This option tells the ROS subscriber to use the tcpNoDelay option, which disables Nagle's
95 | # algorithm.
96 | odom0_nodelay: false
97 |
98 | # [ADVANCED] When measuring one pose variable with two sensors, a situation can arise in which both sensors under-
99 | # report their covariances. This can lead to the filter rapidly jumping back and forth between each measurement as they
100 | # arrive. In these cases, it often makes sense to (a) correct the measurement covariances, or (b) if velocity is also
101 | # measured by one of the sensors, let one sensor measure pose, and the other velocity. However, doing (a) or (b) isn't
102 | # always feasible, and so we expose the differential parameter. When differential mode is enabled, all absolute pose
103 | # data is converted to velocity data by differentiating the absolute pose measurements. These velocities are then
104 | # integrated as usual. NOTE: this only applies to sensors that provide pose measurements; setting differential to true
105 | # for twist measurements has no effect.
106 | odom0_differential: false
107 |
108 | # [ADVANCED] When the node starts, if this parameter is true, then the first measurement is treated as a "zero point"
109 | # for all future measurements. While you can achieve the same effect with the differential parameter, the key
110 | # difference is that the relative parameter doesn't cause the measurement to be converted to a velocity before
111 | # integrating it. If you simply want your measurements to start at 0 for a given sensor, set this to true.
112 | odom0_relative: false
113 |
114 | # [ADVANCED] If your data is subject to outliers, use these threshold settings, expressed as Mahalanobis distances, to
115 | # control how far away from the current vehicle state a sensor measurement is permitted to be. Each defaults to
116 | # numeric_limits<double>::max() if unspecified. It is strongly recommended that these parameters be removed if not
117 | # required. Data is specified at the level of pose and twist variables, rather than for each variable in isolation.
118 | # For messages that have both pose and twist data, the parameter specifies to which part of the message we are applying
119 | # the thresholds.
120 | odom0_pose_rejection_threshold: 5
121 | odom0_twist_rejection_threshold: 1
122 |
123 | # Further input parameter examples
124 | # odom1: example/another_odom
125 | # odom1_config: [false, false, true,
126 | # false, false, false,
127 | # false, false, false,
128 | # false, false, true,
129 | # false, false, false]
130 | # odom1_differential: false
131 | # odom1_relative: true
132 | # odom1_queue_size: 2
133 | # odom1_pose_rejection_threshold: 2
134 | # odom1_twist_rejection_threshold: 0.2
135 | # odom1_nodelay: false
136 | #
137 | # pose0: example/pose
138 | # pose0_config: [true, true, false,
139 | # false, false, false,
140 | # false, false, false,
141 | # false, false, false,
142 | # false, false, false]
143 | # pose0_differential: true
144 | # pose0_relative: false
145 | # pose0_queue_size: 5
146 | # pose0_rejection_threshold: 2 # Note the difference in parameter name
147 | # pose0_nodelay: false
148 |
149 | # twist0: example/twist
150 | # twist0_config: [false, false, false,
151 | # false, false, false,
152 | # true, true, true,
153 | # false, false, false,
154 | # false, false, false]
155 | # twist0_queue_size: 3
156 | # twist0_rejection_threshold: 2
157 | # twist0_nodelay: false
158 | #
159 | imu0: /imu/data
160 | imu0_config: [false, false, false,
161 | true, true, true,
162 | false, false, false,
163 | true, true, true,
164 | true, true, true]
165 | imu0_nodelay: false
166 | imu0_differential: false
167 | imu0_relative: true
168 | imu0_queue_size: 5
169 | imu0_pose_rejection_threshold: 0.8 # Note the difference in parameter names
170 | imu0_twist_rejection_threshold: 0.8 #
171 | imu0_linear_acceleration_rejection_threshold: 0.8 #
172 |
173 | # [ADVANCED] Some IMUs automatically remove acceleration due to gravity, and others don't. If yours doesn't, please set
174 | # this to true, and *make sure* your data conforms to REP-103, specifically, that the data is in ENU frame.
175 | imu0_remove_gravitational_acceleration: true
176 |
177 | # [ADVANCED] The EKF and UKF models follow a standard predict/correct cycle. During prediction, if there is no
178 | # acceleration reference, the velocity at time t+1 is simply predicted to be the same as the velocity at time t. During
179 | # correction, this predicted value is fused with the measured value to produce the new velocity estimate. This can be
180 | # problematic, as the final velocity will effectively be a weighted average of the old velocity and the new one. When
181 | # this velocity is then integrated into a new pose, the result can be sluggish convergence. This effect is especially
182 | # noticeable with LIDAR data during rotations. To get around it, users can try inflating the process_noise_covariance
183 | # for the velocity variable in question, or decrease the variance of the variable in question in the measurement
184 | # itself. In addition, users can also take advantage of the control command being issued to the robot at the time we
185 | # make the prediction. If control is used, it will get converted into an acceleration term, which will be used during
186 | # prediction. Note that if an acceleration measurement for the variable in question is available from one of the
187 | # inputs, the control term will be ignored.
188 | # Whether or not we use the control input during prediction. Defaults to false.
189 | use_control: true
190 | # Whether the input (assumed to be cmd_vel) is a geometry_msgs/Twist or geometry_msgs/TwistStamped message. Defaults to
191 | # false.
192 | stamped_control: false
193 | # The last issued control command will be used in prediction for this period. Defaults to 0.2.
194 | control_timeout: 0.2
195 | # Which velocities are being controlled. Order is vx, vy, vz, vroll, vpitch, vyaw.
196 | control_config: [true, false, false, false, false, true]
197 | # Places limits on how large the acceleration term will be. Should match your robot's kinematics.
198 | acceleration_limits: [1.3, 0.0, 0.0, 0.0, 0.0, 3.4]
199 | # Acceleration and deceleration limits are not always the same for robots.
200 | deceleration_limits: [1.3, 0.0, 0.0, 0.0, 0.0, 4.5]
201 | # If your robot cannot instantaneously reach its acceleration limit, the permitted change can be controlled with these
202 | # gains
203 | acceleration_gains: [0.8, 0.0, 0.0, 0.0, 0.0, 0.9]
204 | # If your robot cannot instantaneously reach its deceleration limit, the permitted change can be controlled with these
205 | # gains
206 | deceleration_gains: [1.0, 0.0, 0.0, 0.0, 0.0, 1.0]
207 |
208 | # [ADVANCED] The process noise covariance matrix can be difficult to tune, and can vary for each application, so it is
209 | # exposed as a configuration parameter. This matrix represents the noise we add to the total error after each
210 | # prediction step. The better the omnidirectional motion model matches your system, the smaller these values can be.
211 | # However, if users find that a given variable is slow to converge, one approach is to increase the
212 | # process_noise_covariance diagonal value for the variable in question, which will cause the filter's predicted error
213 | # to be larger, which will cause the filter to trust the incoming measurement more during correction. The values are
214 | # ordered as x, y, z, roll, pitch, yaw, vx, vy, vz, vroll, vpitch, vyaw, ax, ay, az. Defaults to the matrix below if
215 | # unspecified.
216 | process_noise_covariance: [0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
217 | 0, 0.05, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
218 | 0, 0, 0.06, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
219 | 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
220 | 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
221 | 0, 0, 0, 0, 0, 0.06, 0, 0, 0, 0, 0, 0, 0, 0, 0,
222 | 0, 0, 0, 0, 0, 0, 0.025, 0, 0, 0, 0, 0, 0, 0, 0,
223 | 0, 0, 0, 0, 0, 0, 0, 0.025, 0, 0, 0, 0, 0, 0, 0,
224 | 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0,
225 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0,
226 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0,
227 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.02, 0, 0, 0,
228 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0,
229 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0,
230 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015]
231 |
232 | # [ADVANCED] This represents the initial value for the state estimate error covariance matrix. Setting a diagonal
233 | # value (variance) to a large value will result in rapid convergence for initial measurements of the variable in
234 | # question. Users should take care not to use large values for variables that will not be measured directly. The values
235 | # are ordered as x, y, z, roll, pitch, yaw, vx, vy, vz, vroll, vpitch, vyaw, ax, ay, az. Defaults to the matrix below
236 | # if unspecified.
237 | initial_estimate_covariance: [1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
238 | 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
239 | 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
240 | 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
241 | 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
242 | 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0, 0,
243 | 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0, 0,
244 | 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0, 0,
245 | 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0, 0,
246 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0, 0,
247 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0, 0,
248 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0, 0,
249 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0, 0,
250 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9, 0,
251 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1e-9]
252 |
--------------------------------------------------------------------------------
/rviz/demo.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 75
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Status1
9 | - /RobotModel1
10 | - /LaserScan1
11 | - /PointCloud21
12 | - /Marker1
13 | - /Odometry1/Shape1
14 | Splitter Ratio: 0.5
15 | Tree Height: 75
16 | - Class: rviz/Selection
17 | Name: Selection
18 | - Class: rviz/Tool Properties
19 | Expanded:
20 | - /2D Pose Estimate1
21 | - /2D Nav Goal1
22 | - /Publish Point1
23 | Name: Tool Properties
24 | Splitter Ratio: 0.588679016
25 | - Class: rviz/Views
26 | Expanded:
27 | - /Current View1
28 | - /Current View1/Focal Point1
29 | Name: Views
30 | Splitter Ratio: 0.5
31 | - Class: rviz/Time
32 | Experimental: false
33 | Name: Time
34 | SyncMode: 0
35 | SyncSource: Camera
36 | Toolbars:
37 | toolButtonStyle: 2
38 | Visualization Manager:
39 | Class: ""
40 | Displays:
41 | - Alpha: 0.5
42 | Cell Size: 1
43 | Class: rviz/Grid
44 | Color: 160; 160; 164
45 | Enabled: true
46 | Line Style:
47 | Line Width: 0.0299999993
48 | Value: Lines
49 | Name: Grid
50 | Normal Cell Count: 0
51 | Offset:
52 | X: 0
53 | Y: 0
54 | Z: 0
55 | Plane: XY
56 | Plane Cell Count: 10
57 | Reference Frame:
58 | Value: true
59 | - Alpha: 1
60 | Class: rviz/RobotModel
61 | Collision Enabled: false
62 | Enabled: true
63 | Links:
64 | 3d_lidar:
65 | Alpha: 1
66 | Show Axes: false
67 | Show Trail: false
68 | Value: true
69 | 3d_lidar_base_link:
70 | Alpha: 1
71 | Show Axes: false
72 | Show Trail: false
73 | Value: true
74 | All Links Enabled: true
75 | Expand Joint Details: false
76 | Expand Link Details: false
77 | Expand Tree: false
78 | Link Tree Style: Links in Alphabetic Order
79 | base_link:
80 | Alpha: 1
81 | Show Axes: false
82 | Show Trail: false
83 | Value: true
84 | camera:
85 | Alpha: 1
86 | Show Axes: false
87 | Show Trail: false
88 | Value: true
89 | chassis:
90 | Alpha: 1
91 | Show Axes: false
92 | Show Trail: false
93 | Value: true
94 | gps:
95 | Alpha: 1
96 | Show Axes: false
97 | Show Trail: false
98 | Value: true
99 | hemisphere_gps:
100 | Alpha: 1
101 | Show Axes: false
102 | Show Trail: false
103 | Value: true
104 | hitch:
105 | Alpha: 1
106 | Show Axes: false
107 | Show Trail: false
108 | Value: true
109 | imu:
110 | Alpha: 1
111 | Show Axes: false
112 | Show Trail: false
113 | Value: true
114 | left_front_axle_carrier:
115 | Alpha: 1
116 | Show Axes: false
117 | Show Trail: false
118 | left_front_wheel:
119 | Alpha: 1
120 | Show Axes: false
121 | Show Trail: false
122 | Value: true
123 | left_rear_axle_carrier:
124 | Alpha: 1
125 | Show Axes: false
126 | Show Trail: false
127 | left_rear_wheel:
128 | Alpha: 1
129 | Show Axes: false
130 | Show Trail: false
131 | Value: true
132 | left_steering_link:
133 | Alpha: 1
134 | Show Axes: false
135 | Show Trail: false
136 | right_front_axle_carrier:
137 | Alpha: 1
138 | Show Axes: false
139 | Show Trail: false
140 | right_front_wheel:
141 | Alpha: 1
142 | Show Axes: false
143 | Show Trail: false
144 | Value: true
145 | right_rear_axle_carrier:
146 | Alpha: 1
147 | Show Axes: false
148 | Show Trail: false
149 | right_rear_wheel:
150 | Alpha: 1
151 | Show Axes: false
152 | Show Trail: false
153 | Value: true
154 | right_steering_link:
155 | Alpha: 1
156 | Show Axes: false
157 | Show Trail: false
158 | Name: RobotModel
159 | Robot Description: robot_description
160 | TF Prefix: ""
161 | Update Interval: 0
162 | Value: true
163 | Visual Enabled: true
164 | - Alpha: 0.699999988
165 | Class: rviz/Map
166 | Color Scheme: map
167 | Draw Behind: false
168 | Enabled: true
169 | Name: Map
170 | Topic: /move_base/global_costmap/costmap
171 | Unreliable: false
172 | Use Timestamp: false
173 | Value: true
174 | - Alpha: 1
175 | Autocompute Intensity Bounds: true
176 | Autocompute Value Bounds:
177 | Max Value: 10
178 | Min Value: -10
179 | Value: true
180 | Axis: Z
181 | Channel Name: intensity
182 | Class: rviz/LaserScan
183 | Color: 255; 255; 255
184 | Color Transformer: Intensity
185 | Decay Time: 0
186 | Enabled: true
187 | Invert Rainbow: false
188 | Max Color: 255; 255; 255
189 | Max Intensity: 0
190 | Min Color: 0; 0; 0
191 | Min Intensity: 0
192 | Name: LaserScan
193 | Position Transformer: XYZ
194 | Queue Size: 10
195 | Selectable: true
196 | Size (Pixels): 3
197 | Size (m): 0.0500000007
198 | Style: Flat Squares
199 | Topic: /scan
200 | Unreliable: false
201 | Use Fixed Frame: true
202 | Use rainbow: true
203 | Value: true
204 | - Alpha: 1
205 | Buffer Length: 1
206 | Class: rviz/Path
207 | Color: 25; 255; 0
208 | Enabled: true
209 | Head Diameter: 0.300000012
210 | Head Length: 0.200000003
211 | Length: 0.300000012
212 | Line Style: Lines
213 | Line Width: 0.0299999993
214 | Name: Path (local)
215 | Offset:
216 | X: 0
217 | Y: 0
218 | Z: 0
219 | Pose Color: 255; 85; 255
220 | Pose Style: None
221 | Radius: 0.0299999993
222 | Shaft Diameter: 0.100000001
223 | Shaft Length: 0.100000001
224 | Topic: /move_base/TrajectoryPlannerROS/local_plan
225 | Unreliable: false
226 | Value: true
227 | - Alpha: 1
228 | Buffer Length: 1
229 | Class: rviz/Path
230 | Color: 0; 0; 255
231 | Enabled: true
232 | Head Diameter: 0.300000012
233 | Head Length: 0.200000003
234 | Length: 0.300000012
235 | Line Style: Lines
236 | Line Width: 0.0299999993
237 | Name: Path (global)
238 | Offset:
239 | X: 0
240 | Y: 0
241 | Z: 0
242 | Pose Color: 255; 85; 255
243 | Pose Style: None
244 | Radius: 0.0299999993
245 | Shaft Diameter: 0.100000001
246 | Shaft Length: 0.100000001
247 | Topic: /move_base/TrajectoryPlannerROS/global_plan
248 | Unreliable: false
249 | Value: true
250 | - Class: rviz/Camera
251 | Enabled: true
252 | Image Rendering: background and overlay
253 | Image Topic: /camera/image_raw
254 | Name: Camera
255 | Overlay Alpha: 0.5
256 | Queue Size: 2
257 | Transport Hint: raw
258 | Unreliable: false
259 | Value: true
260 | Visibility:
261 | Grid: true
262 | LaserScan: true
263 | Map: true
264 | Marker: true
265 | MarkerArray: true
266 | Odometry: true
267 | Path (global): true
268 | Path (local): true
269 | PointCloud2: true
270 | PoseWithCovariance: true
271 | RobotModel: true
272 | Value: true
273 | Zoom Factor: 1
274 | - Alpha: 1
275 | Autocompute Intensity Bounds: true
276 | Autocompute Value Bounds:
277 | Max Value: 10
278 | Min Value: -10
279 | Value: true
280 | Axis: Z
281 | Channel Name: intensity
282 | Class: rviz/PointCloud2
283 | Color: 255; 255; 255
284 | Color Transformer: Intensity
285 | Decay Time: 0
286 | Enabled: true
287 | Invert Rainbow: false
288 | Max Color: 255; 255; 255
289 | Max Intensity: 999999
290 | Min Color: 0; 0; 0
291 | Min Intensity: 999999
292 | Name: PointCloud2
293 | Position Transformer: XYZ
294 | Queue Size: 10
295 | Selectable: true
296 | Size (Pixels): 3
297 | Size (m): 0.00999999978
298 | Style: Flat Squares
299 | Topic: /laser_pointcloud
300 | Unreliable: false
301 | Use Fixed Frame: true
302 | Use rainbow: true
303 | Value: true
304 | - Class: rviz/Marker
305 | Enabled: true
306 | Marker Topic: /line_vis
307 | Name: Marker
308 | Namespaces:
309 | {}
310 | Queue Size: 100
311 | Value: true
312 | - Class: rviz/MarkerArray
313 | Enabled: true
314 | Marker Topic: /mission_planner/vis_waypoints
315 | Name: MarkerArray
316 | Namespaces:
317 | {}
318 | Queue Size: 100
319 | Value: true
320 | - Angle Tolerance: 0.100000001
321 | Class: rviz/Odometry
322 | Covariance:
323 | Orientation:
324 | Alpha: 0.5
325 | Color: 255; 255; 127
326 | Color Style: Unique
327 | Frame: Local
328 | Offset: 1
329 | Scale: 1
330 | Value: true
331 | Position:
332 | Alpha: 0.300000012
333 | Color: 204; 51; 204
334 | Scale: 1
335 | Value: true
336 | Value: false
337 | Enabled: false
338 | Keep: 100
339 | Name: Odometry
340 | Position Tolerance: 0.100000001
341 | Shape:
342 | Alpha: 1
343 | Axes Length: 1
344 | Axes Radius: 0.100000001
345 | Color: 255; 25; 0
346 | Head Length: 0.300000012
347 | Head Radius: 0.100000001
348 | Shaft Length: 2
349 | Shaft Radius: 0.300000012
350 | Value: Arrow
351 | Topic: /gps_odom
352 | Unreliable: false
353 | Value: false
354 | - Alpha: 1
355 | Axes Length: 1
356 | Axes Radius: 0.100000001
357 | Class: rviz/PoseWithCovariance
358 | Color: 255; 25; 0
359 | Covariance:
360 | Orientation:
361 | Alpha: 0.5
362 | Color: 255; 255; 127
363 | Color Style: Unique
364 | Frame: Local
365 | Offset: 1
366 | Scale: 1
367 | Value: true
368 | Position:
369 | Alpha: 0.300000012
370 | Color: 204; 51; 204
371 | Scale: 1
372 | Value: true
373 | Value: false
374 | Enabled: false
375 | Head Length: 0.5
376 | Head Radius: 0.5
377 | Name: PoseWithCovariance
378 | Shaft Length: 2
379 | Shaft Radius: 0.200000003
380 | Shape: Arrow
381 | Topic: /robot_pose_ekf/odom_combined
382 | Unreliable: false
383 | Value: false
384 | - Alpha: 1
385 | Autocompute Intensity Bounds: true
386 | Autocompute Value Bounds:
387 | Max Value: 10
388 | Min Value: -10
389 | Value: true
390 | Axis: Z
391 | Channel Name: intensity
392 | Class: rviz/PointCloud2
393 | Color: 255; 255; 255
394 | Color Transformer: Intensity
395 | Decay Time: 0
396 | Enabled: true
397 | Invert Rainbow: false
398 | Max Color: 255; 255; 255
399 | Max Intensity: 0
400 | Min Color: 0; 0; 0
401 | Min Intensity: 0
402 | Name: PointCloud2
403 | Position Transformer: XYZ
404 | Queue Size: 10
405 | Selectable: true
406 | Size (Pixels): 3
407 | Size (m): 0.00999999978
408 | Style: Flat Squares
409 | Topic: /lidarx_points
410 | Unreliable: false
411 | Use Fixed Frame: true
412 | Use rainbow: true
413 | Value: true
414 | Enabled: true
415 | Global Options:
416 | Background Color: 48; 48; 48
417 | Default Light: true
418 | Fixed Frame: odom
419 | Frame Rate: 30
420 | Name: root
421 | Tools:
422 | - Class: rviz/Interact
423 | Hide Inactive Objects: true
424 | - Class: rviz/MoveCamera
425 | - Class: rviz/Select
426 | - Class: rviz/FocusCamera
427 | - Class: rviz/Measure
428 | - Class: rviz/SetInitialPose
429 | Topic: /initialpose
430 | - Class: rviz/SetGoal
431 | Topic: /move_base_simple/goal
432 | - Class: rviz/PublishPoint
433 | Single click: true
434 | Topic: /clicked_point
435 | Value: true
436 | Views:
437 | Current:
438 | Class: rviz/Orbit
439 | Distance: 4.62562847
440 | Enable Stereo Rendering:
441 | Stereo Eye Separation: 0.0599999987
442 | Stereo Focal Distance: 1
443 | Swap Stereo Eyes: false
444 | Value: false
445 | Focal Point:
446 | X: 3.6286478
447 | Y: -2.08131957
448 | Z: 2.25637794
449 | Focal Shape Fixed Size: true
450 | Focal Shape Size: 0.0500000007
451 | Invert Z Axis: false
452 | Name: Current View
453 | Near Clip Distance: 0.00999999978
454 | Pitch: 0.52979666
455 | Target Frame:
456 | Value: Orbit (rviz)
457 | Yaw: 5.60818529
458 | Saved: ~
459 | Window Geometry:
460 | Camera:
461 | collapsed: false
462 | Displays:
463 | collapsed: false
464 | Height: 1056
465 | Hide Left Dock: false
466 | Hide Right Dock: false
467 | QMainWindow State: 000000ff00000000fd0000000400000000000002a700000393fc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006300fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000028000000db000000db00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d0065007200610100000109000002b20000001800ffffff000000010000010f00000393fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000002800000393000000b300fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007800000003ffc0100000002fb0000000800540069006d00650100000000000007800000031700fffffffb0000000800540069006d00650100000000000004500000000000000000000003be0000039300000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
468 | Selection:
469 | collapsed: false
470 | Time:
471 | collapsed: false
472 | Tool Properties:
473 | collapsed: false
474 | Views:
475 | collapsed: false
476 | Width: 1920
477 | X: 0
478 | Y: 24
479 |
--------------------------------------------------------------------------------
/rviz/settings.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 78
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Status1
9 | - /RobotModel1
10 | - /LaserScan1
11 | - /PointCloud21
12 | - /Marker1
13 | - /Odometry1/Shape1
14 | Splitter Ratio: 0.5
15 | Tree Height: 559
16 | - Class: rviz/Selection
17 | Name: Selection
18 | - Class: rviz/Tool Properties
19 | Expanded:
20 | - /2D Pose Estimate1
21 | - /2D Nav Goal1
22 | - /Publish Point1
23 | Name: Tool Properties
24 | Splitter Ratio: 0.588679016
25 | - Class: rviz/Views
26 | Expanded:
27 | - /Current View1
28 | Name: Views
29 | Splitter Ratio: 0.5
30 | - Class: rviz/Time
31 | Experimental: false
32 | Name: Time
33 | SyncMode: 0
34 | SyncSource: LaserScan
35 | Toolbars:
36 | toolButtonStyle: 2
37 | Visualization Manager:
38 | Class: ""
39 | Displays:
40 | - Alpha: 0.5
41 | Cell Size: 1
42 | Class: rviz/Grid
43 | Color: 160; 160; 164
44 | Enabled: true
45 | Line Style:
46 | Line Width: 0.0299999993
47 | Value: Lines
48 | Name: Grid
49 | Normal Cell Count: 0
50 | Offset:
51 | X: 0
52 | Y: 0
53 | Z: 0
54 | Plane: XY
55 | Plane Cell Count: 10
56 | Reference Frame:
57 | Value: true
58 | - Alpha: 1
59 | Class: rviz/RobotModel
60 | Collision Enabled: false
61 | Enabled: true
62 | Links:
63 | All Links Enabled: true
64 | Expand Joint Details: false
65 | Expand Link Details: false
66 | Expand Tree: false
67 | Link Tree Style: Links in Alphabetic Order
68 | base_link:
69 | Alpha: 1
70 | Show Axes: false
71 | Show Trail: false
72 | Value: true
73 | camera:
74 | Alpha: 1
75 | Show Axes: false
76 | Show Trail: false
77 | Value: true
78 | chassis:
79 | Alpha: 1
80 | Show Axes: false
81 | Show Trail: false
82 | Value: true
83 | gps:
84 | Alpha: 1
85 | Show Axes: false
86 | Show Trail: false
87 | Value: true
88 | hemisphere_gps:
89 | Alpha: 1
90 | Show Axes: false
91 | Show Trail: false
92 | Value: true
93 | hitch:
94 | Alpha: 1
95 | Show Axes: false
96 | Show Trail: false
97 | Value: true
98 | hokuyo:
99 | Alpha: 1
100 | Show Axes: false
101 | Show Trail: false
102 | Value: true
103 | imu:
104 | Alpha: 1
105 | Show Axes: false
106 | Show Trail: false
107 | Value: true
108 | left_front_axle_carrier:
109 | Alpha: 1
110 | Show Axes: false
111 | Show Trail: false
112 | left_front_wheel:
113 | Alpha: 1
114 | Show Axes: false
115 | Show Trail: false
116 | Value: true
117 | left_rear_axle_carrier:
118 | Alpha: 1
119 | Show Axes: false
120 | Show Trail: false
121 | left_rear_wheel:
122 | Alpha: 1
123 | Show Axes: false
124 | Show Trail: false
125 | Value: true
126 | left_steering_link:
127 | Alpha: 1
128 | Show Axes: false
129 | Show Trail: false
130 | right_front_axle_carrier:
131 | Alpha: 1
132 | Show Axes: false
133 | Show Trail: false
134 | right_front_wheel:
135 | Alpha: 1
136 | Show Axes: false
137 | Show Trail: false
138 | Value: true
139 | right_rear_axle_carrier:
140 | Alpha: 1
141 | Show Axes: false
142 | Show Trail: false
143 | right_rear_wheel:
144 | Alpha: 1
145 | Show Axes: false
146 | Show Trail: false
147 | Value: true
148 | right_steering_link:
149 | Alpha: 1
150 | Show Axes: false
151 | Show Trail: false
152 | Name: RobotModel
153 | Robot Description: robot_description
154 | TF Prefix: ""
155 | Update Interval: 0
156 | Value: true
157 | Visual Enabled: true
158 | - Alpha: 0.699999988
159 | Class: rviz/Map
160 | Color Scheme: map
161 | Draw Behind: false
162 | Enabled: true
163 | Name: Map
164 | Topic: /move_base/global_costmap/costmap
165 | Unreliable: false
166 | Use Timestamp: false
167 | Value: true
168 | - Alpha: 1
169 | Autocompute Intensity Bounds: true
170 | Autocompute Value Bounds:
171 | Max Value: 10
172 | Min Value: -10
173 | Value: true
174 | Axis: Z
175 | Channel Name: intensity
176 | Class: rviz/LaserScan
177 | Color: 255; 255; 255
178 | Color Transformer: Intensity
179 | Decay Time: 0
180 | Enabled: true
181 | Invert Rainbow: false
182 | Max Color: 255; 255; 255
183 | Max Intensity: 8.38170197e-38
184 | Min Color: 0; 0; 0
185 | Min Intensity: 8.38170197e-38
186 | Name: LaserScan
187 | Position Transformer: XYZ
188 | Queue Size: 10
189 | Selectable: true
190 | Size (Pixels): 3
191 | Size (m): 0.0500000007
192 | Style: Flat Squares
193 | Topic: /scan
194 | Unreliable: false
195 | Use Fixed Frame: true
196 | Use rainbow: true
197 | Value: true
198 | - Alpha: 1
199 | Buffer Length: 1
200 | Class: rviz/Path
201 | Color: 25; 255; 0
202 | Enabled: true
203 | Head Diameter: 0.300000012
204 | Head Length: 0.200000003
205 | Length: 0.300000012
206 | Line Style: Lines
207 | Line Width: 0.0299999993
208 | Name: Path (local)
209 | Offset:
210 | X: 0
211 | Y: 0
212 | Z: 0
213 | Pose Color: 255; 85; 255
214 | Pose Style: None
215 | Radius: 0.0299999993
216 | Shaft Diameter: 0.100000001
217 | Shaft Length: 0.100000001
218 | Topic: /move_base/TrajectoryPlannerROS/local_plan
219 | Unreliable: false
220 | Value: true
221 | - Alpha: 1
222 | Buffer Length: 1
223 | Class: rviz/Path
224 | Color: 0; 0; 255
225 | Enabled: true
226 | Head Diameter: 0.300000012
227 | Head Length: 0.200000003
228 | Length: 0.300000012
229 | Line Style: Lines
230 | Line Width: 0.0299999993
231 | Name: Path (global)
232 | Offset:
233 | X: 0
234 | Y: 0
235 | Z: 0
236 | Pose Color: 255; 85; 255
237 | Pose Style: None
238 | Radius: 0.0299999993
239 | Shaft Diameter: 0.100000001
240 | Shaft Length: 0.100000001
241 | Topic: /move_base/TrajectoryPlannerROS/global_plan
242 | Unreliable: false
243 | Value: true
244 | - Class: rviz/Camera
245 | Enabled: true
246 | Image Rendering: background and overlay
247 | Image Topic: /camera/image_raw
248 | Name: Camera
249 | Overlay Alpha: 0.5
250 | Queue Size: 2
251 | Transport Hint: raw
252 | Unreliable: false
253 | Value: true
254 | Visibility:
255 | Grid: true
256 | LaserScan: true
257 | Map: true
258 | Marker: true
259 | MarkerArray: true
260 | Odometry: true
261 | Path (global): true
262 | Path (local): true
263 | PointCloud2: true
264 | PoseWithCovariance: true
265 | RobotModel: true
266 | Value: true
267 | Zoom Factor: 1
268 | - Alpha: 1
269 | Autocompute Intensity Bounds: true
270 | Autocompute Value Bounds:
271 | Max Value: 10
272 | Min Value: -10
273 | Value: true
274 | Axis: Z
275 | Channel Name: intensity
276 | Class: rviz/PointCloud2
277 | Color: 255; 255; 255
278 | Color Transformer: Intensity
279 | Decay Time: 0
280 | Enabled: true
281 | Invert Rainbow: false
282 | Max Color: 255; 255; 255
283 | Max Intensity: 999999
284 | Min Color: 0; 0; 0
285 | Min Intensity: 999999
286 | Name: PointCloud2
287 | Position Transformer: XYZ
288 | Queue Size: 10
289 | Selectable: true
290 | Size (Pixels): 3
291 | Size (m): 0.00999999978
292 | Style: Flat Squares
293 | Topic: /laser_pointcloud
294 | Unreliable: false
295 | Use Fixed Frame: true
296 | Use rainbow: true
297 | Value: true
298 | - Class: rviz/Marker
299 | Enabled: true
300 | Marker Topic: /line_vis
301 | Name: Marker
302 | Namespaces:
303 | {}
304 | Queue Size: 100
305 | Value: true
306 | - Class: rviz/MarkerArray
307 | Enabled: true
308 | Marker Topic: /mission_planner/vis_waypoints
309 | Name: MarkerArray
310 | Namespaces:
311 | {}
312 | Queue Size: 100
313 | Value: true
314 | - Angle Tolerance: 0.100000001
315 | Class: rviz/Odometry
316 | Covariance:
317 | Orientation:
318 | Alpha: 0.5
319 | Color: 255; 255; 127
320 | Color Style: Unique
321 | Frame: Local
322 | Offset: 1
323 | Scale: 1
324 | Value: true
325 | Position:
326 | Alpha: 0.300000012
327 | Color: 204; 51; 204
328 | Scale: 1
329 | Value: true
330 | Value: false
331 | Enabled: false
332 | Keep: 100
333 | Name: Odometry
334 | Position Tolerance: 0.100000001
335 | Shape:
336 | Alpha: 1
337 | Axes Length: 1
338 | Axes Radius: 0.100000001
339 | Color: 255; 25; 0
340 | Head Length: 0.300000012
341 | Head Radius: 0.100000001
342 | Shaft Length: 2
343 | Shaft Radius: 0.300000012
344 | Value: Arrow
345 | Topic: /gps_odom
346 | Unreliable: false
347 | Value: false
348 | - Alpha: 1
349 | Axes Length: 1
350 | Axes Radius: 0.100000001
351 | Class: rviz/PoseWithCovariance
352 | Color: 255; 25; 0
353 | Covariance:
354 | Orientation:
355 | Alpha: 0.5
356 | Color: 255; 255; 127
357 | Color Style: Unique
358 | Frame: Local
359 | Offset: 1
360 | Scale: 1
361 | Value: true
362 | Position:
363 | Alpha: 0.300000012
364 | Color: 204; 51; 204
365 | Scale: 1
366 | Value: true
367 | Value: false
368 | Enabled: false
369 | Head Length: 0.5
370 | Head Radius: 0.5
371 | Name: PoseWithCovariance
372 | Shaft Length: 2
373 | Shaft Radius: 0.200000003
374 | Shape: Arrow
375 | Topic: /robot_pose_ekf/odom_combined
376 | Unreliable: false
377 | Value: false
378 | Enabled: true
379 | Global Options:
380 | Background Color: 48; 48; 48
381 | Default Light: true
382 | Fixed Frame: odom
383 | Frame Rate: 30
384 | Name: root
385 | Tools:
386 | - Class: rviz/Interact
387 | Hide Inactive Objects: true
388 | - Class: rviz/MoveCamera
389 | - Class: rviz/Select
390 | - Class: rviz/FocusCamera
391 | - Class: rviz/Measure
392 | - Class: rviz/SetInitialPose
393 | Topic: /initialpose
394 | - Class: rviz/SetGoal
395 | Topic: /move_base_simple/goal
396 | - Class: rviz/PublishPoint
397 | Single click: true
398 | Topic: /clicked_point
399 | Value: true
400 | Views:
401 | Current:
402 | Angle: 0.00999999978
403 | Class: rviz/TopDownOrtho
404 | Enable Stereo Rendering:
405 | Stereo Eye Separation: 0.0599999987
406 | Stereo Focal Distance: 1
407 | Swap Stereo Eyes: false
408 | Value: false
409 | Invert Z Axis: false
410 | Name: Current View
411 | Near Clip Distance: 0.00999999978
412 | Scale: -55.4977036
413 | Target Frame:
414 | Value: TopDownOrtho (rviz)
415 | X: 313696.188
416 | Y: 4684524.5
417 | Saved: ~
418 | Window Geometry:
419 | Camera:
420 | collapsed: false
421 | Displays:
422 | collapsed: false
423 | Height: 1056
424 | Hide Left Dock: false
425 | Hide Right Dock: false
426 | QMainWindow State: 000000ff00000000fd00000004000000000000016b00000393fc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006300fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000028000002c2000000db00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d00650072006101000002f0000000cb0000001800ffffff000000010000010f00000393fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000002800000393000000b300fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007800000003ffc0100000002fb0000000800540069006d00650100000000000007800000031700fffffffb0000000800540069006d00650100000000000004500000000000000000000004fa0000039300000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
427 | Selection:
428 | collapsed: false
429 | Time:
430 | collapsed: false
431 | Tool Properties:
432 | collapsed: false
433 | Views:
434 | collapsed: false
435 | Width: 1920
436 | X: 0
437 | Y: 24
438 |
439 |
440 |
--------------------------------------------------------------------------------
/scripts/BackOnTrack.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import rospy
3 | import math
4 | from std_msgs.msg import Bool
5 | from sensor_msgs.msg import LaserScan
6 | from std_msgs.msg import String
7 | from std_msgs.msg import Header
8 | import genpy
9 | from std_msgs.msg import String
10 | from std_msgs.msg import Float64, Float32
11 | #import statistics
12 | from numpy import median
13 | from ackermann_msgs.msg import AckermannDrive
class TrackFollower():
    """Steers the tractor toward the road seen by the LIDAR.

    Picks out every scan point whose range falls in the 5 m - 8 m band
    (assumed to be road surface), takes the median of their bearing
    angles, and publishes that as the steering command.

    Sub: scan       - sensor_msgs/LaserScan
    Pub: /autodrive - ackermann_msgs/AckermannDrive
    """

    def __init__(self):
        # Publisher is created lazily on the first scan (i.e. after
        # rospy.init_node has run in listener()) and then reused.
        # Re-creating the Publisher on every callback, as the original
        # code did, silently drops messages because each brand-new
        # connection needs time to establish.
        self.pubAcker = None

    def callback(self, data):
        """Scan callback: debug-only range sanitation, then steering logic.

        The totalDist pass below only mirrors the ranges for debugging;
        otherCode() re-reads data.ranges itself.
        """
        totalDist = []
        for i in range(len(data.ranges)):
            totalDist.append(data.ranges[i])
            # Clamp 'inf' range returns to a large finite sentinel.
            if totalDist[i] > 1000000:
                totalDist[i] = 100000
        self.otherCode(data)

    def listener(self):
        """Initialize the ROS node and subscribe to the laser scan.

        anonymous=True makes rospy append a unique suffix to the node
        name so several listeners can run at the same time.
        """
        rospy.init_node('listener', anonymous=True)
        rospy.Subscriber('scan', LaserScan, self.callback)

    def otherCode(self, data):
        """Compute the road's median bearing from one scan and publish it."""
        if self.pubAcker is None:
            self.pubAcker = rospy.Publisher('/autodrive', AckermannDrive,
                                            queue_size=10)
        ack_msg = AckermannDrive()
        speed = 1      # constant forward speed
        angs = []      # bearings (radians) of points classified as road
        for i in range(len(data.ranges)):
            dist = data.ranges[i]
            # Clamp 'inf' returns to a finite sentinel (well outside the
            # 5-8 m road band, so they are never classified as road).
            if dist > 1000000:
                dist = 100000
            if 5 < dist < 8:
                # Beam index -> bearing: 760 beams spanning 190 degrees,
                # centered on beam 380.
                angs.append(math.radians((i - 380.0) * (190.0 / 760.0)))
        if not angs:
            # No road points in this scan: median([]) would produce NaN
            # and command NaN steering downstream, so skip this cycle.
            rospy.logwarn('TrackFollower: no road points detected in scan')
            return
        ack_msg.speed = speed
        ack_msg.steering_angle = median(angs)
        self.pubAcker.publish(ack_msg)
        rospy.loginfo(median(angs))
if __name__ == '__main__':
    # Build the follower, wire up the node/subscriber, then hand
    # control to the rospy callback loop.
    track = TrackFollower()
    track.listener()
    rospy.spin()
--------------------------------------------------------------------------------
/scripts/ConvertToAckermann.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Subscribes to a twist message and converts it to an ackermann message.
4 | Subscribes to /cmd_twist
5 | Publishes to /cmd_vel
6 |
7 | Assumes all inputs are normalized between -1 and 1
8 |
9 | @edited: 12/02/2018
10 | @author: Amy Phung
11 | """
12 |
13 | import rospy
14 | from geometry_msgs.msg import Twist
15 | from ackermann_msgs.msg import AckermannDrive
16 |
class ConvertToAckermann():
    """Converts normalized Twist commands into AckermannDrive commands.

    Sub: /cmd_twist - geometry_msgs/Twist (linear.x and angular.z in [-1, 1])
    Pub: /cmd_vel   - ackermann_msgs/AckermannDrive
    """

    def __init__(self):
        # Latest twist message received; None until the first one arrives.
        self.twist_data = None

        # Define ROS constructs
        rospy.init_node("convert_to_ackermann")
        self.twist_sub = rospy.Subscriber("/cmd_twist", Twist, self.twist_cb)
        self.ack_pub = rospy.Publisher("/cmd_vel", AckermannDrive, queue_size=1)
        self.update_rate = rospy.Rate(10)  # 10 Hz publish loop

    def twist_cb(self, msg):
        """Cache the most recent twist message for the run loop."""
        self.twist_data = msg

    def twist_to_ackermann(self, linear_vel, angular_vel):
        """
        Converts linear and angular velocities to linear velocity and
        steering angle for ackermann messages.

        Args:
            linear_vel - forward linear velocity from Twist message
                (should be between -1 and 1; mapped to [-2, 2])
            angular_vel - angular velocity from Twist message
                (should be between -1 and 1; mapped to [-45, 45] degrees)

        Returns:
            AckermannDrive message with speed and steering_angle set.
        """
        ack_msg = AckermannDrive()
        ack_msg.speed = reMap(linear_vel, 1, -1, 2, -2)
        ack_msg.steering_angle = reMap(angular_vel, 1, -1, 45, -45)

        return ack_msg

    def run(self):
        """Timed loop: republish the latest twist as ackermann at 10 Hz."""
        while not rospy.is_shutdown():
            # Idiom fix: identity check against None rather than ==.
            if self.twist_data is None:
                rospy.loginfo('MSG: No twist data published')
                self.update_rate.sleep()
                continue
            linear = self.twist_data.linear.x
            angular = self.twist_data.angular.z

            ack_msg = self.twist_to_ackermann(linear, angular)
            self.ack_pub.publish(ack_msg)
            self.update_rate.sleep()
62 |
def reMap(value, maxInput, minInput, maxOutput, minOutput):
    """Linearly map value from [minInput, maxInput] onto [minOutput, maxOutput].

    The input is clamped to the input interval first, so the result is
    always inside the output interval (assuming minInput <= maxInput).
    """
    # Clamp into the input range.
    clamped = min(max(value, minInput), maxInput)

    # Normalize to [0, 1], then scale and shift into the output range.
    fraction = float(clamped - minInput) / float(maxInput - minInput)
    return minOutput + fraction * (maxOutput - minOutput)
74 |
if __name__ == "__main__":
    # Construct the node (init_node + pub/sub wiring happens in
    # __init__) and enter its timed publish loop.
    convert_to_ackermann = ConvertToAckermann()
    convert_to_ackermann.run()
78 |
--------------------------------------------------------------------------------
/scripts/LidarCodeBasic.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | import math
5 | from std_msgs.msg import Bool
6 | from sensor_msgs.msg import LaserScan
7 | from std_msgs.msg import String
8 | from std_msgs.msg import Header
9 | import genpy
10 | from std_msgs.msg import String
11 | from std_msgs.msg import Float64
class ObstacleDetection():
    """Stops the tractor when the LIDAR sees an obstacle in its path.

    Points closer than a ground-plane threshold are treated as obstacle
    points; if enough of them fall inside the tractor-width corridor, a
    soft e-stop is published.

    Sub: scan              - sensor_msgs/LaserScan
    Pub: /softestop        - std_msgs/Bool (True when a stop is triggered)
         /scan_verticals   - std_msgs/Float64 (mean obstacle distance ahead; -1 if none)
         /scan_horizontals - std_msgs/Float64 (mean lateral offset; -1 if none)
    """

    def __init__(self):
        self.hasSensed = False  # latched True while a stop is being commanded
        # Publishers are created lazily on the first scan (after
        # rospy.init_node has run) and reused. Re-creating them on every
        # callback, as the original code did, drops messages because each
        # new connection needs time to establish.
        self.pub0 = None
        self.pub1 = None
        self.pub2 = None

    def callback(self, data):
        """Scan callback: debug-only range sanitation, then detection.

        The totalDist pass below only mirrors the ranges for debugging;
        otherCode() re-reads data.ranges itself.
        """
        totalDist = []
        for i in range(len(data.ranges)):
            totalDist.append(data.ranges[i])
            # Clamp 'inf' range returns to a large finite sentinel.
            if totalDist[i] > 1000000:
                totalDist[i] = 100000
        self.otherCode(data)

    def listener(self):
        """Initialize the ROS node and subscribe to the scan topic.

        anonymous=True makes rospy append a unique suffix to the node
        name so several listeners can run at the same time.
        """
        rospy.init_node('listener', anonymous=True)
        rospy.Subscriber('scan', LaserScan, self.callback)

    def otherCode(self, data):
        """Detect obstacles in one scan and publish stop/position topics."""
        if self.pub0 is None:
            self.pub0 = rospy.Publisher('/softestop', Bool, queue_size=10)
            self.pub1 = rospy.Publisher('/scan_verticals', Float64, queue_size=10)
            self.pub2 = rospy.Publisher('/scan_horizontals', Float64, queue_size=10)
        totalDist = []
        verticalDistance = []    # distance ahead of the tractor, per beam
        horizontalDistance = []  # lateral distance from tractor centerline, per beam
        for i in range(len(data.ranges)):
            totalDist.append(data.ranges[i])
            if totalDist[i] > 1000000:  # clamp 'inf' returns
                totalDist[i] = 100000
            # Beam index -> bearing: 760 beams spanning 190 degrees,
            # centered on beam 380.
            ang = math.radians((i - 380.0) * (190.0 / 760.0))
            verticalDistance.append(abs(math.cos(ang) * totalDist[i]))
            horizontalDistance.append(math.sin(ang) * totalDist[i])
        someDistanceAway = 4.5   # ranges beyond this are assumed to be ground ***
        lengthOfTheTractor = 1.2  # tractor width for the collision corridor ***
        obstaclePoints = 0  # points classified as non-ground
        triggerPoints = 0   # points inside the collision corridor
        numberOfPointsNeededToTrigger = 15  # points required to command a stop ***
        sumOfVert = 0  # running sum of obstacle forward distances
        sumOfHor = 0   # running sum of obstacle lateral offsets
        for i in range(len(totalDist)):
            if totalDist[i] < someDistanceAway:  # closer than the ground plane
                obstaclePoints += 1
                sumOfVert += verticalDistance[i]
                sumOfHor += horizontalDistance[i]
                # Inside the corridor the tractor will actually drive through?
                if abs(horizontalDistance[i]) < (lengthOfTheTractor / 2.0):
                    triggerPoints += 1
        averageVert = Float64()  # mean forward distance of obstacle points
        averageHor = Float64()   # mean lateral offset of obstacle points
        averageNull = Float64()  # published when no obstacle points were seen
        averageNull.data = -1
        if triggerPoints > numberOfPointsNeededToTrigger and not self.hasSensed:
            # Enough corridor points: command a stop, and latch so we
            # only publish the stop once per detection.
            self.pub0.publish(True)
            self.hasSensed = True
        if triggerPoints <= numberOfPointsNeededToTrigger:
            # Corridor is clear again; allow a future stop to fire.
            self.hasSensed = False
        if obstaclePoints > 0:
            averageVert.data = sumOfVert / obstaclePoints
            averageHor.data = sumOfHor / obstaclePoints
            self.pub1.publish(averageVert)
            self.pub2.publish(averageHor)
        else:
            self.pub1.publish(averageNull)  # -1 because no obstacles
            self.pub2.publish(averageNull)  # -1 because no obstacles
86 |
if __name__ == '__main__':
    # Build the detector, wire up the node/subscriber, then hand
    # control to the rospy callback loop.
    obs = ObstacleDetection()
    obs.listener()
    rospy.spin()
--------------------------------------------------------------------------------
/scripts/ObstacleDetector2D.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import rospy
3 | import math
4 | from std_msgs.msg import Bool, Header, String, Float64, Float32
5 | from sensor_msgs.msg import LaserScan
6 | import genpy
7 | import numpy as np
8 | from geometry_msgs.msg import Point,PointStamped
class ObstacleDetector2D():
    """
    Detects obstacles in 2D space and publishes the 2D position of the
    obstacle cluster closest to the tractor's centerline.

    Sub: /front/scan - LaserScan
    Pub: /point2follow - PointStamped
    """

    def __init__(self):
        self.pub0 = rospy.Publisher('/estop', Bool, queue_size=10)  # init your publishers early
        self.pubObstPoint = rospy.Publisher('/point2follow', PointStamped, queue_size=10)
        rospy.init_node('ObstacleDetector2D', anonymous=True)
        self.subScan = rospy.Subscriber('/front/scan', LaserScan, self.callback)
        self.rate = rospy.Rate(1)  # run detection at 1 Hz
        self.scan = None           # latest LaserScan; set by callback

    def callback(self, data):
        """Cache the most recent scan for the main loop."""
        self.scan = data

    def detectObstacles(self):
        """
        Cluster scan points that sit above the ground into obstacles, pick
        the cluster nearest the centerline, and publish its mean position.
        """
        # TODO: Fix this up a little bit and make it more robust
        totalDist = []
        verticalDistance = []    # distance ahead of the tractor, per beam
        horizontalDistance = []  # lateral distance from centerline, per beam
        for i in range(len(self.scan.ranges)):
            totalDist.append(self.scan.ranges[i])
            if totalDist[i] > 1000000:  # clamp 'inf' returns
                totalDist[i] = 100000
            # Beam index -> bearing: 760 beams spanning 190 degrees,
            # centered on beam 380.
            ang = math.radians((i - 380.0) * (190.0 / 760.0))
            verticalDistance.append(abs(math.cos(ang) * totalDist[i]))
            horizontalDistance.append(math.sin(ang) * totalDist[i])
        someDistanceAway = 4.5   # ranges beyond this are assumed to be ground ***
        lengthOfTheTractor = 1.2  # tractor width for the collision corridor ***
        obstaclePoints = []  # number of points in each obstacle cluster
        triggerPoints = 0    # points inside the collision corridor (not published)
        numberOfPointsNeededToTrigger = 15  # retained from stop logic; unused here ***
        sumOfVert = []  # per-cluster sum of forward distances
        sumOfHor = []   # per-cluster sum of lateral offsets
        obsNumber = -1  # index of the cluster currently being grown
        obsCount = 20   # ground-point run length; a gap > 10 starts a new cluster
        for i in range(len(totalDist)):  # Sweep through the distances
            if totalDist[i] < someDistanceAway:  # closer than the ground plane
                if obsCount > 10:
                    # Long enough gap since the last obstacle point:
                    # open a new cluster.
                    obsNumber += 1
                    sumOfVert.append(0)
                    sumOfHor.append(0)
                    obstaclePoints.append(0)
                sumOfVert[obsNumber] += verticalDistance[i]
                sumOfHor[obsNumber] += horizontalDistance[i]
                obstaclePoints[obsNumber] += 1
                if abs(horizontalDistance[i]) < (lengthOfTheTractor / 2.0):
                    triggerPoints += 1
                obsCount = 0  # reset the gap counter
            else:
                obsCount += 1
        if not obstaclePoints:
            # No non-ground points at all this scan: nothing to publish
            # (np.argmin on an empty array would raise ValueError).
            return
        # NOTE: np.float was removed from NumPy; plain float is the
        # equivalent dtype.
        sumOfVert = np.array(sumOfVert, dtype=float)
        sumOfHor = np.array(sumOfHor, dtype=float)
        newObsPts = np.array(obstaclePoints, dtype=float)
        for i in range(len(obstaclePoints)):
            if (sumOfVert[i] / newObsPts[i]) < 0.02:
                # Cluster's mean forward distance is essentially zero —
                # presumably sensor noise; push it far sideways so it is
                # never selected below. (TODO confirm intent.)
                sumOfHor[i] = 35000
        # Pick the cluster with the smallest mean |lateral offset|.
        absoSumOfHor = np.absolute(sumOfHor / newObsPts)
        targetObstacle = np.argmin(absoSumOfHor)
        point = Point()
        point.x = sumOfVert[targetObstacle] / obstaclePoints[targetObstacle]  # mean distance ahead
        point.y = sumOfHor[targetObstacle] / obstaclePoints[targetObstacle]   # mean lateral offset
        point.z = 0
        pointStamped = PointStamped()
        pointStamped.point = point
        pointStamped.header = Header()
        pointStamped.header.frame_id = ('/hood')
        self.pubObstPoint.publish(pointStamped)

    def main(self):
        """Run detection at the loop rate once a scan has arrived."""
        while not rospy.is_shutdown():
            if self.scan is not None:
                self.detectObstacles()
            self.rate.sleep()
88 |
if __name__ == '__main__':
    # Construct the detector (init_node happens in __init__) and enter
    # its loop; main() blocks until shutdown, so spin() is only reached
    # on exit.
    obstec = ObstacleDetector2D()
    obstec.main()
    rospy.spin()
93 |
--------------------------------------------------------------------------------
/scripts/PointFollower.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import rospy
3 | import math
4 | from geometry_msgs.msg import PointStamped
5 | from ackermann_msgs.msg import AckermannDrive
6 | from std_msgs.msg import String
7 | from state_controller.msg import TwistLabeled
8 | from tf import TransformListener
9 |
class PointFollower():
    """
    Directs the tractor at a point in 2D space, changing speed with
    distance from the point.

    Sub: /point2follow - PointStamped
    Pub: /state_controller/cmd_behavior_twist - TwistLabeled
    """

    def __init__(self):
        rospy.init_node('PointFollower', anonymous=True)
        self.subPoint = rospy.Subscriber('/point2follow', PointStamped, self.callback)
        self.pubDrive = rospy.Publisher('/state_controller/cmd_behavior_twist', TwistLabeled, queue_size=10)
        self.goalPoint = None    # latest goal point; None until first message
        self.rate = rospy.Rate(1)
        self.tf = TransformListener()
        self.prevAngle = 0       # last commanded steering angle, degrees

    def callback(self, data):
        """Cache the goal point from the incoming PointStamped."""
        # self.goalPoint = self.tf.transformPoint('/hood',data).point
        self.goalPoint = data.point

    def follow(self):
        """
        Follows the point given, moving faster if farther, and stopping if
        closer than 1 meter (holding the previous steering angle while
        stopped).
        """
        drive_msg = TwistLabeled()
        drive_msg.label = String()
        drive_msg.label.data = "2D PointFollower"
        distance = math.sqrt(self.goalPoint.x ** 2 + self.goalPoint.y ** 2)  # meters
        if distance > 1:
            # Farther away -> faster, capped at full speed below.
            speed = 0.25 * (distance - 1)
        else:
            # Goal is really close: stop moving.
            speed = 0
        if speed > 1:
            speed = 1
        drive_msg.twist.linear.x = speed
        # Bearing to the goal, in degrees.
        angle = math.degrees(math.atan2(self.goalPoint.y, self.goalPoint.x))
        if distance < 1:
            # While stopped, hold the last commanded angle instead of
            # chasing a goal effectively underneath the tractor. (Fix:
            # the original elif chain only reached this branch when the
            # raw angle was already within +/-45 degrees; the distance
            # check now takes precedence over clamping.)
            angle = self.prevAngle
        elif angle > 45:
            angle = 45
        elif angle < -45:
            angle = -45
        self.prevAngle = angle
        drive_msg.twist.angular.z = angle / 45  # normalize to [-1, 1]
        self.pubDrive.publish(drive_msg)

    def main(self):
        """Run follow() at the loop rate once a goal point exists."""
        while not rospy.is_shutdown():
            if self.goalPoint is not None:
                self.follow()
            self.rate.sleep()
63 |
if __name__ == '__main__':
    # Construct the follower (init_node happens in __init__) and enter
    # its blocking loop; no rospy.spin() needed since main() loops
    # until shutdown.
    follower = PointFollower()
    follower.main()
--------------------------------------------------------------------------------
/scripts/README.md:
--------------------------------------------------------------------------------
1 | ### Overview
2 | This folder holds python scripts for GRAVL
3 |
4 | ### Files
5 |
6 | #### `circle_drive.py`
7 | Drives Kubo in a circle at a slow speed to determine wheel slippage. To run, first start a roscore by running the command `roscore` in a new Terminal.
8 | Then, in another Terminal, start the state machine controller by running the command `rosrun tractor State`. Finally, in a new Terminal, execute
9 | the circle driving script by running the command `circle_drive.py`. Make sure that Kubo is set up and running by following the instructions on the homepage
10 | of the Github GRAVL wiki.
11 |
12 | #### `ConvertToAckermann.py`
13 | Converts from `/cmd_twist` geometry_msgs/Twist topic to `/cmd_vel` ackermann_msgs/AckermannDrive topic. Interpolates velocity from [-1.0, 1.0] to [-2.0, 2.0]. Interpolates angle from [-1.0, 1.0] to [-45.0, 45.0]. Run separately through `rosrun gravl ConvertToAckermann.py` - must be running to connect state controller with tractor.
14 |
15 | #### `test_pointgrey.py`
16 | Displays raw imagery from a pointgrey camera publishing through ROS. To run, first setup a pointgrey camera such that it is
17 | publishing over ROS by looking at this wiki page: ([link](https://github.com/olinrobotics/gravl/wiki/Kubo:-Cameras)) Then,
18 | run the pointgrey program by entering `python pointgrey.py` in the Terminal.
19 |
20 | #### `pointgrey_bag.py`
21 | Displays bag file of pointgrey camera imagery. To run, first setup a pointgrey camera such that it is
22 | publishing over ROS by looking at this wiki page: ([link](https://github.com/olinrobotics/gravl/wiki/Kubo:-Cameras)) Then,
23 | run the program by entering `python pointgrey_bag.py "filename"`, where `"filename"` is the name of a rosbag file of the
24 | `/camera/image_raw` topic.
25 |
26 | #### `LidarCodeBasic.py`
Takes a scan topic from LIDAR to track obstacles. To run, first either run a node that produces a scan topic (e.g. rosrun urg_node urg_node) or the bags labeled tractorLidar*. Open rviz with rosrun rviz rviz. Press the + button and add by topic /scan. Change the box that has 'map' to '/laser'. You should be able to visualize the data. Now run the program (rosrun gravl LidarCodeBasic.py) and data will be published. /softestop is whether there is an obstacle in the way, /scan_verticals is the distance from the tractor of the obstacle, /scan_horizontals is the distance horizontally from the center of the tractor.
28 |
29 | #### `LidarFollower.py`
Takes a scan topic from LIDAR to track obstacles. To run, first either run a node that produces a scan topic (e.g. rosrun urg_node urg_node) or the bags labeled tractorLidar*. For debugging, open rviz with rosrun rviz rviz. Press the + button and add by topic /scan. Change the box that has 'map' to '/laser'. You should be able to visualize the data. Now run the program (rosrun gravl LidarFollower.py) and data will be published. /autodrive is the steering angle and speed to go towards the 'obstacle', /scan_verticals is the distance from the tractor of the obstacle, /scan_horizontals is the distance horizontally from the center of the tractor.
31 |
32 | #### `BackOnTrack.py`
Takes a scan topic from LIDAR to track obstacles. To run, first either run a node that produces a scan topic (e.g. rosrun urg_node urg_node) or the bags labeled tractorLidar*. For debugging, open rviz with rosrun rviz rviz. Press the + button and add by topic /scan. Change the box that has 'map' to '/laser'. You should be able to visualize the data. Now run the program (rosrun gravl BackOnTrack.py) and data will be published. /autodrive is just a placeholder for now (a final output topic has not been chosen), but it publishes the steering angle and speed intended to steer toward the road; this behavior is experimental. /scan_verticals is the distance from the tractor of the obstacle, /scan_horizontals is the distance horizontally from the center of the tractor.
34 |
35 | #### `gps_navigation_node.py`
36 | Takes latitude, longitude, and heading data from a topic and latitude and longitude data from waypoints and uses this data to produce ackermann drive velocity and steering angle messages . To run, first setup the Hemisphere Vector GPS to publish heading data and the Swift Navigation RTK GPS to publish current latitude and longitude data. Then, run the program by entering `gps_navigation_node.py` in the Terminal.
37 |
38 | #### `recognize_road.py`
39 | Tries to recognize the road.
40 | At the moment, all it does is display an annotated video stream of a road, but will be modified to publish probably ackermann messages.
41 | To run, start `roscore`, play the rosbags which can be found on a usb in the lab, and `rosrun tractor road_recognition`.
42 | The script will subscribe to the topic `/camera/image_raw`.
43 | The important bit of code is the callback which establishes the publisher, and the recognize_road function which at the moment returns an annotated picture, but should soon give the tractor directions.
44 |
45 | #### `RecordPoints.py`
46 | Subscribes to `/tractor_odom` and starts a tkinter GUI with Record Point and Save Data buttons. When Record Point is clicked, it saves the last 2D x-y odometry point received. When Save Data is clicked, it writes the data to a file that can be used in the cut_planner package.
47 |
48 | #### `TractorOdom.py`
49 | Uses orientation data from `/imu/data` and position data from `/piksi/navsatfix_best_fix` and creates an odometry message that is published to `/tractor_odom`. Also broadcasts this odometry message as the transform between `/base_link` and `/odom`
50 |
--------------------------------------------------------------------------------
/scripts/RecordPoints.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

"""
Subscribes to `/tractor_odom` and starts a tkinter GUI with Record Point and
Save Data buttons. When Record Point is clicked, it saves the last 2D x-y
odometry point received. When Save Data is clicked, it writes the data to a
file that can be used in the cut_planner package.
"""

# import keyboard
import tkinter as tk
import rospy
import pandas as pd

from nav_msgs.msg import Odometry
16 |
class Recorder:
    """Tkinter GUI for recording 2D odometry waypoints from `/tractor_odom`.

    "Record Point" stores the position from the most recently received
    odometry message; "Save Data" writes all stored points to a waypoint
    file consumable by the cut_planner package.
    """

    def __init__(self):
        rospy.init_node("point_recorder")
        self.odom_sub = rospy.Subscriber("/tractor_odom", Odometry, self.odomCB)
        self.update_rate = rospy.Rate(10)  # GUI refresh rate (Hz)

        self.root = tk.Tk()
        tk.Button(self.root, text="Record Point", command = self.ptCB).pack()
        tk.Button(self.root, text="Save Data", command = self.dataCB).pack()

        # Parallel lists of recorded point coordinates.
        self.saved_x = []
        self.saved_y = []
        self.saved_z = []

        # Most recent odometry message (replaced by odomCB).
        self.odom_msg = Odometry()

    def ptCB(self):
        """Record the position of the most recent odometry message."""
        print('Recording point')
        self.saved_x.append(self.odom_msg.pose.pose.position.x)
        self.saved_y.append(self.odom_msg.pose.pose.position.y)
        self.saved_z.append(self.odom_msg.pose.pose.position.z)

    def dataCB(self):
        """Write recorded points to an output text file.

        TODO: make the output file a settable parameter
        """
        print("Saving data...")
        # Context manager guarantees the file is flushed and closed even if
        # a write fails (the previous version leaked the file handle).
        with open("p2p_output.txt", "w") as f:
            f.write("title: p2p_test\n")
            f.write("waypoints:\n")

            for i in range(len(self.saved_x)):
                f.write(" " + str(i) + ":\n")
                f.write("  index: " + str(i) + "\n")
                f.write("  point: {x: " + str(self.saved_x[i]) +
                        ", y: " + str(self.saved_y[i]) +
                        ", z: " + str(self.saved_z[i]) +
                        "}\n")
                f.write("  frame: odom\n")
                f.write("  behavior: p2p\n")
                f.write("  forward: true\n")
                f.write("  autocontinue: false\n")

            # Trailing safety waypoint at the origin terminates the plan.
            f.write(" " + str(len(self.saved_x)) + ":\n")
            f.write("  index: " + str(len(self.saved_x)) + "\n")
            f.write("  point: {x: 0, y: 0, z: 0}\n")
            f.write("  frame: odom\n")
            f.write("  behavior: safety\n")
            f.write("  forward: none\n")
            f.write("  autocontinue: none\n")

    def odomCB(self, msg):
        """Cache the latest odometry message."""
        self.odom_msg = msg

    def run(self):
        """Pump the tkinter event loop at update_rate until ROS shutdown."""
        while not rospy.is_shutdown():
            self.root.update()
            self.update_rate.sleep()
76 |
if __name__ == "__main__":
    # Start the recorder node and run its combined GUI/ROS loop.
    rec = Recorder()
    rec.run()
80 |
--------------------------------------------------------------------------------
/scripts/TractorOdom.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python

"""
Uses orientation data from `/imu/data` and position data from `/gps_odom`
(an Odometry topic) and creates an odometry message that is published
to `/tractor_odom`. Also broadcasts this odometry message as the transform
between `/base_link` and `/odom`.
"""

import rospy
from sensor_msgs.msg import Imu
from nav_msgs.msg import Odometry
import tf
14 |
class TractorOdom:
    """Fuses IMU orientation (`/imu/data`) and GPS position (`/gps_odom`)
    into an Odometry message on `/tractor_odom`, and broadcasts the same
    pose as the /odom -> /base_link transform.
    """

    def __init__(self):
        rospy.init_node("tractor_odom")
        self.imu_sub = rospy.Subscriber('/imu/data', Imu, self.imuCB)
        self.gps_sub = rospy.Subscriber('/gps_odom', Odometry, self.gpsCB)
        self.odom_pub = rospy.Publisher('/tractor_odom', Odometry, queue_size=1)
        self.update_rate = rospy.Rate(10)  # publish at 10 Hz

        self.odom_broadcaster = tf.TransformBroadcaster()

        # Latest received messages; zero-initialized defaults keep
        # computeTractorOdom safe before the first callbacks fire.
        self._imu_msg = Imu()
        self._gps_msg = Odometry()

    def imuCB(self, msg):
        """Cache the latest IMU message."""
        self._imu_msg = msg

    def gpsCB(self, msg):
        """Cache the latest GPS odometry message."""
        self._gps_msg = msg

    def computeTractorOdom(self):
        """Creates an Odometry message from imu orientation data and gps
        position data. Also broadcasts this data as a transform between
        /base_link and /odom
        """
        msg = Odometry()
        msg.header.frame_id='/odom'
        msg.header.stamp = rospy.Time.now()
        msg.child_frame_id='/base_link'
        msg.pose.pose.position = self._gps_msg.pose.pose.position
        msg.pose.pose.orientation = self._imu_msg.orientation
        self.odom_pub.publish(msg)

        # Broadcast message as tf. tf expects the quaternion in
        # (x, y, z, w) order; the previous code passed (w, x, y, z),
        # which broadcast an incorrect rotation. Reuse the message stamp
        # so the published odometry and the transform share a timestamp.
        self.odom_broadcaster.sendTransform((msg.pose.pose.position.x,
                                             msg.pose.pose.position.y,
                                             msg.pose.pose.position.z),
                                            (msg.pose.pose.orientation.x,
                                             msg.pose.pose.orientation.y,
                                             msg.pose.pose.orientation.z,
                                             msg.pose.pose.orientation.w),
                                            msg.header.stamp,
                                            "/base_link",
                                            "/odom")

    def run(self):
        """Publish fused odometry until the node shuts down."""
        rospy.loginfo("Starting Tractor Odom")
        while not rospy.is_shutdown():
            self.computeTractorOdom()
            self.update_rate.sleep()
64 |
if __name__=="__main__":
    # Instantiate the node and run its publish loop until shutdown.
    tOdom = TractorOdom()
    tOdom.run()
68 |
--------------------------------------------------------------------------------
/scripts/circle_drive.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import rospy # ROS library for python use
3 | from ackermann_msgs.msg import AckermannDrive # ROS Ackermann Steering message
4 |
# Set Constants

# Set Variables
tractor_speed = 1   # commanded forward speed -- units unverified from here
tractor_turn = 45   # commanded steering angle; NOTE(review):
                    # AckermannDrive.steering_angle is specified in radians,
                    # so 45 looks like degrees -- confirm intended units
10 |
11 | '''
12 | Function: drives tractor along continuous circle
13 | @arguments: wheel angle, drive speed
14 | @returns: none
15 | '''
def drive_circle(speed, turn):
    """Continuously publish one constant AckermannDrive command so the
    tractor drives in a circle.

    speed -- forward speed to command
    turn  -- steering angle to command
    """
    # Node / publisher setup
    rospy.init_node('circle_driver')
    drive_publish = rospy.Publisher('autodrive',AckermannDrive, queue_size=1)
    rate = rospy.Rate(20)
    command = AckermannDrive()

    # Re-send the same command at 20 Hz until roscore goes down.
    while not rospy.is_shutdown():
        rate.sleep()
        command.steering_angle = turn
        command.speed = speed
        drive_publish.publish(command)
30 |
# Runs this section if the file is run in isolation
if __name__ == '__main__':

    try:
        drive_circle(tractor_speed, tractor_turn)
    except rospy.ROSInterruptException:
        # Raised on Ctrl-C / node shutdown while sleeping; exit quietly.
        pass
38 |
--------------------------------------------------------------------------------
/scripts/gps.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | from nav_msgs.msg import Odometry
5 | from geometry_msgs.msg import Pose
6 |
def process_odom(msg):
    # Republish only the pose portion of the incoming Odometry message.
    publisher.publish(msg.pose.pose)

rospy.init_node('converter')

# Converts /gps/rtkfix Odometry messages into bare Pose messages on
# /tractor_position.
publisher = rospy.Publisher('/tractor_position', Pose, queue_size=10)
subscriber = rospy.Subscriber('/gps/rtkfix', Odometry, process_odom)

rospy.spin()
--------------------------------------------------------------------------------
/scripts/gps_navigation.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | from math import *
3 | import rospy
4 | from std_msgs.msg import Float32
5 | from sensor_msgs.msg import NavSatFix
6 | from geometry_msgs.msg import TwistStamped
7 | #from ackermann_msgs import AckermannDrive
8 | """
9 | Notes:
10 | ackermann msgs still not working
11 | """
12 |
13 |
14 |
15 | # GPS navigation pseudocode.
16 |
17 | # GPS
18 | # Take in an array of waypoints
19 | # Subscribe to topics for RTK GPS and Hemisphere GPS
20 | # Use timesynchronizer to connect RTK and Hemisphere data.
21 | # Run callback function with next waypoint.
22 | # Rospy.spin()
23 |
24 | # Callback function
25 | # Take in waypoint [latitude longitude],current location [latitude longitude]
26 | # and current angle [assuming degrees for now, for now, assume the gps is
27 | # facing perpendicular to the car to the left so that car turns on 0-180.
28 | # First, calculate desired angle to move. Take tangent of difference in
29 | # longitude over difference in latitude.
30 | # Calculate current error by subtracting the desired - existing angle
31 | # Multiply error by kp1 to get new steering angle
32 | # Multiply error by kp2 to get new velocity
33 | # Tune the p values to be accurate
34 |
#UPDATE WITH CALLBACK###########################################################
# Mutable module state updated by the ROS callbacks below.
current_longitude = 0
current_latitude = 0
current_x = 0
current_y = 0
# NOTE(review): evaluated once at import time from the initial (0, 0)
# velocity, so this is always 0 degrees; the callbacks never refresh it.
current_angle = atan2(current_y, current_x) * (180/pi) #must be in degrees
waypoint_longitude = -1
waypoint_latitude = -1
pub = None  # assigned in run()
#CALIBRATION####################################################################
max_turn_angle = 45 #in degrees
min_turn_angle = -max_turn_angle #since zero degrees is straight, max turns
#right and min turns left

kp1 = 1 #calibrate for steering angle
kp2 = 1 #calibrate for steering velocity
kp3 = 1 #calibrate for forwards velocity

max_forward_speed = 2 #speed goes from 0 to 2
min_forward_speed = 0.25 #for calculations
max_turn_speed = 0.1 #calibrate to something reasonable (rad/s)
56 |
57 | #CORE FUNCTIONS#################################################################
def deg_calculate_desired_angle(clat, clong, wlat, wlong):
    """Bearing from the current position toward the waypoint, in degrees.

    clat  -- current latitude
    clong -- current longitude
    wlat  -- waypoint latitude
    wlong -- waypoint longitude
    Zero degrees points along increasing latitude; positive angles rotate
    toward increasing longitude.
    """
    dlong = wlong - clong
    dlat = wlat - clat
    # atan2 handles every quadrant (including dlat == 0) correctly.
    return atan2(dlong, dlat) * (180 / pi)
74 |
def deg_calculate_steering_error(current_angle, desired_angle):
    """Signed heading error in degrees: how far the desired heading is
    from the current one."""
    return desired_angle - current_angle
78 |
def deg_calculate_steering_angle(error, kp1):
    """
    Convert a heading error (degrees) into a steering command (degrees).

    error -- heading error from deg_calculate_steering_error
    kp1   -- proportional gain; higher kp1 results in sharper turning
    Returns the steering command, clamped to
    [-max_turn_angle, max_turn_angle].

    Positive error requires a negative correction, so the command is the
    negated, scaled error. The previous version clamped based on the raw
    error's sign instead of the command's sign, so the output jumped
    discontinuously (e.g. from -44 to +45) as the error crossed the limit.
    """
    steering_angle = kp1 * -error  # positive error -> negative correction
    if steering_angle < -max_turn_angle:
        steering_angle = -max_turn_angle
    elif steering_angle > max_turn_angle:
        steering_angle = max_turn_angle
    return steering_angle #in degrees
95 |
def deg_calculate_forward_velocity(angle, kp3):
    """
    Establishes linear relationship between angle and speed:
    more angle = less speed, can be adjusted with kp3 value.

    angle -- steering angle in degrees
    kp3   -- proportional gain for forward velocity
    Returns the forward velocity. (The previous version computed the value
    but never returned it, so callers always received None.)
    """
    slope = (max_forward_speed-min_forward_speed)/max_turn_angle
    forward_velocity = -kp3 * slope * abs(angle) + max_forward_speed
    return forward_velocity
103 |
104 | #SUBSCRIBERS####################################################################
def callback_fix(data):
    """Store the latest NavSatFix position and republish command values."""
    global current_longitude
    global current_latitude
    current_longitude = data.longitude
    current_latitude = data.latitude
    update()

def callback_vel(data):
    """Store the latest velocity vector and republish command values."""
    global current_x
    global current_y
    current_x = data.twist.linear.x
    current_y = data.twist.linear.y
    update()
118 |
def run():
    """Initialize the node, wire up subscribers and the publisher, spin."""
    rospy.init_node('GPS_Nav', anonymous=True)
    rospy.Subscriber('fix', NavSatFix, callback_fix)
    rospy.Subscriber('vel', TwistStamped, callback_vel)
    global pub
    pub = rospy.Publisher('cmds', Float32, queue_size=10)
    rospy.spin()
126 |
#CALCULATED VALUES##############################################################
# NOTE(review): these are evaluated exactly once at import time with the
# initial (zero) state; the callbacks never recompute them, so update()
# republishes the same stale values forever. Needs restructuring.
desired_angle = deg_calculate_desired_angle(current_latitude, current_longitude, waypoint_latitude, waypoint_longitude)
steering_error = deg_calculate_steering_error(current_angle, desired_angle)
steering_angle = deg_calculate_steering_angle(steering_error, kp1)
forward_velocity = deg_calculate_forward_velocity(steering_angle, kp3)

def update():
    """Print the module state and publish steering angle then velocity.

    NOTE(review): the published steering_angle/forward_velocity are module
    globals computed once at import time, so every call republishes the
    same initial values. Both go out on the same 'cmds' Float32 topic, so
    a subscriber cannot distinguish angle messages from velocity messages.
    """
    print "running..." #print statements for verification
    print desired_angle
    print steering_error
    print steering_angle
    print forward_velocity
    print current_longitude
    print current_latitude
    print current_x
    print current_y
    pub.publish(steering_angle)
    pub.publish(forward_velocity)

if __name__=='__main__':
    run()
147 |
--------------------------------------------------------------------------------
/scripts/gps_navigation_node.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
from math import *
import rospy
from gravl.msg import Hemisphere
from message_filters import Subscriber, TimeSynchronizer
from sensor_msgs.msg import NavSatFix
# TODO Ensure that this still works with the custom hemisphere messages
from ackermann_msgs.msg import AckermannDrive
9 |
class GPSNavigationNode:
    """
    ROS Node for GPS navigation using a rtk GPS and hemisphere vector GPS
    for an Ackermann-steered vehicle. Must be instantiated with a specific
    GPS waypoint list for the vehicle to drive towards in sequence.
    """
    def __init__(self, waypoint_array=None):
        """
        Initializes a GPS Navigation Node.
        waypoint_array : An array of waypoints, in the format of
                         [[lat, long], [lat, long], [lat, long]]
        """
        # None default avoids the shared-mutable-default-argument pitfall.
        if waypoint_array is None:
            waypoint_array = []

        # TimeSynchronizer requires message_filters.Subscriber instances;
        # the previous code passed rospy.Subscriber objects (and qualified
        # TimeSynchronizer with a module name that was never imported).
        self.fix = Subscriber('fix', NavSatFix)
        self.dir = Subscriber('direction', Hemisphere)
        self.nav_data = TimeSynchronizer([self.fix, self.dir], 10)
        self.pub = rospy.Publisher('cmds', AckermannDrive, queue_size = 10)
        # Current longitude of the tractor (coordinate).
        self.current_longitude = None
        # Current latitude of the tractor (coordinate).
        self.current_latitude = None
        # Angle the car is currently facing (degrees).
        self.current_angle = None
        # The target longitude of the tractor (coordinate).
        self.waypoint_longitude = None
        # The target latitude of the tractor (coordinate).
        self.waypoint_latitude = None
        # The maximum angle the tractor can turn at once (degrees).
        self.max_turn_angle = 45
        # The minimum angle the tractor can turn at once (degrees).
        # (Previously referenced the bare name max_turn_angle: NameError.)
        self.min_turn_angle = -self.max_turn_angle
        # Whether or not the current waypoint has been reached.
        self.waypoint_reached = False
        # After synchronizing the data streams, we can register one callback.
        self.nav_data.registerCallback(self.update)

        # Proportional constants for the P controller; tune empirically.
        self.kp1 = 0.1  # steering angle
        self.kp2 = 0.1  # steering velocity
        self.kp3 = 0.1  # forwards velocity

        # Iterate through each waypoint set.
        # NOTE(review): rospy.spin() blocks until shutdown, so this loop can
        # only ever service the first waypoint; sequencing should move into
        # the callback. Structure preserved pending a redesign.
        for waypoint in waypoint_array:
            while not self.waypoint_reached:
                # Fixed typo: was self.waypoint_latiude.
                self.waypoint_latitude = waypoint[0]
                self.waypoint_longitude = waypoint[1]
                rospy.spin()
            # update() sets waypoint_reached to true; clear it so the next
            # waypoint actually gets navigated to.
            self.waypoint_reached = False

    def update(self, fix_msg=None, direction_msg=None):
        """
        Processes gps data and waypoint and publishes new steering angle.

        fix_msg       : NavSatFix message from the RTK GPS.
        direction_msg : Hemisphere message carrying the current heading.
        TimeSynchronizer passes the synchronized messages as arguments;
        the previous version read an undefined `nav_data` name (and the
        misspelled attribute `direciton`).
        """
        # Double check there is data from fix and from the Hemisphere.
        if fix_msg is None or direction_msg is None:
            return

        # Update class variables.
        self.current_longitude = fix_msg.longitude
        self.current_latitude = fix_msg.latitude
        self.current_angle = direction_msg.direction

        # Keep driving while we have NOT yet reached the waypoint. (The
        # previous condition was inverted: it only drove once arrived.)
        if not (isclose(self.current_latitude, self.waypoint_latitude) and
                isclose(self.current_longitude, self.waypoint_longitude)):
            desired_angle = deg_calculate_desired_angle(self.current_latitude,
                self.current_longitude, self.waypoint_latitude,
                self.waypoint_longitude)
            current_error = deg_calculate_steering_error(self.current_angle,
                desired_angle)
            new_steering_angle = deg_calculate_steering_angle(current_error,
                self.kp1)
            new_velocity = deg_calculate_forward_velocity(new_steering_angle,
                self.kp3)

            # Create and publish a drive message. AckermannDrive has no
            # header field (that belongs to AckermannDriveStamped), so the
            # previous stamp assignment raised AttributeError.
            drive_msg = AckermannDrive()
            drive_msg.steering_angle = new_steering_angle
            drive_msg.speed = abs(new_velocity)
            self.pub.publish(drive_msg)

        # Otherwise, we're ready to move on to the next waypoint.
        else:
            self.waypoint_reached = True
            return
103 |
104 | #########################CORE FUNCTIONS#########################
105 | # TODO Decide if there's any reason to make this block static.
106 |
def deg_calculate_desired_angle(clat, clong, wlat, wlong):
    """
    Calculates the desired angle (in degrees) for the car based on the
    angle necessary to drive towards the waypoint.
    clat : Current latitude coordinate of the vehicle.
    clong : Current longitude coordinate of the vehicle.
    wlat : Waypoint latitude coordinate.
    wlong : Waypoint longitude coordinate.
    Returns the desired angle in degrees.
    """
    delta_long = wlong - clong
    delta_lat = wlat - clat
    heading_rad = atan2(delta_long, delta_lat)
    return heading_rad * (180 / pi)
122 |
def deg_calculate_steering_error(current_angle, desired_angle):
    """
    Calculates how far (in degrees) the vehicle must turn to face the
    desired angle.

    current_angle : The angle the vehicle is facing (likely based on
                    sensor readings).
    desired_angle : The ideal angle of the vehicle (likely calculated
                    from a GPS waypoint).
    Returns the signed number of degrees to turn.
    """
    error = desired_angle - current_angle
    return error
135 |
def deg_calculate_steering_angle(error, kp1, max_turn_angle=45.0):
    """
    Calculate the new steering angle for the vehicle.

    error          : The difference between the current and desired angle
                     (degrees).
    kp1            : Proportional constant for PID controller. Should be
                     tuned empirically.
    max_turn_angle : Steering limit in degrees. New optional parameter:
                     the previous version read self.max_turn_angle, which
                     does not exist at module scope and raised NameError.
    Returns angle (in degrees) that the vehicle should turn in.
    """
    # Positive error -> steer the other way; clamp symmetrically. (The
    # previous version only clamped the positive side, so a large negative
    # error produced a command beyond the physical limit.)
    steering_angle = kp1 * -error
    if steering_angle > max_turn_angle:
        steering_angle = max_turn_angle
    elif steering_angle < -max_turn_angle:
        steering_angle = -max_turn_angle
    return steering_angle
151 |
def deg_calculate_forward_velocity(angle, kp3, max_forward_speed=2.0,
                                   max_turn_angle=45.0):
    """
    Calculate the new forward velocity for the car based on the turn angle.

    angle             : The new steering angle for the vehicle (degrees).
    kp3               : Proportional constant for the PID controller.
                        Should be tuned empirically.
    max_forward_speed : Speed cap. New optional parameter: the previous
                        version read self.max_forward_speed at module
                        scope, which raised NameError.
    max_turn_angle    : Steering limit used to scale the relationship.
    Returns the new forward velocity, capped at max_forward_speed.

    TODO Confirm whether or not this math checks out: speed grows with the
    turn angle here, while the sibling gps_navigation.py slows down as the
    angle grows, and a negative angle yields a negative velocity (callers
    currently take abs() of the result).
    """
    forward_velocity = kp3 * (max_forward_speed / max_turn_angle) * angle
    if forward_velocity >= max_forward_speed:
        forward_velocity = max_forward_speed
    return forward_velocity
167 |
168 |
def run():
    """Entry point: initialize the ROS node and start navigating."""
    rospy.init_node("GPSNavigationNode")
    try:
        # TODO Go through data set, pick waypoints (might be somewhat
        # arbitrary but it would be good to test actual navigation).
        waypoint_array = []
        gps_nav_node = GPSNavigationNode(waypoint_array)
    except rospy.ROSInterruptException:
        # Raised on node shutdown; exit quietly.
        pass

if __name__=='__main__':
    run()
181 |
--------------------------------------------------------------------------------
/scripts/p2p_output.txt:
--------------------------------------------------------------------------------
1 | title: p2p_test
2 | waypoints:
3 | 0:
4 | index: 0
5 | point: {x: 1.643, y: -8.5, z: -1.106}
6 | frame: odom
7 | behavior: p2p
8 | forward: true
9 | autocontinue: false
10 | 1:
11 | index: 1
12 | point: {x: 2.167, y: -7.848, z: -1.057}
13 | frame: odom
14 | behavior: p2p
15 | forward: true
16 | autocontinue: false
17 | 2:
18 | index: 2
19 | point: {x: 2.848, y: -6.987, z: -1.066}
20 | frame: odom
21 | behavior: p2p
22 | forward: true
23 | autocontinue: false
24 | 3:
25 | index: 3
26 | point: {x: 3.509, y: -6.01, z: -1.083}
27 | frame: odom
28 | behavior: p2p
29 | forward: true
30 | autocontinue: false
31 | 4:
32 | index: 4
33 | point: {x: 4.275, y: -4.989, z: -1.115}
34 | frame: odom
35 | behavior: p2p
36 | forward: true
37 | autocontinue: false
38 | 5:
39 | index: 5
40 | point: {x: 5.076, y: -3.847, z: -1.128}
41 | frame: odom
42 | behavior: p2p
43 | forward: true
44 | autocontinue: false
45 | 6:
46 | index: 6
47 | point: {x: 5.817, y: -2.722, z: -1.157}
48 | frame: odom
49 | behavior: p2p
50 | forward: true
51 | autocontinue: false
52 | 7:
53 | index: 7
54 | point: {x: 6.27, y: -2.009, z: -1.137}
55 | frame: odom
56 | behavior: p2p
57 | forward: true
58 | autocontinue: false
59 | 8:
60 | index: 8
61 | point: {x: 6.889, y: -1.047, z: -1.154}
62 | frame: odom
63 | behavior: p2p
64 | forward: true
65 | autocontinue: false
66 | 9:
67 | index: 9
68 | point: {x: 7.437, y: -0.052, z: -1.134}
69 | frame: odom
70 | behavior: p2p
71 | forward: true
72 | autocontinue: false
73 | 10:
74 | index: 10
75 | point: {x: 7.816, y: 0.94, z: -1.156}
76 | frame: odom
77 | behavior: p2p
78 | forward: true
79 | autocontinue: false
80 | 11:
81 | index: 11
82 | point: {x: 7.943, y: 1.718, z: -1.159}
83 | frame: odom
84 | behavior: p2p
85 | forward: true
86 | autocontinue: false
87 | 12:
88 | index: 12
89 | point: {x: 8.093, y: 2.726, z: -1.227}
90 | frame: odom
91 | behavior: p2p
92 | forward: true
93 | autocontinue: false
94 | 13:
95 | index: 13
96 | point: {x: 8.029, y: 3.686, z: -1.242}
97 | frame: odom
98 | behavior: p2p
99 | forward: true
100 | autocontinue: false
101 | 14:
102 | index: 14
103 | point: {x: 7.518, y: 4.601, z: -1.307}
104 | frame: odom
105 | behavior: p2p
106 | forward: true
107 | autocontinue: false
108 | 15:
109 | index: 15
110 | point: {x: 6.862, y: 5.397, z: -1.302}
111 | frame: odom
112 | behavior: p2p
113 | forward: true
114 | autocontinue: false
115 | 16:
116 | index: 16
117 | point: {x: 6.203, y: 5.897, z: -1.308}
118 | frame: odom
119 | behavior: p2p
120 | forward: true
121 | autocontinue: false
122 | 17:
123 | index: 17
124 | point: {x: 5.272, y: 6.373, z: -1.275}
125 | frame: odom
126 | behavior: p2p
127 | forward: true
128 | autocontinue: false
129 | 18:
130 | index: 18
131 | point: {x: 4.514, y: 6.669, z: -1.267}
132 | frame: odom
133 | behavior: p2p
134 | forward: true
135 | autocontinue: false
136 | 19:
137 | index: 19
138 | point: {x: 3.507, y: 7.106, z: -1.264}
139 | frame: odom
140 | behavior: p2p
141 | forward: true
142 | autocontinue: false
143 | 20:
144 | index: 20
145 | point: {x: 2.378, y: 7.408, z: -1.284}
146 | frame: odom
147 | behavior: p2p
148 | forward: true
149 | autocontinue: false
150 | 21:
151 | index: 21
152 | point: {x: 1.21, y: 7.59, z: -1.268}
153 | frame: odom
154 | behavior: p2p
155 | forward: true
156 | autocontinue: false
157 | 22:
158 | index: 22
159 | point: {x: 0.236, y: 7.629, z: -1.26}
160 | frame: odom
161 | behavior: p2p
162 | forward: true
163 | autocontinue: false
164 | 23:
165 | index: 23
166 | point: {x: -0.754, y: 7.476, z: -1.303}
167 | frame: odom
168 | behavior: p2p
169 | forward: true
170 | autocontinue: false
171 | 24:
172 | index: 24
173 | point: {x: -1.685, y: 7.26, z: -1.314}
174 | frame: odom
175 | behavior: p2p
176 | forward: true
177 | autocontinue: false
178 | 25:
179 | index: 25
180 | point: {x: -2.774, y: 6.73, z: -1.324}
181 | frame: odom
182 | behavior: p2p
183 | forward: true
184 | autocontinue: false
185 | 26:
186 | index: 26
187 | point: {x: -3.597, y: 6.216, z: -1.328}
188 | frame: odom
189 | behavior: p2p
190 | forward: true
191 | autocontinue: false
192 | 27:
193 | index: 27
194 | point: {x: -4.391, y: 5.693, z: -1.311}
195 | frame: odom
196 | behavior: p2p
197 | forward: true
198 | autocontinue: false
199 | 28:
200 | index: 28
201 | point: {x: -5.052, y: 4.871, z: -1.29}
202 | frame: odom
203 | behavior: p2p
204 | forward: true
205 | autocontinue: false
206 | 29:
207 | index: 29
208 | point: {x: -5.701, y: 4.279, z: -1.277}
209 | frame: odom
210 | behavior: p2p
211 | forward: true
212 | autocontinue: false
213 | 30:
214 | index: 30
215 | point: {x: -6.12, y: 3.555, z: -1.27}
216 | frame: odom
217 | behavior: p2p
218 | forward: true
219 | autocontinue: false
220 | 31:
221 | index: 31
222 | point: {x: -6.571, y: 2.857, z: -1.233}
223 | frame: odom
224 | behavior: p2p
225 | forward: true
226 | autocontinue: false
227 | 32:
228 | index: 32
229 | point: {x: -7.127, y: 1.802, z: -1.19}
230 | frame: odom
231 | behavior: p2p
232 | forward: true
233 | autocontinue: false
234 | 33:
235 | index: 33
236 | point: {x: -7.589, y: 1.049, z: -1.158}
237 | frame: odom
238 | behavior: p2p
239 | forward: true
240 | autocontinue: false
241 | 34:
242 | index: 34
243 | point: {x: -7.854, y: 0.645, z: -1.145}
244 | frame: odom
245 | behavior: p2p
246 | forward: true
247 | autocontinue: false
248 | 35:
249 | index: 35
250 | point: {x: -8.337, y: -0.1, z: -1.129}
251 | frame: odom
252 | behavior: p2p
253 | forward: true
254 | autocontinue: false
255 | 36:
256 | index: 36
257 | point: {x: -8.497, y: -0.651, z: -1.118}
258 | frame: odom
259 | behavior: p2p
260 | forward: true
261 | autocontinue: false
262 | 37:
263 | index: 37
264 | point: {x: -8.867, y: -1.379, z: -1.067}
265 | frame: odom
266 | behavior: p2p
267 | forward: true
268 | autocontinue: false
269 | 38:
270 | index: 38
271 | point: {x: -8.908, y: -1.818, z: -1.087}
272 | frame: odom
273 | behavior: p2p
274 | forward: true
275 | autocontinue: false
276 | 39:
277 | index: 39
278 | point: {x: -9.065, y: -2.398, z: -0.979}
279 | frame: odom
280 | behavior: p2p
281 | forward: true
282 | autocontinue: false
283 | 40:
284 | index: 40
285 | point: {x: -9.362, y: -3.04, z: -1.057}
286 | frame: odom
287 | behavior: p2p
288 | forward: true
289 | autocontinue: false
290 | 41:
291 | index: 41
292 | point: {x: -9.336, y: -3.383, z: -1.069}
293 | frame: odom
294 | behavior: p2p
295 | forward: true
296 | autocontinue: false
297 | 42:
298 | index: 42
299 | point: {x: -9.42, y: -3.826, z: -1.008}
300 | frame: odom
301 | behavior: p2p
302 | forward: true
303 | autocontinue: false
304 | 43:
305 | index: 43
306 | point: {x: -9.568, y: -4.396, z: -1.026}
307 | frame: odom
308 | behavior: p2p
309 | forward: true
310 | autocontinue: false
311 | 44:
312 | index: 44
313 | point: {x: -9.62, y: -4.927, z: -1.073}
314 | frame: odom
315 | behavior: p2p
316 | forward: true
317 | autocontinue: false
318 | 45:
319 | index: 45
320 | point: {x: -9.586, y: -5.457, z: -1.073}
321 | frame: odom
322 | behavior: p2p
323 | forward: true
324 | autocontinue: false
325 | 46:
326 | index: 46
327 | point: {x: -9.637, y: -5.885, z: -1.049}
328 | frame: odom
329 | behavior: p2p
330 | forward: true
331 | autocontinue: false
332 | 47:
333 | index: 47
334 | point: {x: -9.645, y: -6.326, z: -1.032}
335 | frame: odom
336 | behavior: p2p
337 | forward: true
338 | autocontinue: false
339 | 48:
340 | index: 48
341 | point: {x: -9.572, y: -6.708, z: -1.033}
342 | frame: odom
343 | behavior: p2p
344 | forward: true
345 | autocontinue: false
346 | 49:
347 | index: 49
348 | point: {x: -9.565, y: -7.325, z: -1.018}
349 | frame: odom
350 | behavior: p2p
351 | forward: true
352 | autocontinue: false
353 | 50:
354 | index: 50
355 | point: {x: -9.487, y: -7.783, z: -1.03}
356 | frame: odom
357 | behavior: p2p
358 | forward: true
359 | autocontinue: false
360 | 51:
361 | index: 51
362 | point: {x: -9.332, y: -8.176, z: -1.01}
363 | frame: odom
364 | behavior: p2p
365 | forward: true
366 | autocontinue: false
367 | 52:
368 | index: 52
369 | point: {x: -9.183, y: -8.524, z: -1.007}
370 | frame: odom
371 | behavior: p2p
372 | forward: true
373 | autocontinue: false
374 | 53:
375 | index: 53
376 | point: {x: -9.055, y: -8.92, z: -0.987}
377 | frame: odom
378 | behavior: p2p
379 | forward: true
380 | autocontinue: false
381 | 54:
382 | index: 54
383 | point: {x: -8.897, y: -9.291, z: -0.965}
384 | frame: odom
385 | behavior: p2p
386 | forward: true
387 | autocontinue: false
388 | 55:
389 | index: 55
390 | point: {x: -8.627, y: -9.786, z: -0.962}
391 | frame: odom
392 | behavior: p2p
393 | forward: true
394 | autocontinue: false
395 | 56:
396 | index: 56
397 | point: {x: -8.465, y: -10.178, z: -0.931}
398 | frame: odom
399 | behavior: p2p
400 | forward: true
401 | autocontinue: false
402 | 57:
403 | index: 57
404 | point: {x: -8.117, y: -10.531, z: -0.891}
405 | frame: odom
406 | behavior: p2p
407 | forward: true
408 | autocontinue: false
409 | 58:
410 | index: 58
411 | point: {x: -7.828, y: -10.777, z: -0.886}
412 | frame: odom
413 | behavior: p2p
414 | forward: true
415 | autocontinue: false
416 | 59:
417 | index: 59
418 | point: {x: -7.584, y: -11.168, z: -0.865}
419 | frame: odom
420 | behavior: p2p
421 | forward: true
422 | autocontinue: false
423 | 60:
424 | index: 60
425 | point: {x: -7.269, y: -11.499, z: -0.873}
426 | frame: odom
427 | behavior: p2p
428 | forward: true
429 | autocontinue: false
430 | 61:
431 | index: 61
432 | point: {x: -6.915, y: -11.764, z: -0.894}
433 | frame: odom
434 | behavior: p2p
435 | forward: true
436 | autocontinue: false
437 | 62:
438 | index: 62
439 | point: {x: -6.49, y: -12.032, z: -0.885}
440 | frame: odom
441 | behavior: p2p
442 | forward: true
443 | autocontinue: false
444 | 63:
445 | index: 63
446 | point: {x: -6.153, y: -12.24, z: -0.869}
447 | frame: odom
448 | behavior: p2p
449 | forward: true
450 | autocontinue: false
451 | 64:
452 | index: 64
453 | point: {x: -5.823, y: -12.379, z: -0.866}
454 | frame: odom
455 | behavior: p2p
456 | forward: true
457 | autocontinue: false
458 | 65:
459 | index: 65
460 | point: {x: -5.466, y: -12.535, z: -0.855}
461 | frame: odom
462 | behavior: p2p
463 | forward: true
464 | autocontinue: false
465 | 66:
466 | index: 66
467 | point: {x: -5.165, y: -12.69, z: -0.841}
468 | frame: odom
469 | behavior: p2p
470 | forward: true
471 | autocontinue: false
472 | 67:
473 | index: 67
474 | point: {x: -4.697, y: -12.791, z: -0.836}
475 | frame: odom
476 | behavior: p2p
477 | forward: true
478 | autocontinue: false
479 | 68:
480 | index: 68
481 | point: {x: -4.377, y: -12.828, z: -0.879}
482 | frame: odom
483 | behavior: p2p
484 | forward: true
485 | autocontinue: false
486 | 69:
487 | index: 69
488 | point: {x: -3.989, y: -12.881, z: -0.882}
489 | frame: odom
490 | behavior: p2p
491 | forward: true
492 | autocontinue: false
493 | 70:
494 | index: 70
495 | point: {x: -3.521, y: -13.14, z: -0.859}
496 | frame: odom
497 | behavior: p2p
498 | forward: true
499 | autocontinue: false
500 | 71:
501 | index: 71
502 | point: {x: -3.188, y: -13.17, z: -0.916}
503 | frame: odom
504 | behavior: p2p
505 | forward: true
506 | autocontinue: false
507 | 72:
508 | index: 72
509 | point: {x: -2.757, y: -13.194, z: -0.938}
510 | frame: odom
511 | behavior: p2p
512 | forward: true
513 | autocontinue: false
514 | 73:
515 | index: 73
516 | point: {x: -2.178, y: -13.072, z: -0.913}
517 | frame: odom
518 | behavior: p2p
519 | forward: true
520 | autocontinue: false
521 | 74:
522 | index: 74
523 | point: {x: -1.695, y: -13.171, z: -0.956}
524 | frame: odom
525 | behavior: p2p
526 | forward: true
527 | autocontinue: false
528 | 75:
529 | index: 75
530 | point: {x: -1.445, y: -13.032, z: -0.974}
531 | frame: odom
532 | behavior: p2p
533 | forward: true
534 | autocontinue: false
535 | 76:
536 | index: 76
537 | point: {x: -0.969, y: -12.837, z: -0.978}
538 | frame: odom
539 | behavior: p2p
540 | forward: true
541 | autocontinue: false
542 | 77:
543 | index: 77
544 | point: {x: -0.637, y: -12.718, z: -0.98}
545 | frame: odom
546 | behavior: p2p
547 | forward: true
548 | autocontinue: false
549 | 78:
550 | index: 78
551 | point: {x: -0.251, y: -12.468, z: -1.001}
552 | frame: odom
553 | behavior: p2p
554 | forward: true
555 | autocontinue: false
556 | 79:
557 | index: 79
558 | point: {x: 0.152, y: -12.237, z: -1.057}
559 | frame: odom
560 | behavior: p2p
561 | forward: true
562 | autocontinue: false
563 | 80:
564 | index: 80
565 | point: {x: 0.367, y: -11.886, z: -1.073}
566 | frame: odom
567 | behavior: p2p
568 | forward: true
569 | autocontinue: false
570 | 81:
571 | index: 81
572 | point: {x: 0.509, y: -11.702, z: -1.049}
573 | frame: odom
574 | behavior: p2p
575 | forward: true
576 | autocontinue: false
577 | 82:
578 | index: 82
579 | point: {x: 0.803, y: -11.355, z: -1.066}
580 | frame: odom
581 | behavior: p2p
582 | forward: true
583 | autocontinue: false
584 | 83:
585 | index: 83
586 | point: {x: 0.989, y: -11.059, z: -1.058}
587 | frame: odom
588 | behavior: p2p
589 | forward: true
590 | autocontinue: false
591 | 84:
592 | index: 84
593 | point: {x: 0.989, y: -11.059, z: -1.058}
594 | frame: odom
595 | behavior: p2p
596 | forward: true
597 | autocontinue: false
598 | 85:
599 | index: 85
600 | point: {x: 0, y: 0, z: 0}
601 | frame: odom
602 | behavior: safety
603 | forward: none
604 | autocontinue: none
605 |
--------------------------------------------------------------------------------
/scripts/road_recognition.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # _/ _/ _/ _/ _/
3 | # _/ _/ _/_/_/ _/ _/ _/ _/_/_/ _/_/ _/
4 | # _/_/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/
5 | # _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/_/
6 | # _/ _/ _/_/_/ _/ _/ _/ _/ _/ _/ _/
7 | # Supposed to display a transformed image from the camera, using the
8 | # recognize_road function.
9 |
10 | import rospy
11 | from cv_bridge import CvBridge, CvBridgeError
12 | import cv2
13 | from sensor_msgs.msg import Image
14 | import numpy as np
15 |
16 |
class road_recognition:
    """ROS node class: subscribes to a mono camera feed, finds the most
    road-like trapezoid in each frame, draws it onto the image, and
    republishes/displays the annotated result."""

    def __init__(self):
        # Each incoming frame triggers self.callback.
        self.image = rospy.Subscriber(
            '/camera/image_raw', Image, self.callback)
        self.bridge = CvBridge()
        self.image_pub = rospy.Publisher('trapezoid', Image, queue_size=10)

    def norm(self, x):
        '''Finds the y-value of a normal curve centered at the mean
        (mu) road width and standard deviation sigma. This is used to
        make some road locations more prominent than others.'''
        mu = 550      # expected road width, pixels
        sigma = 100   # spread of plausible road widths, pixels
        return np.e**((-(x - mu) ** 2) / (2 * sigma ** 2)) / (np.sqrt(2 * np.pi * sigma ** 2))

    def trap(self, img, line, m, n):
        '''Returns mean intensity within a trapezoid with the base on
        the bottom of the image. Takes an image, a number of pixels
        above the bottom of the image (line) the top of the trapezoid
        should be, a left point (m) and right (n) on that line, and
        calculates the average pixel intensity of the region inside
        the trapezoid, weighted by norm(n - m).'''
        white = 0
        total = 0
        for r in range(line, img.shape[0], 10):  # check every 10
            # pixels, otherwise this will take too long to run
            # The c bounds interpolate the left/right trapezoid edges at row r.
            for c in range(int(r * m / (line - (img.shape[0])) + m * (img.shape[0]) / ((img.shape[0]) - line)),
                           int((r * (n - img.shape[1]) + line * img.shape[1] -
                                n * img.shape[0]) / (line - img.shape[0])),
                           10):
                white += img[r][c]
                total += 1
        return (white + 0.0) / total * self.norm(n - m)  # multiply by
        # normal curve to favor m and n some distance apart

    def recognize_road(self, img):
        '''The function used by the node to find the road.'''

        # Look for a road 1/3 of the image from the bottom
        line = int(img.shape[0] * 2 / 3)

        maxmn = 0
        maxm = 0
        maxn = 0

        # Sweep possible top vertices for the trapezoid in the trap method,
        # and keep the most intense one. Check every 100 pixels for speed.
        for m in range(0, img.shape[1], 100):
            for n in range(m, img.shape[1], 100):
                val = self.trap(img, line, m, n)
                if val > maxmn:
                    maxmn = val
                    maxm = m
                    maxn = n

        # Draw the winning trapezoid edges (gray value 128, 10px thick).
        cv2.line(img, (maxm, line), (0, img.shape[0]), 128, 10)
        cv2.line(img, (maxn, line), (img.shape[1], img.shape[0]), 128, 10)
        cv2.line(img, (maxm, line), (maxn, line), 128, 10)
        return img

    def callback(self, data):
        '''Convert the incoming ROS image, annotate the road on it,
        republish and display the result.'''
        try:
            cv_image = self.bridge.imgmsg_to_cv2(data, 'mono8')
        except CvBridgeError as e:
            # BUGFIX: previously fell through with cv_image undefined,
            # raising NameError below; skip the frame on conversion failure.
            print(e)
            return

        road = self.recognize_road(cv_image)
        try:
            self.image_pub.publish(self.bridge.cv2_to_imgmsg(road, "mono8"))
        except CvBridgeError as e:
            print(e)
        cv2.imshow('Image window', road)
        cv2.waitKey(1)
90 |
91 |
def main():
    """Entry point: initialize the ROS node, start the recognizer, spin."""
    # BUGFIX: rospy.init_node must run before any rospy Publisher/Subscriber
    # is created, so construct the node class only after initialization.
    rospy.init_node('road_recognition', anonymous=True)
    road_recognition()
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print('Shutting down')


if __name__ == '__main__':
    main()
103 |
--------------------------------------------------------------------------------
/scripts/test_imu.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys
4 | import time
5 | import rospy
6 | from sensor_msgs.msg import Imu # ROS msg type for IMU
7 | from Phidget22.Devices.Gyroscope import *
8 | from Phidget22.Devices.Accelerometer import *
9 | from Phidget22.PhidgetException import *
10 | from Phidget22.Phidget import *
11 | from Phidget22.Net import *
12 |
rospy.init_node('imu')
pub = rospy.Publisher('imu_data', Imu, queue_size=10)

# BUGFIX: was `if __name__ == __main__:` (undefined name) — must compare
# against the string '__main__'.
if __name__ == '__main__':

    # Setting up Phidget gyroscope object
    try:
        ch = Gyroscope()
    except RuntimeError as e:
        print("Runtime Exception %s" % e.details)
        print("Press Enter to Exit...\n")
        readin = sys.stdin.read(1)
        exit(1)
    try:
        ch.open()
    except PhidgetException as e:
        # BUGFIX: line used typographic quotes (syntax error); now a plain
        # %-formatted string.
        print("Phidget Exception %i: %s" % (e.code, e.details))

    # Publish loop at 20 Hz. BUGFIX: removed `rospy.init('imu_data', ...)` —
    # rospy has no `init` function, and the node is already initialized above.
    r = rospy.Rate(20)
    while not rospy.is_shutdown():
        pub.publish()
        r.sleep()
37 |
--------------------------------------------------------------------------------
/scripts/test_pointgrey.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import rospy
3 | from cv_bridge import CvBridge, CvBridgeError
4 | from sensor_msgs.msg import Image
5 | import cv2
6 |
def img_callback(data):
    ''' DOCSTRING:
    Given img data from usb cam, saves img for later use; called every
    time img received from usb cam
    '''
    try:
        global cv_image
        cv_image = bridge.imgmsg_to_cv2(data, "bgr8")

    except CvBridgeError as e:
        # BUGFIX: was print('ERR:%d', e) — printed a tuple and used the
        # wrong format spec; format the exception into the string instead.
        print('ERR: %s' % e)
18 |
bridge = CvBridge()
cv_image = None

rospy.init_node('test_cam1')
# Subscribes to camera feed; calls img_callback upon receipt of image
rospy.Subscriber('/camera/image_raw', Image, img_callback)

r = rospy.Rate(20)
while not rospy.is_shutdown():
    r.sleep()
    # BUGFIX: `cv_image != None` performs an elementwise numpy comparison
    # once a frame is stored, raising ValueError in `if`; use identity test.
    if cv_image is not None:
        cv2.imshow('Image_raw', cv_image)
        cv2.waitKey(1)
31 |
--------------------------------------------------------------------------------
/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Install system dependencies for the GRAVL tractor platform.
# BUGFIX: shebang was `#/bin/bash` (missing `!`), so the file was executed
# by whatever shell invoked it rather than bash.

# Requirements for camera
apt install -y libraw1394-11
apt install -y libgtkmm-2.4-dev
apt install -y libglademm-2.4-dev
apt install -y libgtkglextmm-x11-1.2-dev
apt install -y libusb-1.0-0

git clone https://github.com/RhobanDeps/flycapture
cd flycapture
yes | sh install_flycapture.sh

# Requirements for GPS
apt install -y libgps-dev
apt install -y gpsd
apt install -y ros-kinetic-navigation
--------------------------------------------------------------------------------
/src/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | /opt/ros/indigo/share/catkin/cmake/toplevel.cmake
2 |
--------------------------------------------------------------------------------
/src/Camera/Camera.cpp:
--------------------------------------------------------------------------------
1 | /******************************************************************************
2 | * Camera
3 | * @file Camera.cpp
4 | * @author Carl Moser
5 | * @email carl.moser@students.olin.edu
6 | * @version 1.0
7 | *
8 | * This takes in the image from the FLIR thermal camera and filters it
9 | *
10 | ******************************************************************************/
11 |
12 |
13 | #include
14 | #include "Camera.h"
15 |
16 | cv::Mat threshold;
17 | volatile int low, high;
18 | volatile bool filter;
19 |
20 | int main(int argc, char **argv)
21 | {
22 | ros::init(argc, argv, "flir");
23 | ros::NodeHandle n;
24 | ros::Subscriber flir = n.subscribe("/camera/usb_cam1/image_raw", 10, FLIRCallback);
25 | dynamic_reconfigure::Server server;
26 | dynamic_reconfigure::Server::CallbackType f;
27 | f = boost::bind(&dynamicCallback, _1, _2);
28 | server.setCallback(f);
29 | std::cout << "OpenCV version : " << CV_VERSION << std::endl;
30 | cv::namedWindow("flir");
31 | ros::spin();
32 | return 0;
33 | }
34 |
35 | void FLIRCallback(const sensor_msgs::ImageConstPtr& msg){
36 | cv_bridge::CvImagePtr cv_ptr;
37 | cv_ptr = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::MONO16);
38 | cv::inRange(cv_ptr->image, low, high, threshold);
39 | if(filter){
40 | cv::imshow("flir", threshold);
41 | }
42 | else{
43 | cv::imshow("flir", cv_ptr->image);
44 | }
45 | cv::waitKey(1);
46 | }
47 |
// Dynamic-reconfigure callback: copy the GUI-set threshold bounds and the
// filter on/off flag into the volatile globals read by FLIRCallback.
// (Write order deliberately left as-is.)
void dynamicCallback(gravl::gravl_cfgConfig &config, uint32_t level){
  low = config.low_filter;
  high = config.high_filter;
  filter = config.filter;
}
53 |
--------------------------------------------------------------------------------
/src/Camera/Camera.h:
--------------------------------------------------------------------------------
// NOTE(review): the #include targets below appear to have been stripped
// (angle-bracket contents lost, likely in an export) — restore them from
// version control (expected: ros/ros.h, OpenCV/cv_bridge, dynamic_reconfigure
// server, gravl/gravl_cfgConfig.h, sensor_msgs image headers).
#include
#include
#include
#include

#include
#include
#include
#include
#include
#include

// Handles each incoming FLIR thermal frame (threshold + display).
void FLIRCallback(const sensor_msgs::ImageConstPtr& msg);
// Receives dynamic-reconfigure updates for the threshold/filter globals.
void dynamicCallback(gravl::gravl_cfgConfig &config, uint32_t level);
15 |
--------------------------------------------------------------------------------
/src/DriveState/DriveState.cpp:
--------------------------------------------------------------------------------
1 | /******************************************************************************
2 | * DriveState
3 | * @file DriveState.cpp
4 | * @author Carl Moser
5 | * @email carl.moser@students.olin.edu
6 | * @version 1.0
7 | *
8 | * This takes in messages from teledrive and autodrive and publishes them to
9 | * drive based on the value published to auto (default-false)
10 | * auto = true: autodrive -> drive
11 | * auto = false: teledrive -> drive
12 | *
13 | ******************************************************************************/
14 |
15 |
16 | #include "DriveState.h"
17 |
18 | /*
19 | * DriveState constructor
20 | */
21 | DriveState::DriveState(){
22 | state = n.subscribe("auto", 10, &DriveState::stateCB, this);
23 | telesub = n.subscribe("teledrive", 10, &DriveState::teleCB, this);
24 | drivepub = n.advertise("drive", 1000);
25 | }
26 |
27 | /*
28 | * Callback function for auto subscriber
29 | */
30 | void DriveState::stateCB(const std_msgs::Bool &msg){
31 | if(msg.data){
32 | telesub.shutdown();
33 | autosub = n.subscribe("autodrive", 10, &DriveState::autoCB, this);
34 | }
35 | else{
36 | autosub.shutdown();
37 | telesub = n.subscribe("teledrive", 10, &DriveState::teleCB, this);
38 | }
39 | }
40 |
41 | /*
42 | * Callback function for teledrive subscriber - publishes to drivepub
43 | */
void DriveState::teleCB(const ackermann_msgs::AckermannDrive &msg){
  // Forward teleop drive commands straight through to the drive topic.
  drivepub.publish(msg);
}
47 |
48 | /*
49 | * Callback function for autodrive subscriber - publishes to drivepub
50 | */
void DriveState::autoCB(const ackermann_msgs::AckermannDrive &msg){
  // Forward autonomous drive commands straight through to the drive topic.
  drivepub.publish(msg);
}
54 |
55 | // main function
56 | int main(int argc, char **argv)
57 | {
58 | ros::init(argc, argv, "state");
59 | DriveState ds;
60 | ros::spin();
61 | }
62 |
--------------------------------------------------------------------------------
/src/DriveState/DriveState.h:
--------------------------------------------------------------------------------
1 | #ifndef DRIVE_STATE_H
2 | #define DRIVE_STATE_H
3 |
4 | #include
5 | #include
6 | #include
7 |
8 | class DriveState{
9 | public:
10 | explicit DriveState();
11 | private:
12 | ros::NodeHandle n;
13 | ros::Subscriber state;
14 | ros::Subscriber telesub;
15 | ros::Subscriber autosub;
16 | ros::Publisher drivepub;
17 | ackermann_msgs::AckermannDrive drive_msg;
18 | void stateCB(const std_msgs::Bool &msg);
19 | void teleCB(const ackermann_msgs::AckermannDrive &msg);
20 | void autoCB(const ackermann_msgs::AckermannDrive &msg);
21 | };
22 |
23 | #endif //DRIVE_STATE_H
24 |
--------------------------------------------------------------------------------
/src/Hemisphere/Hemisphere.cpp:
--------------------------------------------------------------------------------
1 | /******************************************************************************
2 | * Hemisphere
3 | * @file Hemisphere.cpp
4 | * @author Carl Moser
5 | * @email carl.moser@students.olin.edu
6 | * @version 1.0
7 | *
8 | * This takes the true heading from the Hemisphere GPS and publishes it
9 | *
10 | ******************************************************************************/
11 |
12 |
13 | #include "Hemisphere.h"
14 | #include
15 |
16 | // Source for serial stuff http://www.tldp.org/HOWTO/text/Serial-Programming-HOWTO
17 |
18 | /*
19 | * Constructor - advertises the heading, connects to the hemisphere
20 | */
21 | Hemisphere::Hemisphere(){
22 | heading = n.advertise("heading", 1000);
23 | std::string serial_port;
24 | if(!n.getParam("port", serial_port)){
25 | ROS_ERROR("No serial port set");
26 | exit(1);
27 | }
28 | fd = open(serial_port.c_str(), O_RDWR|O_NOCTTY);
29 | if(fd<0){
30 | ROS_ERROR("Could not open port");
31 | exit(1);
32 | }
33 | tcgetattr(fd, &oldtio);
34 | bzero(&newtio, sizeof(newtio));
35 |
36 | newtio.c_cflag = BAUDRATE|CRTSCTS|CS8|CLOCAL|CREAD;
37 | newtio.c_iflag = IGNPAR|ICRNL;
38 | newtio.c_oflag = 0;
39 | newtio.c_lflag = ICANON;
40 |
41 | tcflush(fd, TCIFLUSH);
42 | tcsetattr(fd,TCSANOW,&newtio);
43 | }
44 |
45 | /*
46 | * Publishes the true heading
47 | */
void Hemisphere::publish(){
  hem.header.stamp = ros::Time::now();
  // Field 2 of the $PASHR sentence is parsed as the true heading.
  // NOTE(review): assumes `parsed` has at least 3 fields — confirm the
  // caller guarantees this for every $PASHR sentence.
  hem.direction = strtof(parsed.at(2).c_str(), 0);
  heading.publish(hem);
}
53 |
54 | /*
55 | * Reads the serial stream from the Hemisphere for the heading,
56 | * parses it, and then calls publish
57 | */
58 | void Hemisphere::run(){
59 | while(ros::ok()){
60 | read(fd,buf,255);
61 | std::string nmea = buf;
62 | boost::split(parsed, nmea, [](char c){return c == ',';});
63 | if(parsed.at(0) == "$PASHR"){
64 | this->publish();
65 | }
66 | ros::spinOnce();
67 | }
68 | }
69 |
70 |
71 | int main(int argc, char **argv)
72 | {
73 | ros::init(argc, argv, "hemisphere");
74 | Hemisphere h;
75 | h.run();
76 | }
77 |
--------------------------------------------------------------------------------
/src/Hemisphere/Hemisphere.h:
--------------------------------------------------------------------------------
1 | #ifndef HEMISPHERE_H
2 | #define HEMISPHERE_H
3 |
4 | #include
5 | #include
6 | #include
7 | #include
8 | #include
9 | #include
10 |
11 | #include
12 | #include
13 | #include
14 |
15 | #define BAUDRATE B19200
16 | #define _POSIX_SOURCE 1
17 |
18 | class Hemisphere{
19 | public:
20 | explicit Hemisphere();
21 | void run();
22 |
23 | private:
24 | void publish();
25 | ros::NodeHandle n;
26 | ros::Publisher heading;
27 | gravl::Hemisphere hem;
28 |
29 | int fd;
30 | struct termios oldtio, newtio;
31 | char buf[255];
32 | std::vector parsed;
33 | };
34 |
35 | #endif //HEMISPHERE_H
--------------------------------------------------------------------------------
/src/HindBrain/.gitignore:
--------------------------------------------------------------------------------
1 | build*/
2 |
--------------------------------------------------------------------------------
/src/HindBrain/Estop.cpp:
--------------------------------------------------------------------------------
1 | #include "Estop.h"
2 |
3 | /*
4 | @file estop.cpp
5 | Class for OAK estop (Olin Autonomous Kore)
6 | @author Carl Moser
7 | @maintainer Olin GRAVL
8 | @email olingravl@gmail.com
9 | @version 1.1
10 | @date 2020-02-14
11 |
12 | This is meant to be a modular class for any robot within the lab
13 | Attaches to pin and reads low as estop, publishes estops to /hardestop,
14 | estops based on msgs received on /softestop.
15 | */
16 |
17 |
18 | Estop::Estop(ros::NodeHandle *nh, const int pin, const unsigned int debounceTime):pin(pin),debounceTime(debounceTime) {
19 | // Constructor - init publisher, subscriber, attach interrupt to pin.
20 | hardEStop = new ros::Publisher("/hardestop", &stopped);
21 | softEStop = new ros::Subscriber("/softestop", &Estop::softStopCB, this);
22 | nh->advertise(*hardEStop);
23 | nh->subscribe(*softEStop);
24 | last_mill = millis();
25 | pinMode(pin, INPUT_PULLUP);
26 | attachInterrupt(digitalPinToInterrupt(pin), &Estop::globalStop, CHANGE);
27 | stopped.data = !digitalRead(pin);
28 | }
29 |
30 |
31 | void Estop::globalStop(void *instance){
32 | // Global function that calls the object function
33 | static_cast(instance)->onChange();
34 | }
35 |
36 |
37 | bool Estop::isStopped(){
38 | return softStopped|stopped.data;
39 | }
40 |
41 |
42 | void Estop::onChange() {
43 | // Update state & call appropriate start|stop func on pin change.
44 | if (millis() - last_mill >= debounceTime) {
45 | if (digitalRead(pin)) {
46 | stopped.data = true;
47 | hardEStop->publish(&stopped);
48 | (*Estop::stopfunc)();
49 | } else {
50 | stopped.data = false;
51 | hardEStop->publish(&stopped);
52 | (*Estop::startfunc)();
53 | }
54 | stopped.data = !stopped.data;
55 | hardEStop->publish(&stopped);
56 | if (stopped.data) {
57 | (*stopfunc)();
58 | } else {
59 | if (!softStopped)
60 | (*startfunc)();
61 | }
62 | last_mill = millis();
63 | }
64 | }
65 |
66 |
67 | void Estop::softStopCB(const std_msgs::Bool &message) {
68 | // Update state & call appropriate start|stop function on msg receipt.
69 | if (message.data) {
70 | (*stopfunc)();
71 | softStopped = true;
72 | } else {
73 | (*startfunc)();
74 | softStopped = false;
75 | }
76 | }
77 |
78 |
// Register the callback invoked when an estop engages.
void Estop::onStop(void (*func)()){
  stopfunc = func;
}
82 |
83 |
// Register the callback invoked when the estop releases.
void Estop::offStop(void (*func)()){
  startfunc = func;
}
87 |
--------------------------------------------------------------------------------
/src/HindBrain/Estop.h:
--------------------------------------------------------------------------------
1 | /*
2 | @file estop.h
3 | Software header for OAK estop (Olin Autonomous Kore)
4 | @author Carl Moser
5 | @maintainer Olin GRAVL
6 | @email olingravl@gmail.com
7 | @version 1.1
8 | @date 2020-02-14
9 | */
10 |
11 | #ifndef ESTOP_H
12 | #define ESTOP_H
13 |
14 | #include "ros.h"
15 | #include "std_msgs/Bool.h"
16 |
17 | static void dummyFunc() {return;}
18 |
19 | /*
20 | Class that attaches an estop to a rostopic.
21 |
22 | Usage:
23 | Define function to call on stop, function to call on start
24 | Instantiate class with nodehandle, hardware estop read pin, and debounce time in ms.
25 |
26 | */
27 | class Estop {
28 | public:
29 | explicit Estop(ros::NodeHandle *nh, const int pin, const unsigned int debounceTime);
30 | static void globalStop(void* instance);
31 | void onStop(void (*func)());
32 | void offStop(void (*func)());
33 | bool isStopped();
34 |
35 | private:
36 | ros::Publisher *hardEStop;
37 | ros::Subscriber *softEStop;
38 | std_msgs::Bool stopped;
39 | bool softStopped = false;
40 | const unsigned int debounceTime;
41 | const int pin;
42 | long last_mill;
43 | void (*stopfunc)() = dummyFunc;
44 | void (*startfunc)() = dummyFunc;
45 | void onChange();
46 | void softStopCB(const std_msgs::Bool &message);
47 | };
48 |
49 | #endif
50 |
--------------------------------------------------------------------------------
/src/HindBrain/HindBrain.h:
--------------------------------------------------------------------------------
1 | /*
2 | @file HindBrain.h
3 | Purpose: Config file for HindBrain.ino
4 | */
5 |
6 | #ifndef HIND_BRAIN_H
7 | #define HIND_BRAIN_H
8 |
9 | // https://wiki.ros.org/rosserial_arduino/Tutorials/Hello%20World
10 | #define USE_USBCON
11 |
12 | // Libraries
13 | #include // Microcontroller standard API
14 | #include // Hitch height encoder lib
15 |
16 | #include // rosserial library
17 | #include // Watchdog hf connection monitor
18 | #include // rosserial String msg
19 | #include // rosserial Hitch msg
20 |
21 | #include "Estop.h" // OAK Estop
22 | #include "SoftSwitch.h" // OAK SoftSwitch
23 | #include "RoboClaw.h" // Motor controller API
24 | #include "ackermann_msgs/AckermannDrive.h" // rosserial Steering msg
25 |
26 | // Arduino Pins
27 | const byte SERIAL_PIN_0 = 0; // Using either pin for input causes errors
28 | const byte SERIAL_PIN_1 = 1;
29 | const byte ESTOP_RELAY_PIN = 2;
30 | const byte AUTO_LIGHT_PIN = 3;
31 | const byte ESTOP_SENSE_PIN = 4; // Not implemented
32 | const byte HITCH_ENC_A_PIN = 18;
33 | const byte HITCH_ENC_B_PIN = 19;
34 |
// General Constants
// NOTE(review): HindBrain.ino gates its debug logging with `#ifdef DEBUG`,
// but DEBUG here is a const bool, not a preprocessor macro, so those blocks
// never compile regardless of this value — confirm intent (a plain
// `#define DEBUG` would enable them).
const bool DEBUG = false;
const int WATCHDOG_TIMEOUT = 250; // ms
#define SERIAL_BAUD_RATE 115200 // hz
39 |
40 | // Roboclaw Constants
41 | // Note: addresses must only be unique for 2+ roboclaws on 1 serial port
42 | #define RC1_ADDRESS 0x80 // Vehicle Control Unit
43 | #define RC2_ADDRESS 0x80 // Implement Control Unit
44 | #define RC_BAUD_RATE 38400 // hz
45 |
46 | const unsigned int RC_TIMEOUT = 10000; // us
47 | const byte ESTOP_DEBOUNCE_TIME = 50; // ms
48 |
49 | // Velocity Motor Ranges
50 | const int VEL_CMD_MIN = 1200; // Roboclaw cmd for min speed
51 | const int VEL_CMD_STOP = 1029; // Roboclaw cmd for stopped
52 | const int VEL_CMD_MAX = 850; // Roboclaw cmd for max speed
53 | const int VEL_MSG_MIN = -2; // Ackermann msg min speed
54 | const int VEL_MSG_STOP = 0; // Ackermann msg for stopped
55 | const int VEL_MSG_MAX = 2; // Ackermann msg max speed
56 |
57 | // Steering Motor Ranges
58 | const int STEER_CMD_LEFT = 500; // Roboclaw cmd for max left turn
59 | const int STEER_CMD_CENTER = 975; // Roboclaw cmd for straight
60 | const int STEER_CMD_RIGHT = 1275; // Roboclaw cmd for max right turn
61 | const int STEER_MSG_LEFT = 45; // Ackermann msg min steering angle
62 | const int STEER_MSG_CENTER = 0; // Ackermann msg center steering angle
63 | const int STEER_MSG_RIGHT = -30; // Ackermann msg max steering angle
64 |
65 | // Hitch Actuator Ranges
66 | const int H_ACTUATOR_MAX = 1660; // Retracted Actuator - Move hitch up
67 | const int H_ACTUATOR_MIN = 656; // Extended Actuator - Move hitch down
68 | const int H_ACTUATOR_CENTER = 1162; // Neutral Actuator - Stop moving hitch
69 | const int H_ACTUATOR_RANGE = H_ACTUATOR_MAX - H_ACTUATOR_MIN;
70 |
71 | // Encoder Constants
72 | const float ENC_STOP_THRESHOLD = 0.0381; // Blade accuracy stop threshold (m)
73 |
74 | // Callback prototypes
75 | void ackermannCB(const ackermann_msgs::AckermannDrive&);
76 | void hitchCB(const geometry_msgs::Pose&);
77 | void userInputCB(const std_msgs::String&);
78 | void watchdogCB(const std_msgs::Empty&);
79 |
80 | // Function prototypes
81 | void checkSerial(ros::NodeHandle *nh);
82 | void eStartTractor();
83 | void eStopTractor();
84 | void runStartupSequence();
85 | void stopEngine();
86 | void stopRoboClaw(RoboClaw *rc1, RoboClaw *rc2);
87 | void updateCurrDrive();
88 | void updateCurrHitchPose();
89 | void updateRoboClaw(int velMsg, int steerMsg, int hitchMsg);
90 | char checkUserInput();
91 | int computeHitchMsg();
92 | int steerAckToCmd(float ack_steer);
93 | int velAckToCmd(float ack_vel);
94 | float mapPrecise(float x, float inMin, float inMax, float outMin, float outMax);
95 |
96 | #endif
97 |
--------------------------------------------------------------------------------
/src/HindBrain/HindBrain.ino:
--------------------------------------------------------------------------------
1 | /*
2 | @file HindBrain.ino
3 | Purpose: Provides firmware interface from software to hardware, runs
4 | realtime control/safety loop
5 |
6 | @author Connor Novak
7 | @maintainer Olin GRAVL
8 | @email olingravl@gmail.com
9 | @version 0.2.0
10 | @date 2020-02-15
11 | */
12 |
13 | // Header file
14 | #include "HindBrain.h"
15 |
16 | // RoboClaws
17 | auto rc1_serial = &Serial1;
18 | auto rc2_serial = &Serial2;
19 | RoboClaw rc1(rc1_serial, RC_TIMEOUT);
20 | RoboClaw rc2(rc2_serial, RC_TIMEOUT);
21 |
22 | // Components
23 | Encoder hitchEncoder(HITCH_ENC_A_PIN, HITCH_ENC_B_PIN);
24 | Estop *eStop;
25 | OAKSoftSwitch *autoLight;
26 |
27 | // States
28 | boolean isEStopped = false;
29 | boolean isAuto = false;
30 |
31 | // Global variables
32 | unsigned long watchdogTimer;
33 | char usrMsg = '\0';
34 |
35 | signed int steerMsg = STEER_MSG_CENTER;
36 | unsigned int velMsg = VEL_MSG_STOP;
37 | unsigned int hitchMsg = H_ACTUATOR_CENTER ;
38 |
39 | // ROS nodes, publishers, subscribers
40 | ros::NodeHandle nh;
41 | ackermann_msgs::AckermannDrive currDrivePose;
42 | geometry_msgs::Pose currHitchPose;
43 | geometry_msgs::Pose desiredHitchPose;
44 |
45 | ros::Subscriber driveSub("/cmd_vel", &ackermannCB);
46 | ros::Subscriber hitchSub("/cmd_hitch", &hitchCB);
47 | ros::Subscriber ping("/safety_clock", &watchdogCB);
48 | ros::Subscriber userInput("/user_input", &userInputCB);
49 |
50 | ros::Publisher pubDrive("/curr_drive", &currDrivePose);
51 | ros::Publisher hitchPose("/hitch_pose", &currHitchPose);
52 |
53 |
void setup() {
  // One-time hardware + ROS bring-up. Order matters: estop and engine-stop
  // come first for safety, then RoboClaw serial, then rosserial wiring.
  // Blocks until the mid-brain connects AND the safety watchdog is fed.

  // Initialize estop and auto-light switch
  eStop = new Estop(&nh, ESTOP_SENSE_PIN, ESTOP_DEBOUNCE_TIME);
  eStop->onStop(eStopTractor);
  eStop->offStop(eStartTractor);
  pinMode(ESTOP_RELAY_PIN, OUTPUT);
  autoLight = new OAKSoftSwitch(&nh, "/auto_light", AUTO_LIGHT_PIN);

  // Stop engine for safety
  stopEngine();

  // Open serial communication with roboclaw
  rc1.begin(RC_BAUD_RATE);
  rc2.begin(RC_BAUD_RATE);

  // Set up ROS node, subscribers, publishers
  nh.getHardware()->setBaud(SERIAL_BAUD_RATE);
  nh.initNode();
  nh.subscribe(driveSub);
  nh.subscribe(hitchSub);
  nh.subscribe(ping);
  nh.subscribe(userInput);
  nh.advertise(pubDrive);
  nh.advertise(hitchPose);

  // Wait for connection: requires a /safety_clock message within the last
  // WATCHDOG_TIMEOUT ms, so the loop cannot exit before the watchdog is live.
  while(true) {
    if(nh.connected() && (millis() - watchdogTimer < WATCHDOG_TIMEOUT)) {
      break;
    }
    nh.spinOnce();
    delay(1);
  }

  nh.loginfo("Hindbrain connected, running startup calibration sequence");
  delay(500);
  runStartupSequence();
  watchdogTimer = millis();
}
94 |
95 |
void loop() {
  // Realtime control loop: verify link health, refresh published state,
  // then either drive the actuators or hold them in the safe stop state.

  // Check for connectivity with mid-brain
  checkSerial(&nh);

  // Update current published pose
  updateCurrDrive();
  updateCurrHitchPose();

  hitchMsg = computeHitchMsg();

  // Send updated motor commands to roboclaws
  if (!isEStopped) {
    updateRoboClaw(velMsg, steerMsg, hitchMsg);
  } else {
    stopRoboClaw(&rc1, &rc2);
  }

  // Update node
  nh.spinOnce();
  delay(1);
}
117 |
118 |
void ackermannCB(const ackermann_msgs::AckermannDrive &msg) {
  // Save steer and vel cmds to global vars.
  // Conversion to RoboClaw counts happens here so loop() can use them as-is.
  steerMsg = steerAckToCmd(msg.steering_angle);
  velMsg = velAckToCmd(msg.speed);
}
124 |
125 |
// Store the commanded hitch height for computeHitchMsg() to act on.
void hitchCB(const geometry_msgs::Pose &msg){
  desiredHitchPose.position.z = msg.position.z; // In meters from flat ground
  // TODO: Copy over all desired attributes
}
130 |
131 |
void watchdogCB(const std_msgs::Empty &msg) {
  // Update watchdog timer on receipt of msg; checkSerial() estops if this
  // goes stale for more than WATCHDOG_TIMEOUT ms.
  watchdogTimer = millis();
}
136 |
137 |
void userInputCB(const std_msgs::String &msg) {
  // Update input global var with msg only if msg is 1 char.
  // checkUserInput() consumes the stored character.
  if (strlen(msg.data) == 1) {
    usrMsg = *msg.data;
    // NOTE(review): DEBUG is a const bool in HindBrain.h, not a macro, so
    // this #ifdef block appears never to compile — confirm intent.
    #ifdef DEBUG
    char j[20];
    snprintf(j, sizeof(j), "Received %s", msg.data);
    nh.loginfo(j);
    #endif
  }
}
149 |
150 |
151 | void waitForUserVerification() {
152 | // Block main code until 'y' is received on /user_input.
153 | while (true) {
154 | auto cmd = checkUserInput();
155 | if (cmd == 'y') {
156 | break;
157 | }
158 | nh.spinOnce();
159 | delay(1);
160 | }
161 | }
162 |
163 |
void runStartupSequence() {
  // Run startup & calibration sequence with appropriate user input:
  // operator removes mechanical pins, actuators home, pins are re-installed.
  //TODO create prompt publishing topic

  nh.loginfo("Remove pins on steering and velocity, then publish 'y' to /user_input topic");
  waitForUserVerification();

  nh.loginfo("Verification received, homing steering and velocity actuators . . .");
  delay(500);
  // Home velocity (M1) and steering (M2) on RC1, hitch (M2) on RC2 at
  // gentle accelerations.
  rc1.SpeedAccelDeccelPositionM1(RC1_ADDRESS, 0, 300, 0, velMsg, 1);
  rc1.SpeedAccelDeccelPositionM2(RC1_ADDRESS, 0, 500, 0, steerMsg, 1);
  rc2.SpeedAccelDeccelPositionM2(RC2_ADDRESS, 0, 300, 0, hitchMsg, 1);
  delay(500);

  nh.loginfo("Re-install pins on steering and velocity, then publish 'y' to /user_input topic");
  waitForUserVerification();
  nh.loginfo("Verification received, vehicle ready to run.");
}
182 |
183 |
int steerAckToCmd(float ack){
  // Return RoboClaw command given ackermann steering msg.
  // Mapping is split at center because the spans are asymmetric
  // (45..0..-30 deg maps to 500..975..1275 counts; larger cmd = further right).
  float cmd;

  // Convert from input message to output command
  if (ack > STEER_MSG_CENTER) {
    cmd = map(ack, STEER_MSG_CENTER, STEER_MSG_LEFT, STEER_CMD_CENTER, STEER_CMD_LEFT);
  } else if (ack < STEER_MSG_CENTER) {
    cmd = map(ack, STEER_MSG_RIGHT, STEER_MSG_CENTER, STEER_CMD_RIGHT, STEER_CMD_CENTER);
  } else {
    cmd = STEER_CMD_CENTER;
  }

  // Safety limits for signal: clamp and warn on out-of-range commands
  if (cmd < STEER_CMD_LEFT) {
    cmd = STEER_CMD_LEFT;
    nh.logwarn("ERR: oversteering left");
  }
  else if (cmd > STEER_CMD_RIGHT) {
    cmd = STEER_CMD_RIGHT;
    nh.logwarn("ERR: oversteering right");
  }

  return cmd;
}
209 |
210 |
int velAckToCmd(float ack){
  // Return RoboClaw command given ackermann velocity msg.
  // Note: the command range is inverted (VEL_CMD_MIN=1200 is slowest,
  // VEL_CMD_MAX=850 is fastest), so the clamp comparisons look reversed.
  float cmd;

  // Convert from range of input signal to range of output signal
  cmd = map(ack, VEL_MSG_MIN, VEL_MSG_MAX, VEL_CMD_MIN, VEL_CMD_MAX);

  // Safety limits for signal; feels switched bc high vals = low speed
  if (cmd < VEL_CMD_MAX) {
    cmd = VEL_CMD_MAX;
  }
  else if(cmd > VEL_CMD_MIN) {
    cmd = VEL_CMD_MIN;
  }

  return cmd;
}
228 |
229 |
230 | void checkSerial(ros::NodeHandle *nh) {
231 | // Given node, estops if watchdog has timed out
232 | // https://answers.ros.org/question/124481/rosserial-arduino-how-to-check-on-device-if-in-sync-with-host/
233 |
234 | if(millis() - watchdogTimer >= WATCHDOG_TIMEOUT) {
235 | if(!isEStopped) {
236 | nh->logerror("Lost connectivity . . .");
237 | eStopTractor();
238 | }
239 | }
240 | }
241 |
242 |
243 | char checkUserInput() {
244 | // Get most recent user-sent input (if any), reset global var if necessary.
245 | auto outMsg = usrMsg;
246 | usrMsg = '\0';
247 | return outMsg;
248 | }
249 |
250 |
251 | void updateRoboClaw(int velMsg, int steerMsg, int hitchMsg) {
252 | // Given velocity, steering, and hitch message, sends vals to RoboClaw
253 |
254 | // Write velocity to RoboClaw
255 | rc1.SpeedAccelDeccelPositionM1(RC1_ADDRESS, 100000, 1000, 0, velMsg, 1);
256 |
257 | // Write steering to RoboClaw if tractor is moving, else returns debug msg
258 | // TODO: add sensor for motor on or not; this is what actually matters.
259 | if (velMsg < VEL_CMD_MIN - 100) {
260 | rc1.SpeedAccelDeccelPositionM2(RC1_ADDRESS, 0, 1000, 0, steerMsg, 1);
261 | } else {
262 | nh.logwarn("Tractor not moving, steering message rejected");
263 | }
264 |
265 | // Write hitch to RoboClaw
266 | rc2.SpeedAccelDeccelPositionM2(RC2_ADDRESS, 100000, 1000, 0, hitchMsg, 1);
267 |
268 | // roslog msgs if debugging
269 | #ifdef DEBUG
270 | char j[56];
271 | snprintf(j, sizeof(j), "DBG: steerMsg = %d, velMsg = %d, hitchMsg = %d", steerMsg, velMsg, hitchMsg);
272 | nh.loginfo(j);
273 | #endif
274 | }
275 |
276 |
void stopRoboClaw(RoboClaw *rc1, RoboClaw *rc2) {
  // Given roboclaw to stop, publishes messages such that Roboclaw is safe
  // rc1: controller driving velocity pedal (M1) and steering motor (M2)
  // rc2: controller driving the hitch actuator (M2)
  // Note: these parameters shadow the global rc1/rc2 objects.

  // Send velocity pedal to stop position
  rc1->SpeedAccelDeccelPositionM1(RC1_ADDRESS, 100000, 1000, 0, VEL_CMD_STOP, 0);

  // Stop steering motor
  rc1->SpeedM2(RC1_ADDRESS, 0);

  // Send hitch actuator to stop position
  rc2->SpeedAccelDeccelPositionM2(RC2_ADDRESS, 100000, 1000, 0, H_ACTUATOR_CENTER, 0);
}
289 |
290 |
void updateCurrDrive() {
  // Read encoder values, convert to ackermann drive, publish
  // TODO Fix mapping for steering
  // NOTE(review): encoder1 is mapped through the velocity command range and
  // encoder2 through the steering command range — this assumes the encoders
  // report in those same command units; confirm calibration.

  uint32_t encoder1, encoder2;
  rc1.ReadEncoders(RC1_ADDRESS, encoder1, encoder2);
  currDrivePose.speed = mapPrecise(encoder1, VEL_CMD_MIN, VEL_CMD_MAX, VEL_MSG_MIN, VEL_MSG_MAX);
  currDrivePose.steering_angle = mapPrecise(encoder2, STEER_CMD_RIGHT, STEER_CMD_LEFT, STEER_MSG_RIGHT, STEER_MSG_LEFT);
  pubDrive.publish(&currDrivePose);
}
301 |
302 |
void updateCurrHitchPose(){
  // Read encoder value, convert to hitch height in meters, publish
  // TODO: What is the hitch height measured from? Where is 0?
  float encoderValInch;
  float hitchHeight;
  long hitchEncoderValue;

  hitchEncoderValue = hitchEncoder.read();
  encoderValInch = hitchEncoderValue / 1000.0; // Inches (1000 counts/inch — TODO confirm)
  hitchHeight = encoderValInch * -1.1429 * 0.0254; // Meters TODO: What is the -1.1429? (presumably linkage ratio + sign flip — verify)
  currHitchPose.position.z = hitchHeight;
  hitchPose.publish(&currHitchPose);
}
316 |
317 |
318 | int computeHitchMsg() {
319 | // Take current and desired hitch position to compute actuator position
320 | int msg;
321 | auto desired = desiredHitchPose.position.z;
322 | auto current = currHitchPose.position.z;
323 | auto error = desired - current;
324 |
325 | // If hitch height is "good enough," move lever to center
326 | if (abs(error) < ENC_STOP_THRESHOLD) {
327 | msg = H_ACTUATOR_CENTER;
328 | } else {
329 | if (error > 0) {
330 | // If hitch is too high, move lever forwards
331 | msg = H_ACTUATOR_MIN;
332 | } else {
333 | // If hitch is too low, move lever backwards
334 | msg = H_ACTUATOR_MAX;
335 | }
336 | }
337 | return msg;
338 | }
339 |
340 |
void stopEngine() {
  // Toggle engine stop relay

  // Hold the relay energized for 2 s, then release so it is not left
  // active (presumably long enough to kill the engine — confirm timing).
  digitalWrite(ESTOP_RELAY_PIN, HIGH);
  delay(2000);
  digitalWrite(ESTOP_RELAY_PIN, LOW);
}
348 |
349 |
void eStopTractor() {
  // Estop tractor: latch the estopped flag, halt all RoboClaw outputs,
  // then kill the engine via the estop relay.
  isEStopped = true;

  nh.logerror("Tractor has E-Stopped");
  stopRoboClaw(&rc1, &rc2);
  stopEngine();
}
358 |
359 |
360 | void eStartTractor() {
361 | // Disactivate isEStopped state
362 | isEStopped = false;
363 |
364 | // Logs verification msg
365 | char i[24];
366 | snprintf(i, sizeof(i), "MSG: EStop Disactivated");
367 | nh.loginfo(i);
368 | }
369 |
370 |
float mapPrecise(float x, float inMin, float inMax, float outMin, float outMax) {
  // Linearly rescale x from [inMin, inMax] onto [outMin, outMax].
  // Float-based counterpart of Arduino's integer-only map().
  const float scale = (outMax - outMin) / (inMax - inMin);
  return outMin + (x - inMin) * scale;
}
375 |
--------------------------------------------------------------------------------
/src/HindBrain/Makefile:
--------------------------------------------------------------------------------
# Build configuration for the HindBrain Teensy 3.5 firmware, driven by
# the Arduino-Makefile project (Teensy.mk) instead of the Arduino IDE.

# Arduino IDE installation root.
ARDUINO_DIR = /opt/arduino
# Checkout of the Arduino-Makefile repository providing Teensy.mk.
ARDMK_DIR = ~/dev/Arduino-Makefile
# Location of the AVR toolchain headers.
AVR_TOOLS_DIR = /usr/include
# Board identifier understood by Teensy.mk.
BOARD_TAG = teensy35
# Sketch-local libraries (see ./lib, which points at the Arduino libraries dir).
USER_LIB_PATH += ./lib/
# Arduino libraries linked into the sketch.
ARDUINO_LIBS = ros_lib RoboClaw Encoder
include $(ARDMK_DIR)/Teensy.mk
8 |
--------------------------------------------------------------------------------
/src/HindBrain/SoftSwitch.cpp:
--------------------------------------------------------------------------------
1 | #include "SoftSwitch.h"
2 |
3 | /*
4 | @file soft_switch.cpp
5 | Software class for OAK soft_switch (Olin Autonomous Kore)
6 | @author Carl Moser
7 | @maintainer Olin GRAVL
8 | @email olingravl@gmail.com
9 | @version 1.1
10 | @date 2020-02-14
11 | */
12 |
13 |
OAKSoftSwitch::OAKSoftSwitch(ros::NodeHandle *nh, const char* name, const int pin):pin(pin){
  // Constructor - init subscriber & attach pin.
  // nh:   node handle to register the subscriber on
  // name: std_msgs/Bool topic whose state is mirrored to the pin
  // pin:  digital output pin driven by softCB
  // NOTE(review): rosserial subscribers are normally templated on the
  // message type (<std_msgs::Bool>); the template argument appears to have
  // been stripped in this export — confirm against the repository.
  signalIn = new ros::Subscriber(name, &OAKSoftSwitch::softCB, this);
  nh->subscribe(*signalIn);
  pinMode(pin, OUTPUT);
}
20 |
21 |
void OAKSoftSwitch::softCB(const std_msgs::Bool &sig){
  // Toggle estop pin to received state.
  // Writes the boolean payload straight to the configured digital pin.
  digitalWrite(pin, sig.data);
}
26 |
--------------------------------------------------------------------------------
/src/HindBrain/SoftSwitch.h:
--------------------------------------------------------------------------------
1 | /*
2 | @file soft_switch.h
3 | Software header for OAK soft_switch (Olin Autonomous Kore)
4 | @author Carl Moser
5 | @maintainer Olin GRAVL
6 | @email olingravl@gmail.com
7 | @version 1.1
8 | @date 2020-02-14
9 | */
10 |
11 | #ifndef OAK_SOFT_SWITCH_H
12 | #define OAK_SOFT_SWITCH_H
13 |
14 | #include "ros.h"
15 | #include "std_msgs/Bool.h"
16 |
17 | /*
18 | Class that connects a digital pin to a boolean rostopic.
19 |
20 | Usage:
21 | Instantiate class with nodehandle, topic name, & hardware pin number.
22 | */
class OAKSoftSwitch {

  public:
    // nh: node handle; name: Bool topic to mirror; pin: digital output pin.
    explicit OAKSoftSwitch(ros::NodeHandle *nh, const char* name, const int pin);

  private:
    ros::Subscriber *signalIn;  // subscription to `name` (allocated in ctor, never freed)
    const int pin;              // output pin driven by softCB
    // Write the received boolean straight to `pin`.
    void softCB(const std_msgs::Bool &sig);

};
34 |
35 | #endif
36 |
--------------------------------------------------------------------------------
/src/HindBrain/lib:
--------------------------------------------------------------------------------
1 | /opt/arduino/libraries
--------------------------------------------------------------------------------
/src/ImuBase/ImuBase.cpp:
--------------------------------------------------------------------------------
1 | #include "ImuBase.h"
2 | #include
3 |
ImuBase::ImuBase()
// Wire up the /imu_base publisher, /imu/data subscriber, 10 Hz loop rate,
// and the TF listener that fills tfBuffer.
// NOTE(review): the template arguments of advertise()/subscribe() (and the
// #include targets above) appear to have been stripped when this file was
// exported — presumably <sensor_msgs::Imu>; restore from the repository
// before building.
    : pub(n.advertise("imu_base", 1000))
    , sub(n.subscribe("/imu/data", 1000, &ImuBase::ImuBase::callback, this))
    , rate(ros::Rate(10))
    , tl(tfBuffer)
{
}
11 |
void ImuBase::callback(const sensor_msgs::Imu::ConstPtr& msg)
{
    // Rotate the incoming IMU sample into base_link using the transform
    // cached by spin(), storing the result in pub_val for publication.
    // NOTE(review): covariance fields and the header are not copied or
    // transformed — confirm that is intended.
    tf2::doTransform(msg->orientation, pub_val.orientation, transform);
    tf2::doTransform(msg->angular_velocity, pub_val.angular_velocity, transform);
    tf2::doTransform(msg->linear_acceleration, pub_val.linear_acceleration, transform);
}
18 |
void ImuBase::spin()
{
    // Main loop: refresh the base_imu->base_link transform, publish the
    // latest transformed IMU sample, and service callbacks at the 10 Hz rate.
    while (ros::ok())
    {
        try
        {
            // ros::Time(0) = most recent transform available.
            transform = tfBuffer.lookupTransform("base_link", "base_imu", ros::Time(0));
        }
        catch (tf2::TransformException &ex)
        {
            // Transform not available yet — log, back off 1 s, retry.
            ROS_ERROR("%s", ex.what());
            ros::Duration(1).sleep();
            continue;
        }

        // NOTE(review): pub_val is republished even when no new IMU message
        // arrived since the last cycle — confirm that is acceptable.
        pub.publish(pub_val);
        ros::spinOnce();
        rate.sleep();
    }
}
39 |
int main(int argc, char** argv)
{
    // Node entry point: construct the transformer and run its loop.
    ros::init(argc, argv, "ImuBase");
    ImuBase imu_base;
    imu_base.spin();
}
46 |
--------------------------------------------------------------------------------
/src/ImuBase/ImuBase.h:
--------------------------------------------------------------------------------
1 | /*
2 | * @file ImuBase.h
3 | * @author Kawin Nikomborirak
4 | * @date 2018-10-03
5 | *
6 | * Subscribes to /imu/data and publishes the transformed imu message
7 | * to /imu_base.
8 | */
9 |
10 | #pragma once
11 |
12 | #include
13 | #include
14 | #include
15 |
class ImuBase
{
public:
    ImuBase();

    /* Run the node. */
    void spin();

private:
    /* Store the transformed IMU sample in pub_val for publication. */
    void callback(const sensor_msgs::Imu::ConstPtr& msg);

    sensor_msgs::Imu pub_val;                   // last transformed sample, published by spin()
    ros::NodeHandle n;
    tf2_ros::Buffer tfBuffer;                   // TF cache behind the lookup in spin()
    tf2_ros::TransformListener tl;              // fills tfBuffer
    geometry_msgs::TransformStamped transform;  // base_imu -> base_link, refreshed each cycle
    const ros::Publisher pub;                   // "imu_base"
    const ros::Subscriber sub;                  // "/imu/data"
    ros::Rate rate;                             // 10 Hz loop rate
};
36 |
--------------------------------------------------------------------------------
/src/README.md:
--------------------------------------------------------------------------------
1 | # Overview
2 | This folder holds the C++ code for Kubo, the lab's autonomous tractor.
3 |
4 | ## 1. Nodes
5 |
6 |
7 | ### 1.1 Camera
8 |
9 | #### 1.1.1 Subscribed Topics
10 | ROS node that takes in a black and white image and filters out values that are not in the range between _low_filter_ and _high_filter_
11 | - __*/camera/usb_cam1/image_raw*__ ([sensor_msgs/Image](http://docs.ros.org/api/sensor_msgs/html/msg/Image.html))
12 | Black and white image to be modified and displayed
13 |
14 | #### 1.1.2 Parameters
15 | - __*low_filter*__ (int, default: 0)
16 | The low threshold for the image
17 | - __*high_filter*__ (int, default: 0)
18 | The high threshold for the image
19 | - __*filter*__ (bool, default: true)
20 | Filter the image?
21 |
22 |
23 |
24 | ### 1.2 DriveState
25 | ROS node that publishes an AckermannDrive message passed on from teleoperation and autonomous input based on a boolean
26 |
27 | #### 1.2.1 Published Topics
28 | - __*drive*__ ([ackermann_msgs/AckermannDrive](http://docs.ros.org/api/ackermann_msgs/html/msg/AckermannDrive.html))
29 | Forwarded drive message
30 |
31 | #### 1.2.2 Subscribed Topics
32 | - __*auto*__ ([std_msgs/Bool](http://docs.ros.org/api/std_msgs/html/msg/Bool.html))
33 | Sets the published data to teleoperation or autonomous
34 | - __*autodrive*__ ([ackermann_msgs/AckermannDrive](http://docs.ros.org/api/ackermann_msgs/html/msg/AckermannDrive.html))
35 | Autonomous input to be forwarded
36 | - __*teledrive*__ ([ackermann_msgs/AckermannDrive](http://docs.ros.org/api/ackermann_msgs/html/msg/AckermannDrive.html))
37 | Teleoperation input to be forwarded
38 |
39 |
40 |
41 | ### 1.3 Hemisphere
42 | ROS driver node that publishes true heading from the Hemisphere gps
43 |
44 | #### 1.3.1 Published Topics
45 | - __*heading*__ ([gravl/Hemisphere](https://github.com/olinrobotics/gravl/blob/master/msg/Hemisphere.msg))
46 | The true heading of the Hemisphere gps
47 |
48 |
49 |
50 | ### 1.4 gps_map
51 | ROS node for showing the position/heading on a map using Qt
52 |
53 | #### 1.4.1 Subscribed Topics
54 | - __*/gps/fix*__ ([sensor_msgs/NavSatFix](http://docs.ros.org/api/sensor_msgs/html/msg/NavSatFix.html))
55 | GPS position of the tractor
56 | - __*heading*__ ([gravl/Hemisphere](https://github.com/olinrobotics/gravl/blob/master/msg/Hemisphere.msg))
57 | The true heading of the Hemisphere gps
58 |
59 |
60 |
61 | ### 1.5 Teleop
62 | ROS node for teleoperation of ackermann steering vehicles
63 |
64 | #### 1.5.1 Published Topics
65 | - __*auto*__ ([std_msgs/Bool](http://docs.ros.org/api/std_msgs/html/msg/Bool.html))
66 | Sets the published data to teleoperation or autonomous
67 | - __*softestop*__ ([std_msgs/Bool](http://docs.ros.org/api/std_msgs/html/msg/Bool.html))
68 | Software estop
69 | - __*teledrive*__ ([ackermann_msgs/AckermannDrive](http://docs.ros.org/api/ackermann_msgs/html/msg/AckermannDrive.html))
70 | Teleoperation output
71 |
72 | #### 1.5.2 Subscribed Topics
73 | - __*joy*__ ([sensor_msgs/Joy](http://docs.ros.org/api/sensor_msgs/html/msg/Joy.html))
74 | Gamepad input for teleoperation
75 |
76 |
77 | ## Misc stuff
78 |
79 | ### hind_brain
80 | This folder contains Arduino code for Kubo's hind brain, running on an Arduino
81 | Teensy. The code is run upon startup of Kubo's electronics; to interface with
82 | the code, follow the tractor startup instructions on the home page of the Github
83 | gravl wiki.
84 |
85 | ### `road_detection.cpp`
86 | Takes a single image file of a road and theoretically draws a line of the direction to go in on an image.
87 | To run with an image file named road.jpg:
88 | ```
89 | cd scripts
90 | cmake .
91 | make
92 | ./road_detection road.jpg
93 | ```
94 |
95 | ### `hello_world.cpp`
96 | Basic c++ hello world program
--------------------------------------------------------------------------------
/src/Watchdog/Watchdog.cpp:
--------------------------------------------------------------------------------
1 | #include "Watchdog.h"
2 |
Watchdog::Watchdog()
// Set up the 10 Hz rate and the "safety_clock" heartbeat publisher.
// NOTE(review): advertise()'s template argument (presumably
// <std_msgs::Empty>) and the #include targets appear stripped in this
// export; restore from the repository before building.
  : rate(ros::Rate(10))
  , clockpub(n.advertise("safety_clock", 1))
{
}
8 |
void Watchdog::spin() {
  //Publishes empty message while roscore is running
  // The hindbrain monitors this 10 Hz heartbeat and estops the tractor
  // when it stops arriving (see the watchdog check in HindBrain.ino).

  while(ros::ok()) {
    clockpub.publish(tick);
    ros::spinOnce();
    rate.sleep();
  }

}
19 |
int main(int argc, char **argv) {
  // Entry point: start the heartbeat node and run until shutdown.
  ros::init(argc, argv, "watchdog");
  Watchdog chip;
  chip.spin();
}
25 |
--------------------------------------------------------------------------------
/src/Watchdog/Watchdog.h:
--------------------------------------------------------------------------------
1 | /*
2 | * @file Watchdog.h
3 | * @author Connor Novak
4 | * @date 2018-11-14
5 | *
6 | * Publishes high-frequency topic for watchdog on hindbrain to monitor.
7 | */
8 |
9 | #ifndef WATCHDOG_H
10 | #define WATCHDOG_H
11 |
12 | #include
13 | #include
14 |
class Watchdog {

  public:
    explicit Watchdog();
    // Publish the heartbeat in a loop until ROS shuts down.
    void spin();

  private:
    ros::NodeHandle n;
    ros::Publisher clockpub;  // "safety_clock" heartbeat publisher
    std_msgs::Empty tick;     // reusable empty heartbeat message
    ros::Rate rate;           // 10 Hz

};
28 | #endif
29 |
--------------------------------------------------------------------------------
/src/arm_drone.cpp:
--------------------------------------------------------------------------------
1 | /**
2 |  * @file arm_drone.cpp
3 | * @brief Sends arming, takeoff, and landing commands to drone using mavros
4 | based on http://docs.erlerobotics.com/simulation/vehicles/erle_copter/tutorial_3
5 | * @author Amy Phung
6 | * @author Connor Novak
7 | */
8 |
9 |
10 | /*
11 | USAGE:
12 | -Verify CMakeLists.txt has lines
13 | * add_executable(arm_drone src/arm_drone.cpp)
14 | * target_link_libraries(arm_drone ${catkin_LIBRARIES})
15 | * add_dependencies(arm_drone)
16 | -catkin_make package
17 | -roslaunch mavros px4.launch
18 | -rosservice call /mavros/set_mode 0 "GUIDED"
19 | -rostopic echo /mavros/state to verify GUIDED mode
20 | -rosrun test_package arm_drone
21 | */
22 |
23 | #include
24 |
25 | #include
26 | #include
27 | #include
28 | #include
29 |
30 | int main(int argc, char **argv)
31 | {
32 |
33 | int rate = 10;
34 |
35 | ros::init(argc, argv, "mavros_takeoff");
36 | ros::NodeHandle n;
37 |
38 | ros::Rate r(rate);
39 |
40 |
41 | //Mode setting not currently working
42 | // ros::ServiceClient cl = n.serviceClient("/mavros/set_mode");
43 | // mavros_msgs::SetMode srv_setMode;
44 | // srv_setMode.request.base_mode = 0;
45 | // srv_setMode.request.custom_mode = "GUIDED";
46 |
47 |
48 | ////////////////////////////////////////////
49 | ///////////////////ARM//////////////////////
50 | ////////////////////////////////////////////
51 | ros::ServiceClient arming_cl = n.serviceClient("/mavros/cmd/arming");
52 | mavros_msgs::CommandBool srv;
53 | srv.request.value = true;
54 | if(arming_cl.call(srv)){
55 | ROS_ERROR("ARM send ok %d", srv.response.success);
56 | }else{
57 | ROS_ERROR("Failed arming or disarming");
58 | }
59 |
60 | ////////////////////////////////////////////
61 | /////////////////TAKEOFF////////////////////
62 | ////////////////////////////////////////////
63 | ros::ServiceClient takeoff_cl = n.serviceClient("/mavros/cmd/takeoff");
64 | mavros_msgs::CommandTOL srv_takeoff;
65 | srv_takeoff.request.altitude = 10;
66 | srv_takeoff.request.latitude = 0;
67 | srv_takeoff.request.longitude = 0;
68 | srv_takeoff.request.min_pitch = 0;
69 | srv_takeoff.request.yaw = 0;
70 | if(takeoff_cl.call(srv_takeoff)){
71 | ROS_ERROR("srv_takeoff send ok %d", srv_takeoff.response.success);
72 | }else{
73 | ROS_ERROR("Failed Takeoff");
74 | }
75 |
76 | ////////////////////////////////////////////
77 | /////////////////DO STUFF///////////////////
78 | ////////////////////////////////////////////
79 | sleep(10);
80 |
81 | ////////////////////////////////////////////
82 | ///////////////////LAND/////////////////////
83 | ////////////////////////////////////////////
84 | ros::ServiceClient land_cl = n.serviceClient("/mavros/cmd/land");
85 | mavros_msgs::CommandTOL srv_land;
86 | srv_land.request.altitude = 10;
87 | srv_land.request.latitude = 0;
88 | srv_land.request.longitude = 0;
89 | srv_land.request.min_pitch = 0;
90 | srv_land.request.yaw = 0;
91 | if(land_cl.call(srv_land)){
92 | ROS_INFO("srv_land send ok %d", srv_land.response.success);
93 | }else{
94 | ROS_ERROR("Failed Land");
95 | }
96 |
97 | while (n.ok())
98 | {
99 | ros::spinOnce();
100 | r.sleep();
101 | }
102 |
103 |
104 | return 0;
105 | }
106 |
--------------------------------------------------------------------------------
/src/imu_safety/ImuSafety.cpp:
--------------------------------------------------------------------------------
1 | #include "ImuSafety.h"
2 | #include
3 |
ImuSafety::ImuSafety()
// Wire up the /imu_safe publisher, /imu/data subscriber, and 10 Hz rate.
// NOTE(review): advertise()/subscribe() template arguments and the
// #include target appear stripped in this export; restore from repo.
  : pub(n.advertise("imu_safe", 1000))
  , sub(n.subscribe("/imu/data", 1000, &ImuSafety::ImuSafety::callback, this))
  , rate(ros::Rate(10))
{
  // Default to 10 degrees
  // (0.1745 rad); overridable via the "maxRoll" parameter.
  n.param("maxRoll", max_roll, .1745);
}
12 |
void ImuSafety::callback(const sensor_msgs::Imu::ConstPtr& msg)
{
    // Extract roll from the IMU orientation quaternion and flag danger
    // when its magnitude exceeds max_roll.
    tf::Quaternion q_orig;
    quaternionMsgToTF(msg->orientation, q_orig);
    double dummy_var;
    pub_val.danger = false;
    // getRPY's first output (roll) lands in pub_val.theta; pitch/yaw discarded.
    ((tf::Matrix3x3) q_orig).getRPY(pub_val.theta, dummy_var, dummy_var);
    // NOTE(review): if this resolves to C's integer abs(), the double roll
    // would be truncated toward zero — verify std::abs/fabs is selected.
    pub_val.danger = abs(pub_val.theta) > max_roll;
}
22 |
void ImuSafety::spin()
{
    // Republishes the latest roll/danger evaluation at 10 Hz while
    // servicing incoming IMU callbacks.
    while (ros::ok())
    {
        pub.publish(pub_val);
        ros::spinOnce();
        rate.sleep();
    }
}
32 |
int main(int argc, char** argv)
{
    // Node entry point: construct the safety monitor and run its loop.
    ros::init(argc, argv, "ImuSafety");
    ImuSafety imu_safety;
    imu_safety.spin();
}
39 |
--------------------------------------------------------------------------------
/src/imu_safety/ImuSafety.h:
--------------------------------------------------------------------------------
1 | /*
2 | * @file ImuSafety.h
3 | * @author Kawin Nikomborirak
4 | * @date 2018-10-03
5 | *
6 | * Subscribes to /imu/data and publishes a roll of the tractor and
7 | * whether or not the roll is dangerous.
8 | */
9 |
10 | #pragma once
11 |
12 | #include
13 | #include
14 | #include
15 |
class ImuSafety
{
public:
    ImuSafety();

    /* Run the node. */
    void spin();

private:
    /* Compute roll + danger flag from an incoming IMU sample. */
    void callback(const sensor_msgs::Imu::ConstPtr& msg);

    gravl::ImuSafety pub_val;     // latest roll (theta) and danger flag, published by spin()
    ros::NodeHandle n;
    const ros::Publisher pub;     // "imu_safe"
    const ros::Subscriber sub;    // "/imu/data"
    ros::Rate rate;               // 10 Hz

    /* The maximum tractor roll (radians) that is considered 'safe'. */
    double max_roll;
};
36 |
--------------------------------------------------------------------------------
/src/offb_node.cpp:
--------------------------------------------------------------------------------
1 | /**
2 | * @file offb_node.cpp
3 | * @brief Offboard control example node, written with MAVROS version 0.19.x, PX4 Pro Flight
4 | * Stack and tested in Gazebo SITL
5 | */
6 |
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 |
// Latest FCU state (connection/arming/mode), cached for the main loop.
mavros_msgs::State current_state;
// Subscriber callback: snapshot the incoming state message.
void state_cb(const mavros_msgs::State::ConstPtr& msg){
    current_state = *msg;
}
17 |
18 | int main(int argc, char **argv)
19 | {
20 | ros::init(argc, argv, "offb_node");
21 | ros::NodeHandle nh;
22 |
23 | ros::Subscriber state_sub = nh.subscribe
24 | ("mavros/state", 10, state_cb);
25 | ros::Publisher local_pos_pub = nh.advertise
26 | ("mavros/setpoint_position/local", 10);
27 | ros::ServiceClient arming_client = nh.serviceClient
28 | ("mavros/cmd/arming");
29 | ros::ServiceClient set_mode_client = nh.serviceClient
30 | ("mavros/set_mode");
31 |
32 | //the setpoint publishing rate MUST be faster than 2Hz
33 | ros::Rate rate(20.0);
34 |
35 | // wait for FCU connection
36 | while(ros::ok() && current_state.connected){
37 | ros::spinOnce();
38 | rate.sleep();
39 | }
40 |
41 | geometry_msgs::PoseStamped pose;
42 | pose.pose.position.x = 0;
43 | pose.pose.position.y = 0;
44 | pose.pose.position.z = 2;
45 |
46 | //send a few setpoints before starting
47 | for(int i = 100; ros::ok() && i > 0; --i){
48 | local_pos_pub.publish(pose);
49 | ros::spinOnce();
50 | rate.sleep();
51 | }
52 |
53 | mavros_msgs::SetMode offb_set_mode;
54 | offb_set_mode.request.custom_mode = "OFFBOARD";
55 |
56 | mavros_msgs::CommandBool arm_cmd;
57 | arm_cmd.request.value = true;
58 |
59 | ros::Time last_request = ros::Time::now();
60 |
61 | while(ros::ok()){
62 | if( current_state.mode != "OFFBOARD" &&
63 | (ros::Time::now() - last_request > ros::Duration(5.0))){
64 | if( set_mode_client.call(offb_set_mode) &&
65 | offb_set_mode.response.mode_sent){
66 | ROS_INFO("Offboard enabled");
67 | }
68 | last_request = ros::Time::now();
69 | } else {
70 | if( !current_state.armed &&
71 | (ros::Time::now() - last_request > ros::Duration(5.0))){
72 | if( arming_client.call(arm_cmd) &&
73 | arm_cmd.response.success){
74 | ROS_INFO("Vehicle armed");
75 | }
76 | last_request = ros::Time::now();
77 | }
78 | }
79 |
80 | local_pos_pub.publish(pose);
81 |
82 | ros::spinOnce();
83 | rate.sleep();
84 | }
85 |
86 | return 0;
87 | }
88 |
--------------------------------------------------------------------------------
/src/road_detection.cpp:
--------------------------------------------------------------------------------
1 | /*
2 | * By Margo Crawford
3 | */
4 | //#include
5 | //#include
6 | //#include
7 | #include
8 | #include
9 | #include
10 | #include
11 | #include
12 | #include
13 | //#include
14 |
using namespace cv;
using namespace std;

// Images: source, grayscale source, edge map / blurred gray, and the BGR
// canvas the detected Hough lines are drawn onto.
Mat src, src_gray;
Mat dst, blurred, cdst;
// Canny tuning parameters.
int edgeThreshold = 1;
int lowThreshold = 100;
int const max_lowThreshold = 100;
int ratio = 3;        // Canny high threshold = lowThreshold * ratio
int kernel_size = 3;  // aperture size passed to Canny
// Window titles.
// NOTE(review): binding string literals to non-const char* is deprecated
// in modern C++; these should be const char*.
char* edgemap_window_name = "Edge Map";
char* lines_window_name = "Hough Lines";
30 |
31 | /*
32 | * Comparator for sorting vec4i by line length, longest first
33 | */
34 | bool vec4_linelength_comparator(Vec4i i, Vec4i j) {
35 | int i_length = sqrt ( pow((i[0] - i[2]), 2) + pow((i[1] - i[3]), 2) );
36 | int j_length = sqrt ( pow((j[0] - j[2]), 2) + pow((j[1] - j[3]), 2) );
37 | return (i_length > j_length);
38 | }
39 |
/*
 * Given two lines, see if they are almost colinear: their slopes are close
 * and they aren't close to the same part of the image as well. Should make it
 * easier to work with Hough Transforms that aren't quite tuned perfectly.
 *
 * NOTE(review): slopes use integer division (heavy truncation), and a
 * vertical segment (l[0] == l[2]) divides by zero. Similarly 1/slope_l1 is
 * integer division and is 0 for any |slope_l1| > 1.
 * NOTE(review): the intersection below computes 'y' from slope_l2 through
 * point (c, d), which belongs to l1 — verify against the stated system.
 */
bool are_almost_colinear(Vec4i l1, Vec4i l2) {
  // thresholds for how close the slope and y' distance should be for it to count
  int slope_threshold = 2;
  int dist_threshold = 10;
  //slopes
  int slope_l1 = (l1[1] - l1[3]) / (l1[0] - l1[2]);
  int slope_l2 = (l2[1] - l2[3]) / (l2[0] - l2[2]);
  //check whether slopes are close enough
  if (( slope_l1 + slope_threshold > slope_l2 ) and ( slope_l1 - slope_threshold < slope_l2 )) {
    //TODO check if order of endpoints is significant and make sure
    // we get the endpoint of each thats closest to eachother
    int a = l2[0];
    int b = l2[1];
    int c = l1[2];
    int d = l1[3];
    //The algebraic bs that I worked out, based on the following system of equations:
    // y - d = 1/slope_l1(x - c)
    // y - b = slope_l2(x - a)
    int x = ( d - b + slope_l2*a - (1/slope_l1)*c ) / ( slope_l2 - (1/slope_l1) );
    int y = slope_l2*(x - c) + d;
    int dist = sqrt ( pow( (x-c), 2 ) + pow( (y-d), 2 ) );
    if ( dist < dist_threshold ) {
      return true;
    }
  }
  return false;
}
72 |
int main( int argc, char **argv ) {
  // Load an image, run Canny + probabilistic Hough, draw the strongest
  // lines, and overlay an average "direction to go" line from image center.
  // NOTE(review): `vector lines` / `unordered_set consolidated_lines` have
  // lost their template arguments in this export (presumably vector<Vec4i>
  // and unordered_set<int>), and CV_LOAD_IMAGE_COLOR / CV_NORMAL / CV_AA
  // are pre-OpenCV-3 constants.

  //check if you've provided an image name
  if (argc != 2) {
    cout << "Provide an image filename as an argument" << endl;
    return -1;
  }
  // load the image
  src = imread(argv[1], CV_LOAD_IMAGE_COLOR);
  // see if the image loading worked
  if (!src.data) {
    cout << "Could not find image. " << std::endl;
    return -1;
  }
  //make a window for the image
  namedWindow(edgemap_window_name, CV_NORMAL);
  namedWindow(lines_window_name, CV_NORMAL);
  // make a matrix the same size as src
  //dst.create( src.size(), src.type() );
  // make a grayscale image
  cvtColor( src, src_gray, CV_BGR2GRAY );
  // do a gaussian? blur (box blur, actually — cv::blur)
  blur(src_gray, blurred, Size(3,3));
  // do the edge detection
  Canny(blurred, dst, lowThreshold, lowThreshold*ratio, kernel_size);
  if (!dst.empty()) {
    cout << dst.channels();
    //make a vector of lines and do Hough Lines stuff
    vector lines;
    HoughLinesP(dst, lines, 1, CV_PI/180, 80, 30, 10);
    // sort the lines, longest first
    sort ( lines.begin(), lines.end(), vec4_linelength_comparator);
    Size s = dst.size();
    cdst = Mat::zeros(s.height, s.width, CV_8UC3);
    // getting average slope of the lines
    // NOTE(review): "slope" here is dx/dy (x as a function of y), matching
    // the final line drawn from the bottom of the image upward.
    float total_slope = 0;
    float total_intercept = 0;
    // make a hashmap of the consolidated lines
    unordered_set consolidated_lines;
    // display the top lines
    // NOTE(review): assumes HoughLinesP found at least 10 lines — fewer
    // would index out of bounds; add a size check.
    for( size_t i = 0; i < 10; i++ )
    {
      Vec4i l = lines[i];
      line( cdst, Point(l[0], l[1]), Point(l[2], l[3]), Scalar(0,0,255), 3, CV_AA);
      // NOTE(review): integer division before the float assignment
      // truncates; horizontal segments (l[1] == l[3]) divide by zero.
      float slope = (l[0] - l[2]) / (l[1] - l[3]);
      float intercept = l[0] - slope*l[1];
      // hard coding a thing that avoids the horizon
      if (slope < 10.0) {
        total_slope += slope;
        total_intercept += intercept;
      }
      // NOTE(review): this inner loop reads consolidated_lines but the set
      // is never populated — the consolidation pass is unfinished.
      for ( auto it = consolidated_lines.begin(); it != consolidated_lines.end(); ++it ) {
        int l1_index = *it;
        Vec4i l1 = lines[l1_index];
      }

    }
    // average of the top two lines (currently unused in the drawing below)
    float avg_x = (lines[0][0] + lines[1][0]) / 2.0;
    float avg_y = (lines[0][1] + lines[1][1]) / 2.0;
    // midpoint of the image
    float mid_x = cdst.size().width / 2.0;

    // NOTE(review): divides by 10 even though some slopes may have been
    // excluded by the horizon filter above.
    float avg_slope = total_slope / 10.0;
    printf("%f", avg_slope);
    //std::cout << avg_slope;
    float avg_intercept = total_intercept / 10.0;
    //line( cdst, Point(mid_x, cdst.size().height), Point(avg_x, avg_y), Scalar(0,255,0), 3, CV_AA);
    line( cdst, Point(mid_x, cdst.size().height), Point(mid_x + (avg_slope*cdst.size().height), 0), Scalar(255, 0, 0), 3, CV_AA);
    //show the image
    imshow(edgemap_window_name, dst);

    imshow(lines_window_name, cdst);

    waitKey(0);
    return 0;
  } else {
    cout << "no output array";
    return -1;
  }
}
153 |
--------------------------------------------------------------------------------
/src/snowcatv2/motion.cpp:
--------------------------------------------------------------------------------
1 | #include "motion.h"
2 |
Motion::Motion(ros::NodeHandle *nh, HardwareSerial *rc, const long baud, const uint8_t address):address(address), lspeed(0), rspeed(0){
  // Set up the status publisher / velocity subscriber and put the
  // RoboClaw into a known stopped state.
  // rc: hardware serial port wired to the RoboClaw; baud: its baud rate;
  // address: packet-serial address of the controller.
  // NOTE(review): rosserial Publisher/Subscriber are normally templated on
  // the message type; the template arguments appear stripped in this export.
  status = new ros::Publisher("/roboclawstatus", &stats);
  cmd = new ros::Subscriber("/cmd_vel", &Motion::vel_callback, this);
  nh->advertise(*status);
  nh->subscribe(*cmd);
  // 10000 = RoboClaw library serial timeout — TODO confirm units (ms vs us).
  roboclaw = new RoboClaw(rc, 10000);
  roboclaw->begin(baud);
  roboclaw->SpeedM1(address, 0);
  roboclaw->SpeedM2(address, 0);
}
13 |
void Motion::stat_pub(){
  // Populate and publish RoboClaw health stats (battery voltages, motor
  // currents, error flags) on /roboclawstatus.
  // NOTE(review): nothing in this file calls stat_pub() — confirm a caller exists.
  stats.main_voltage = roboclaw->ReadMainBatteryVoltage(address);
  stats.logic_voltage = roboclaw->ReadLogicBatteryVoltage(address);
  roboclaw->ReadCurrents(address, stats.motor1_current, stats.motor2_current);
  stats.error = roboclaw->ReadError(address);
  status->publish(&stats);
}
21 |
22 | void Motion::vel_callback(const geometry_msgs::Twist &vel){
23 | //Speed is quad pulses/sec
24 | vel.angular.z;
25 | lspeed = QPPS_PER_REV * REV_PER_METER * vel.linear.x;
26 | rspeed = QPPS_PER_REV * REV_PER_METER * vel.linear.x;
27 | roboclaw->SpeedM1(address, 1);
28 | roboclaw->SpeedM2(address, 1);
29 | }
30 |
--------------------------------------------------------------------------------
/src/snowcatv2/motion.h:
--------------------------------------------------------------------------------
1 | #ifndef MOTION_H
2 | #define MOTION_H
3 |
4 | #include "RoboClaw.h"
5 | #include "ros.h"
6 | #include "geometry_msgs/Twist.h"
7 | #include
8 |
9 | #define QPPS_PER_REV 300
10 | #define REV_PER_METER 120 //Placeholder
11 |
class Motion{
  public:
    // nh: node handle; rc: serial port wired to the RoboClaw;
    // baud: serial baud rate; address: RoboClaw packet-serial address.
    explicit Motion(ros::NodeHandle *nh, HardwareSerial *rc, const long baud, const uint8_t address);

  private:
    RoboClaw *roboclaw;          // motor controller driver (allocated in ctor)
    ros::Publisher *status;      // "/roboclawstatus"
    ros::Subscriber *cmd;        // "/cmd_vel"
    gravl::RoboClawStats stats;  // scratch message filled by stat_pub()
    const uint8_t address;       // RoboClaw packet-serial address
    int32_t lspeed;              // left-side speed, quad pulses/sec
    int32_t rspeed;              // right-side speed, quad pulses/sec
    // Publish voltages/currents/error flags read from the RoboClaw.
    void stat_pub();
    // Convert an incoming twist to wheel speeds and command the RoboClaw.
    void vel_callback(const geometry_msgs::Twist &vel);
};
27 |
28 | #endif //MOTION_H
29 |
--------------------------------------------------------------------------------
/src/snowcatv2/odometry.cpp:
--------------------------------------------------------------------------------
1 | #include "odometry.h"
2 |
3 | Odometry::Odometry(ros::NodeHandle *nh, const uint8_t left_pin_a, const uint8_t left_pin_b,
4 | const uint8_t right_pin_a, const uint8_t right_pin_b):nh(nh){
5 | broadcaster->init(*nh);
6 | t->header.frame_id = "/odom";
7 | t->child_frame_id = "/base_link";
8 | left_wheel = new Encoder(left_pin_a, left_pin_b);
9 | right_wheel = new Encoder(right_pin_a, right_pin_b);
10 | }
11 |
void Odometry::odomPub(){
  // Read and reset the wheel encoders, then broadcast an odom->base_link
  // transform stamped with the current time.
  // NOTE(review): the encoder readings are never used below — translation
  // is hardcoded to x=1.0 with identity rotation, so this is clearly
  // placeholder odometry awaiting the real kinematics.

  left_position = left_wheel->read();
  right_position = right_wheel->read();
  left_wheel->write(0);
  right_wheel->write(0);

  t->transform.translation.x = 1.0;
  t->transform.rotation.x = 0.0;
  t->transform.rotation.y = 0.0;
  t->transform.rotation.z = 0.0;
  t->transform.rotation.w = 1.0;
  t->header.stamp = nh->now();
  broadcaster->sendTransform(*t);
}
--------------------------------------------------------------------------------
/src/snowcatv2/odometry.h:
--------------------------------------------------------------------------------
1 | #ifndef ODOMETRY_H
2 | #define ODOMETRY_H
3 |
4 | #include "ros.h"
5 | #include
6 | #include
7 | #include
8 |
class Odometry{
  public:
    // NOTE(review): the definition in odometry.cpp takes four additional
    // encoder-pin arguments — this declaration is out of sync; confirm.
    explicit Odometry(ros::NodeHandle *nh);
    // Read/reset encoders and broadcast the odom->base_link transform.
    void odomPub();

  private:
    ros::NodeHandle *nh;
    geometry_msgs::TransformStamped *t;     // NOTE(review): never allocated in this header's flow
    tf::TransformBroadcaster *broadcaster;  // NOTE(review): never allocated in this header's flow
    Encoder *left_wheel;
    Encoder *right_wheel;
    long left_position;   // counts accumulated since last odomPub()
    long right_position;
};
23 |
24 | #endif //ODOMETRY_H
25 |
--------------------------------------------------------------------------------
/src/snowcatv2/snowcatv2.ino:
--------------------------------------------------------------------------------
1 | /**********************************************************************
2 | * Snowcat Code (Teensy 3.5)
3 | * @file snowcatv2.ino
4 | * @author: Carl Moser
5 | * @email: carl.moser@students.olin.edu
6 | * @version: 1.0
7 | *
8 | * SnowCat code updated to use the OAK architecture
9 | * This is the first "full" test of OAK
10 | *
11 | **********************************************************************/
12 |
13 | // Include Libraries
14 | #include
15 | #include
16 | #include
17 | #include
18 | #include "motion.h"
19 | #include "odometry.h"
20 |
21 | // Motion
22 | #define RC_SERIAL Serial1
23 | #define RC_BAUD 38400
24 | #define RC_ADDRESS 0x80
25 |
26 | // TOF array
27 | #define NUM_SENSOR 3
28 | #define LEFT_TOF 1
29 | #define CENTER_TOF 2
30 | #define RIGHT_TOF 3
31 |
32 | // Plow
33 | #define PLOW_SERVO_PIN 4
34 |
35 | // Lights
36 | #define LIGHT_PIN 5
37 |
// OAK peripheral handles, allocated in setup().
OAKServo *plow;                  // plow lift servo, driven via the "/plow" topic
OAKSoftSwitch *light;            // work light, driven via the "/light" topic
OAKVL53 *tof_array[NUM_SENSOR];  // VL53 time-of-flight distance sensors
// Per-sensor topic names, shutdown-control pins, and the unique I2C address
// each sensor is reassigned at startup (see setup()).
const String names[] = {"Left", "Center", "Right"};
const byte pins[] = {LEFT_TOF, CENTER_TOF, RIGHT_TOF};
const byte addresses[] = {0x31,0x30,0x29};

// Variables
ros::NodeHandle nh;              // rosserial node handle shared by all peripherals
Motion *motion;                  // RoboClaw drive controller wrapper, allocated in setup()
49 |
50 |
/**
 * Arduino entry point: initializes rosserial, brings the VL53 sensor array
 * up one at a time so each can be given a unique I2C address, then creates
 * the remaining OAK peripherals and the drive controller.
 */
void setup() {
  nh.initNode();

  /*******************************************
  *
  * Setting up the TOF array
  *
  * All VL53 sensors share a default I2C address, so they must be held in
  * shutdown and enabled one at a time while each is assigned its unique
  * address from addresses[].
  * NOTE(review): assumes pins[] are wired to each sensor's XSHUT/shutdown
  * line -- confirm against the wiring diagram.
  *
  *******************************************/
  // Hold every sensor in shutdown (pin LOW).
  for(int i = 0; i < NUM_SENSOR; i++){
    pinMode(pins[i], OUTPUT);
    digitalWrite(pins[i], LOW);
  }
  delay(15); // Give the TOF sensors time to reset
  // Enable sensors one at a time; each OAKVL53 is constructed (and
  // presumably re-addressed) before the next sensor comes out of shutdown.
  for(int i = 0; i < NUM_SENSOR; i++){
    digitalWrite(pins[i], HIGH);
    tof_array[i] = new OAKVL53(&nh, names[i].c_str(), 50, addresses[i]);
  }

  /*******************************************
  *
  * Setting up other OAK stuff
  *
  *******************************************/
  plow = new OAKServo(&nh, "/plow", PLOW_SERVO_PIN);
  light = new OAKSoftSwitch(&nh, "/light", LIGHT_PIN);
  motion = new Motion(&nh, &RC_SERIAL, RC_BAUD, RC_ADDRESS);
}
78 |
79 | void loop() {
80 | nh.spinOnce();
81 | for(int i = 0; i < NUM_SENSOR; i++){
82 | tof_array[i]->publish();
83 | }
84 | }
85 |
86 |
--------------------------------------------------------------------------------
/udev_rules/49-teensy.rules:
--------------------------------------------------------------------------------
1 | # UDEV Rules for Teensy boards, http://www.pjrc.com/teensy/
2 | #
3 | # The latest version of this file may be found at:
4 | # http://www.pjrc.com/teensy/49-teensy.rules
5 | #
6 | # This file must be placed at:
7 | #
8 | # /etc/udev/rules.d/49-teensy.rules (preferred location)
9 | # or
10 | # /lib/udev/rules.d/49-teensy.rules (req'd on some broken systems)
11 | #
12 | # To install, type this command in a terminal:
13 | # sudo cp 49-teensy.rules /etc/udev/rules.d/49-teensy.rules
14 | #
15 | # After this file is installed, physically unplug and reconnect Teensy.
16 | #
17 | ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", ENV{ID_MM_DEVICE_IGNORE}="1" , SYMLINK+="teensy teensy_$attr{serial}"
18 | ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789A]?", ENV{MTP_NO_PROBE}="1" , SYMLINK+="teensy teensy_$attr{serial}"
19 | SUBSYSTEMS=="usb", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789ABCD]?", MODE:="0666", SYMLINK+="teensy teensy_$attr{serial}"
20 | KERNEL=="ttyACM*", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789B]?", MODE:="0666", SYMLINK+="teensy teensy_$attr{serial}"
21 | #
22 | # If you share your linux system with other users, or just don't like the
23 | # idea of write permission for everybody, you can replace MODE:="0666" with
24 | # OWNER:="yourusername" to create the device owned by you, or with
# GROUP:="somegroupname" and manage access using standard unix groups.
26 | #
27 | #
28 | # If using USB Serial you get a new device each time (Ubuntu 9.10)
29 | # eg: /dev/ttyACM0, ttyACM1, ttyACM2, ttyACM3, ttyACM4, etc
30 | # apt-get remove --purge modemmanager (reboot may be necessary)
31 | #
# Older modem probing (eg, Ubuntu 9.04) caused very slow serial device detection.
33 | # To fix, add this near top of /lib/udev/rules.d/77-nm-probe-modem-capabilities.rules
34 | # SUBSYSTEMS=="usb", ATTRS{idVendor}=="16c0", ATTRS{idProduct}=="04[789]?", GOTO="nm_modem_probe_end"
35 | #
--------------------------------------------------------------------------------
/udev_rules/99-piksi.rules:
--------------------------------------------------------------------------------
# UDEV rules for the Piksi GPS receiver.
#
# Each matching USB device gets world read/write permissions, membership in
# the dialout group, and /dev/piksi plus a serial-number-suffixed symlink
# (/dev/piksi_<serial>).
#
# NOTE(review): vendor 0403 is FTDI (USB-serial bridge); vendor 0525 is the
# Netchip/Linux USB gadget ID -- confirm which Piksi interfaces these
# product IDs correspond to.
ATTRS{idProduct}=="6014", ATTRS{idVendor}=="0403", MODE="666", SYMLINK+="piksi piksi_$attr{serial}", GROUP="dialout"
ATTRS{idProduct}=="8398", ATTRS{idVendor}=="0403", MODE="666", SYMLINK+="piksi piksi_$attr{serial}", GROUP="dialout"
ATTRS{idProduct}=="A4A7", ATTRS{idVendor}=="0525", MODE="666", SYMLINK+="piksi piksi_$attr{serial}", GROUP="dialout"
4 |
--------------------------------------------------------------------------------