├── .catkin_workspace
├── .gitignore
├── .gitmodules
├── CONTRIBUTING.md
├── README.md
├── config
├── README.md
├── dynamics
│ └── README.md
├── launch
│ ├── README.md
│ └── full_stack
│ │ ├── perception.launch
│ │ └── planning.launch
└── sensors
│ ├── extrinsics.example.json
│ └── intrinsics.example.json
├── docs
├── CLAP_Flow.jpg
└── index.md
└── src
├── cognition
├── README.md
├── object_locator
│ ├── CMakeLists.txt
│ ├── README.md
│ ├── nodes
│ │ └── nearest_locator
│ ├── package.xml
│ ├── setup.py
│ └── src
│ │ └── zzz_cognition_object_locator
│ │ ├── __init__.py
│ │ └── nearest_locator.py
├── prediction
│ └── .placeholder
├── protocol
│ ├── CMakeLists.txt
│ ├── msg
│ │ ├── JunctionMapState.msg
│ │ ├── LaneState.msg
│ │ ├── MapState.msg
│ │ ├── MultiLaneMapState.msg
│ │ └── RoadObstacle.msg
│ ├── package.xml
│ ├── setup.py
│ └── src
│ │ └── zzz_cognition_msgs
│ │ ├── __init__.py
│ │ └── utils.py
└── reference_path
│ ├── CMakeLists.txt
│ ├── nodes
│ └── put_buffer
│ ├── package.xml
│ ├── setup.py
│ └── src
│ └── zzz_cognition_reference_path
│ ├── __init__.py
│ └── path_buffer.py
├── control
├── README.md
├── latlon_controllers
│ ├── CMakeLists.txt
│ ├── nodes
│ │ └── pure_persuit
│ ├── package.xml
│ ├── setup.py
│ └── src
│ │ └── zzz_control_latlon_controllers
│ │ ├── __init__.py
│ │ ├── lateral.py
│ │ ├── longitudinal.py
│ │ └── main.py
└── protocol
│ ├── CMakeLists.txt
│ ├── msg
│ ├── AuxiliaryCommand.msg
│ └── ControlCommand.msg
│ └── package.xml
├── driver
├── README.md
├── actuators
│ └── dataspeed_adapter
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── nodes
│ │ ├── convert_mkz_command
│ │ └── convert_mkz_report
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ └── zzz_driver_actuators_dataspeed_adapter
│ │ ├── __init__.py
│ │ └── mkz.py
├── datasets
│ ├── benchmark
│ │ ├── CMakeLists.txt
│ │ ├── REAMDE.md
│ │ ├── nodes
│ │ │ ├── decision
│ │ │ └── tracking
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ │ └── zzz_driver_datasets_benchmark
│ │ │ ├── __init__.py
│ │ │ ├── decision.py
│ │ │ ├── detection.py
│ │ │ ├── odometry.py
│ │ │ └── tracking.py
│ └── kitti
│ │ ├── CMakeLists.txt
│ │ ├── LICENSE.md
│ │ ├── README.md
│ │ ├── TODO.md
│ │ ├── package.xml
│ │ ├── scripts
│ │ └── convert_bag
│ │ ├── setup.py
│ │ └── src
│ │ └── zzz_driver_datasets_kitti
│ │ ├── __init__.py
│ │ ├── enums.py
│ │ ├── odometry.py
│ │ ├── raw.py
│ │ └── utils.py
├── platforms
│ ├── autoware_adapter
│ │ └── .placeholder
│ └── mcity_adapter
│ │ ├── CMakeLists.txt
│ │ ├── msg
│ │ ├── BSM.msg
│ │ ├── BSMVec.msg
│ │ ├── Control.msg
│ │ ├── SPaT.msg
│ │ ├── SPaTVec.msg
│ │ └── VehicleState.msg
│ │ ├── nodes
│ │ └── convert_topic
│ │ └── package.xml
├── protocol
│ ├── CMakeLists.txt
│ ├── msg
│ │ ├── AuxiliaryReport.msg
│ │ ├── ChassisReport.msg
│ │ ├── ControlReport.msg
│ │ ├── ResourceReport.msg
│ │ ├── RigidBodyState.msg
│ │ └── RigidBodyStateStamped.msg
│ ├── package.xml
│ ├── setup.py
│ └── src
│ │ └── zzz_driver_msgs
│ │ ├── __init__.py
│ │ └── utils.py
├── sensors
│ ├── astuff_adapter
│ │ ├── CMakeLists.txt
│ │ ├── nodes
│ │ │ └── convert_topic
│ │ └── package.xml
│ └── velodyne
│ │ └── .placeholder
└── simulators
│ ├── carla
│ └── carla_adapter
│ │ ├── CMakeLists.txt
│ │ ├── config
│ │ ├── static_sensors.example.json
│ │ └── vehicle_sensors.example.json
│ │ ├── launch
│ │ ├── server.launch
│ │ └── server.launch.yaml
│ │ ├── nodes
│ │ └── convert_topic
│ │ ├── package.xml
│ │ └── scripts
│ │ ├── server.tmuxp.yaml
│ │ ├── spawn_agent
│ │ ├── use_bridge
│ │ ├── auxiliary.launch
│ │ └── main.launch
│ │ └── use_srunner
│ │ ├── ZZZAgent.py
│ │ ├── main.launch
│ │ └── start.sh
│ └── lgsvl
│ └── README.md
├── library
├── CMakeLists.txt
├── README.md
├── msgs
│ └── .placeholder
├── package.xml
├── setup.py
├── src
│ └── zzz_common
│ │ ├── __init__.py
│ │ ├── dynamic_models.py
│ │ ├── geometry.py
│ │ ├── kinematics.py
│ │ ├── logger.py
│ │ ├── msg
│ │ ├── __init__.py
│ │ └── utils.py
│ │ └── params.py
└── test
│ └── test_geometry.py
├── monitor
└── README.md
├── navigation
├── map_provider
│ └── sumo
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── nodes
│ │ └── local_map_generator
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ └── zzz_navigation_map_provider_sumo
│ │ ├── __init__.py
│ │ └── local_map.py
├── pose_reporter
│ ├── CMakeLists.txt
│ ├── README.md
│ ├── nodes
│ │ └── manual
│ ├── package.xml
│ └── srv
│ │ └── UpdateOrigin.srv
└── protocol
│ ├── CMakeLists.txt
│ ├── msg
│ ├── Lane.msg
│ ├── LaneBoundary.msg
│ ├── LanePoint.msg
│ ├── LaneSituation.msg
│ ├── Map.msg
│ └── MapString.msg
│ ├── package.xml
│ ├── setup.py
│ └── src
│ └── zzz_navigation_msgs
│ ├── __init__.py
│ └── utils.py
├── perception
├── README.md
├── detection
│ ├── camera_detectors
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── launch
│ │ │ └── example.launch
│ │ ├── nodes
│ │ │ ├── traffic_light_detector
│ │ │ └── yolo
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ │ └── zzz_perception_detection_camera_detectors
│ │ │ └── __init__.py
│ ├── camera_filters
│ │ ├── CMakeLists.txt
│ │ ├── nodes
│ │ │ ├── ipm_projector
│ │ │ └── pseudo_lidar_generator
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ │ └── zzz_perception_detection_camera_filters
│ │ │ ├── __init__.py
│ │ │ └── ipm.py
│ ├── fused_detectors
│ │ ├── CMakeLists.txt
│ │ ├── nodes
│ │ │ └── frustum
│ │ ├── package.xml
│ │ └── setup.py
│ ├── lidar_detectors
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── include
│ │ │ └── zzz_perception_detection_lidar_detectors
│ │ │ │ ├── EuclideanCluster2dDetector.h
│ │ │ │ ├── EuclideanClusterDetector.h
│ │ │ │ └── LidarDetector.h
│ │ ├── nodes
│ │ │ └── euclidean_cluster_detector.cpp
│ │ ├── package.xml
│ │ └── src
│ │ │ ├── EuclideanClusterDetector.cpp
│ │ │ └── LidarDetector.cpp
│ └── lidar_filters
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── nodes
│ │ ├── extrinsic
│ │ └── lshape
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ └── zzz_perception_detection_lidar_filters
│ │ ├── __init__.py
│ │ ├── estimators.py
│ │ └── transformers.py
├── localization
│ ├── README.md
│ └── imu_wheel_odom
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── nodes
│ │ └── locator
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ └── zzz_perception_localization_imu_wheel_odom
│ │ └── __init__.py
├── protocol
│ ├── CMakeLists.txt
│ ├── msg
│ │ ├── BoundingBox.msg
│ │ ├── BoundingBox2D.msg
│ │ ├── DetectionBox.msg
│ │ ├── DetectionBox2D.msg
│ │ ├── DetectionBox2DArray.msg
│ │ ├── DetectionBoxArray.msg
│ │ ├── Dimension2DWithCovariance.msg
│ │ ├── DimensionWithCovariance.msg
│ │ ├── LaneDetection.msg
│ │ ├── LaneDetectionArray.msg
│ │ ├── ObjectClass.msg
│ │ ├── ObjectSignals.msg
│ │ ├── Pose2DWithCovariance.msg
│ │ ├── TrackingBox.msg
│ │ └── TrackingBoxArray.msg
│ └── package.xml
└── tracking
│ ├── object_filters
│ ├── CMakeLists.txt
│ ├── README.md
│ ├── nodes
│ │ ├── criteria
│ │ └── to_static
│ ├── package.xml
│ ├── setup.py
│ └── src
│ │ └── zzz_perception_tracking_object_filters
│ │ ├── __init__.py
│ │ ├── selectors.py
│ │ └── transformers.py
│ └── object_trackers
│ ├── CMakeLists.txt
│ ├── nodes
│ └── box_tracker
│ ├── package.xml
│ ├── setup.py
│ └── src
│ └── zzz_perception_tracking_object_trackers
│ ├── __init__.py
│ ├── associate.py
│ ├── filters.py
│ └── trackers.py
├── planning
├── README.md
├── decision
│ ├── lane_models
│ │ ├── CMakeLists.txt
│ │ ├── README.md
│ │ ├── nodes
│ │ │ ├── idm_lane_utility
│ │ │ └── rls_train
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ │ └── zzz_planning_decision_lane_models
│ │ │ ├── __init__.py
│ │ │ ├── lateral.py
│ │ │ ├── learning.py
│ │ │ ├── longitudinal.py
│ │ │ └── main.py
│ └── safeguard
│ │ ├── CMakeLists.txt
│ │ ├── nodes
│ │ └── reachable_set
│ │ ├── package.xml
│ │ ├── setup.py
│ │ └── src
│ │ └── zzz_planning_decision_safeguard
│ │ ├── __init__.py
│ │ └── reachable_set.py
├── protocol
│ ├── CMakeLists.txt
│ ├── msg
│ │ └── DecisionTrajectory.msg
│ └── package.xml
├── routing
│ └── .placeholder
└── trajectory
│ └── .placeholder
├── tools
├── README.md
├── command_calibration.py
└── select_submsg.py
└── visualization
├── README.md
├── rviz
└── box_visualizer
│ ├── CMakeLists.txt
│ ├── README.md
│ ├── nodes
│ ├── detection
│ ├── detection2d
│ └── tracking
│ ├── package.xml
│ ├── setup.py
│ └── src
│ └── zzz_visualization_rviz_box_visualizer
│ ├── __init__.py
│ ├── detection.py
│ ├── detection2d.py
│ ├── tracking.py
│ └── utils.py
└── web
├── dash
├── CMakeLists.txt
├── README.md
├── nodes
│ └── dashboard
├── package.xml
├── setup.py
└── src
│ └── zzz_visualization_web_dash
│ └── __init__.py
└── xviz
└── .place_holder
/.catkin_workspace:
--------------------------------------------------------------------------------
1 | # This file currently only serves to mark the location of a catkin workspace for tool integration
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # ROS stuff
2 | devel/
3 | logs/
4 | build/
5 | bin/
6 | lib/
7 | install/
8 | msg_gen/
9 | srv_gen/
10 | msg/*Action.msg
11 | msg/*ActionFeedback.msg
12 | msg/*ActionGoal.msg
13 | msg/*ActionResult.msg
14 | msg/*Feedback.msg
15 | msg/*Goal.msg
16 | msg/*Result.msg
17 | msg/_*.py
18 | build_isolated/
19 | devel_isolated/
20 |
21 | # Generated by dynamic reconfigure
22 | *.cfgc
23 | /cfg/cpp/
24 | /cfg/*.py
25 |
26 | # Ignore generated docs
27 | *.dox
28 | *.wikidoc
29 |
30 | # eclipse stuff
31 | .project
32 | .cproject
33 |
34 | # qcreator stuff
35 | CMakeLists.txt.user
36 |
37 | srv/_*.py
38 | *.pcd
39 | *.pyc
40 | qtcreator-*
41 | *.user
42 |
43 | /planning/cfg
44 | /planning/docs
45 | /planning/src
46 |
47 | *~
48 |
49 | # Emacs
50 | .#*
51 |
52 | # Catkin custom files
53 | CATKIN_IGNORE
54 |
55 | # Integration
56 | .vscode/
57 |
58 | # Reference files
59 | ref/
60 |
61 | # Recorded rosbags
62 | *.bag
63 |
64 | # Catkin generated
65 | src/CMakeLists.txt
66 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "src/perception/detection/camera_detectors/src/yolov3"]
2 | path = src/perception/detection/camera_detectors/src/yolov3
3 | url = https://github.com/cmpute/zzz_yolov3.git
4 | [submodule "src/perception/detection/camera_filters/src/pseudo_lidar"]
5 | path = src/perception/detection/camera_filters/src/pseudo_lidar
6 | url = https://github.com/minghanz/pseudo_lidar.git
7 | [submodule "src/tools/openai_baselines"]
8 | path = src/tools/openai_baselines
9 | url = https://github.com/cmpute/baselines.git
10 | [submodule "src/perception/detection/fused_detectors/src/frustum_pointnets"]
11 | path = src/perception/detection/fused_detectors/src/frustum_pointnets
12 | url = https://github.com/minghanz/frustum-pointnets.git
13 | [submodule "src/driver/simulators/carla/carla_bridge"]
14 | path = src/driver/simulators/carla/carla_bridge
15 | url = https://github.com/carla-simulator/ros-bridge.git
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Root structure
2 | - `/docs` folder contains the overall documentation
3 | - `/ref` folder is for importing external projects. This folder is ignored by git.
4 | - ROS-generated folders `/devel`, `/build`, `/install` are also ignored by git.
5 |
6 | # Package structure
7 | A package contains several essential folders:
8 |
9 | - **cfg**: dynamically reconfigurable parameters
10 | - **include**: code for headers (*.hpp, *.pxd, *.pyi)
11 | - **src**: code for implementations (*.cpp, *.py, *.pyx)
12 | - **msg**: message definitions (*.msg)
13 | - **action**: action definitions (*.action)
14 | - **srv**: service definitions (*.srv)
15 | - **launch**: launch files (*.launch)
16 | - **test**: unit test files
17 | - **nodes**: code for executable node (*.c, *.cpp, *.py, *.pyx)
18 | - **scripts**: code for executable sources (*.py, *.pyx)
19 |
20 | # ROS dependency separation
21 | To make the code easier to use, we will try to offer a non-ROS mode. The existence of the environment variable `ZZZ_ROS` determines whether the code is built upon ROS or not. To achieve this flexibility, each package should follow several principles, as sketched in the example below:
22 | - The majority of the functional code should be wrapped in ROS-independent functions
23 | - A separate file provides a ROS wrapper for the code to work as a node
24 | - The main implementation (including Python bindings) is put into `src`, and the wrapper code should be included in the sources under `nodes`
25 |
26 | # Readme Contents
27 | - For message definition packages: related comments are usually contained in the `.msg` files and no readme is needed.
28 | - For algorithm implementation packages: one needs `Module IO` (input topics, output topics, parameters), `Reference`, etc.
29 |
30 | # Protocol strategy
31 | Each protocol is defined in its source module. Protocols are defined based on the ROS [common_msgs](https://github.com/ros/common_msgs) and other zzz protocols. Please try to avoid external dependencies. A protocol should contain as much information as needed; if a certain module doesn't need or provide part of the data, it should simply ignore it.
32 |
33 | # Coordinate system
34 | Global / Geographic: UTM WGS-84
35 | Local / Ground: East-North-Up (ENU)
36 | Body-fixed: Forward-Left-Up (FLU), origin at rear axle center
37 |
38 | # Frames definition
39 | Each sensor has an attached frame, and the extrinsic calibration result should be published as a tf.TransformStamped between the sensors. The frame attached to the geometric center of the ego vehicle is defined as `base_link`. The `odom` frame has its origin at the start point of the vehicle (both position and orientation are fixed), and the `map` frame has an assigned origin with its axes aligned with the ground coordinate system (ENU).
40 |
41 | So the transforms between sensors and between `map` and `odom` are almost static, while the transform between `odom` and `base_link` is frequently updated with `nav_msgs/Odometry` messages from the perception module. The transform between `map` and `odom` will be compensated regularly by a perception or navigation module. If needed, the sensor transforms can also be adjusted online.
42 |
43 | # Namespaces
44 |
45 | All node names, topic names, param names and frame names should be relative by default (that is, a name is not an absolute name with a specified namespace). All namespaces should be managed by the roslaunch files.
46 |
47 | # Other principles
48 | - Lane operations should be performed in s-coordinate (one-dimensional coordinate along a curve)
49 |
50 | # References
51 | - [ROS CMakeLists.txt documentation](http://wiki.ros.org/catkin/CMakeLists.txt)
52 | - [How to structure a Python-based ROS package](http://www.artificialhumancompanions.com/structure-python-based-ros-package/)
53 |
--------------------------------------------------------------------------------
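A minimal sketch of the `src`/`nodes` separation described in CONTRIBUTING.md above, using hypothetical names (`zzz_example_package`, `Smoother`, and the `raw`/`smoothed` topics); the actual packages in this repository follow the same pattern with their own implementations:

```python
# Sketch only. In the real layout the Smoother class lives in
# src/zzz_example_package/ and the node file under nodes/ would do
# "from zzz_example_package import Smoother"; both halves are shown together here.

class Smoother(object):
    """Exponential smoothing of a scalar signal; plain Python, no rospy import."""
    def __init__(self, alpha=0.5):
        self._alpha = alpha
        self._value = None

    def update(self, x):
        # Blend the new sample with the previous estimate
        if self._value is None:
            self._value = x
        else:
            self._value = self._alpha * x + (1.0 - self._alpha) * self._value
        return self._value


# nodes/smoother -- thin ROS wrapper; only this part touches rospy
import rospy
from std_msgs.msg import Float32

class SmootherNode(object):
    def __init__(self):
        self._impl = Smoother(alpha=rospy.get_param("~alpha", 0.5))
        self._pub = rospy.Publisher("smoothed", Float32, queue_size=1)
        rospy.Subscriber("raw", Float32, self.callback)

    def callback(self, msg):
        self._pub.publish(Float32(data=self._impl.update(msg.data)))

if __name__ == "__main__":
    rospy.init_node("smoother")
    SmootherNode()
    rospy.spin()
```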
/README.md:
--------------------------------------------------------------------------------
1 | # ZZZ: Cloud Learning compatible Autonomous driving Platform (CLAP)
2 |
3 | [CARLA](https://github.com/carla-simulator/carla)
4 | [ROS](https://www.ros.org/about-ros/)
5 |
6 | [CLAP flow chart](docs/CLAP_Flow.jpg)
7 |
8 | ## Table of Contents
9 |
10 | - [Background](#background)
11 | - [Usage](#usage)
12 | - [Running with CARLA](#CARLA)
13 | - [Running on real vehicles](#AV)
14 | - [Contributing](#contributing)
15 | - [Citation](#citation)
16 | - [Contact](#contact)
17 |
18 | ## Background
19 |
20 | This code is a research-oriented, full-stack autonomous driving platform.
21 | It has been implemented on real autonomous vehicles as well as in the CARLA simulator.
22 |
23 |
24 | ## Usage
25 |
26 | ### Running with CARLA: compatible and easy to access
27 |
28 | > Please check out the branch: dev/zhcao/master
29 |
30 | The goals for this repository are:
31 |
32 | 1. Easily set up the environment: mainly using Python
33 | 2. Easily transfer to real-vehicle driving: sharing the same code environment as the real-vehicle implementation
34 |
35 | ### Running on real vehicles: efficient and stable
36 |
37 | > Please check out the branch: dev/xiaopengG3
38 |
39 | The goals for this repository are:
40 |
41 | 1. Same code as used for real-vehicle driving
42 | 2. Efficient: code is written in C++ for efficiency.
43 |
44 | Example driving data can be downloaded from:
45 |
46 | ```bash
47 | https://pan.baidu.com/s/1846ZN8GzlhuyUD5FQEf_jw
48 | password:18xx
49 | Please read record.txt for a detailed description.
50 | ```
51 |
52 | ## Contributing
53 |
54 | Feel free to dive in!
55 |
56 | Contributors mainly come from Tsinghua University and the University of Michigan.
57 |
58 | - Contributors:
59 | Dr. Zhong Cao (Tsinghua), Yuanxin Zhong (Umich), Minghan Zhu (Umich), Weitao Zhou (Tsinghua)
60 | - Advisors:
61 | Prof. Diange Yang (Tsinghua), Prof. Huei Peng (Umich), Dr. Shaobing Xu (Umich)
62 |
63 | ## Citation
64 |
65 | If you use this code, please cite our IV 2020 paper:
66 |
67 | ```
68 | @inproceedings{zhong2020clap,
69 | title={CLAP: Cloud-and-Learning-compatible Autonomous driving Platform},
70 | author={Zhong, Yuanxin and Cao, Zhong and Zhu, Minghan and Wang, Xinpeng and Yang, Diange and Peng, Huei},
71 | booktitle={2020 IEEE Intelligent Vehicles Symposium (IV)},
72 | pages={1450--1456},
73 | organization={IEEE}
74 | }
75 | ```
76 |
77 | ## Contact
78 |
79 | Please contact us if you have any questions. Zhong Cao (Tsinghua University): caozhong@tsinghua.edu.cn, Yuanxin Zhong (University of Michigan): zyxin@umich.edu
80 |
--------------------------------------------------------------------------------
/config/README.md:
--------------------------------------------------------------------------------
1 | This folder contains parameters for sensors, controllers, the vehicle model, etc. Parameters here should be collected before running the system.
2 |
--------------------------------------------------------------------------------
/config/dynamics/README.md:
--------------------------------------------------------------------------------
1 | Here goes the vehicle kinematic and dynamic models configuration.
2 |
--------------------------------------------------------------------------------
/config/launch/README.md:
--------------------------------------------------------------------------------
1 | This directory contains useful launch files
--------------------------------------------------------------------------------
/config/launch/full_stack/planning.launch:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/config/launch/full_stack/planning.launch
--------------------------------------------------------------------------------
/config/sensors/extrinsics.example.json:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/config/sensors/extrinsics.example.json
--------------------------------------------------------------------------------
/config/sensors/intrinsics.example.json:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/config/sensors/intrinsics.example.json
--------------------------------------------------------------------------------
/docs/CLAP_Flow.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/docs/CLAP_Flow.jpg
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/src/cognition/README.md:
--------------------------------------------------------------------------------
1 | This module contains packages for environment cognition.
2 |
--------------------------------------------------------------------------------
/src/cognition/object_locator/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_cognition_object_locator)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/nearest_locator
11 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
12 |
--------------------------------------------------------------------------------
/src/cognition/object_locator/README.md:
--------------------------------------------------------------------------------
1 | This package assigns all road objects to lanes.
--------------------------------------------------------------------------------
/src/cognition/object_locator/nodes/nearest_locator:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import os
4 | import rospy
5 | import threading
6 |
7 | from zzz_common.params import parse_private_args
8 | from zzz_driver_msgs.msg import RigidBodyStateStamped
9 | from zzz_navigation_msgs.msg import Map
10 | from zzz_cognition_msgs.msg import MapState
11 | from zzz_perception_msgs.msg import TrafficLightDetectionArray, TrackingBoxArray
12 | from zzz_cognition_object_locator import NearestLocator
13 |
14 |
15 | class LocatorNode(object):
16 | def __init__(self):
17 |
18 | params = parse_private_args(
19 | objects_topic = "/zzz/perception/objects_tracked",
20 | map_input_topic="/zzz/navigation/local_static_map",
21 | map_output_topic="local_dynamic_map/map_with_objects",
22 | pose_topic = "/zzz/navigation/ego_pose",
23 | traffic_light_topic = "/zzz/perception/traffic_lights",
24 | )
25 |
26 | self._map_instance = NearestLocator()
27 |
28 | self._pose_subscriber = rospy.Subscriber(params.pose_topic, RigidBodyStateStamped, self.pose_callback)
29 | self._surrounding_vehicle_subscriber = rospy.Subscriber(params.objects_topic, TrackingBoxArray, self._map_instance.receive_object_list)
30 | self._static_map_subscriber = rospy.Subscriber(params.map_input_topic, Map, self._map_instance.receive_static_map)
31 | self._dynamic_map_publisher = rospy.Publisher(params.map_output_topic, MapState, queue_size=1)
32 | self._traffic_light_subscriber = rospy.Subscriber(params.traffic_light_topic, TrafficLightDetectionArray,
33 | self._map_instance.receive_traffic_light_detection)
34 |
35 | def pose_callback(self, msg):
36 | # TODO(zyxin): Change to a self looping node
37 | self._map_instance.receive_ego_state(msg)
38 | self._map_instance.update()
39 | self._dynamic_map_publisher.publish(self._map_instance.dynamic_map)
40 |
41 | if __name__ == "__main__":
42 |
43 | rospy.init_node("nearest_locator", log_level=rospy.DEBUG)
44 | node = LocatorNode()
45 | rospy.spin()
46 |
--------------------------------------------------------------------------------
/src/cognition/object_locator/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_cognition_object_locator
3 | 0.1.0
4 |
5 | This module provide functionality to locate objects onto lanes
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | std_msgs
17 | nav_msgs
18 | zzz_common
19 | zzz_driver_msgs
20 | zzz_cognition_msgs
21 | zzz_navigation_msgs
22 | zzz_perception_msgs
23 |
24 |
25 |
--------------------------------------------------------------------------------
/src/cognition/object_locator/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_cognition_object_locator'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
--------------------------------------------------------------------------------
/src/cognition/object_locator/src/zzz_cognition_object_locator/__init__.py:
--------------------------------------------------------------------------------
1 | from .nearest_locator import NearestLocator
2 |
--------------------------------------------------------------------------------
/src/cognition/prediction/.placeholder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/cognition/prediction/.placeholder
--------------------------------------------------------------------------------
/src/cognition/protocol/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_cognition_msgs)
3 |
4 | find_package(catkin REQUIRED genmsg std_msgs # essential dependency
5 | geometry_msgs zzz_navigation_msgs zzz_perception_msgs zzz_driver_msgs
6 | )
7 |
8 | catkin_python_setup()
9 |
10 | add_message_files(DIRECTORY msg FILES
11 | JunctionMapState.msg
12 | LaneState.msg
13 | MultiLaneMapState.msg
14 | MapState.msg
15 | RoadObstacle.msg
16 | )
17 |
18 | generate_messages(DEPENDENCIES std_msgs geometry_msgs zzz_perception_msgs zzz_navigation_msgs zzz_driver_msgs)
19 | catkin_package(CATKIN_DEPENDS std_msgs geometry_msgs zzz_perception_msgs zzz_navigation_msgs zzz_driver_msgs)
20 |
--------------------------------------------------------------------------------
/src/cognition/protocol/msg/JunctionMapState.msg:
--------------------------------------------------------------------------------
1 | # This message describes a free-space driving model
2 |
3 | # -------- Reference Path info ----------
4 | LaneState reference_path
5 |
6 | # -------- Other constraints ----------
7 | geometry_msgs/Polygon drivable_area
8 | RoadObstacle[] obstacles
9 |
--------------------------------------------------------------------------------
/src/cognition/protocol/msg/LaneState.msg:
--------------------------------------------------------------------------------
1 | # This message represents the state of a lane in the dynamic map
2 |
3 | # Static properties of the lane
4 | zzz_navigation_msgs/Lane map_lane
5 |
6 | # distance to lane end
7 | float32 stop_distance # = Inf
8 |
9 | # the front vehicles on this lane, sorted by their distances in ascending order
10 | RoadObstacle[] front_vehicles
11 |
12 | # the rear vehicles on this lane, sorted by their distances in ascending order
13 | RoadObstacle[] rear_vehicles
14 |
--------------------------------------------------------------------------------
/src/cognition/protocol/msg/MapState.msg:
--------------------------------------------------------------------------------
1 | # This message represents a homogeneous representation of dynamic map
2 |
3 | Header header
4 |
5 | # Ego Vehicle Info
6 | zzz_driver_msgs/RigidBodyState ego_state
7 |
8 | # ----- Map Model indicator -----
9 |
10 | # This field gives a suggested map model
11 | uint8 model
12 |
13 | # Junction map is given all the time, while multi-lane map
14 | # and other maps are given when the road is properly structured
15 | uint8 MODEL_JUNCTION_MAP = 0
16 | uint8 MODEL_MULTILANE_MAP = 1
17 |
18 | # Map State Instances
19 | JunctionMapState jmap
20 | MultiLaneMapState mmap
21 |
--------------------------------------------------------------------------------
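As a usage sketch (not part of the repository), a downstream consumer of `MapState` would typically branch on the `model` field to choose between the junction and the multi-lane representation. The node name and wiring below are illustrative; the topic default comes from the `nearest_locator` node:

```python
#!/usr/bin/env python
# Hypothetical consumer of zzz_cognition_msgs/MapState (illustration only)
import rospy
from zzz_cognition_msgs.msg import MapState

def dynamic_map_callback(msg):
    if msg.model == MapState.MODEL_MULTILANE_MAP:
        # Structured road: reason over the per-lane states
        rospy.logdebug("ego lane %d of %d lanes", msg.mmap.ego_lane_index, len(msg.mmap.lanes))
    else:
        # MODEL_JUNCTION_MAP is always available: use drivable area and obstacle list
        rospy.logdebug("junction map with %d obstacles", len(msg.jmap.obstacles))

if __name__ == "__main__":
    rospy.init_node("map_state_consumer")
    rospy.Subscriber("local_dynamic_map/map_with_objects", MapState, dynamic_map_callback)
    rospy.spin()
```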
/src/cognition/protocol/msg/MultiLaneMapState.msg:
--------------------------------------------------------------------------------
1 | # This message describes a multi-lane dynamic map model
2 |
3 |
4 | # distance before next junction
5 | float32 distance_to_junction
6 |
7 | # -------- Multi-lanes info --------
8 | # all lane info
9 | LaneState[] lanes
10 |
11 | # the lane that ego vehicle drives on #FIXME(frenet)
12 | int8 ego_lane_index
13 | float32 ego_mmap_x # always 0
14 | float32 ego_mmap_y
15 | float32 ego_mmap_vx
16 | float32 ego_mmap_vy
17 |
18 | # target lane index (0 refers to the right-most lane when driving on the right side)
19 | # TODO: Change to list
20 | int8 target_lane_index
21 |
--------------------------------------------------------------------------------
/src/cognition/protocol/msg/RoadObstacle.msg:
--------------------------------------------------------------------------------
1 | # This message contains all the information that planning needs for a road object
2 |
3 |
4 | # Unique identity of the obstacle
5 | uint64 uid
6 |
7 | # The confidence of existence, which can be used to determine whether this object is valid or invalid
8 | # A possible way to get the confidence is by using the tracking age
9 | float32 confidence
10 |
11 | # The best guess of obstacle type
12 | zzz_perception_msgs/ObjectClass cls
13 |
14 | # Estimated kinematic properties
15 | zzz_driver_msgs/RigidBodyState state
16 |
17 | bool static # A flag to mark whether the object is static
18 |
19 | # TODO: Add history trajectories
20 |
21 | # ----- Physical Boundary (Optional) -----
22 | uint8 shape_type
23 | uint8 SHAPE_UNDEFINED = 0
24 | uint8 SHAPE_POLYGON = 1
25 |
26 | # Representation of the object if it's represented by polygon
27 | geometry_msgs/Polygon shape
28 | # Null uncertainty or Inf uncertainty means that the shape is not actually generated
29 | float32[] shape_uncertainty
30 |
31 | # ----- High level behavior estimation -----
32 |
33 | uint8 behavior
34 | uint8 BEHAVIOR_UNKNOWN = 0
35 | uint8 BEHAVIOR_STOPPING = 1 # vehicle is going to stop completely or stay stopped
36 | uint8 BEHAVIOR_FOLLOW = 2 # follow lane or straight line
37 | uint8 BEHAVIOR_MOVING_LEFT = 3 # changing to its left lane or branch left
38 | uint8 BEHAVIOR_MOVING_RIGHT = 4 # changing to its right lane or branch right
39 |
40 | # This field is preserved to adapt to yield situations.
41 | # Yield situations include left turn, right turn, sequential stop sign, emergency vehicle, etc.
42 | # Yield means that if your path conflicts with the object's, you should wait for the object
43 | # to go through the conflict point.
44 | uint8 priority
45 | uint8 PRIORITY_UNKNOWN = 0
46 | uint8 PRIORITY_NORMAL = 1
47 | uint8 PRIORITY_CAUTIOUS = 2 # You can move when this obstacle is far away
48 | uint8 PRIORITY_STOP = 3 # Should let this vehicle go first, and then you can move
49 |
50 | # -------Multilane Coordinate (Translator) Frenet Formulas----- TODO
51 |
52 | float32 mmap_x
53 | float32 mmap_y
54 | float32 mmap_vx
55 | float32 mmap_vy
56 |
--------------------------------------------------------------------------------
/src/cognition/protocol/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_cognition_msgs
3 | 0.1.0
4 |
5 | Unified interface message definitions for ZZZ cognition modules
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | message_generation
17 | std_msgs
18 | geometry_msgs
19 | zzz_navigation_msgs
20 | zzz_driver_msgs
21 | zzz_perception_msgs
22 | message_runtime
23 | std_msgs
24 | geometry_msgs
25 | zzz_navigation_msgs
26 | zzz_driver_msgs
27 | zzz_perception_msgs
28 |
29 |
--------------------------------------------------------------------------------
/src/cognition/protocol/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_cognition_msgs'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
12 |
--------------------------------------------------------------------------------
/src/cognition/protocol/src/zzz_cognition_msgs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/cognition/protocol/src/zzz_cognition_msgs/__init__.py
--------------------------------------------------------------------------------
/src/cognition/protocol/src/zzz_cognition_msgs/utils.py:
--------------------------------------------------------------------------------
1 | from zzz_common.kinematics import get_absolute_state
2 | from zzz_driver_msgs.msg import RigidBodyStateStamped
3 | from zzz_cognition_msgs.msg import LaneState, MapState, RoadObstacle, JunctionMapState
4 | from zzz_perception_msgs.msg import TrackingBoxArray, ObjectClass
5 |
6 | def default_msg(msg_type):
7 | '''
8 | Setting default values for the messages
9 | '''
10 | if msg_type == LaneState:
11 | msg = LaneState()
12 | msg.stop_distance = float('inf')
13 | elif msg_type == MapState:
14 | msg = MapState()
15 | elif msg_type == RoadObstacle:
16 | msg = RoadObstacle()
17 | elif msg_type == JunctionMapState:
18 | msg = JunctionMapState()
19 |
20 | return msg
21 |
22 | def convert_tracking_box(array, pose):
23 | '''
24 | Convert tracking boxes into RoadObstacle messages. Pose should be a RigidBodyStateStamped obtained from /zzz/navigation/ego_pose
25 | '''
26 | assert type(array) == TrackingBoxArray
27 | assert type(pose) == RigidBodyStateStamped
28 |
29 | obstacles = []
30 |
31 | for obj in array.targets:
32 | if array.header.frame_id != pose.header.frame_id:
33 | trackpose = RigidBodyStateStamped()
34 | trackpose.header = array.header
35 | trackpose.state.pose = obj.bbox.pose
36 | trackpose.state.twist = obj.twist
37 | trackpose.state.accel = obj.accel
38 | abspose = get_absolute_state(trackpose, pose)
39 |
40 | assert abspose.header.frame_id == 'map'
41 | else:
42 | abspose = RigidBodyStateStamped()
43 | abspose.header = array.header
44 | abspose.state.pose = obj.bbox.pose
45 | abspose.state.twist = obj.twist
46 | abspose.state.accel = obj.accel
47 |
48 | obstacle = RoadObstacle()
49 | obstacle.uid = obj.uid
50 | obstacle.state = abspose.state
51 | if len(obj.classes) > 0:
52 | obstacle.cls = obj.classes[0]
53 | else:
54 | obstacle.cls.classid = ObjectClass.UNKNOWN
55 | obstacle.cls.score = 1
56 | # TODO: Convert obstacle shape
57 |
58 | obstacles.append(obstacle)
59 |
60 | return obstacles
61 |
--------------------------------------------------------------------------------
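A short usage sketch for the helpers above; the surrounding function is hypothetical, while the message flow mirrors what the cognition nodes do:

```python
# Illustration of how default_msg and convert_tracking_box are typically used
from zzz_cognition_msgs.utils import default_msg, convert_tracking_box
from zzz_cognition_msgs.msg import LaneState

def on_tracking_array(tracking_array, ego_pose):
    """tracking_array: TrackingBoxArray, ego_pose: latest RigidBodyStateStamped."""
    # Convert every tracked box into a RoadObstacle expressed in the map frame
    obstacles = convert_tracking_box(tracking_array, ego_pose)

    # default_msg pre-fills fields such as stop_distance = inf; the actual
    # assignment of obstacles to lanes is done in zzz_cognition_object_locator
    lane = default_msg(LaneState)
    lane.front_vehicles = obstacles
    return lane
```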
/src/cognition/reference_path/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_cognition_reference_path)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS nodes/put_buffer
10 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
11 |
--------------------------------------------------------------------------------
/src/cognition/reference_path/nodes/put_buffer:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | __module_name__ = "map_provider"
4 |
5 | import os
6 | import rospy
7 | import threading
8 | from zzz_common.params import parse_private_args
9 |
10 | from zzz_driver_msgs.msg import RigidBodyStateStamped
11 | from zzz_cognition_msgs.msg import MapState
12 | from zzz_cognition_reference_path import PathBuffer
13 | from nav_msgs.msg import Path
14 |
15 | class Node(object):
16 | def __init__(self,):
17 | params = parse_private_args(
18 | map_file = "",
19 | map_input_topic = "local_dynamic_map/map_with_objects",
20 | map_output_topic = "local_dynamic_map/map_with_ref",
21 | map_file_type = "opendrive",
22 | pose_topic = "/zzz/navigation/ego_pose",
23 | reference_path_topic = "/zzz/navigation/reference_path"
24 | )
25 |
26 | self._map_trigger = threading.Event()
27 | self._reference_path_trigger = threading.Event()
28 |
29 | self._map_instance = PathBuffer()
30 |
31 | self._input_map_subscriber = rospy.Subscriber(params.map_input_topic, MapState, self.map_callback)
32 | self._pose_subscriber = rospy.Subscriber(params.pose_topic, RigidBodyStateStamped, self.pose_callback)
33 | self._reference_path_subscriber = rospy.Subscriber(params.reference_path_topic, Path, self.reference_callback)
34 | self._output_map_publisher = rospy.Publisher(params.map_output_topic, MapState, queue_size=1)
35 |
36 | def map_callback(self, msg):
37 | self._map_instance.receive_static_map(msg)
38 | self._map_trigger.set()
39 |
40 | def reference_callback(self, msg):
41 | self._map_trigger.wait()
42 | self._map_instance.receive_reference_path(msg)
43 | self._reference_path_trigger.set()
44 | rospy.loginfo("[%s] Received reference path" % __module_name__)
45 |
46 | def pose_callback(self, msg):
47 | # Note: Here we actually assume that pose is updating at highest frequency
48 | self._map_trigger.wait()
49 | self._reference_path_trigger.wait()
50 |
51 | new_static_map = self._map_instance.receive_ego_state(msg)
52 | rospy.logdebug("Path length: %d", len(new_static_map.jmap.reference_path.map_lane.central_path_points))
53 | self._output_map_publisher.publish(new_static_map)
54 |
55 | if __name__ == "__main__":
56 |
57 | rospy.init_node("put_buffer", log_level=rospy.DEBUG)
58 | node = Node()
59 | rospy.spin()
60 |
--------------------------------------------------------------------------------
/src/cognition/reference_path/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_cognition_reference_path
3 | 0.1.0
4 |
5 | Get dynamic local map
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | std_msgs
18 | roscpp
19 | std_msgs
20 | roscpp
21 | std_msgs
22 | nav_msgs
23 | zzz_common
24 |
25 |
26 |
--------------------------------------------------------------------------------
/src/cognition/reference_path/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_cognition_reference_path'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
--------------------------------------------------------------------------------
/src/cognition/reference_path/src/zzz_cognition_reference_path/__init__.py:
--------------------------------------------------------------------------------
1 | from .path_buffer import PathBuffer
2 |
--------------------------------------------------------------------------------
/src/control/README.md:
--------------------------------------------------------------------------------
1 | This module contains essential vehicle controllers.
2 |
--------------------------------------------------------------------------------
/src/control/latlon_controllers/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_control_latlon_controllers)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/pure_persuit
11 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
12 |
--------------------------------------------------------------------------------
/src/control/latlon_controllers/nodes/pure_persuit:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from zzz_common.params import parse_private_args
6 | from zzz_driver_msgs.msg import RigidBodyStateStamped
7 | from zzz_planning_msgs.msg import DecisionTrajectory
8 | from zzz_control_msgs.msg import ControlCommand
9 | from zzz_control_latlon_controllers import MainController
10 |
11 | import threading
12 |
13 | class ControllerNode(object):
14 | def __init__(self):
15 | params = parse_private_args(
16 | control_topic="command",
17 | pose_topic="/zzz/navigation/ego_pose",
18 | trajectory_topic="/zzz/planning/safeguard_trajectory"
19 | )
20 |
21 | self._pose_subscriber = rospy.Subscriber(params.pose_topic, RigidBodyStateStamped, self.pose_callback)
22 | self._decision_subscriber = rospy.Subscriber(params.trajectory_topic, DecisionTrajectory, self.decision_callback)
23 | self._command_publisher = rospy.Publisher(params.control_topic, ControlCommand, queue_size=1)
24 |
25 | self._control_instance = MainController()
26 |
27 | def pose_callback(self, msg):
28 | # TODO: change to spin model
29 | control_msg = self._control_instance.run_step(msg)
30 | self._command_publisher.publish(control_msg)
31 |
32 | def decision_callback(self, msg):
33 | self._control_instance.update_decision(msg)
34 |
35 | if __name__ == "__main__":
36 | rospy.init_node("latlon_controller", log_level=rospy.DEBUG)
37 | node = ControllerNode()
38 | rospy.spin()
39 |
--------------------------------------------------------------------------------
/src/control/latlon_controllers/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_control_latlon_controllers
3 | 0.1.0
4 |
5 | Controllers based on latitudinal(steer) and longitudinal(throttle) behavior
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 |
17 |
18 |
--------------------------------------------------------------------------------
/src/control/latlon_controllers/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_control_latlon_controllers'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
--------------------------------------------------------------------------------
/src/control/latlon_controllers/src/zzz_control_latlon_controllers/__init__.py:
--------------------------------------------------------------------------------
1 | from .main import MainController
2 |
--------------------------------------------------------------------------------
/src/control/latlon_controllers/src/zzz_control_latlon_controllers/longitudinal.py:
--------------------------------------------------------------------------------
1 | from collections import deque
2 | import numpy as np
3 |
4 | class PIDLongitudinalController():
5 | """
6 | PIDLongitudinalController implements longitudinal control using a PID.
7 | """
8 |
9 | def __init__(self):
10 | """
11 | Initialize the controller. Gains are currently hard-coded:
12 | K_P: proportional term
13 | K_D: derivative term
14 | K_I: integral term
15 | dt: control time step in seconds
16 | """
17 | self._K_P = 0.25/3.6
18 | self._K_D = 0.01
19 | self._K_I = 0.012
20 | self._dt = 0.05 # TODO: timestep
21 | self._integ = 0.0
22 | self._e_buffer = deque(maxlen=30)
23 |
24 | def run_step(self, target_speed, current_speed, debug=False):
25 | """
26 | Execute one step of longitudinal control to reach a given target speed.
27 |
28 | target_speed: target speed in m/s
29 | return: normalized acceleration command in the range [-1, 1]
30 | """
31 |
32 | return self._pid_control(target_speed, current_speed)
33 |
34 | def _pid_control(self, target_speed, current_speed):
35 | """
36 | Estimate the throttle of the vehicle based on the PID equations
37 |
38 | :param target_speed: target speed in m/s (converted to km/h internally)
39 | :param current_speed: current speed of the vehicle in m/s (converted to km/h internally)
40 | :return: normalized acceleration command in the range [-1, 1]
41 | """
42 |
43 | target_speed = target_speed*3.6
44 | current_speed = current_speed*3.6
45 |
46 | _e = (target_speed - current_speed)
47 | self._integ += _e * self._dt
48 | self._e_buffer.append(_e)
49 |
50 | if current_speed < 2:
51 | self._integ = 0
52 |
53 | if len(self._e_buffer) >= 2:
54 | _de = (self._e_buffer[-1] - self._e_buffer[-2]) / self._dt
55 | _ie = self._integ
56 |
57 | else:
58 | _de = 0.0
59 | _ie = 0.0
60 |
61 | kp = self._K_P
62 | ki = self._K_I
63 | kd = self._K_D
64 |
65 | if target_speed < 5:
66 | ki = 0
67 | kd = 0
68 |
69 | calculate_value = np.clip((kp * _e) + (kd * _de) + (ki * _ie), -1.0, 1.0)
70 | return calculate_value
71 |
--------------------------------------------------------------------------------
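A small usage sketch of the controller above, with speeds in m/s as passed in by `MainController.run_step`; the numbers are made up:

```python
# Standalone illustration of PIDLongitudinalController (values are made up)
from zzz_control_latlon_controllers.longitudinal import PIDLongitudinalController

controller = PIDLongitudinalController()

target_speed = 10.0   # m/s, e.g. from DecisionTrajectory.desired_speed
current_speed = 8.0   # m/s, e.g. from get_speed(ego_state)

# One control step per pose update; the output is clipped to [-1, 1]
accel_cmd = controller.run_step(target_speed, current_speed)
print("normalized acceleration command: %.3f" % accel_cmd)
```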
/src/control/latlon_controllers/src/zzz_control_latlon_controllers/main.py:
--------------------------------------------------------------------------------
1 |
2 | import rospy
3 | import tf
4 | import numpy as np
5 |
6 | from zzz_driver_msgs.utils import get_speed
7 | from zzz_control_msgs.msg import ControlCommand
8 | from zzz_control_latlon_controllers.lateral import PurePersuitLateralController
9 | from zzz_control_latlon_controllers.longitudinal import PIDLongitudinalController
10 |
11 | class MainController():
12 | """
13 | MainController is the combination of a PID controller for longitudinal control and a pure pursuit controller for lateral control
14 | to perform the low-level control of a vehicle from the client side
15 | """
16 |
17 | def __init__(self, lon_controller=None, lat_controller=None):
18 | self._lon_controller = lon_controller if lon_controller is not None else PIDLongitudinalController()
19 | self._lat_controller = lat_controller if lat_controller is not None else PurePersuitLateralController()
20 | self.ego_state = None
21 | self.desired_trajectory = None
22 | self.desired_speed = 30.0
23 |
24 | def update_decision(self, decision):
25 | self.desired_trajectory = decision.trajectory
26 | self.desired_speed = decision.desired_speed
27 |
28 | def ready_for_control(self, short_distance_thres = 5):
29 | if self.desired_trajectory is None or len(self.desired_trajectory.poses) == 0:
30 | rospy.logdebug("Haven't recevied trajectory")
31 | return False
32 |
33 | last_loc = np.array([self.desired_trajectory.poses[-1].pose.position.x, self.desired_trajectory.poses[-1].pose.position.y])
34 | ego_loc = np.array([self.ego_state.pose.pose.position.x, self.ego_state.pose.pose.position.y])
35 | d = np.linalg.norm(ego_loc - last_loc)
36 |
37 | if d < short_distance_thres:
38 | rospy.loginfo("Vehicle stopped since it reached target point (dist:%.3f)", d)
39 | return False
40 |
41 | return True
42 |
43 | def run_step(self, msg):
44 | """
45 | Execute one step of control invoking both lateral and longitudinal PID controllers to track a trajectory
46 | at a given target_speed.
47 | return: control
48 | """
49 |
50 | self.ego_state = msg.state
51 | rospy.logdebug("received target speed:%f, current_speed: %f", self.desired_speed, get_speed(self.ego_state))
52 |
53 | if not self.ready_for_control():
54 | control_msg = ControlCommand()
55 | control_msg.accel = -1
56 | control_msg.steer = 0
57 |
58 | return control_msg
59 |
60 | target_speed = self.desired_speed
61 | trajectory = self.desired_trajectory
62 | current_speed = get_speed(self.ego_state)
63 | ego_pose = self.ego_state.pose.pose
64 |
65 | accel = self._lon_controller.run_step(target_speed, current_speed)
66 | steer = self._lat_controller.run_step(ego_pose, trajectory, current_speed)
67 |
68 | rospy.logdebug("accel = %f, steer = %f", accel, steer)
69 |
70 | control_msg = ControlCommand()
71 | control_msg.accel = accel
72 | control_msg.steer = steer
73 |
74 | return control_msg
75 |
--------------------------------------------------------------------------------
/src/control/protocol/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_control_msgs)
3 |
4 | find_package(catkin REQUIRED genmsg std_msgs # essential dependency
5 | )
6 |
7 | add_message_files(DIRECTORY msg FILES
8 | AuxiliaryCommand.msg
9 | ControlCommand.msg)
10 |
11 | generate_messages(DEPENDENCIES std_msgs)
12 | catkin_package(CATKIN_DEPENDS std_msgs)
13 |
--------------------------------------------------------------------------------
/src/control/protocol/msg/AuxiliaryCommand.msg:
--------------------------------------------------------------------------------
1 | # Command to control vehicle accessories
2 |
3 | # Turning signal output
4 | uint8 blinker_flag
5 | uint8 BLINKER_OFF=0
6 | uint8 BLINKER_LEFT=1
7 | uint8 BLINKER_RIGHT=2
8 |
9 | # Wiper switch
10 | uint8 wiper
11 | uint8 WIPER_OFF = 0
12 | uint8 WIPER_CLEAN = 1
13 | uint8 WIPER_LOW = 2
14 | uint8 WIPER_HIGH = 3
15 |
16 | # Head light output
17 | uint8 headlight
18 | uint8 HEADLIGHT_OFF = 0
19 | uint8 HEADLIGHT_LOW_BEAM = 1
20 | uint8 HEADLIGHT_HIGH_BEAM = 2
21 |
--------------------------------------------------------------------------------
/src/control/protocol/msg/ControlCommand.msg:
--------------------------------------------------------------------------------
1 | # Universal vehicle dynamics command output. This message is sent at high frequency
2 |
3 | Header header
4 |
5 | # TODO(zyxin): should these params be percentage or physical values?
6 | # Level of acceleration, unit in m/s^2, throttle is positive, braking is negative
7 | float32 accel
8 | # Level of steering on front wheel, unit in radian, left turning is positive
9 | float32 steer
10 |
11 | # Gear shift: positive values are manual gear numbers, negative values are automatic transmission states
12 | int8 gear
13 | int8 GEAR_NONE = 0
14 | int8 GEAR_NEUTRAL = -1
15 | int8 GEAR_REVERSE = -2
16 | int8 GEAR_PARKING = -3
17 | int8 GEAR_DRIVE = -4
18 |
19 | # Parking brake switch
20 | bool parking_brake
21 |
--------------------------------------------------------------------------------
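A short sketch of publishing this command from Python; the topic name is the default of the `pure_persuit` controller node and the values are illustrative:

```python
#!/usr/bin/env python
# Illustrative publisher for zzz_control_msgs/ControlCommand
import rospy
from zzz_control_msgs.msg import ControlCommand

if __name__ == "__main__":
    rospy.init_node("control_command_example")
    pub = rospy.Publisher("command", ControlCommand, queue_size=1)

    cmd = ControlCommand()
    cmd.accel = 0.5                       # m/s^2, positive = throttle
    cmd.steer = -0.05                     # rad on front wheel, positive = left
    cmd.gear = ControlCommand.GEAR_DRIVE  # negative constants are automatic gears
    cmd.parking_brake = False

    rate = rospy.Rate(20)                 # the command is sent at high frequency
    while not rospy.is_shutdown():
        cmd.header.stamp = rospy.Time.now()
        pub.publish(cmd)
        rate.sleep()
```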
/src/control/protocol/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_control_msgs
3 | 0.1.0
4 |
5 | Standard message definitions for ZZZ controller module
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | message_generation
17 | std_msgs
18 | message_runtime
19 | std_msgs
20 |
21 |
--------------------------------------------------------------------------------
/src/driver/README.md:
--------------------------------------------------------------------------------
1 | This module contains packages for various drivers and adapters
2 |
3 | - **Actuators**: Connect control command to actual vehicle
4 | - **Datasets**: Convert datasets into the system
5 | - **Sensors**: Convert sensor data into ROS message
6 | - **Simulators**: Provide drivers for simulators
7 |
--------------------------------------------------------------------------------
/src/driver/actuators/dataspeed_adapter/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_driver_actuators_dataspeed_adapter)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/convert_mkz_command
11 | nodes/convert_mkz_report
12 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
13 | )
14 |
--------------------------------------------------------------------------------
/src/driver/actuators/dataspeed_adapter/README.md:
--------------------------------------------------------------------------------
1 | https://bitbucket.org/DataspeedInc/dbw_mkz_ros/src/default/
2 |
3 |
4 | XXX_map_command: command value at a conversion map point
5 | XXX_map_actuator: actuator value at a conversion map point
6 |
--------------------------------------------------------------------------------
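As a numeric illustration of the conversion maps mentioned above (the actual map values are vehicle-specific and must be calibrated; the range 0.15..0.80 here is made up): with `throttle_map_command = [0, 1]` and `throttle_map_actuator = [0.15, 0.80]`, a normalized command of 0.5 is linearly interpolated to a pedal value of 0.475, which is exactly what `np.interp` does inside `mkz.py`:

```python
import numpy as np

# Hypothetical calibration: command 0..1 maps linearly onto pedal range 0.15..0.80
throttle_map_command = [0.0, 1.0]
throttle_map_actuator = [0.15, 0.80]

cmd = 0.5
pedal = np.interp(cmd, throttle_map_command, throttle_map_actuator)
print(pedal)  # 0.475 = 0.15 + 0.5 * (0.80 - 0.15)
```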
/src/driver/actuators/dataspeed_adapter/nodes/convert_mkz_command:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys
4 | import rospy
5 |
6 | from zzz_common.params import parse_private_args
7 | from zzz_control_msgs.msg import ControlCommand, AuxiliaryCommand
8 | from zzz_driver_actuators_dataspeed_adapter.mkz import \
9 | to_mkz_throttle, to_mkz_brake, to_mkz_steering, to_mkz_gear, to_mkz_turn_signal
10 | from dbw_mkz_msgs.msg import ThrottleCmd, BrakeCmd, SteeringCmd, GearCmd, TurnSignalCmd
11 |
12 | def convert_ControlCommand(msg, args):
13 | assert type(msg) == ControlCommand
14 | pubs, map_cmd, map_act = args
15 |
16 | cmd = to_mkz_throttle(msg, map_cmd['throttle'], map_act['throttle'])
17 | if cmd is not None:
18 | pubs[0].publish(cmd)
19 | else:
20 | cmd = to_mkz_brake(msg, map_cmd['brake'], map_act['brake'])
21 | pubs[1].publish(cmd)
22 |
23 | cmd = to_mkz_steering(msg, map_cmd['steer'], map_act['steer'])
24 | pubs[2].publish(cmd)
25 |
26 | cmd = to_mkz_gear(msg)
27 | pubs[3].publish(cmd)
28 |
29 | def convert_AuxiliaryCommand(msg, pubs):
30 | assert type(msg) == AuxiliaryCommand
31 |
32 | cmd = to_mkz_turn_signal(msg)
33 | pubs[4].publish(cmd)
34 |
35 | if __name__ == "__main__":
36 | rospy.init_node("convert_command")
37 |
38 | # See https://bitbucket.org/DataspeedInc/dbw_mkz_ros/src/default/dbw_mkz_can/src/DbwNode.cpp
39 | params = parse_private_args(
40 | input_control_topic="/zzz/control/command",
41 | input_auxiliary_topic="/zzz/control/aux_command",
42 | output_throttle_topic="throttle_cmd",
43 | output_brake_topic="brake_cmd",
44 | output_steering_topic="steering_cmd",
45 | output_gear_topic="gear_cmd",
46 | output_turn_signal_topic="turn_signal_cmd",
47 | throttle_map_command=[0,1],
48 | throttle_map_actuator=[0,1],
49 | brake_map_command=[0,1],
50 | brake_map_actuator=[0,1],
51 | steer_map_command=[-1,1],
52 | steer_map_actuator=[-1,1],
53 | )
54 |
55 | map_cmd = dict(
56 | throttle=params.throttle_map_command,
57 | brake=params.brake_map_command,
58 | steer=params.steer_map_command
59 | )
60 | map_act = dict(
61 | throttle=params.throttle_map_actuator,
62 | brake=params.brake_map_actuator,
63 | steer=params.steer_map_actuator
64 | )
65 |
66 | pubs = [
67 | rospy.Publisher(params.output_throttle_topic, ThrottleCmd, queue_size=1, latch=True),
68 | rospy.Publisher(params.output_brake_topic, BrakeCmd, queue_size=1, latch=True),
69 | rospy.Publisher(params.output_steering_topic, SteeringCmd, queue_size=1, latch=True),
70 | rospy.Publisher(params.output_gear_topic, GearCmd, queue_size=1, latch=True),
71 | rospy.Publisher(params.output_turn_signal_topic, TurnSignalCmd, queue_size=1, latch=True)
72 | ]
73 | rospy.Subscriber(params.input_control_topic, ControlCommand, convert_ControlCommand, (pubs, map_cmd, map_act))
74 | rospy.Subscriber(params.input_auxiliary_topic, AuxiliaryCommand, convert_AuxiliaryCommand, pubs)
75 |
76 | rospy.spin()
77 |
--------------------------------------------------------------------------------
/src/driver/actuators/dataspeed_adapter/nodes/convert_mkz_report:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys
4 | import rospy
5 |
6 | from zzz_common.params import parse_private_args
7 | from zzz_driver_msgs.msg import ChassisReport, ControlReport
8 | from dbw_mkz_msgs.msg import ThrottleReport, BrakeReport, SteeringReport, GearReport, TurnSignalReport
9 |
10 | if __name__ == "__main__":
11 | rospy.init_node("convert_command")
12 |
13 | # See https://bitbucket.org/DataspeedInc/dbw_mkz_ros/src/default/dbw_mkz_can/src/DbwNode.cpp
14 | params = parse_private_args(
15 | input_throttle_topic="throttle_report",
16 | input_brake_topic="brake_report",
17 | input_steering_topic="steering_report",
18 | input_misc_topic="misc_1_report",
19 | input_wheel_topic="wheel_speed_report",
20 | input_fuel_topic="fuel_level_report",
21 | input_surround_topic="surround_report",
22 | input_driver_assist_topic="driver_assist_report",
23 | output_chassis_topic="/zzz/drivers/chassis_report",
24 | output_control_topic="/zzz/drivers/control_report",
25 | output_auxiliary_topic="/zzz/drivers/aux_report",
26 | output_resource_topic="/zzz/drivers/res_report",
27 | )
28 |
29 | # TODO: register subscriber, create buffer and publish in loop
30 | rospy.spin()
31 |
--------------------------------------------------------------------------------
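The node above is still a stub (see the TODO). A minimal sketch of the intended pattern, subscribing to one of the Dataspeed reports, buffering the latest value and publishing a ZZZ report in a loop, could look like the following; the field mapping is an assumption for illustration and not the repository's implementation:

```python
# Sketch only: buffer the latest Dataspeed steering report and republish as a ZZZ report
import rospy
from dbw_mkz_msgs.msg import SteeringReport
from zzz_driver_msgs.msg import ControlReport

class ReportBuffer(object):
    def __init__(self):
        self._last_steering = None
        rospy.Subscriber("steering_report", SteeringReport, self._steering_callback)
        self._pub = rospy.Publisher("/zzz/drivers/control_report", ControlReport, queue_size=1)

    def _steering_callback(self, msg):
        self._last_steering = msg

    def spin(self, rate_hz=50):
        rate = rospy.Rate(rate_hz)
        while not rospy.is_shutdown():
            if self._last_steering is not None:
                report = ControlReport()
                # Hypothetical field mapping; check the actual ControlReport
                # layout in zzz_driver_msgs before using this
                report.steer = self._last_steering.steering_wheel_angle
                self._pub.publish(report)
            rate.sleep()

if __name__ == "__main__":
    rospy.init_node("convert_report")
    ReportBuffer().spin()
```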
/src/driver/actuators/dataspeed_adapter/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_driver_actuators_dataspeed_adapter
3 | 0.1.0
4 |
5 | Adapt output to Dataspeed
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | std_msgs
18 | dbw_mkz_msgs
19 |
20 |
21 |
--------------------------------------------------------------------------------
/src/driver/actuators/dataspeed_adapter/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_driver_actuators_dataspeed_adapter'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
12 |
--------------------------------------------------------------------------------
/src/driver/actuators/dataspeed_adapter/src/zzz_driver_actuators_dataspeed_adapter/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/driver/actuators/dataspeed_adapter/src/zzz_driver_actuators_dataspeed_adapter/__init__.py
--------------------------------------------------------------------------------
/src/driver/actuators/dataspeed_adapter/src/zzz_driver_actuators_dataspeed_adapter/mkz.py:
--------------------------------------------------------------------------------
1 | import rospy
2 | import numpy as np
3 |
4 | from zzz_control_msgs.msg import ControlCommand, AuxiliaryCommand
5 | from dbw_mkz_msgs.msg import ThrottleCmd, BrakeCmd, SteeringCmd, GearCmd, TurnSignalCmd,\
6 | Gear, TurnSignal
7 |
8 | def to_mkz_throttle(cmd, map_command, map_actuator):
9 | assert type(cmd) == ControlCommand
10 |
11 | if cmd.accel >= 0:
12 | new_cmd = ThrottleCmd()
13 | new_cmd.enable = True
14 | new_cmd.pedal_cmd = np.interp(cmd.accel, map_command, map_actuator)
15 | new_cmd.pedal_cmd_type = ThrottleCmd.CMD_PERCENT
16 | return new_cmd
17 | else:
18 | return None
19 |
20 | def to_mkz_brake(cmd, map_command, map_actuator):
21 | assert type(cmd) == ControlCommand
22 |
23 | if cmd.accel < 0:
24 | new_cmd = BrakeCmd()
25 | new_cmd.enable = True
26 | new_cmd.pedal_cmd = np.interp(-cmd.accel, map_command, map_actuator)
27 | new_cmd.pedal_cmd_type = BrakeCmd.CMD_DECEL
28 | return new_cmd
29 | else:
30 | return None
31 |
32 | def to_mkz_steering(cmd, map_command, map_actuator):
33 | assert type(cmd) == ControlCommand
34 |
35 | new_cmd = SteeringCmd()
36 | new_cmd.enable = True
37 | new_cmd.steering_wheel_angle_cmd = np.interp(cmd.steer, map_command, map_actuator)
38 | new_cmd.cmd_type = SteeringCmd.CMD_ANGLE
39 | return new_cmd
40 |
41 | def to_mkz_gear(cmd):
42 | assert type(cmd) == ControlCommand
43 |
44 | new_cmd = GearCmd()
45 | new_cmd.enable = True
46 | if cmd.gear > 0:
47 | new_cmd.cmd.gear = Gear.DRIVE
48 | elif cmd.gear == ControlCommand.GEAR_NONE:
49 | new_cmd.cmd.gear = Gear.NONE
50 | elif cmd.gear == ControlCommand.GEAR_PARKING:
51 | new_cmd.cmd.gear = Gear.PARK
52 | elif cmd.gear == ControlCommand.GEAR_REVERSE:
53 | new_cmd.cmd.gear = Gear.REVERSE
54 | elif cmd.gear == ControlCommand.GEAR_NEUTRAL:
55 | new_cmd.cmd.gear = Gear.NEUTRAL
56 | elif cmd.gear == ControlCommand.GEAR_DRIVE:
57 | new_cmd.cmd.gear = Gear.DRIVE
58 | else:
59 | rospy.logerr("Incorrect gear command value!")
60 | return None
61 |
62 | return new_cmd
63 |
64 | def to_mkz_turn_signal(cmd, pubs):
65 | assert type(cmd) == AuxiliaryCommand
66 |
67 | new_cmd = TurnSignalCmd()
68 | if cmd.blinker_flag == AuxiliaryCommand.BLINKER_OFF:
69 | new_cmd.cmd = TurnSignal.NONE
70 | elif cmd.blinker_flag == AuxiliaryCommand.BLINKER_LEFT:
71 | new_cmd.cmd = TurnSignal.LEFT
72 | elif cmd.blinker_flag == AuxiliaryCommand.BLINKER_RIGHT:
73 | new_cmd.cmd = TurnSignal.RIGHT
74 | elif cmd.blinker_flag == AuxiliaryCommand.BLINKER_LEFT | AuxiliaryCommand.BLINKER_RIGHT:
75 | new_cmd.cmd = TurnSignal.LEFT | TurnSignal.RIGHT
76 | else:
77 | rospy.logerr("Incorrect turning signal command value!")
78 | return None
79 |
80 | return new_cmd
81 |
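The converters above all rely on the same linear rescaling from the command range to the actuator range via `np.interp`. A minimal sketch of that rescaling with plain numbers (the `[0, 1]` and `[0, 0.8]` ranges are illustrative, not values taken from this repository):

```python
import numpy as np

map_command = [0.0, 1.0]   # assumed normalized throttle command range
map_actuator = [0.0, 0.8]  # assumed pedal range accepted by the actuator

accel_cmd = 0.5
pedal_cmd = np.interp(accel_cmd, map_command, map_actuator)
print(pedal_cmd)  # 0.4 -- halfway through the command range maps to halfway through the actuator range
```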
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_driver_datasets_benchmark)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/tracking
11 | nodes/decision
12 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
13 | )
14 |
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/REAMDE.md:
--------------------------------------------------------------------------------
1 | # Reference
2 | Tracking: https://github.com/xinshuoweng/AB3DMOT
3 | ```tex
4 | @article{bernardin2008evaluating,
5 | title={Evaluating multiple object tracking performance: the CLEAR MOT metrics},
6 | author={Bernardin, Keni and Stiefelhagen, Rainer},
7 | journal={Journal on Image and Video Processing},
8 | volume={2008},
9 | pages={1},
10 | year={2008},
11 | publisher={Hindawi Publishing Corp.}
12 | }
13 | @article{Weng2019_3dmot,
14 | archivePrefix = {arXiv},
15 | arxivId = {1907.03961},
16 | author = {Weng, Xinshuo and Kitani, Kris},
17 | eprint = {1907.03961},
18 | journal = {arXiv:1907.03961},
19 | title = {{A Baseline for 3D Multi-Object Tracking}},
20 | url = {https://arxiv.org/pdf/1907.03961.pdf},
21 | year = {2019}
22 | }
23 | ```
24 |
25 | Odometry: https://github.com/MichaelGrupp/evo
26 |
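The tracking benchmark referenced above is scored with the CLEAR MOT metrics. As a reminder of what such a benchmark accumulates, here is a minimal sketch of the MOTA summary computed from per-frame counts (the variable names are illustrative and not the internal fields of `TrackingBenchmark`):

```python
def mota(per_frame_counts):
    """per_frame_counts: iterable of (misses, false_positives, id_switches, num_ground_truth)."""
    fn = fp = idsw = gt = 0
    for frame_fn, frame_fp, frame_idsw, frame_gt in per_frame_counts:
        fn += frame_fn
        fp += frame_fp
        idsw += frame_idsw
        gt += frame_gt
    # MOTA = 1 - (misses + false positives + identity switches) / total ground-truth objects
    return 1.0 - float(fn + fp + idsw) / gt

print(mota([(1, 0, 0, 10), (0, 2, 1, 10)]))  # 0.8
```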
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/nodes/decision:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import json
4 |
5 | import rospy
6 | from zzz_common.params import parse_private_args
7 | from message_filters import ApproximateTimeSynchronizer, Subscriber
8 |
9 | from zzz_control_msgs.msg import ControlCommand
10 | from zzz_cognition_msgs.msg import MapState
11 | from zzz_perception_msgs.msg import TrackingBoxArray
12 | from zzz_driver_datasets_benchmark.decision import DecisionBenchmark
13 |
14 | class DecisionBenchmarkNode(object):
15 | def __init__(self):
16 | params = parse_private_args(
17 | command_with_ground_truth_topic="/zzz/control/command_true",
18 | command_with_perception_topic="/zzz/control/command",
19 | dynamic_map_with_ground_truth_topic="/zzz/cognition/local_dynamic_map/map_with_ref_true",
20 | dynamic_map_with_perception_topic="/zzz/cognition/local_dynamic_map/map_with_ref",
21 | acc_err_topic="/zzz/benchmark/acc_err", # TODO: move benchmark to top level module (it contains corresponding message)
22 | steer_err_topic="/zzz/benchmark/steer_err",
23 | )
24 |
25 | self._benchmark = DecisionBenchmark()
26 | self._synced_subscriber = ApproximateTimeSynchronizer([
27 | Subscriber(params.command_with_ground_truth_topic, ControlCommand),
28 | Subscriber(params.command_with_perception_topic, ControlCommand),
29 | Subscriber(params.dynamic_map_with_ground_truth_topic, MapState),
30 | Subscriber(params.dynamic_map_with_perception_topic, MapState),
31 | ], 5, 0.1)
32 | self._synced_subscriber.registerCallback(self.decision_callback)
33 |
34 | def decision_callback(self, gtcmd, percmd, gtdmap, perdmap):
35 | rospy.logdebug("Received synced result")
36 | self._benchmark.add_frame(gtcmd, percmd, gtdmap, perdmap)
37 |
38 | def save_results(self):
39 | with open("decision_results.json", "w") as jout:
40 | json.dump(dict(report=self._benchmark.report(), desc=self._benchmark.doc()),
41 | jout, sort_keys=True)
42 |
43 | if __name__ == "__main__":
44 |
45 | rospy.init_node("decision", log_level=rospy.DEBUG)
46 | node = DecisionBenchmarkNode()
47 | rospy.on_shutdown(node.save_results)
48 | rospy.spin()
49 |
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/nodes/tracking:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import json
4 |
5 | import rospy
6 | from zzz_common.params import parse_private_args
7 | from message_filters import ApproximateTimeSynchronizer, Subscriber
8 |
9 | from zzz_perception_msgs.msg import TrackingBoxArray
10 | from zzz_driver_datasets_benchmark.tracking import TrackingBenchmark
11 |
12 | class TrackingBenchmarkNode(object):
13 | def __init__(self):
14 | params = parse_private_args(
15 | ground_truth_topic="/kitti/tracklets",
16 | result_topic="/zzz/perception/objects_tracked"
17 | )
18 |
19 | self._benchmark = TrackingBenchmark(max_cost=4)
20 | self._synced_subscriber = ApproximateTimeSynchronizer([
21 | Subscriber(params.ground_truth_topic, TrackingBoxArray),
22 | Subscriber(params.result_topic, TrackingBoxArray)
23 | ], 5, 0.1)
24 | self._synced_subscriber.registerCallback(self.boxes_callback)
25 |
26 | def boxes_callback(self, gtbox, trbox):
27 | rospy.logdebug("Received synced result")
28 | self._benchmark.add_frame(gtbox, trbox, cost_type="euclidean")
29 |
30 | def save_results(self):
31 | with open("tracking_results.json", "w") as jout:
32 | json.dump(dict(report=self._benchmark.report(), desc=self._benchmark.doc()),
33 | jout, sort_keys=True)
34 |
35 | if __name__ == "__main__":
36 |
37 | rospy.init_node("tracking", log_level=rospy.DEBUG)
38 | node = TrackingBenchmarkNode()
39 | rospy.on_shutdown(node.save_results)
40 | rospy.spin()
41 |
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_driver_datasets_benchmark
3 | 0.1.0
4 |
5 | Tools for dataset benchmarking
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 |
17 |
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_driver_datasets_benchmark'],
8 | package_dir={'': 'src'},
9 | install_requires=['tqdm']
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/src/zzz_driver_datasets_benchmark/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/driver/datasets/benchmark/src/zzz_driver_datasets_benchmark/__init__.py
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/src/zzz_driver_datasets_benchmark/detection.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | class DetectionBenchmark:
4 | """
5 | Benchmark results targeting the KITTI detection and KITTI raw datasets.
6 | """
7 | def __init__(self, min_overlap=0.5, max_truncation=0, min_height=25,
8 | max_occlusion=2, include_tags=None, exclude_tags=None):
9 | pass
10 |
11 | def add_frame(self, result, gt):
12 | '''
13 | Add results from one frame. The result and gt arguments are expected to be of type zzz_perception_msgs.msg.DetectionBoxArray
14 | '''
15 | pass
16 |
17 | def __del__(self):
18 | pass
19 |
--------------------------------------------------------------------------------
/src/driver/datasets/benchmark/src/zzz_driver_datasets_benchmark/odometry.py:
--------------------------------------------------------------------------------
1 | # TODO: Implement.
2 |
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_driver_datasets_kitti)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | scripts/convert_bag
11 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
12 | )
13 |
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/LICENSE.md:
--------------------------------------------------------------------------------
1 | # License for [pykitti](https://github.com/utiasSTARS/pykitti)
2 |
3 | ```
4 | The MIT License (MIT)
5 |
6 | Copyright (c) 2016 Lee Clement
7 |
8 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
11 |
12 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
13 | ```
14 |
15 | # License for [kitti2bag](https://github.com/tomas789/kitti2bag)
16 |
17 | ```
18 | MIT License
19 |
20 | Copyright (c) 2016 Tomas Krejci
21 |
22 | Permission is hereby granted, free of charge, to any person obtaining a copy
23 | of this software and associated documentation files (the "Software"), to deal
24 | in the Software without restriction, including without limitation the rights
25 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
26 | copies of the Software, and to permit persons to whom the Software is
27 | furnished to do so, subject to the following conditions:
28 |
29 | The above copyright notice and this permission notice shall be included in all
30 | copies or substantial portions of the Software.
31 |
32 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
33 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
34 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
35 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
36 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
37 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
38 | SOFTWARE.
39 | ```
40 |
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/README.md:
--------------------------------------------------------------------------------
1 | # KITTI Driver
2 |
3 | This package is based on [pykitti](https://github.com/utiasSTARS/pykitti) @ [d3e1bb](https://github.com/utiasSTARS/pykitti/tree/d3e1bb81676e831886726cc5ed79ce1f049aef2c) and [kitti2bag](https://github.com/tomas789/kitti2bag) @ [75cd26](https://github.com/tomas789/kitti2bag/tree/75cd2629f23ac073a91384c4af1b88f3272c3b24)
4 |
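Since the loaders in this package follow pykitti, a minimal sketch of the upstream pykitti raw-data loader may help orient the reader; the base directory, date and drive IDs are placeholders, the wrapper classes in this package may expose a different interface, and API details vary slightly between pykitti versions:

```python
import pykitti

basedir = "/data/kitti/raw"                        # placeholder path to the extracted KITTI raw data
data = pykitti.raw(basedir, "2011_09_26", "0005")  # placeholder date and drive

first_oxts = data.oxts[0]      # GPS/IMU packet (plus pose) for the first frame
first_velo = data.get_velo(0)  # Nx4 numpy array of lidar points (x, y, z, reflectance)
first_cam2 = data.get_cam2(0)  # left color camera image
```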
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/TODO.md:
--------------------------------------------------------------------------------
1 | - [ ] Add support for odometry dataset
2 | - [ ] Add support for object dataset
3 | - [ ] Add support for tracking dataset
4 | - [x] Add support for labels and raw tracklets (New ros message may be needed)
5 | - [ ] Add support for direct publishing
6 | - [ ] Move functionality from `autoware/ros/src/util/packages/kitti_pkg`
7 |
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_driver_datasets_kitti
3 | 0.1.0
4 |
5 | Driver for dataset input from kitti
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | python-tqdm
17 |
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_driver_datasets_kitti'],
8 | package_dir={'': 'src'},
9 | install_requires=['tqdm']
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/src/zzz_driver_datasets_kitti/__init__.py:
--------------------------------------------------------------------------------
1 | """Driver for loading KITTI data."""
2 |
3 | from zzz_driver_datasets_kitti.raw import RawDataset
4 | from zzz_driver_datasets_kitti.odometry import OdometryDataset
5 | from zzz_driver_datasets_kitti.enums import *
6 |
--------------------------------------------------------------------------------
/src/driver/datasets/kitti/src/zzz_driver_datasets_kitti/enums.py:
--------------------------------------------------------------------------------
1 |
2 | __all__ = ['LABELS_MAP', 'POSE_STATES', 'OCCLUSION_STATES', 'TRUNCATION_STATES']
3 |
4 | LABELS_MAP = {
5 | 'Car': 1,
6 | 'Van': 2,
7 | 'Truck': 3,
8 | 'Pedestrian': 4,
9 | 'Person (sitting)': 5,
10 | 'Cyclist': 6,
11 | 'Tram': 7,
12 | 'Misc': 8,
13 | }
14 |
15 | POSE_STATES = {
16 | 0: "UNSET" ,
17 | 1: "INTERP" ,
18 | 2: "LABELED" ,
19 | }
20 |
21 | OCCLUSION_STATES = {
22 | -1: "OCCLUSION_UNSET" ,
23 | 0 : "VISIBLE" ,
24 | 1 : "PARTLY" ,
25 | 2 : "FULLY" ,
26 | }
27 |
28 | TRUNCATION_STATES = {
29 | -1: "TRUNCATION_UNSET" ,
30 | 0 : "IN_IMAGE" ,
31 | 1 : "TRUNCATED" ,
32 | 2 : "OUT_IMAGE" ,
33 | 99: "BEHIND_IMAGE" ,
34 | }
35 |
--------------------------------------------------------------------------------
/src/driver/platforms/autoware_adapter/.placeholder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/driver/platforms/autoware_adapter/.placeholder
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_driver_platforms_mcity_adapter)
3 |
4 | find_package(catkin REQUIRED genmsg std_msgs # essential dependency
5 | geometry_msgs
6 | )
7 | catkin_package()
8 |
9 | add_message_files(DIRECTORY msg FILES
10 | BSM.msg
11 | BSMVec.msg
12 | Control.msg
13 | SPaT.msg
14 | SPaTVec.msg
15 | VehicleState.msg
16 | )
17 |
18 | catkin_install_python(PROGRAMS
19 | nodes/convert_topic
20 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
21 | )
22 |
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/msg/BSM.msg:
--------------------------------------------------------------------------------
1 |
2 | float64 timestamp
3 | int32 id
4 | int16 type
5 | float64 size_x
6 | float64 size_y
7 | float64 size_z
8 |
9 | float64 heading
10 | float64 speed_x
11 | float64 speed_y
12 |
13 | float64 x
14 | float64 y
15 | float64 z
16 |
17 | float64 longitude
18 | float64 latitude
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/msg/BSMVec.msg:
--------------------------------------------------------------------------------
1 |
2 | float64 timestamp
3 | int32 sensor_id
4 | int32 len
5 | BSM[] bsmVec
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/msg/Control.msg:
--------------------------------------------------------------------------------
1 |
2 | float64 timestamp
3 | int32 count
4 | float64 brake_cmd
5 | float64 throttle_cmd
6 | float64 steering_cmd
7 | int16 gear_cmd
8 | int16 turn_signal_cmd
9 |
10 |
11 |
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/msg/SPaT.msg:
--------------------------------------------------------------------------------
1 |
2 | float64 timestamp
3 | int32 id
4 | int16[8] state
5 | float32[8] remain_time
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/msg/SPaTVec.msg:
--------------------------------------------------------------------------------
1 |
2 | float64 timestamp
3 | int32 sensor_id
4 | int32 len
5 | SPaT[] spatVec
6 |
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/msg/VehicleState.msg:
--------------------------------------------------------------------------------
1 | float64 timestamp
2 |
3 | #RTK state
4 | string RTK_state_string
5 |
6 | int64 RTK_seq_num
7 | int64 RTK_timestamp_sec
8 | int64 RTK_timestamp_nsec
9 |
10 | int16 RTK_gps_status
11 | int16 RTK_gps_service
12 |
13 | #RTK position
14 | float64 RTK_gps_longitude
15 | float64 RTK_gps_latitude
16 | float64 RTK_gps_altitude
17 |
18 | float64 RTK_gps_UTM_x
19 | float64 RTK_gps_UTM_y
20 | float64 RTK_gps_UTM_z
21 |
22 | float64 RTK_imu_quaternion_x
23 | float64 RTK_imu_quaternion_y
24 | float64 RTK_imu_quaternion_z
25 | float64 RTK_imu_quaternion_w
26 |
27 | float64 RTK_heading
28 | float64 RTK_attitude
29 | float64 RTK_bank
30 |
31 | #RTK ENU speed
32 | float64 RTK_linear_ENU_vx
33 | float64 RTK_linear_ENU_vy
34 | float64 RTK_linear_ENU_vz
35 |
36 |
37 | #RTK local speed and acc
38 | float64 RTK_gps_twist_linear_vx
39 | float64 RTK_gps_twist_linear_vy
40 | float64 RTK_gps_twist_linear_vz
41 |
42 | float64 RTK_imu_linear_acc_x
43 | float64 RTK_imu_linear_acc_y
44 | float64 RTK_imu_linear_acc_z
45 |
46 | #RTK local angular speed
47 | float64 RTK_gps_twist_angular_vx
48 | float64 RTK_gps_twist_angular_vy
49 | float64 RTK_gps_twist_angular_vz
50 |
51 |
52 |
53 | #key vehicle state
54 | float64 x
55 | float64 y
56 | float64 z
57 |
58 | float64 speed_x
59 | float64 speed_y
60 | float64 speed_z
61 |
62 | float64 acc_x
63 | float64 acc_y
64 | float64 acc_z
65 |
66 | float64 heading
67 | float64 yaw_rate
68 |
69 | #by-wire
70 | bool by_wire_enabled
71 |
72 | float64 throttle_cmd
73 | float64 throttle_input
74 | float64 throttle_state
75 | bool throttle_enabled
76 | bool throttle_override
77 | bool throttle_driver
78 | bool throttle_timeout
79 |
80 |
81 | float64 brake_cmd
82 | float64 brake_input
83 | float64 brake_state
84 | float64 brake_torq_cmd
85 | float64 brake_torq_input
86 | float64 brake_torq_state
87 | bool brake_boo_output
88 | bool brake_enabled
89 | bool brake_override
90 | bool brake_driver
91 | bool brake_timeout
92 |
93 |
94 | float64 steer_cmd
95 | float64 steer_state
96 | float64 steer_torque
97 | bool steer_enabled
98 | bool steer_override
99 | bool steer_timeout
100 |
101 | int16 gear_pos
102 |
103 | float64 wheel_v_front_left
104 | float64 wheel_v_front_right
105 | float64 wheel_v_rear_left
106 | float64 wheel_v_rear_right
--------------------------------------------------------------------------------
/src/driver/platforms/mcity_adapter/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_driver_platforms_mcity_adapter
3 | 0.1.0
4 |
5 | Adapt input and output on MCity OpenCAV platform
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 | Shaobing Xu
11 |
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz
14 | https://github.com/cmpute/zzz/issues
15 |
16 | catkin
17 | roscpp
18 | std_msgs
19 | zzz_driver_actuators_dataspeed_adapter
20 |
21 |
22 |
--------------------------------------------------------------------------------
/src/driver/protocol/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_driver_msgs)
3 |
4 | find_package(catkin REQUIRED genmsg std_msgs # essential dependency
5 | geometry_msgs
6 | )
7 |
8 | catkin_python_setup()
9 |
10 | add_message_files(DIRECTORY msg FILES
11 | AuxiliaryReport.msg
12 | ChassisReport.msg
13 | ControlReport.msg
14 | ResourceReport.msg
15 | RigidBodyState.msg
16 | RigidBodyStateStamped.msg
17 | )
18 |
19 | generate_messages(DEPENDENCIES std_msgs geometry_msgs)
20 | catkin_package(CATKIN_DEPENDS std_msgs geometry_msgs)
21 |
--------------------------------------------------------------------------------
/src/driver/protocol/msg/AuxiliaryReport.msg:
--------------------------------------------------------------------------------
1 | # This message provides an interface for additional vehicle modules, e.g. ACC, BSD
2 |
3 |
4 | # Forward Collision Warning status
5 | uint8 fcw_flag
6 | uint8 FCW_AVAILABLE = 1
7 | uint8 FCW_ALERT = 2
8 |
9 | # Automatic Emergency Brake status
10 | uint8 aeb_flag
11 | uint8 AEB_AVAILABLE = 1
12 | uint8 AEB_PRECHARGE = 2
13 | uint8 AEB_BRAKING = 4
14 |
15 | # Adaptive Cruise Control status
16 | uint8 acc_flag
17 | uint8 ACC_AVAILABLE = 1
18 | uint8 ACC_BRAKING = 2
19 |
20 | # Lane Departure Warning status
21 | uint8 ldw_flag
22 | uint8 LDW_AVAILABLE = 1
23 | uint8 LDW_LEFT_ALERT = 2
24 | uint8 LDW_RIGHT_ALERT = 4
25 |
26 | # Blind Spot Detection status
27 | uint8 bsd_flag
28 | uint8 BSD_AVAILABLE = 1
29 | uint8 BSD_LEFT_ALERT = 2
30 | uint8 BSD_RIGHT_ALERT = 4
31 |
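The flag constants above are powers of two, which suggests they are meant to be combined and tested as bit masks. A short sketch under that assumption:

```python
from zzz_driver_msgs.msg import AuxiliaryReport

report = AuxiliaryReport()
report.ldw_flag = AuxiliaryReport.LDW_AVAILABLE | AuxiliaryReport.LDW_LEFT_ALERT

if report.ldw_flag & AuxiliaryReport.LDW_LEFT_ALERT:
    print("lane departure warning: drifting towards the left boundary")
```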
--------------------------------------------------------------------------------
/src/driver/protocol/msg/ChassisReport.msg:
--------------------------------------------------------------------------------
1 | # Information reported by chassis components
2 |
3 | # Vehicle speed
4 | float32 speed
5 |
6 | # Wheel speed (rad/s)
7 | int32 wheel_speed_fl
8 | int32 wheel_speed_fr
9 | int32 wheel_speed_rl
10 | int32 wheel_speed_rr
11 |
--------------------------------------------------------------------------------
/src/driver/protocol/msg/ControlReport.msg:
--------------------------------------------------------------------------------
1 | # Interface for vehicle control
2 |
3 | # ---------- Throttle ----------
4 | # Throttle control by-wire enabled
5 | bool throttle_available
6 | # Throttle control overridden by the driver
7 | bool throttle_override
8 | # Reported throttle level
9 | float32 throttle_report
10 |
11 | # ---------- Brake ----------
12 | # Braking control by-wire enabled
13 | bool brake_available
14 | # Braking control overridden by the driver
15 | bool brake_override
16 | # Reported braking level
17 | float32 brake_report
18 |
19 | # ---------- Steering ----------
20 | # Steering control by-wire enabled
21 | bool steer_available
22 | # Steering control overridden by the driver
23 | bool steer_override
24 | # Reported steering level
25 | float32 steer_report
26 |
--------------------------------------------------------------------------------
/src/driver/protocol/msg/ResourceReport.msg:
--------------------------------------------------------------------------------
1 | # Information about vehicle resources; can be used for eco-driving
2 |
3 | # Fuel level / EV battery level (%, 0 to 100)
4 | float32 energy_level
5 |
6 | # Estimated distance until the fuel/battery needs to be refilled
7 | float32 distance_to_refill
8 |
9 | # Odometer (km)
10 | float32 odometer
--------------------------------------------------------------------------------
/src/driver/protocol/msg/RigidBodyState.msg:
--------------------------------------------------------------------------------
1 | # This message contains commonly used state variables of rigid body
2 |
3 | # ID of frame fixed to the rigid body
4 | string child_frame_id
5 |
6 | # Location and orientation of the object
7 | geometry_msgs/PoseWithCovariance pose
8 |
9 | # Linear and angular velocity of the object
10 | geometry_msgs/TwistWithCovariance twist
11 |
12 | # Linear and angular acceleration of the object
13 | geometry_msgs/AccelWithCovariance accel
14 |
--------------------------------------------------------------------------------
/src/driver/protocol/msg/RigidBodyStateStamped.msg:
--------------------------------------------------------------------------------
1 | # Stamped version of RigidBodyState
2 |
3 | Header header
4 | RigidBodyState state
5 |
--------------------------------------------------------------------------------
/src/driver/protocol/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_driver_msgs
3 | 0.1.0
4 |
5 | Unified interface message definitions for ZZZ drivers
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | message_generation
17 | std_msgs
18 | geometry_msgs
19 | message_runtime
20 | std_msgs
21 | geometry_msgs
22 |
23 |
--------------------------------------------------------------------------------
/src/driver/protocol/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_driver_msgs'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
12 |
--------------------------------------------------------------------------------
/src/driver/protocol/src/zzz_driver_msgs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/driver/protocol/src/zzz_driver_msgs/__init__.py
--------------------------------------------------------------------------------
/src/driver/protocol/src/zzz_driver_msgs/utils.py:
--------------------------------------------------------------------------------
1 | import math
2 | import tf.transformations as tft
3 |
4 | from zzz_driver_msgs.msg import RigidBodyState, RigidBodyStateStamped
5 | from geometry_msgs.msg import Pose, PoseWithCovariance, Twist, TwistWithCovariance
6 |
7 | def get_speed(msg):
8 | '''
9 | Calculate speed value
10 | '''
11 | if type(msg) == Twist:
12 | return math.sqrt(msg.linear.x**2 + msg.linear.y**2 + msg.linear.z**2)
13 | elif type(msg) == TwistWithCovariance or type(msg) == RigidBodyState:
14 | return get_speed(msg.twist)
15 | elif type(msg) == RigidBodyStateStamped:
16 | return get_speed(msg.state)
17 | else:
18 | raise ValueError("Incorrect message type for get_speed")
19 |
20 | def get_yaw(msg):
21 | '''
22 | Calculate yaw angle, assuming motion on a 2D plane
23 | '''
24 | if type(msg) == Pose:
25 | _,_,yaw = tft.euler_from_quaternion([msg.orientation.x, msg.orientation.y, msg.orientation.z, msg.orientation.w])
26 | return yaw
27 | elif type(msg) == PoseWithCovariance or type(msg) == RigidBodyState:
28 | return get_yaw(msg.pose)
29 | elif type(msg) == RigidBodyStateStamped:
30 | return get_yaw(msg.state)
31 | else:
32 | raise ValueError("Incorrect message type for get_yaw")
33 |
34 | def convert_odometry_to_state(msg):
35 | '''
36 | Convert nav_msgs/Odometry to zzz_driver_msgs/RigidBodyStateStamped
37 | '''
38 |
39 | new_msg = RigidBodyStateStamped()
40 | new_msg.header = msg.header
41 | new_msg.state.child_frame_id = msg.child_frame_id
42 |
43 | new_msg.state.pose = msg.pose
44 | new_msg.state.twist = msg.twist
45 | return new_msg
46 |
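A short usage sketch of the helpers above (the state values are arbitrary):

```python
from zzz_driver_msgs.msg import RigidBodyStateStamped
from zzz_driver_msgs.utils import get_speed, get_yaw

msg = RigidBodyStateStamped()
msg.state.twist.twist.linear.x = 3.0
msg.state.twist.twist.linear.y = 4.0
msg.state.pose.pose.orientation.w = 1.0  # identity orientation

print(get_speed(msg))  # 5.0
print(get_yaw(msg))    # 0.0
```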
--------------------------------------------------------------------------------
/src/driver/sensors/astuff_adapter/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_driver_sensors_astuff_adapter)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_install_python(PROGRAMS
8 | nodes/convert_topic
9 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
10 | )
11 |
--------------------------------------------------------------------------------
/src/driver/sensors/astuff_adapter/nodes/convert_topic:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import sys
4 | import rospy
5 |
6 | from zzz_common.params import parse_private_args
7 |
8 | if __name__ == "__main__":
9 | rospy.init_node("convert_topic")
10 |
11 | params = parse_private_args(
12 | input_topic="",
13 | output_topic="",
14 | )
15 |
16 | if not (params.input_topic and params.output_topic):
17 | rospy.logerr("Not enought parameters!")
18 | sys.exit(-1)
19 |
20 | # TODO: register and publish
21 |
22 | rospy.spin()
23 |
--------------------------------------------------------------------------------
/src/driver/sensors/astuff_adapter/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_driver_sensors_astuff_adapter
3 | 0.1.0
4 |
5 | Adapt input from AutonomouStuff messages
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | std_msgs
18 | astuff_sensor_msgs
19 |
20 |
21 |
--------------------------------------------------------------------------------
/src/driver/sensors/velodyne/.placeholder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/driver/sensors/velodyne/.placeholder
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_driver_simulators_carla_adapter)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_install_python(PROGRAMS
8 | nodes/convert_topic
9 | scripts/spawn_agent
10 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
11 | )
12 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/config/static_sensors.example.json:
--------------------------------------------------------------------------------
1 | {
2 | "sensors": [
3 | {
4 | "type": "sensor.camera.rgb",
5 | "id": "Spectator",
6 | "x": 0, "y": 0, "z": 50,
7 | "roll": 0, "pitch": -90.0, "yaw": 0,
8 | "width": 800, "height": 600, "fov": 100
9 | }
10 | ]
11 | }
12 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/config/vehicle_sensors.example.json:
--------------------------------------------------------------------------------
1 | {
2 | "sensors": [
3 | {
4 | "type": "sensor.camera.rgb",
5 | "id": "Center",
6 | "x": 0.7, "y": 0.0, "z": 1.60,
7 | "roll":0.0, "pitch":0.0, "yaw": 0.0,
8 | "width": 800, "height": 600, "fov":100
9 | },
10 | {
11 | "type": "sensor.camera.rgb",
12 | "id": "Left",
13 | "x": 0.7, "y": -0.4, "z": 1.60,
14 | "roll": 0.0, "pitch": 0.0, "yaw": -45.0,
15 | "width": 800, "height": 600, "fov": 100
16 | },
17 | {
18 | "type": "sensor.camera.rgb",
19 | "id": "Right",
20 | "x": 0.7, "y": 0.4, "z": 1.60,
21 | "roll": 0.0, "pitch": 0.0, "yaw": 45.0,
22 | "width": 800, "height": 600, "fov": 100
23 | },
24 | {
25 | "type": "sensor.camera.rgb",
26 | "id": "Sky",
27 | "x": 0, "y": 0, "z": 50,
28 | "roll": 0.0, "pitch": -90.0, "yaw": 0,
29 | "width": 1920, "height": 1080, "fov": 100
30 | },
31 | {
32 | "type": "sensor.lidar.ray_cast",
33 | "id": "Lidar",
34 | "x": 0.7, "y": -0.4, "z": 1.60,
35 | "roll": 0.0, "pitch": 0.0, "yaw": -45.0,
36 | "range": 5000,
37 | "channels": 32,
38 | "points_per_second": 320000,
39 | "upper_fov": 2.0,
40 | "lower_fov": -26.8,
41 | "rotation_frequency": 20,
42 | "sensor_tick": 0.05
43 | },
44 | {
45 | "type": "sensor.other.gnss",
46 | "id": "GPS",
47 | "x": 0.7, "y": -0.4, "z": 1.60
48 | }
49 | ]
50 | }
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/launch/server.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/launch/server.launch.yaml:
--------------------------------------------------------------------------------
1 | use_sim_time: true
2 | carla:
3 | host: localhost
4 | port: 2000
5 | synchronous_mode: true
6 | synchronous_mode_wait_for_vehicle_control_command: false
7 | fixed_delta_seconds: 0.05
8 | ego_vehicle:
9 | role_name: ["hero", "ego_vehicle"]
10 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_driver_simulators_carla_adapter
3 | 0.1.0
4 |
5 | Adapt ZZZ input and output to CARLA
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | std_msgs
18 | roscpp
19 | std_msgs
20 | roscpp
21 | std_msgs
22 |
23 |
24 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/scripts/server.tmuxp.yaml:
--------------------------------------------------------------------------------
1 | # This is an example session file for tmux (load it with tmuxp)
2 | # [Note] It is recommended to run roscore outside this session
3 | # [Note] Create the carla docker container before use:
4 | # docker create -p 2000-2002:2000-2002 --gpus all --name dcarla carlasim/carla /bin/bash CarlaUE4.sh [args]
5 |
6 | session_name: 'carla'
7 | windows:
8 | - window_name: server
9 | layout: main-vertical
10 | start_directory: /mnt/storage/carla/carla_server/bridge # Note: Change to your carla ros-bridge location
11 | shell_command_before:
12 | - export ROS_MASTER_URI=http://172.16.0.1:11311
13 | - export ROS_IP=172.16.0.1
14 | - source devel/setup.bash
15 | - export CARLA_ROOT=/mnt/storage/carla/carla_server # Note: Change to your carla binary location
16 | - export CARLA_VERSION=0.9.6
17 | - export PYTHONPATH=$PYTHONPATH:$CARLA_ROOT/PythonAPI/carla:$CARLA_ROOT/PythonAPI/carla/dist/carla-$CARLA_VERSION-py2.7-linux-x86_64.egg
18 | panes:
19 | - shell_command: # scenario_runner workspace
20 | - export ROOT_SCENARIO_RUNNER=/mnt/storage/carla/carla_server/scenario_runner
21 | - export PYTHONPATH=$PYTHONPATH:$ROOT_SCENARIO_RUNNER
22 | - export TEAM_CODE_ROOT=/home/carla/Carla/zzz/src/driver/simulators/carla/carla_adapter/scripts/use_srunner
23 | - python ${ROOT_SCENARIO_RUNNER}/srunner/challenge/challenge_evaluator_routes.py --scenarios=${ROOT_SCENARIO_RUNNER}/srunner/challenge/all_towns_traffic_scenarios1_3_4.json --agent=$TEAM_CODE_ROOT/ZZZAgent.py
24 | - docker start -a dcarla
25 | - sleep 4s && roslaunch server.launch
26 | - window_name: ws
27 | focus: 'true'
28 | start_directory: /mnt/storage/carla/carla_server/bridge
29 | shell_command_before:
30 | - export ROS_MASTER_URI=http://172.16.0.1:11311
31 | - export ROS_IP=172.16.0.1
32 | - source devel/setup.bash
33 | panes:
34 | - sleep 8s
35 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/scripts/use_bridge/auxiliary.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/scripts/use_bridge/main.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/scripts/use_srunner/main.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
--------------------------------------------------------------------------------
/src/driver/simulators/carla/carla_adapter/scripts/use_srunner/start.sh:
--------------------------------------------------------------------------------
1 | # This script is for Carla scenario_runner
2 |
3 | . /home/carla/Carla/zzz/devel/setup.sh
4 | roslaunch /home/carla/Carla/zzz/src/driver/simulators/carla/carla_adapter/scripts/use_srunner/main.launch # >zzz.log 2>zzz.err
5 |
--------------------------------------------------------------------------------
/src/driver/simulators/lgsvl/README.md:
--------------------------------------------------------------------------------
1 | This package provides an interface to the [lgsvl simulator](https://github.com/lgsvl/simulator).
--------------------------------------------------------------------------------
/src/library/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_common)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
--------------------------------------------------------------------------------
/src/library/README.md:
--------------------------------------------------------------------------------
1 | # ZZZ_common
2 |
3 | This directory contains the `zzz_common` package of shared utilities. It also imports other common dependencies such as tensor operation libraries, build configurations, etc.
4 |
5 | TODO: This library should eventually be rewritten in C++ or Cython.
6 |
7 | # Reference
8 |
9 | Motion Models:
10 | ```tex
11 | @inproceedings{schubert2008comparison,
12 | title={Comparison and evaluation of advanced motion models for vehicle tracking},
13 | author={Schubert, Robin and Richter, Eric and Wanielik, Gerd},
14 | booktitle={2008 11th international conference on information fusion},
15 | pages={1--6},
16 | year={2008},
17 | organization={IEEE}
18 | }
19 | ```
20 |
--------------------------------------------------------------------------------
/src/library/msgs/.placeholder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/library/msgs/.placeholder
--------------------------------------------------------------------------------
/src/library/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_common
3 | 0.1.0
4 |
5 | Common used libraries.
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | std_msgs
17 | zzz_driver_msgs
18 |
19 |
20 |
--------------------------------------------------------------------------------
/src/library/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_common'],
8 | package_dir={'': 'src'},
9 | install_requires=['easydict']
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/library/src/zzz_common/__init__.py:
--------------------------------------------------------------------------------
1 | from .geometry import dist_from_point_to_line
2 |
--------------------------------------------------------------------------------
/src/library/src/zzz_common/dynamic_models.py:
--------------------------------------------------------------------------------
1 | '''
2 | This module gives implementations of motion models and observation models.
3 | '''
4 |
5 | import numpy as np
6 | from scipy.special import fresnel
7 |
8 | ########## Motion models for tracking where inputs are unknown ##########
9 |
10 | def wrap_angle(theta):
11 | '''
12 | Normalize the angle to [-pi, pi]
13 | '''
14 | return (theta + np.pi) % (2*np.pi) - np.pi
15 |
16 | def motion_BR(state, dt):
17 | '''
18 | The mean of a Brownian-motion state does not change, while its uncertainty grows over time
19 | '''
20 | return np.copy(state)
21 |
22 | def motion_CV(state, dt):
23 | '''
24 | Constant Velocity
25 |
26 | States: [x, y, vx, vy]
27 | '''
28 | state = np.copy(state)
29 | state[0] += state[2] * dt
30 | state[1] += state[3] * dt
31 | return state
32 |
33 | def motion_CA(state, dt):
34 | raise NotImplementedError()
35 |
36 | def motion_CTRV(state, dt):
37 | raise NotImplementedError()
38 |
39 | def motion_CTRA(state, dt):
40 | '''
41 | Constant Turn-Rate and (longitudinal) Acceleration. This model also assumes that the velocity direction is aligned with the heading angle.
42 | CV and CTRV can be recovered by setting the corresponding terms to zero
43 |
44 | States: [x, y, theta, v, a, w]
45 | [0 1 2 3 4 5]
46 | '''
47 | x, y, th, v, a, w = state
48 | nth = wrap_angle(th + w * dt)
49 | nv = v + a * dt
50 | if np.isclose(w, 0):
51 | nx = x + (nv + v)/2 * np.cos(th) * dt
52 | ny = y + (nv + v)/2 * np.sin(th) * dt
53 | else:
54 | nx = x + ( nv*w*np.sin(nth) + a*np.cos(nth) - v*w*np.sin(th) - a*np.cos(th)) / (w*w)
55 | ny = y + (-nv*w*np.cos(nth) + a*np.sin(nth) + v*w*np.cos(th) - a*np.sin(th)) / (w*w)
56 |
57 | state = np.copy(state)
58 | state[:4] = (nx, ny, nth, nv)
59 | return state
60 |
61 | def motion_CSAA(state, dt):
62 | '''
63 | Constant Steering Angle and Acceleration.
64 |
65 | States: [x, y, theta, v, a, c]
66 | [0 1 2 3 4 5]
67 | '''
68 | x, y, th, v, a, c = state
69 |
70 | gamma1 = (c*v*v)/(4*a) + th
71 | gamma2 = c*dt*v + c*dt*dt*a - th
72 | eta = np.sqrt(2*np.pi)*v*c
73 | zeta1 = (2*a*dt + v)*np.sqrt(c/2*a*np.pi)
74 | zeta2 = v*np.sqrt(c/2*a*np.pi)
75 | sz1, cz1 = fresnel(zeta1)
76 | sz2, cz2 = fresnel(zeta2)
77 |
78 | nx = x + (eta * (np.cos(gamma1)*cz1 + np.sin(gamma1)*sz1 - np.cos(gamma1)*cz2 - np.sin(gamma1)*sz2) +
79 | 2*np.sin(gamma2)*np.sqrt(a*c) + 2*np.sin(th)*np.sqrt(a*c)) / 4*np.sqrt(a*c)*c
80 | ny = y + (eta * (-np.cos(gamma1)*sz1 + np.sin(gamma1)*cz1 - np.sin(gamma1)*cz2 - np.cos(gamma1)*sz2) +
81 | 2*np.cos(gamma2)*np.sqrt(a*c) - 2*np.sin(th)*np.sqrt(a*c)) / 4*np.sqrt(a*c)*c
82 | nth = wrap_angle(th - c*dt*dt*a/2 - c*dt*v)
83 | nv = v + a*dt
84 |
85 | state = np.copy(state)
86 | state[:4] = (nx, ny, nth, nv)
87 | return state
88 |
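A short usage sketch of the motion models above (the state values are arbitrary):

```python
import numpy as np
from zzz_common.dynamic_models import motion_CTRA, motion_CV

# CTRA state: [x, y, theta, v, a, w]
ctra_state = np.array([0.0, 0.0, 0.0, 10.0, 0.5, 0.1])
print(motion_CTRA(ctra_state, 0.1))  # state propagated by one 100 ms step

# CV state: [x, y, vx, vy]
cv_state = np.array([0.0, 0.0, 5.0, 0.0])
print(motion_CV(cv_state, 1.0))      # -> [5. 0. 5. 0.]
```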
--------------------------------------------------------------------------------
/src/library/src/zzz_common/logger.py:
--------------------------------------------------------------------------------
1 |
2 | import rospy
3 | import logging
4 |
5 | class LogBridge(logging.Handler):
6 | MAP = {
7 | logging.DEBUG :rospy.logdebug,
8 | logging.INFO :rospy.loginfo,
9 | logging.WARNING :rospy.logwarn,
10 | logging.ERROR :rospy.logerr,
11 | logging.CRITICAL :rospy.logfatal
12 | }
13 |
14 | def emit(self, record):
15 | try:
16 | self.MAP[record.levelno]("%s: %s" % (record.name, record.getMessage()))
17 | except KeyError:
18 | rospy.logerr("unknown log level %s LOG: %s: %s" % (record.levelno, record.name, record.msg))
19 |
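A short usage sketch: attaching the bridge to the root `logging` logger forwards records emitted by non-ROS libraries to the corresponding rospy log functions.

```python
import logging
from zzz_common.logger import LogBridge

logging.getLogger().addHandler(LogBridge())  # route standard logging records to rosout
logging.getLogger().setLevel(logging.DEBUG)

logging.getLogger("some_library").warning("this ends up in rospy.logwarn")
```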
--------------------------------------------------------------------------------
/src/library/src/zzz_common/msg/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/library/src/zzz_common/msg/__init__.py
--------------------------------------------------------------------------------
/src/library/src/zzz_common/msg/utils.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/library/src/zzz_common/msg/utils.py
--------------------------------------------------------------------------------
/src/library/src/zzz_common/params.py:
--------------------------------------------------------------------------------
1 | '''
2 | This module provides consistent parameter loading from rosparam and command-line input
3 | '''
4 |
5 | import argparse
6 | import rospy
7 | from easydict import EasyDict as edict
8 |
9 | # TODO: create a fake ArgumentParser to show additional information and support complex arguments
10 | def parse_private_args(**kvargs):
11 | parser = argparse.ArgumentParser()
12 | node_name = rospy.get_name()
13 |
14 | ros_args = {}
15 | for name, default in kvargs.items():
16 | parser.add_argument('--' + name, default=default, type=type(default))
17 | ros_args[name] = rospy.get_param(node_name + '/' + name, default)
18 |
19 | # ------------ ros param system conflict with argparse syntax
20 | # cmd_args = parser.parse_args()
21 | # for k, v in vars(cmd_args).items():
22 | # if k in ros_args and v != ros_args[v]:
23 | # rospy.logwarn("Duplicate arguments {}: {} / {}".format(k, ros_args[k], v))
24 | # if v != kvargs[k]:
25 | # ros_args[k] = v
26 | return edict(ros_args)
27 |
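A short usage sketch of `parse_private_args`: each keyword argument becomes a private parameter of the calling node, so the default can be overridden from a launch file or with the `_name:=value` command-line syntax (package and node names below are placeholders).

```python
#!/usr/bin/env python
import rospy
from zzz_common.params import parse_private_args

rospy.init_node("demo_node")
params = parse_private_args(
    input_topic="/zzz/example/in",  # read from ~input_topic if set, otherwise this default
    rate=20,
)
rospy.loginfo("subscribing to %s at %d Hz", params.input_topic, params.rate)
# e.g. rosrun <some_pkg> demo_node _input_topic:=/other/topic _rate:=10
```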
--------------------------------------------------------------------------------
/src/library/test/test_geometry.py:
--------------------------------------------------------------------------------
1 | # TODO: implement testings
2 |
--------------------------------------------------------------------------------
/src/monitor/README.md:
--------------------------------------------------------------------------------
1 | This module should detect and handle system failures, including perception failures, control failures, physical failures, etc.
2 |
--------------------------------------------------------------------------------
/src/navigation/map_provider/sumo/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_navigation_map_provider_sumo)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS nodes/local_map_generator
10 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
11 |
--------------------------------------------------------------------------------
/src/navigation/map_provider/sumo/README.md:
--------------------------------------------------------------------------------
1 | This package provides an interface to [SUMO](http://sumo.sourceforge.net/).
2 |
3 | # Command
4 |
5 | `netconvert --opendrive-files Town03.xodr --opendrive.import-all-lanes true --offset.disable-normalization true --opendrive.curve-resolution 0.5 -o map03_t.net.xml`
6 |
--------------------------------------------------------------------------------
/src/navigation/map_provider/sumo/nodes/local_map_generator:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import threading
4 | __module_name__ = "map_provider"
5 |
6 | import os
7 | import rospy
8 | import actionlib
9 | from zzz_common.params import parse_private_args
10 |
11 |
12 | from geometry_msgs.msg import Pose
13 | from nav_msgs.msg import Path
14 | from zzz_driver_msgs.msg import RigidBodyStateStamped
15 | from zzz_navigation_msgs.msg import Map, MapString
16 | from zzz_navigation_map_provider_sumo import LocalMap
17 |
18 | MAP_TYPES = {
19 | MapString.MAP_UNKNOWN: "unknown",
20 | MapString.MAP_OPENDRIVE: "opendrive",
21 | MapString.MAP_SUMO: "sumo"
22 | }
23 |
24 | class MapProvider(object):
25 | def __init__(self):
26 | params = parse_private_args(
27 | map_file = "",
28 | map_input_topic = "static_map", # /zzz/navigation/static_map
29 | map_output_topic = "local_static_map", # /zzz/navigation/local_static_map
30 | map_file_type = "opendrive",
31 | pose_topic = "ego_pose", # /zzz/navigation/ego_pose
32 | reference_path_topic = "reference_path" # /zzz/navigation/reference_path
33 | )
34 | self._map_trigger = threading.Event()
35 | self._reference_path_trigger = threading.Event()
36 |
37 | self._map_instance = LocalMap()
38 | if os.path.exists(params.map_file):
39 | if self._map_instance.setup_hdmap(file=params.map_file, mtype=params.map_file_type):
40 | self._map_trigger.set()
41 |
42 | self._static_map_subscriber = rospy.Subscriber(params.map_input_topic, MapString, self.load_map_callback)
43 | self._pose_subscriber = rospy.Subscriber(params.pose_topic, RigidBodyStateStamped, self.pose_callback)
44 | self._reference_path_subscriber = rospy.Subscriber(params.reference_path_topic, Path, self.reference_callback)
45 | self._local_map_publisher = rospy.Publisher(params.map_output_topic, Map, queue_size=1)
46 |
47 | def load_map_callback(self, map):
48 | if self._map_instance.setup_hdmap(
49 | content=map.content, mtype=MAP_TYPES[map.map_type]):
50 | self._map_trigger.set()
51 | rospy.loginfo("[%s] Map loaded" % __module_name__)
52 |
53 | def reference_callback(self, msg):
54 | self._map_trigger.wait()
55 | if self._map_instance.setup_reference_lane_list(msg):
56 | self._reference_path_trigger.set()
57 | rospy.loginfo("[%s] Received reference path" % __module_name__)
58 |
59 | def pose_callback(self, msg):
60 | # Note: Here we actually assume that pose is updating at highest frequency
61 | self._map_trigger.wait()
62 | self._reference_path_trigger.wait()
63 |
64 | new_static_map = self._map_instance.receive_new_pose(msg.state.pose.pose.position.x, msg.state.pose.pose.position.y)
65 | if new_static_map is not None:
66 | self._local_map_publisher.publish(new_static_map)
67 |
68 | if __name__ == "__main__":
69 | rospy.init_node("map_provider", log_level=rospy.INFO)
70 | node = MapProvider()
71 | rospy.spin()
72 |
--------------------------------------------------------------------------------
/src/navigation/map_provider/sumo/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_navigation_map_provider_sumo
3 | 0.1.0
4 |
5 | Provide map data through the SUMO interface
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | zzz_driver_msgs
17 | zzz_driver_msgs
18 | zzz_driver_msgs
19 |
20 |
21 |
--------------------------------------------------------------------------------
/src/navigation/map_provider/sumo/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_navigation_map_provider_sumo'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
--------------------------------------------------------------------------------
/src/navigation/map_provider/sumo/src/zzz_navigation_map_provider_sumo/__init__.py:
--------------------------------------------------------------------------------
1 | from .local_map import LocalMap
--------------------------------------------------------------------------------
/src/navigation/pose_reporter/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_navigation_pose_reporter)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | # catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/manual
11 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
12 |
--------------------------------------------------------------------------------
/src/navigation/pose_reporter/README.md:
--------------------------------------------------------------------------------
1 | This package subscribes to tf and odometry messages and converts the odometry result into a RigidBodyState in the map frame.
2 |
3 | The `manual` node just takes one GPS and one odometry as input.
4 |
--------------------------------------------------------------------------------
/src/navigation/pose_reporter/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_navigation_pose_reporter
3 | 0.1.0
4 |
5 | Adapt odometry output to world coordinate
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | zzz_driver_msgs
17 | nav_msgs
18 |
19 |
20 |
--------------------------------------------------------------------------------
/src/navigation/pose_reporter/srv/UpdateOrigin.srv:
--------------------------------------------------------------------------------
1 | # New origin of the given map
2 | sensor_msgs/NavSatFix origin
3 | ---
4 | # A flag indicating the update result
5 | uint8 status
6 |
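A client would update the origin with an ordinary rospy service call, along the lines of the hedged sketch below. The service name (`update_origin`) is an assumption, and this presumes the service definition is actually generated and advertised by the pose reporter node.

```python
import rospy
from sensor_msgs.msg import NavSatFix
from zzz_navigation_pose_reporter.srv import UpdateOrigin  # assumes the srv is built

rospy.init_node("origin_updater")
rospy.wait_for_service("update_origin")            # service name is an assumption
update_origin = rospy.ServiceProxy("update_origin", UpdateOrigin)

origin = NavSatFix()
origin.latitude, origin.longitude, origin.altitude = 42.3000, -83.6980, 270.0
resp = update_origin(origin)
rospy.loginfo("UpdateOrigin returned status %d", resp.status)
```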
--------------------------------------------------------------------------------
/src/navigation/protocol/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_navigation_msgs)
3 |
4 | find_package(catkin REQUIRED genmsg std_msgs actionlib_msgs # essential dependency
5 | geometry_msgs
6 | )
7 |
8 | catkin_python_setup()
9 |
10 | add_message_files(DIRECTORY msg FILES
11 | Lane.msg
12 | LaneBoundary.msg
13 | LanePoint.msg
14 | LaneSituation.msg
15 | Map.msg
16 | MapString.msg
17 | )
18 |
19 | generate_messages(DEPENDENCIES std_msgs actionlib_msgs geometry_msgs)
20 | catkin_package(CATKIN_DEPENDS std_msgs actionlib_msgs geometry_msgs)
21 |
--------------------------------------------------------------------------------
/src/navigation/protocol/msg/Lane.msg:
--------------------------------------------------------------------------------
1 | # This message describes a lane with a polyline representation
2 |
3 | # ----- Basic properties -----
4 | # the rightmost lane is 0, the reference lane is -1
5 | int16 index
6 |
7 | # Speed limit (km/h)
8 | float32 speed_limit
9 |
10 | # Length of the lane. Provided for easy computation
11 | float32 length
12 |
13 | # Whether this lane allows two-way traffic
14 | # This can be true when the road is too narrow or for a center turning lane
15 | bool bidirectional # = False
16 |
17 | # The situation when the lane ends. This field can be updated with dynamic info.
18 | uint8 stop_state # = 0
19 | uint8 STOP_STATE_UNKNOWN = 0
20 | uint8 STOP_STATE_THRU = 1 # e.g. drive through at lane connection, green light
21 | uint8 STOP_STATE_YIELD = 2 # e.g. yellow light, flashing yellow light
22 | uint8 STOP_STATE_STOP = 3 # e.g. red light
23 | uint8 STOP_STATE_STOP_YIELD = 4 # e.g. stop sign, right turn at red light
24 | uint8 STOP_STATE_STOP_YIELD_ALL_WAY = 5 # e.g. flashing red light, all way stop sign
25 |
26 | # ----- Central path representation -----
27 | # The central_path_points field is used when central_path_type is waypoint.
28 | # Otherwise, central_path_coeffs should be used
29 | LanePoint[] central_path_points
30 | float32[] central_path_coeffs
31 |
32 | uint8 central_path_type # = 0
33 | uint8 CENTRAL_PATH_WAYPOINT = 0 # discretized
34 | uint8 CENTRAL_PATH_LINE = 1
35 | uint8 CENTRAL_PATH_CONIC = 2 # conic section, including parabola and hyperbola
36 | uint8 CENTRAL_PATH_POLYNOMIAL = 3 #
37 | uint8 CENTRAL_PATH_BEZIER = 4
38 |
39 | # ----- Boundary representation -----
40 | # The boundary description of the current lane.
41 | # Note that the boundary type only describes the behaviour from the current lane to the neighbouring lane or road shoulder
42 | LaneBoundary[] left_boundaries
43 | LaneBoundary[] right_boundaries
44 |
45 | # ----- Auxiliary information of the lane -----
46 | # Road situations on this lane. This field can be updated with dynamic info.
47 | LaneSituation[] situations
48 |
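A consumer of this message typically branches on `central_path_type` before touching the path fields. The sketch below (mirroring `zzz_navigation_msgs.utils.get_lane_array`) extracts the centerline as an N x 2 NumPy array for the waypoint case and otherwise returns the raw coefficients; how those coefficients are interpreted for the parametric types is not specified here, so that part is left to the caller.

```python
import numpy as np
from zzz_navigation_msgs.msg import Lane

def lane_centerline(lane):
    """Return the centerline of a Lane message.

    For CENTRAL_PATH_WAYPOINT the result is an (N, 2) array of x/y points;
    for the parametric types the raw coefficient list is returned unchanged.
    """
    if lane.central_path_type == Lane.CENTRAL_PATH_WAYPOINT:
        return np.array([(p.position.x, p.position.y)
                         for p in lane.central_path_points])
    return list(lane.central_path_coeffs)
```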
--------------------------------------------------------------------------------
/src/navigation/protocol/msg/LaneBoundary.msg:
--------------------------------------------------------------------------------
1 | # This message describes the boundary type of a part of a lane
2 |
3 | # Offset from the start of the lane where this type of boundary takes effect
4 | float32 s
5 |
6 | # Boundary type between this `s` and the `s` at which the next section starts
7 | uint8 boundary_type
8 | uint8 BOUNDARY_UNKNOWN = 0
9 | uint8 BOUNDARY_DASHED_WHITE = 1 # neighbour lane has same direction.
10 | uint8 BOUNDARY_DASHED_YELLOW = 2 # neighbour lane has different direction.
11 | uint8 BOUNDARY_SOLID_WHITE = 3 # neighbour lane has same direction, not allowed to change lane.
12 | uint8 BOUNDARY_SOLID_YELLOW = 4 # neighbour lane has different direction, not allowed to change lane.
13 | uint8 BOUNDARY_SOLID_YELLOW_TURN = 5 # neighbour lane has different direction, not allowed to change lane unless turning.
14 | uint8 BOUNDARY_CURB = 6 # neighbour is road shoulder
15 |
16 | # Confidence of the lane boundary classification
17 | float32 confidence
18 |
--------------------------------------------------------------------------------
/src/navigation/protocol/msg/LanePoint.msg:
--------------------------------------------------------------------------------
1 | # This message describes a road point of a polyline-represented lane
2 |
3 | # The 3D position of the lane point
4 | geometry_msgs/Point position
5 |
6 | # ----- Other useful information -----
7 | # Distance from lane start to current point. The distance of the first point should be zero
8 | float32 s
9 |
10 | # Slope at current position of the road
11 | # Can be used to control the throttle
12 | float32 slope
13 |
14 | # Road curvature at current position of the road
15 | # Can be used to slow down before turning
16 | float32 curvature
17 |
18 | # The yaw angle of tangent line (in radian)
19 | float32 tangent
20 |
21 | # Road width at current position
22 | # Can be used to determine the carefulness of driving
23 | float32 width
24 |
--------------------------------------------------------------------------------
/src/navigation/protocol/msg/LaneSituation.msg:
--------------------------------------------------------------------------------
1 | # This message stores any uncommon situation on a road
2 | # A situation can be a map-specified object or a fresh event
3 |
4 | # The location of the situation in a lane (distance from lane start)
5 | float32 s
6 |
7 | # How far along the lane this situation takes effect.
8 | # Inf means it takes effect until the lane ends
9 | float32 length
10 |
11 | # Type of situation
12 | uint8 situation_type
13 | uint8 SITUATION_NOT_SPECIFIED = 0
14 | uint8 SITUATION_BLOCKED = 1 # Something blocked this lane, vehicle should stop here
15 | uint8 SITUATION_CROSSWALK = 2 # Crosswalk
16 | uint8 SITUATION_REDUCE_SPEED = 3 # Reducing speed is required. This can be due to speed bump, construction, or school bus stop, etc
17 |
18 | # ------- Situation information -------
19 | # The new speed limit. Zero means the vehicle is required to stop
20 | float32 reduced_max_speed
21 |
22 | # Additional information
23 | string comments
24 |
--------------------------------------------------------------------------------
/src/navigation/protocol/msg/Map.msg:
--------------------------------------------------------------------------------
1 | # This message describes a static local map. This map contains all essential
2 | # information that should be generated by map provider. Generally this map is
3 | # limited in a scope of a road section (i.e. road network edge)
4 |
5 | Header header
6 |
7 | # Whether the map is in a structured environment
8 | bool in_junction # = True
9 |
10 | # Target lane index at the end of the section.
11 | int8 target_lane_index # TODO(zyxin): Change to a list to support multiple lane turning
12 |
13 | # Lanes if it's in a structured road, should be sorted by ascending index
14 | Lane[] lanes
15 |
16 | # Road area if in junction
17 | geometry_msgs/Polygon drivable_area
18 |
--------------------------------------------------------------------------------
/src/navigation/protocol/msg/MapString.msg:
--------------------------------------------------------------------------------
1 | # This message is used for loading a map from string content
2 |
3 | Header header
4 |
5 | # Map type enum
6 | uint8 map_type
7 | uint8 MAP_UNKNOWN = 0
8 | uint8 MAP_OPENDRIVE = 1
9 | uint8 MAP_SUMO = 2
10 |
11 | # Map content. It can be encoded, depending on the map_type
12 | string content
13 |
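The map provider node shown earlier loads the map directly from this message (`map.content` plus `MAP_TYPES[map.map_type]`), so feeding it is a matter of reading the map file into `content` and publishing once. A minimal sketch follows; the topic name used here is an assumption, so check the provider's map input topic parameter for the real one.

```python
import rospy
from zzz_navigation_msgs.msg import MapString

rospy.init_node("map_sender")
pub = rospy.Publisher("local_map_input", MapString, queue_size=1, latch=True)  # topic name assumed

msg = MapString()
msg.header.stamp = rospy.Time.now()
msg.map_type = MapString.MAP_SUMO
with open("map.net.xml", "r") as f:   # path to a SUMO network file (example)
    msg.content = f.read()

pub.publish(msg)
rospy.spin()  # keep the latched message available for late subscribers
```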
--------------------------------------------------------------------------------
/src/navigation/protocol/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_navigation_msgs
3 | 0.1.0
4 |
5 | Unified interface message definitions for ZZZ navigation modules
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | message_generation
17 | std_msgs
18 | geometry_msgs
19 | actionlib_msgs
20 | message_runtime
21 | std_msgs
22 | geometry_msgs
23 | actionlib_msgs
24 |
25 |
--------------------------------------------------------------------------------
/src/navigation/protocol/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_navigation_msgs'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
12 |
--------------------------------------------------------------------------------
/src/navigation/protocol/src/zzz_navigation_msgs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/navigation/protocol/src/zzz_navigation_msgs/__init__.py
--------------------------------------------------------------------------------
/src/navigation/protocol/src/zzz_navigation_msgs/utils.py:
--------------------------------------------------------------------------------
1 | '''
2 | This package contains useful message conversions
3 | '''
4 |
5 | import numpy as np
6 |
7 | def get_lane_array(lanes):
8 | arrays = []
9 | for lane in lanes:
10 | point_list = [(point.position.x, point.position.y) for point in lane.central_path_points]
11 | arrays.append(np.array(point_list))
12 | return arrays
13 |
--------------------------------------------------------------------------------
/src/perception/README.md:
--------------------------------------------------------------------------------
1 | This module contains packages for sensor data processing.
2 |
3 | # Detection box
4 |
5 | The detection box is defined as follows (using a vehicle as an example); a short sketch follows the list:
6 | - The X axis points along the vehicle's longitudinal direction of motion. Forward is the positive direction. The size along this axis is the length.
7 | - The Y axis points along the vehicle's lateral direction. Right is the positive direction. The size along this axis is the width.
8 | - The Z axis points along the vehicle's vertical direction. Up is the positive direction. The size along this axis is the height.
9 |
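A minimal sketch of filling a detection under this convention (field names are taken from the `zzz_perception_msgs` definitions later in this module; the numbers are just an example passenger car):

```python
from zzz_perception_msgs.msg import DetectionBox, ObjectClass

det = DetectionBox()
# Box center in the detector's frame
det.bbox.pose.pose.position.x = 12.0   # ahead of the sensor
det.bbox.pose.pose.position.y = -1.5
det.bbox.pose.pose.position.z = 0.8

# Sizes follow the convention above: x -> length, y -> width, z -> height
det.bbox.dimension.length_x = 4.5      # length (m)
det.bbox.dimension.length_y = 1.8      # width (m)
det.bbox.dimension.length_z = 1.5      # height (m)

cls = ObjectClass()
cls.classid = ObjectClass.VEHICLE_PASSENGER
cls.score = 0.9
det.classes.append(cls)
```

Note that the per-field comments in `DimensionWithCovariance.msg` assign the axes slightly differently; this sketch follows the convention stated in this README.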
--------------------------------------------------------------------------------
/src/perception/detection/camera_detectors/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_detection_camera_detectors)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS nodes/yolo
10 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
11 |
--------------------------------------------------------------------------------
/src/perception/detection/camera_detectors/README.md:
--------------------------------------------------------------------------------
1 | # Reference
2 | https://github.com/ultralytics/yolov3
3 |
4 | # Result coordinate
5 | x, y are in NED coordinates
6 |
--------------------------------------------------------------------------------
/src/perception/detection/camera_detectors/launch/example.launch:
--------------------------------------------------------------------------------
1 |
2 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/src/perception/detection/camera_detectors/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_perception_detection_camera_detectors
3 | 0.1.0
4 |
5 | Camera image based detectors
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | std_msgs
18 | roscpp
19 | std_msgs
20 | roscpp
21 | std_msgs
22 | nav_msgs
23 | zzz_common
24 |
25 |
26 |
--------------------------------------------------------------------------------
/src/perception/detection/camera_detectors/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['yolov3', 'yolov3.utils', 'zzz_perception_detection_camera_detectors'],
8 | package_dir={'': 'src'},
9 | package_data={'yolov3': ['src/yolov3/cfg/*', 'src/yolov3/data/*', 'src/yolov3/weights/*']},
10 | install_requires={'torch', 'pathlib', 'google-cloud-storage'}
11 | )
12 |
13 | setup(**d)
--------------------------------------------------------------------------------
/src/perception/detection/camera_detectors/src/zzz_perception_detection_camera_detectors/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/perception/detection/camera_detectors/src/zzz_perception_detection_camera_detectors/__init__.py
--------------------------------------------------------------------------------
/src/perception/detection/camera_filters/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_detection_camera_filters)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/ipm_projector
11 | nodes/pseudo_lidar_generator
12 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
13 | )
14 |
--------------------------------------------------------------------------------
/src/perception/detection/camera_filters/nodes/ipm_projector:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | import numpy as np
5 | from zzz_perception_msgs.msg import DetectionBox2DArray, DetectionBoxArray, DetectionBox, DetectionBox2D
6 | from zzz_common.params import parse_private_args
7 | from zzz_perception_detection_camera_filters.ipm import CamIntrExtr
8 |
9 | class IPMNode():
10 | def __init__(self):
11 |
12 | params = parse_private_args(
13 | input_topic="objects2d_detected",
14 | output_topic="objects_projected",
15 | output_3d=True,
16 | )
17 |
18 | self._sub = rospy.Subscriber(params.input_topic, DetectionBox2DArray, self.obj_callback)
19 | self.output_3d = params.output_3d
20 | if self.output_3d:
21 | self._pub = rospy.Publisher(params.output_topic, DetectionBoxArray, queue_size=1)
22 | else:
23 | self._pub = rospy.Publisher(params.output_topic, DetectionBox2DArray, queue_size=1)
24 |
25 | self._default_z = 1
26 | self._default_height = 2
27 | self._default_width = 2
28 | self._default_length = 2
29 |
30 | # initialize camera geometry model
31 | # TODO: read from camera info and extrinsics
32 | self._cam = CamIntrExtr(0, 0, 0, 2.4)
33 | self._cam.setIntr(400, 400, fov=100)
34 |
35 | def obj_callback(self, detections):
36 | if self.output_3d:
37 | array = DetectionBoxArray()
38 | else:
39 | array = DetectionBox2DArray()
40 |
41 | for det in detections.detections:
42 | u = np.array([det.bbox.pose.x])
43 | v = np.array([det.bbox.pose.y + det.bbox.dimension.length_y / 2])
44 | lo, la = self._cam.img2world(u, v)
45 |
46 | if self.output_3d:
47 | newdet = DetectionBox()
48 | # TODO: Unify camera coordinate definition
49 | newdet.bbox.pose.pose.position.x = lo[0]
50 | newdet.bbox.pose.pose.position.y = -la[0]
51 | newdet.bbox.pose.pose.position.z = self._default_z
52 | newdet.bbox.pose.covariance = np.diag([2,2,1e6,1e6,1e6,1e6]).flatten().tolist()
53 |
54 | # FIXME: Distinguish input by projection type
55 | newdet.bbox.dimension.length_x = self._default_width
56 | newdet.bbox.dimension.length_y = self._default_length
57 | newdet.bbox.dimension.length_z = self._default_height
58 | newdet.bbox.dimension.covariance = np.diag([1] * 3).flatten().tolist()
59 |
60 | # XXX: Calculate these result
61 | # newdet.bbox.pose.covariance =
62 | # newdet.bbox.dimension.covariance =
63 | newdet.classes = det.classes
64 | array.detections.append(newdet)
65 |
66 | else:
67 | newdet = DetectionBox2D()
68 | newdet.bbox.pose.x = u[0]  # `x` holds the u (pixel column) coordinate
69 | newdet.bbox.pose.y = v[0]  # `y` holds the v (pixel row) coordinate
70 |
71 | # XXX: Calculate these result
72 | # newdet.bbox.pose.covariance = det.bbox.pose.covariance
73 | # newdet.bbox.dimension = det.dimension
74 |
75 | newdet.classes = det.classes
76 | array.detections.append(newdet)
77 |
78 | array.header = detections.header
79 | self._pub.publish(array)
80 |
81 | def calc_real_world_pos(self, x, labelclass):
82 | u = int(x[0] + x[2])/2
83 | v = int(x[3])
84 | u = np.array([u])
85 | v = np.array([v])
86 | lo, la = self._cam.img2world(u, v) #lo front, la right
87 | pos = (lo[0], la[0], labelclass)
88 | return pos
89 |
90 | if __name__ == "__main__":
91 | rospy.init_node("ipm_projector")
92 | node = IPMNode()
93 | rospy.spin()
94 |
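`CamIntrExtr.img2world` is not shown in this dump, but the basic idea of inverse perspective mapping onto a flat ground plane can be sketched as follows. This assumes a level, forward-looking pinhole camera at height `h` with principal point `(cx, cy)` and focal lengths `(fx, fy)`; the real class apparently also handles the camera mounting geometry (its constructor takes additional arguments), which is omitted here.

```python
def img_to_ground(u, v, fx, fy, cx, cy, cam_height):
    """Project an image pixel onto a flat ground plane.

    Returns (longitudinal, lateral) distances in meters: forward and to the
    right of the camera. Only valid for pixels below the horizon (v > cy).
    """
    if v <= cy:
        raise ValueError("pixel is at or above the horizon")
    longitudinal = cam_height * fy / (v - cy)    # forward distance
    lateral = (u - cx) / fx * longitudinal       # rightward offset
    return longitudinal, lateral

# A pixel near the bottom center of an 800x800 image, camera 2.4 m high
print(img_to_ground(420, 700, 400, 400, 400, 400, 2.4))
```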
--------------------------------------------------------------------------------
/src/perception/detection/camera_filters/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_perception_detection_camera_filters
3 | 0.1.0
4 |
5 | Utilities to manipulate camera detections
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | std_msgs
18 | roscpp
19 | std_msgs
20 | roscpp
21 | std_msgs
22 | nav_msgs
23 | zzz_common
24 |
25 |
26 |
--------------------------------------------------------------------------------
/src/perception/detection/camera_filters/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_perception_detection_camera_filters', 'pseudo_lidar'],
8 | package_dir={'': 'src'},
9 | install_requires=['numpy']
10 | )
11 |
12 | setup(**d)
13 |
--------------------------------------------------------------------------------
/src/perception/detection/camera_filters/src/zzz_perception_detection_camera_filters/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/perception/detection/camera_filters/src/zzz_perception_detection_camera_filters/__init__.py
--------------------------------------------------------------------------------
/src/perception/detection/fused_detectors/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_detection_fused_detectors)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS nodes/frustum
10 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
11 |
--------------------------------------------------------------------------------
/src/perception/detection/fused_detectors/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_perception_detection_fused_detectors
3 | 0.1.0
4 |
5 | Fusion based detectors
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | std_msgs
18 | roscpp
19 | std_msgs
20 | roscpp
21 | std_msgs
22 | nav_msgs
23 | zzz_common
24 |
25 |
26 |
--------------------------------------------------------------------------------
/src/perception/detection/fused_detectors/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['frustum_pointnets',
8 | 'frustum_pointnets.train',
9 | 'frustum_pointnets.module_wrapper',
10 | 'zzz_perception_detection_fused_detectors'],
11 | package_dir={'': 'src'},
12 | # package_data={'yolov3': ['src/yolov3/cfg/*', 'src/yolov3/data/*', 'src/yolov3/weights/*']},
13 | install_requires={'tensorflow'}
14 | )
15 |
16 | setup(**d)
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_detection_lidar_detectors)
3 |
4 | find_package(catkin REQUIRED COMPONENTS
5 | roscpp
6 | sensor_msgs
7 | zzz_perception_msgs
8 | pcl_ros
9 | )
10 |
11 | catkin_package(
12 | CATKIN_DEPENDS sensor_msgs zzz_perception_msgs pcl_ros
13 | # DEPENDS Boost
14 | INCLUDE_DIRS include
15 | LIBRARIES zzz_perception_detection_lidar_detectors
16 | )
17 |
18 | include_directories(
19 | include
20 | ${catkin_INCLUDE_DIRS}
21 | )
22 |
23 | # Add library target
24 | add_library(zzz_perception_detection_lidar_detectors
25 | src/LidarDetector.cpp
26 | src/EuclideanClusterDetector.cpp
27 | )
28 | add_dependencies(zzz_perception_detection_lidar_detectors
29 | ${${PROJECT_NAME}_EXPORTED_TARGETS}
30 | ${catkin_EXPORTED_TARGETS}
31 | )
32 | target_link_libraries(zzz_perception_detection_lidar_detectors
33 | ${catkin_LIBRARIES}
34 | )
35 |
36 | # Add node targets
37 | add_executable(euclidean_cluster_detector nodes/euclidean_cluster_detector.cpp)
38 | set_target_properties(euclidean_cluster_detector PROPERTIES OUTPUT_NAME euclidean_cluster PREFIX "")
39 | add_dependencies(euclidean_cluster_detector
40 | zzz_perception_detection_lidar_detectors
41 | ${${PROJECT_NAME}_EXPORTED_TARGETS}
42 | ${catkin_EXPORTED_TARGETS}
43 | )
44 | target_link_libraries(euclidean_cluster_detector
45 | zzz_perception_detection_lidar_detectors
46 | ${catkin_LIBRARIES}
47 | )
48 |
49 | # Install files
50 | install(TARGETS euclidean_cluster_detector
51 | ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
52 | LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
53 | RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
54 | )
55 |
56 | install(DIRECTORY include/${PROJECT_NAME}/
57 | DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION})
58 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/README.md:
--------------------------------------------------------------------------------
1 | # Package Summary
2 | This package provides object detection algorithms based on lidar point clouds
3 |
4 | # Node `euclidean_cluster`
5 | This node uses ground removal and Euclidean clustering to generate point cloud detections
6 |
7 | ## Input Topic
8 | |Topic|Condition|Type|Description|
9 | |---|---|---|---|
10 | |`points_raw`||`sensor_msgs/PointCloud2`|Input point cloud|
11 | ## Output Topic
12 | |Topic|Condition|Type|Description|
13 | |---|---|---|---|
14 | |`objects_detected`||`zzz_perception_msgs/DetectionBoxArray`|Detected objects|
15 | |`points_cluster`|Debug|`sensor_msgs/PointCloud2`|Points after clustering|
16 | |`points_ground`|Debug|`sensor_msgs/PointCloud2`|Points of removed ground|
17 | ## Parameters
18 | |Parameter|Type|Description|Default|
19 | |---|---|---|---|
20 | |`downsampling_enable`|bool|Enable downsampling pre-processing|`false`|
21 | |`downsampling_leaf_size`|float|The size of the grid used in downsampling. The larger the leaf size, the fewer points remain|`0.1`|
22 | |`crop_enable`|bool|Enable crop pre-processing|`false`|
23 | |`plane_removal_enable`|bool|Enable planes removal pre-processing|`true`|
24 | |`plane_removal_ransac_thres`|float|Plane RANSAC distance threshold|`0.15`|
25 | |`plane_removal_count_thres`|float|Minimum proportion of points a plane must contain for it to be removed|`0.1`|
26 | |`cluster_dist_thres`|float|Euclidean Clustering distance threshold|`0.5`|
27 | |`cluster_count_min`|integer|Minimum point count of valid cluster|`100`|
28 | |`cluster_count_max`|integer|Maximum point count of valid cluster|`1000000`|
29 | ## Reference
30 | 1. Point Cloud Library:
31 | ```tex
32 | @InProceedings{Rusu_ICRA2011_PCL,
33 | author = {Radu Bogdan Rusu and Steve Cousins},
34 | title = {{3D is here: Point Cloud Library (PCL)}},
35 | booktitle = {{IEEE International Conference on Robotics and Automation (ICRA)}},
36 | month = {May 9-13},
37 | year = {2011},
38 | address = {Shanghai, China}
39 | }
40 | ```
41 |
42 | 1. Euclidean Clustering:
43 | ```tex
44 | @PhDThesis{RusuDoctoralDissertation,
45 | author = {Radu Bogdan Rusu},
46 | title = {Semantic 3D Object Maps for Everyday Manipulation in Human Living Environments},
47 | school = {Computer Science department, Technische Universitaet Muenchen, Germany},
48 | year = {2009},
49 | month = {October}
50 | }
51 | ```
52 |
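For reference, a minimal Python consumer of the `objects_detected` output could look like the sketch below (message fields are taken from `zzz_perception_msgs`; topic remapping is left to the launch file).

```python
import rospy
from zzz_perception_msgs.msg import DetectionBoxArray

def callback(msg):
    rospy.loginfo("received %d detections", len(msg.detections))
    for det in msg.detections:
        p = det.bbox.pose.pose.position
        d = det.bbox.dimension
        rospy.loginfo("  box at (%.1f, %.1f, %.1f), size %.1f x %.1f x %.1f",
                      p.x, p.y, p.z, d.length_x, d.length_y, d.length_z)

rospy.init_node("detection_listener")
rospy.Subscriber("objects_detected", DetectionBoxArray, callback)
rospy.spin()
```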
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/include/zzz_perception_detection_lidar_detectors/EuclideanCluster2dDetector.h:
--------------------------------------------------------------------------------
1 | #ifndef ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_EUCLIDEANCLUSTER2DDETECTOR_H
2 | #define ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_EUCLIDEANCLUSTER2DDETECTOR_H
3 |
4 | #include "zzz_perception_detection_lidar_detectors/LidarDetector.h"
5 |
6 | namespace zzz
7 | {
8 | namespace perception
9 | {
10 | class EuclideanCluster2dDetector : LidarDetector
11 | {
12 | public:
13 | EuclideanCluster2dDetector(ros::NodeHandle &node_handle, ros::NodeHandle &private_handle,
14 | std::string input_topic="points_raw", std::string output_topic="objects_detected");
15 | virtual void detect(sensor_msgs::PointCloud2ConstPtr input);
16 |
17 | private:
18 | };
19 | } // namespace perception
20 | } // namespace zzz
21 |
22 | #endif // ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_EUCLIDEANCLUSTER2DDETECTOR_H
23 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/include/zzz_perception_detection_lidar_detectors/EuclideanClusterDetector.h:
--------------------------------------------------------------------------------
1 | #ifndef ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_EUCLIDEANCLUSTERDETECTOR_H
2 | #define ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_EUCLIDEANCLUSTERDETECTOR_H
3 |
4 | #include "zzz_perception_detection_lidar_detectors/LidarDetector.h"
5 |
6 | #include
7 | #include
8 | #include
9 | #include "zzz_perception_msgs/DetectionBoxArray.h"
10 |
11 | namespace zzz
12 | {
13 | namespace perception
14 | {
15 | class EuclideanClusterDetector : LidarDetector
16 | {
17 | private: // configurations
18 | bool _downsampling_enable;
19 | float _downsampling_leaf_size;
20 |
21 | bool _crop_enable;
22 | float _crop_min_z;
23 | float _crop_max_z;
24 |
25 | bool _plane_removal_enable;
26 | float _plane_removal_ransac_thres;
27 | float _plane_removal_count_thres;
28 |
29 | float _cluster_dist_thres;
30 | int _cluster_count_min;
31 | int _cluster_count_max;
32 |
33 | public:
34 | EuclideanClusterDetector(ros::NodeHandle &node_handle, ros::NodeHandle &private_handle);
35 | virtual void detect(sensor_msgs::PointCloud2ConstPtr input);
36 |
37 | protected:
38 | #ifndef NDEBUG
39 | ros::Publisher _cluster_publisher;
40 | ros::Publisher _plane_publisher;
41 | #endif
42 |
43 | pcl::PointIndices::Ptr planeRemove(
44 | pcl::PointCloud::ConstPtr input,
45 | pcl::PointIndices::ConstPtr indices);
46 | void segment(pcl::PointCloud::ConstPtr input,
47 | pcl::PointIndices::ConstPtr indices,
48 | std::vector &clusters);
49 | void estimate(pcl::PointCloud::ConstPtr cluster_input,
50 | zzz_perception_msgs::BoundingBox &detection);
51 | };
52 | } // namespace perception
53 | } // namespace zzz
54 |
55 | #endif // ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_EUCLIDEANCLUSTERDETECTOR
56 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/include/zzz_perception_detection_lidar_detectors/LidarDetector.h:
--------------------------------------------------------------------------------
1 | #ifndef ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_LIDARDETECTOR_H
2 | #define ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_LIDARDETECTOR_H
3 |
4 | #include
5 |
6 | #include
7 | #include
8 |
9 | namespace zzz
10 | {
11 | namespace perception
12 | {
13 | class LidarDetector
14 | {
15 | public:
16 | LidarDetector(ros::NodeHandle &node_handle, ros::NodeHandle &private_handle);
17 | virtual void detect(sensor_msgs::PointCloud2ConstPtr input)=0;
18 |
19 | protected:
20 | ros::Subscriber _input_subscriber;
21 | ros::Publisher _output_publisher;
22 |
23 | std::string _frame_id; // store for debug use
24 | ros::Time _timestamp; // store for debug use
25 | };
26 | } // namespace perception
27 | } // namespace zzz
28 |
29 | #endif // ZZZ_PERCEPTION_DETECTION_LIDAR_DETECTORS_LIDARDETECTOR_H
30 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/nodes/euclidean_cluster_detector.cpp:
--------------------------------------------------------------------------------
1 | #include
2 | #include
3 |
4 | int main(int argc, char **argv)
5 | {
6 | ros::init(argc, argv, "euclidean_cluster_detector");
7 | ros::NodeHandle nh, pnh("~");
8 | zzz::perception::EuclideanClusterDetector detector(nh, pnh);
9 | ros::spin();
10 | }
11 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_perception_detection_lidar_detectors
3 | 0.1.0
4 |
5 | Implementation of object detectors
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | sensor_msgs
18 | zzz_perception_msgs
19 | pcl_ros
20 | roscpp
21 | sensor_msgs
22 | zzz_perception_msgs
23 | pcl_ros
24 | roscpp
25 | sensor_msgs
26 | zzz_perception_msgs
27 | pcl_ros
28 |
29 |
30 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_detectors/src/LidarDetector.cpp:
--------------------------------------------------------------------------------
1 | #include "zzz_perception_detection_lidar_detectors/LidarDetector.h"
2 | #include
3 |
4 | using namespace std;
5 |
6 | namespace zzz { namespace perception {
7 | LidarDetector::LidarDetector(ros::NodeHandle &node_handle, ros::NodeHandle &private_handle)
8 | {
9 | string input_topic, output_topic;
10 | private_handle.param("input_topic", input_topic, string("points_raw"));
11 | private_handle.param("output_topic", output_topic, string("objects_detected"));
12 |
13 | _input_subscriber = node_handle.subscribe(input_topic, 1, &LidarDetector::detect, this);
14 | _output_publisher = node_handle.advertise(output_topic, 1);
15 | }
16 | }} // namespace zzz::perception
17 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_detection_lidar_filters)
3 |
4 | find_package(catkin REQUIRED)
5 |
6 | catkin_package()
7 |
8 | catkin_python_setup()
9 |
10 | catkin_install_python(PROGRAMS
11 | nodes/extrinsic
12 | nodes/lshape
13 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
14 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/README.md:
--------------------------------------------------------------------------------
1 | # Package Summary
2 | This package contains filters that operate on lidar point cloud data and on detection results derived from it.
3 |
4 | # Node `lshape`
5 |
6 | ## Reference
7 | 1. L-Shaped Estimator:
8 | ```tex
9 | @inproceedings{ye2016object,
10 | title={Object detection and tracking using multi-layer laser for autonomous urban driving},
11 | author={Ye, Yutong and Fu, Liming and Li, Bijun},
12 | booktitle={2016 IEEE 19th International Conference on Intelligent Transportation Systems (ITSC)},
13 | pages={259--264},
14 | year={2016},
15 | organization={IEEE}
16 | }
17 | ```
18 |
19 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/nodes/extrinsic:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from zzz_common.params import parse_private_args
6 | from sensor_msgs.msg import PointCloud2
7 | from zzz_perception_detection_lidar_filters.transformers import ExtrinsicFilter
8 |
9 | class FilterNode():
10 | def __init__(self):
11 | params = parse_private_args(
12 | input_topic="/zzz/drivers/lidar/points_raw",
13 | output_topic="points_transformed",
14 | target_frame="base_link"
15 | )
16 |
17 | self._filter = ExtrinsicFilter(**params)
18 | self._subscriber = rospy.Subscriber(params.pop("input_topic"), PointCloud2, self.callback)
19 | self._publisher = rospy.Publisher(params.pop("output_topic"), PointCloud2, queue_size=1)
20 |
21 | def callback(self, msg):
22 | nmsg = self._filter.filter(msg)
23 | if nmsg is not None: self._publisher.publish(nmsg)
24 |
25 | if __name__ == "__main__":
26 | rospy.init_node("criteria_filter")
27 | node = FilterNode()
28 | rospy.spin()
29 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/nodes/lshape:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from zzz_common.params import parse_private_args
6 | from zzz_perception_msgs.msg import DetectionBoxArray
7 | from zzz_perception_detection_lidar_filters.estimators import LShapeFilter
8 |
9 | class FilterNode():
10 | def __init__(self):
11 | params = parse_private_args(
12 | input_topic="objects_detected",
13 | output_topic="objects_filtered"
14 | )
15 |
16 | self._subscriber = rospy.Subscriber(params.pop("input_topic"), DetectionBoxArray, self.callback)
17 | self._publisher = rospy.Publisher(params.pop("output_topic"), DetectionBoxArray, queue_size=1)
18 | self._filter = LShapeFilter(**params)
19 |
20 | def callback(self, msg):
21 | self._publisher.publish(self._filter.filter(msg))
22 |
23 | if __name__ == "__main__":
24 | rospy.init_node("lshape_filter")
25 | node = FilterNode()
26 | rospy.spin()
27 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_perception_detection_lidar_filters
3 | 0.1.0
4 |
5 | Implementation of object filters
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | roscpp
17 | sensor_msgs
18 | tf2_ros
19 | zzz_perception_msgs
20 | pcl_ros
21 |
22 |
23 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_perception_detection_lidar_filters'],
8 | package_dir={'': 'src'},
9 | install_requires={'pcl-py'}
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/src/zzz_perception_detection_lidar_filters/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/perception/detection/lidar_filters/src/zzz_perception_detection_lidar_filters/__init__.py
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/src/zzz_perception_detection_lidar_filters/estimators.py:
--------------------------------------------------------------------------------
1 | import rospy
2 | import tf
3 | import pcl
4 | import numpy as np
5 | from zzz_perception_msgs.msg import DetectionBoxArray
6 | from geometry_msgs.msg import Quaternion
7 | import sensor_msgs.point_cloud2 as pc2
8 |
9 | np.seterr(all='raise')
10 |
11 | class LShapeFilter:
12 | def __init__(self, **params):
13 | self._params = params
14 |
15 | def filter(self, array):
16 | assert type(array) == DetectionBoxArray
17 |
18 | for target in array.detections:
19 | self._fit_shape(target)
20 | return array
21 |
22 | def _fit_shape(self, target):
23 | cloud = pcl.PointCloud(target.source_cloud)
24 | xyz = cloud.xyz
25 |
26 | pmax = np.max(xyz, axis=0)
27 | pmin = np.min(xyz, axis=0)
28 | target.bbox.pose.pose.position.x = (pmax[0] + pmin[0])/2
29 | target.bbox.pose.pose.position.y = (pmax[1] + pmin[1])/2
30 | target.bbox.pose.pose.position.z = (pmax[2] + pmin[2])/2
31 |
32 | angle = xyz[:,1] / xyz[:,0]
33 | imax, imin = np.argmax(angle), np.argmin(angle)
34 | assert imax != imin
35 |
36 | x1, y1, _ = xyz[imin, :]
37 | x2, y2, _ = xyz[imax, :]
38 | dist = np.abs((y2-y1)*xyz[:,0] - (x2-x1)*xyz[:,1] + x2*y1 - x1*y2) / np.sqrt((y2-y1)*(y2-y1) + (x2-x1)*(x2-x1))
39 | iend = np.argmax(dist)
40 |
41 | x0, y0, _ = xyz[iend, :]
42 | dist2min = np.sqrt((x1-x0)*(x1-x0) + (y1-y0)*(y1-y0))
43 | dist2max = np.sqrt((x2-x0)*(x2-x0) + (y2-y0)*(y2-y0))
44 | if dist2min == 0 and dist2max == 0:
45 | rospy.logwarn("[LShape filter] Invalid shape!")
46 |
47 | if dist2max > dist2min:
48 | yaw = np.arctan2(y2-y0, x2-x0)
49 | target.bbox.dimension.length_x = dist2max
50 | target.bbox.dimension.length_y = dist2min
51 | else:
52 | yaw = np.arctan2(y1-y0, x1-x0)
53 | target.bbox.dimension.length_x = dist2min
54 | target.bbox.dimension.length_y = dist2max
55 | q = tf.transformations.quaternion_from_euler(0, 0, yaw)
56 | target.bbox.pose.pose.orientation = Quaternion(*q)
57 |
58 | # TODO: Add reasonable covariances
59 | target.bbox.pose.covariance = np.diag([1] * 6).flatten().tolist()
60 | target.bbox.dimension.covariance = np.diag([1] * 3).flatten().tolist()
61 |
62 | return target
63 |
--------------------------------------------------------------------------------
/src/perception/detection/lidar_filters/src/zzz_perception_detection_lidar_filters/transformers.py:
--------------------------------------------------------------------------------
1 | import rospy
2 | import numpy as np
3 | import pcl
4 | import tf
5 | import tf2_ros as tf2
6 | from sensor_msgs.msg import PointCloud2
7 |
8 | class ExtrinsicFilter:
9 | '''
10 | TODO: Also accept object detection result as input
11 | '''
12 | def __init__(self, **params):
13 | self._params = params
14 | self._tfbuffer = tf2.Buffer()
15 | self._tflistener = tf2.TransformListener(self._tfbuffer)
16 |
17 | def filter(self, msg):
18 | assert type(msg) == PointCloud2
19 |
20 | cloud = pcl.PointCloud(msg)
21 | try:
22 | rt = self._tfbuffer.lookup_transform(msg.header.frame_id, self._params['target_frame'], msg.header.stamp)
23 | except tf2.LookupException as e:
24 | rospy.logwarn("TF lookup error: %s", str(e))
25 | return None
26 |
27 | euler = tf.transformations.quaternion_matrix([rt.transform.rotation.x, rt.transform.rotation.y, rt.transform.rotation.z, rt.transform.rotation.w])
28 | homo = cloud.xyz - [rt.transform.translation.x, rt.transform.translation.y, rt.transform.translation.z]
29 | cloud.xyz[:,:] = homo.dot(euler[:3,:3])
30 |
31 | nmsg = cloud.to_msg()
32 | nmsg.header.stamp = msg.header.stamp
33 | nmsg.header.frame_id = self._params['target_frame']
34 | return nmsg
35 |
36 | class PointCloudBufferFilter:
37 | '''
38 | TODO: Implement multiple point cloud aggregator using icp or other method
39 | '''
40 |
--------------------------------------------------------------------------------
/src/perception/localization/README.md:
--------------------------------------------------------------------------------
1 | This package contains nodes to perform vehicle localization.
--------------------------------------------------------------------------------
/src/perception/localization/imu_wheel_odom/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_localization_imu_wheel_odom)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/locator
11 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
12 | )
13 |
--------------------------------------------------------------------------------
/src/perception/localization/imu_wheel_odom/README.md:
--------------------------------------------------------------------------------
1 | This package combines wheel encoder and IMU measurements into a simple odometry. It should behave similarly to the `robot_localization` ROS package.
--------------------------------------------------------------------------------
/src/perception/localization/imu_wheel_odom/nodes/locator:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | if __name__ == "__main__":
4 | pass
5 |
--------------------------------------------------------------------------------
/src/perception/localization/imu_wheel_odom/package.xml:
--------------------------------------------------------------------------------
1 |
2 | zzz_perception_localization_imu_wheel_odom
3 | 0.1.0
4 |
5 | Combine wheel encoder and IMU into a simple odometry
6 |
7 | BSD
8 | Yuanxin Zhong
9 | Yuanxin Zhong
10 |
11 | https://github.com/cmpute/zzz
12 | https://github.com/cmpute/zzz
13 | https://github.com/cmpute/zzz/issues
14 |
15 | catkin
16 | rospy
17 | std_msgs
18 | nav_msgs
19 | zzz_common
20 |
21 |
22 |
--------------------------------------------------------------------------------
/src/perception/localization/imu_wheel_odom/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_perception_localization_imu_wheel_odom'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
--------------------------------------------------------------------------------
/src/perception/localization/imu_wheel_odom/src/zzz_perception_localization_imu_wheel_odom/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/perception/localization/imu_wheel_odom/src/zzz_perception_localization_imu_wheel_odom/__init__.py
--------------------------------------------------------------------------------
/src/perception/protocol/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_msgs)
3 |
4 | find_package(catkin REQUIRED genmsg std_msgs # essential dependency
5 | geometry_msgs sensor_msgs
6 | )
7 |
8 | add_message_files(DIRECTORY msg FILES
9 | BoundingBox.msg
10 | BoundingBox2D.msg
11 | DetectionBox.msg
12 | DetectionBox2D.msg
13 | DetectionBox2DArray.msg
14 | DetectionBoxArray.msg
15 | Dimension2DWithCovariance.msg
16 | DimensionWithCovariance.msg
17 | LaneDetection.msg
18 | LaneDetectionArray.msg
19 | ObjectClass.msg
20 | ObjectSignals.msg
21 | Pose2DWithCovariance.msg
22 | TrackingBox.msg
23 | TrackingBoxArray.msg
24 | )
25 |
26 | generate_messages(DEPENDENCIES std_msgs geometry_msgs sensor_msgs)
27 | catkin_package(CATKIN_DEPENDS std_msgs geometry_msgs sensor_msgs)
28 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/BoundingBox.msg:
--------------------------------------------------------------------------------
1 |
2 | # A 3D bounding box that can be positioned and rotated about its center (6 DOF). Dimensions of this box are in meters
3 |
4 | # The position and orientation of the rigid body center
5 | geometry_msgs/PoseWithCovariance pose
6 |
7 | # The size of (in meters) the bounding box surrounding the object's center pose.
8 | DimensionWithCovariance dimension
--------------------------------------------------------------------------------
/src/perception/protocol/msg/BoundingBox2D.msg:
--------------------------------------------------------------------------------
1 | # A 2D bounding box that can be rotated about its center. All dimensions are in pixels, but represented using floating-point
2 |
3 | # The position and orientation of the rigid body center
4 | Pose2DWithCovariance pose
5 |
6 | # The size (in pixels) of the bounding box surrounding the object relative to the pose of its center.
7 | Dimension2DWithCovariance dimension
8 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/DetectionBox.msg:
--------------------------------------------------------------------------------
1 | # Defines a box-shaped 3D detection result.
2 |
3 | # ----------------- Data -----------------
4 |
5 | # The 3D data that generated these results (i.e. object region cropped out of
6 | # the point cloud). This information is not required for all detectors, so it may be empty.
7 | sensor_msgs/PointCloud2 source_cloud
8 |
9 | # The 2D data in the same area for fusion purpose
10 | sensor_msgs/Image source_img
11 |
12 | # The frame id of source_img
13 | string source_frame
14 |
15 | # ---------------- Properties --------------------
16 |
17 | # Describes several classification results for the object
18 | # This field is required to be sorted in descending order of scores
19 | ObjectClass[] classes
20 |
21 | # This field indicates visual (or sound?) signal from the object
22 | ObjectSignals signal
23 |
24 | # 3D bounding box surrounding the object.
25 | BoundingBox bbox
26 |
27 | # This field is for storing auxiliary text or data (e.g. annotation notes)
28 | string comments
29 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/DetectionBox2D.msg:
--------------------------------------------------------------------------------
1 | # Defines a box-shaped 2D detection result.
2 |
3 | # ----------------- Data -----------------
4 |
5 | # The 2D data that generated these results (i.e. region proposal cropped out of
6 | # the image). Not required for all use cases, so it may be empty.
7 | sensor_msgs/Image source_img
8 |
9 | # ---------------- Properties --------------------
10 |
11 | # Describes several classification results for the object
12 | # This field is required to be sorted in descending order of scores
13 | ObjectClass[] classes
14 |
15 | # This field indicates visual (or sound?) signal from the object
16 | ObjectSignals signal
17 |
18 | # 2D bounding box surrounding the object.
19 | BoundingBox2D bbox
20 |
21 | # This field is for storing auxiliary text or data
22 | string comments
23 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/DetectionBox2DArray.msg:
--------------------------------------------------------------------------------
1 | # Describe a list of 2D detections, for a multi-object 2D detector.
2 |
3 | Header header
4 |
5 | # A list of the detected proposals. A multi-proposal detector might generate
6 | # this list with many candidate detections generated from a single input.
7 | DetectionBox2D[] detections
--------------------------------------------------------------------------------
/src/perception/protocol/msg/DetectionBoxArray.msg:
--------------------------------------------------------------------------------
1 | # Describe a list of 3D detections, for a multi-object 3D detector.
2 |
3 | Header header
4 |
5 | # A list of the detected proposals. A multi-proposal detector might generate
6 | # this list with many candidate detections generated from a single input.
7 | DetectionBox[] detections
8 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/Dimension2DWithCovariance.msg:
--------------------------------------------------------------------------------
1 | # Describes the size of an object in 2D space (in pixels) with uncertainty
2 |
3 | float64 length_x # width
4 | float64 length_y # length
5 |
6 | # Row-major representation of the 2x2 covariance matrix
7 | # In order, the parameters are: (length_x, length_y)
8 | float64[4] covariance
9 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/DimensionWithCovariance.msg:
--------------------------------------------------------------------------------
1 | # Describes the size of an object in 3D space (in meters) with uncertainty
2 |
3 | float64 length_x # width
4 | float64 length_y # height
5 | float64 length_z # length
6 |
7 | # Row-major representation of the 3x3 covariance matrix
8 | # In order, the parameters are: (length_x, length_y, length_z)
9 | float64[9] covariance
--------------------------------------------------------------------------------
/src/perception/protocol/msg/LaneDetection.msg:
--------------------------------------------------------------------------------
1 | # Geometric representation of center line of the lane
2 | float32[] center_line_coeffs
3 |
4 | # The location at the end of the line
5 | float32 end_location
6 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/LaneDetectionArray.msg:
--------------------------------------------------------------------------------
1 | # This message describes lane detection list
2 |
3 | Header header
4 |
5 | LaneDetection[] detections
6 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/ObjectClass.msg:
--------------------------------------------------------------------------------
1 | # Describes one classification result for a detected object
2 |
3 | # The unique numeric classification ID of object detected
4 | uint32 classid
5 |
6 | # The probability or confidence value of the detected object. By convention, this value should lie in the range 0~1.
7 | float32 score
8 |
9 | # Other information about the class (e.g. class name). Only for debug
10 | string comments
11 |
12 | ##############################################################
13 | ### Here is a hierarchical table of all included types ###
14 | ##############################################################
15 | # Hierarchy is encoded in a 32-bit integer. Each 4 bits (one hex digit) represent one level, and the lowest 4 bits are the top level
16 |
17 | uint32 UNKNOWN = 0 # 0x0000
18 | uint32 UNKNOWN_DYNAMIC = 16 # 0x0010
19 | uint32 UNKNOWN_STATIC = 32 # 0x0020
20 |
21 | uint32 VEHICLE = 1 # 0x0001
22 | uint32 VEHICLE_PASSENGER = 17 # 0x0011, normal passenger_vehicles
23 | uint32 VEHICEL_VAN = 33 # 0x0021
24 | uint32 VEHICLE_TRUCK = 49 # 0x0031
25 | uint32 VEHICLE_BUS = 65 # 0x0041
26 | uint32 VEHICLE_SCHOOLBUS = 321 # 0x0141
27 | uint32 VEHICLE_SCHOOLBUS_STOP = 4417 # 0x1141
28 | uint32 VEHICLE_EMERGENCY = 81 # 0x0051, emergency vehicles, including the subtypes below
29 | uint32 VEHICLE_EMERGENCY_POLICE = 337 # 0x0151
30 | uint32 VEHICLE_EMERGENCY_POLICE_FLASH = 4433 # 0x1151
31 | uint32 VEHICLE_EMERGENCY_FIRE = 593 # 0x0251
32 | uint32 VEHICLE_EMERGENCY_FIRE_FLASH = 4689 # 0x1251
33 | uint32 VEHICLE_EMERGENCY_CIVIL = 849 # 0x0351, including utility vehicle and tow trucks
34 | uint32 VEHICLE_EMERGENCY_CIVIL_FLASH = 4945 # 0x1351
35 |
36 | uint32 HUMAN = 2 # 0x0002
37 | uint32 HUMAN_PEDESTRIAN = 18 # 0x0012
38 | uint32 HUMAN_ROADWORKER = 34 # 0x0022
39 |
40 | uint32 CYCLIST = 3 # 0x0003
41 | uint32 CYCLIST_BICYCLE = 19 # 0x0013
42 | uint32 CYCLIST_MOTORCYCLE = 35 # 0x0023
43 | uint32 CYCLIST_TRICYCLE = 51 # 0x0033
44 |
45 | uint32 ANIMAL = 4 # 0x0004
46 | uint32 ANIMAL_DOGLIKE = 20 # 0x0014, includes dog, cat, wolf, etc.
47 | uint32 ANIMAL_DEERLIKE = 36 # 0x0024, includes deer, etc.
48 | uint32 ANIMAL_COWLIKE = 52 # 0x0034, includes cow, horse, pig, etc.
49 |
50 | uint32 ROAD_OBJECT = 5 # 0x0005, objects in road area
51 | uint32 ROAD_TRAFFIC_CONE = 21 # 0x0015, traffic cone
52 | uint32 ROAD_TRAFFIC_BLOCKER = 37 # 0x0025, traffic blocker, e.g. "Road Closed" sign
53 |
54 | uint32 ROADSIDE_OBJECT = 6 # 0x0006, objects in road side
55 | uint32 ROADSIDE_TRAFFIC_LIGHT = 22 # 0x0016
56 | uint32 ROADSIDE_TRAFFIC_SIGN = 38 # 0x0026
57 | uint32 ROADSIDE_TREE = 54 # 0x0036, including all roadside vegetation
58 |
59 | uint32 LEVEL_MASK_0 = 15 # 0x000f
60 | uint32 LEVEL_MASK_1 = 255 # 0x00ff
61 | uint32 LEVEL_MASK_2 = 4095 # 0x0fff
62 | uint32 LEVEL_MASK_3 = 65535 # 0xffff
63 |
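Because each 4-bit group refines the previous level, checking whether a class id falls under a broader category is a masked comparison against the appropriate `LEVEL_MASK_*`. A minimal sketch (the helper name is my own and not part of the message definition):

```python
from zzz_perception_msgs.msg import ObjectClass

LEVEL_MASKS = (ObjectClass.LEVEL_MASK_0, ObjectClass.LEVEL_MASK_1,
               ObjectClass.LEVEL_MASK_2, ObjectClass.LEVEL_MASK_3)

def belongs_to(classid, ancestor, level):
    """True if `classid` is `ancestor` or one of its descendants.

    `level` is the depth at which `ancestor` is defined (0 = top category).
    """
    return (classid & LEVEL_MASKS[level]) == ancestor

# A school bus is a vehicle (level 0) and, more specifically, a bus (level 1)
assert belongs_to(ObjectClass.VEHICLE_SCHOOLBUS, ObjectClass.VEHICLE, 0)
assert belongs_to(ObjectClass.VEHICLE_SCHOOLBUS, ObjectClass.VEHICLE_BUS, 1)
assert not belongs_to(ObjectClass.HUMAN_PEDESTRIAN, ObjectClass.VEHICLE, 0)
```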
--------------------------------------------------------------------------------
/src/perception/protocol/msg/ObjectSignals.msg:
--------------------------------------------------------------------------------
1 | # This message is used to represent detected vehicle light signals or human hand signals
2 |
3 | # Signal flags. Multiple signals can exist at the same time.
4 | uint16 flags
5 |
6 | uint16 UNKNOWN = 0 # 0x00
7 | uint16 NONE = 16 # 0x10
8 |
9 | # This area is related to https://en.wikipedia.org/wiki/Automotive_lighting
10 | uint16 VEHICLE_SIGNAL = 1 # 0x01
11 | uint16 VEHICLE_SIGNAL_LEFT_TURN = 17 # 0x11
12 | uint16 VEHICLE_SIGNAL_RIGHT_TURN = 33 # 0x21
13 | uint16 VEHICLE_SIGNAL_HAZARD = 49 # 0x31
14 | uint16 VEHICLE_SIGNAL_BRAKE = 65 # 0x41
15 | uint16 VEHICLE_SIGNAL_REVERSE = 81 # 0x51
16 |
17 | uint16 TRAFFIC_LIGHT = 2 # 0x02
18 | uint16 TRAFFIC_LIGHT_RED = 18 # 0x12
19 | uint16 TRAFFIC_LIGHT_YELLOW = 34 # 0x22
20 | uint16 TRAFFIC_LIGHT_GREEN = 50 # 0x32
21 | uint16 TRAFFIC_LIGHT_GREEN_TURN = 66 # 0x42
22 |
23 | # Confidence of the signal detection
24 | float32 score
25 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/Pose2DWithCovariance.msg:
--------------------------------------------------------------------------------
1 |
2 | float64 x # or u in u-v plane
3 | float64 y # or v in u-v plane
4 | float64 theta
5 |
6 | # Row-major representation of the 3x3 covariance matrix
7 | # In order, the parameters are: (x, y, theta)
8 | float64[9] covariance
9 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/TrackingBox.msg:
--------------------------------------------------------------------------------
1 | # Defines a box-shaped 3D tracking result.
2 |
3 | # Unique id identifying the tracked object.
4 | # This field can be an increasing number starting from zero or a random hash
5 | uint64 uid
6 |
7 | # Defines the (existence) confidence of the object [-1 ~ 1].
8 | # The confidence can be negative to indicate that this object is invalid.
9 | # This confidence should not be fed into the decision system. It should be used for validation purposes.
10 | float32 confidence
11 |
12 | # Describes several classification results for the object
13 | # This field is required to be sorted in descending order of scores
14 | ObjectClass[] classes
15 |
16 | # This field contains the behavior identification based on light signal or hand signal
17 | ObjectSignals signal
18 |
19 | # Current 3D bounding box.
20 | BoundingBox bbox
21 |
22 | # Estimated 3D velocity and acceleration
23 | geometry_msgs/TwistWithCovariance twist
24 | geometry_msgs/AccelWithCovariance accel
25 |
26 | # This field is for storing auxiliary text or data
27 | string comments
28 |
29 |
--------------------------------------------------------------------------------
/src/perception/protocol/msg/TrackingBoxArray.msg:
--------------------------------------------------------------------------------
1 | # Describes a list of 3D tracking targets, e.g. the output of a multi-object tracker.
2 |
3 | Header header
4 |
5 | # A list of the detected targets.
6 | TrackingBox[] targets
7 |
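
A minimal publishing sketch (assumptions: the message bindings are generated, and BoundingBox exposes pose.pose.position as it is accessed elsewhere in this repository, e.g. in object_filters/transformers.py):

    #!/usr/bin/env python
    import rospy
    from std_msgs.msg import Header
    from zzz_perception_msgs.msg import TrackingBox, TrackingBoxArray

    rospy.init_node("tracking_box_array_example")
    pub = rospy.Publisher("objects_tracked", TrackingBoxArray, queue_size=1)

    target = TrackingBox()
    target.uid = 1
    target.confidence = 0.9
    target.bbox.pose.pose.position.x = 12.0   # assumed BoundingBox layout

    msg = TrackingBoxArray(header=Header(stamp=rospy.Time.now(), frame_id="odom"),
                           targets=[target])
    pub.publish(msg)
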
--------------------------------------------------------------------------------
/src/perception/protocol/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_perception_msgs</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     Standard message definitions for ZZZ perception module
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <build_depend>message_generation</build_depend>
18 |   <build_depend>std_msgs</build_depend>
19 |   <build_depend>geometry_msgs</build_depend>
20 |   <build_depend>sensor_msgs</build_depend>
21 |   <exec_depend>message_runtime</exec_depend>
22 |   <exec_depend>std_msgs</exec_depend>
23 |   <exec_depend>geometry_msgs</exec_depend>
24 |   <exec_depend>sensor_msgs</exec_depend>
25 | </package>
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_tracking_object_filters)
3 |
4 | find_package(catkin REQUIRED)
5 |
6 | catkin_package()
7 |
8 | catkin_python_setup()
9 |
10 | catkin_install_python(PROGRAMS
11 | nodes/criteria
12 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
13 |
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/README.md:
--------------------------------------------------------------------------------
1 | # Package Summary
2 | This package contains filters on detected objects.
3 |
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/nodes/criteria:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from zzz_common.params import parse_private_args
6 | from zzz_perception_msgs.msg import DetectionBoxArray
7 | from zzz_perception_tracking_object_filters.selectors import CriteriaFilter
8 |
9 | class FilterNode():
10 | def __init__(self):
11 | params = parse_private_args(
12 | input_topic="objects_detected",
13 | output_topic="objects_filtered",
14 | max_length_x=10.,
15 | min_length_x=.3,
16 | max_length_y=4.,
17 | min_length_y=.3,
18 | max_length_z=3.,
19 | min_length_z=.3
20 | )
21 |
22 | self._subscriber = rospy.Subscriber(params.pop("input_topic"), DetectionBoxArray, self.callback)
23 | self._publisher = rospy.Publisher(params.pop("output_topic"), DetectionBoxArray, queue_size=1)
24 | self._filter = CriteriaFilter(**params)
25 |
26 | def callback(self, msg):
27 | self._publisher.publish(self._filter.filter(msg))
28 |
29 | if __name__ == "__main__":
30 | rospy.init_node("criteria_filter")
31 | node = FilterNode()
32 | rospy.spin()
33 |
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/nodes/to_static:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | import os
5 | import threading
6 |
7 | from zzz_common.params import parse_private_args
8 | from zzz_driver_msgs.msg import RigidBodyStateStamped
9 | from zzz_perception_msgs.msg import DetectionBoxArray
10 | from zzz_perception_tracking_object_filters.transformers import RigidToStaticTransformer
11 |
12 | class FilterNode():
13 | def __init__(self):
14 | params = parse_private_args(
15 | input_topic="objects_detected",
16 | output_topic="objects_transformed",
17 | pose_topic="/zzz/navigation/ego_pose"
18 | )
19 |
20 | self._filter = RigidToStaticTransformer(**params)
21 |
22 | self._input_subscriber = rospy.Subscriber(params.pop("input_topic"), DetectionBoxArray, self.detection_callback)
23 | self._pose_subscriber = rospy.Subscriber(params.pop("pose_topic"), RigidBodyStateStamped, self.pose_callback)
24 | self._publisher = rospy.Publisher(params.pop("output_topic"), DetectionBoxArray, queue_size=1)
25 |
26 | self._pose_trigger = threading.Event()
27 |
28 | def pose_callback(self, msg):
29 | self._filter.receive_pose(msg)
30 | self._pose_trigger.set()
31 |
32 | def detection_callback(self, msg):
33 | self._pose_trigger.wait()
34 | self._publisher.publish(self._filter.filter(msg))
35 |
36 | if __name__ == "__main__":
37 | rospy.init_node("rigid_to_static")
38 | node = FilterNode()
39 | rospy.spin()
40 |
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_perception_tracking_object_filters</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     Implementation of object filters
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <depend>roscpp</depend>
18 |   <depend>sensor_msgs</depend>
19 |   <depend>tf2_ros</depend>
20 |   <depend>zzz_perception_msgs</depend>
21 |   <depend>pcl_ros</depend>
22 | </package>
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_perception_tracking_object_filters'],
8 | package_dir={'': 'src'},
9 |     install_requires=['pcl-py']
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/src/zzz_perception_tracking_object_filters/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/perception/tracking/object_filters/src/zzz_perception_tracking_object_filters/__init__.py
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/src/zzz_perception_tracking_object_filters/selectors.py:
--------------------------------------------------------------------------------
1 | from easydict import EasyDict
2 | from zzz_perception_msgs.msg import DetectionBox, DetectionBoxArray
3 |
4 | class CriteriaFilter:
5 | def __init__(self, **params):
6 | self._params = EasyDict(params)
7 |
8 | def filter(self, array):
9 | assert type(array) == DetectionBoxArray
10 |
11 | filtered = DetectionBoxArray()
12 | filtered.header = array.header
13 |
14 | for target in array.detections:
15 | if self._size_filter(target):
16 | filtered.detections.append(target)
17 | return filtered
18 |
19 | def _size_filter(self, target):
20 | assert type(target) == DetectionBox
21 |
22 | if target.bbox.dimension.length_x > self._params.max_length_x \
23 | or target.bbox.dimension.length_x < self._params.min_length_x:
24 | return False
25 | if target.bbox.dimension.length_y > self._params.max_length_y \
26 | or target.bbox.dimension.length_y < self._params.min_length_y:
27 | return False
28 | if target.bbox.dimension.length_z > self._params.max_length_z \
29 | or target.bbox.dimension.length_z < self._params.min_length_z:
30 | return False
31 | return True
32 |
--------------------------------------------------------------------------------
/src/perception/tracking/object_filters/src/zzz_perception_tracking_object_filters/transformers.py:
--------------------------------------------------------------------------------
1 | import rospy
2 | import numpy as np
3 | import tf
4 | import tf.transformations as tft
5 | import tf2_ros as tf2
6 | from zzz_perception_msgs.msg import DetectionBoxArray
7 |
8 | class RigidToStaticTransformer:
9 | '''
10 |     This module transforms detection/tracking results into a static frame (e.g. odom or map) using RigidBodyState.
11 | '''
12 | def __init__(self, **params):
13 | self._params = params
14 | self._ego_pose = None
15 |
16 | self._tfbuffer = tf2.Buffer()
17 | self._tflistener = tf2.TransformListener(self._tfbuffer)
18 |
19 | def receive_pose(self, msg):
20 | self._ego_pose = msg
21 |
22 | def filter(self, msg):
23 |         if self._ego_pose is None:
24 | raise RuntimeError("Transformer only works when poses are received!")
25 |
26 | rt = self._tfbuffer.lookup_transform(msg.header.frame_id, self._ego_pose.state.child_frame_id, msg.header.stamp)
27 | q_static = [rt.transform.rotation.x, rt.transform.rotation.y, rt.transform.rotation.z, rt.transform.rotation.w]
28 | R_static = tft.quaternion_matrix(q_static)[:3,:3]
29 | T_static = [rt.transform.translation.x, rt.transform.translation.y, rt.transform.translation.z]
30 |
31 | q = [self._ego_pose.state.pose.pose.orientation.x, self._ego_pose.state.pose.pose.orientation.y,
32 | self._ego_pose.state.pose.pose.orientation.z, self._ego_pose.state.pose.pose.orientation.w]
33 | R = tft.quaternion_matrix(q)[:3,:3]
34 | T = [self._ego_pose.state.pose.pose.position.x, self._ego_pose.state.pose.pose.position.y, self._ego_pose.state.pose.pose.position.z]
35 |
36 | if type(msg) == DetectionBoxArray:
37 | for detection in msg.detections:
38 | old_pos = [detection.bbox.pose.pose.position.x, detection.bbox.pose.pose.position.y, detection.bbox.pose.pose.position.z]
39 | new_pos = (np.array(old_pos).dot(R_static.T) + T_static).dot(R.T) + T
40 | old_ori = [detection.bbox.pose.pose.orientation.x, detection.bbox.pose.pose.orientation.y,
41 | detection.bbox.pose.pose.orientation.z, detection.bbox.pose.pose.orientation.w]
42 | new_ori = tft.quaternion_multiply(q, tft.quaternion_multiply(q_static, old_ori))
43 |
44 | detection.bbox.pose.pose.position.x = new_pos[0]
45 | detection.bbox.pose.pose.position.y = new_pos[1]
46 | detection.bbox.pose.pose.position.z = new_pos[2]
47 | detection.bbox.pose.pose.orientation.x = new_ori[0]
48 | detection.bbox.pose.pose.orientation.y = new_ori[1]
49 | detection.bbox.pose.pose.orientation.z = new_ori[2]
50 | detection.bbox.pose.pose.orientation.w = new_ori[3]
51 | # TODO: convert covariance
52 |
53 | msg.header.frame_id = self._ego_pose.header.frame_id
54 | return msg
55 | else:
56 | raise TypeError("Not supported type!")
57 |
58 |
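
For reference, the position update in filter() above is the composition of two rigid transforms; a small numpy illustration of the same arithmetic with arbitrary values:

    import numpy as np

    p = np.array([1.0, 0.0, 0.0])                              # detected position
    R_static, T_static = np.eye(3), np.array([0.5, 0.0, 0.0])  # from the TF lookup
    R, T = np.eye(3), np.array([10.0, 2.0, 0.0])               # from the ego pose

    # Identical to (p.dot(R_static.T) + T_static).dot(R.T) + T as used above
    p_new = R.dot(R_static.dot(p) + T_static) + T
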
--------------------------------------------------------------------------------
/src/perception/tracking/object_trackers/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_perception_tracking_object_trackers)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS nodes/box_tracker
10 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
11 |
--------------------------------------------------------------------------------
/src/perception/tracking/object_trackers/nodes/box_tracker:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | import rospy
3 |
4 | from zzz_common.params import parse_private_args
5 | from zzz_perception_msgs.msg import DetectionBoxArray, TrackingBoxArray
6 | from zzz_perception_tracking_object_trackers.trackers import MultiBoxTracker
7 |
8 | class TrackingNode:
9 | def __init__(self):
10 | params = parse_private_args(
11 | detection_topic="objects_detected",
12 | tracking_topic="objects_tracked"
13 | )
14 |
15 | self._tracker = MultiBoxTracker()
16 | self._detection_subscriber = rospy.Subscriber(params.detection_topic, DetectionBoxArray, self.detection_callback)
17 | self._tracking_publisher = rospy.Publisher(params.tracking_topic, TrackingBoxArray, queue_size=1)
18 |
19 | def detection_callback(self, msg):
20 | self._tracker.update(msg)
21 | self._tracking_publisher.publish(self._tracker.report())
22 |
23 | if __name__=="__main__":
24 | rospy.init_node("box_tracker", log_level=rospy.DEBUG)
25 | tracker = TrackingNode()
26 | rospy.spin()
--------------------------------------------------------------------------------
/src/perception/tracking/object_trackers/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_perception_tracking_object_trackers</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     Module to track detected objects
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <build_depend>roscpp</build_depend>
18 |   <build_depend>std_msgs</build_depend>
19 |   <build_export_depend>roscpp</build_export_depend>
20 |   <build_export_depend>std_msgs</build_export_depend>
21 |   <exec_depend>roscpp</exec_depend>
22 |   <exec_depend>std_msgs</exec_depend>
23 |   <depend>nav_msgs</depend>
24 |   <depend>zzz_common</depend>
25 | </package>
--------------------------------------------------------------------------------
/src/perception/tracking/object_trackers/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_perception_tracking_object_trackers'],
8 | package_dir={'': 'src'},
9 | install_requires=['scipy', 'filterpy']
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/perception/tracking/object_trackers/src/zzz_perception_tracking_object_trackers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/perception/tracking/object_trackers/src/zzz_perception_tracking_object_trackers/__init__.py
--------------------------------------------------------------------------------
/src/perception/tracking/object_trackers/src/zzz_perception_tracking_object_trackers/associate.py:
--------------------------------------------------------------------------------
1 | '''
2 | This module provides methods to associate detections with existing trackers.
3 |
4 | Every association class should have a method called associate, which takes lists of
5 | trackers and detections as input. The result is a list giving, for each detection,
6 | the index of the associated tracker; use float('nan') if the detection should initialize a new tracker.
7 | '''
8 |
9 | import numpy as np
10 |
11 | class JointProbabilisticDataAssociationFilter:
12 | def __init__(self, chi_sqr_thres):
13 | self._chi_sqr_thres = chi_sqr_thres
14 |
15 |     def measurement_validate(self, input):
16 | raise NotImplementedError()
17 |
18 | def associate(self, trackers, detection):
19 | raise NotImplementedError()
20 |
21 | class NearestNeighborFilter:
22 | '''
23 |     A naive nearest neighbor association that connects each detection to the closest tracker.
24 |
25 | XXX: Other metrics: take covariance, speed or heading angle into account
26 | '''
27 | def __init__(self, dist_thres=3, dist_metric="euclidean"):
28 |         self._dist_thres = dist_thres
29 | self._metric = dist_metric
30 |
31 | def associate(self, detections, pose_trackers, feature_trackers):
32 | '''
33 |         detections should be a list of zzz_perception_msgs/DetectionBox
34 |         pose_trackers and feature_trackers are dictionaries of trackers
35 | '''
36 | if self._metric != "euclidean":
37 | raise ValueError("Only Euclidean distance nearest neighbor is currently supported")
38 | if len(detections) == 0:
39 | return []
40 |
41 | results = [float('nan')] * len(detections)
42 | tracker_locations = np.zeros((len(pose_trackers), 3))
43 | tracker_keymap = {}
44 | for keyidx_tr, idx_tr in enumerate(pose_trackers.keys()):
45 | tracker_locations[keyidx_tr, :] = pose_trackers[idx_tr].pose_state[:3]
46 | tracker_keymap[keyidx_tr] = idx_tr
47 |
48 | if len(pose_trackers) == 0:
49 | return results
50 | for idx_dt, dt in enumerate(detections):
51 | position = dt.bbox.pose.pose.position
52 | d = np.array([position.x, position.y, position.z]) - tracker_locations
53 | d = np.linalg.norm(d, axis=1)
54 | if np.min(d) < self._dist_thres:
55 | results[idx_dt] = tracker_keymap[np.argmin(d)]
56 |
57 | return results
58 |
59 | class GlobalNearestNeighbor:
60 | '''
61 | Implementation of global nearest neighbor
62 | '''
63 | def __init__(self):
64 | raise NotImplementedError()
65 |
66 | def associate(self, tracker, detection):
67 | raise NotImplementedError()
68 |
69 | NN = NearestNeighborFilter
70 | JPDAF = JointProbabilisticDataAssociationFilter
71 |
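
A small usage sketch of NearestNeighborFilter (not part of the package). FakeTracker stands in for the real tracker class, which is only assumed here to expose a pose_state sequence whose first three entries are x, y, z:

    from zzz_perception_msgs.msg import DetectionBox
    from zzz_perception_tracking_object_trackers.associate import NearestNeighborFilter

    class FakeTracker(object):
        def __init__(self, x, y, z):
            self.pose_state = [x, y, z]

    det = DetectionBox()
    det.bbox.pose.pose.position.x = 1.0
    det.bbox.pose.pose.position.y = 0.5

    nn = NearestNeighborFilter(dist_thres=3)
    # Detection 0 is associated with tracker key 7 (distance ~0.22 < dist_thres)
    print(nn.associate([det], {7: FakeTracker(1.2, 0.4, 0.0)}, feature_trackers={}))
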
--------------------------------------------------------------------------------
/src/planning/README.md:
--------------------------------------------------------------------------------
1 | This module contains packages for autonomous planning.
2 |
3 | - **Decision**: Making decisions according to the environment
4 | - **Routing**: Global path planner
5 | - **Local**: Local trajectory planner
6 |
--------------------------------------------------------------------------------
/src/planning/decision/lane_models/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_planning_decision_lane_models)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/idm_lane_utility
11 | nodes/rls_train
12 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
13 |
--------------------------------------------------------------------------------
/src/planning/decision/lane_models/README.md:
--------------------------------------------------------------------------------
1 | TODO: clean the structure of lateral, longitudinal and learning models...
--------------------------------------------------------------------------------
/src/planning/decision/lane_models/nodes/idm_lane_utility:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | from zzz_common.params import parse_private_args
5 | from zzz_cognition_msgs.msg import MapState
6 | from zzz_planning_decision_lane_models.longitudinal import IDM
7 | from zzz_planning_decision_lane_models.lateral import LaneUtility
8 | from zzz_planning_decision_lane_models import MainDecision
9 | from zzz_planning_msgs.msg import DecisionTrajectory
10 |
11 | class DecisionNode(object):
12 | def __init__(self):
13 |
14 | params = parse_private_args(
15 | decision_trajectory_topic="decision_trajectory",
16 | dynamic_map_topic="/zzz/cognition/local_dynamic_map/map_with_ref"
17 | )
18 |
19 | self._dynamic_map_subscriber = rospy.Subscriber(params.dynamic_map_topic, MapState, self.dynamic_map_callback)
20 | self._decision_trajectory_publisher = rospy.Publisher(params.decision_trajectory_topic, DecisionTrajectory, queue_size=1)
21 |
22 | model_lon = IDM()
23 | model_lat = LaneUtility(model_lon)
24 | self._decision_instance = MainDecision(lon_decision=model_lon, lat_decision=model_lat)
25 | self._last_decision_time = 0
26 |
27 | def dynamic_map_callback(self, msg, decision_dt = 0.75):
28 | current_time = rospy.Time.now().to_sec()
29 | if current_time - self._last_decision_time < decision_dt:
30 | return
31 | self._last_decision_time = current_time
32 |
33 | rospy.logdebug("Current_time: %f",self._last_decision_time)
34 | publish_msg = self._decision_instance.generate_trajectory_with_speed(msg)
35 | rospy.logdebug("PUBLISH: trajectory length = %d, desired_speed = %f",
36 | len(publish_msg.trajectory.poses), publish_msg.desired_speed)
37 |
38 | self._decision_trajectory_publisher.publish(publish_msg)
39 |
40 | if __name__ == "__main__":
41 |
42 | rospy.init_node("decision_node", log_level=rospy.DEBUG)
43 | node = DecisionNode()
44 | rospy.spin()
45 |
--------------------------------------------------------------------------------
/src/planning/decision/lane_models/nodes/rls_train:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 | from zzz_common.params import parse_private_args
5 | from zzz_cognition_msgs.msg import MapState
6 | from zzz_planning_decision_lane_models.longitudinal import IDM
7 | from zzz_planning_decision_lane_models.learning import RLSDecision
8 | from zzz_planning_decision_lane_models import MainDecision
9 | from zzz_planning_msgs.msg import DecisionTrajectory
10 | from carla_msgs.msg import CarlaCollisionEvent # FIXME: This is Carla related
11 |
12 | class RLSNode(object):
13 | def __init__(self):
14 |
15 | params = parse_private_args(
16 | decision_trajectory_topic="decision_trajectory",
17 | dynamic_map_topic="/zzz/cognition/local_dynamic_map/map_with_ref",
18 | collision_topic="/carla/ego_vehicle/collision"
19 | )
20 |
21 | self._dynamic_map_subscriber = rospy.Subscriber(params.dynamic_map_topic, MapState, self.dynamic_map_callback)
22 | self._decision_trajectory_publisher = rospy.Publisher(params.decision_trajectory_topic, DecisionTrajectory, queue_size=1)
23 | self._collision_subscriber = rospy.Subscriber(params.collision_topic, CarlaCollisionEvent, self.collision_callback)
24 |
25 | model_lon = IDM()
26 | model_lat = RLSDecision()
27 | self._decision_instance = MainDecision(lon_decision=model_lon, lat_decision=model_lat)
28 | self._last_decision_time = 0
29 |
30 | def dynamic_map_callback(self, msg, decision_dt = 0.75):
31 | # TODO: decision frequency
32 | # current_time = rospy.Time.now().to_sec()
33 | # if current_time - self._last_decision_time < decision_dt:
34 | # return
35 | # self._last_decision_time = current_time
36 |
37 | rospy.logdebug("Current_time: %f",self._last_decision_time)
38 | publish_msg = self._decision_instance.generate_trajectory_with_speed(msg)
39 | rospy.logdebug("PUBLISH: trajectory length = %d, desired_speed = %f",
40 | len(publish_msg.trajectory.poses), publish_msg.desired_speed)
41 |
42 | self._decision_trajectory_publisher.publish(publish_msg)
43 |
44 | def collision_callback(self, msg):
45 | # collision_id = msg.other_actor_id
46 | self._decision_instance.lateral_model_instance.collision_signal = True
47 | self._decision_instance.lateral_model_instance.collision_times += 1
48 |
49 | if __name__ == "__main__":
50 |
51 | rospy.init_node("rls_node", log_level=rospy.DEBUG)
52 | node = RLSNode()
53 | rospy.spin()
54 |
55 |
--------------------------------------------------------------------------------
/src/planning/decision/lane_models/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_planning_decision_lane_models</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     The main decision part
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <build_depend>roscpp</build_depend>
18 |   <build_depend>std_msgs</build_depend>
19 |   <build_export_depend>roscpp</build_export_depend>
20 |   <build_export_depend>std_msgs</build_export_depend>
21 |   <exec_depend>roscpp</exec_depend>
22 |   <exec_depend>std_msgs</exec_depend>
23 |   <depend>zzz_planning_msgs</depend>
24 | </package>
--------------------------------------------------------------------------------
/src/planning/decision/lane_models/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_planning_decision_lane_models'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
--------------------------------------------------------------------------------
/src/planning/decision/lane_models/src/zzz_planning_decision_lane_models/__init__.py:
--------------------------------------------------------------------------------
1 | from .main import MainDecision
--------------------------------------------------------------------------------
/src/planning/decision/safeguard/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_planning_decision_safeguard)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/reachable_set
11 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
12 |
--------------------------------------------------------------------------------
/src/planning/decision/safeguard/nodes/reachable_set:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 |
4 | import rospy
5 |
6 | from zzz_common.params import parse_private_args
7 | from zzz_cognition_msgs.msg import MapState
8 | from zzz_planning_msgs.msg import DecisionTrajectory
9 | from zzz_planning_decision_safeguard.reachable_set import ReachableSet
10 |
11 | class SafeguardNode(object):
12 | def __init__(self):
13 | params = parse_private_args(
14 | decision_trajectory_topic="decision_trajectory", # input
15 | safeguard_trajectory_topic="safeguard_trajectory", # output
16 | dynamic_map_topic="/zzz/cognition/local_dynamic_map/map_with_ref"
17 | )
18 | self._dynamic_map_subscriber = rospy.Subscriber(params.dynamic_map_topic, MapState, self.dynamic_map_callback)
19 | self._decision_trajectory_subscriber = rospy.Subscriber(params.decision_trajectory_topic, DecisionTrajectory, self.decision_trajectory_callback)
20 | self._safeguard_trajectory_publisher = rospy.Publisher(params.safeguard_trajectory_topic, DecisionTrajectory, queue_size=1)
21 | self._safeguard_instance = ReachableSet()
22 |
23 | def dynamic_map_callback(self, dynamic_map):
24 | self._safeguard_instance.update_dynamic_map(dynamic_map)
25 |
26 | def decision_trajectory_callback(self, decision):
27 | decision_msg = self.check_trajectory(decision)
28 | self._safeguard_trajectory_publisher.publish(decision_msg)
29 |
30 | def check_trajectory(self, decision):
31 | safeguard_triggered, safespeed = self._safeguard_instance.check_trajectory(decision.trajectory, decision.desired_speed)
32 |
33 | if safeguard_triggered:
34 |             rospy.loginfo("Safeguard triggered")
35 |
36 | msg = decision
37 | msg.trajectory = decision.trajectory
38 | msg.desired_speed = safespeed
39 |
40 | return msg
41 |
42 | if __name__ == "__main__":
43 |
44 | rospy.init_node("safeguard_node", log_level=rospy.DEBUG)
45 | node = SafeguardNode()
46 | rospy.spin()
47 |
--------------------------------------------------------------------------------
/src/planning/decision/safeguard/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_planning_decision_safeguard</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     The safeguard part of the planning decision module
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <build_depend>roscpp</build_depend>
18 |   <build_depend>std_msgs</build_depend>
19 |   <build_export_depend>roscpp</build_export_depend>
20 |   <build_export_depend>std_msgs</build_export_depend>
21 |   <exec_depend>roscpp</exec_depend>
22 |   <exec_depend>std_msgs</exec_depend>
23 |   <depend>zzz_planning_msgs</depend>
24 | </package>
--------------------------------------------------------------------------------
/src/planning/decision/safeguard/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_planning_decision_safeguard'],
8 | package_dir={'': 'src'}
9 | )
10 |
11 | setup(**d)
--------------------------------------------------------------------------------
/src/planning/decision/safeguard/src/zzz_planning_decision_safeguard/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/planning/decision/safeguard/src/zzz_planning_decision_safeguard/__init__.py
--------------------------------------------------------------------------------
/src/planning/protocol/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_planning_msgs)
3 |
4 | find_package(catkin REQUIRED COMPONENTS genmsg std_msgs # essential dependency
5 | nav_msgs
6 | )
7 |
8 | add_message_files(DIRECTORY msg FILES
9 | DecisionTrajectory.msg
10 | )
11 |
12 | generate_messages(DEPENDENCIES std_msgs nav_msgs)
13 | catkin_package(CATKIN_DEPENDS std_msgs nav_msgs)
14 |
--------------------------------------------------------------------------------
/src/planning/protocol/msg/DecisionTrajectory.msg:
--------------------------------------------------------------------------------
1 |
2 |
3 | nav_msgs/Path trajectory
4 |
5 | float32 desired_speed # m/s
6 |
7 | float32 desired_acc # m/s^2
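
A minimal construction sketch (the frame id and numeric values are illustrative):

    from geometry_msgs.msg import PoseStamped
    from nav_msgs.msg import Path
    from zzz_planning_msgs.msg import DecisionTrajectory

    msg = DecisionTrajectory()
    msg.trajectory = Path()
    msg.trajectory.header.frame_id = "map"
    for x, y in [(0.0, 0.0), (1.0, 0.1), (2.0, 0.4)]:
        pose = PoseStamped()
        pose.pose.position.x = x
        pose.pose.position.y = y
        msg.trajectory.poses.append(pose)
    msg.desired_speed = 5.0   # m/s
    msg.desired_acc = 0.5     # m/s^2
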
--------------------------------------------------------------------------------
/src/planning/protocol/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_planning_msgs</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     Standard message definitions for ZZZ planning module
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <build_depend>message_generation</build_depend>
18 |   <build_depend>std_msgs</build_depend>
19 |   <build_depend>nav_msgs</build_depend>
20 |   <exec_depend>message_runtime</exec_depend>
21 |   <exec_depend>std_msgs</exec_depend>
22 |   <exec_depend>nav_msgs</exec_depend>
23 | </package>
--------------------------------------------------------------------------------
/src/planning/routing/.placeholder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/planning/routing/.placeholder
--------------------------------------------------------------------------------
/src/planning/trajectory/.placeholder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/planning/trajectory/.placeholder
--------------------------------------------------------------------------------
/src/tools/README.md:
--------------------------------------------------------------------------------
1 | This package contains fundamental and commonly used tools for the system
2 |
--------------------------------------------------------------------------------
/src/tools/command_calibration.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | class LongitudinalCalibration():
4 |     """
5 |     Calibrate the mapping from (speed, throttle, brake) to acceleration/deceleration
6 |     """
7 |
8 |     def __init__(self):
9 |         self.last_speed = None
10 |
11 |     def run_step(self, target_speed, current_speed, debug=False, dt=0.05):
12 |         """
13 |         target_speed is not used; the acceleration is estimated from consecutive speed samples
14 |         """
15 |         if self.last_speed is not None:
16 |             acc = (current_speed - self.last_speed) / dt
17 |             # TODO: save the (speed, command, acc) sample to a file
18 |         self.last_speed = current_speed
19 |         control = 0.1  # placeholder fixed command applied during calibration
20 |         return control
21 |
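
A sketch of the data collection the docstring describes (not part of the module; the file name and columns are illustrative):

    import csv

    class CalibrationRecorder(object):
        """Collect (speed, throttle, brake, measured acceleration) samples so the
        command-to-acceleration mapping can be fitted offline."""
        def __init__(self, path="longitudinal_calibration.csv", dt=0.05):
            self._path = path
            self._dt = dt
            self._last_speed = None
            self._rows = []

        def record(self, speed, throttle, brake):
            if self._last_speed is not None:
                acc = (speed - self._last_speed) / self._dt
                self._rows.append((speed, throttle, brake, acc))
            self._last_speed = speed

        def save(self):
            with open(self._path, "w") as f:
                csv.writer(f).writerows(self._rows)
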
--------------------------------------------------------------------------------
/src/tools/select_submsg.py:
--------------------------------------------------------------------------------
1 | '''
2 | This module contains a tool to select subfields from a message. One can display the subfields of a message or directly publish a subset of them
3 | '''
4 |
5 | if __name__ == "__main__":
6 | pass
7 |
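
A minimal sketch of the field selection the docstring describes (a hypothetical helper, not implemented in the module):

    def select_field(msg, field_path):
        # Walk a dotted path such as 'pose.pose.position.x' and return that subfield
        value = msg
        for field in field_path.split('.'):
            value = getattr(value, field)
        return value
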
--------------------------------------------------------------------------------
/src/visualization/README.md:
--------------------------------------------------------------------------------
1 | This module contains packages for system and data visualization.
2 |
3 | - **web**: Modules for web based rendering
4 | - **rviz**: Modules for visualization in RViz
5 |
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_visualization_rviz_box_visualizer)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/detection
11 | nodes/detection2d
12 | nodes/tracking
13 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
14 |
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/README.md:
--------------------------------------------------------------------------------
1 | # Package Summary
2 | This package contains RViz visualizers for detection and tracking results.
3 |
4 | # Node `detection`
5 | This node visualizes bounding box detection results.
6 |
7 | ## Input Topic
8 | |Topic|Condition|Type|Description|
9 | |---|---|---|---|
10 | |`objects_detected`|`visualize_type` == `DetectionBoxArray`|`zzz_perception_msgs/DetectionBoxArray`|Input detection result|
11 | ## Output Topic
12 | |Topic|Condition|Type|Description|
13 | |---|---|---|---|
14 | |`objects_visual`||`visualization_msgs/MarkerArray`|Detected objects|
15 | ## Parameters
16 | |Parameter|Type|Description|Default|
17 | |---|---|---|---|
18 | |`visualize_type`|string|Visualized message type|`DetectionBoxArray`|
19 | |`marker_lifetime`|float|Duration of generated markers|`0.1`|
20 | |`box_color`|float[4]|Color of boxes (RGBA)|`51.,128.,204.,0.8`|
21 | |`centroid_color`|float[4]|Color of centroids (RGBA)|`77.,121.,255.,0.8`|
22 | |`centroid_scale`|float|Scale of centroid markers|`0.5`|
23 | |`label_color`|float[4]|Color of labels (RGBA)|`255.,255.,255.,1.0`|
24 | |`label_scale`|float|Scale of label markers|`0.5`|
25 | |`label_height`|float|Elevation of label markers|`1.0`|
26 | |`box_max_size`|float|Maximum size of boxes to be visualized|`10`|
27 |
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/nodes/detection:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from zzz_common.params import parse_private_args
6 | from zzz_perception_msgs.msg import DetectionBoxArray
7 | from visualization_msgs.msg import MarkerArray
8 | from zzz_visualization_rviz_box_visualizer.detection import DetectionBoxVisualizer
9 |
10 | class Node():
11 | def __init__(self):
12 | params = parse_private_args(
13 | input_topic="objects_detected",
14 | output_topic="objects_visual",
15 | marker_namespace="/zzz/detection",
16 |
17 | marker_lifetime=0.2,
18 | label_color=[255,255,255,1],
19 | box_color=[51,102,255,0.8],
20 | centroid_color=[255,255,102,0.8],
21 |
22 | centroid_scale=0.5,
23 | label_scale=0.5,
24 | label_height=1,
25 | box_max_size=10
26 | )
27 |
28 | self._subscriber = rospy.Subscriber(params.pop("input_topic"), DetectionBoxArray, self.callback)
29 | self._publisher = rospy.Publisher(params.pop("output_topic"), MarkerArray, queue_size=1)
30 | self._visualizer = DetectionBoxVisualizer(**params)
31 |
32 | def callback(self, msg):
33 | self._publisher.publish(self._visualizer.visualize(msg))
34 |
35 | if __name__ == "__main__":
36 | rospy.init_node("detection_visualizer")
37 | node = Node()
38 | rospy.spin()
39 |
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/nodes/detection2d:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from zzz_common.params import parse_private_args
6 | from zzz_perception_msgs.msg import DetectionBox2DArray
7 | from sensor_msgs.msg import Image
8 | from zzz_visualization_rviz_box_visualizer.detection2d import DetectionBox2DVisualizer
9 |
10 | class Node():
11 | def __init__(self):
12 | params = parse_private_args(
13 | input_topic="objects2d_detected",
14 | image_topic="/zzz/drivers/image_raw",
15 | output_topic="objects2d_visual",
16 |
17 | label_color=[255,255,255,1],
18 | box_color=[51,102,255,0.8],
19 |
20 | centroid_scale=0.5,
21 | label_scale=0.5,
22 | label_height=1,
23 | box_max_size=10
24 | )
25 |
26 | self._detections_subscriber = rospy.Subscriber(params.pop("input_topic"), DetectionBox2DArray, self.detection_callback)
27 | self._image_subscriber = rospy.Subscriber(params.pop("image_topic"), Image, self.image_callback)
28 | self._publisher = rospy.Publisher(params.pop("output_topic"), Image, queue_size=1)
29 | self._visualizer = DetectionBox2DVisualizer(**params)
30 |
31 | def image_callback(self, msg):
32 | self._visualizer.receive_image(msg)
33 |
34 | def detection_callback(self, msg):
35 | self._publisher.publish(self._visualizer.visualize(msg))
36 |
37 | if __name__ == "__main__":
38 | rospy.init_node("detection2d_visualizer")
39 | node = Node()
40 | rospy.spin()
41 |
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/nodes/tracking:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | import rospy
4 |
5 | from zzz_common.params import parse_private_args
6 | from zzz_perception_msgs.msg import TrackingBoxArray
7 | from visualization_msgs.msg import MarkerArray
8 | from zzz_visualization_rviz_box_visualizer.tracking import TrackingBoxVisualizer
9 |
10 | # TODO: directly publish Marker message since we have id for each object
11 | # TODO: Add option to publish message after rigid body transformation
12 |
13 | class Node():
14 | def __init__(self):
15 | params = parse_private_args(
16 | input_topic="objects_tracked",
17 | output_topic="objects_visual",
18 | marker_namespace="/zzz/tracking",
19 |
20 | label_color=[255,255,255,1],
21 | box_color=[255,77,136,0.8],
22 | centroid_color=[255,255,102,0.8],
23 | arrow_color=[255,255,153,0.8],
24 |
25 | centroid_scale=0.5,
26 | label_scale=0.5,
27 | label_height=1,
28 | arrow_speed_scale=1,
29 | arrow_width=0.2,
30 | box_max_size=10
31 | )
32 |
33 | self._subscriber = rospy.Subscriber(params.pop("input_topic"), TrackingBoxArray, self.callback)
34 | self._publisher = rospy.Publisher(params.pop("output_topic"), MarkerArray, queue_size=1)
35 | self._visualizer = TrackingBoxVisualizer(**params)
36 |
37 | def callback(self, msg):
38 | self._publisher.publish(self._visualizer.visualize(msg))
39 |
40 | if __name__ == "__main__":
41 | rospy.init_node("tracking_visualizer")
42 | node = Node()
43 | rospy.spin()
44 |
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_visualization_rviz_box_visualizer</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     Visualize detection result in rviz
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <build_depend>roscpp</build_depend>
18 |   <build_depend>zzz_perception_msgs</build_depend>
19 |   <build_depend>visualization_msgs</build_depend>
20 |   <build_export_depend>roscpp</build_export_depend>
21 |   <build_export_depend>zzz_perception_msgs</build_export_depend>
22 |   <build_export_depend>visualization_msgs</build_export_depend>
23 |   <exec_depend>roscpp</exec_depend>
24 |   <exec_depend>zzz_perception_msgs</exec_depend>
25 |   <exec_depend>visualization_msgs</exec_depend>
26 | </package>
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_visualization_rviz_box_visualizer'],
8 | package_dir={'': 'src'},
9 |     install_requires=['rospy']
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/src/zzz_visualization_rviz_box_visualizer/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/visualization/rviz/box_visualizer/src/zzz_visualization_rviz_box_visualizer/__init__.py
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/src/zzz_visualization_rviz_box_visualizer/detection2d.py:
--------------------------------------------------------------------------------
1 | from easydict import EasyDict
2 | import rospy
3 | import math
4 | import cv2
5 | import numpy as np
6 | import cv_bridge
7 |
8 | from sensor_msgs.msg import Image
9 | from zzz_visualization_rviz_box_visualizer.utils import parse_color
10 |
11 | class DetectionBox2DVisualizer:
12 | def __init__(self, **params):
13 | self._marker_id = 0
14 | self._params = EasyDict(params)
15 | self._image = np.zeros((800, 600, 3), np.uint8)
16 | self._bridge = cv_bridge.CvBridge()
17 |
18 | def receive_image(self, img_msg):
19 | self._image = self._bridge.imgmsg_to_cv2(img_msg, "bgr8")
20 |
21 | def add_labels(self, in_objects, in_img):
22 | for obj in in_objects.detections:
23 | # TODO: Implement this
24 | pass
25 | return in_img
26 |
27 | def add_boxes(self, in_objects, in_img):
28 | for obj in in_objects.detections:
29 | in_img = cv2.rectangle(in_img,
30 | (int(obj.bbox.pose.x - obj.bbox.dimension.length_x/2),
31 | int(obj.bbox.pose.y - obj.bbox.dimension.length_y/2)),
32 | (int(obj.bbox.pose.x + obj.bbox.dimension.length_x/2),
33 | int(obj.bbox.pose.y + obj.bbox.dimension.length_y/2)), self._params.box_color, 2) # thickness
34 | return in_img
35 |
36 | def visualize(self, msg):
37 | self._marker_id = 0
38 |
39 | image = self._image
40 | image = self.add_boxes(msg, image)
41 | image = self.add_labels(msg, image)
42 |
43 | return self._bridge.cv2_to_imgmsg(image, encoding="passthrough")
44 |
--------------------------------------------------------------------------------
/src/visualization/rviz/box_visualizer/src/zzz_visualization_rviz_box_visualizer/utils.py:
--------------------------------------------------------------------------------
1 | import rospy
2 | from std_msgs.msg import ColorRGBA
3 |
4 | def check_color(value):
5 | if value > 255.:
6 | return 1
7 | elif value < 0:
8 | return 0.
9 | else:
10 | return value / 255.
11 |
12 | def check_alpha(value):
13 | if value > 1:
14 | return 1
15 | elif value < 0.1:
16 | return 0.1
17 | else:
18 | return value
19 |
20 | # Colors should be in order r, g, b, a
21 | def parse_color(in_color):
22 | color = ColorRGBA()
23 | if len(in_color) == 4:
24 | color.r = check_color(in_color[0])
25 | color.g = check_color(in_color[1])
26 | color.b = check_color(in_color[2])
27 | color.a = check_alpha(in_color[3])
28 | else:
29 | rospy.logerr("Cannot resolve color value. Check configuration please!")
30 | return color
31 |
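
Example of the resulting normalization, using the default box color listed in this package's README (a usage sketch, not part of the module):

    from zzz_visualization_rviz_box_visualizer.utils import parse_color

    c = parse_color([51., 128., 204., 0.8])
    # c.r == 0.2, c.g ~= 0.502, c.b == 0.8, c.a == 0.8 (alpha is clamped to [0.1, 1])
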
--------------------------------------------------------------------------------
/src/visualization/web/dash/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(zzz_visualization_web_dash)
3 |
4 | find_package(catkin REQUIRED)
5 | catkin_package()
6 |
7 | catkin_python_setup()
8 |
9 | catkin_install_python(PROGRAMS
10 | nodes/dashboard
11 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION})
12 |
--------------------------------------------------------------------------------
/src/visualization/web/dash/README.md:
--------------------------------------------------------------------------------
1 | # Package Summary
2 | This package contains visualization using Dash by Plot.ly
3 |
--------------------------------------------------------------------------------
/src/visualization/web/dash/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <package format="2">
3 |   <name>zzz_visualization_web_dash</name>
4 |   <version>0.1.0</version>
5 |   <description>
6 |     Plot.ly Dash based visualization
7 |   </description>
8 |   <license>BSD</license>
9 |   <author>Yuanxin Zhong</author>
10 |   <maintainer>Yuanxin Zhong</maintainer>
11 |
12 |   <url type="website">https://github.com/cmpute/zzz</url>
13 |   <url type="repository">https://github.com/cmpute/zzz</url>
14 |   <url type="bugtracker">https://github.com/cmpute/zzz/issues</url>
15 |
16 |   <buildtool_depend>catkin</buildtool_depend>
17 |   <depend>roscpp</depend>
18 |   <depend>zzz_perception_msgs</depend>
19 |   <depend>visualization_msgs</depend>
20 | </package>
--------------------------------------------------------------------------------
/src/visualization/web/dash/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 |
3 | from distutils.core import setup
4 | from catkin_pkg.python_setup import generate_distutils_setup
5 |
6 | d = generate_distutils_setup(
7 | packages=['zzz_visualization_web_dash'],
8 | package_dir={'': 'src'},
9 |     install_requires=['rospy', 'plotly', 'dash']
10 | )
11 |
12 | setup(**d)
--------------------------------------------------------------------------------
/src/visualization/web/dash/src/zzz_visualization_web_dash/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/visualization/web/dash/src/zzz_visualization_web_dash/__init__.py
--------------------------------------------------------------------------------
/src/visualization/web/xviz/.place_holder:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CLAP-Framework/clap/a14602323d63cccf62a12e6db2979205ff2efe30/src/visualization/web/xviz/.place_holder
--------------------------------------------------------------------------------