├── docs
│   ├── .nojekyll
│   ├── favicon.ico
│   ├── _navbar.md
│   ├── en
│   │   ├── _navbar.md
│   │   ├── _media
│   │   │   └── icon.png
│   │   ├── image
│   │   │   ├── motion
│   │   │   │   ├── motion.png
│   │   │   │   ├── motion_flow.png
│   │   │   │   └── motion_flow_en.png
│   │   │   ├── tracking_base
│   │   │   │   ├── tree.png
│   │   │   │   └── exceptions.png
│   │   │   ├── cyberdog_gazebo
│   │   │   │   ├── flow.jpeg
│   │   │   │   ├── terrain.png
│   │   │   │   ├── build_ros.png
│   │   │   │   ├── coordinate.png
│   │   │   │   ├── heightmap.png
│   │   │   │   ├── gazebo_rviz.png
│   │   │   │   └── joint_and_link.jpg
│   │   │   ├── cyberdog_loco
│   │   │   │   ├── moon_walk.png
│   │   │   │   ├── coordinate.png
│   │   │   │   ├── flow_chart.png
│   │   │   │   ├── coordinate_en.png
│   │   │   │   ├── flow_chart_en.png
│   │   │   │   ├── motion_list_cn.png
│   │   │   │   ├── motion_list_en.png
│   │   │   │   ├── motor_sdk_flow_chart_cn.png
│   │   │   │   └── motor_sdk_flow_chart_en.png
│   │   │   ├── realsense-ros
│   │   │   │   ├── lifecycle.png
│   │   │   │   └── align_algorithm.png
│   │   │   ├── cyberdog_bms
│   │   │   │   ├── cyberdog_bms.png
│   │   │   │   ├── cyberdog_bms_close_flow.png
│   │   │   │   └── cyberdog_bms_open_flow.png
│   │   │   ├── cyberdog_tof
│   │   │   │   ├── cyberdog_tof.png
│   │   │   │   ├── cyberdog_tof_close_flow.png
│   │   │   │   └── cyberdog_tof_open_flow.png
│   │   │   ├── cyberdog_uwb
│   │   │   │   ├── cyberdog_uwb.png
│   │   │   │   ├── cyberdog_uwb_close_flow.png
│   │   │   │   └── cyberdog_uwb_open_flow.png
│   │   │   ├── cyberdog_camera
│   │   │   │   ├── camera_arch.png
│   │   │   │   └── camera_arch_inter.png
│   │   │   ├── cyberdog_face
│   │   │   │   ├── cyberdog_face.png
│   │   │   │   ├── cyberdog_face_flow.png
│   │   │   │   └── cyberdog_face_function.png
│   │   │   ├── cyberdog_flash
│   │   │   │   ├── format_udisk1.png
│   │   │   │   ├── format_udisk2.png
│   │   │   │   ├── format_udisk3.png
│   │   │   │   ├── format_udisk4.png
│   │   │   │   ├── conf_file_format.png
│   │   │   │   └── flash_usb_port.png
│   │   │   ├── cyberdog_laserslam
│   │   │   │   ├── LaserSlam.jpg
│   │   │   │   └── Structure.jpg
│   │   │   ├── algorithm_manager
│   │   │   │   ├── ab_navigation.png
│   │   │   │   ├── lidar_mapping.png
│   │   │   │   ├── uwb_tracking.png
│   │   │   │   ├── uwb_tracking_en.png
│   │   │   │   ├── vision_mapping.png
│   │   │   │   ├── vision_tracking.png
│   │   │   │   ├── ab_navigation_en.png
│   │   │   │   ├── algorithm_manager.png
│   │   │   │   ├── lidar_mapping_en.png
│   │   │   │   ├── vision_mapping_en.png
│   │   │   │   ├── vision_tracking_en.png
│   │   │   │   ├── algorithm_manager_en.png
│   │   │   │   ├── lidar_relocalization.png
│   │   │   │   ├── lidar_relocalization_en.png
│   │   │   │   ├── vision_relocalization.png
│   │   │   │   └── vision_relocalization_en.png
│   │   │   ├── cyberdog_gps
│   │   │   │   ├── cyberdog_gps_close.png
│   │   │   │   ├── cyberdog_gps_flow.png
│   │   │   │   └── cyberdog_gps_open.png
│   │   │   ├── cyberdog_led
│   │   │   │   └── cyberdog_led_flow.png
│   │   │   ├── cyberdog_tracking
│   │   │   │   ├── tracking_arch.png
│   │   │   │   ├── tracking_workflow_cn.png
│   │   │   │   └── tracking_workflow_en.png
│   │   │   ├── cyberdog_vision
│   │   │   │   ├── vision_arch_cn.png
│   │   │   │   ├── vision_arch_en.png
│   │   │   │   ├── vision_workflow_cn.png
│   │   │   │   └── vision_workflow_en.png
│   │   │   ├── device_manager
│   │   │   │   └── device_manager.png
│   │   │   ├── sensor_manager
│   │   │   │   └── sensor_manager.png
│   │   │   ├── cyberdog_touch
│   │   │   │   ├── Touch_data_stream.png
│   │   │   │   └── touch_architecture_diagram.png
│   │   │   ├── cyberdog_action
│   │   │   │   └── cyberdog_action_flow.png
│   │   │   ├── cyberdog_lidar
│   │   │   │   └── cyberdog_lidar_scan.png
│   │   │   ├── cyberdog_miloc
│   │   │   │   ├── miloc_mapping_process.jpg
│   │   │   │   ├── cyberdog_miloc_architecture.jpg
│   │   │   │   └── miloc_relocalization_process.jpg
│   │   │   ├── cyberdog_occmap
│   │   │   │   ├── cyberdog_occmap_cn.png
│   │   │   │   └── cyberdog_occmap_en.png
│   │   │   ├── cyberdog_train
│   │   │   │   ├── cyberdog_train_cn_1.png
│   │   │   │   ├── cyberdog_train_cn_2.png
│   │   │   │   └── cyberdog_train_cn_3.png
│   │   │   ├── cyberdog_ai_sports
│   │   │   │   ├── cyberdog_ai_sports.png
│   │   │   │   ├── cyberdog_ai_sports_flow.png
│   │   │   │   └── sport_counts_detection_flow.png
│   │   │   ├── cyberdog_mivins
│   │   │   │   ├── mivins_mapping_process.jpg
│   │   │   │   ├── mivins_following_process.jpg
│   │   │   │   ├── mivins_localization_process.jpg
│   │   │   │   └── cyberdog_mivins_architecture.jpg
│   │   │   ├── cyberdog_ultrasonic
│   │   │   │   ├── cyberdog_ultrasonic.png
│   │   │   │   ├── cyberdog_ultrasonic_close_flow.png
│   │   │   │   └── cyberdog_ultrasonic_open_flow.png
│   │   │   └── cyberdog_bluetooth
│   │   │       ├── cyberdog_bluetooth_node_cn.svg
│   │   │       └── cyberdog_bluetooth_node_en.svg
│   │   ├── README.md
│   │   ├── cyberdog_parameter_en.md
│   │   ├── cyberdog_miloc_en.md
│   │   ├── cyberdog_wifi_en.md
│   │   ├── cyberdog_tracking_en.md
│   │   ├── cyberdog_mivins_en.md
│   │   ├── cyberdog_laserslam_en.md
│   │   ├── cyberdog_vision_en.md
│   │   ├── cyberdog_gps_en.md
│   │   ├── cyberdog_lidar_en.md
│   │   ├── _sidebar.md
│   │   ├── cyberdog_interactive_en.md
│   │   ├── image_transmission_en.md
│   │   ├── cyberdog_touch_en.md
│   │   ├── connector_en.md
│   │   ├── third_party_library_management_en.md
│   │   ├── cyberdog_grpc_en.md
│   │   ├── cyberdog_bms_en.md
│   │   ├── cyberdog_camera_en.md
│   │   ├── device_manager_en.md
│   │   ├── cyberdog_bringup_en.md
│   │   ├── cyberdog_ultrasonic_en.md
│   │   ├── sensor_manager_en.md
│   │   ├── cyberdog_tof_en.md
│   │   ├── motion_manager_en.md
│   │   ├── cyberdog_occmap_en.md
│   │   ├── cyberdog_uwb_en.md
│   │   ├── cyberdog_bluetooth_en.md
│   │   └── cyberdog_train_en.md
│   ├── image
│   │   ├── dev.png
│   │   ├── connect.jpg
│   │   └── connect.png
│   ├── _media
│   │   └── icon.png
│   ├── cn
│   │   ├── image
│   │   │   ├── motion
│   │   │   │   ├── motion.png
│   │   │   │   ├── motion_flow.png
│   │   │   │   └── motion_flow_en.png
│   │   │   ├── tracking_base
│   │   │   │   ├── tree.png
│   │   │   │   └── exceptions.png
│   │   │   ├── cyberdog_gazebo
│   │   │   │   ├── flow.jpg
│   │   │   │   ├── terrain.png
│   │   │   │   ├── build_ros.png
│   │   │   │   ├── coordinate.png
│   │   │   │   ├── heightmap.png
│   │   │   │   ├── gazebo_rviz.png
│   │   │   │   └── joint_and_link.jpg
│   │   │   ├── cyberdog_loco
│   │   │   │   ├── moon_walk.png
│   │   │   │   ├── coordinate.png
│   │   │   │   ├── flow_chart.png
│   │   │   │   ├── coordinate_en.png
│   │   │   │   ├── flow_chart_en.png
│   │   │   │   ├── motion_list_cn.png
│   │   │   │   ├── motion_list_en.png
│   │   │   │   ├── motor_sdk_flow_chart_cn.png
│   │   │   │   └── motor_sdk_flow_chart_en.png
│   │   │   ├── developer_guide
│   │   │   │   ├── image1.png
│   │   │   │   ├── image2.png
│   │   │   │   ├── image3.png
│   │   │   │   ├── image4.png
│   │   │   │   ├── image5.png
│   │   │   │   ├── image6.png
│   │   │   │   └── image-20230522152113978.png
│   │   │   ├── realsense-ros
│   │   │   │   ├── lifecycle.png
│   │   │   │   └── align_algorithm.png
│   │   │   ├── cyberdog_bms
│   │   │   │   ├── cyberdog_bms.png
│   │   │   │   ├── cyberdog_bms_close_flow.png
│   │   │   │   └── cyberdog_bms_open_flow.png
│   │   │   ├── cyberdog_tof
│   │   │   │   ├── cyberdog_tof.png
│   │   │   │   ├── cyberdog_tof_close_flow.png
│   │   │   │   └── cyberdog_tof_open_flow.png
│   │   │   ├── cyberdog_uwb
│   │   │   │   ├── cyberdog_uwb.png
│   │   │   │   ├── cyberdog_uwb_close_flow.png
│   │   │   │   └── cyberdog_uwb_open_flow.png
│   │   │   ├── cyberdog_camera
│   │   │   │   ├── camera_arch.png
│   │   │   │   └── camera_arch_inter.png
│   │   │   ├── cyberdog_face
│   │   │   │   ├── cyberdog_face.png
│   │   │   │   ├── cyberdog_face_flow.png
│   │   │   │   └── cyberdog_face_function.png
│   │   │   ├── cyberdog_flash
│   │   │   │   ├── format_udisk1.png
│   │   │   │   ├── format_udisk2.png
│   │   │   │   ├── format_udisk3.png
│   │   │   │   ├── format_udisk4.png
│   │   │   │   ├── conf_file_format.png
│   │   │   │   └── flash_usb_port.png
│   │   │   ├── cyberdog_laserslam
│   │   │   │   ├── LaserSlam.jpg
│   │   │   │   └── Structure.jpg
│   │   │   ├── algorithm_manager
│   │   │   │   ├── ab_navigation.png
│   │   │   │   ├── lidar_mapping.png
│   │   │   │   ├── uwb_tracking.png
│   │   │   │   ├── uwb_tracking_en.png
│   │   │   │   ├── vision_mapping.png
│   │   │   │   ├── vision_tracking.png
│   │   │   │   ├── ab_navigation_en.png
│   │   │   │   ├── algorithm_manager.png
│   │   │   │   ├── lidar_mapping_en.png
│   │   │   │   ├── vision_mapping_en.png
│   │   │   │   ├── vision_tracking_en.png
│   │   │   │   ├── algorithm_manager_en.png
│   │   │   │   ├── lidar_relocalization.png
│   │   │   │   ├── lidar_relocalization_en.png
│   │   │   │   ├── vision_relocalization.png
│   │   │   │   └── vision_relocalization_en.png
│   │   │   ├── cyberdog_gps
│   │   │   │   ├── cyberdog_gps_close.png
│   │   │   │   ├── cyberdog_gps_flow.png
│   │   │   │   └── cyberdog_gps_open.png
│   │   │   ├── cyberdog_led
│   │   │   │   └── cyberdog_led_flow.png
│   │   │   ├── cyberdog_tracking
│   │   │   │   ├── tracking_arch.png
│   │   │   │   ├── tracking_workflow_cn.png
│   │   │   │   └── tracking_workflow_en.png
│   │   │   ├── cyberdog_vision
│   │   │   │   ├── vision_arch_cn.png
│   │   │   │   ├── vision_arch_en.png
│   │   │   │   ├── vision_workflow_cn.png
│   │   │   │   └── vision_workflow_en.png
│   │   │   ├── device_manager
│   │   │   │   └── device_manager.png
│   │   │   ├── sensor_manager
│   │   │   │   └── sensor_manager.png
│   │   │   ├── cyberdog_touch
│   │   │   │   ├── Touch_data_stream.png
│   │   │   │   └── touch_architecture_diagram.png
│   │   │   ├── cyberdog_action
│   │   │   │   └── cyberdog_action_flow.png
│   │   │   ├── cyberdog_lidar
│   │   │   │   └── cyberdog_lidar_scan.png
│   │   │   ├── cyberdog_miloc
│   │   │   │   ├── miloc_mapping_process.jpg
│   │   │   │   ├── cyberdog_miloc_architecture.jpg
│   │   │   │   └── miloc_relocalization_process.jpg
│   │   │   ├── cyberdog_occmap
│   │   │   │   ├── cyberdog_occmap_cn.png
│   │   │   │   └── cyberdog_occmap_en.png
│   │   │   ├── cyberdog_train
│   │   │   │   ├── cyberdog_train_cn_1.png
│   │   │   │   ├── cyberdog_train_cn_2.png
│   │   │   │   └── cyberdog_train_cn_3.png
│   │   │   ├── cyberdog_ai_sports
│   │   │   │   ├── cyberdog_ai_sports.png
│   │   │   │   ├── cyberdog_ai_sports_flow.png
│   │   │   │   └── sport_counts_detection_flow.png
│   │   │   ├── cyberdog_mivins
│   │   │   │   ├── mivins_mapping_process.jpg
│   │   │   │   ├── mivins_following_process.jpg
│   │   │   │   ├── mivins_localization_process.jpg
│   │   │   │   └── cyberdog_mivins_architecture.jpg
│   │   │   ├── cyberdog_ultrasonic
│   │   │   │   ├── cyberdog_ultrasonic.png
│   │   │   │   ├── cyberdog_ultrasonic_close_flow.png
│   │   │   │   └── cyberdog_ultrasonic_open_flow.png
│   │   │   └── cyberdog_bluetooth
│   │   │       ├── cyberdog_bluetooth_node_cn.svg
│   │   │       └── cyberdog_bluetooth_node_en.svg
│   │   ├── cyberdog_parameter_cn.md
│   │   ├── cyberdog_laserslam_cn.md
│   │   ├── cyberdog_miloc_cn.md
│   │   ├── cyberdog_tracking_cn.md
│   │   ├── cyberdog_vision_cn.md
│   │   ├── cyberdog_wifi_cn.md
│   │   ├── cyberdog_mivins_cn.md
│   │   ├── cyberdog_interactive_cn.md
│   │   ├── cyberdog_lidar_cn.md
│   │   ├── question_answer_cn.md
│   │   ├── image_transmission_cn.md
│   │   ├── cyberdog_bringup_cn.md
│   │   ├── connector_cn.md
│   │   ├── cyberdog_gps_cn.md
│   │   ├── motion_manager_cn.md
│   │   ├── cyberdog_grpc_cn.md
│   │   ├── cyberdog_bluetooth_cn.md
│   │   ├── cyberdog_touch_cn.md
│   │   ├── cyberdog_manager_cn.md
│   │   ├── cyberdog_bms_cn.md
│   │   ├── cyberdog_camera_cn.md
│   │   ├── sensor_manager_cn.md
│   │   ├── device_manager_cn.md
│   │   ├── cyberdog_ultrasonic_cn.md
│   │   ├── cyberdog_audio_cn.md
│   │   ├── third_party_library_management_cn.md
│   │   ├── cyberdog_uwb_cn.md
│   │   ├── cyberdog_tof_cn.md
│   │   ├── cyberdog_occmap_cn.md
│   │   ├── cyberdog_train_cn.md
│   │   ├── algorithm_manager_cn.md
│   │   ├── cyberdog_action_cn.md
│   │   ├── cyberdog_common_cn.md
│   │   └── cyberdog_face_cn.md
│   ├── libs
│   │   ├── gitalk.plugins.js
│   │   ├── docsify-darklight-theme@latest.js
│   │   └── docsify-copy-code.min.js
│   ├── README.md
│   └── _sidebar.md
├── .gitignore
└── md.md

/docs/.nojekyll:
(empty file)

/.gitignore:
.DS_Store
node_modules

/docs/favicon.ico:
(binary asset) https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/favicon.ico

/docs/_navbar.md:
* Translations

  * [:us: English](/en/)
  * [:cn: 简体中文](/)

/docs/en/_navbar.md:
* Translations

  * [:us: English](/en/)
  * [:cn: 简体中文](/)

The remaining entries of this excerpt are binary image assets: the files under docs/image/, docs/_media/, docs/en/_media/, docs/en/image/ and docs/cn/image/ already listed in the tree above. Their contents are not reproduced individually here; each asset resolves to the same URL pattern, for example docs/image/dev.png is served from
https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/image/dev.png
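For orientation: the .nojekyll marker, _navbar.md, _sidebar.md, _media/icon.png and the plugins under docs/libs/ are the pieces of a docsify-rendered site. The repository's index.html is not part of this excerpt, so the snippet below is only a minimal sketch of how such a site is typically wired together; the site title, CDN URL, option values and plugin order are assumptions, not taken from the repo.

<!DOCTYPE html>
<html>
<head>
  <meta charset="UTF-8">
  <!-- favicon.ico from the tree above -->
  <link rel="icon" href="favicon.ico">
</head>
<body>
  <div id="app"></div>
  <script>
    // Minimal docsify configuration sketch (assumed, not taken from the repo).
    window.$docsify = {
      name: 'CyberDog Blogs',   // hypothetical site title
      logo: '_media/icon.png',
      loadNavbar: true,         // serves _navbar.md at the root and en/_navbar.md under /en/
      loadSidebar: true         // serves _sidebar.md at the root and en/_sidebar.md under /en/
    };
  </script>
  <!-- docsify core from a CDN (version assumed) -->
  <script src="https://cdn.jsdelivr.net/npm/docsify@4"></script>
  <!-- plugins vendored under docs/libs/ in the tree above -->
  <script src="libs/docsify-darklight-theme@latest.js"></script>
  <script src="libs/docsify-copy-code.min.js"></script>
  <script src="libs/gitalk.plugins.js"></script>
</body>
</html>

The .nojekyll file keeps GitHub Pages from ignoring the underscore-prefixed files (_navbar.md, _sidebar.md, _media/) that docsify serves at runtime, and README.md in docs/ and docs/en/ acts as the landing page for each language.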
https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_tracking/tracking_workflow_en.png -------------------------------------------------------------------------------- /docs/cn/image/algorithm_manager/lidar_relocalization_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/algorithm_manager/lidar_relocalization_en.png -------------------------------------------------------------------------------- /docs/cn/image/algorithm_manager/vision_relocalization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/algorithm_manager/vision_relocalization.png -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_mivins/mivins_following_process.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_mivins/mivins_following_process.jpg -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_touch/touch_architecture_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_touch/touch_architecture_diagram.png -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_ultrasonic/cyberdog_ultrasonic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_ultrasonic/cyberdog_ultrasonic.png -------------------------------------------------------------------------------- /docs/cn/image/developer_guide/image-20230522152113978.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/developer_guide/image-20230522152113978.png -------------------------------------------------------------------------------- /docs/en/image/algorithm_manager/lidar_relocalization_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/algorithm_manager/lidar_relocalization_en.png -------------------------------------------------------------------------------- /docs/en/image/algorithm_manager/vision_relocalization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/algorithm_manager/vision_relocalization.png -------------------------------------------------------------------------------- /docs/en/image/cyberdog_mivins/mivins_following_process.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_mivins/mivins_following_process.jpg -------------------------------------------------------------------------------- /docs/en/image/cyberdog_touch/touch_architecture_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_touch/touch_architecture_diagram.png 
-------------------------------------------------------------------------------- /docs/en/image/cyberdog_ultrasonic/cyberdog_ultrasonic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_ultrasonic/cyberdog_ultrasonic.png -------------------------------------------------------------------------------- /docs/cn/image/algorithm_manager/vision_relocalization_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/algorithm_manager/vision_relocalization_en.png -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_ai_sports/cyberdog_ai_sports_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_ai_sports/cyberdog_ai_sports_flow.png -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_miloc/cyberdog_miloc_architecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_miloc/cyberdog_miloc_architecture.jpg -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_miloc/miloc_relocalization_process.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_miloc/miloc_relocalization_process.jpg -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_mivins/mivins_localization_process.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_mivins/mivins_localization_process.jpg -------------------------------------------------------------------------------- /docs/en/image/algorithm_manager/vision_relocalization_en.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/algorithm_manager/vision_relocalization_en.png -------------------------------------------------------------------------------- /docs/en/image/cyberdog_ai_sports/cyberdog_ai_sports_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_ai_sports/cyberdog_ai_sports_flow.png -------------------------------------------------------------------------------- /docs/en/image/cyberdog_miloc/cyberdog_miloc_architecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_miloc/cyberdog_miloc_architecture.jpg -------------------------------------------------------------------------------- /docs/en/image/cyberdog_miloc/miloc_relocalization_process.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_miloc/miloc_relocalization_process.jpg -------------------------------------------------------------------------------- 
/docs/en/image/cyberdog_mivins/mivins_localization_process.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_mivins/mivins_localization_process.jpg -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_ai_sports/sport_counts_detection_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_ai_sports/sport_counts_detection_flow.png -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_mivins/cyberdog_mivins_architecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_mivins/cyberdog_mivins_architecture.jpg -------------------------------------------------------------------------------- /docs/en/image/cyberdog_ai_sports/sport_counts_detection_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_ai_sports/sport_counts_detection_flow.png -------------------------------------------------------------------------------- /docs/en/image/cyberdog_mivins/cyberdog_mivins_architecture.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_mivins/cyberdog_mivins_architecture.jpg -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_ultrasonic/cyberdog_ultrasonic_close_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_ultrasonic/cyberdog_ultrasonic_close_flow.png -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_ultrasonic/cyberdog_ultrasonic_open_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/cn/image/cyberdog_ultrasonic/cyberdog_ultrasonic_open_flow.png -------------------------------------------------------------------------------- /docs/en/image/cyberdog_ultrasonic/cyberdog_ultrasonic_close_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_ultrasonic/cyberdog_ultrasonic_close_flow.png -------------------------------------------------------------------------------- /docs/en/image/cyberdog_ultrasonic/cyberdog_ultrasonic_open_flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MiRoboticsLab/blogs/HEAD/docs/en/image/cyberdog_ultrasonic/cyberdog_ultrasonic_open_flow.png -------------------------------------------------------------------------------- /docs/en/README.md: -------------------------------------------------------------------------------- 1 |

2 | Robot Development Document 3 |

4 | 5 | ## brief introduction 6 | 7 | Robot Development Document 8 | 9 | 10 | 11 | > Robot Development Document 12 | 13 | ## Update 14 | 15 | The following is a list of update times 16 | 17 | - {docsify-updated} Document Upload 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_parameter_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_parameter设计文档 2 | 3 | ## 版本 4 | 5 | | 编写 | change | version | date | 6 | | ---- | -------------------------- | ------- | ---------- | 7 | | 刘凯 | 梳理cyberdog_parameter功能 | 1.0 | 2023.05.22 | 8 | 9 | ## 概述 10 | 11 | cyberdog_parameter是对toml参数操作封装的类,包括获取与设置。 12 | 13 | ## 功能 14 | 15 | - 将toml参数配置生成so库。 16 | 17 | - 通过运行时加载so库从内存获取具体的参数值。 -------------------------------------------------------------------------------- /docs/libs/gitalk.plugins.js: -------------------------------------------------------------------------------- 1 | $docsify.plugins=[].concat(function(i){var e=Docsify.dom;i.mounted(function(i){var n=e.create("div");n.id="gitalk-container";var t=e.getNode("#main");n.style="width: "+t.clientWidth+"px; margin: 0 auto 20px;",e.appendTo(e.find(".content"),n)}),i.doneEach(function(i){for(var n=document.getElementById("gitalk-container");n.hasChildNodes();)n.removeChild(n.firstChild);gitalk.render("gitalk-container")})},$docsify.plugins); 2 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_laserslam_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_laserslam 2 | 3 | ## 模块简介 4 | 5 | cyberdog_laserslam是一个基于开源项目Cartographer开发的基于激光雷达、IMU可以实时定位建图的模块,包含的子模块有:位姿融合推理、激光地图匹配、位姿图等。 6 | 7 | ## 模块架构 8 | 9 | ![](./image/cyberdog_laserslam/Structure.jpg) 10 | 11 | 数据入口: 提供LaserSlam所需的数据入口 12 | 13 | 位姿推理器:根据激光前端匹配得到的位姿势,imu的角度信息,腿式里程计的速度信息来推理当前时刻的位姿 14 | 15 | 前端匹配器:通过新入激光数据,从位姿推理器拿到当前时刻位姿估计,将激光数据转换到世界坐标系和占据栅格匹配得到纠正后的位姿 16 | 17 | Pose Graph:通过整理前端匹配得到的节点和SubMap构建位姿图,进行图优化平均误差,顺滑位姿 18 | 19 | ## 处理流程 20 | 21 | ![](./image/cyberdog_laserslam/LaserSlam.jpg) 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_miloc_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_mivins 2 | 3 | ## 模块简介 4 | 5 | cyberdog_miloc是一个基于的三目相机的视觉的建图与重定位模块,用于视觉建图、导航功能中。在建图过程基于cyberdog_mivins输出的机器人位姿,建立稀疏的重定位地图;在导航过程基于重定位地图为机器人提供在当前地图下的位姿。 6 | 7 | ## 模块架构 8 | 9 | ![cyberdog_miloc_architecture](./image/cyberdog_miloc/cyberdog_miloc_architecture.jpg) 10 | 11 | cyberdog_miloc算法包括视觉建图、重定位、地图地图管理等模块。 12 | 13 | ## 服务流程 14 | 15 | ### 建图服务流程 16 | ![cyberdog_miloc_mapping](./image/cyberdog_miloc/miloc_mapping_process.jpg) 17 | 18 | ### 重定位定位服务流程 19 | ![cyberdog_miloc_reloc](./image/cyberdog_miloc/miloc_relocalization_process.jpg) 20 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_tracking_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_tracking 2 | 3 | ## 模块简介 4 | 5 | cyberdog_tracking是一个基于ROS2的功能包,主要用于对跟踪目标进行3D位置的估计,为下游跟随控制模块提供跟随的目标点。其中,位置估计的对象包括行人以及万物跟随中用户框选的任何规则非规则的目标。 6 | 7 | ## 模块架构 8 | 9 | ![](./image/cyberdog_tracking/tracking_arch.png) 10 | 11 | - camera_server:提供图像 12 | 13 | - cyberdog_vision:AI算法处理 14 | 15 | - cyberdog_tracking:位置估计 16 | 17 | - tracking_base:跟随规划控制 18 | 19 | ## 处理流程 20 | 21 | ![](./image/cyberdog_tracking/tracking_workflow_cn.png) 22 | 23 | 
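The position-estimation step in the workflow above amounts to back-projecting the detected box through the aligned depth image. A minimal sketch of that idea, assuming a row-major 16-bit depth image in millimetres and placeholder camera intrinsics (illustrative only, not the module's actual interface):

```C++
// Back-project the centre pixel (u, v) of a detection box to a 3D point using
// the aligned depth image and the pinhole model. Intrinsics are placeholders.
#include <cstdint>
#include <vector>

struct Intrinsics { double fx, fy, cx, cy; };
struct Point3D { double x, y, z; };

Point3D EstimateTargetPosition(const std::vector<uint16_t> & depth, int width,
                               int u, int v, const Intrinsics & K)
{
  double z = depth[v * width + u] / 1000.0;          // depth in mm -> metres
  return {(u - K.cx) * z / K.fx, (v - K.cy) * z / K.fy, z};
}
```

In practice a patch of depth values around the box centre would be sampled and invalid (zero) readings filtered out before the target point is published.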
24 | 25 | - 模块输入:AI算法的处理结果及对齐后的深度图 26 | 27 | - 模块输出:跟踪目标的位置及跟踪状态 28 | 29 | - 状态发布:近距离、远距离、边缘 30 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_vision_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_vision 2 | 3 | ## 模块简介 4 | 5 | cyberdog_vision是一个ROS2功能包,主要用于AI算法调度管理及AI算法推理,包含的AI算法有:人脸识别(包含年龄及表情识别),人体检测、万物跟踪、骨骼点检测、静态手势识别、行人重识别6类算法。 6 | 7 | ## 模块架构 8 | 9 | ![](./image/cyberdog_vision/vision_arch_cn.png) 10 | 11 | - Sensor数据层:提供数据源的硬件设备 12 | 13 | - Camera应用层:基于硬件设备获取数据流并进行业务相关处理 14 | 15 | - AI应用层:基于AI算法及业务需求进行算法调度管理及算法推理 16 | 17 | - Tracking业务层:根据AI算法结果进行行人及万物跟随的目标位置估计 18 | 19 | 备注:红色虚线框为规划模块或方案变更导致变动的模块。 20 | 21 | ## 处理流程 22 | 23 | ![](./image/cyberdog_vision/vision_workflow_cn.png) 24 | 25 | - 模块输入:RGB图像 26 | - 模块输出:AI算法处理结果 27 | - 状态发布:选框中、跟随中 28 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_wifi_cn.md: -------------------------------------------------------------------------------- 1 | # WiFi功能设计文档 2 | 3 | ## 概述 4 | 5 | cyberdog_wifi是cyberdog控制WiFi连接的ROS接口节点,其它ROS节点可通过调用它来连接指定的路由器和热点。 6 | 7 | ## 系统结构 8 | 9 | 1. cyberdog_wifi功能包:生成cyberdog_wifi节点,用于提供连接指定ssid的路由器、发布WiFi连接状态信息 10 | - 连接WiFi服务器:用于连接指定ssid的WiFi 11 | - WiFi信息发布器:用于在连接状态向外广播当前的连接信息,包括ssid、信号强度和本设备IP 12 | 13 | 1. connector节点:获取WiFi连接状态,发起连接请求 14 | ![structure](./image/cyberdog_wifi/cyberdog_wifi_cn_structure.svg) 15 | 16 | ## 运行流程 17 | 18 | ### 连接流程 19 | ![connect](./image/cyberdog_wifi/cyberdog_wifi_request_cn.svg) 20 | ### 连接信息发布流程 21 | ![info](./image/cyberdog_wifi/cyberdog_wifi_info_cn.svg) -------------------------------------------------------------------------------- /docs/en/cyberdog_parameter_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_parameter Design Document 2 | 3 | ## Version 4 | 5 | | Author | change | version | date | 6 | | ------ | ------------------------------------- | ------- | ---------- | 7 | | KaiLiu | Sort out cyberdog_parameter functions | 1.0 | 2023.05.22 | 8 | 9 | ## Overview 10 | 11 | cyberdog_parameter is a class that encapsulates toml parameter operations, including acquisition and setting. 12 | 13 | ## Function 14 | 15 | - Configure toml parameter to generate so library. 16 | 17 | - Obtain specific parameter values from memory by loading the so library at runtime. -------------------------------------------------------------------------------- /docs/cn/cyberdog_mivins_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_mivins 2 | 3 | ## 模块简介 4 | 5 | cyberdog_mivins在视觉建图、视觉定位、视觉跟随等功能中使用,提供实时输出足式机器人位姿TF服务。基于开源项目SVO、VINS-FUSION开发,使用相机、IMU、足式里程计等多传感器数据融合定位。 6 | 7 | ## 模块架构 8 | 9 | ![cyberdog_mivins_architecture](./image/cyberdog_mivins/cyberdog_mivins_architecture.jpg) 10 | 11 | cyberdog_mivins定位算法包括数据输入、前端跟踪、前端建图、后端优化、输出等模块。 12 | 13 | ## 服务流程 14 | 15 | ### 建图服务流程 16 | ![mivins_mapping_process](./image/cyberdog_mivins/mivins_mapping_process.jpg) 17 | 18 | ### 定位服务流程 19 | 20 | ![mivins_localization_process](./image/cyberdog_mivins/mivins_localization_process.jpg) 21 | 22 | ### 跟随定位服务流程 23 | ![mivins_following_process](./image/cyberdog_mivins/mivins_following_process.jpg) 24 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_interactive_cn.md: -------------------------------------------------------------------------------- 1 | #
cyberdog_interactive 设计文档
2 | 3 | ## 1. 概述 4 | 仿生机器人互动功能将内置固定场景下的互动功能,作为机器人的基础能力,并以此为用户提供编程例子,抛砖引玉,希望用户(开发者)可以开发出更加好玩炫酷的功能。 5 | 6 | ## 2. 设计 7 | ### 2.1. 功能设计 8 | 9 | 摸下巴的仅在狗处于“坐下”状态时才会触发,且摸下巴动作分为以下三种状态: 10 | 1. 来回触摸下巴: 11 | - 当障碍物在狗下巴左右来回触摸时会进入该状态,该状态下狗会触发如下动作: 12 | - 狗吠一声; 13 | - 左右扭屁股动作; 14 | 2. 触摸下巴左侧: 15 | - 当障碍物在狗下巴左侧持续触摸时会进入该状态,该状态下狗会触发如下动作: 16 | - 狗吠一声; 17 | - 向右扭屁股动作; 18 | 3. 触摸下巴右侧 19 | - 当障碍物在狗下巴右侧持续触摸时会进入该状态,该状态下狗会触发如下动作: 20 | - 狗吠一声; 21 | - 向左扭屁股动作; 22 | 23 | #### 2.2 模块设计 24 | 25 |
26 | 27 | ![](./image/cyberdog_interactive/cyberdog_interactive_module.svg) 28 | 29 |
30 | 31 | 如上图所示,该功能可分为两个模块,一个主模块,主模块下仅挂一个检测摸下巴的互动模块,互动模块依赖TOF模块、运动模块和语音模块: 32 | 1. TOF模块:提供头部TOF传感数据,用于感知狗下巴附近是否存在障碍物; 33 | 2. 运动模块:提供基础运动能力; 34 | 3. 和语音模块:提供基础语音交互能力。 35 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_lidar_cn.md: -------------------------------------------------------------------------------- 1 | #
cyberdog_lidar 设计文档
2 | 3 | ## 1. 概述 4 | 5 |
6 | 7 | ![](./image/cyberdog_lidar/cyberdog_lidar_scan.png) 8 | 9 |
10 | 11 | 如上图所示,仿生机器人雷达驱动主要用于:需要实时反馈仿生机器人所处环境中的障碍物距离及距离探测值强度信息的场景。 12 | 13 | ## 2. 设计 14 | ### 2.1. 功能设计 15 | 16 | 仿生机器人雷达驱动工作流程主要如下: 17 | 1. 解析雷达配置参数并按照参数配置雷达软硬件; 18 | 2. 初始化雷达; 19 | 3. 如果初始化成功则继续下一步反之退出; 20 | 4. 采集雷达原始数据; 21 | 5. 如果需要滤波则对雷达原始数据进行过滤(拖尾滤波)并发布,反之直接发布原始数据; 22 | 6. 如果程序收到终止信号则退出,反之进行第4步。 23 | 24 | #### 2.2 模块设计 25 | 26 |
27 | 28 | ![](./image/cyberdog_lidar/cyberdog_lidar_module.svg) 29 | 30 |
31 | 32 | 如上图所示, 仿生机器人雷达驱动架构组成及各组成部分主要功能如下: 33 | 1. Lidar SDK:提供 Linux 下雷达固件数据采集及解析功能; 34 | 2. Utils:提供常用工具,如日志; 35 | 3. 雷达软硬件配置功能模块:提供雷达硬件及软件的配置功能; 36 | 4. 雷达数据采集功能模块:提供雷达数据采集及解析功能; 37 | 5. 雷达数据过滤功能模块:提供雷达数据过滤(拖尾滤波)功能; 38 | 6. 发布雷达数据:提供雷达数据发布功能。 39 | -------------------------------------------------------------------------------- /docs/cn/question_answer_cn.md: -------------------------------------------------------------------------------- 1 | 开发者QA问题汇总 2 | 3 | ## 刷机包在哪里下载? 4 | 5 | ```Bash 6 | 【CyberDog2 刷机包下载】https://s.xiaomi.cn/c/8JEpGDY8 7 | ``` 8 | 9 | ## 雷达自检失败是什么原因? 10 | 11 | ```Bash 12 | 更新英伟达官方软件包,可能导致雷达自检失败 13 | 1.目前已知nvidia自带的服务nvgetty.service会使用和雷达同样的串口,两者会产生冲突,我们在出厂时已经将该服务禁用掉了。 14 | 2.可以使用如下命令查看下是否该服务在运行: 15 | sudo systemctl status nvgetty.service 16 | 3.如果在运行,使用如下命令禁用掉: 17 | sudo systemctl disable nvgetty.service 18 | 执行之后重启即可 19 | ``` 20 | 21 | ## 编译过不去,遇到各种问题怎么办? 22 | 23 | ```Plain 24 | 1,进入tools目录下,可使用Dockerfile文件编译镜像 25 | 2,参考Dockerfile文档 https://github.com/MiRoboticsLab/blogs/blob/rolling/docs/cn/dockerfile_instructions_cn.md 进行编译,如果还有其它问题,可以在issue中提问。 26 | ``` 27 | 28 | ## cyberdog与cyberdog2对应仓库 29 | 30 | ```Bash 31 | cyberdog 对应仓库地址:https://github.com/MiRoboticsLab/cyberdog_ros2 32 | cyberdog2对应仓库地址:https://github.com/MiRoboticsLab/cyberdog_ws 33 | ``` 34 | -------------------------------------------------------------------------------- /docs/en/cyberdog_miloc_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_mivins 2 | 3 | ## Module Introduction 4 | 5 | cyberdog_miloc is a visual mapping and localization module based on a trinocular camera system, used for visual mapping and navigation. During the mapping process, it utilizes robot poses output from Minins and images for sparse reconstruction. During the navigation process, it provides the pose located in the map。 6 | 7 | ## Module Architecture 8 | 9 | ![cyberdog_miloc_architecture](./image/cyberdog_miloc/cyberdog_miloc_architecture.jpg) 10 | 11 | cyberdog_miloc includes modules of sparse reconstruction, re-localization and map manager。 12 | 13 | ## Service Process 14 | 15 | ### mapping service process 16 | 17 | 18 | ### re-localization service process 19 | 20 | 21 | -------------------------------------------------------------------------------- /docs/en/cyberdog_wifi_en.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_wifi Design 2 | 3 | ## Overview 4 | 5 | cyberdog_wifi is the ROS interface node for cyberdog to control the WiFi connection, and other ROS nodes can call it to connect to the specified router and hotspot. 6 | 7 | ## System Structure 8 | 9 | 1. cyberdog_wifi package: Generates cyberdog_wifi node,which is used to provide a router connected to a specified ssid and publish WiFi connection status information 10 | 1. WiFi connection server: Connecting to specified SSID 11 | 2. WiFi info publisher: Publishing wifi connection status info, including SSID, RSSI, local IP, etc 12 | 13 | 1. 
connector node: Acquiring WiFi status info and initiating connection requests 14 | ![structure](./image/cyberdog_wifi/cyberdog_wifi_en_structure.svg) 15 | 16 | ## Operation Process 17 | 18 | ### Connection Process 19 | ![connect](./image/cyberdog_wifi/cyberdog_wifi_request_en.svg) 20 | ### Info Publishing Process 21 | ![info](./image/cyberdog_wifi/cyberdog_wifi_info_en.svg) -------------------------------------------------------------------------------- /docs/en/cyberdog_tracking_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_tracking 2 | 3 | ## Module Introduction 4 | 5 | cyberdog_tracking is a function package based on ROS2. It is mainly used to estimate the 3D position of the tracking target and provide the target point for the tracking_base module to plan the path and control the robot. The objects of position estimation include pedestrians and any regular or irregular targets selected by the user. 6 | 7 | ## Module Architecture 8 | 9 | ![](./image/cyberdog_tracking/tracking_arch.png) 10 | 11 | - camera_server: Module that provides images 12 | - cyberdog_vision: Module for AI algorithm processing 13 | - cyberdog_tracking: Module for position estimation 14 | - tracking_base: Module for planning and control 15 | 16 | ## Workflow 17 | 18 | ![](./image/cyberdog_tracking/tracking_workflow_en.png) 19 | 20 | - module input: The results of the AI algorithm and the aligned depth image 21 | - module output: Tracking target location and tracking status 22 | - status pub: far, near, edge 23 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 |

2 | 四足机器人开发指南 3 |

4 | 5 | 6 | ## 简介 7 | 8 | 本文基于小米四足开发平台做软件开发指导。 9 | 10 | 11 | 12 | ## 框架 13 | 14 | **开发整体框架图** 15 | 16 | ![dev](./image/dev.png) 17 | 18 | **开发方式** 19 | 20 | 1. 基于PC端编写程序,通过grpc或ros对机器人进行调度 21 | 2. 基于NX端编写调度程序,对现有的服务类ros接口进行调用(推荐) 22 | 3. 基于NX端改写程序,对现有的调度类或服务类业务进行二次开发 23 | 24 | **开发流程** 25 | 26 | 1. 通过手机APP申请开发者权限 27 | 28 | 2. 与机器人建立连接(无线有线均可) 29 | 30 | - 有线登录方式,USB-Type数据线连接PC与机器人Download口: 31 | 32 | `ssh mi@192.168.55.1` 33 | 34 | `passwd: "123"` 35 | 36 | - 无线方式,通过app给机器人连网,使用无线网IP(app内可查看)登录 37 | 38 | ![connect](./image/connect.png) 39 | 40 | 3. 按照你想要的方式开始即可 41 | 42 | 43 | 44 | ## 资源 45 | 46 | - 核心代码开源:[Github地址](https://github.com/MiRoboticsLab/cyberdog_ws) 47 | - 开发者手册(API,上述第1、2种开发方式):[博客地址](https://miroboticslab.github.io/blogs/#/cn/developer_guide) 48 | - 程序设计(源码,上述第3种开发方式):应用文档,运控文档,slam文档以及感知文档; 49 | - 刷机:[博客地址](https://miroboticslab.github.io/blogs/#/cn/cyberdog_flash) 50 | - 运营咨询:[mi-cyberdog@xiaomi.com](mailto:mi-cyber@xiaomi.com) 51 | -------------------------------------------------------------------------------- /docs/en/cyberdog_mivins_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_mivins 2 | 3 | ## Module Introduction 4 | 5 | cyberdog_mivins is used in functions such as visual mapping, visual localization, and visual following, providing real-time output of the pose TF service for quadruped robots. Developed based on open-source projects SVO and VINS-FUSION, it uses multi-sensor data fusion positioning, including cameras, IMUs, and quadruped odometry. 6 | 7 | ## Module Architecture 8 | 9 | ![cyberdog_mivins_architecture](./image/cyberdog_mivins/cyberdog_mivins_architecture.jpg) 10 | 11 | The cyberdog_mivins localization algorithm includes modules for data input, front-end tracking, front-end mapping, back-end optimization, and output. 12 | 13 | ## Service Process 14 | 15 | ### mapping service process 16 | ![mivins_mapping_process](./image/cyberdog_mivins/mivins_mapping_process.jpg) 17 | 18 | ### localization service process 19 | 20 | ![mivins_localization_process](./image/cyberdog_mivins/mivins_localization_process.jpg) 21 | 22 | ### following localization function process 23 | ![mivins_following_process](./image/cyberdog_mivins/mivins_following_process.jpg) 24 | -------------------------------------------------------------------------------- /docs/en/cyberdog_laserslam_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_laserslam 2 | 3 | ## Module Introduction 4 | 5 | cyberdog_laserslam is a module developed based on open source project Cartographer, which capable to build map and localization in real time. The SubModule include pose extrapolator, laser scan matching, pose graph etc. 
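The pose extrapolator mentioned above can be pictured as a short prediction step between two scan matches. A minimal 2D sketch, assuming body-frame velocities from the leg odometer and a yaw rate from the IMU (illustrative only, not the actual Cartographer-based implementation):

```C++
// Predict the current 2D pose from the last scan-matched pose, the leg
// odometer's body-frame velocity and the IMU yaw rate. Illustrative sketch.
#include <cmath>

struct Pose2D { double x, y, yaw; };

Pose2D Extrapolate(const Pose2D & last_matched, double vx, double vy,
                   double yaw_rate, double dt)
{
  Pose2D p = last_matched;
  p.yaw += yaw_rate * dt;                                      // IMU heading update
  p.x += (vx * std::cos(p.yaw) - vy * std::sin(p.yaw)) * dt;   // body -> world frame
  p.y += (vx * std::sin(p.yaw) + vy * std::cos(p.yaw)) * dt;
  return p;
}
```

The scan matcher then corrects this prediction against the occupancy grid, and the corrected pose seeds the next extrapolation.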
6 | 7 | ## Module Architecture 8 | 9 | ![](./image/cyberdog_laserslam/Structure.jpg) 10 | 11 | data entry: Provide data LaserSlam required 12 | 13 | pose_extrapolator: According to the position and posture obtained by the laser front-end matching, the angle information of the imu, and the speed information of the leg odometer to infer the current position and posture 14 | 15 | Scan Matching: Get the current moment pose estimation from the pose reasoner through the new laser data, convert the laser data to the world coordinate system and occupy the corrected pose after grid matching 16 | 17 | Pose Graph: Build a pose graph by organizing the nodes and SubMap obtained from the front-end matching, and optimize the average error of the graph to smooth the pose 18 | 19 | ## Workflow 20 | 21 | ![](./image/cyberdog_laserslam/LaserSlam.jpg) -------------------------------------------------------------------------------- /docs/en/cyberdog_vision_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_vision 2 | 3 | ## Module Introduction 4 | 5 | This is a function package based on ROS2. Cyberdog_vision mainly used for AI algorithm scheduling management and AI algorithm inference. The included AI algorithms are face recognition with age and emotion, face registration, body detection, ReID, gesture recognition, keypoints detection, auto track. 6 | 7 | ## Module Architecture 8 | 9 | ![](./image/cyberdog_vision/vision_arch_en.png) 10 | 11 | 12 | 13 | - Sensor layer: Hardware devices that provide source data 14 | - Camera layer: Acquire streams based on hardware devices and perform business-related processing 15 | - AI layger: Algorithm scheduling management and algorithm inference based on AI algorithm and business requirements 16 | - Tracking layer: Estimate the target pose of pedestrians and anything else selected by user based on the results of AI algorithm 17 | 18 | Remark: The red dotted box is the planning module or the module that changes due to the change of the plan. 19 | 20 | ## Workflow 21 | 22 | ![](./image/cyberdog_vision/vision_workflow_en.png) 23 | 24 | - module input: RGB image 25 | - module output: AI algorithm processing results 26 | - status pub: selecting, tracking 27 | -------------------------------------------------------------------------------- /docs/cn/image_transmission_cn.md: -------------------------------------------------------------------------------- 1 | # 图传功能设计文档 2 | 3 | ## 概述 4 | 5 | image_transmission是一个用于传输摄像头图像到APP的功能包 6 | 7 | ## 系统结构 8 | 9 | 1. app端:发起WebRTC的信令请求,接收WebRTC的视频流,发送停止信号 10 | 2. grpc通信:转发app下发的信令和停止信号 11 | 3. image_transmission功能包:作为一个so库被cyberdog_camera节点调用,接收信令,通知cyberdog_camera开启发送原始数据,发送WebRTC视频流 12 | 4. cyberdog_camera功能包:接收image_transmission的通知,发送图像原始数据 13 | 14 | ![struct](./image/image_transmission/image_transmission_cn.svg) 15 | 16 | ## 运行流程 17 | 18 | ### 启动流程 19 | 20 | 1. app端发起信令请求,通过grpc接收后转成ros话题"img_trans_signal_in"发送给image_transmission 21 | 2. image_transmission收到信令请求后会调用"camera_service"(ros服务)通知cyberdog_camera启动发送图像原始数据 22 | 3. image_transmission发送"img_trans_signal_out"话题应答信令,grpc将信令转发给app端 23 | 4. 步骤1和3会进行多次 24 | 5. 当信令协商成功后image_transmission会将图像原始数据转化为WebRTC视频流发送给app端 25 | 26 | ### 停止流程 27 | 28 | 正常停止流程,按app上的停止按钮: 29 | 30 | 1. app端发送停止信号,grpc转成ros话题"img_trans_signal_in"发送给image_transmission 31 | 2. image_transmission收到停止信号后断开WebRTC连接,并发送停止完成信号,用"img_trans_signal_out"话题发送哥grpc,grpc转发给app端 32 | 3. image_transmission调用"camera_service"(ros服务)通知cyberdog_camera停止发送图像原始数据 33 | 4. 
app端停止显示 34 | 35 | 或者直接返回退出app: 36 | 37 | 1. app端的WebRTC连接会断开 38 | 2. image_transmission检测到WebRTC连接会断开会调用"camera_service"(ros服务)通知cyberdog_camera停止发送图像原始数据 -------------------------------------------------------------------------------- /docs/cn/cyberdog_bringup_cn.md: -------------------------------------------------------------------------------- 1 | #
cyberdog_bringup 设计文档
2 | 3 | ## 1. 概述 4 | bringup 主要功能如下: 5 | 1. 可根据目标配置文件(*.yaml)内的参数启动机器人内所有已配置的节点; 6 | 2. 动态启动/停止某些节点(目前仅 navigation2 有需求); 7 | 3. 配置文件尽可能全的适配 ROS2 launch 内接口; 8 | 9 | ## 2. 设计 10 | ### 2.1. 功能设计 11 | bringup 启动约束分两类,即配置文件(*.yaml)和 命令行启动参数(argv[]),其中: 12 | 1. 配置文件(*.yaml)有唯一主文件,主文件内可加载其他副文件内符合 ros 约束的参数; 13 | 2. 命令行启动参数(argv[])适配 ROS2 launch 参数接口。 14 | 15 | 就启动文件呈现形式考虑到以下问题: 16 | 1. 启动约束在运行时动态加载到内存时用户不可见,不好排查分析; 17 | 2. 唯一启动文件不可以同时启动多次,不好排查分析。 18 | 19 | 故启动约束将静态穷举,尽可能避免运行时动态加载,由此: 20 | 1. 启动文件将在 cyberdog_bringup 编译时根据配置文件(*.yaml)动态生成; 21 | 2. 且尽可能穷举参数,但也会流出动态参数入口。 22 | 3. 启动文件有唯一主文件,负责启动参数约束下所有节点; 23 | 4. 启动文件有不同级别的副文件,负责启动参数约束下相应节点。 24 | 25 | #### 2.2 模块设计 26 | 27 |
28 | 29 | ![](./image/cyberdog_bringup/cyberdog_bringup.svg) 30 | 31 |
32 | 33 | - cyberdog_bringup根据配置文件(bringup.yaml, launch.yaml, node.yaml)动态生成所需的各级别的launch.py 文件。 34 | - 自动生成的launch.py文件会根据配置参数加载手动创建的 yaml 文件或 launch 文件,做到灵活配置。 35 | - 最终根据配置可实现启动机器人所有节点的唯一 launch.py,用于系统级别的service 调用,从而实现机器人软件功能的自动启动功能。 36 | 37 | 实现方案简单分为以下5步: 38 | 1. 将复杂冗长的语法进行分类抽象,提取出必须由人工编码的环节并高度抽象为自定义的元语言; 39 | 2. 用某种编码风格(比如toml,yaml,json等)对自定义元语言进行编码; 40 | 3. 为上述确定的编码风格及元语言编写编译工具,也就是机器人实现自动编码功能的程序; 41 | 4. 在每次编译时,自动对元语言进行编译,产生可执行脚本,并安装; 42 | 5. 通过系统指令设置开机启动。 43 | -------------------------------------------------------------------------------- /docs/cn/connector_cn.md: -------------------------------------------------------------------------------- 1 | #
connector 设计文档
2 | 3 | ## 1. 概述 4 | 仿生机器人快速连接网络及目标设备(下文简称快连)场景主要有以下特性: 5 | 1. 仿生机器人处于WiFi环境中; 6 | * WiFi可以是第三方路由的WiFi; 7 | * WiFi也可以是APP所在设备外放的热点。 8 | 2. App所在设备已连接上目标WiFi; 9 | 3. App发起的使得仿生机器人连接目标WiFi; 10 | 4. 仿生机器人连接目标WiFi成功后再连接目标APP所在设备; 11 | 5. 最终实现快速和目标WiFi及设备建立连接。 12 | 13 | ## 2. 设计 14 | ### 2.1. 功能设计 15 | 16 | 快连功能主要有两个: 17 | 1. 配网功能:通过长按touch来实现开关; 18 | 2. 自动连接:通过系统自带的重连机制实现。 19 | 20 | #### 2.2 模块设计 21 | 22 |
23 | 24 | ![](./image/connector/connector_module.svg) 25 | 26 |
27 | 28 | 如上图所示,仿生机器人快速连接网络及目标设备架构组成及各组成部分主要功能如下: 29 | 1. touch:提供机器人头部触摸信号。 30 | 2. led:提供展示机器人头部及身体上灯效功能。 31 | 3. audio:提供机器人语音播报功能。 32 | 4. camera:提供视野内图像数据。 33 | 5. QRreader:提供二维码识别功能。 34 | 6. WiFi:提供 linux 系统的目标WiFi连接功能。 35 | 7. App:提供 携带WiFi信息的 二维码生成功能。 36 | 37 | ## 3. 手动联网 38 | ### 3.1 联网及APP 39 | ``` 40 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/connect protocol/srv/Connector "{wifi_name: 'wifi_name',wifi_password: 'wifi_password',provider_ip: 'app_ip'}" 41 | ``` 42 | ### 3.2 仅联网 43 | ``` 44 | sudo nmcli dev wifi connect password ifname wlan0 45 | ``` 46 | ### 3.3 其他操作 47 | ``` 48 | # 查看连接过的WiFi列表 49 | nmcli connection | grep wifi 50 | # 删除连接过的WiFi列表中指定WiFi连接 51 | sudo nmcli connection delete 'ssid' 或者 'uuid' 52 | ``` 53 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_gps_cn.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_gps 设计文档 2 | 3 | ## 概述 4 | 5 | ``cyberdog_gps`` 以ros2 plugin形式向客户端提供gps数据服务,负责控制gps模组的起停,并将gps数据组织成ros msg格式通过sensor_manager向外发布。 6 | 7 | ## 软件设计 8 | 9 | ### 软件框架 10 | 11 |
12 | 13 | ![avatar](./image/cyberdog_gps/cyberdog_gps_flow.png) 14 | 15 |
16 | 17 | #### 数据流开启 18 | 19 |
20 | 21 | ![avatar](./image/cyberdog_gps/cyberdog_gps_open.png) 22 | 23 |
24 | 25 | #### 数据流关闭 26 | 27 |
28 | 29 | ![avatar](./image/cyberdog_gps/cyberdog_gps_close.png) 30 | 31 |
32 | 33 | ## 功能设计 34 | 35 | - 通过配置文件可gps功能相关配置参数等 36 | - 提供传感器开启、关闭、自检、仿真等基本能力接口 37 | 38 | ## 配置文件 39 | 40 | - 源码路径:``bridges/params/toml_config/sensors`` 41 | - 安装路径:``/opt/ros2/cyberdog/share/params/toml_config/sensors`` 42 | - 配置文件: 43 | - ``bcmgps_config.toml``:用于配置gps相关参数 44 | - 主要的参数: 45 | - ``spi`` spi端口 46 | - ``patch_path`` patch路径 47 | - ``MsgRate`` gps数据频率 48 | 49 | ## API接口 50 | - ``Init(bool simulator)``:初始化配置 51 | - ``simulator = true``:配置为仿真模式 52 | - ``Open()``:打开传感器 53 | - ``Start()``:使能传感器 54 | - ``Stop()``:停止传感器 55 | - ``Close()``:关闭传感器 56 | - ``SelfCheck()``:传感器自检 57 | - ``LowPowerOn()``:进入低功耗模式 58 | - ``BCMGPS_Payload_callback(std::shared_ptr payload)``:设置消息回调函数 59 | -------------------------------------------------------------------------------- /docs/cn/motion_manager_cn.md: -------------------------------------------------------------------------------- 1 | # 运动管理设计文档 2 | 3 | ## 概述 4 | 运动管理模块是对运动控制功能的集中管理,所有的运控调用功能均由该模块向运控层转发,并获取运控的状态反馈。对于用户的对运控的调用指令,该模块将根据系统的整体状态机、电量状态、运动状态进行集中决策和执行。 5 | ## 模块架构 6 | 7 | ### 软件架构图 8 | 9 |
10 | 11 | ![](./image/motion/motion.png) 12 | 13 |
14 | 15 | ### 架构说明 16 | 17 | #### 角色说明 18 | 19 | MotionManager 20 | 21 | - Cyberdog软件体系的运动管理接口,在内部分为三层:MotionManager、MotionDecision、MotionHandler,其中MotionManager继承了CyberdogManager的状态机,实现了自检、低电量、低功耗、关机、OTA等不同的状态。MotionDecision依据机器急停状态、指令优先级等做决策;MotionHandler判断指令合法性、机器任务状态、电机状态等做进一步管理指令。 22 | - 所有需要运行在Cyberdog框架内的运控调用,只能通过该模块的接口。 23 | - 接口有自己维护的业务逻辑,即调用接口会得到返回,但不一定有运动响应,返回码中统一定义了所有的异常。 24 | 25 | MotionAction 26 | 27 | - 作为NX主控板与运控板的唯一接口,以lcm通讯协议向运控板下发所有的运动指令,同时将运控板的实时状态上报NX主控板 28 | - 根据运控定义的所有动作,管理维护所有的动作属性清单,包括运控定义每个动作的mode与gait_id属性与MotionManger对外定义motion_id的映射、每个动作允许衔接的前后状态与指令。 29 | 30 | 31 | 32 | ## 接口类型说明 33 | 34 | - 将所有动作进行封装,对外提供结果指令、伺服指令以及自定义动作指令三种接口,其中自定义动作指令只开放给可视化编程,其他指令开放给所有调用方。 35 | - 伺服指令主要面向运控板所定义的locomotion动作,即慢走、快走、小跑、跳跑等动作。此类指令下发运控后,运控会按照最后一帧的相关参数,如速度、抬腿高度等维持状态。在APP等需要grpc通讯的场景下,为避免通讯断连后运控方的失控状态,要求APP等上层下发的指令间隔时长默认不多于200ms,否则将强制认为通讯超时,并向运控板下发站立指令实现停止状态。 36 | - 伺服指令实现了多方协同控制,如APP、蓝牙手柄、导航任务等,内部定义了不同的指令优先级,按照优先级实现控制权管理,值越小代表优先级越高。以下是默认的优先级定义: 37 | 38 | ```C%2B%2B 39 | App = 0 # 安卓APP 40 | Audio = 1 # 语音 41 | Vis = 2 # 可视化编程 42 | BluTele = 3 # 蓝牙手柄 43 | Algo = 4 # 导航跟随等任务 44 | ``` 45 | 46 | ## 运动控制流程 47 | 48 |
49 | 50 | ![](./image/motion/motion_flow.png) 51 | 52 |
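The servo-command rule described above (commands must arrive no more than 200 ms apart, otherwise the manager forces a stand) can be illustrated with a small sender that publishes on a timer. A minimal sketch; the topic name "motion_servo_cmd", the protocol::msg::MotionServoCmd type, its fields and the motion_id value are assumptions based on the protocol package and should be checked against the actual interface definition:

```C++
// Publish servo commands at 10 Hz so the interval stays well inside the 200 ms
// timeout. Topic, message type and field names are assumed, not verified.
#include <chrono>
#include "rclcpp/rclcpp.hpp"
#include "protocol/msg/motion_servo_cmd.hpp"   // assumed header from the protocol package

class ServoCmdSender : public rclcpp::Node
{
public:
  ServoCmdSender() : Node("servo_cmd_sender")
  {
    pub_ = create_publisher<protocol::msg::MotionServoCmd>("motion_servo_cmd", 10);
    timer_ = create_wall_timer(std::chrono::milliseconds(100), [this]() {
        protocol::msg::MotionServoCmd cmd;
        cmd.motion_id = 303;                  // assumed gait id, see the motion list
        cmd.vel_des = {0.2f, 0.0f, 0.0f};     // forward velocity command, m/s
        pub_->publish(cmd);
      });
  }

private:
  rclcpp::Publisher<protocol::msg::MotionServoCmd>::SharedPtr pub_;
  rclcpp::TimerBase::SharedPtr timer_;
};

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  rclcpp::spin(std::make_shared<ServoCmdSender>());
  rclcpp::shutdown();
  return 0;
}
```

Whichever caller sends such commands, its priority (App = 0 ... Algo = 4) decides who keeps control when several senders are active at once.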
-------------------------------------------------------------------------------- /md.md: -------------------------------------------------------------------------------- 1 | ## docsify 2 | 3 | 基于 docsify 4 | 5 | https://docsify.js.org/#/zh-cn/cover 6 | 7 | ## 启动服务 8 | 9 | docsify serve ./docs 10 | 11 | https://zeroone001.github.io/robotdemo.github.io/#/ 12 | 13 | 14 | ## 仓库地址 15 | 16 | https://github.com/BerserkerRider/robot.github.io 17 | 18 | ## gittalk 19 | 20 | 展示GitHub issues 内容的插件 21 | 22 | https://github.com/gitalk/gitalk 23 | 24 | ### 安装 25 | 26 | https://github.com/gitalk/gitalk#install 27 | 28 | ```js 29 | var gitalkConfig = { 30 | clientID: "a2bdae5457402030fb6b", 31 | clientSecret: "c1c9ce6f3334a85f5456b602ca138dee038fd414", 32 | repo: "robotdemo.github.io", 33 | owner: "zeroone001", 34 | admin: ["zeroone001"], 35 | perPage: 20, 36 | language: "zh-CN", 37 | // labels: ['Open'], 38 | pagerDirection: "last", 39 | distractionFreeMode: false, 40 | proxy: 'http://192.168.31.16:8011' 41 | }; 42 | const gitalk = new Gitalk({ 43 | clientID: 'GitHub Application Client ID', 44 | clientSecret: 'GitHub Application Client Secret', 45 | repo: 'https://github.com/zeroone001/robotdemo.github.io/tree/master', // The repository of store comments, 46 | owner: 'zeroone001', 47 | admin: ['zeroone001'], 48 | id: location.pathname, // Ensure uniqueness and length less than 50 49 | distractionFreeMode: false // Facebook-like distraction free mode 50 | }) 51 | 52 | gitalk.render('gitalk-container'); 53 | ``` 54 | 55 | ## 第三方登录 56 | 57 | https://www.ruanyifeng.com/blog/2019/04/github-oauth.html 58 | 59 | ## 开源 60 | 61 | https://github.com/doocs/doocs.github.io -------------------------------------------------------------------------------- /docs/cn/cyberdog_grpc_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_grpc设计文档 2 | 3 | ## 概述 4 | 5 | cyberdog_grpc是一个用于将ROS中的话题、服务、行动转化成gRPC请求的节点。它可以实现ROS与APP的通信。 6 | 7 | ## gRPC基本信息 8 | 9 | gRPC是由 google开发的一个高性能、通用的开源RPC框架,主要面向移动应用开发且基于HTTP/2协议标准而设计,同时支持大多数流行的编程语言。 10 | 11 | gRPC使用Protocol Buffers作为序列化协议,它是一种与语言、平台无关 、可扩展的序列化结构数据。它的定位类似于JSON、XML,但是比他们更小、更快、更简单。 12 | 13 | ## 开源仓库 14 | 15 | https://github.com/grpc/grpc 16 | 17 | ### 版本 18 | 19 | 本产品中使用的是1.44.0,使用其它版本的gRPC也可以与本产品正常通信。 20 | 21 | ### 安装 22 | 23 | C++可参考https://github.com/grpc/grpc/blob/master/BUILDING.md 24 | 25 | Python: `pip3 install grpcio grpcio_tools` 26 | 27 | 其它语言可参考官方README.md 28 | 29 | ### 测试 30 | 31 | 编译测试例程: 32 | 33 | ```Shell 34 | cd grpc/examples/cpp/helloworld 35 | mkdir -p cmake/build 36 | cd cmake/build 37 | cmake ../.. 
38 | make 39 | ``` 40 | 41 | 运行服务器: 42 | 43 | ```Shell 44 | ./greeter_server 45 | ``` 46 | 47 | 屏幕会显示:Server listening on 0.0.0.0:50051 48 | 49 | 运行客户端: 50 | 51 | ```Shell 52 | ./greeter_client 53 | ``` 54 | 55 | 屏幕会显示:Greeter received: Hello world 56 | 57 | ## 接口设计 58 | 59 | 接口采用`nameCode + 内容`的形式进行远程调用。 60 | 61 | ![image](./image/cyberdog_grpc/cyberdog_grpc_cn.svg) 62 | 63 | ### nameCode 64 | 65 | 即指令码,protobuf中类型为fixed32,C++中类型为uint32,对应着不同的调用功能,在本产品内部对应着不同的ROS接口。 66 | 67 | ### 内容序列化 68 | 69 | 多数的远程调用指令有调用参数以及返回的数据,这些内容都统一用符合JSON格式的字符串传输,本产品中采用[RapidJSON](https://github.com/Tencent/rapidjson)进行序列化和反序列化操作。 70 | 71 | 内容在protobuf中类型为string,C++中类型为std::string。 72 | 73 | #### params 74 | 75 | 即调用参数,对应调用功能需要的输入信息。 76 | 77 | #### data 78 | 79 | 即返回数据,对应调用功能的输出结果。 80 | 81 | [业务协议](/cn/grpc_protocol.md) 82 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_bluetooth_cn.md: -------------------------------------------------------------------------------- 1 | # 蓝牙模块功能设计 2 | 3 | ## 概述 4 | 5 | cyberdog_bluetooth是一个通过ROS操作BLE从设备的节点。它通过bluepy库创建BLE主设备,进行扫描、连接BLE从设备、对指定特征值读写等操作。作为ROS节点,它提供扫描、连接蓝牙手柄、获取设备信息、发布摇杆数据、启动UWB跟随等业务功能的接口。 6 | 7 | ## 系统结构 8 | 9 | ### 总体结构 10 | 11 | 1. 蓝牙节点cyberdog_bluetooth:负责连接蓝牙外设,发送和接收相关指令对UWB设备进行连接和断开 12 | 2. UWB功能包cyberdog_uwb:由device_manager调用,负责产生UWB的session_id和主从设备的mac,通过UWB硬件获取数据并计算距离角度等信息 13 | 3. grpc通信节点:负责转发app下发的相关扫描、连接、停止等指令 14 | 4. app端:下发指令来控制蓝牙UWB设备 15 | 5. 手环:用于UWB定位和遥控 16 | 6. 充电桩UWB设备:用于UWB定位 17 | 18 | ![struct](./image/cyberdog_bluetooth/cyberdog_bluetooth_cn.svg) 19 | 20 | ### cyberdog_bluetooth结构 21 | 22 | 1. 蓝牙核心模块:使用bluepy库实现BLE的基本功能 23 | 2. 固件升级模块:调用蓝牙核心模块实现BLE从设备的固件升级业务 24 | 3. UWB跟随模块:负责用ROS action调用UWB跟随任务,提供启动、停止和查询接口函数 25 | 4. ROS接口:根据ROS调用请求,调用蓝牙核心模块对BLE进行操作,实现相关业务功能 26 | 27 | ![node_struct](./image/cyberdog_bluetooth/cyberdog_bluetooth_node_cn.svg) 28 | 29 | ## 运行流程 30 | 31 | ### 扫描外设流程 32 | 33 | 1. APP下发扫描指令 34 | 2. grpc节点收到扫描指令,调用蓝牙节点提供的扫描服务 35 | 3. 蓝牙节点收到扫描服务请求,开始扫描蓝牙外设,完成扫描后将结果返回给调用方 36 | 4. grpc节点收到蓝牙外设列表,上发给APP 37 | 5. APP收到后显示设备列表 38 | 39 | ### 连接外设流程 40 | 41 | 1. APP下发选中的蓝牙外设信息 42 | 2. grpc节点收到选择的蓝牙外设信息,调用蓝牙节点提供的连接服务,并传入外设信息 43 | 3. 蓝牙节点收到连接服务请求,根据蓝牙外设信息开始连接蓝牙外设 44 | 4. 蓝牙连接成功后读取蓝牙外设固件版本、设备类型(手环或充电桩) 45 | 5. 根据设备类型,开启一些数据通知并注册相应的回调函数,比如:串口、电量、摇杆 46 | 6. 蓝牙节点调用UWB功能包提供的session id和mac生成服务 47 | 7. UWB功能包收到请求后生成session id和mac,返回给请求方 48 | 8. 蓝牙节点将新生成的session id和mac通过蓝牙发送给蓝牙外设,等待UWB连接确认 49 | 9. 蓝牙外设成功与UWB功能包通信上后通过蓝牙发送成功连接的消息给蓝牙节点 50 | 10. 蓝牙节点完成连接流程,将结果返回给调用方 51 | 11. grpc将结果上发给APP 52 | 12. APP显示连接结果 53 | 54 | ### 断开连接流程 55 | 56 | 1. APP下发断开连接指令 57 | 2. grpc节点收到指令,调用蓝牙节点提供的连接服务,并传入空连接信息 58 | 3. 蓝牙节点收到连接服务请求,通过蓝牙给外设发送断开UWB连接指令 59 | 4. 蓝牙外设断开UWB连接并用蓝牙发送断开连接成功的消息给蓝牙节点 60 | 5. 蓝牙节点注销回调函数并断开与外设的连接 61 | 6. 蓝牙节点完成断开连接流程,将结果返回给调用方 62 | 7. grpc将结果上发给APP 63 | 8. APP显示断开连接结果 -------------------------------------------------------------------------------- /docs/en/cyberdog_gps_en.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_gps Design 2 | 3 | ## Functional Overview 4 | 5 | ``cyberdog_gps`` Provide gps data service to the client as a ros2 plugin, responsible for controlling the start and stop of the gps module, and organize the gps data into ros msg format and publish it through sensor_manage. 6 | 7 | ## Architecture Design 8 | 9 | ### Soft Architecture 10 | 11 |
12 | 13 | ![avatar](./image/cyberdog_gps/cyberdog_gps_flow.png) 14 | 15 |
16 | 17 | #### Opening the GPS data stream 18 | 19 |
20 | 21 | ![avatar](./image/cyberdog_gps/cyberdog_gps_open.png) 22 | 23 |
24 | 25 | #### Closing the GPS data stream 26 | 27 |
28 | 29 | ![avatar](./image/cyberdog_gps/cyberdog_gps_close.png) 30 | 31 |
32 | 33 | ## Feature Design 34 | 35 | 36 | - Through the configuration file, you can configure parameters related to the gps function 37 | - Provide basic capability interfaces such as sensor open, close, self-test, simulation. 38 | 39 | ## Configuration File 40 | 41 | - Src path:``bridges/params/toml_config/sensors`` 42 | - Install path:``/opt/ros2/cyberdog/share/params/toml_config/sensors`` 43 | - Config path:: 44 | - ``bcmgps_config.toml``:gps params config 45 | - params: 46 | - ``spi`` spi port 47 | - ``patch_path`` patch path 48 | - ``MsgRate`` gps data fps 49 | 50 | ## API Interface 51 | - ``Init(bool simulator)``:init 52 | - ``simulator = true``:simulator 53 | - ``Open()``:open gps 54 | - ``Start()``:enable gps to publish gps data 55 | - ``Stop()``:stop to publish gps data 56 | - ``Close()``:close gps 57 | - ``SelfCheck()``:gps self check 58 | - ``LowPowerOn()``:lowpower 59 | - ``BCMGPS_Payload_callback(std::shared_ptr payload)``topic callback function 60 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_touch_cn.md: -------------------------------------------------------------------------------- 1 | # Touch设计文档 2 | # 一、概述 3 | 下文旨在说明机器狗touch交互逻辑及其功能映射等。 4 | # 二、设计 5 | ## 2.1 功能划分 6 | 主要功能如下: 7 | - 获取touch触发事件(双击(0x03)、长按(0x07)等); 8 | - touch自检; 9 | ## 2.2 模块架构 10 | 模块架构图如下: 11 | 12 |
13 | 14 | ![avatar](./image/cyberdog_touch/touch_architecture_diagram.png) 15 | 16 |
17 | 18 | ## 2.3 接口设计 19 | touch模块数据流如下: 20 | 21 |
22 | 23 | ![avatar](./image/cyberdog_touch/Touch_data_stream.png) 24 | 25 |
26 | 27 | touch模块消息协议如下: 28 | 29 | - 话题名称:“touch_status” 30 | - 话题消息文件目录:“/bridges/protocol/ros/msg/TouchStatus.msg” 31 | - 话题消息结构如下: 32 | int32 touch_state //touch信号量,非0表示touch被触发 33 | uint64 timestamp //时间戳 34 | - touch_state信号量值如下: 35 | #define LPWG_SINGLETAP_DETECTED 0x01 36 | #define LPWG_DOUBLETAP_DETECTED 0x03 37 | #define LPWG_TOUCHANDHOLD_DETECTED 0x07 38 | #define LPWG_CIRCLE_DETECTED 0x08 39 | #define LPWG_TRIANGLE_DETECTED 0x09 40 | #define LPWG_VEE_DETECTED 0x0A 41 | #define LPWG_UNICODE_DETECTED 0x0B 42 | #define LPWG_SWIPE_DETECTED 0x0D 43 | #define LPWG_SWIPE_DETECTED_UP_CONTINU 0x0E 44 | #define LPWG_SWIPE_DETECTED_DOWN_CONTINU 0x0F 45 | #define LPWG_SWIPE_DETECTED_LEFT_CONTINU 0x10 46 | #define LPWG_SWIPE_DETECTED_RIGHT_CONTINU 0x11 47 | ## 2.4 模块设计 48 | 主要分为touch_input_event_reader、touch_base和- touch_sensor_handler和touch_plugin等模块。 49 | - touch_input_event_reader主要实现了从events中读取touch事件功能; 50 | - touch_base主要实现了touch模块初始化、自检等功能的定义; 51 | - touch_sensor_handler主要实现了设备文件打开、基于epolling机制对触摸事件进行处理和映射等功能; 52 | - touch_plugin主要实现了touch模块初始化、自检、触摸事件信号的话题发布等功能。 53 | -------------------------------------------------------------------------------- /docs/en/cyberdog_lidar_en.md: -------------------------------------------------------------------------------- 1 | #
cyberdog_lidar design document
2 | 3 | ## 1. Overview 4 | 5 |
6 | 7 | ![](./image/cyberdog_lidar/cyberdog_lidar_scan.png) 8 | 9 |
10 | 11 | As shown in the figure above, the bionic robot radar driver is mainly used in scenarios that require real-time feedback of the distance to obstacles in the robot's environment and the intensity of each distance measurement. 12 | 13 | ## 2. Design 14 | ### 2.1. Feature design 15 | The workflow of the bionic robot radar driver is mainly as follows: 16 | 1. Parse the radar configuration parameters and configure the radar software and hardware according to them; 17 | 2. Initialize the radar; 18 | 3. If the initialization is successful, continue to the next step, otherwise exit; 19 | 4. Collect raw radar data; 20 | 5. If filtering is required, filter the raw radar data (smearing filter) and publish it, otherwise publish the raw data directly; 21 | 6. If the program receives a termination signal, exit; otherwise go back to step 4. 22 | 23 | ### 2.2 Technology architecture 24 | 25 |
26 | 27 | ![](./image/cyberdog_lidar/cyberdog_lidar_module.svg) 28 | 29 |
30 | 31 | As shown in the figure above, the composition of the bionic robot radar drive architecture and the main functions of each component are as follows: 32 | 1. Lidar SDK: Provides radar firmware data collection and analysis functions under Linux; 33 | 2. Utils: Provide common tools, such as logs; 34 | 3. Radar software and hardware configuration function module: provide radar hardware and software configuration functions; 35 | 4. Radar data acquisition function module: provide radar data acquisition and analysis functions; 36 | 5. Radar data filtering function module: provide radar data filtering (smearing filter) function; 37 | 6. Release radar data: Provide radar data release function. 38 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_manager_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_manager设计文档 2 | 3 | 4 | ## 概述 5 | cyberdog_manager为系统顶层管理器,用于接收机器人能力调用指令,管控机器人状态; 6 | ## 架构 7 | ## 框架设计 8 | ![](./image/cyberdog_manager/cyberdog_manager_architecture_cn.svg) 9 | ## 数据流 10 | ![](./image/cyberdog_manager/cyberdog_manager_data_flow_cn.svg) 11 | ## 主要管理模块功能 12 | ### 电量管理(BatteryCapacityInfo) 13 | 1. 管控与电量相关的所有Led灯效展示 14 | 15 | - 非充电状态 16 | - 电量≤20%:眼灯红色圆形缩放,头灯和尾灯红色流星灯效 17 | - 20%<电量<80%:眼灯青色圆形缩放,头灯和尾灯青色流星灯效 18 | - 电量≥80%:眼灯蓝色圆形缩放,头灯和尾灯蓝色流星灯效 19 | 20 | - 充电状态 21 | - 电量≤20%:眼灯红色圆形缩放,头灯和尾灯红色逐个点亮灯效 22 | - 20%<电量<80:眼灯青色圆形缩放,头灯和尾灯青色逐个点亮灯效 23 | - 电量≥80%:眼灯蓝色圆形缩放,头灯和尾灯蓝色逐个点亮灯效 24 | 25 | 2. 管控与电量相关的所有语音提示 26 | 27 | - 电量下降为0%:“电量为0,关机中!” 28 | - 电量下降至≤5%:“电量低于5%,电池即将耗尽,请尽快充电!” 29 | - 电量下降至≤20%:“电量低于20%,部分功能受限!” 30 | - 电量下降至≤30:“电量低于30%,请尽快充电!” 31 | 32 | 3. 管控与电量相关的状态机切换 33 | 34 | - 电量下降为0%:状态机切换到TearDown 35 | - 电量下降至≤5%:状态机切换至LowPower 36 | - 电量下降至≤20%:状态机切换至Protect 37 | 38 | ### 功耗管理(PowerConsumptionInfo) 39 | 1. 关机、重启、低功耗进入/退出、电机上/下的接口实现 40 | 2. 根据运动状态变化进入低功耗:铁蛋趴下超过30s进入低功耗(需打开低功耗开关) 41 | 3. 进/退低功耗led灯效的提示 42 | 43 | - 进入低功耗:尾部灯带熄灭 44 | - 退出低功耗:尾部灯带天蓝慢呼吸灯效 45 | ### 状态机管理(MachineStateSwitchContext) 46 | 1. 状态机配置管理入口 47 | 2. 根据实时电量对整机状态进行切换 48 | 3. 低功耗启用/禁用接口的实现 49 | 4. 与CyberdogMchine关联,同步切换各模块状态 50 | ### touch管理(TouchInfo) 51 | 52 | 1. 双击Touch:语音播报当前电量 53 | 2. 双击Touch:退出低功耗模式 54 | 3. 长按Touch:开启配网功能 55 | 56 | ### audio管理(AudioInfo) 57 | 1. audio初始化及自检 58 | 2. 整机自检状态语音播报 59 | 2. 各组件错误状态的语音播报接口实现 60 | ### led管理(LedInfo) 61 | 电量相关的所有Led灯效展示的实现 62 | 63 | ### 帐号管理(AccountInfo) 64 | 家庭成员帐号的增、删、改、查的接口实现 65 | 66 | ### 节点错误状态管理(ErrorContxt) 67 | 节点错误状态记录与发布 68 | 69 | ### 心跳管理(HeartContext) 70 | 铁蛋的心跳管理,包含以下模块: 71 | 72 | audio、device、sensor、motion_manager、algorithm_manager 73 | 74 | ### 请求管理(QueryInfo) 75 | 查询并向app端上报设备sn码、ota版本、电机温度、音量、 电量、运动、低功耗等数据。 76 | 77 | ### 就绪管理(ReadyInfo) 78 | 1. 发布自检及状态机状态 79 | 2. 发布APP连接状态 80 | 3. 
开机后第一连接APP,控制铁蛋站立的接口实现 81 | 82 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_bms_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_bms设计文档 2 | 3 | ## 概述 4 | 5 | ``cyberdog_bms`` 以ros2 plugin形式向客户端提供bms数据服务,此插件为控制bms提供必要的API接口,并把采集到的电源板数据转换成ros消息格式通过device_manager向外发布。 6 | 7 | ## 软件设计 8 | 9 | #### 软件框架 10 | 11 | ![](./image/cyberdog_bms/cyberdog_bms.png) 12 | 13 | 19 | 20 | ## 功能设计 21 | 22 | - 通过配置文件可灵活配置消息源、指令id等 23 | - 提供使能、关闭、自检等基本能力接口 24 | 25 | ## 配置文件 26 | 27 | - 源码路径:``bridges/params/toml_config/device`` 28 | - 安装路径:``/opt/ros2/cyberdog/share/params/toml_config/device`` 29 | - 配置文件: 30 | - ``battery_config.toml``:用于配置bms模块 31 | - 主要配置说明: 32 | - ``protocol``:通信协议,默认为CAN。 33 | - ``can_interface``:CAN通信的消息通道,可配置``can0``、``can1`` 34 | - ``array``:数据包消息接收配置 35 | - ``array_name``:数据包名称 36 | - ``can_package_num``:数据包中,CAN数据帧的个数 37 | - ``can_id``:数据包中,CAN数据帧的``CAN_id`` 38 | 39 | - ``cmd``:指令包消息发送配置 40 | - ``cmd_name``:指令包名称 41 | - ``can_id``:指令包中,CAN数据帧的``CAN_id`` 42 | - ``ctrl_len``:指令包中,CAN数据帧的数据长度 43 | - ``ctrl_data``:指令包中,CAN数据帧的数据默认值 44 | 45 | ## ROS 协议 46 | 47 | - 源码路径:``bridges/protocol/ros`` 48 | - Ros topic:``bms_status`` 49 | - 协议介绍: 50 | - ``protocol::msg::BmsStatus``:电源管理模块数据格式 51 | - 协议路径:``bridges/protocol/ros/msg/BmsStatus.msg`` 52 | 53 | ## API接口 54 | 55 | - ``bool Init(std::functionfunction_callback, bool simulation)``:初始化配置 56 | - ``simulator = true``:配置为仿真模式 57 | - ``function_callback``:消息发布回调函数 58 | - ``Open()``:打开BMS消息上报 59 | - ``Close()``:停止BMS消息上报 60 | - ``SelfCheck()``:BMS自检 61 | - ``LowPower()``:进入低功耗模式 62 | - ``SetConnectedState(bool connected)``:设置uwb连接状态 63 | - ``void ServiceCommand(const std::shared_ptr request,std::shared_ptr response)``:ros2 service控制bms服务接口 64 | 65 | ## 调试命令 66 | 67 | - 获取bms topic:``ros2 topic list | grep bms_status`` -------------------------------------------------------------------------------- /docs/en/_sidebar.md: -------------------------------------------------------------------------------- 1 | - Application Documents 2 | 3 | - [platform software_architecture](/en/cyberdog_platform_software_architecture_en.md) 4 | - [cyberdog manager](/en/cyberdog_manager_en.md) 5 | - [sensor manager](/en/sensor_manager_en.md) 6 | - [device manager](/en/device_manager_en.md) 7 | - [motion manager](en/motion_manager_en.md) 8 | - [algorithm manager](en/algorithm_manager_en.md) 9 | - [cyberdog grpc](en/cyberdog_grpc_en.md) 10 | - [cyberdog bringup](en/cyberdog_bringup_en.md) 11 | - [connector](en/connector_en.md) 12 | - [image transmission](en/image_transmission_en.md) 13 | - [vp](en/cyberdog_vp_en.md) 14 | - [interactive](en/cyberdog_interactive_en.md) 15 | - [train](en/cyberdog_train_en.md) 16 | - [face](en/cyberdog_face_en.md) 17 | - [action](en/cyberdog_action_en.md) 18 | - [ai sports](en/cyberdog_ai_sports_en.md) 19 | - [gps](en/cyberdog_gps_en.md) 20 | - [TOF](en/cyberdog_tof_en.md) 21 | - [lidar](en/cyberdog_lidar_en.md) 22 | - [ultrasonic](en/cyberdog_ultrasonic_en.md) 23 | - [BMS](en/cyberdog_bms_en.md) 24 | - [touch](en/cyberdog_touch_en.md) 25 | - [LED](en/cyberdog_led_en.md) 26 | - [UWB](en/cyberdog_uwb_en.md) 27 | - [WiFi](en/cyberdog_wifi_en.md) 28 | - [bluetooth](en/cyberdog_bluetooth_en.md) 29 | - [audio](en/cyberdog_audio_en.md) 30 | - [machine](en/cyberdog_machine_en.md) 31 | - [Unified Error Code](en/cyberdog_system_en.md) 32 | - [common](en/cyberdog_common_en.md) 33 | 34 | 35 | - Operations Control Document 36 | - 
[loco](/en/cyberdog_loco_en.md) 37 | - [sim](/en/cyberdog_gazebo_en.md) 38 | 39 | - SLAM Document 40 | - [camera](/en/cyberdog_camera_en.md) 41 | - [occmap](/en/cyberdog_occmap_en.md) 42 | - [realsense-ros](/en/realsense-ros_en.md) 43 | - [miloc](/en/cyberdog_miloc_en.md) 44 | - [mivins](/en/cyberdog_mivins_en.md) 45 | 46 | - Perception Document 47 | - [version](cn/cyberdog_vision_cn.md) 48 | - [tracking](cn/cyberdog_tracking_cn.md) 49 | - [laserslam](cn/cyberdog_laserslam_cn.md) 50 | 51 | - Developer Manual 52 | - [flash](cn/cyberdog_flash.md) 53 | -------------------------------------------------------------------------------- /docs/_sidebar.md: -------------------------------------------------------------------------------- 1 | - 应用文档 2 | 3 | - [平台软件架构](cn/cyberdog_platform_software_architecture_cn.md) 4 | - [铁蛋管理模块](cn/cyberdog_manager_cn.md) 5 | - [传感器管理模块](cn/sensor_manager_cn.md) 6 | - [设备管理模块](cn/device_manager_cn.md) 7 | - [运动管理模块](cn/motion_manager_cn.md) 8 | - [算法管理模块](cn/algorithm_manager_cn.md) 9 | - [GRPC通信模块](cn/cyberdog_grpc_cn.md) 10 | - [启动模块](cn/cyberdog_bringup_cn.md) 11 | - [快连模块](cn/connector_cn.md) 12 | - [图传模块](cn/image_transmission_cn.md) 13 | - [可视化编程模块](cn/cyberdog_vp_cn.md) 14 | - [互动模块](cn/cyberdog_interactive_cn.md) 15 | - [语音训练词模块](cn/cyberdog_train_cn.md) 16 | - [人脸识别模块](cn/cyberdog_face_cn.md) 17 | - [手势识别模块](cn/cyberdog_action_cn.md) 18 | - [动作识别模块](cn/cyberdog_ai_sports_cn.md) 19 | - [GPS模块](cn/cyberdog_gps_cn.md) 20 | - [TOF模块](cn/cyberdog_tof_cn.md) 21 | - [雷达模块](cn/cyberdog_lidar_cn.md) 22 | - [超声模块](cn/cyberdog_ultrasonic_cn.md) 23 | - [BMS模块](cn/cyberdog_bms_cn.md) 24 | - [触摸板模块](cn/cyberdog_touch_cn.md) 25 | - [LED模块](cn/cyberdog_led_cn.md) 26 | - [UWB模块](cn/cyberdog_uwb_cn.md) 27 | - [WiFi模块](cn/cyberdog_wifi_cn.md) 28 | - [蓝牙模块](cn/cyberdog_bluetooth_cn.md) 29 | - [语音模块](cn/cyberdog_audio_cn.md) 30 | - [状态机模块](cn/cyberdog_machine_cn.md) 31 | - [系统统一错误码模块](cn/cyberdog_system_cn.md) 32 | - [通用工具模块](cn/cyberdog_common_cn.md) 33 | 34 | - 运控文档 35 | - [运动控制模块](cn/cyberdog_loco_cn.md) 36 | - [仿真平台模块](cn/cyberdog_gazebo_cn.md) 37 | 38 | - SLAM文档 39 | - [相机驱动](cn/cyberdog_camera_cn.md) 40 | - [视觉栅格地图构建节点](cn/cyberdog_occmap_cn.md) 41 | - [realsense-ros驱动模块](cn/realsense-ros_cn.md) 42 | - [视觉重定位模块](cn/cyberdog_miloc_cn.md) 43 | - [视觉里程计模块](cn/cyberdog_mivins_cn.md) 44 | 45 | 46 | - 感知文档 47 | - [AI算法调度管理及推理模块](cn/cyberdog_vision_cn.md) 48 | - [人体及万物目标位置估计模块](cn/cyberdog_tracking_cn.md) 49 | - [激光建图定位模块](cn/cyberdog_laserslam_cn.md) 50 | 51 | - 开发者手册 52 | - [开发者手册](cn/developer_guide.md) 53 | - [刷机文档](cn/cyberdog_flash.md) 54 | - [GRPC协议](cn/grpc_protocol.md) 55 | - [可视化编程协议](cn/cyberdog_vp_protocol_document_cn.md) 56 | - [QA汇总](cn/question_answer_cn.md) 57 | 58 | 59 | 60 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_camera_cn.md: -------------------------------------------------------------------------------- 1 | # 简介 2 | 运行于小米CyberDog,基于Nvidia [Argus](https://docs.nvidia.com/jetson/l4t-multimedia/group__LibargusAPI.html)和[Ros2](https://www.ros.org/)的相机程序。 3 | 4 | 该程序使用Argus API提供的接口来操控MIPI相机硬件并实时捕获图像,使用ROS2提供的接口来管理相机节点,为外部模块提供交互接口。 5 | 6 | 模块整体架构图如下: 7 | ![](./image/cyberdog_camera/camera_arch.png) 8 | 9 | 模块内部描述图如下: 10 | ![](./image/cyberdog_camera/camera_arch_inter.png) 11 | 12 | # 编译 13 | 编译本模块需要依赖若干外部软件包,编译前需按照下列命令安装: 14 | 15 | ## 依赖项 16 | ### 1.nvidia-l4t-jetson-multimedia-api 17 | ```console 18 | sudo apt-get install nvidia-l4t-jetson-multimedia-api 19 | ``` 20 | ### 2.cuda-toolkit 21 | ```console 
22 | sudo apt-get install cuda-toolkit-10-2 23 | ``` 24 | ### 3.libavformat-dev 25 | ```console 26 | sudo apt-get install libavformat-dev 27 | ``` 28 | # 测试程序 29 | 基于相机api的相机测试程序,可以用来测试相机是否正常,亦可以作为camera api使用方式参考。 30 | 31 | ## 编译 32 | ```console 33 | colcon build --merge-install --packages-up-to camera_test 34 | ``` 35 | 36 | ## 运行 37 | 38 | ### 测试程序,用来测试相机出图是否正常 39 | ```console 40 | ./build/camera_test/camera_test cam_id width height rgb/bgr 41 | ``` 42 | 例如,测试camera 0,640x480分辨率RGB出图的话,使用如下命令: 43 | ./build/camera_test/camera_test 0 640 480 rgb 44 | 45 | ### 主摄相机服务程序,可以进行拍照录像等命令 46 | ```console 47 | ros2 run camera_test camera_server 48 | ``` 49 | 拍照命令: 50 | ```console 51 | ros2 service call /camera_service protocol/srv/CameraService "{command: 1, args: ''}" 52 | ``` 53 | 54 | 录像命令: 55 | ```console 56 | //开始 57 | $ ros2 service call /camera_service protocol/srv/CameraService "{command: 2, args: ''}" 58 | //停止 59 | $ ros2 service call /camera_service protocol/srv/CameraService "{command: 3, args: ''}" 60 | ``` 61 | 62 | ### 双目/RGB相机ros2程序 63 | ```console 64 | //启动stereo_camera 65 | ros2 run camera_test stereo_camera 66 | 67 | //如果使用开机自启的stereo_camera,注意topic前加上命名空间 68 | ros2 lifecycle set /stereo_camera configure 69 | 70 | ros2 lifecycle set /stereo_camera activate 71 | 72 | ros2 lifecycle set /stereo_camera deactivate 73 | 74 | ros2 lifecycle set /camera/camera cleanup 75 | 76 | 77 | ``` 78 | 运行之后,双目相机和RGB相机将通过/image_left、/image_right、/image_rgb三个topic发布图像。 79 | -------------------------------------------------------------------------------- /docs/en/cyberdog_interactive_en.md: -------------------------------------------------------------------------------- 1 | #
cyberdog_interactive design document
2 | 3 | ## 1. Overview 4 | The interactive feature provides a built-in interaction for a fixed scenario as a basic capability of the robot, and offers a programming example for users, in the hope that users (developers) will build more fun and interesting functions on top of it. 5 | 6 | ## 2. Design 7 | ### 2.1. Feature design 8 | 9 | The chin-touch interaction is only triggered when the dog is in the "sit down" state, and it is divided into the following three cases: 10 | 1. Touching the chin back and forth: 11 | - This state is entered when an object touches the dog's chin back and forth. In this state, the dog triggers the following actions: 12 | - barks once; 13 | - twists its hips left and right; 14 | 2. Touching the left side of the chin: 15 | - This state is entered when an object keeps touching the left side of the dog's chin. In this state, the dog triggers the following actions: 16 | - barks once; 17 | - twists its hips to the right; 18 | 3. Touching the right side of the chin: 19 | - This state is entered when an object keeps touching the right side of the dog's chin. In this state, the dog triggers the following actions: 20 | - barks once; 21 | - twists its hips to the left; 22 | 23 | ### 2.2 Technology architecture 24 | 25 |
26 | 27 | ![](./image/cyberdog_interactive/cyberdog_interactive_module.svg) 28 | 29 |
30 | 31 | As shown in the figure above, this feature consists of two modules: a main module, under which a single interactive module for detecting chin touches is mounted. The interactive module depends on the TOF module, the motion module and the voice module: 32 | 1. TOF module: provides head TOF sensing data, used to detect whether there is an object near the dog's chin; 33 | 2. Motion module: provides basic motion capabilities; 34 | 3. Voice module: provides basic voice interaction capabilities. 35 | -------------------------------------------------------------------------------- /docs/cn/sensor_manager_cn.md: -------------------------------------------------------------------------------- 1 | # sensor_manager 2 | 3 | ## 概述 4 | 5 | ``sensor_manager`` 是``cyberdog_tof``、``cyberdog_ultrasonic``、``cyberdog_lidar``等功能模块的管理模块;为各模块提供状态机管理、服务回调、消息发布的能力。各模块以ros plugin形式加载到``sensor_manager``。 6 | 7 | ## 软件设计 8 | 9 |
10 | 11 | ![avatar](./image/sensor_manager/sensor_manager.png) 12 | 13 |
14 | 15 | ## 功能设计 16 | 17 | ### 模块加载 18 | - [ROS plugin](https://github.com/ros2/ros2_documentation/blob/galactic/source/Tutorials/Beginner-Client-Libraries/Pluginlib.rst) 19 | 20 | - ``sensor_manager``使用 ``pluginlib::ClassLoader``加载``cyberdog_tof``等各模块。 21 | 22 | ``` 23 | // 加载过程参考ros样例 24 | pluginlib::ClassLoader poly_loader("polygon_base", "polygon_base::RegularPolygon"); 25 | 26 | std::shared_ptr triangle = poly_loader.createSharedInstance("polygon_plugins::Triangle"); 27 | ``` 28 | 29 | ### 状态机管理 30 | - 状态机管控各外设模块,如:控制``cyberdog_tof``等模块的“低功耗”和“激活”状态切换; 31 | 32 | - ``sensor_manager``继承``cyberdog::machine::MachineActuator``,客户端可通过``cyberdog::machine::MachineActuator``提供的服务接口,控制``sensor_manager``状态切换。在状态机内部,对已加载的各模块依次进行对应状态的控制。 33 | 34 | - 状态机详细介绍参考: [状态机设计](/cn/cyberdog_machine_cn.md) 35 | 36 | ### Topic&Service 37 | 38 | - ``sensor_manager``提供的ros message接口,客户端可订阅传感器数据topic获取传感器数据。 39 | - ``sensor_manager``提供的ros service接口,客户端可通过传感器控制服务控制传感器状态。 40 | - ROS接口: 41 | - ``protocol::msg::GpsPayload``:GPS消息 42 | - ``ScanMsg``:雷达消息 43 | - ``sensor_msgs::msg::Range``:超声消息 44 | - ``protocol::msg::HeadTofPayload``:头部Tof消息 45 | - ``protocol::msg::RearTofPayload``:尾部Tof消息 46 | - ``protocol::srv::SensorOperation``:传感器控制服务 47 | 48 | ### 模块插件 49 | - [GPS模块](/cn/cyberdog_gps_cn.md) 50 | - [TOF模块](/cn/cyberdog_tof_cn.md) 51 | - [雷达模块](/cn/cyberdog_lidar_cn.md) 52 | - [超声模块](/cn/cyberdog_ultrasonic_cn.md) 53 | 54 | ## 调试命令 55 | - 获取sensor_manager状态机服务: 56 | ``` 57 | ros2 topic list | grep sensor_manager 58 | ``` 59 | 60 | - 状态机切换(切换到“Active”状态): 61 | 62 | ``` 63 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/sensor_managermachine_service protocol/srv/FsMachine "{target_state: "Active"}" 64 | ``` 65 | -------------------------------------------------------------------------------- /docs/cn/device_manager_cn.md: -------------------------------------------------------------------------------- 1 | # device_manager 2 | 3 | ## 概述 4 | 5 | ``device_manager`` 是``cyberdog_touch``、``cyberdog_uwb``、``cyberdog_bms``等功能模块的管理模块;为各模块提供状态机管理、服务回调、消息发布的能力,各模块以ros plugin形式加载到``device_manager``。 6 | 7 | ## 软件设计 8 | 9 |
10 | 11 | ![avatar](./image/device_manager/device_manager.png) 12 | 13 |
14 | 15 | ## 功能设计 16 | 17 | ### 模块加载 18 | 19 | - [ROS plugin](https://github.com/ros2/ros2_documentation/blob/galactic/source/Tutorials/Beginner-Client-Libraries/Pluginlib.rst) 20 | 21 | - ``device_manager``使用 ``pluginlib::ClassLoader``加载``cyberdog_uwb``等各模块。 22 | 23 | ``` 24 | // 加载过程参考ros样例 25 | pluginlib::ClassLoader poly_loader("polygon_base", "polygon_base::RegularPolygon"); 26 | 27 | std::shared_ptr triangle = poly_loader.createSharedInstance("polygon_plugins::Triangle"); 28 | ``` 29 | 30 | ### 状态机管理 31 | 32 | - 状态机管控各外设模块,如:控制``cyberdog_led``等模块的“低功耗”和“激活”状态切换; 33 | 34 | - ``device_manager``继承``cyberdog::machine::MachineActuator``,客户端可通过``cyberdog::machine::MachineActuator``提供的服务接口,控制``device_manager``状态切换。在状态机内部,对已加载的各模块依次进行对应状态的控制。 35 | 36 | - 状态机详细介绍参考: [状态机设计](/cn/cyberdog_machine_cn.md) 37 | 38 | ### Topic&Service 39 | 40 | - ``device_manager``提供的ros message接口,客户端可订阅外设数据topic获取外设数据。 41 | - ``device_manager``提供的ros service接口,客户端可通过外设控制服务控制外设状态。 42 | - ROS接口: 43 | - ``protocol::msg::TouchStatus``:touch消息 44 | - ``protocol::msg::BmsStatus``:bms消息 45 | - ``protocol::msg::UwbRaw``:uwb消息 46 | - ``protocol::srv::GetUWBMacSessionID``:获取uwb mac服务 47 | - ``protocol::srv::LedExecute``:led控制服务 48 | 49 | ### 模块插件 50 | 51 | - [蓝牙模块](/cn/cyberdog_bluetooth_cn.md) 52 | - [bms模块](/cn/cyberdog_bms_cn.md) 53 | - [led模块](/cn/cyberdog_led_cn.md) 54 | - [touch模块](/cn/cyberdog_touch_cn.md) 55 | - [uwb模块](/cn/cyberdog_uwb_cn.md) 56 | - [wifi模块](/cn/cyberdog_wifi_cn.md) 57 | 58 | ## 调试命令 59 | 60 | - 获取device_manager状态机服务:``ros2 topic list | grep device_manager`` 61 | - 状态机切换(切换到“Active”状态): 62 | 63 | ``` 64 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/device_managermachine_service protocol/srv/FsMachine "{target_state: "Active"}" 65 | ``` 66 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_ultrasonic_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_ultrasonic设计 2 | 3 | ## 概述 4 | 5 | cyberdog_ultrasonic 以ros2 plugin形式向客户端提供ultrasonic数据服务,此插件为控制传感器提供必要的API接口,并把采集到的ultrasonic数据转换成ros消息格式通过sensor manager反馈给客户端。cyberdog默认配置1个ultrasonic。 6 | 7 | ## 软件设计 8 | 9 | #### 软件框架 10 | 11 |
12 | 13 | ![avatar](./image/cyberdog_ultrasonic/cyberdog_ultrasonic.png) 14 | 15 |
16 | 17 | 38 | 39 | ## 功能设计 40 | 41 | - 通过配置文件可灵活配置传感器个数、消息源、指令id等; 42 | - 提供传感器使能、关闭、自检等基本能力接口; 43 | 44 | ## 配置文件 45 | 46 | - 源码路径:``bridges/params/toml_config/sensors`` 47 | - 安装路径:``/opt/ros2/cyberdog/share/params/toml_config/sensors`` 48 | - 配置文件: 49 | - ``utrasonic_config.toml``:用于配置传感器个数和实际的配置文件 50 | - ``ultrasonic.toml``:用于配置头部传感器 51 | - 主要配置说明: 52 | - ``config_files``:程序根据数组成员实例出对应的传感器实体 53 | - ``protocol``:通信协议,默认为CAN。 54 | - ``can_interface``:CAN通信的消息通道,可配置``can0``、``can1`` 55 | - ``array``:数据包消息接收配置 56 | - ``array_name``:数据包名称 57 | - ``can_package_num``:数据包中,CAN数据帧的个数 58 | - ``can_id``:数据包中,CAN数据帧的``CAN_id`` 59 | 60 | - ``cmd``:指令包消息发送配置 61 | - ``cmd_name``:指令包名称 62 | - ``can_id``:指令包中,CAN数据帧的``CAN_id`` 63 | - ``ctrl_len``:指令包中,CAN数据帧的数据长度 64 | - ``ctrl_data``:指令包中,CAN数据帧的数据默认值 65 | 66 | ## ROS 协议 67 | - 源码路径:``bridges/protocol/ros`` 68 | - Ros topic:``ultrasonic_payload`` 69 | - 协议介绍: 70 | - ``sensor_msgs::msg::Range``:单个Ultrasonic数据格式 71 | - 协议路径:``bridges/protocol/ros/msg/UltrasonicPayload.msg`` 72 | 73 | ## API接口 74 | - ``Init(bool simulator)``:初始化配置 75 | - ``simulator = true``:配置为仿真模式 76 | - ``Open()``:打开传感器 77 | - ``Start()``:使能传感器 78 | - ``Stop()``:停止传感器 79 | - ``Close()``:关闭传感器 80 | - ``SelfCheck()``:传感器自检 81 | - ``LowPowerOn()``:进入低功耗模式 82 | - ``LowPowerOff()``:退出低功耗模式 83 | - ``SetSinglePayloadCallback(std::function payload)> cb)``:设置消息回调函数 84 | 85 | ## 调试命令 86 | - 获取ultrasonic topic:``ros2 topic list | grep ultrasonic_payload`` -------------------------------------------------------------------------------- /docs/en/image_transmission_en.md: -------------------------------------------------------------------------------- 1 | # Image_transmission Design 2 | 3 | ## Overview 4 | 5 | image_transmission is a ROS package that transmits video stream to APP 6 | 7 | ## System Structure 8 | 9 | 1. APP: Makes WebRTC signalling request, receives video stream, sends stop signal 10 | 11 | 2. cyberdog_grpc: Transmits signalling messages and stop signal 12 | 13 | 3. image_transmission: Generates an shared library which is linked by cyberdog_camera node. The library receives signalling messages, notifys cyberdog_camera node to activate streaming raw video data, uses WebRTC peer connection to send vieo stream 14 | 15 | 4. cyberdog_camera: Receives notification from image_transmission,generates raw image data 16 | 17 | ![image_trans](./image/image_transmission/image_transmission_en.svg) 18 | 19 | ## Operation Process 20 | 21 | ### Activation Process 22 | 23 | 1. APP sends signlling message through gRPC. cyberdog_grpc node receives the messages and converts them to ROS message and publish with topic "img_trans_signal_in". 24 | 25 | 2. image_transmission library receives messages from "img_trans_signal_in" topic, calls "camera_service" service to notify cyberdog_camera to start producing raw image data. 26 | 27 | 3. image_transmission sends response signalling messages through topic "img_trans_signal_out". cyberdog_grpc converts them to gRPC messag and sends to APP. 28 | 29 | 4. Repeat 1 to 3 several times. 30 | 31 | 5. image_transmission starts sending video stream to APP through WebRTC. 32 | 33 | ### Termination Process 34 | 35 | - Terminate video streaming from APP: 36 | 37 | 1. APP sends stop signal to cyberdog_grpc node through gRPC. cyberdog_grpc node receives the messages and converts it to ROS message and publish with topic "img_trans_signal_in". 38 | 39 | 2. 
image_transmission library receives messages from "img_trans_signal_in" topic, destructs WebRTC peer connection and sends response to cyberdog_grpc. cyberdog_grpc converts them to gRPC messag and sends to APP. 40 | 41 | 3. image_transmission calls "camera_service" service to notify cyberdog_camera to stop producing raw image data. 42 | 43 | 4. APP stop display the video images. 44 | 45 | - Or exit the APP directly: 46 | 47 | 1. APP disconnect WebRTC connection. 48 | 49 | 2. image_transmission detects that WebRTC connection state changing, calls "camera_service" service to notify cyberdog_camera to stop producing raw image data. -------------------------------------------------------------------------------- /docs/cn/cyberdog_audio_cn.md: -------------------------------------------------------------------------------- 1 | # 语音设计文档 2 | 3 | ## 概述 4 | 5 | cyberdog_audio是nx端与r329端的语音业务中转站。例如,通过cyberdog_audio可以转发具体的播放离线语音、在线语音的请求。 6 | 7 | ## 设计 8 | 9 | ### 框架图 10 | ![](./image/cyberdog_audio/framework_cn.svg) 11 | ### 业务数据流 12 | 13 | ![](./image/cyberdog_audio/diag_cn.svg) 14 | 15 | ### 模块功能 16 | 17 | cyberdog_audio具体模块设计 18 | 19 | 1. 基于LCM通讯的双向service与topic封装,实现了四个类:LcmAtocTopic、LcmCtoaTopic、LcmCtoaService、LcmAtocService。 20 | - LcmAtocTopic是R329发给NX的LCM消息处理封装类 21 | - LcmCtoaTopic封装了NX发往R329的LCM消息封装类 22 | - LcmCtoaService封装了NX请求R329的服务的LCM封装类。NX相当于客户端,R329相当服务端 23 | - LcmAtocService封装了R329请求NX的服务的LCM封装类。R329相当于客户端,NX相当服务端 24 | 2. 业务功能实现 25 | - 心跳维持 26 | - ```Bash 27 | NX以1次/s的心跳发送请求向R329确认其状态是否正常。如果心跳异常,会打印心跳异常日志。 28 | ``` 29 | 30 | - 鉴权信息同步。 31 | - ```Bash 32 | (1)App端连接上狗子,会将账户的token信息通过透传到NX端,再同步到R329端。 33 | (2)由App端发起将token透传到NX端grpc节点,grpc节点通过调用cyberdog_audio的服务,在服务内部实现将token同步到R329端。 34 | (3)R329端拿到token信息,会做本地缓存。并用此token访问小爱云端服务。 35 | ``` 36 | 37 | - 离线/在线语音播放 38 | - ```Bash 39 | (1)语音节点提供离线/在线语音播放topic与service两种方式。 40 | (2)离线语音,是指预至在nx端或者R329端的语音文件,通过id标识具体要播放的离线语音文件。可以参考“bridge/protocol/ros/msg/AudioPlay.msg”中定义的具体的离线语音id常量及关联的离线语音文件具体的播放文本内容。 41 | (3)在线语音,简称tts(text to speech)。是将文本转化为语音进行播放,需要NX端具有访问互联网的能力。 42 | ``` 43 | 44 | - 控制垂域动作实现 45 | - ```Bash 46 | (1)控制垂域特指小爱服务端已配置的可以返回控制指令的能力。 47 | (2)R329在拿到token后,注册了小爱云端的访问能力。这样就能接收到小爱云端控制垂域的消息通知。 48 | (3)在喊“铁蛋铁蛋”唤醒狗子后,对着它说:“后退X步”、“原地转圈”、“击掌”、“过来”、“握个手”、“坐下”、“跳跃”、“摇尾巴”、“摇头”、“点头”、“鞠躬”、“芭蕾舞”、“太空步”,狗子会做相应的动作回应。 49 | ``` 50 | 51 | - 音量设置获取及麦克风启用禁用 52 | - ```Bash 53 | (1)提供了音量大小设置服务。例:通过指令将音量设置为50, ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/audio_volume_set protocol/srv/AudioVolumeSet "{volume: 50}" 54 | (2)提供了音量大小获取服务。例:通过指令获取当前音量值, ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/audio_volume_get protocol/srv/AudioVolumeGet 55 | (3)提供了麦克风启用/禁用服务。麦克风禁用以后,不再进行收音,铁蛋不能被唤醒。 56 | ``` 57 | 58 | - 设置昵称、录制声纹及声纹识别 59 | - ```Bash 60 | (1)狗子默认的昵称为铁蛋。在App端可以给狗子设置新的昵称,例如“旺财”。设置新的昵称为三个字及三个字以内,在唤醒时需要喊两次昵称才能唤醒,设置新的呢称多余三个字,直接喊昵称就能唤醒。需要注意的时,更换狗子的昵称后,可能导致误唤醒率提高。 61 | (2)App端提供了录制声纹的入口,绑定具体的主人信息。在进行录制声纹时,会触发录制声纹的流程。在声纹录制成功以后,喊“铁蛋铁蛋”唤醒狗子会进行声纹识别,然后问它:“我是谁?”,它能回答出你的姓名。 62 | ``` 63 | 64 | - 狗信息上报 65 | - ```Bash 66 | (1)狗子信息主要包括:狗子昵称、狗子激活日期,狗子重量。 67 | (2)狗子在第一次激活后,会把狗子信息上报到云端。随着狗子成长,你问他几岁了,它会给出具体的回答。 68 | ``` -------------------------------------------------------------------------------- /docs/en/cyberdog_touch_en.md: -------------------------------------------------------------------------------- 1 | # Touch Design document 2 | # 1. Overview 3 | The following aims to explain the robot dog touch interaction logic and function mapping. 4 | # 2. 
Design 5 | ## 2.1 Function division 6 | The main functions are as follows: 7 | - Get touch trigger events (double click (0x03), long press (0x07), etc.); 8 | - touch self-test; 9 | ## 2.2 Module architecture 10 | The module architecture diagram is as follows: 11 | 12 |
13 | 14 | ![avatar](./image/cyberdog_touch/touch_architecture_diagram.png) 15 | 16 |
17 | 18 | ## 2.3 Interface design 19 | The touch module data stream is as follows: 20 | 21 |
22 | 23 | ![avatar](./image/cyberdog_touch/Touch_data_stream.png) 24 | 25 |
26 | 27 | The touch module message protocol is as follows: 28 | 29 | - Topic name: "touch_status" 30 | - Topic message file path: "bridges/protocol/ros/msg/TouchStatus.msg" 31 | - The topic message structure is as follows: 32 | int32 touch_state // touch event value, where a non-zero value indicates that a touch has been triggered 33 | uint64 timestamp // timestamp 34 | - The touch_state values are defined as follows: 35 | #define LPWG_SINGLETAP_DETECTED 0x01 36 | #define LPWG_DOUBLETAP_DETECTED 0x03 37 | #define LPWG_TOUCHANDHOLD_DETECTED 0x07 38 | #define LPWG_CIRCLE_DETECTED 0x08 39 | #define LPWG_TRIANGLE_DETECTED 0x09 40 | #define LPWG_VEE_DETECTED 0x0A 41 | #define LPWG_UNICODE_DETECTED 0x0B 42 | #define LPWG_SWIPE_DETECTED 0x0D 43 | #define LPWG_SWIPE_DETECTED_UP_CONTINU 0x0E 44 | #define LPWG_SWIPE_DETECTED_DOWN_CONTINU 0x0F 45 | #define LPWG_SWIPE_DETECTED_LEFT_CONTINU 0x10 46 | #define LPWG_SWIPE_DETECTED_RIGHT_CONTINU 0x11 47 | ## 2.4 Module design 48 | The module is mainly divided into the touch_input_event_reader, touch_base, touch_sensor_handler and touch_plugin sub-modules. 49 | - touch_input_event_reader reads touch events from the input event interface. 50 | - touch_base mainly implements the definition of touch module initialization, self-check and other functions; 51 | - touch_sensor_handler mainly implements opening the device file and processing and mapping touch events based on the epoll mechanism, etc. 52 | - touch_plugin mainly implements touch module initialization, self-check, publishing of the touch event topic and other functions. 53 | -------------------------------------------------------------------------------- /docs/en/connector_en.md: -------------------------------------------------------------------------------- 1 | #
connector design document
2 | 3 | ## 1. Overview 4 | The bionic robot quickly connects to the network and the target device (hereinafter referred to as fast connection) mainly has the following characteristics: 5 | 1. The bionic robot is in a WiFi environment; 6 | * WiFi can be a third-party routed WiFi; 7 | * WiFi can also be a hotspot placed outside the device where the APP is located. 8 | 2. The device where the App is located has been connected to the target WiFi; 9 | 3. The app initiates the bionic robot to connect to the target WiFi; 10 | 4. After the bionic robot successfully connects to the target WiFi, it connects to the device where the target APP is located; 11 | 5. Finally, quickly establish a connection with the target WiFi and device. 12 | 13 | ## 2. Design 14 | ### 2.1. Feature design 15 | 16 | There are two main quick connection functions: 17 | 1. Distribution network function: realize the switch by long pressing the touch; 18 | 2. Automatic connection: realized through the system's own reconnection mechanism. 19 | 20 | ### 2.2 Technology architecture 21 | 22 |
23 | 24 | ![](./image/connector/connector_module.svg) 25 | 26 |
27 | 28 | As shown in the figure above, the composition of the bionic robot fast connection network and target device architecture and the main functions of each component are as follows: 29 | 1. touch: Provide the robot head touch signal. 30 | 2. LED: Provides the function of displaying the lighting effects on the robot's head and body. 31 | 3. audio: Provide robot voice broadcast function. 32 | 4. camera: Provide image data within the field of view. 33 | 5. QRreader: Provides QR code recognition function. 34 | 6. WiFi: Provides the target WiFi connection function of the linux system. 35 | 7. App: Provides the function of generating QR codes carrying WiFi information. 36 | 37 | ## 3. Manual networking 38 | ### 3.1 Internet and APP 39 | ``` 40 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/connect protocol/srv/Connector "{wifi_name: 'wifi_name',wifi_password: 'wifi_password',provider_ip: 'app_ip'}" 41 | ``` 42 | ### 3.2 Internet only 43 | ``` 44 | sudo nmcli dev wifi connect password ifname wlan0 45 | ``` 46 | ### 3.3 other operations 47 | ``` 48 | # View the list of connected WiFi 49 | nmcli connection | grep wifi 50 | # Delete the specified WiFi connection in the connected WiFi list 51 | sudo nmcli connection delete 'ssid' 或者 'uuid' 52 | ``` 53 | -------------------------------------------------------------------------------- /docs/cn/third_party_library_management_cn.md: -------------------------------------------------------------------------------- 1 | # **机器人平台第三方库管理** 2 | 3 | **第三方库汇总** 4 | 5 | | 名称 | 版本 | License | 是否修改 | 6 | | --------- | ------------------------------------------------------------ | ------------------------------------------------------------ | -------- | 7 | | ROS2 | Galactic | Apache 2.0 | 否 | 8 | | ZXing | master/[2650e8a](https://github.com/liangxiaowei00/zxing-cpp/commit/2650e8a318e28eaeb246ee636909e637376de924) | Apache License2.0 | 是 | 9 | | rapidjson | V1.1.0 | MIT | 否 | 10 | | toml | V3.7.0 | MIT | 否 | 11 | | xpack | V1.0.1 | Apache License2.0 | 否 | 12 | | opencv | v4.1.1. | [3-clause BSD license](https://github.com/opencv/opencv/blob/4.4.0/LICENSE). 
| 否 | 13 | | lcm | master分支,commit: 91ce7a | [ LGPL-2.1 License ](https://github.com/lcm-proj/lcm/blob/master/COPYING) | 否 | 14 | | protobuf | 3.19.2 | https://github.com/protocolbuffers/protobuf/blob/main/LICENSE | 否 | 15 | | grpc | V1.44.0 | Apache License2.0 | 否 | 16 | | yaml-cpp | 0.7.0 | MIT | 否 | 17 | | ceres | 1.14.0 | Apache-2.0 | 否 | 18 | | HDF5 | 1.10.8 | `https://github.com/HDFGroup/hdf5` | 否 | 19 | | colmap | modify from dev branch commit: 2b7230679957e4dccd590ab467931d6cfffb9ede | BSD | 是 | 20 | | blockly | 8.0.1 | Apache License2.0 | 是 | 21 | -------------------------------------------------------------------------------- /docs/libs/docsify-darklight-theme@latest.js: -------------------------------------------------------------------------------- 1 | window.$docsify.plugins=[].concat((e,o)=>{let t={siteFont:"PT Sans",defaultTheme:"dark",codeFontFamily:"Roboto Mono, Monaco, courier, monospace",bodyFontSize:"17px",dark:{accent:"#42b983",toogleBackground:"#ffffff",background:"#091a28",textColor:"#b4b4b4",codeTextColor:"#ffffff",codeBackgroundColor:"#0e2233",borderColor:"#0d2538",blockQuoteColor:"#858585",highlightColor:"#d22778",sidebarSublink:"#b4b4b4",codeTypeColor:"#ffffff",coverBackground:"linear-gradient(to left bottom, hsl(118, 100%, 85%) 0%,hsl(181, 100%, 85%) 100%)",toogleImage:"url(https://cdn.jsdelivr.net/npm/docsify-darklight-theme@latest/icons/sun.svg)"},light:{accent:"#42b983",toogleBackground:"#091a28",background:"#ffffff",textColor:"#34495e",codeTextColor:"#525252",codeBackgroundColor:"#f8f8f8",borderColor:"rgba(0, 0, 0, 0.07)",blockQuoteColor:"#858585",highlightColor:"#d22778",sidebarSublink:"#505d6b",codeTypeColor:"#091a28",coverBackground:"linear-gradient(to left bottom, hsl(118, 100%, 85%) 0%,hsl(181, 100%, 85%) 100%)",toogleImage:"url(https://cdn.jsdelivr.net/npm/docsify-darklight-theme@latest/icons/moon.svg)"}};if(o.config.hasOwnProperty("darklightTheme")){for(var[r,l]of Object.entries(o.config.darklightTheme))"light"!==r&&"dark"!==r&&"defaultTheme"!==r&&(t[r]=l);for(var[r,l]of Object.entries(t))"light"!==r&&"dark"!==r&&(t[r]=l,document.documentElement.style.setProperty("--"+r,l));if(o.config.darklightTheme.hasOwnProperty("dark"))for(var[r,l]of Object.entries(o.config.darklightTheme.dark))t.dark[r]=l;if(o.config.darklightTheme.hasOwnProperty("light"))for(var[r,l]of Object.entries(o.config.darklightTheme.light))t.light[r]=l}else for(var[r,l]of Object.entries(t))"light"!==r&&"dark"!==r&&(t[r]=l,document.documentElement.style.setProperty("--"+r,l));window.matchMedia("(prefers-color-scheme: dark)").matches?t.defaultTheme="dark":window.matchMedia("(prefers-color-scheme: light)").matches&&(t.defaultTheme="light");var d=e=>{if(localStorage.setItem("DARK_LIGHT_THEME",e),t.defaultTheme=e,"light"==e)for(var[o,r]of Object.entries(t.light))document.documentElement.style.setProperty("--"+o,r);else if("dark"==e)for(var[o,r]of Object.entries(t.dark))document.documentElement.style.setProperty("--"+o,r);document.documentElement.style.setProperty("color-scheme",e)};e.afterEach((function(e,o){o(e='

.

'+e)})),e.doneEach((function(){let e=localStorage.getItem("DARK_LIGHT_THEME");"light"==e||"dark"==e?(t.defaultTheme=e,d(t.defaultTheme)):d(t.defaultTheme);const o=document.getElementById("docsify-darklight-theme");null!==o&&o.addEventListener("click",(function(){"light"===t.defaultTheme?d("dark"):d("light")}))}))},window.$docsify.plugins); 2 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_uwb_cn.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_uwb设计 2 | 3 | ## 概述 4 | 5 | ``cyberdog_uwb`` 以ros2 plugin形式向客户端提供uwb数据服务,此插件为控制传感器提供必要的API接口,并把采集到的uwb数据转换成ros消息格式通过device manager反馈给客户端。cyberdog默认配置2个或4个UWB。 6 | 7 | ## 软件设计 8 | 9 | #### 软件框架 10 | 11 |
12 | 13 | ![avatar](./image/cyberdog_uwb/cyberdog_uwb.png) 14 | 15 |
16 | 17 | 37 | 38 | ## 功能设计 39 | 40 | - 通过配置文件可灵活配置传感器个数、消息源、指令id等 41 | - 提供传感器使能、关闭、自检等基本能力接口 42 | 43 | ## 配置文件 44 | 45 | - 源码路径:``bridges/params/toml_config/device`` 46 | - 安装路径:``/opt/ros2/cyberdog/share/params/toml_config/device`` 47 | - 配置文件: 48 | - ``uwb_config.toml``:用于配置传感器个数和实际的配置文件 49 | - ``uwb_head_tof.toml``:用于配置前方传感器 50 | - ``uwb_head_uwb.toml``:用于配置后方传感器 51 | - ``uwb_rear_uwb.toml``:用于配置左边传感器 52 | - ``uwb_head_tof.toml``:用于配置右边传感器 53 | - 主要配置说明: 54 | - ``simulate``:模拟器设置开关 55 | - ``use_static_mac``:uwb静态mac设置开关 56 | - ``uwb``:配置uwb设备实体,程序根据此字段的个数实例出uwb个数 57 | - ``com_file ``:uwb设备子配置文件 58 | - ``protocol``:通信协议,默认为CAN。 59 | - ``can_interface``:CAN通信的消息通道,可配置``can0``、``can1`` 60 | - ``array``:数据包消息接收配置 61 | - ``array_name``:数据包名称 62 | - ``can_package_num``:数据包中,CAN数据帧的个数 63 | - ``can_id``:数据包中,CAN数据帧的``CAN_id`` 64 | 65 | - ``cmd``:指令包消息发送配置 66 | - ``cmd_name``:指令包名称 67 | - ``can_id``:指令包中,CAN数据帧的``CAN_id`` 68 | - ``ctrl_len``:指令包中,CAN数据帧的数据长度 69 | - ``ctrl_data``:指令包中,CAN数据帧的数据默认值 70 | 71 | ## ROS 协议 72 | - 源码路径:``bridges/protocol/ros`` 73 | - ros topic:``uwb_raw`` 74 | - 协议介绍: 75 | - ``protocol::msg::UwbRaw``:单个UWB数据格式 76 | - 协议路径:``bridges/protocol/ros/msg/UwbRaw.msg`` 77 | - ``protocol::msg::UwbArray``:UWB数据数组 78 | - 协议路径:``bridges/protocol/ros/msg/`` 79 | 80 | ## API 接口 81 | - ``bool Init(std::functionfunction_callback, bool simulation)``:初始化配置 82 | - ``simulator = true``:配置为仿真模式 83 | - ``function_callback``:消息发布回调函数 84 | - ``Open()``:打开传感器 85 | - ``Stop()``:停止传感器 86 | - ``SelfCheck()``:传感器自检 87 | - ``LowPower()``:进入低功耗模式 88 | - ``SetConnectedState(bool connected)``:设置uwb连接状态 89 | - ``void Play(const std::shared_ptr info_request,std::shared_ptr info_response)``:ros2 service打开uwb接口 90 | 91 | ## 调试命令 92 | - 获取uwb topic:``ros2 topic list | grep uwb_raw`` -------------------------------------------------------------------------------- /docs/cn/cyberdog_tof_cn.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_tof设计 2 | 3 | ## 概述 4 | 5 | ``cyberdog_tof`` 以ros2 plugin形式向客户端提供tof数据服务,此插件为控制传感器提供必要的API接口,并把采集到的tof数据转换成ros消息格式通过sensor_manager向外发布。 6 | 7 | ## 软件设计 8 | 9 | #### 软件框架 10 | 11 |
12 | 13 | ![avatar](./image/cyberdog_tof/cyberdog_tof.png) 14 | 15 |
16 | 17 | 35 | 36 | ## 功能设计 37 | 38 | - 通过配置文件可灵活配置传感器个数、消息源、指令id等 39 | - 提供传感器使能、关闭、自检等基本能力接口 40 | 41 | ## 配置文件 42 | 43 | - 源码路径:``bridges/params/toml_config/sensors`` 44 | - 安装路径:``/opt/ros2/cyberdog/share/params/toml_config/sensors`` 45 | - 配置文件: 46 | - ``tof_config.toml``:用于配置传感器个数和实际的配置文件 47 | - ``tof_left_head.toml``:用于配置头部左边传感器 48 | - ``tof_right_head.toml``:用于配置头部右边传感器 49 | - ``tof_left_rear.toml``:用于配置尾部左边传感器 50 | - ``tof_right_rear.toml``:用于配置尾部右边传感器 51 | - 主要配置说明: 52 | - ``config_files``:程序根据数组成员实例出对应的传感器实体 53 | - ``protocol``:通信协议,默认为CAN。 54 | - ``can_interface``:CAN通信的消息通道,可配置``can0``、``can1`` 55 | - ``array``:数据包消息接收配置 56 | - ``array_name``:数据包名称 57 | - ``can_package_num``:数据包中,CAN数据帧的个数 58 | - ``can_id``:数据包中,CAN数据帧的``CAN_id`` 59 | 60 | - ``cmd``:指令包消息发送配置 61 | - ``cmd_name``:指令包名称 62 | - ``can_id``:指令包中,CAN数据帧的``CAN_id`` 63 | - ``ctrl_len``:指令包中,CAN数据帧的数据长度 64 | - ``ctrl_data``:指令包中,CAN数据帧的数据默认值 65 | 66 | ## ROS 协议 67 | - 源码路径:``bridges/protocol/ros`` 68 | - Ros topic:``head_tof_payload``、``rear_tof_payload`` 69 | - 协议介绍: 70 | - ``protocol::msg::SingleTofPayload``:单个TOF数据格式 71 | - 协议路径:``bridges/protocol/ros/msg/SingleTofPayload.msg`` 72 | - ``protocol::msg::HeadTofPayload``:头部TOF数据格式 73 | - 协议路径:``bridges/protocol/ros/msg/HeadTofPayload.msg`` 74 | - ``protocol::msg::RearTofPayload``:尾部TOF数据格式 75 | - 协议路径:``bridges/protocol/ros/msg/RearTofPayload.msg`` 76 | 77 | ## API接口 78 | - ``Init(bool simulator)``:初始化配置 79 | - ``simulator = true``:配置为仿真模式 80 | - ``Open()``:打开传感器 81 | - ``Start()``:使能传感器 82 | - ``Stop()``:停止传感器 83 | - ``Close()``:关闭传感器 84 | - ``SelfCheck()``:传感器自检 85 | - ``LowPowerOn()``:进入低功耗模式 86 | - ``LowPowerOff()``:退出低功耗模式 87 | - ``SetSinglePayloadCallback(std::function payload)> cb)``:设置消息回调函数 88 | 89 | ## 调试命令 90 | - 获取头部Tof topic:``ros2 topic list | grep head_tof_payload`` 91 | - 获取尾部Tof topic:``ros2 topic list | grep rear_tof_payload`` 92 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_occmap_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_occmap 2 | 3 | ## 1. 模块简介 4 | cyberdog_occmap是一个基于单线激光雷达的在线栅格地图重建算法,基于输入的位姿和扫描点云实时生成相应栅格地图,基于查表更新和Protobuf等技术,实现了非常轻量化的实时栅格建图算法,我们的算法在NX平台上仅占用单核10%的CPU资源,为方便开发者进行二次开发,本项目将核心算法与ROS2进行了分离,核心算法放置在`3rdparty/occmap`路径下,开发者可参考ROS2接口进行二次开发使用。 5 | 6 | ## 2. 模块架构 7 | 8 |
9 | 10 | ![](./image/cyberdog_occmap/cyberdog_occmap_cn.png) 11 | 12 |
13 | 14 | ## 3. 使用教程 15 | 本项目提供了一个最小例程,基于单线激光雷达扫描点云文件和其对应的位姿文件离线生成栅格地图。 16 | 源代码位于`3rdparty/occmap/example`,数据位于`3rdparty/occmap/data`。其中`info.txt`存储了时间戳和对应的位姿,存储格式为`timestamp x y z x y z w`,`pointcloud`文件夹存储每一帧的点云文件,以对应时间戳命名,`gridmap_node.yaml`为本例程所使用的参数文件。 17 | 首先基于**README**安装好核心算法依赖库(无需安装ROS2相关依赖)。 18 | ```bash 19 | # build source code 20 | cd 3rdparty/occmap 21 | bash install.sh 22 | # build example 23 | cd ../example 24 | mkdir build && cd build 25 | cmake .. make -j4 26 | # run demo 27 | ./demo ../../data/ 28 | ``` 29 | 30 | ## 4. 代码详解 31 | 本节对`demo.cc`中的api调用和实现过程进行解释。 32 | 33 | 3.1 初始化数据集路径 34 | ```cpp 35 | const std::string root_path = argv[1]; 36 | const std::string config_file = root_path + "/gridmap_node.yaml"; 37 | std::vector vpointcloud_pose = readDataset(root_path); 38 | ``` 39 | 40 | 3.2 从`gridmap_node.yaml`文件解析参数,在使用前需要将`gridmap_node.yaml`中的`create_map_path`更改为当前系统的可用路径。 41 | ```cpp 42 | transform::Eigentf eigentf; 43 | ProbabilityParam probability_param; 44 | SubMapParam submap_param; 45 | FilesSaveParam files_save_param; 46 | FilterParam filter_param; 47 | CeresScanMatchingParam ceres_param; 48 | MapBeautiParam mapbeauti_param; 49 | ParseParameters(config_file, eigentf, probability_param, submap_param, files_save_param, filter_param, ceres_param, mapbeauti_param); 50 | if (boost::filesystem::exists(files_save_param.sub_range_data_path)) { 51 | std::cout << "remove path: " << files_save_param.sub_range_data_path << std::endl; 52 | boost::filesystem::remove_all(files_save_param.sub_range_data_path); 53 | } 54 | if(common::createFolder(files_save_param.sub_range_data_path)) { 55 | std::cout << "create path: " << files_save_param.sub_range_data_path << std::endl; 56 | } 57 | ``` 58 | 59 | 3.3 实例化建图入口对象 60 | ```cpp 61 | auto g_mapper_ = std::make_shared(eigentf, probability_param, submap_param, files_save_param, filter_param, ceres_param, mapbeauti_param); 62 | ``` 63 | 64 | 3.4 将点云数据插入地图 65 | ```cpp 66 | for(auto& pointcloud_pose : vpointcloud_pose) { 67 | std::unique_ptr range_data_ptr 68 | = g_mapper_->AddRangeData(pointcloud_pose.time, pointcloud_pose.pointcloud, pointcloud_pose.pose, eigentf.laser2baselink); 69 | } 70 | ``` 71 | 72 | 3.5 生成地图,最终地图和二进制子图文件会保存在`create_map_path`路径下。 73 | ```cpp 74 | g_mapper_->GenerateGrid("map"); 75 | ``` -------------------------------------------------------------------------------- /docs/en/third_party_library_management_en.md: -------------------------------------------------------------------------------- 1 | # **Robot platform third-party library management** 2 | 3 | Project L91 -Carpo 4 | 5 | **Open Source Library Summary ** 6 | 7 | | Name | Version | License | Whether to modify | 8 | | --------- | ------------------------------------------------------------ | ------------------------------------------------------------ | ----------------- | 9 | | ROS2 | Galactic | Apache 2.0 | No | 10 | | ZXing | master/[2650e8a](https://github.com/liangxiaowei00/zxing-cpp/commit/2650e8a318e28eaeb246ee636909e637376de924) | Apache License2.0 | Yes | 11 | | rapidjson | V1.1.0 | MIT | No | 12 | | toml | V3.7.0 | MIT | No | 13 | | xpack | V1.0.1 | Apache License2.0 | No | 14 | | opencv | v4.1.1. | [3-clause BSD license](https://github.com/opencv/opencv/blob/4.4.0/LICENSE). 
| No | 15 | | lcm | master branch,commit: 91ce7a | [ LGPL-2.1 License ](https://github.com/lcm-proj/lcm/blob/master/COPYING) | No | 16 | | protobuf | 3.19.2 | https://github.com/protocolbuffers/protobuf/blob/main/LICENSE | No | 17 | | grpc | V1.44.0 | Apache License2.0 | No | 18 | | yaml-cpp | 0.7.0 | MIT | No | 19 | | ceres | 1.14.0 | Apache-2.0 | No | 20 | | HDF5 | 1.10.8 | `https://github.com/HDFGroup/hdf5` | No | 21 | | colmap | modify from dev branch commit: 2b7230679957e4dccd590ab467931d6cfffb9ede | BSD | Yes | 22 | | blockly | 8.0.1 | Apache License2.0 | Yes | 23 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_train_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_train设计文档 2 | 3 | ## 1 概述 4 | 本文旨在说明与小爱训练计划结合相关的设计需求、设计细节等。该功能主要目的是cyberdog通过小爱训练计划接口,实现自定义语音指令词及触发该指令词后机器的对应动作。 5 | 训练计划场景主要有以下条件: 6 | 1. 仿生机器人处于WiFi环境中,能够访问互联网; 7 | 2. App已与仿生机器人建立链接; 8 | 9 | ## 2 需求 10 | 1. 小爱接收训练触发词后,cyberdog能作出响应(包括但不限于语音、动作、灯效等) 11 | 2. 用户能够自定义训练词及其响应 12 | 3. 系统将保留有限个训练词(目前6个),用户只能使用,不得查询、修改。 13 | 14 | ## 3 架构 15 | ![](./image/cyberdog_train/cyberdog_train_cn_1.png) 16 | ## 4 初始化及触发流程 17 | ### 4.1 初始化流程 18 | ![](./image/cyberdog_train/cyberdog_train_cn_1.png) 19 | ### 4.2 触发流程 20 | ![](./image/cyberdog_train/cyberdog_train_cn_1.png) 21 | ## 5 训练计划分组 22 | ### 5.1 系统保留字段 23 | 系统保留字段不能被用户查询、删除、修改; 24 | ```json 25 | {"伸懒腰":["motion", "1"]} 26 | {"伸左手":["motion", "2"]} 27 | {"伸右手":["motion", "3"]} 28 | ``` 29 | ### 5.2 可视化保留字段 30 | 可视化保留字段不能被用户查询、删除、修改; 31 | ```json 32 | {"终止任务":["vp_task", "shutdown"]} 33 | {"暂停任务":["vp_task", "suspent"]} 34 | {"继续任务":["vp_task", "recover"]} 35 | ``` 36 | ### 5.3 用户自定义字段 37 | ```json 38 | {"trigger1":["type", "value"]} 39 | {"trigger2":["type", "value"]} 40 | {"trigger3":["type", "value"]} 41 | ...... 42 | ``` 43 | ## 6 接口 44 | 1. 训练词发布接口(基础接口) 45 | 接口形式:ros topic 46 | 接口名字:"train_plan_word" 47 | 消息文件:"protocol/msg/TrainPlan.msg" 48 | 消息内容: 49 | ```json 50 | string trigger # 张三、李四、王五 51 | string type # 未定义(undefined)、运(motion)、语音(audio)、LED(led)、导航(navigation)、跟随(follow) 52 | string value # 后空翻 53 | ``` 54 | 2. 训练词增加接口 55 | 接口形式:ros service 56 | 接口名字:"set_train_plan" 57 | 消息文件:"protocol/msg/SetTrainPlan.msg" 58 | 消息内容: 59 | ```json 60 | string trigger 61 | string type 62 | string value 63 | --- 64 | int32 skill_id 65 | int32 code 66 | ``` 67 | 3. 训练词删除接口 68 | 接口形式:ros service 69 | 接口名字:"delete_train_plan" 70 | 消息文件:"protocol/msg/TrainPlan.msg" 71 | 消息内容: 72 | ```json 73 | string type 74 | string value 75 | --- 76 | int code 77 | ``` 78 | 4. 训练词修改接口 79 | 接口形式:ros service 80 | 接口名字:"modify_train_plan" 81 | 消息文件:"protocol/msg/SetTrainPlan.msg" 82 | 消息内容: 83 | ```json 84 | string trigger 85 | string type 86 | string value 87 | --- 88 | int code 89 | ``` 90 | 5. 查询所有训练词接口 91 | 接口形式:ros service 92 | 接口名字:"query_all_train_plan" 93 | 消息文件:"protocol/msg/TrainPlanAll.msg" 94 | 消息内容: 95 | ```json 96 | --- 97 | int code 98 | TrainPlan[] training_set 99 | ``` 100 | 6. 
查询指定训练词接口 101 | 接口形式:ros service 102 | 接口名字:"query_train_plan" 103 | 消息文件:"protocol/msg/TrainPlan.msg" 104 | 消息内容: 105 | ```json 106 | string type 107 | string value 108 | --- 109 | int code 110 | TrainPlan[] training_set 111 | ``` 112 | -------------------------------------------------------------------------------- /docs/en/cyberdog_grpc_en.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_grpc Design 2 | 3 | ## Overview 4 | 5 | cyberdog_grpc is a ROS node that converts ROS topics, services, actions into gRPC services, which implements communication between ROS and APP. 6 | 7 | ## Basic Information about GRPC 8 | 9 | gRPC is a modern open source high performance Remote Procedure Call (RPC) framework that can run in any environment. It can efficiently connect services in and across data centers with pluggable support for load balancing, tracing, health checking and authentication. It is also applicable in last mile of distributed computing to connect devices, mobile applications and browsers to backend services. 10 | 11 | gRPC can use protocol buffers as both its Interface Definition Language (IDL) and as its underlying message interchange format. Protocol buffers provide a language-neutral, platform-neutral, extensible mechanism for serializing structured data in a forward-compatible and backward-compatible way. It’s like JSON, except it’s smaller and faster, and it generates native language bindings. 12 | 13 | ### Opensource Repository 14 | 15 | https://github.com/grpc/grpc 16 | 17 | #### Version 18 | 19 | Version 1.44.0 is used in this product, but other versions are probably available to communicate with it. 20 | 21 | #### Installation 22 | 23 | C++: https://github.com/grpc/grpc/blob/master/BUILDING.md 24 | 25 | Python: `pip install grpcio grpcio_tools` 26 | 27 | To install other launguages, you can refer to https://github.com/grpc/grpc/blob/master/README.md 28 | 29 | ### Test 30 | 31 | Compile the test example: 32 | 33 | ```Shell 34 | cd grpc/examples/cpp/helloworld 35 | mkdir -p cmake/build 36 | cd cmake/build 37 | cmake ../.. 38 | make 39 | ``` 40 | 41 | Run server: 42 | 43 | ```Shell 44 | ./greeter_server 45 | ``` 46 | 47 | It will display on the screen: Server listening on 0.0.0.0:50051 48 | 49 | Run clieint: 50 | 51 | ```Shell 52 | ./greeter_client 53 | ``` 54 | 55 | It will display on the screen: Greeter received: Hello world 56 | 57 | ## Interface Design 58 | 59 | We use an interface in the form of `nameCode + content`to make RPC requests. 60 | 61 | ![image](./image/cyberdog_grpc/cyberdog_grpc_en.svg) 62 | 63 | ### NameCode 64 | 65 | Command code, corresponds to a unique function and it's corresponding ROS interface. 66 | 67 | Type: fixed32 in protobuf, uint32 in cpp 68 | 69 | ### Serialized Content 70 | 71 | Most of RPC commands have parameters and response data. These contents are uniformly transmitted in a JSON format string. We use [RapidJSON](https://github.com/Tencent/rapidjson) to implement serialization and deserialization. 72 | 73 | Type: string in protobuf, std::string in cpp 74 | 75 | #### Params 76 | 77 | Corresponds to input info of the function. 78 | 79 | #### Data 80 | 81 | Corresponds to output info of the function. 
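To make the `nameCode + content` pattern above concrete, the following is a minimal C++ sketch of how the serialized content could be assembled with RapidJSON. The command code value and the JSON field names used here are illustrative assumptions only, not real entries; the actual codes and fields are defined in the protocol document linked below.

```cpp
// Minimal sketch: building the JSON "params" content for a hypothetical nameCode.
// The code 1001 and the fields "motion_id"/"duration" are made up for illustration.
#include <cstdint>
#include <string>

#include "rapidjson/document.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/writer.h"

std::string SerializeParams()
{
  rapidjson::Document params(rapidjson::kObjectType);
  auto & alloc = params.GetAllocator();
  params.AddMember("motion_id", 111, alloc);   // hypothetical input field
  params.AddMember("duration", 2.0, alloc);    // hypothetical input field

  rapidjson::StringBuffer buffer;
  rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
  params.Accept(writer);
  return buffer.GetString();                   // JSON string used as "params"
}

int main()
{
  uint32_t name_code = 1001;                   // hypothetical command code
  std::string params = SerializeParams();
  // name_code and params would then be packed into the gRPC request message.
  (void)name_code;
  (void)params;
  return 0;
}
```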
82 | 83 | [protocol doc](/cn/grpc_protocol.md) 84 | -------------------------------------------------------------------------------- /docs/en/cyberdog_bms_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_bms design document 2 | 3 | ## Overview 4 | 5 | ``cyberdog_bms`` provides bms data service to the client side in the form of ros2 plugin. This plug-in provides the necessary API interface for controlling bms, and converts the collected power board data into ros message format and publishes it through device_manager. 6 | 7 | ## Software Design 8 | 9 | ### Software framework 10 | ![](./image/cyberdog_bms/cyberdog_bms.png) 11 | 12 | 19 | 20 | ## Functional design 21 | 22 | - Flexible configuration of message sources, command ids , etc. through configuration files 23 | - Provide basic ability interfaces such as enable, shutdown, and self-check 24 | 25 | ## Configuration files 26 | 27 | - Source path: ``bridges/params/toml_config/device`` 28 | - Installed path:``/opt/ros2/cyberdog/share/params/toml_config/device`` 29 | - Configuration file: 30 | - ``battery_config.toml``: used to configure bms modules 31 | -Main configuration instructions: 32 | - Main configuration description: 33 | - ``Protocol``: communication protocol, default is ``CAN``. 34 | - ``can_interface``: Message channel for CAN communication, configurable ``can0``, ``can1`` 35 | - ``Array``: data packet message reception configuration 36 | - ``array_name``: data packet name 37 | - ``can_package_num``: the number of CAN data frames in the data packet 38 | - ``can_id``: data packet, CAN data frame ``CAN ID`` 39 | 40 | - ``cmd``: command packet message sending configuration 41 | - ``cmd_name``: instruction package name 42 | - ``can_id``: instruction package, CAN data frame ``CAN ID`` 43 | - ``ctrl_len``: The data length of the instruction data frame in the CAN package 44 | - ``ctrl_data``: Data default value of instruction data frame in CAN package 45 | 46 | ## ROS protocol 47 | - Source path: "bridges/protocol/ros" 48 | - Ros topic:``bms_status`` 49 | - Agreement introduction: 50 | - ``Protocol:: msg:: BmsStatus``: Power management module data format 51 | - Protocol path:``bridges/protocol/ros/msg/BmsStatus.msg`` 52 | 53 | ## API interface 54 | - ``bool Init(std::functionfunction_callback, bool simulation)``:initialize configuration 55 | - ``simulator = true``:Configure to emulate mode 56 | - ``function_callback``:Set callback function for message. 57 | - ``Open()``: Open BMS message reporting 58 | - ``Close()``: Stop BMS message reporting 59 | - ``SelfCheck()``: BMS self-check 60 | - ``LowPower()``: enter low power mode 61 | - ``void ServiceCommand(const std::shared_ptr request,std::shared_ptr response)``:ros2 service,using for ctrl bms. 62 | 63 | ## Debug command 64 | - Get bms topic:``ros2 topic list | grep bms_status`` -------------------------------------------------------------------------------- /docs/en/cyberdog_camera_en.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | This is a camera program running on Xiaomi CyberDog, based on Nvidia [Argus](https://docs.nvidia.com/jetson/l4t-multimedia/group__LibargusAPI.html) and [Ros2](https://www.ros.org/) camera API. 3 | 4 | The program uses the interfaces provided by the Argus API to control the MIPI camera hardware and capture images in real-time, and uses the interfaces provided by ROS2 to manage the camera node and provide interaction interfaces for external modules. 
5 | 6 | The overall architecture of the module is shown below: 7 | ![](./image/cyberdog_camera/camera_arch.png) 8 | 9 | The internal description of the module is shown below: 10 | ![](./image/cyberdog_camera/camera_arch_inter.png) 11 | 12 | # Compilation 13 | To compile this module, you need to install several external software packages as follows: 14 | 15 | ## Dependencies 16 | ### 1. nvidia-l4t-jetson-multimedia-api 17 | ```console 18 | sudo apt-get install nvidia-l4t-jetson-multimedia-api 19 | ``` 20 | ### 2. cuda-toolkit 21 | ```console 22 | sudo apt-get install cuda-toolkit-10-2 23 | ``` 24 | ### 3. libavformat-dev 25 | ```console 26 | sudo apt-get install libavformat-dev 27 | ``` 28 | # Test Program 29 | This is a camera test program based on the camera API, which can be used to test whether the camera is working properly, and can also be used as a reference for using the camera API. 30 | 31 | ## Compilation 32 | ```console 33 | colcon build --merge-install --packages-up-to camera_test 34 | ``` 35 | 36 | ## Running 37 | 38 | ### Test program, used to test whether the camera is capturing images properly 39 | ```console 40 | ./build/camera_test/camera_test cam_id width height rgb/bgr 41 | ``` 42 | For example, to test camera 0 with a resolution of 640x480 and RGB output, use the following command: 43 | ./build/camera_test/camera_test 0 640 480 rgb 44 | 45 | ### Main camera service program, which can perform commands such as taking pictures and recording videos. 46 | ```console 47 | ros2 run camera_test camera_server 48 | ``` 49 | Command for taking pictures: 50 | ```console 51 | ros2 service call /camera_service protocol/srv/CameraService "{command: 1, args: ''}" 52 | ``` 53 | 54 | Command for recording videos: 55 | ```console 56 | //Start 57 | $ ros2 service call /camera_service protocol/srv/CameraService "{command: 2, args: ''}" 58 | //Stop 59 | $ ros2 service call /camera_service protocol/srv/CameraService "{command: 3, args: ''}" 60 | ``` 61 | 62 | ### ROS2 program for stereo/RGB cameras 63 | ```console 64 | ros2 run camera_test stereo_camera 65 | //configure 66 | ros2 lifecycle set /stereo_camera configure 67 | //activate 68 | ros2 lifecycle set /stereo_camera activate 69 | 70 | ros2 lifecycle set /stereo_camera deactivate 71 | 72 | ros2 lifecycle set /camera/camera cleanup 73 | ``` 74 | After running, the stereo camera and RGB camera will publish images through the /image_left, /image_right, and /image_rgb topics. 75 | -------------------------------------------------------------------------------- /docs/en/device_manager_en.md: -------------------------------------------------------------------------------- 1 | # device_manager 2 | 3 | ## Overview 4 | 5 | ``device_manager`` is the management module of ``cyberdog_touch``, ``cyberdog_uwb``, ``cyberdog_bms`` and other functional modules; it provides the ability of find-state machine management, service callback, and message release for each module, and each module is loaded into ``device_manager`` in the form of ros plugin. 6 | 7 | ## Software design 8 | 9 |
10 | 11 | ![avatar](./image/device_manager/device_manager.png) 12 | 13 |
14 | 15 | ## Functional design 16 | 17 | ### Module loading 18 | 19 | - [ROS plugin](https://github.com/ros2/ros2_documentation/blob/galactic/source/Tutorials/Beginner-Client-Libraries/Pluginlib.rst) 20 | 21 | - ``device_manager`` uses ``pluginlib::ClassLoader`` to load ``cyberdog_uwb`` and other modules. 22 | ``` 23 | // Refer to the ros sample for the loading process 24 | pluginlib::ClassLoader<polygon_base::RegularPolygon> poly_loader("polygon_base", "polygon_base::RegularPolygon"); 25 | 26 | std::shared_ptr<polygon_base::RegularPolygon> triangle = poly_loader.createSharedInstance("polygon_plugins::Triangle"); 27 | ``` 28 | 29 | ### Finite-state machine management 30 | - The finite-state machine controls the various peripheral modules, for example: controlling the "low power consumption" and "active" state switching of modules such as ``cyberdog_led``; 31 | 32 | - ``device_manager`` inherits ``cyberdog::machine::MachineActuator``; the client side can control ``device_manager`` state switching through the service interface provided by ``cyberdog::machine::MachineActuator``. Inside the finite-state machine, the loaded modules are switched to their corresponding states in turn. 33 | 34 | - For a detailed introduction to the finite-state machine, refer to: [Finite-state machine design](/en/cyberdog_machine_en.md) 35 | 36 | ### Topic&Service 37 | 38 | - ``device_manager`` provides ros message interfaces; the client side can subscribe to peripheral data topics to obtain peripheral data. 39 | - ``device_manager`` provides ros service interfaces; the client side can control the peripheral state through the peripheral control services. 40 | - ROS interface: 41 | - ``protocol::msg::TouchStatus``: touch message 42 | - ``protocol::msg::BmsStatus``: bms message 43 | - ``protocol::msg::UwbRaw``: uwb message 44 | - ``protocol::srv::GetUWBMacSessionID``: get uwb mac service 45 | - ``protocol::srv::LedExecute``: led control service 46 | 47 | ### Module plugin 48 | 49 | - [Bluetooth module](/en/cyberdog_bluetooth_en.md) 50 | - [bms module](/en/cyberdog_bms_en.md) 51 | - [led module](/en/cyberdog_led_en.md) 52 | - [touch module](/en/cyberdog_touch_en.md) 53 | - [uwb module](/en/cyberdog_uwb_en.md) 54 | - [wifi module](/en/cyberdog_wifi_en.md) 55 | 56 | ## Debug command 57 | 58 | - Get the device_manager finite-state machine service: 59 | 60 | ``` 61 | ros2 topic list | grep device_manager 62 | ``` 63 | 64 | - Finite-state machine switching (switch to the "Active" state): 65 | 66 | ``` 67 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/device_managermachine_service protocol/srv/FsMachine "{target_state: "Active"}" 68 | ``` 69 | -------------------------------------------------------------------------------- /docs/cn/algorithm_manager_cn.md: -------------------------------------------------------------------------------- 1 | # 算法任务管理设计文档 2 | 3 | ## 概述 4 | 算法任务管理模块是对系统中与自主行走能力有关的任务指令的集中管理模块,包括建图、定位、导航、跟随等算法任务。该模块根据系统整体状态机、任务执行状态等,对用户的任务指令进行决策,并根据确定的任务类型,管理系统中的硬件、软件资源。 5 | ## 模块架构 6 | 7 |
8 | 9 | ![](./image/algorithm_manager/algorithm_manager.png) 10 | 11 |
12 | 13 | ### 架构说明 14 | 15 | - 接口以ros action的形式提供。 16 | 17 | - 任务状态以独占形式进行管理,即当前如果有任务在执行时,新的任务指令将返回reject状态。同时停止任务指令也必须当前的任务状态对应,否则将返回停止失败,例如在执行建图任务时,调用方如果下发了停止导航的指令,将返回失败状态。 18 | 19 | - 算法任务包含了视觉建图、视觉定位、激光建图、激光定位、AB点导航、视觉跟随、UWB跟随任务。算法管理层负责启动相关的依赖节点、请求相关的服务,在停止任务时同时关闭相关的依赖节点和关闭相关的服务。 20 | 21 | ## 任务管理流程 22 | 23 | ### 1 激光建图 24 | 25 |
26 | 27 | ![](./image/algorithm_manager/lidar_mapping.png) 28 | 29 |
30 | 31 | 步骤: 32 | 33 | - step 1: 判断建图模式 34 | 35 | - step 2: 打开realsense传感器 36 | 37 | - step 3: 通知建图服务程序开始建图 38 | 39 | - step 4: 判断是否实时上报机器人当前位置 40 | 41 | - step 5: 开始建图 42 | 43 | ### 2 视觉建图 44 | 45 |
46 | 47 | ![](./image/algorithm_manager/vision_mapping.png) 48 | 49 |
50 | 51 | 步骤: 52 | 53 | - step 1: 判断建图模式 54 | 55 | - step 2: 打开realsense传感器、双目传感器 56 | 57 | - step 3: 通知建图服务程序开始建图 58 | 59 | - step 4: 判断是否实时上报机器人当前位置 60 | 61 | - step 5: 开始建图 62 | 63 | ### 3 激光重定位 64 | 65 |
66 | 67 | ![](./image/algorithm_manager/lidar_relocalization.png) 68 | 69 |
70 | 71 | 步骤: 72 | 73 | - step 1: 判断打开realsense传感器 74 | 75 | - step 2: 通知重定位服务程序开始重定位功能 76 | 77 | - step 3: 等待结果是否超时,超时直接退出定位,否则等待定位结果 78 | 79 | - step 4: 判断重定位状态, 0成功, 100重试和1200失败 80 | 81 | - step 5: 定位成功 82 | 83 | ### 4 视觉重定位 84 | 85 |
86 | 87 | ![](./image/algorithm_manager/vision_relocalization.png) 88 | 89 |
90 | 91 | 步骤: 92 | 93 | - step 1: 判断打开realsense传感器、双目传感器 94 | 95 | - step 2: 通知重定位服务程序开始重定位功能 96 | 97 | - step 3: 等待结果是否超时,超时直接退出定位,否则等待定位结果 98 | 99 | - step 4: 判断重定位状态, 0成功, 100重试和1200失败 100 | 101 | - step 5: 定位成功 102 | 103 | ### 5 AB点导航 104 | 105 |
106 | 107 | ![](./image/algorithm_manager/ab_navigation.png) 108 | 109 |
110 | 111 | 步骤: 112 | 113 | - step 1: 判断打开bt_navigator 114 | 115 | - step 2: 接收导航点B点位置,判断导航点有效性 116 | 117 | - step 3: 发送开始AB导航 118 | 119 | - step 4: 实时上报导航状态 120 | 121 | - step 5: 导航成功 122 | 123 | ### 6 视觉跟随 124 | 125 | 视觉跟随流程图 126 | 127 |
128 | 129 | ![](./image/algorithm_manager/vision_tracking.png) 130 | 131 |
132 | 133 | - 视觉跟随功能由两个阶段组成:第一个阶段是启动视觉跟随功能,开始人体识别;第二阶段由用户选择需要跟随的目标,机器人开始自动跟随; 134 | 135 | - 第一阶段的对上接口是一个action server,由GRPC下发视觉跟随启动命令;当收到启动视觉跟随的请求时,管理模块会对realsense和cyberdog_vision模块进行lifecycle管理,配置并激活相应模块;然后启动人体识别算法(由一个service client请求启动); 136 | 137 | - 之后等待用户选择需要跟随的目标; 138 | 139 | - 第二阶段的对上接口是一个service server,通过GRPC下发跟随目标的roi;service server对roi信息进行透传,直接转发给视觉跟随模块(通过一个service client请求启动服务);之后分别对cyberdog_tracking和cyberdog_tracking_base模块进行lifecycle管理,配置并激活相应模块;实现自动跟随。 140 | 141 | ### 7 UWB跟随 142 | 143 | uwb跟随流程图 144 | 145 |
146 | 147 | ![](./image/algorithm_manager/uwb_tracking.png) 148 | 149 |
150 | 151 | - uwb跟随功能中,首先需要用户在APP中实现了蓝牙遥控器(发送uwb数据)的配对,否则在启动时会提示uwb数据异常。 152 | 153 | - 启动流程与视觉跟随相似,包含了启动相关的资源开启、依赖节点的激活等过程,区别在于不需要用户点选跟随的目标。 154 | 155 | - uwb跟随中,在跟随目标静止时,狗可以表现出自主行为,相关的行为表现包含了执行指定的动作、播报语音、灯带效果的变化。 156 | 157 | - 跟随中可以检测跟随路径上的台阶,在检测到台阶后,狗可以实现跳台阶的动作,跳上台阶后恢复跟随的步态。 158 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_action_cn.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_action 设计文档 2 | 3 | ## 1. 功能概述 4 | 5 | 目前手势识别模块提供一个service和一个topic,service用于激活手势识别功能,激活后,在规定的timeout时间内,该节点会发布检测到的手势动作id的topic。源代码位置: 6 | https://github.com/MiRoboticsLab/interaction/tree/rolling/cyberdog_actionhttps://github.com/MiRoboticsLab/interaction/tree/rolling/cyberdog_action 7 | 8 | ## 2. 架构设计 9 | 10 |
11 | 12 | ![avatar](./image/cyberdog_action/cyberdog_action_flow.png) 13 | cyberdog_action 功能架构图 14 | 15 |
16 | 17 | 18 | - app:为调用gesture_action的功能模块,例如可视化编程。 19 | - cyberdog_action提供ros2 service接口用于控制手势动作识别的开启、关闭和timeout。 20 | - 开启处理流程: cyberdog_action收到开启算法的request后,需要依次打开camera、加载模型、逐帧推理以及将识别结果已ros2 topic的形式发布出去。 21 | - 关闭处理流程: cyberdog_action收到关闭算法的request或者请求时间timeout,需要依次关闭camera、卸载模型。 22 | 23 | 24 | ## 3. API接口 25 | ```Makefile 26 | - bool ReadTomlConfig():从配置文件读取相关参数,例如帧率、模型路径、版本等。 27 | - int activation_function_softmax(const _Tp * src, _Tp * dst, int length, int & max_index);:对模型的分类结果进行softmax操作。 28 | - void Gesture_Action_Rec_Fun( 29 | const std::shared_ptr request, 30 | std::shared_ptr response):处理client的请求,根据请求的参数打开算法一段时间或者立即关闭算法。 31 | - void CameraSignalCallback(const CameraMsg::SharedPtr msg):camera图像数据topic的回调函数。 32 | - void Camera_Operate():图像数据处理线程,包含更新推理模型、加载模型、cuda内存初始化以及控制算法持续运行时间,终止算法等操作。 33 | - void Inference_Operate():模型推理与优化后处理部分。 34 | - uint8_t ControlCamera(uint8_t _action):打开和关闭camera数据流。 35 | - int doInference():模型推理 36 | - int process_history(int max_index):通过对多帧数据进行累计判定,提高动作识别的正确率。 37 | - int proprecess(Gesture_ori max_index):过滤容易识别错的手势动作。 38 | - bool LoadEngineIntoCuda():加载tensorrt模型。 39 | - bool DestroyCuda():释放gpu内存和上下文资源 40 | - bool cudaMemoryPro():处理cuda内存以及推理所需要上下文资源。 41 | - void WifiSignalCallback(const WifiMsg::SharedPtr msg):WIFI信号监测的回调函数,用于在有网络的情况下,进行模型在线更新。 42 | ``` 43 | ## 4. 接口文件描述 44 | 45 | - 协议文件 : 46 | 47 | protocol/srv/GestureActionControl.srv 48 | 49 | protocol/msg/GestureActionResult.msg 50 | 51 | 52 | 53 | - 配置文件 54 | 55 | 位置:/params/toml_config/interaction/gesture_action.toml 56 | 57 | ```Makefile 58 | device = 0 # cuda device id 59 | queue_size = 5 # 预测结果队列长度 60 | frames_number = 3 # 相同预测结果帧数阈值 61 | refine_output = true # 是否采用调优策略 62 | softmax_thres = 0.5 # softmax阈值 63 | fps = 30 # 相机帧率,当前ai相机sdk最高支持30fps 64 | engine_path = "/SDCARD/vision/gesture_action/gesture_action.trt" # 引擎路径 65 | is_specified = false # 是否指定下载fds中指定版本模型,若为false,下载最新版本模型。 66 | # 若为true,下载最新版本模型 67 | version = "0.0" # 模型的指定版本 68 | ``` 69 | 70 | 71 | 72 | ## 5. 一些测试用到的cmd 73 | 74 | ```C%2B%2B 75 | //开启动作识别功能,timeout设置为60s 76 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/gesture_action_control protocol/srv/GestureActionControl "{command: 0,timeout: 100}" 77 | // 关闭动作识别功能 78 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/gesture_action_control protocol/srv/GestureActionControl "{command: 1,timeout: 100} 79 | 80 | ``` 81 | 82 | ## 6. 模型文件位置 83 | 84 | - 算法模型放置位置:/SDCARD/vision/gesture_action/gesture_action.trt。 85 | 86 | ## 7. 引用 87 | - 仓库中所使用的模型来至于,感谢其很棒的工作。 88 | -------------------------------------------------------------------------------- /docs/en/cyberdog_bringup_en.md: -------------------------------------------------------------------------------- 1 | #
cyberdog_bringup design document
2 | 3 | ## 1. Overview 4 | The main functions of bringup are as follows: 5 | 1. All configured nodes in the robot can be started according to the parameters in the target configuration file (*.yaml); 6 | 2. Dynamically start/stop certain nodes (currently only required for navigation2); 7 | 3. The configuration file is adapted to the internal interface of ROS2 launch as fully as possible; 8 | 9 | ## 2. Design 10 | ### 2.1. Feature design 11 | 12 | There are two types of bringup startup constraints, namely configuration files (*.yaml) and command line startup parameters (argv[]), among which: 13 | 1. The configuration files (*.yaml) have a unique main file, and parameters in other sub-files that meet the ros constraints can be loaded from the main file; 14 | 2. The command line startup parameters (argv[]) adapt to the ROS2 launch parameter interface. 15 | 16 | Consider the following issues regarding the presentation of the startup file: 17 | 1. When the startup constraints are only loaded dynamically into memory at runtime, they are not visible to the user, which makes troubleshooting and analysis difficult; 18 | 2. A single startup file cannot be launched multiple times at the same time, which also makes troubleshooting and analysis difficult. 19 | 20 | Therefore, the startup constraints are enumerated statically, and dynamic loading at runtime is avoided as much as possible, thus: 21 | 1. The startup file is generated from the configuration file (*.yaml) when cyberdog_bringup is compiled; 22 | 2. The parameters are enumerated as fully as possible, while an entry point for dynamic parameters is still reserved; 23 | 3. The startup file has a unique main file, which is responsible for starting all nodes under the constraints of the parameters; 24 | 4. The startup file has sub-files at different levels, which are responsible for starting the corresponding nodes under the constraints of the parameters. 25 | 26 | ### 2.2 Technology architecture 27 | 28 |
29 | 30 | ![](./image/cyberdog_bringup/cyberdog_bringup.svg) 31 | 32 |
33 | 34 | - cyberdog_bringup dynamically generates the required launch.py files at various levels according to the configuration files (bringup.yaml, launch.yaml, node.yaml). 35 | - The automatically generated launch.py file will load the manually created yaml file or launch file according to the configuration parameters to achieve flexible configuration. 36 | - Finally, according to the configuration, a single launch.py that starts all of the robot's nodes can be generated; it is invoked by system-level services so that the robot's software functions start automatically. 37 | 38 | The implementation plan is simply divided into the following 5 steps: 39 | 1. Classify and abstract the complex and lengthy grammar, extract the parts that must be manually coded, and abstract them into a self-defined meta-language; 40 | 2. Use a certain coding style (such as toml, yaml, json, etc.) to code the custom meta-language; 41 | 3. Write compilation tools for the coding style and meta-language determined above, that is, the program that realizes the automatic coding function; 42 | 4. At each compilation, automatically compile the meta-language, generate an executable script, and install it; 43 | 5. Set the boot to start through the system command. 44 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_common_cn.md: -------------------------------------------------------------------------------- 1 | # cyberdog_common设计文档 2 | 3 | ## 版本 4 | 5 | | 编写 | change | version | date | 6 | | ---- | ----------------------- | ------- | ---------- | 7 | | 刘凯 | 梳理cyberdog_common功能 | 1.0 | 2023.05.22 | 8 | 9 | ## 一、概述 10 | 11 | cyberdog_common对log、json、toml、lcm、fds、message queue、semaphore进行了封装,可在其它业务中引用。 12 | 13 | ## 二、设计 14 | 15 | ### 2.1 框架图 16 | 17 | ![](./image/cyberdog_common/framework.svg) 18 | 19 | ### 2.2 功能说明 20 | 21 | - CyberdogLogger是对ros rcl logger进行了封装,简化并丰富了日志输出接口宏定义 22 | 23 | - CyberdogToml是对toml操作进行了封装,还包括了文件存储 24 | 25 | - CyberdogJson是对json操作进行了封装,还包括了字符串转换、文件存储 26 | 27 | - LcmClient、LcmServer是对lcm的收发进行了cs封装 28 | 29 | - CyberdogFDS是fds功能封装,主要是从fds服务器进行拉取文件 30 | 31 | - Semaphore是condition_variable信号量封装类 32 | 33 | - MsgDeque是双向队列封装,消息阻塞式出队列 34 | 35 | ### 2.3 接口说明 36 | 37 | CyberdogLogger接口如下:(按从上到下紧急程度依次递增) 38 | 39 | DEBUG(...) // 可用于打印调试信息 40 | 41 | INFO(...) // 可用于打印一般的提示信息 42 | 43 | WARN(...) // 可用于打印警告信息 44 | 45 | ERROR(...) // 可用于打印错误信息 46 | 47 | FATAL(...) // 可用于打印致命错误信息 48 | 49 | 使用举例:INFO("hello world %s", name.c_str()) 50 | 51 | 52 | 53 | CyberdogToml接口如下: 54 | 55 | bool ParseFile() // 从文件中读取数据,并翻译成toml数据结构 56 | 57 | bool WriteFile() // 将toml数据写入文件 58 | 59 | bool Get() // 从toml表格数据中,依据键k读取一个值 60 | 61 | bool Set(toml::value & v, const std::string & k, const T & m) // 为toml表格数据设置一个值,若该键已经存在,则会覆盖,包括不同类型,若该键不存在,则会添加一个新的键值对 62 | 63 | bool Set(toml::value & v, const T & m) // 为toml数组数据设置一个值,该值会被追加在数组末尾 64 | 65 | bool Set(toml::value & v, size_t k, const T & m) // 为toml数组数据设置一个值,依据角标序号,若序号已经存在,则会覆盖,包括不同类型,若序号不存在,则不会添加,且返回错误 66 | 67 | 68 | 69 | CyberdogJson接口如下: 70 | 71 | bool Add() // 为已创建好的Document添加变量,拥有多种不同数据类型的重载 72 | 73 | bool Get() // 从json::Value中读取一个值,拥有多种不同数据类型的重载 74 | 75 | bool String2Document() // 反序列化,将字符串转化为json数据结构 76 | 77 | bool Document2String() // 序列化,将一个Document转化成string,用于传输 78 | 79 | bool Value2Document() // 将一个json::Value转换为json::Document,Document具有完备的内存资源(如分配器等),进而可以不依赖其它资源进行数据结构再处理,如Add等操作
80 | 81 | bool ReadJsonFromFile() // 从文件读取数据,并存储到json内存结构中 82 | 83 | bool WriteJsonToFile() // 将json数据写入文件,默认使用pretty格式,即保持相对美观的缩进 84 | 85 | 86 | 87 | LcmClient、LcmServer接口如下: 88 | 89 | LcmClient(const std::string & name) // 创建一个lcm Client对象 90 | 91 | LcmServer() // 创建一个lcm Server对象 92 | 93 | Request(const Req & request, Res & response, int mill_time) // 发起一个RPC调用 94 | 95 | 96 | 97 | CyberdogFDS接口如下: 98 | 99 | GetObject() // 获取要下载的bucket 100 | 101 | GetObjectSize() // 获取要下载内容的大小 102 | 103 | StopDownloading() // 停止下载 104 | 105 | 106 | 107 | Semaphore接口如下: 108 | 109 | WaitFor(int time) // 信号等待函数,毫秒级 110 | 111 | Wait() // 信号等待函数,阻塞直到获取消息 112 | 113 | Give() // 唤醒信号,唤醒一次 114 | 115 | GiveAll() // 唤醒信号,唤醒所有等待 116 | 117 | 118 | 119 | MsgDeque接口如下: 120 | 121 | bool EnQueue(const T & t) // 入队函数,会在队首原址构造,如果有等待出队线程,会唤醒一次 122 | 123 | bool EnQueueOne(const T & t) // 入队函数,会在队首原址构造,如果有等待出队线程,会唤醒一次,会保持队列不超过一个数据,用于click场景 124 | 125 | bool DeQueue(T & t) // 出队函数,如果队列非空,则消耗掉一个数据,并将该数据引用返回;如果队列为空,则进入条件等待,直到有数据入队;如果等待状态中发生析构,则会返回失败,此时引用参数不可用,其行为是未定义的 126 | 127 | void Reset() // 重置队列,此时所有等待函数会得到失败的返回值并解锁 -------------------------------------------------------------------------------- /docs/en/cyberdog_ultrasonic_en.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_ultrasonic Design 2 | 3 | ## Overview 4 | 5 | ``cyberdog_ultrasonic`` provides ultrasonic data services to the client side in the form of a ros2 plugin. This plug-in provides the necessary API interface for controlling the sensor, and converts the collected ultrasonic data into a ros message format that is fed back to the client side through the sensor manager. Cyberdog is configured with 1 ultrasonic sensor by default. 6 | 7 | ## Software Design 8 | 9 | #### Software framework 10 | 11 | 12 |
13 | 14 | ![avatar](./image/cyberdog_ultrasonic/cyberdog_ultrasonic.png) 15 | 16 |
17 | 18 | 38 | 39 | ## Functional design 40 | 41 | - Flexible configuration of sensor number, message source, command id, etc. through the configuration file 42 | - Provides basic capability interfaces such as sensor enable, shutdown, and self-test 43 | 44 | ## Configuration files 45 | 46 | - Source path: ``bridges/params/toml_config/sensors`` 47 | - Installed path: ``/opt/ros2/cyberdog/share/params/toml_config/sensors`` 48 | - Configuration file: 49 | - ``utrasonic_config.toml``: used to configure the number of sensors and the actual configuration file 50 | - ``ultrasonic.toml``: used to configure the sensor of the head 51 | - Main configuration description: 52 | - ``config_files``: the program gives the corresponding sensor entity according to the array member instance 53 | - ``Protocol``: communication protocol, default is ``CAN`` 54 | - ``can_interface``: message channel for CAN communication, configurable ``can0``, ``can1`` 55 | - ``Array``: data packet message reception configuration 56 | - ``array_name``: data packet name 57 | - ``can_package_num``: the number of CAN data frames in the data packet 58 | - ``can_id``: ``CAN ID`` of the CAN data frames in the data packet 59 | 60 | - ``cmd``: command packet message sending configuration 61 | - ``cmd_name``: instruction package name 62 | - ``can_id``: ``CAN ID`` of the CAN data frame in the instruction package 63 | - ``ctrl_len``: the data length of the instruction data frame in the CAN package 64 | - ``ctrl_data``: default data value of the instruction data frame in the CAN package 65 | 66 | ## ROS protocol 67 | - Source path: ``bridges/protocol/ros`` 68 | - ROS topic: ``ultrasonic_payload`` 69 | - Protocol description: 70 | - ``sensor_msgs::msg::Range``: single ultrasonic data format 71 | - Protocol path: ``bridges/protocol/ros/msg/UltrasonicPayload.msg`` 72 | 73 | ## API interface 74 | - ``Init(bool simulator)``: initialize configuration 75 | - ``simulator = true``: configure simulation mode 76 | - ``Open()``: turn on the sensor 77 | - ``Start()``: enable the sensor 78 | - ``Stop()``: stop the sensor 79 | - ``Close()``: turn off the sensor 80 | - ``SelfCheck()``: sensor self-check 81 | - ``LowPowerOn()``: enter low power mode 82 | - ``LowPowerOff()``: exit low power mode 83 | - ``SetSinglePayloadCallback(std::function<void(std::shared_ptr<sensor_msgs::msg::Range> payload)> cb)``: set the callback function for messages 84 | 85 | ## Debug command 86 | - Get ultrasonic topic: ``ros2 topic list | grep ultrasonic_payload`` -------------------------------------------------------------------------------- /docs/libs/docsify-copy-code.min.js: -------------------------------------------------------------------------------- 1 | /*!
2 | * docsify-copy-code 3 | * v2.1.1 4 | * https://github.com/jperasmus/docsify-copy-code 5 | * (c) 2017-2020 JP Erasmus 6 | * MIT license 7 | */ 8 | !function(){"use strict";function s(o){return(s="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(o){return typeof o}:function(o){return o&&"function"==typeof Symbol&&o.constructor===Symbol&&o!==Symbol.prototype?"symbol":typeof o})(o)}!function(o,e){void 0===e&&(e={});var t=e.insertAt;if(o&&"undefined"!=typeof document){var n=document.head||document.getElementsByTagName("head")[0],c=document.createElement("style");c.type="text/css","top"===t&&n.firstChild?n.insertBefore(c,n.firstChild):n.appendChild(c),c.styleSheet?c.styleSheet.cssText=o:c.appendChild(document.createTextNode(o))}}(".docsify-copy-code-button,.docsify-copy-code-button span{cursor:pointer;transition:all .25s ease}.docsify-copy-code-button{position:absolute;z-index:1;top:0;right:0;overflow:visible;padding:.65em .8em;border:0;border-radius:0;outline:0;font-size:1em;background:grey;background:var(--theme-color,grey);color:#fff;opacity:0}.docsify-copy-code-button span{border-radius:3px;background:inherit;pointer-events:none}.docsify-copy-code-button .error,.docsify-copy-code-button .success{position:absolute;z-index:-100;top:50%;right:0;padding:.5em .65em;font-size:.825em;opacity:0;-webkit-transform:translateY(-50%);transform:translateY(-50%)}.docsify-copy-code-button.error .error,.docsify-copy-code-button.success .success{right:100%;opacity:1;-webkit-transform:translate(-115%,-50%);transform:translate(-115%,-50%)}.docsify-copy-code-button:focus,pre:hover .docsify-copy-code-button{opacity:1}"),document.querySelector('link[href*="docsify-copy-code"]')&&console.warn("[Deprecation] Link to external docsify-copy-code stylesheet is no longer necessary."),window.DocsifyCopyCodePlugin={init:function(){return function(o,e){o.ready(function(){console.warn("[Deprecation] Manually initializing docsify-copy-code using window.DocsifyCopyCodePlugin.init() is no longer necessary.")})}}},window.$docsify=window.$docsify||{},window.$docsify.plugins=[function(o,r){o.doneEach(function(){var o=Array.apply(null,document.querySelectorAll("pre[data-lang]")),c={buttonText:"Copy to clipboard",errorText:"Error",successText:"Copied"};r.config.copyCode&&Object.keys(c).forEach(function(t){var n=r.config.copyCode[t];"string"==typeof n?c[t]=n:"object"===s(n)&&Object.keys(n).some(function(o){var e=-1',''.concat(c.buttonText,""),''.concat(c.errorText,""),''.concat(c.successText,""),""].join("");o.forEach(function(o){o.insertAdjacentHTML("beforeend",e)})}),o.mounted(function(){document.querySelector(".content").addEventListener("click",function(o){if(o.target.classList.contains("docsify-copy-code-button")){var e="BUTTON"===o.target.tagName?o.target:o.target.parentNode,t=document.createRange(),n=e.parentNode.querySelector("code"),c=window.getSelection();t.selectNode(n),c.removeAllRanges(),c.addRange(t);try{document.execCommand("copy")&&(e.classList.add("success"),setTimeout(function(){e.classList.remove("success")},1e3))}catch(o){console.error("docsify-copy-code: ".concat(o)),e.classList.add("error"),setTimeout(function(){e.classList.remove("error")},1e3)}"function"==typeof(c=window.getSelection()).removeRange?c.removeRange(t):"function"==typeof c.removeAllRanges&&c.removeAllRanges()}})})}].concat(window.$docsify.plugins||[])}(); 9 | //# sourceMappingURL=docsify-copy-code.min.js.map 10 | -------------------------------------------------------------------------------- /docs/en/sensor_manager_en.md: 
-------------------------------------------------------------------------------- 1 | # sensor_manager 2 | 3 | ## Overview 4 | 5 | ``sensor_manager`` is the management module of ``cyberdog_tof``, ``cyberdog_ultrasonic``, ``cyberdog_lidar`` and other functional modules; it provides finite-state machine management, service callbacks, and message publishing for each module. Each module is loaded into ``sensor_manager`` in the form of a ros plugin. 6 | ## Software design 7 | 8 |
9 | 10 | ![avatar](./image/sensor_manager/sensor_manager.png) 11 | 12 |
13 | 14 | ## Functional design 15 | 16 | - The finite-state machine controls the various peripheral modules, for example controlling the ``low power`` and ``active`` state switching of modules such as ``cyberdog_tof``; 17 | - Provides a message channel for communication with and control of each peripheral module for the ros2 platform client side; for example, the client can control the opening and closing of ``cyberdog_tof`` through the ``protocol::srv::SensorOperation`` service, and a subscriber can subscribe to the ``sensor_msgs::msg::Range`` message to obtain ultrasonic sensor status information. 18 | 19 | ### Module loading 20 | - [ROS plugin](https://github.com/ros2/ros2_documentation/blob/galactic/source/Tutorials/Beginner-Client-Libraries/Pluginlib.rst) 21 | 22 | - ``sensor_manager`` uses ``pluginlib::ClassLoader`` to load ``cyberdog_tof`` and other modules. 23 | 24 | ``` 25 | //Refer to the ros sample for the loading process 26 | pluginlib::ClassLoader<polygon_base::RegularPolygon> poly_loader("polygon_base", "polygon_base::RegularPolygon"); 27 | 28 | std::shared_ptr<polygon_base::RegularPolygon> triangle = poly_loader.createSharedInstance("polygon_plugins::Triangle"); 29 | ``` 30 | 31 | ### Finite-state machine management 32 | - The finite-state machine controls the various peripheral modules, for example controlling the ``low power`` and ``active`` state switching of modules such as ``cyberdog_tof``; 33 | 34 | - ``sensor_manager`` inherits ``Cyberdog::machine::MachineActuator``; the client side can control the ``sensor_manager`` state switching through the service interface provided by ``Cyberdog::machine::MachineActuator``. Inside the finite-state machine, the loaded modules are switched to the corresponding states in turn. 35 | - For a detailed introduction to the finite-state machine, refer to: [Finite-state machine design](/en/cyberdog_machine_en.md) 36 | 37 | ### Topic&Service 38 | 39 | - ``sensor_manager`` provides a ros message interface, and the client side can subscribe to sensor data topics to obtain sensor data. 40 | - ``sensor_manager`` provides a ros service interface, and the client side can control the sensor state through the sensor control service. 41 | - ROS interface: 42 | - ``protocol::msg::GpsPayload``: GPS message 43 | - ``ScanMsg``: Radar message 44 | - ``sensor_msgs::msg::Range``: Ultrasonic message 45 | - ``protocol::msg::HeadTofPayload``: head Tof message 46 | - ``protocol::msg::RearTofPayload``: rear Tof message 47 | - ``protocol::srv::SensorOperation``: Sensor control service 48 | 49 | ### Module plugin 50 | 51 | - [GPS Module](/en/cyberdog_gps_en.md); 52 | - [TOF Module](/en/cyberdog_tof_en.md); 53 | - [Radar Module](/en/cyberdog_lidar_en.md); 54 | - [Ultrasound Module](/en/cyberdog_ultrasonic_en.md). 55 | 56 | ## Debug command 57 | 58 | - Get the sensor_manager finite-state machine service: 59 | 60 | ``` 61 | ros2 service list | grep sensor_manager 62 | ``` 63 | 64 | - Finite-state machine switch (switch to the ``Active`` state): 65 | 66 | ``` 67 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/sensor_managermachine_service protocol/srv/FsMachine "{target_state: Active}" 68 | ``` 69 | -------------------------------------------------------------------------------- /docs/cn/image/cyberdog_bluetooth/cyberdog_bluetooth_node_cn.svg: -------------------------------------------------------------------------------- 1 |
蓝牙节点 / 蓝牙核心模块 / UWB跟随模块 / 固件升级模块 / ROS接口
-------------------------------------------------------------------------------- /docs/en/image/cyberdog_bluetooth/cyberdog_bluetooth_node_cn.svg: -------------------------------------------------------------------------------- 1 |
蓝牙节点 / 蓝牙核心模块 / UWB跟随模块 / 固件升级模块 / ROS接口
-------------------------------------------------------------------------------- /docs/cn/image/cyberdog_bluetooth/cyberdog_bluetooth_node_en.svg: -------------------------------------------------------------------------------- 1 |
cyberdog_bluetooth / bluetooth core / uwb tracking / dfu / ROS interface
-------------------------------------------------------------------------------- /docs/en/image/cyberdog_bluetooth/cyberdog_bluetooth_node_en.svg: -------------------------------------------------------------------------------- 1 |
cyberdog_bluetooth / bluetooth core / uwb tracking / dfu / ROS interface
-------------------------------------------------------------------------------- /docs/en/cyberdog_tof_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_tof Design 2 | 3 | ## Overview 4 | 5 | ``cyberdog_tof`` provides tof data services to the client side in the form of a ros2 plugin. This plug-in provides the necessary API interface for controlling the sensor, and converts the collected tof data into the ros message format and publishes it through sensor_manager. 6 | 7 | ## Software Design 8 | 9 | #### Software framework 10 | 11 |
12 | 13 | ![avatar](./image/cyberdog_tof/cyberdog_tof.png) 14 | 15 |
16 | 17 | 34 | 35 | ## Functional design 36 | 37 | - Flexible configuration of sensor number, message source, command id, etc. through the configuration file 38 | - Provides basic capability interfaces such as sensor enable, shutdown, and self-test 39 | 40 | ## Configuration files 41 | 42 | - Source path: ``bridges/params/toml_config/sensors`` 43 | - Installed path: ``/opt/ros2/cyberdog/share/params/toml_config/sensors`` 44 | - Configuration file: 45 | - ``tof_config.toml``: used to configure the number of sensors and the actual configuration file 46 | - ``tof_left_head.toml``: used to configure the left sensor of the head 47 | - ``tof_right_head.toml``: used to configure the right sensor of the head 48 | - ``tof_left_rear.toml``: used to configure the rear left sensor 49 | - ``tof_right_rear.toml``: used to configure the rear right sensor 50 | - Main configuration description: 51 | - ``config_files``: the program gives the corresponding sensor entity according to the array member instance 52 | - ``Protocol``: communication protocol, default is ``CAN`` 53 | - ``can_interface``: message channel for CAN communication, configurable ``can0``, ``can1`` 54 | - ``Array``: data packet message reception configuration 55 | - ``array_name``: data packet name 56 | - ``can_package_num``: the number of CAN data frames in the data packet 57 | - ``can_id``: ``CAN ID`` of the CAN data frames in the data packet 58 | 59 | - ``cmd``: command packet message sending configuration 60 | - ``cmd_name``: instruction package name 61 | - ``can_id``: ``CAN ID`` of the CAN data frame in the instruction package 62 | - ``ctrl_len``: the data length of the instruction data frame in the CAN package 63 | - ``ctrl_data``: default data value of the instruction data frame in the CAN package 64 | 65 | ## ROS protocol 66 | - Source path: ``bridges/protocol/ros`` 67 | - ROS topics: ``head_tof_payload``, ``rear_tof_payload`` 68 | - Protocol description: 69 | - ``protocol::msg::SingleTofPayload``: single TOF data format 70 | - Protocol path: ``bridges/protocol/ros/msg/SingleTofPayload.msg`` 71 | - ``protocol::msg::HeadTofPayload``: head TOF data format 72 | - Protocol path: ``bridges/protocol/ros/msg/HeadTofPayload.msg`` 73 | - ``protocol::msg::RearTofPayload``: rear TOF data format 74 | - Protocol path: ``bridges/protocol/ros/msg/RearTofPayload.msg`` 75 | 76 | ## API interface 77 | - ``Init(bool simulator)``: initialize configuration 78 | - ``simulator = true``: configure simulation mode 79 | - ``Open()``: turn on the sensor 80 | - ``Start()``: enable the sensor 81 | - ``Stop()``: stop the sensor 82 | - ``Close()``: turn off the sensor 83 | - ``SelfCheck()``: sensor self-check 84 | - ``LowPowerOn()``: enter low power mode 85 | - ``LowPowerOff()``: exit low power mode 86 | - ``SetSinglePayloadCallback(std::function<void(std::shared_ptr<protocol::msg::SingleTofPayload> payload)> cb)``: set the callback function for messages 87 | 88 | ## Debug command 89 | - Get the head Tof topic: ``ros2 topic list | grep head_tof_payload`` 90 | - Get the rear Tof topic: ``ros2 topic list | grep rear_tof_payload`` -------------------------------------------------------------------------------- /docs/en/motion_manager_en.md: -------------------------------------------------------------------------------- 1 | # Motion Management Design Documentation 2 | ## Overview 3 | The motion management module is responsible for centralized management of motion control functions. All motion control calls are forwarded to the motion control layer by this module, and it receives status feedback from the motion control layer.
In response to user instructions for motion control, this module makes centralized decisions and executes them based on the system's overall state machine, power status, and motion status. 4 | 5 | ## Modular architecture 6 | 7 | ### Software Architecture Diagram 8 | 9 |
10 | 11 | ![](./image/motion/motion.png) 12 | 13 |
14 | 15 | ### Architecture Description 16 | 17 | #### Role description 18 | 19 | MotionManager 20 | 21 | - The motion management interface of the Cyberdog software system. It is internally divided into three layers: MotionManager, MotionDecision, and MotionHandler. MotionManager inherits the state machine of CyberdogManager and implements the different states such as self-check, low power, shutdown, and OTA. MotionDecision makes decisions based on the machine's emergency-stop status, command priority, etc.; MotionHandler further manages each command by checking its legality, the machine's task status, the motor status, etc. 22 | - All operation and control calls that need to run in the Cyberdog framework can only pass through the interface of this module. 23 | - The interface has its own business logic, that is, calling the interface will always get a return value, but there may not be a motion response; all exceptions are uniformly defined in the return code. 24 | 25 | MotionAction 26 | 27 | - As the only interface between the NX main control board and the motion control board, it sends all motion commands to the motion control board through the lcm communication protocol, and reports the real-time status of the motion control board to the NX main control board. 28 | - According to all the actions defined by the motion controller, it manages and maintains the action attribute list, including the mapping between the mode and gait_id attributes of each action defined by the motion controller and the motion_id exposed externally by MotionManager, as well as the pre- and post-states and instructions allowed for each action. 29 | 30 | 31 | 32 | ## Interface Type Description 33 | 34 | - All actions are encapsulated, and three interfaces are provided: result command, servo command, and custom action command. Among them, the custom action command is only open to visual programming, while the other commands are open to all callers. 35 | - Servo commands are mainly for the locomotion actions defined by the motion control board, that is, slow walking, fast walking, trotting, jumping and other actions. After such instructions are sent to the motion controller, it will maintain the state according to the relevant parameters of the last frame, such as speed and leg height. In APP and other scenarios that require grpc communication, in order to avoid the motion controller running out of control after the communication is disconnected, the interval between commands issued by the APP and other upper layers is required to be no more than 200ms by default; otherwise the main control board issues a standing command to bring the robot to a stop. 36 | - Servo commands realize multi-party collaborative control, such as APP, Bluetooth handle, navigation tasks, etc. Different command priorities are defined internally, and control management is realized according to the priority. The smaller the value, the higher the priority. The following are the default priority definitions (a rough comparison sketch based on these values is given at the end of this document): 37 | 38 | ```C++ 39 | App = 0 # Android APP 40 | Audio = 1 # voice 41 | Vis = 2 # visual programming 42 | BluTele = 3 # bluetooth controller 43 | Algo = 4 # Navigation and other tasks 44 | ``` 45 | 46 | ## Motion Control Flow 47 | 48 |
49 | 50 | ![](./image/motion/motion_flow_en.png) 51 | 52 |
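The priority values listed above are plain integers that MotionDecision compares when several callers contend for control. The following is a rough, illustrative C++ sketch (not the actual implementation) of such a comparison; the enum values mirror the defaults in this document, and `CanPreempt` is a hypothetical helper name.

```C++
#include <cstdint>
#include <iostream>

// Default caller priorities from this document; a smaller value means a higher priority.
enum class MotionPriority : uint8_t {
  App = 0,      // Android APP
  Audio = 1,    // voice
  Vis = 2,      // visual programming
  BluTele = 3,  // bluetooth controller
  Algo = 4      // navigation and other tasks
};

// Hypothetical helper: a new caller may take over the servo command channel
// only if its priority value is not larger (i.e. not lower) than the current owner's.
bool CanPreempt(MotionPriority current_owner, MotionPriority new_caller)
{
  return static_cast<uint8_t>(new_caller) <= static_cast<uint8_t>(current_owner);
}

int main()
{
  std::cout << std::boolalpha
            << CanPreempt(MotionPriority::App, MotionPriority::Algo) << "\n"   // false: Algo cannot interrupt App
            << CanPreempt(MotionPriority::Algo, MotionPriority::App) << "\n";  // true: App can interrupt Algo
  return 0;
}
```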
-------------------------------------------------------------------------------- /docs/en/cyberdog_occmap_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_occmap 2 | 3 | ## 1 Introduction 4 | cyberdog_occmap is an online grid map reconstruction algorithm based on single-line lidar. It generates the corresponding grid map in real time from the input poses and scanning point clouds. Based on table-lookup updates and Protobuf, it implements a very lightweight real-time grid mapping algorithm; it occupies only 10% of the CPU resources of a single core on the NX platform. To make secondary development easier, this project separates the core algorithm from ROS2 and places it under the `3rdparty/occmap` path, so developers can refer to the ROS2 interface for secondary development and use. 5 | 6 | ## 2. Architecture 7 | 8 |
9 | 10 | ![](./image/cyberdog_occmap/cyberdog_occmap_en.png) 11 | 12 |
13 | 14 | ## 3. Use tutorial 15 | This project provides a minimal routine that generates a grid map offline from a single-line lidar scanning point cloud file and its corresponding pose file. 16 | The source code is located in `3rdparty/occmap/example` and the data is located in `3rdparty/occmap/data`. Among them, `info.txt` stores the timestamps and the corresponding poses in the format `timestamp x y z x y z w`, the `pointcloud` folder stores the point cloud file of each frame, named after the corresponding timestamp, and `gridmap_node.yaml` is the parameter file used by this routine. 17 | First, install the core algorithm dependencies based on **README** (no need to install ROS2 related dependencies). 18 | ```bash 19 | # build source code 20 | cd 3rdparty/occmap 21 | mkdir build && cd build 22 | cmake .. && make 23 | make install 24 | # build example 25 | cd ../example 26 | mkdir build && cd build 27 | cmake .. && make -j4 28 | # run demo 29 | ./demo ../../data/ 30 | ``` 31 | Follow the above tutorial to achieve the effect shown in the figure below. 32 | 33 | ## 4. Detailed code explanation 34 | This section explains the api calls and implementation process in `demo.cc`. 35 | 36 | 4.1 Initialize the dataset path 37 | ```cpp 38 | const std::string root_path = argv[1]; 39 | const std::string config_file = root_path + "/gridmap_node.yaml"; 40 | std::vector vpointcloud_pose = readDataset(root_path); 41 | ``` 42 | 43 | 4.2 Parse the parameters from the `gridmap_node.yaml` file. Before using it, you need to change `create_map_path` in `gridmap_node.yaml` to a path available on the current system. 44 | ```cpp 45 | transform::Eigentf eigentf; 46 | ProbabilityParam probability_param; 47 | SubMapParam submap_param; 48 | FilesSaveParam files_save_param; 49 | FilterParam filter_param; 50 | CeresScanMatchingParam ceres_param; 51 | MapBeautiParam mapbeauti_param; 52 | ParseParameters(config_file, eigentf, probability_param, submap_param, files_save_param, filter_param, ceres_param, mapbeauti_param); 53 | if (boost::filesystem::exists(files_save_param.sub_range_data_path)) { 54 | std::cout << "remove path: " << files_save_param.sub_range_data_path << std::endl; 55 | boost::filesystem::remove_all(files_save_param.sub_range_data_path); 56 | } 57 | if (common::createFolder(files_save_param.sub_range_data_path)) { 58 | std::cout << "create path: " << files_save_param.sub_range_data_path << std::endl; 59 | } 60 | ``` 61 | 62 | 4.3 Instantiate the mapping entry object 63 | ```cpp 64 | auto g_mapper_ = std::make_shared(eigentf, probability_param, submap_param, files_save_param, filter_param, ceres_param, mapbeauti_param); 65 | ``` 66 | 67 | 4.4 Insert point cloud data into the map 68 | ```cpp 69 | for (auto & pointcloud_pose : vpointcloud_pose) { 70 | std::unique_ptr range_data_ptr 71 | = g_mapper_->AddRangeData(pointcloud_pose.time, pointcloud_pose.pointcloud, pointcloud_pose.pose, eigentf.laser2baselink); 72 | } 73 | ``` 74 | 75 | 4.5 Generate the map; the final map and binary submap files will be saved in `create_map_path`. 76 | ```cpp 77 | g_mapper_->GenerateGrid("map"); 78 | ``` -------------------------------------------------------------------------------- /docs/en/cyberdog_uwb_en.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_uwb Design 2 | 3 | ## Overview 4 | 5 | ``cyberdog_uwb`` provides uwb data services to the client side in the form of a ros2 plugin.
This plug-in provides the necessary API interface for controlling the sensor, and converts the collected uwb data into the ros message format, which is fed back to the client side through the device manager. Cyberdog is configured with 2 or 4 UWB modules by default. 6 | 7 | ## Software Design 8 | 9 | #### Software framework 10 | 11 |
12 | 13 | ![avatar](./image/cyberdog_uwb/cyberdog_uwb.png) 14 | 15 |
16 | 17 | 34 | 35 | ## Functional design 36 | 37 | - Flexible configuration of sensor number, message source, command id, etc. through the configuration file 38 | - Provides basic capability interfaces such as sensor enable, shutdown, and self-test 39 | 40 | ## Configuration files 41 | 42 | - Source path: ``bridges/params/toml_config/device`` 43 | - Installed path: ``/opt/ros2/cyberdog/share/params/toml_config/device`` 44 | - Configuration file: 45 | - ``uwb_config.toml``: used to configure the number of sensors and the actual configuration file 47 | - ``uwb_head_tof.toml``: used to configure the forward sensor 48 | - ``uwb_head_uwb.toml``: used to configure the rear sensor 49 | - ``uwb_rear_uwb.toml``: used to configure the left sensor 50 | - ``uwb_head_tof.toml``: used to configure the right sensor 51 | - Main configuration description: 52 | - ``simulate``: simulation mode switch 53 | - ``use_static_mac``: uwb static mac setting switch 54 | - ``uwb``: configures the uwb device entities; the program instantiates as many uwb devices as this field specifies 55 | - ``com_file``: uwb device sub-configuration file 56 | - ``Protocol``: communication protocol, default is ``CAN`` 57 | - ``can_interface``: message channel for CAN communication, configurable ``can0``, ``can1`` 58 | - ``Array``: data packet message reception configuration 59 | - ``array_name``: data packet name 60 | - ``can_package_num``: the number of CAN data frames in the data packet 61 | - ``can_id``: ``CAN ID`` of the CAN data frames in the data packet 62 | 63 | - ``cmd``: command packet message sending configuration 64 | - ``cmd_name``: instruction package name 65 | - ``can_id``: ``CAN ID`` of the CAN data frame in the instruction package 66 | - ``ctrl_len``: the data length of the instruction data frame in the CAN package 67 | - ``ctrl_data``: default data value of the instruction data frame in the CAN package 68 | 69 | ## ROS protocol 70 | 71 | - Source path: ``bridges/protocol/ros`` 72 | - ROS topic: ``uwb_raw`` 73 | - Protocol description: 74 | - ``protocol::msg::UwbRaw``: single UWB data format 75 | - Protocol path: ``bridges/protocol/ros/msg/UwbRaw.msg`` 76 | - ``protocol::msg::UwbArray``: UWB data array 77 | - Protocol path: ``bridges/protocol/ros/msg/`` 78 | 79 | ## API interface 80 | 81 | - ``bool Init(std::function function_callback, bool simulation)``: initialize configuration 82 | - ``simulation = true``: configure simulation mode 83 | - ``function_callback``: set the callback function for messages 84 | - ``Open()``: turn on the sensor 85 | - ``Stop()``: stop the sensor 86 | - ``SelfCheck()``: sensor self-check 87 | - ``LowPower()``: enter low power mode 88 | - ``SetConnectedState(bool connected)``: set the UWB device connection status 89 | - ``void Play(const std::shared_ptr info_request, std::shared_ptr info_response)``: ros2 service callback used for opening uwb 90 | 91 | ## Debug command 92 | 93 | - Get uwb topic: ``ros2 topic list | grep uwb_raw`` -------------------------------------------------------------------------------- /docs/en/cyberdog_bluetooth_en.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_bluetooth Design 2 | 3 | ## Overview 4 | 5 | Cyberdog_bluetooth is a ROS node through which you can control BLE peripherals. It uses the bluepy library to create a BLE central, connect to BLE peripherals, write data to specified characteristics, etc.
As a ROS node, it provides interfaces such as scanning, connecting to the remote controller, acquiring device info, publishing joystick data, activating UWB tracking tasks, etc. 6 | 7 | ## System Structure 8 | 9 | ### Overall Structure 10 | 11 | 1. cyberdog_bluetooth: Connects BLE peripherals, publishes data and subscribes to commands to init, activate and stop the UWB device. 12 | 13 | 2. cyberdog_uwb: Is called by device_manager to generate the UWB session_id for both master and slave devices, and to produce UWB distance and angle data. 14 | 15 | 3. cyberdog_grpc: Transmits commands from the APP to the cyberdog_bluetooth node. 16 | 17 | 4. APP: Sends bluetooth commands to cyberdog. 18 | 19 | 5. remote controller: Contains a BLE peripheral and a UWB tag; it is used for remote controlling and UWB tag tracking. 20 | 21 | 6. UWB device on the charging dock: For localization. 22 | 23 | ![overall](./image/cyberdog_bluetooth/cyberdog_bluetooth_en.svg) 24 | 25 | ### Cyberdog_bluetooth Structure 26 | 27 | 1. Bluetooth Core: Operates BLE devices through the bluepy library. 28 | 29 | 2. DFU module: Uses Bluetooth Core to perform the firmware update process. 30 | 31 | 3. UWB tracking module: Uses ROS interfaces to activate and stop the UWB tracking task. Provides activation, termination and checking interfaces. 32 | 33 | 4. ROS interface: Uses Bluetooth Core to operate BLE devices and implement the provided ROS interfaces. 34 | 35 | ![node](./image/cyberdog_bluetooth/cyberdog_bluetooth_node_en.svg) 36 | 37 | ## Operation Process 38 | 39 | ### Scan Process 40 | 41 | 1. APP sends the scan command through gRPC. 42 | 43 | 2. cyberdog_grpc receives the command and calls the "scan_bluetooth_device" ROS service. 44 | 45 | 3. cyberdog_bluetooth receives the request, uses bluepy to scan BLE peripherals and sends the result list as the service response. 46 | 47 | 4. cyberdog_grpc receives the response and sends it to the APP. 48 | 49 | 5. APP receives the BLE peripheral list and displays it. 50 | 51 | ### Connection Process 52 | 53 | 1. APP sends the connection command with the peripheral mac through gRPC. 54 | 55 | 2. cyberdog_grpc receives the command and calls the "connect_bluetooth_devices" ROS service. 56 | 57 | 3. cyberdog_bluetooth receives the request and uses bluepy to connect the specified BLE peripheral. 58 | 59 | 4. cyberdog_bluetooth reads the device type and firmware version info from the connected BLE peripheral. 60 | 61 | 5. According to the device type, cyberdog_bluetooth turns on notification and registers callback functions for uart, battery level and joystick data. 62 | 63 | 6. cyberdog_bluetooth calls the "get_uwb_mac_session_id" ROS service. 64 | 65 | 7. cyberdog_uwb receives the request, generates the session id and mac for the UWB connection and sends the response. 66 | 67 | 8. cyberdog_bluetooth receives the service response, sends the session id and mac to the connected BLE peripheral and waits for its response. 68 | 69 | 9. BLE peripheral activates the UWB device and sends a response back to cyberdog_bluetooth. 70 | 71 | 10. cyberdog_bluetooth completes the process and sends the service response to cyberdog_grpc. 72 | 73 | 11. cyberdog_grpc receives the response and sends it to the APP. 74 | 75 | 12. APP receives the response and updates the connection status. 76 | 77 | ### Disconnection Process 78 | 79 | 1. APP sends the connection command without a peripheral mac through gRPC. 80 | 81 | 2. cyberdog_grpc receives the command and calls the "connect_bluetooth_devices" ROS service. 82 | 83 | 3. cyberdog_bluetooth receives the request and uses bluepy to send a disconnect UWB command to the currently connected BLE peripheral. 84 | 85 | 4.
BLE peripheral deactivates the UWB device and sends a response back to cyberdog_bluetooth. 86 | 87 | 5. cyberdog_bluetooth removes the callback functions and disconnects the BLE peripheral. 88 | 89 | 6. cyberdog_bluetooth completes the process and sends the service response to cyberdog_grpc. 90 | 91 | 7. cyberdog_grpc receives the response and sends it to the APP. 92 | 93 | 8. APP receives the response and updates the connection status. -------------------------------------------------------------------------------- /docs/en/cyberdog_train_en.md: -------------------------------------------------------------------------------- 1 | # cyberdog_train design document 2 | 3 | ## 1 Overview 4 | This document aims to outline the design requirements and details related to the integration of the Xiao Ai training plan. The main goal of this feature is to allow Cyberdog to recognize custom voice commands and trigger corresponding actions through the Xiao Ai training plan interface. 5 | The training plan scenario requires the following conditions: 6 | 1. The robot must be in a WiFi-enabled environment that has access to the Internet. 7 | 2. The app must establish a connection with the bionic robot. 8 | 9 | ## 2 Functionality 10 | 1. When Xiao Ai recognizes the training trigger word, Cyberdog should respond accordingly, including but not limited to voice, movement, and lighting effects. 11 | 2. Users should have the ability to customize training words and their corresponding responses. 12 | 3. The system will retain a limited number of training words (currently 6), and users will typically be able to use them without the ability to query or modify them, though exceptions may be made in certain circumstances. 13 | 14 | ## 3 Architecture 15 | ![](./image/cyberdog_train/cyberdog_train_en_1.svg) 16 | ## 4 Initialization and Trigger Process 17 | ### 4.1 Initialization 18 | ![](./image/cyberdog_train/cyberdog_train_en_2.svg) 19 | ### 4.2 Trigger 20 | ![](./image/cyberdog_train/cyberdog_train_en_3.svg) 21 | ## 5 Training Plan Grouping 22 | ### 5.1 System reserved fields 23 | System reserved fields cannot be queried, deleted, or modified by users. 24 | ```json 25 | {"伸懒腰":["motion", "1"]} 26 | {"伸左手":["motion", "2"]} 27 | {"伸右手":["motion", "3"]} 28 | ``` 29 | ### 5.2 Visual reserved fields 30 | Visual reserved fields cannot be queried, deleted, or modified by users. 31 | ```json 32 | {"终止任务":["vp_task", "shutdown"]} 33 | {"暂停任务":["vp_task", "suspent"]} 34 | {"继续任务":["vp_task", "recover"]} 35 | ``` 36 | ### 5.3 User-defined fields 37 | ```json 38 | {"trigger1":["type", "value"]} 39 | {"trigger2":["type", "value"]} 40 | {"trigger3":["type", "value"]} 41 | ...... 42 | ``` 43 | ## 6 Interface 44 | 1. Training word publishing interface (basic interface) 45 | Interface form:ros topic 46 | topic name:"train_plan_word" 47 | message file:"protocol/msg/TrainPlan.msg" 48 | message content: 49 | ```json 50 | string trigger # 张三、李四、王五 51 | string type # 未定义(undefined)、运动(motion)、语音(audio)、LED(led)、导航(navigation)、跟随(follow) 52 | string value # 后空翻 53 | ``` 54 | 2. Training word addition interface 55 | Interface form:ros service 56 | service name:"set_train_plan" 57 | message file:"protocol/msg/SetTrainPlan.msg" 58 | message content: 59 | ```json 60 | string trigger 61 | string type 62 | string value 63 | --- 64 | int32 skill_id 65 | int32 code 66 | ``` 67 | 3.
Training word deletion interface 68 | Interface form:ros service 69 | service name:"delete_train_plan" 70 | message file:"protocol/msg/TrainPlan.msg" 71 | message content: 72 | ```json 73 | string type 74 | string value 75 | --- 76 | int code 77 | ``` 78 | 4. Training word modification interface 79 | Interface form:ros service 80 | service name:"modify_train_plan" 81 | message file:"protocol/msg/SetTrainPlan.msg" 82 | message content: 83 | ```json 84 | string trigger 85 | string type 86 | string value 87 | --- 88 | int code 89 | ``` 90 | 5. Interface for querying all training words 91 | Interface form:ros service 92 | service name:"query_all_train_plan" 93 | message file:"protocol/msg/TrainPlanAll.msg" 94 | message content: 95 | ```json 96 | --- 97 | int code 98 | TrainPlan[] training_set 99 | ``` 100 | 6. Interface for querying a specific training word 101 | Interface form:ros service 102 | service name:"query_train_plan" 103 | message file:"protocol/msg/TrainPlan.msg" 104 | message content: 105 | ```json 106 | string type 107 | string value 108 | --- 109 | int code 110 | TrainPlan[] training_set 111 | ``` 112 | -------------------------------------------------------------------------------- /docs/cn/cyberdog_face_cn.md: -------------------------------------------------------------------------------- 1 | # Cyberdog_face 设计文档 2 | 3 | ## 1. Cyberdog_face功能概述 4 | 5 | - __cyberdog_face__:是APP端和机器狗交互AI能力的模块之一,主要包含人脸录入、人脸识别两大功能。 6 | - __人脸录入__:cyberdog_face接受上层请求实现人脸录入、删除人脸、更新人脸等功能。 7 | - __人脸识别__:cyberdog_face接受上层请求实现人脸识别功能。 8 | 9 |
10 | 11 | ![avatar](./image/cyberdog_face/cyberdog_face_function.png) 12 | 13 |
14 | 15 | ## 2. 人脸录入/识别软件设计 16 | 17 | ### 2.1 软件框架 18 | 19 |
20 | 21 | ![avatar](./image/cyberdog_face/cyberdog_face.png) 22 | 23 |
24 | 25 | ### 2.2 设计流程图 26 | 27 |
28 | 29 | ![avatar](./image/cyberdog_face/cyberdog_face_flow.png) 30 | 31 |
32 | 33 | ### 2.3 ROS 协议 34 | - 源码路径:``bridges/protocol/ros`` 35 | - 人脸录入协议文件 36 | - ``protocol/srv/FaceEntry.srv``:人脸录入Service协议。 37 | - ``cyberdog_face_entry_srv``:人脸录入Service名称。 38 | - ``protocol/msg/FaceEntryResult``:人脸录入Topic协议。 39 | - ``face_entry_msg``:人脸录入Topic名称。 40 | - 人脸识别协议文件 41 | - ``protocol/srv/FaceRec.srv``:人脸识别Service协议。 42 | - ``cyberdog_face_recognition_srv``:人脸识别Service名称。 43 | - ``protocol/msg/FaceRecognitionResult``:人脸识别Topic协议。 44 | - ``face_rec_msg``:人脸识别Topic名称。 45 | 46 | 47 | ## 3. 人脸录入功能 48 | ``cyberdog_face``通过service接收到人脸录入请求,返回应答并向底层发送人脸录入算法请求,通过监听底层发送的topic,将人脸录入的结果通过topic向上层发送。(文末附有一个调用人脸录入service的C++客户端示例草稿。) 49 | 50 | ### 3.1 添加人脸 51 | ``cyberdog_face``向底层人脸数据库添加人脸。 52 | 53 | ``` 54 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_entry_srv protocol/srv/FaceEntry "{command: 0,username: XiaoMing,oriname: ,ishost: false}" 55 | ``` 56 | 57 | ### 3.2 取消添加人脸 58 | ``cyberdog_face``正在人脸录入中时,向底层发送取消添加录入人脸。 59 | 60 | ``` 61 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_entry_srv protocol/srv/FaceEntry "{command: 1,username: XiaoMing,oriname: ,ishost: false}" 62 | ``` 63 | 64 | ### 3.3 确认录入人脸 65 | ``cyberdog_face``在添加录入人脸成功后,再次发送确认添加录入人脸,成功后底层数据库中完成该人脸的添加。 66 | 67 | ``` 68 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_entry_srv protocol/srv/FaceEntry "{command: 2,username: XiaoMing,oriname: ,ishost: false}" 69 | ``` 70 | ### 3.4 更新录入人脸 71 | ``cyberdog_face``更新底层人脸数据库中的用户名字。 72 | 73 | ``` 74 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_entry_srv protocol/srv/FaceEntry "{command: 3,username: XiaoHong,oriname: XiaoMing,ishost: false}" 75 | ``` 76 | 77 | ### 3.5 删除人脸信息 78 | ``cyberdog_face``接收到删除人脸的id请求后,从底层数据库中删除指定id的人脸,成功返回true,失败返回false。 79 | 80 | ``` 81 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_entry_srv protocol/srv/FaceEntry "{command: 4,username: XiaoHong,oriname: ,ishost: false}" 82 | ``` 83 | 84 | ### 3.6 获取所有人脸信息 85 | ``cyberdog_face``获取底层数据库中所有人脸的信息。 86 | 87 | ``` 88 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_entry_srv protocol/srv/FaceEntry "{command: 5,username: ,oriname: ,ishost: false}" 89 | ``` 90 | 91 | ## 4. 人脸识别功能 92 | ``cyberdog_face``通过service接收到人脸识别请求,返回应答并向底层发送人脸识别算法请求,通过监听底层发送的topic,将人脸识别的结果通过topic向上层发送。 93 | 94 | ### 4.1 识别人脸数据库中当前人脸是否存在 95 | ``cyberdog_face``判断当前识别的人脸是否在底层数据库中存在;如存在则将识别到人脸的昵称发送出去。 96 | 97 | ``` 98 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_recognition_srv protocol/srv/FaceRec "{command: 0,username: XiaoMing,id: 11111111,timeout: 30}" 99 | ``` 100 | 101 | ### 4.2 识别人脸数据库中指定username的人脸是否存在 102 | ``cyberdog_face``判断识别指定昵称的人脸是否存在于与之昵称对应的底层数据库中;识别成功发送该用户昵称;否则,未识别到人脸。 103 | 104 | ``` 105 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_recognition_srv protocol/srv/FaceRec "{command: 1,username: XiaoMing,id: 11111111,timeout: 30}" 106 | ``` 107 | 108 | ### 4.3 取消识别用户的人脸 109 | ``cyberdog_face``正在进行人脸识别中时,发送request取消当前的人脸识别。 110 | 111 | ``` 112 | ros2 service call /`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"`/cyberdog_face_recognition_srv protocol/srv/FaceRec "{command: 2,username: XiaoMing,id: 11111111,timeout: 30}" 113 | ``` 114 | 115 | ## 5.
运行cyberdog_face模块 116 | 117 | ``` 118 | ros2 run cyberdog_face cyberdog_face --ros-args -r __ns:=/`ros2 node list | grep "mi_" | head -n 1 | cut -f 2 -d "/"` 119 | ``` --------------------------------------------------------------------------------
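附:下面是一个调用人脸录入service的C++客户端示例草稿,仅作示意:其中假设由protocol包生成的C++接口头文件为`protocol/srv/face_entry.hpp`(按rosidl默认命名规则推测),节点名`face_entry_demo_client`为假设值;请求字段与上文命令行示例一致,实际service位于机器狗命名空间下,运行时可像上文`ros2 run`示例一样通过`--ros-args -r __ns:=...`指定命名空间。

```C++
#include <chrono>
#include <memory>
#include "rclcpp/rclcpp.hpp"
#include "protocol/srv/face_entry.hpp"  // 假设的生成头文件路径

// 示例:向cyberdog_face发送"添加人脸"请求(command: 0)
int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  auto node = rclcpp::Node::make_shared("face_entry_demo_client");  // 假设的节点名
  auto client = node->create_client<protocol::srv::FaceEntry>("cyberdog_face_entry_srv");

  auto request = std::make_shared<protocol::srv::FaceEntry::Request>();
  request->command = 0;            // 0: 添加人脸
  request->username = "XiaoMing";  // 录入人脸的昵称
  request->oriname = "";
  request->ishost = false;

  if (!client->wait_for_service(std::chrono::seconds(3))) {
    RCLCPP_ERROR(node->get_logger(), "cyberdog_face_entry_srv not available");
    rclcpp::shutdown();
    return 1;
  }
  auto future = client->async_send_request(request);
  if (rclcpp::spin_until_future_complete(node, future) == rclcpp::FutureReturnCode::SUCCESS) {
    RCLCPP_INFO(node->get_logger(), "FaceEntry request finished");
  }
  rclcpp::shutdown();
  return 0;
}
```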