├── LICENSE ├── README.md ├── camera_calibration ├── LICENSE ├── camera_calibration │ ├── __init__.py │ ├── calibration_chessboard.yaml │ ├── camera_calibration.py │ ├── camera_color_viewer_realsense.py │ ├── camera_viewer_azure_kinect.py │ └── realsense │ │ ├── cali_1.jpg │ │ ├── cali_10.jpg │ │ ├── cali_2.jpg │ │ ├── cali_3.jpg │ │ ├── cali_4.jpg │ │ ├── cali_5.jpg │ │ ├── cali_6.jpg │ │ ├── cali_7.jpg │ │ ├── cali_8.jpg │ │ └── cali_9.jpg ├── package.xml ├── resource │ └── camera_calibration ├── setup.cfg ├── setup.py └── test │ ├── test_copyright.py │ ├── test_flake8.py │ └── test_pep257.py ├── doc ├── aruco.png ├── cali_experiment_combined.jpg ├── frame.png ├── handeye_setup_2.png ├── real_positions_1.gif ├── realsense_setup.jpg └── sim_positions_1.gif └── handeye_realsense ├── LICENSE ├── README.md ├── config.yaml ├── handeye_realsense ├── __init__.py ├── __pycache__ │ ├── __init__.cpython-310.pyc │ ├── aruco_estimation.cpython-310.pyc │ ├── handeye_estimation.cpython-310.pyc │ ├── joint_positions.cpython-310.pyc │ ├── publish_eye2hand.cpython-310.pyc │ └── robot_state_estimation.cpython-310.pyc ├── aruco_estimation.py ├── handeye_estimation.py ├── publish_eye2hand.py └── robot_state_estimation.py ├── launch └── taking_sample_launch.py ├── package.xml ├── realsense_info.yaml ├── resource ├── handeye_realsense ├── handeye_result_realsense.yaml ├── marker_data_realsense.yaml ├── marker_pose_realsense_1.jpg ├── marker_pose_realsense_2.jpg ├── marker_pose_realsense_3.jpg ├── marker_pose_realsense_4.jpg ├── marker_pose_realsense_5.jpg ├── marker_pose_realsense_6.jpg ├── marker_pose_realsense_7.jpg ├── marker_pose_realsense_8.jpg ├── marker_pose_realsense_9.jpg └── robot_data_realsense.yaml ├── setup.cfg ├── setup.py └── test ├── test_copyright.py ├── test_flake8.py └── test_pep257.py /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 
35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # handeye_calibration_ros2 2 | 3 | This is a ROS2 python handeye calibration package. It is hardware-independent and can be applied to any depth/RGB camera with ROS2 wrappers available. 4 | 5 | In summary, it provides functionality includes: 6 | 1. Sample and save the robot pose and tracking object pose with a simple keyboard click. 7 | 2. 
Compute and save the result of the eye-in-hand calibration using the Tsai-Lenz method as implemented in OpenCV (a minimal sketch of the underlying call is shown below). 8 | 3. Publish the hand-eye calibration result and visualize it in rviz. 9 | 10 | We also provide a simulation demo where you can add a virtual camera to try out the package; see [handeye_calibration_ros2_sim](https://github.com/shengyangzhuang/handeye_calibration_ros2_sim). 11 | 12 | To adapt the package to your setup, you only need to edit a single information file: 13 | 1. The camera image topic 14 | 2. The robot link names 15 | 3. The camera matrix and distortion coefficients 16 | 17 | You can also adjust the other parameters in the information file to your preference. 18 | 19 | **Please feel free to raise an _issue_, as we are keen to test the package under different hardware and software environments. This helps us improve the package and make it truly universal!** 20 | 21 |
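For reference, calibration step 2 above essentially reduces to a single OpenCV call. The following is a minimal sketch rather than the package's exact code (the function and variable names are illustrative), and it assumes the sampled poses have already been converted into rotation matrices and translation vectors, one pair per sample: the end-effector pose in the robot base frame and the marker pose in the camera frame.

```python
# Minimal, illustrative eye-in-hand solve; not the package's exact implementation.
import numpy as np
import cv2


def solve_eye_in_hand(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam):
    """Return the 4x4 camera-to-gripper transform estimated with the Tsai-Lenz solver."""
    R_cam2gripper, t_cam2gripper = cv2.calibrateHandEye(
        R_gripper2base, t_gripper2base,   # robot poses recorded at each key press
        R_target2cam, t_target2cam,       # matching marker poses seen by the camera
        method=cv2.CALIB_HAND_EYE_TSAI,   # Tsai-Lenz; PARK, HORAUD, DANIILIDIS also exist
    )
    T = np.eye(4)
    T[:3, :3] = R_cam2gripper
    T[:3, 3] = t_cam2gripper.flatten()
    return T
```

OpenCV accepts any number of pose pairs above the minimum; in practice, a larger set of well-spread poses tends to give a noticeably more stable estimate.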

22 | ![sim_positions_1](doc/sim_positions_1.gif) 23 | ![real_positions_1](doc/real_positions_1.gif)
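The tracked object in this package is an ArUco marker (see the hardware list below). The sketch below shows roughly how a marker pose in the camera frame can be obtained; it assumes an OpenCV build that still exposes the classic `cv2.aruco` functions (the contrib API before 4.7), and the dictionary choice, marker length constant, and names are illustrative assumptions rather than the exact contents of `aruco_estimation.py`.

```python
# Illustrative marker pose estimation; not necessarily how aruco_estimation.py does it.
# Assumes the classic cv2.aruco API (OpenCV contrib builds before 4.7) and that K (3x3
# camera matrix) and D (distortion coefficients) come from the intrinsic calibration step.
# The dictionary below is an assumption, chosen only because its ID range covers marker 365.
import cv2

MARKER_LENGTH = 0.15  # marker side length in meters (the 150 mm marker listed below)


def estimate_marker_pose(bgr_image, K, D):
    gray = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2GRAY)
    aruco_dict = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_ARUCO_ORIGINAL)
    corners, ids, _ = cv2.aruco.detectMarkers(gray, aruco_dict)
    if ids is None:
        return None  # no marker detected in this frame
    rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(corners, MARKER_LENGTH, K, D)
    R, _ = cv2.Rodrigues(rvecs[0])  # rotation vector to 3x3 rotation matrix
    return R, tvecs[0].reshape(3)
```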

25 | 26 | ### Hardware and Software Tested: 27 | 28 | #### Hardware: 29 | 1. **Robot**: KUKA LBR iiwa 7, AgileX Robotics PiPER 30 | 2. **Camera**: 31 | - Intel RealSense D415, D435 32 | - Microsoft Azure Kinect 33 | 3. **Chessboard** 34 | 4. **ArUco Marker**: 35 | - ID: 365 36 | - Side Length: 150mm 37 | 38 | #### Software: 39 | 1. **Robot Driver**: lbr_fri_ros2_stack 40 | 2. **Camera Drivers**: 41 | - realsense-ros 42 | - Azure_Kinect_ROS_Driver 43 | 3. **Operating System**: Ubuntu 22.04 44 | 4. **ROS 2**: Humble 45 | 46 | 47 | ## Table of Contents 48 | 1. [Overview](#overview) 49 | 2. [Installation](#installation) 50 | 3. [Usage](#usage) 51 | 4. [Example](#example) 52 | 5. [Contributing](#contributing) 53 | 6. [Citation](#citation) 54 | 55 | ## Overview 56 | This package provides a solution for hand-eye calibration between a robot arm and a camera in a ROS2 environment. It is designed to support various robotic and vision systems, offering flexibility for different hardware configurations. 57 | 58 | ## Installation 59 | 60 | 1. Create your workspace and clone this repository: 61 | ```bash 62 | mkdir -p handeye_calibration_ws/src 63 | cd handeye_calibration_ws/src 64 | git clone https://github.com/shengyangzhuang/handeye_calibration_ros2.git 65 | ``` 66 | 67 | 2. Install dependencies: 68 | ```bash 69 | rosdep install --from-paths src --ignore-src -r -y 70 | ``` 71 | 72 | 3. Build the package 73 | 74 | RealSense: 75 | ``` 76 | colcon build --packages-select handeye_realsense 77 | ``` 78 | 79 | Azure Kinect: 80 | ``` 81 | colcon build --packages-select handeye_ak 82 | ``` 83 | 84 | 4. Source your ROS2 workspace: 85 | ``` 86 | source install/setup.bash 87 | ``` 88 | ## Prerequisits 89 | 90 | 1. Install the [RealSense ROS2 wrapper](https://github.com/IntelRealSense/realsense-ros). 91 | 92 | ## Usage 93 | 1. Calibrate the camera intrinsics and record the intrinsic matrix `K` and distortion coefficient `D` at `realsense_info.yaml`. We also provided a package using the chessboard to calibrate the camera intrinsics, see `camera_calibration`. 94 | 95 | 2. Change the parameters at `config.yaml` 96 | 97 | 3. Start the camera node 98 | 99 | RealSense: 100 | ```bash 101 | ros2 run realsense2_camera realsense2_camera_node 102 | ``` 103 | 104 | 4. To start the hand-eye calibration, launch the node as follows and press key **q** to record marker and robot poses: 105 | ```bash 106 | ros2 launch handeye_realsense taking_sample_launch.py 107 | ``` 108 | 109 | 5. To calculate the hand-eye calibration result: 110 | ```bash 111 | ros2 run handeye_realsense handeye 112 | ``` 113 | 114 | 6. To visualize the result in rviz, publish the hand-eye transformation by: 115 | ```bash 116 | ros2 run handeye_realsense eye2hand 117 | ``` 118 | 119 | ## Example 120 | 121 | We provided an example usage with RealSense camera. Please refer [**here**](https://github.com/shengyangzhuang/handeye_calibration_ros2/tree/main/handeye_realsense). 122 | 123 | A video of hand-eye calibration process can be seen [**here**](https://youtu.be/EUSxnzzP8qk?si=HIvo1J5uhRv7PXBF), where we demonstrated ensuring calibration accuracy through simulation and replicating the similar poses in real LBR iiwa 7 robot and RealSense depth camera. 124 | 125 | 126 | 127 | 128 | 129 | 130 | ## Contributing 131 | 132 | We welcome contributions from the community! If you'd like to contribute, please follow these steps: 133 | 134 | 1. **Fork the repository**: 135 | Click the "Fork" button at the top of this repository to create a copy on your GitHub account. 
136 | 137 | 2. **Create a feature branch**: 138 | Clone the forked repository to your local machine and create a new branch for your feature or bug fix: 139 | ```bash 140 | git checkout -b feature/my-feature 141 | ``` 142 | 3. **Make your changes**: 143 | Implement your changes in the code, and don't forget to write tests if applicable. 144 | 145 | 4. **Commit your changes**: 146 | Once your changes are ready, commit them with a descriptive commit message: 147 | ```bash 148 | git commit -m 'Add some feature' 149 | ``` 150 | Push your changes: Push your changes to the branch on your forked repository: 151 | ```bash 152 | git push origin feature/my-feature 153 | ``` 154 | Open a Pull Request: 155 | Go to the original repository on GitHub and open a Pull Request (PR). Make sure to describe your changes in detail and explain why they are necessary. 156 | 157 | ## Citation 158 | If you enjoyed using this package, we would really appreciate it if you could leave a ⭐ and / or cite it! 159 | 160 | ``` 161 | @mastersthesis{zhuang2024multirobot, 162 | author = {Zhuang, Shengyang}, 163 | title = {Multi-Robot System Prototyping for Cooperative Control in Robot-Assisted Spine Surgery}, 164 | school = {Imperial College London}, 165 | year = {2024}, 166 | } 167 | ``` 168 | -------------------------------------------------------------------------------- /camera_calibration/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. 
For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/__init__.py -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/calibration_chessboard.yaml: -------------------------------------------------------------------------------- 1 | %YAML:1.0 2 | --- 3 | K: !!opencv-matrix 4 | rows: 3 5 | cols: 3 6 | dt: d 7 | data: [ 500., 0., 320., 0., 500., 240., 0., 0., 1. ] 8 | D: !!opencv-matrix 9 | rows: 1 10 | cols: 5 11 | dt: d 12 | data: [ 0., 0., 0., 0., 0. ] 13 | 14 | -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/camera_calibration.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | from __future__ import print_function # Python 2/3 compatibility 7 | import cv2 # Import the OpenCV library to enable computer vision 8 | import numpy as np # Import the NumPy scientific computing library 9 | import glob # Used to get retrieve files that have a specified pattern 10 | 11 | # Chessboard dimensions 12 | number_of_squares_X = 12 # Number of chessboard squares along the x-axis 13 | number_of_squares_Y = 9 # Number of chessboard squares along the y-axis 14 | nX = number_of_squares_X - 1 # Number of interior corners along x-axis 15 | nY = number_of_squares_Y - 1 # Number of interior corners along y-axis 16 | square_size = 0.025 # Size, in meters, of a square side 17 | 18 | # Set termination criteria. We stop either when an accuracy is reached or when 19 | # we have finished a certain number of iterations. 20 | criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001) 21 | 22 | # Define real world coordinates for points in the 3D coordinate frame 23 | # Object points are (0,0,0), (1,0,0), (2,0,0) ...., (5,8,0) 24 | object_points_3D = np.zeros((nX * nY, 3), np.float32) 25 | 26 | # These are the x and y coordinates 27 | object_points_3D[:,:2] = np.mgrid[0:nY, 0:nX].T.reshape(-1, 2) 28 | 29 | object_points_3D = object_points_3D * square_size 30 | 31 | # Store vectors of 3D points for all chessboard images (world coordinate frame) 32 | object_points = [] 33 | 34 | # Store vectors of 2D points for all chessboard images (camera coordinate frame) 35 | image_points = [] 36 | 37 | def main(): 38 | 39 | # Get the file path for images in the current directory 40 | images = glob.glob('*.jpg') 41 | print(f"Found images: {images}") # This will print the list of found images 42 | 43 | # If no images are found, exit the function 44 | if not images: 45 | print("No images found. 
Check the file path and image format.") 46 | return 47 | 48 | # Go through each chessboard image, one by one 49 | for image_file in images: 50 | 51 | # Load the image 52 | image = cv2.imread(image_file) 53 | 54 | # Convert the image to grayscale 55 | gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) 56 | 57 | # Find the corners on the chessboard 58 | success, corners = cv2.findChessboardCorners(gray, (nY, nX), None) 59 | 60 | # If the corners are found by the algorithm, draw them 61 | if success == True: 62 | 63 | # Append object points 64 | object_points.append(object_points_3D) 65 | 66 | # Find more exact corner pixels 67 | corners_2 = cv2.cornerSubPix(gray, corners, (11,11), (-1,-1), criteria) 68 | 69 | # Append image points 70 | image_points.append(corners_2) 71 | 72 | # Draw the corners 73 | cv2.drawChessboardCorners(image, (nY, nX), corners_2, success) 74 | 75 | # Display the image. Used for testing. 76 | cv2.imshow("Image", image) 77 | 78 | # Display the window for a short period. Used for testing. 79 | cv2.waitKey(1000) 80 | 81 | # Perform camera calibration to return the camera matrix, distortion coefficients, rotation and translation vectors etc 82 | ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(object_points, 83 | image_points, 84 | gray.shape[::-1], 85 | None, 86 | None) 87 | 88 | # Save parameters to a file 89 | cv_file = cv2.FileStorage('calibration_chessboard_azure_kinect.yaml', cv2.FILE_STORAGE_WRITE) 90 | cv_file.write('K', mtx) 91 | cv_file.write('D', dist) 92 | cv_file.release() 93 | 94 | # Load the parameters from the saved file 95 | cv_file = cv2.FileStorage('calibration_chessboard_azure_kinect.yaml', cv2.FILE_STORAGE_READ) 96 | mtx = cv_file.getNode('K').mat() 97 | dst = cv_file.getNode('D').mat() 98 | cv_file.release() 99 | 100 | # Display key parameter outputs of the camera calibration process 101 | print("Camera matrix:") 102 | print(mtx) 103 | 104 | print("\n Distortion coefficient:") 105 | print(dist) 106 | 107 | # Close all windows 108 | cv2.destroyAllWindows() 109 | 110 | if __name__ == '__main__': 111 | print(__doc__) 112 | main() -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/camera_color_viewer_realsense.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 
3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | import cv2 7 | import numpy as np 8 | import pyrealsense2 as rs 9 | 10 | #POSE_NAME = 'shifted_1' 11 | #POSE_NAME = 'shifted' 12 | #POSE_NAME = 'original' 13 | POSE_NAME = 'cali_10' 14 | 15 | def main(): 16 | # Create a pipeline 17 | pipeline = rs.pipeline() 18 | 19 | # Create a config and configure the pipeline to stream 20 | # different resolutions of color and depth streams 21 | config = rs.config() 22 | config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30) 23 | config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30) 24 | 25 | # Start streaming 26 | pipeline.start(config) 27 | 28 | try: 29 | while True: 30 | # Wait for a coherent pair of frames: depth and color 31 | frames = pipeline.wait_for_frames() 32 | color_frame = frames.get_color_frame() 33 | if not color_frame: 34 | continue 35 | 36 | # Convert images to numpy arrays 37 | color_image = np.asanyarray(color_frame.get_data()) 38 | 39 | # Show images 40 | cv2.imshow('RealSense', color_image) 41 | key = cv2.waitKey(1) 42 | if key & 0xFF == ord('q') or key == 27: 43 | cv2.destroyAllWindows() 44 | break 45 | 46 | # Save the last captured image 47 | cv2.imwrite(f"{POSE_NAME}.jpg", color_image) 48 | 49 | finally: 50 | # Stop streaming 51 | pipeline.stop() 52 | 53 | if __name__ == "__main__": 54 | main() 55 | -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/camera_viewer_azure_kinect.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | import cv2 7 | import numpy as np 8 | from pyk4a import PyK4A, Config, ColorResolution, DepthMode 9 | 10 | # Pose name setting 11 | POSE_NAME = 'cali_10' 12 | 13 | def main(): 14 | # Load camera with specific configuration 15 | k4a = PyK4A( 16 | Config( 17 | color_resolution=ColorResolution.RES_720P, 18 | depth_mode=DepthMode.OFF, # Disable depth capture 19 | synchronized_images_only=False, # No need for synchronization as depth is not used 20 | ) 21 | ) 22 | 23 | # Start the camera 24 | k4a.start() 25 | 26 | try: 27 | while True: 28 | # Get the next capture (only color frame) 29 | capture = k4a.get_capture() 30 | 31 | if capture.color is not None: 32 | # Color image is a numpy array 33 | color_image = capture.color[..., :3] # Get BGRA color and strip alpha channel 34 | 35 | # Display the color image 36 | cv2.imshow('Azure Kinect', color_image) 37 | 38 | # Check for user input to close the program 39 | key = cv2.waitKey(1) 40 | if key & 0xFF == ord('q') or key == 27: 41 | cv2.destroyAllWindows() 42 | break 43 | 44 | # Save the last captured image 45 | cv2.imwrite(f"{POSE_NAME}.jpg", color_image) 46 | 47 | finally: 48 | # Stop the camera 49 | k4a.stop() 50 | 51 | if __name__ == "__main__": 52 | main() 53 | -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_1.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_10.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_10.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_2.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_3.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_4.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_5.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_6.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_6.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_7.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_7.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_8.jpg -------------------------------------------------------------------------------- /camera_calibration/camera_calibration/realsense/cali_9.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/camera_calibration/realsense/cali_9.jpg -------------------------------------------------------------------------------- /camera_calibration/package.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | camera_calibration 5 | 0.0.0 6 | TODO: Package description 7 | szhuang 8 | Apache-2.0 9 | 10 | ament_copyright 11 | ament_flake8 12 | ament_pep257 13 | python3-pytest 14 | 15 | 16 | ament_python 17 | 18 | 19 | -------------------------------------------------------------------------------- /camera_calibration/resource/camera_calibration: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/camera_calibration/resource/camera_calibration -------------------------------------------------------------------------------- /camera_calibration/setup.cfg: -------------------------------------------------------------------------------- 1 | [develop] 2 | script_dir=$base/lib/camera_calibration 3 | [install] 4 | install_scripts=$base/lib/camera_calibration 5 | -------------------------------------------------------------------------------- /camera_calibration/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | 3 | package_name = 'camera_calibration' 4 | 5 | setup( 6 | name=package_name, 7 | version='0.0.0', 8 | packages=find_packages(exclude=['test']), 9 | data_files=[ 10 | ('share/ament_index/resource_index/packages', 11 | ['resource/' + package_name]), 12 | ('share/' + package_name, ['package.xml']), 13 | ], 14 | install_requires=['setuptools'], 15 | zip_safe=True, 16 | maintainer='szhuang', 17 | maintainer_email='szhuang@todo.todo', 18 | description='TODO: Package description', 19 | license='Apache-2.0', 20 | tests_require=['pytest'], 21 | entry_points={ 22 | 'console_scripts': [ 23 | ], 24 | }, 25 | ) 26 | -------------------------------------------------------------------------------- /camera_calibration/test/test_copyright.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Open Source Robotics Foundation, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from ament_copyright.main import main 16 | import pytest 17 | 18 | 19 | # Remove the `skip` decorator once the source file(s) have a copyright header 20 | @pytest.mark.skip(reason='No copyright header has been placed in the generated source file.') 21 | @pytest.mark.copyright 22 | @pytest.mark.linter 23 | def test_copyright(): 24 | rc = main(argv=['.', 'test']) 25 | assert rc == 0, 'Found errors' 26 | -------------------------------------------------------------------------------- /camera_calibration/test/test_flake8.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Open Source Robotics Foundation, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 
5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from ament_flake8.main import main_with_errors 16 | import pytest 17 | 18 | 19 | @pytest.mark.flake8 20 | @pytest.mark.linter 21 | def test_flake8(): 22 | rc, errors = main_with_errors(argv=[]) 23 | assert rc == 0, \ 24 | 'Found %d code style errors / warnings:\n' % len(errors) + \ 25 | '\n'.join(errors) 26 | -------------------------------------------------------------------------------- /camera_calibration/test/test_pep257.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Open Source Robotics Foundation, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from ament_pep257.main import main 16 | import pytest 17 | 18 | 19 | @pytest.mark.linter 20 | @pytest.mark.pep257 21 | def test_pep257(): 22 | rc = main(argv=['.', 'test']) 23 | assert rc == 0, 'Found code style errors / warnings' 24 | -------------------------------------------------------------------------------- /doc/aruco.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/doc/aruco.png -------------------------------------------------------------------------------- /doc/cali_experiment_combined.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/doc/cali_experiment_combined.jpg -------------------------------------------------------------------------------- /doc/frame.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/doc/frame.png -------------------------------------------------------------------------------- /doc/handeye_setup_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/doc/handeye_setup_2.png -------------------------------------------------------------------------------- /doc/real_positions_1.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/doc/real_positions_1.gif 
-------------------------------------------------------------------------------- /doc/realsense_setup.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/doc/realsense_setup.jpg -------------------------------------------------------------------------------- /doc/sim_positions_1.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/doc/sim_positions_1.gif -------------------------------------------------------------------------------- /handeye_realsense/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /handeye_realsense/README.md: -------------------------------------------------------------------------------- 1 | # handeye_realsense 2 | 3 | This is a ROS2 handeye calibration package for RealSense depth camera. Here we provided a more detailed example for the usage. 4 | 5 | A video of the full process can be viewed [here](https://youtu.be/EUSxnzzP8qk?si=RNuauInFx9T1qH7X) 6 | 7 | ## Example with RealSense D415 8 | 1. **Camera and robot setup** 9 | 10 | Prepare a camera holder. 
The image below shows two example camera holders designed for the KUKA LBR iiwa 7/14, one for the RealSense and one for the Azure Kinect depth camera. 11 | 12 |

13 | Camera and robot setup 14 |

15 | Prepare an ArUco marker; a helper sketch for generating one follows the images below. Attach the camera to the robot. 16 | 17 |

18 | aruco 19 | realsense_setup 20 |
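If you do not already have a printed marker, the short sketch below (an illustrative helper, not part of this package) generates one with OpenCV's aruco module. It assumes `opencv-contrib-python` is installed; the marker ID and pixel size are arbitrary example values, and the dictionary matches `aruco_dictionary_name` in `config.yaml`.

```
import cv2

# Generate a printable ArUco marker image.
# DICT_ARUCO_ORIGINAL matches aruco_dictionary_name in config.yaml;
# marker ID 0 and a 600 px side length are arbitrary example choices.
dictionary = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_ARUCO_ORIGINAL)
marker = cv2.aruco.generateImageMarker(dictionary, 0, 600)  # use cv2.aruco.drawMarker(...) on OpenCV < 4.7
cv2.imwrite("aruco_marker_0.png", marker)
```

Print the marker at a known physical size and enter that side length in metres as `aruco_marker_side_length` in `config.yaml`.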

21 | 22 | 3. **Camera intrinsic calibration** 23 | 24 | Calibrate the camera matrix `K` and the distortion coefficients `D`. This can be done with the `camera_calibration` package we provide, using a chessboard. If you use a RealSense or Azure Kinect camera, you can also read the parameters directly from the camera's `camera_info` topic with `ros2 topic echo "/specific_camera_topic"`. 25 | 26 | Once you are sure the camera parameters are correct, record them in `handeye_realsense/realsense_info.yaml` (a minimal script for writing this file is sketched after the sampling step below). 27 | 28 | 4. **Specifying the parameters of your robot** 29 | 30 | In the `handeye_realsense/config.yaml` file, modify `aruco_marker_side_length`, `image_topic`, and the relevant robot link names to match your setup. 31 | 32 | 5. **Bring up the robot and camera** 33 | 34 | If you use lbr_fri_ros2_stack for KUKA LBR robots, please refer to the guidelines [here](https://github.com/lbr-stack/lbr_fri_ros2_stack). 35 | 36 | Start the RealSense camera ROS2 driver: 37 | 38 | ``` 39 | ros2 run realsense2_camera realsense2_camera_node 40 | ``` 41 | 42 | 7. **Taking samples** 43 | Use MoveIt, a custom control script, or admittance control to guide the robot to distinct poses and take a unique sample at each pose. 44 | 45 | Go into your hand-eye calibration workspace, build the package, and launch the sampling nodes: 46 | 47 | ``` 48 | cd handeye_calibration_ws 49 | colcon build --packages-select handeye_realsense 50 | source install/setup.bash 51 | ros2 launch handeye_realsense taking_sample_launch.py 52 | ``` 53 |
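As referenced in step 3, below is a minimal sketch (not part of this package) that grabs `K` and `D` from a ROS2 `camera_info` topic and writes them in the OpenCV `FileStorage` layout that `aruco_estimation.py` reads back. The topic name and output path are assumptions; adapt them to your setup.

```
import cv2
import numpy as np
import rclpy
from rclpy.node import Node
from sensor_msgs.msg import CameraInfo


class IntrinsicsDumper(Node):
    """One-shot helper: save K and D from a CameraInfo message to realsense_info.yaml."""

    def __init__(self):
        super().__init__('intrinsics_dumper')
        self.saved = False
        # Topic name is an assumption; check `ros2 topic list` for your camera.
        self.subscription = self.create_subscription(
            CameraInfo, '/camera/camera/color/camera_info', self.callback, 10)

    def callback(self, msg):
        if self.saved:
            return
        K = np.array(msg.k, dtype=np.float64).reshape(3, 3)
        D = np.array(msg.d, dtype=np.float64).reshape(1, -1)
        # Same OpenCV FileStorage format that aruco_estimation.py reads back.
        fs = cv2.FileStorage('src/handeye_realsense/realsense_info.yaml', cv2.FILE_STORAGE_WRITE)
        fs.write('K', K)
        fs.write('D', D)
        fs.release()
        self.saved = True
        self.get_logger().info('Saved K and D to realsense_info.yaml; press Ctrl+C to exit.')


def main():
    rclpy.init()
    node = IntrinsicsDumper()
    try:
        rclpy.spin(node)
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()
```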

54 | cali_experiment_combined 55 |

56 | 57 | 8. **Computing and publishing the result** 58 | 59 | To compute the eye-in-hand calibration result (the camera pose relative to the end-effector frame): 60 | 61 | ``` 62 | ros2 run handeye_realsense handeye 63 | ``` 64 | 65 | To publish the result and visualize it in RViz: 66 | 67 | ``` 68 | ros2 run handeye_realsense eye2hand 69 | ``` 70 | 71 |
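Under the hood, `handeye` calls OpenCV's `cv2.calibrateHandEye` (Tsai's method, as set in `handeye_estimation.py`) to estimate the fixed camera-to-end-effector transform from the paired robot and marker poses. The result file stores the 3x3 rotation flattened row-major plus the translation in metres. A small sketch for loading and inspecting it (the path matches `handeye_result_file_name` in `config.yaml`):

```
import numpy as np
import yaml
from scipy.spatial.transform import Rotation as R

# Load the result written by `ros2 run handeye_realsense handeye`.
with open('src/handeye_realsense/resource/handeye_result_realsense.yaml') as f:
    result = yaml.safe_load(f)

T = np.eye(4)
T[:3, :3] = np.array(result['rotation']).reshape(3, 3)  # row-major 3x3 rotation
T[:3, 3] = np.array(result['translation'])              # translation in metres

print('T_ee_camera =\n', T)
print('quaternion (x, y, z, w):', R.from_matrix(T[:3, :3]).as_quat())
```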

72 | cali_experiment_combined 73 |
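As a quick sanity check while `eye2hand` is running, the sketch below (illustrative only, not part of this package) listens for the broadcast transform and prints it. The frame names follow `ee_link` and `calculated_camera_optical_frame_name` in `config.yaml`.

```
import rclpy
from rclpy.node import Node
from rclpy.time import Time
from tf2_ros import TransformException
from tf2_ros.buffer import Buffer
from tf2_ros.transform_listener import TransformListener


class HandeyeCheck(Node):
    """Print the end-effector to camera transform published by eye2hand."""

    def __init__(self):
        super().__init__('handeye_check')
        self.tf_buffer = Buffer()
        self.tf_listener = TransformListener(self.tf_buffer, self)
        self.timer = self.create_timer(1.0, self.lookup)

    def lookup(self):
        try:
            t = self.tf_buffer.lookup_transform(
                'lbr/link_ee', 'camera/color/optical_frame', Time())
            self.get_logger().info(
                f'translation: {t.transform.translation} rotation: {t.transform.rotation}')
        except TransformException as ex:
            self.get_logger().warn(f'Transform not available yet: {ex}')


def main():
    rclpy.init()
    node = HandeyeCheck()
    try:
        rclpy.spin(node)
    except KeyboardInterrupt:
        pass


if __name__ == '__main__':
    main()
```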

74 | -------------------------------------------------------------------------------- /handeye_realsense/config.yaml: -------------------------------------------------------------------------------- 1 | aruco_dictionary_name: "DICT_ARUCO_ORIGINAL" 2 | aruco_marker_side_length: 0.150 3 | camera_calibration_parameters_filename: "src/handeye_realsense/realsense_info.yaml" 4 | image_topic: "/camera/camera/color/image_raw" 5 | aruco_marker_name: "aruco_marker" 6 | calculated_camera_optical_frame_name: "camera/color/optical_frame" 7 | marker_data_file_name: "src/handeye_realsense/resource/marker_data_realsense.yaml" 8 | image_filename: "src/handeye_realsense/resource/marker_pose_realsense_{pose_count}.jpg" 9 | robot_data_file_name: "src/handeye_realsense/resource/robot_data_realsense.yaml" 10 | handeye_result_file_name: "src/handeye_realsense/resource/handeye_result_realsense.yaml" 11 | handeye_result_profile_file_name: "src/handeye_realsense/resource/handeye_result_realsense_profile.yaml" 12 | 13 | base_link: "lbr/link_0" 14 | ee_link: "lbr/link_ee" 15 | world_frame: "world" 16 | -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/handeye_realsense/__init__.py -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/handeye_realsense/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/__pycache__/aruco_estimation.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/handeye_realsense/__pycache__/aruco_estimation.cpython-310.pyc -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/__pycache__/handeye_estimation.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/handeye_realsense/__pycache__/handeye_estimation.cpython-310.pyc -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/__pycache__/joint_positions.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/handeye_realsense/__pycache__/joint_positions.cpython-310.pyc -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/__pycache__/publish_eye2hand.cpython-310.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/handeye_realsense/__pycache__/publish_eye2hand.cpython-310.pyc -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/__pycache__/robot_state_estimation.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/handeye_realsense/__pycache__/robot_state_estimation.cpython-310.pyc -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/aruco_estimation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | import rclpy 7 | from rclpy.node import Node 8 | from sensor_msgs.msg import Image 9 | from geometry_msgs.msg import TransformStamped 10 | from tf2_ros import TransformBroadcaster, StaticTransformBroadcaster 11 | import transforms3d as tf_transformations 12 | from scipy.spatial.transform import Rotation as R 13 | from rclpy.qos import QoSProfile, DurabilityPolicy 14 | from cv_bridge import CvBridge 15 | from std_msgs.msg import String 16 | 17 | import cv2 18 | import numpy as np 19 | import yaml 20 | 21 | # Create a QoS profile for subscribing to /tf_static 22 | qos_profile = QoSProfile(depth=100, durability=DurabilityPolicy.TRANSIENT_LOCAL) 23 | 24 | # ArUco dictionary lookup 25 | ARUCO_DICT = { 26 | "DICT_4X4_50": cv2.aruco.DICT_4X4_50, 27 | "DICT_4X4_100": cv2.aruco.DICT_4X4_100, 28 | "DICT_4X4_250": cv2.aruco.DICT_4X4_250, 29 | "DICT_4X4_1000": cv2.aruco.DICT_4X4_1000, 30 | "DICT_5X5_50": cv2.aruco.DICT_5X5_50, 31 | "DICT_5X5_100": cv2.aruco.DICT_5X5_100, 32 | "DICT_5X5_250": cv2.aruco.DICT_5X5_250, 33 | "DICT_5X5_1000": cv2.aruco.DICT_5X5_1000, 34 | "DICT_6X6_50": cv2.aruco.DICT_6X6_50, 35 | "DICT_6X6_100": cv2.aruco.DICT_6X6_100, 36 | "DICT_6X6_250": cv2.aruco.DICT_6X6_250, 37 | "DICT_6X6_1000": cv2.aruco.DICT_6X6_1000, 38 | "DICT_7X7_50": cv2.aruco.DICT_7X7_50, 39 | "DICT_7X7_100": cv2.aruco.DICT_7X7_100, 40 | "DICT_7X7_250": cv2.aruco.DICT_7X7_250, 41 | "DICT_7X7_1000": cv2.aruco.DICT_7X7_1000, 42 | "DICT_ARUCO_ORIGINAL": cv2.aruco.DICT_ARUCO_ORIGINAL 43 | } 44 | 45 | class ArucoNode(Node): 46 | def __init__(self): 47 | super().__init__('aruco_node') 48 | 49 | with open('src/handeye_realsense/config.yaml', 'r') as file: 50 | config = yaml.safe_load(file) 51 | aruco_dictionary_name = config["aruco_dictionary_name"] 52 | self.aruco_marker_name = config["aruco_marker_name"] 53 | self.aruco_marker_side_length = config["aruco_marker_side_length"] 54 | self.camera_calibration_parameters_filename = config["camera_calibration_parameters_filename"] 55 | self.image_topic = config["image_topic"] 56 | self.calculated_camera_optical_frame_name = config["calculated_camera_optical_frame_name"] 57 | self.marker_data_file_name = config["marker_data_file_name"] 58 | self.image_filename = config["image_filename"] 59 | 60 | # Check that we have a valid ArUco marker 61 | if ARUCO_DICT.get(aruco_dictionary_name, None) is None: 62 | self.get_logger().error(f"ArUCo tag of '{aruco_dictionary_name}' is not supported") 63 | return 64 | 65 | # Load the camera parameters from the saved file 66 | cv_file = 
cv2.FileStorage(self.camera_calibration_parameters_filename, cv2.FILE_STORAGE_READ) 67 | self.mtx = cv_file.getNode('K').mat() 68 | self.dst = cv_file.getNode('D').mat() 69 | cv_file.release() 70 | 71 | # Load the ArUco dictionary 72 | self.get_logger().info(f"Detecting '{aruco_dictionary_name}' marker.") 73 | self.this_aruco_dictionary = cv2.aruco.getPredefinedDictionary(ARUCO_DICT[aruco_dictionary_name]) 74 | self.this_aruco_parameters = cv2.aruco.DetectorParameters() 75 | 76 | # Create the subscriber 77 | self.subscription = self.create_subscription(Image, self.image_topic, self.listener_callback, 10) 78 | self.keypress_publisher = self.create_publisher(String, 'keypress_topic', 10) 79 | 80 | self.pose_count = 0 81 | 82 | # Initialize the dynamic and static transform broadcasters 83 | self.tfbroadcaster = TransformBroadcaster(self) 84 | self.static_tfbroadcaster = StaticTransformBroadcaster(self) 85 | 86 | # Used to convert between ROS and OpenCV images 87 | self.bridge = CvBridge() 88 | 89 | 90 | def quaternion_to_rotation_matrix(self, x, y, z, w): 91 | """ Convert a quaternion into a full three-dimensional rotation matrix. """ 92 | return R.from_quat([x, y, z, w]).as_matrix() 93 | 94 | def listener_callback(self, data): 95 | current_frame = self.bridge.imgmsg_to_cv2(data) 96 | corners, marker_ids, rejected = cv2.aruco.detectMarkers(current_frame, self.this_aruco_dictionary, parameters=self.this_aruco_parameters) 97 | 98 | if marker_ids is not None: 99 | cv2.aruco.drawDetectedMarkers(current_frame, corners, marker_ids) 100 | rvecs, tvecs, _ = cv2.aruco.estimatePoseSingleMarkers(corners, self.aruco_marker_side_length, self.mtx, self.dst) 101 | 102 | for i, marker_id in enumerate(marker_ids): 103 | # Create the coordinate transform 104 | t_marker_to_camera = TransformStamped() 105 | t_marker_to_camera.header.stamp = self.get_clock().now().to_msg() 106 | 107 | t_marker_to_camera.header.frame_id = self.calculated_camera_optical_frame_name 108 | t_marker_to_camera.child_frame_id = self.aruco_marker_name 109 | 110 | # Store the translation (i.e. 
position) information 111 | t_marker_to_camera.transform.translation.x = tvecs[i][0][0] 112 | t_marker_to_camera.transform.translation.y = tvecs[i][0][1] 113 | t_marker_to_camera.transform.translation.z = tvecs[i][0][2] 114 | 115 | # Store the rotation information 116 | rotation_matrix = cv2.Rodrigues(rvecs[i][0])[0] 117 | r = R.from_matrix(rotation_matrix) 118 | quat = r.as_quat() 119 | 120 | # Quaternion format 121 | t_marker_to_camera.transform.rotation.x = quat[0] 122 | t_marker_to_camera.transform.rotation.y = quat[1] 123 | t_marker_to_camera.transform.rotation.z = quat[2] 124 | t_marker_to_camera.transform.rotation.w = quat[3] 125 | 126 | # Draw the axes on the marker 127 | cv2.drawFrameAxes(current_frame, self.mtx, self.dst, rvecs[i], tvecs[i], 0.05) 128 | 129 | # Send the transform from marker to the camera 130 | self.tfbroadcaster.sendTransform(t_marker_to_camera) 131 | 132 | # Create a named window that can be resized 133 | cv2.namedWindow("camera", cv2.WINDOW_NORMAL) 134 | # Resize the window to the desired size, e.g., 700x500 pixels 135 | cv2.resizeWindow("camera", 700, 500) 136 | 137 | cv2.imshow("camera", current_frame) 138 | key = cv2.waitKey(1) 139 | if key == ord('q'): 140 | self.save_marker_data(rvecs, tvecs) 141 | self.save_image(current_frame) 142 | self.get_logger().info(f"Saved pose_{self.pose_count} marker transform.") 143 | self.keypress_publisher.publish(String(data='q')) 144 | elif key == ord('e'): 145 | self.get_logger().info("Ending program...") 146 | self.keypress_publisher.publish(String(data='e')) 147 | cv2.destroyAllWindows() # Close all OpenCV windows 148 | rclpy.shutdown() # Shutdown ROS client library for Python 149 | 150 | def save_marker_data(self, rvecs, tvecs): 151 | yaml_file_path = self.marker_data_file_name # Filename to save the YAML data 152 | try: 153 | with open(yaml_file_path, 'r') as file: 154 | data = yaml.safe_load(file) or {'poses': []} # Use existing data or initialize if empty 155 | except FileNotFoundError: 156 | data = {'poses': []} # Initialize if file does not exist 157 | 158 | for rvec, tvec in zip(rvecs, tvecs): 159 | R_mat = cv2.Rodrigues(rvec)[0] 160 | marker_data = { 161 | 'rotation': R_mat.tolist(), 162 | 'translation': tvec[0].tolist() 163 | } 164 | data['poses'].append(marker_data) 165 | 166 | with open(yaml_file_path, 'w') as file: 167 | yaml.dump(data, file, default_flow_style=False) 168 | 169 | self.pose_count += 1 170 | self.get_logger().info(f"Pose {self.pose_count}:") 171 | self.get_logger().info("Rotation Matrix:\n" + str(R_mat)) 172 | self.get_logger().info("Translation Vector:\n" + str(tvec[0])) 173 | self.get_logger().info(f'Transformation for Pose {self.pose_count} appended to {self.marker_data_file_name}') 174 | 175 | def save_image(self, frame): 176 | image_filename = self.image_filename.format(pose_count=self.pose_count) 177 | cv2.imwrite(image_filename, frame) 178 | self.get_logger().info(f'Image saved as {image_filename}') 179 | 180 | 181 | def main(args=None): 182 | rclpy.init(args=args) 183 | aruco_node = ArucoNode() 184 | try: 185 | rclpy.spin(aruco_node) 186 | finally: 187 | aruco_node.destroy_node() 188 | 189 | if __name__ == '__main__': 190 | main() 191 | -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/handeye_estimation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 
3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | import rclpy 7 | from rclpy.node import Node 8 | import cv2 9 | import numpy as np 10 | import yaml 11 | from scipy.spatial.transform import Rotation as R 12 | import os 13 | 14 | class HandEyeCalibrationNode(Node): 15 | def __init__(self): 16 | super().__init__('hand_eye_calibration_node') 17 | self.get_logger().info("Starting Hand-Eye Calibration Node") 18 | 19 | with open('src/handeye_realsense/config.yaml', 'r') as file: 20 | config = yaml.safe_load(file) 21 | self.robot_data_file_name = config["robot_data_file_name"] 22 | self.marker_data_file_name = config["marker_data_file_name"] 23 | self.handeye_result_file_name = config["handeye_result_file_name"] 24 | self.handeye_result_profile_file_name = config["handeye_result_profile_file_name"] 25 | 26 | # Load transformation data from YAML files 27 | self.R_gripper2base, self.t_gripper2base = self.load_transformations(self.robot_data_file_name) 28 | self.R_target2cam, self.t_target2cam = self.load_transformations(self.marker_data_file_name) 29 | 30 | # Compute the hand-eye transformation matrix 31 | self.compute_hand_eye() 32 | 33 | def load_transformations(self, file_path): 34 | with open(file_path, 'r') as file: 35 | data = yaml.safe_load(file) 36 | poses = data['poses'] 37 | 38 | # Initialize to handle yaml data format 39 | R = [] 40 | t = [] 41 | 42 | for pose in poses: 43 | rotation = np.array(pose['rotation'], dtype=np.float32) 44 | translation = np.array(pose['translation'], dtype=np.float32) 45 | 46 | R.append(rotation) 47 | t.append(translation) 48 | 49 | return R, t 50 | 51 | def compute_hand_eye(self): 52 | self.get_logger().info(f"Loaded {len(self.R_gripper2base)} rotations and {len(self.t_gripper2base)} translations for gripper to base") 53 | self.get_logger().info(f"Loaded {len(self.R_target2cam)} rotations and {len(self.t_target2cam)} translations for target to camera") 54 | rotations = [r.reshape(3, 3) for r in self.R_gripper2base] 55 | translations = [t.reshape(3, 1) for t in self.t_gripper2base] 56 | obj_rotations = [r.reshape(3, 3) for r in self.R_target2cam] 57 | obj_translations = [t.reshape(3, 1) for t in self.t_target2cam] 58 | print(f"R_gripper2base: {rotations}") 59 | print(f"t_gripper2base: {translations}") 60 | print(f"R_target2cam: {obj_rotations}") 61 | print(f"t_target2cam: {obj_translations}") 62 | 63 | 64 | # Perform hand-eye calibration 65 | R, t = cv2.calibrateHandEye( 66 | rotations, translations, obj_rotations, obj_translations, 67 | method=cv2.CALIB_HAND_EYE_TSAI) 68 | 69 | # Save results to YAML 70 | # Output: camera relative to gripper frame (eye to hand) 71 | self.save_yaml(R, t) 72 | #self.save_yaml_profile(R_qua, t) 73 | 74 | def rotation_matrix_to_quaternion(self, matrix): 75 | """Convert a 3x3 rotation matrix into a quaternion.""" 76 | rotation = R.from_matrix(matrix) 77 | return rotation.as_quat() 78 | 79 | def save_yaml(self, R, t): 80 | '''This function will always show only the updated result''' 81 | new_data = {'rotation': R.flatten().tolist(), 'translation': t.flatten().tolist()} 82 | 83 | # Write the new data to the YAML file, overwriting any existing content 84 | with open(self.handeye_result_file_name, 'w') as file: 85 | yaml.safe_dump(new_data, file) 86 | 87 | self.get_logger().info("Simulated hand-eye calibration results saved.") 88 | print(f"Rotation matrix: {R}") 89 | print(f"Translation vector: {t}") 90 | 91 | def save_yaml_profile(self, R, t): 92 | '''This function saves the rotation and translation data in the correct 
format.''' 93 | new_data = {'rotation': R.flatten().tolist(), 'translation': t.flatten().tolist()} 94 | 95 | # Check if the file exists and is not empty 96 | if os.path.exists(self.handeye_result_profile_file_name) and os.path.getsize(self.handeye_result_profile_file_name) > 0: 97 | # Load the existing data from the file 98 | with open(self.handeye_result_profile_file_name, 'r') as file: 99 | existing_data = yaml.safe_load(file) 100 | 101 | # If the file contains data, append the new transform 102 | if 'transforms' in existing_data: 103 | existing_data['transforms'].append(new_data) 104 | else: 105 | existing_data = {'transforms': [new_data]} 106 | else: 107 | # If the file does not exist or is empty, start with a new structure 108 | existing_data = {'transforms': [new_data]} 109 | 110 | # Save the updated structure back to the file 111 | with open(self.handeye_result_profile_file_name, 'w') as file: 112 | yaml.safe_dump(existing_data, file) 113 | 114 | self.get_logger().info("Simulated hand-eye calibration results saved.") 115 | print(f"Rotation matrix quaternion: {R}") 116 | print(f"Translation vector: {t}") 117 | 118 | 119 | def main(args=None): 120 | rclpy.init(args=args) 121 | node = HandEyeCalibrationNode() 122 | rclpy.spin(node) 123 | node.destroy_node() 124 | rclpy.shutdown() 125 | 126 | if __name__ == '__main__': 127 | main() 128 | -------------------------------------------------------------------------------- /handeye_realsense/handeye_realsense/publish_eye2hand.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | import rclpy 7 | from rclpy.node import Node 8 | import tf2_ros 9 | from geometry_msgs.msg import TransformStamped 10 | from tf2_msgs.msg import TFMessage 11 | from rclpy.qos import QoSProfile, DurabilityPolicy 12 | from scipy.spatial.transform import Rotation as R 13 | 14 | import numpy as np 15 | import yaml 16 | 17 | # Create a QoS profile for subscribing to /tf_static 18 | qos_profile = QoSProfile(depth=10, durability=DurabilityPolicy.TRANSIENT_LOCAL) 19 | 20 | class TransformPublisher(Node): 21 | 22 | def __init__(self): 23 | super().__init__('transform_publisher') 24 | 25 | with open('src/handeye_realsense/config.yaml', 'r') as file: 26 | config = yaml.safe_load(file) 27 | self.handeye_result_file_name = config["handeye_result_file_name"] 28 | self.base_link = config["base_link"] 29 | self.ee_link = config["ee_link"] 30 | self.world_frame = config["world_frame"] 31 | self.calculated_camera_optical_frame_name = config["calculated_camera_optical_frame_name"] 32 | 33 | self.tf_broadcaster = tf2_ros.TransformBroadcaster(self) 34 | 35 | # Load transformation from YAML file 36 | with open(self.handeye_result_file_name, 'r') as file_2: 37 | hand_eye_data = yaml.safe_load(file_2) 38 | if isinstance(hand_eye_data, list) and len(hand_eye_data) > 1: 39 | hand_eye_data = hand_eye_data[-1] 40 | elif isinstance(hand_eye_data, list): 41 | hand_eye_data = hand_eye_data[0] 42 | 43 | T = np.eye(4) 44 | T[:3, :3] = np.array(hand_eye_data['rotation']).reshape((3, 3)) 45 | T[:3, 3] = np.array(hand_eye_data['translation']).reshape((3,)) 46 | 47 | self.translation = np.array(hand_eye_data['translation']).reshape((3, 1)) 48 | self.rotation = np.array(hand_eye_data['rotation']).reshape((3, 3)) 49 | 50 | r = R.from_matrix(self.rotation) 51 | self.handeye_quaternion = r.as_quat() #xyzw 52 | 53 | print(f'rotation: 
{self.rotation}') 54 | print(f'translation: {self.translation}') 55 | print(f'handeye_quaternion: {self.handeye_quaternion}') 56 | 57 | timer_period = 0.1 # seconds 58 | self.timer = self.create_timer(timer_period, self.publish_handeye_transform) 59 | 60 | self.subscription_tf = self.create_subscription(TFMessage, '/tf', self.listener_callback_tf, 10) 61 | self.subscription_tf_static = self.create_subscription(TFMessage,'/tf_static', self.listener_callback_tf_static, qos_profile) 62 | 63 | self.transformations = {} 64 | 65 | def quaternion_to_rotation_matrix(self, x, y, z, w): 66 | """ Convert a quaternion into a full three-dimensional rotation matrix. """ 67 | return R.from_quat([x, y, z, w]).as_matrix() 68 | 69 | def listener_callback_tf(self, msg): 70 | """ Handle incoming transform messages. """ 71 | for transform in msg.transforms: 72 | if transform.child_frame_id and transform.header.frame_id: 73 | self.transformations[(transform.header.frame_id, transform.child_frame_id)] = transform 74 | 75 | def listener_callback_tf_static(self, msg): 76 | """ Handle incoming transform messages. """ 77 | for transform in msg.transforms: 78 | if transform.child_frame_id and transform.header.frame_id: 79 | self.transformations[(transform.header.frame_id, transform.child_frame_id)] = transform 80 | print("Subcribed to /tf_static successfully") 81 | 82 | 83 | def get_full_transformation_matrix(self): 84 | T = np.eye(4) # Start with the identity matrix 85 | link_order = [ 86 | ('world','lbr/world'), ('lbr/world','lbr/link_0'), 87 | ('lbr/link_0', 'lbr/link_1'), ('lbr/link_1', 'lbr/link_2'), 88 | ('lbr/link_2', 'lbr/link_3'), ('lbr/link_3', 'lbr/link_4'), 89 | ('lbr/link_4', 'lbr/link_5'), ('lbr/link_5', 'lbr/link_6'), 90 | ('lbr/link_6', 'lbr/link_7'), ('lbr/link_7', 'lbr/link_ee'), 91 | ] 92 | for (frame_id, child_frame_id) in link_order: 93 | if (frame_id, child_frame_id) in self.transformations: 94 | trans = self.transformations[(frame_id, child_frame_id)].transform 95 | translation = [trans.translation.x, trans.translation.y, trans.translation.z] 96 | rotation = [trans.rotation.x, trans.rotation.y, trans.rotation.z, trans.rotation.w] 97 | T_local = np.eye(4) 98 | T_local[:3, :3] = self.quaternion_to_rotation_matrix(*rotation) 99 | T_local[:3, 3] = translation 100 | T = np.dot(T, T_local) 101 | 102 | return T 103 | 104 | def publish_handeye_transform(self): 105 | transform_msg = TransformStamped() 106 | transform_msg.header.stamp = self.get_clock().now().to_msg() 107 | transform_msg.header.frame_id = self.ee_link 108 | transform_msg.child_frame_id = self.calculated_camera_optical_frame_name 109 | transform_msg.transform.translation.x = self.translation[0, 0] 110 | transform_msg.transform.translation.y = self.translation[1, 0] 111 | transform_msg.transform.translation.z = self.translation[2, 0] 112 | transform_msg.transform.rotation.x = self.handeye_quaternion[0] 113 | transform_msg.transform.rotation.y = self.handeye_quaternion[1] 114 | transform_msg.transform.rotation.z = self.handeye_quaternion[2] 115 | transform_msg.transform.rotation.w = self.handeye_quaternion[3] 116 | 117 | self.tf_broadcaster.sendTransform(transform_msg) 118 | 119 | 120 | def main(args=None): 121 | rclpy.init(args=args) 122 | 123 | transform_publisher = TransformPublisher() 124 | 125 | rclpy.spin(transform_publisher) 126 | 127 | transform_publisher.destroy_node() 128 | rclpy.shutdown() 129 | 130 | if __name__ == '__main__': 131 | main() 132 | -------------------------------------------------------------------------------- 
/handeye_realsense/handeye_realsense/robot_state_estimation.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | import rclpy 7 | from rclpy.node import Node 8 | from tf2_msgs.msg import TFMessage 9 | from scipy.spatial.transform import Rotation as R 10 | from rclpy.qos import QoSProfile, DurabilityPolicy 11 | from std_msgs.msg import String 12 | 13 | import yaml 14 | import numpy as np 15 | 16 | # Create a QoS profile for subscribing to /tf_static 17 | qos_profile = QoSProfile(depth=10, durability=DurabilityPolicy.TRANSIENT_LOCAL) 18 | 19 | class RobotTransformNode(Node): 20 | def __init__(self): 21 | super().__init__('robot_transform_node') 22 | self.subscription_tf = self.create_subscription(TFMessage, '/tf', self.listener_callback_tf, 10) 23 | self.subscription_tf_static = self.create_subscription(TFMessage,'/tf_static', self.listener_callback_tf_static, qos_profile) 24 | self.transformations = {} 25 | self.pose_count = 0 26 | self.subscription_keypress = self.create_subscription(String, 'keypress_topic', self.keypress_callback, 10) 27 | 28 | with open('src/handeye_realsense/config.yaml', 'r') as file: 29 | config = yaml.safe_load(file) 30 | self.robot_data_file_name = config["robot_data_file_name"] 31 | self.base_link = config["base_link"] 32 | self.ee_link = config["ee_link"] 33 | 34 | def quaternion_to_rotation_matrix(self, x, y, z, w): 35 | """ Convert a quaternion into a full three-dimensional rotation matrix. """ 36 | return R.from_quat([x, y, z, w]).as_matrix() 37 | 38 | def listener_callback_tf(self, msg): 39 | """ Handle incoming transform messages. """ 40 | for transform in msg.transforms: 41 | if transform.child_frame_id and transform.header.frame_id: 42 | self.transformations[(transform.header.frame_id, transform.child_frame_id)] = transform 43 | 44 | def listener_callback_tf_static(self, msg): 45 | """ Handle incoming transform messages. """ 46 | for transform in msg.transforms: 47 | if transform.child_frame_id and transform.header.frame_id: 48 | self.transformations[(transform.header.frame_id, transform.child_frame_id)] = transform 49 | self.get_logger().info("Subcribed to /tf_static successfully") 50 | 51 | 52 | def get_full_transformation_matrix(self): 53 | T = np.eye(4) # Start with the identity matrix 54 | link_order = [ 55 | ('lbr/link_0', 'lbr/link_1'), ('lbr/link_1', 'lbr/link_2'), 56 | ('lbr/link_2', 'lbr/link_3'), ('lbr/link_3', 'lbr/link_4'), 57 | ('lbr/link_4', 'lbr/link_5'), ('lbr/link_5', 'lbr/link_6'), 58 | ('lbr/link_6', 'lbr/link_7'), 59 | ('lbr/link_7', 'lbr/link_ee'), 60 | ] 61 | for (frame_id, child_frame_id) in link_order: 62 | if (frame_id, child_frame_id) in self.transformations: 63 | trans = self.transformations[(frame_id, child_frame_id)].transform 64 | translation = [trans.translation.x, trans.translation.y, trans.translation.z] 65 | rotation = [trans.rotation.x, trans.rotation.y, trans.rotation.z, trans.rotation.w] 66 | T_local = np.eye(4) 67 | T_local[:3, :3] = self.quaternion_to_rotation_matrix(*rotation) 68 | T_local[:3, 3] = translation 69 | T = np.dot(T, T_local) 70 | 71 | return T 72 | 73 | def save_transformation_to_yaml(self, rotation_matrix, translation_vector): 74 | """ Append the rotation matrix and translation vector to a YAML file and print them. 
""" 75 | # Load existing data from YAML if file exists and is not empty 76 | yaml_file_path = self.robot_data_file_name 77 | try: 78 | with open(yaml_file_path, 'r') as file: 79 | data = yaml.safe_load(file) or {'poses': []} # Use existing data or initialize if empty 80 | except FileNotFoundError: 81 | data = {'poses': []} # Initialize if file does not exist 82 | 83 | # Append new pose data 84 | data['poses'].append({ 85 | 'rotation': rotation_matrix.tolist(), 86 | 'translation': translation_vector.tolist() 87 | }) 88 | 89 | # Write updated data back to YAML 90 | with open(yaml_file_path, 'w') as file: # 'w' to overwrite existing file 91 | yaml.dump(data, file, default_flow_style=False) 92 | 93 | self.pose_count += 1 # Increment pose counter 94 | print(f"Pose {self.pose_count}:") 95 | print("Rotation Matrix:") 96 | print(rotation_matrix) 97 | print("Translation Vector:") 98 | print(translation_vector) 99 | self.get_logger().info(f'Transformation for Pose {self.pose_count} appended to robot_data_realsense.yaml') 100 | 101 | def keypress_callback(self, msg): 102 | key = msg.data 103 | if key == 'q': 104 | T = self.get_full_transformation_matrix() 105 | R_gripper2base = T[:3, :3] 106 | t_gripper2base = T[:3, 3] 107 | self.save_transformation_to_yaml(R_gripper2base, t_gripper2base) 108 | elif key == 'e': 109 | self.get_logger().info("Ending program...") 110 | rclpy.shutdown() 111 | 112 | 113 | def main(args=None): 114 | rclpy.init(args=args) 115 | robot_transform_node = RobotTransformNode() 116 | 117 | try: 118 | rclpy.spin(robot_transform_node) 119 | finally: 120 | robot_transform_node.destroy_node() 121 | rclpy.shutdown() 122 | 123 | if __name__ == '__main__': 124 | main() 125 | -------------------------------------------------------------------------------- /handeye_realsense/launch/taking_sample_launch.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright © 2024 Shengyang Zhuang. All rights reserved. 3 | 4 | Contact: https://shengyangzhuang.github.io/ 5 | """ 6 | from launch import LaunchDescription 7 | from launch_ros.actions import Node 8 | 9 | def generate_launch_description(): 10 | return LaunchDescription([ 11 | Node( 12 | package='handeye_realsense', 13 | executable='robot', 14 | name='robot_state_estimation' 15 | ), 16 | Node( 17 | package='handeye_realsense', 18 | executable='aruco', 19 | name='aruco_estimation' 20 | ), 21 | ]) -------------------------------------------------------------------------------- /handeye_realsense/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | handeye_realsense 5 | 0.0.0 6 | ROS2 package for hand-eye calibration 7 | szhuang 8 | Apache-2.0 9 | 10 | rclpy 11 | sensor_msgs 12 | cv_bridge 13 | visualization_msgs 14 | geometry_msgs 15 | tf2_ros 16 | tf_transformations 17 | ros2launch 18 | 19 | ament_copyright 20 | ament_flake8 21 | ament_pep257 22 | python3-pytest 23 | 24 | 25 | ament_python 26 | 27 | 28 | -------------------------------------------------------------------------------- /handeye_realsense/realsense_info.yaml: -------------------------------------------------------------------------------- 1 | %YAML:1.0 2 | --- 3 | K: !!opencv-matrix 4 | rows: 3 5 | cols: 3 6 | dt: d 7 | data: [ 931.9412841796875, 0., 634.0355224609375, 0., 8 | 931.72412109375, 351.0355224609375, 0., 0., 1. 
] 9 | D: !!opencv-matrix 10 | rows: 1 11 | cols: 5 12 | dt: d 13 | data: [ 0.0, 0.0, 0.0, 0.0, 0.0 ] 14 | -------------------------------------------------------------------------------- /handeye_realsense/resource/handeye_realsense: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/handeye_realsense -------------------------------------------------------------------------------- /handeye_realsense/resource/handeye_result_realsense.yaml: -------------------------------------------------------------------------------- 1 | rotation: 2 | - -0.008060546495996634 3 | - 0.999703982494103 4 | - -0.022955935520394228 5 | - -0.9999669003750362 6 | - -0.008083834221862762 7 | - -0.0009218343756400415 8 | - -0.0011071334736819545 9 | - 0.022947745198691354 10 | - 0.9997360527888188 11 | translation: 12 | - -0.05442818074643688 13 | - 0.03646387745037295 14 | - 0.027568893056412275 15 | -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_data_realsense.yaml: -------------------------------------------------------------------------------- 1 | poses: 2 | - rotation: 3 | - - 0.7875771575295681 4 | - 0.4472980327076915 5 | - 0.42384748539241734 6 | - - 0.2796893882581179 7 | - -0.872370387442089 8 | - 0.40092861360838206 9 | - - 0.5490865751713898 10 | - -0.19721657397381318 11 | - -0.812163503191682 12 | translation: 13 | - 0.07601375421893658 14 | - 0.07583097607130644 15 | - 0.37573631164536975 16 | - rotation: 17 | - - 0.9990993777969284 18 | - 0.040678935496891444 19 | - -0.012068864595755477 20 | - - 0.04110390365057304 21 | - -0.8572712475356223 22 | - 0.5132216648324602 23 | - - 0.010531020390985092 24 | - -0.5132555234535304 25 | - -0.8581712330613088 26 | translation: 27 | - 0.01778257656648433 28 | - 0.01631192854877187 29 | - 0.2738030212578226 30 | - rotation: 31 | - - 0.8942421620416111 32 | - -0.3990304889863351 33 | - -0.2027452206254709 34 | - - -0.2787620786250803 35 | - -0.8509268344184869 36 | - 0.44521368575904946 37 | - - -0.35017518351215166 38 | - -0.3416111697908301 39 | - -0.8721692206942249 40 | translation: 41 | - -0.13163297200557736 42 | - 0.05494665723221066 43 | - 0.36463887495164177 44 | - rotation: 45 | - - 0.6961289850496581 46 | - -0.08348732493740162 47 | - -0.7130457928832695 48 | - - -0.03729737773195148 49 | - -0.9960796272091612 50 | - 0.08021397554777165 51 | - - -0.7169472377993055 52 | - -0.02924453510752221 53 | - -0.6965137582117751 54 | translation: 55 | - -0.018840875064481753 56 | - 0.028258257957518924 57 | - 0.37929877185096755 58 | - rotation: 59 | - - 0.9987177731058684 60 | - 0.04322073256051207 61 | - -0.026358641076268324 62 | - - 0.04254638398260518 63 | - -0.9987657082363025 64 | - -0.0256293827713769 65 | - - -0.027433827521138304 66 | - 0.02447505522301687 67 | - -0.9993239498677947 68 | translation: 69 | - 0.017240727279310423 70 | - 0.0623995740968227 71 | - 0.5358753786776422 72 | - rotation: 73 | - - 0.7634941148105137 74 | - -0.11018381860401177 75 | - 0.636346024398321 76 | - - -0.1552935726011358 77 | - -0.9877499831967115 78 | - 0.015293037751600257 79 | - - 0.626865729609209 80 | - -0.11049659186026659 81 | - -0.7712521379087254 82 | translation: 83 | - -0.003817500714181999 84 | - 0.026695418237481117 85 | - 0.3728607057737637 86 | - rotation: 87 | - - 0.8645033132355345 88 | - 0.20234443478839761 89 | 
- 0.46009863194205086 90 | - - 0.36807507939857576 91 | - -0.8782206865149992 92 | - -0.3053672570903035 93 | - - 0.34227877136993906 94 | - 0.43334184599146114 95 | - -0.8337026371448022 96 | translation: 97 | - 0.09493971019044817 98 | - 0.09204930428524295 99 | - 0.5413155120352341 100 | - rotation: 101 | - - 0.9989426296042763 102 | - 0.04593796816442011 103 | - 0.0018236886298622842 104 | - - 0.038494859720499264 105 | - -0.8140844338384059 106 | - -0.5794693092451931 107 | - - -0.025135006154625217 108 | - 0.5789267981903471 109 | - -0.8149920206987796 110 | translation: 111 | - 0.01684781544033249 112 | - 0.15659044682268103 113 | - 0.769246927039229 114 | - rotation: 115 | - - 0.8481645974348889 116 | - 0.06368249330893748 117 | - -0.5258910112409889 118 | - - -0.19769172904708393 119 | - -0.882974255102796 120 | - -0.425763367484847 121 | - - -0.49146189671681273 122 | - 0.4650817184878585 123 | - -0.7363180014123607 124 | translation: 125 | - 0.08625388368447144 126 | - 0.11951642887768739 127 | - 0.5307104797612258 128 | -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_1.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_2.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_3.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_4.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_5.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_6.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_6.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_7.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_7.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_8.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_8.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/marker_pose_realsense_9.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shengyangzhuang/handeye_calibration_ros2/27fd9015e59d0c269bc4f939b41b66cad6126429/handeye_realsense/resource/marker_pose_realsense_9.jpg -------------------------------------------------------------------------------- /handeye_realsense/resource/robot_data_realsense.yaml: -------------------------------------------------------------------------------- 1 | poses: 2 | - rotation: 3 | - - -0.8810676155002689 4 | - -0.41334424997957847 5 | - -0.22992691866219345 6 | - - -0.22953868505960587 7 | - 0.7986905677292491 8 | - -0.5562421856362905 9 | - - 0.41355997023137797 10 | - -0.4373098535696975 11 | - -0.7985788896490268 12 | translation: 13 | - 0.5859934878169077 14 | - 0.2793671357488082 15 | - 0.29634870263575097 16 | - rotation: 17 | - - -0.8480481430748489 18 | - -2.321797989498787e-06 19 | - -0.5299191891429393 20 | - - -8.021142316208716e-07 21 | - 0.9999999999948803 22 | - -3.097767619024867e-06 23 | - - 0.5299191891474189 24 | - -2.20200035377089e-06 25 | - -0.8480481430723695 26 | translation: 27 | - 0.6212479359588889 28 | - 1.2997815558296094e-06 29 | - 0.27522678022098224 30 | - rotation: 31 | - - -0.8298331752151482 32 | - 0.4372765910258416 33 | - -0.34664980059588324 34 | - - 0.311256143867057 35 | - 0.8783280008990663 36 | - 0.3628491914579133 37 | - - 0.4631376838666411 38 | - 0.19320741646601033 39 | - -0.8649707393924806 40 | translation: 41 | - 0.5567389560099935 42 | - -0.26944026079888234 43 | - 0.304846136497429 44 | - rotation: 45 | - - -0.9927714418463924 46 | - 0.11727600006016708 47 | - -0.025518700282808067 48 | - - 0.06450866959357487 49 | - 0.7006927567457086 50 | - 0.7105408448438183 51 | - - 0.1012101566123832 52 | - 0.7037584816213499 53 | - -0.7031930771448978 54 | translation: 55 | - 0.46964561740475236 56 | - -0.30724932212892236 57 | - 0.24601078482523694 58 | - rotation: 59 | - - -0.9998476951563902 60 | - -8.188436744584154e-15 61 | - -0.017452406437360568 62 | - - -9.46605816578933e-15 63 | - 1.0000000000000002 64 | - 7.312342279427453e-14 65 | - - 0.017452406437360568 66 | - 7.327749123727066e-14 67 | - -0.9998476951563902 68 | translation: 69 | - 0.5058646972630286 70 | - -3.6409508366543386e-14 71 | - 0.5567124834785221 72 | - rotation: 73 | - - -0.9777507019242926 74 | - 0.1486109874680766 75 | - -0.14804843562265935 76 | - - 0.20879442931507974 77 | - 0.7574580484845903 78 | - -0.6185969536563419 79 | - - 0.020210175000323127 80 | - -0.6357452942725408 81 | - -0.7716342849023604 82 | translation: 83 | - 0.5164527363239697 84 | - 0.24358141047770757 85 | - 0.33051000024963345 86 | - rotation: 87 | - - -0.9064820318087736 88 | - -0.16499364289895252 89 | - 
0.3886739299345551 90 | - - -0.31745775611425553 91 | - 0.8732388015604002 92 | - -0.36969523736756243 93 | - - -0.2784077927981546 94 | - -0.4585096435761079 95 | - -0.8439537947405481 96 | translation: 97 | - 0.2892399300000551 98 | - 0.29031760363982423 99 | - 0.45701188350792293 100 | - rotation: 101 | - - -0.8191520447880419 102 | - 2.4485691984245875e-10 103 | - 0.5735764356383286 104 | - - 4.53972247873447e-11 105 | - 1.0 106 | - -3.620610566037099e-10 107 | - - -0.5735764356383286 108 | - -2.70544276473651e-10 109 | - -0.8191520447880419 110 | translation: 111 | - 0.11226301566149766 112 | - 1.145789754439343e-10 113 | - 0.6939018861998697 114 | - rotation: 115 | - - -0.8787077643541353 116 | - -0.03371410881927017 117 | - 0.4761680624845403 118 | - - 0.22483459247275556 119 | - 0.8507027620803636 120 | - 0.47513599802209466 121 | - - -0.4210962727113227 122 | - 0.5245647428634185 123 | - -0.7399390242805611 124 | translation: 125 | - 0.2774359029351345 126 | - -0.2184417434665171 127 | - 0.45897905782658655 128 | -------------------------------------------------------------------------------- /handeye_realsense/setup.cfg: -------------------------------------------------------------------------------- 1 | [develop] 2 | script_dir=$base/lib/handeye_realsense 3 | [install] 4 | install_scripts=$base/lib/handeye_realsense 5 | -------------------------------------------------------------------------------- /handeye_realsense/setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import find_packages, setup 2 | from glob import glob 3 | import os 4 | 5 | package_name = 'handeye_realsense' 6 | 7 | setup( 8 | name=package_name, 9 | version='0.0.0', 10 | packages=find_packages(exclude=['test']), 11 | data_files=[ 12 | ('share/ament_index/resource_index/packages', 13 | ['resource/' + package_name]), 14 | ('share/' + package_name, ['package.xml']), 15 | (os.path.join('share', package_name, 'launch'), glob(os.path.join('launch', '*launch.[pxy][yma]*'))) 16 | ], 17 | install_requires=['setuptools'], 18 | zip_safe=True, 19 | maintainer='szhuang', 20 | maintainer_email='heyeason@outlook.com', 21 | description='ROS2 package for hand-eye calibration', 22 | license='Apache-2.0', 23 | tests_require=['pytest'], 24 | entry_points={ 25 | 'console_scripts': [ 26 | 'robot = handeye_realsense.robot_state_estimation:main', 27 | 'handeye = handeye_realsense.handeye_estimation:main', 28 | 'aruco = handeye_realsense.aruco_estimation:main', 29 | 'eye2hand = handeye_realsense.publish_eye2hand:main', 30 | ], 31 | }, 32 | ) 33 | -------------------------------------------------------------------------------- /handeye_realsense/test/test_copyright.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Open Source Robotics Foundation, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 
14 | 15 | from ament_copyright.main import main 16 | import pytest 17 | 18 | 19 | # Remove the `skip` decorator once the source file(s) have a copyright header 20 | @pytest.mark.skip(reason='No copyright header has been placed in the generated source file.') 21 | @pytest.mark.copyright 22 | @pytest.mark.linter 23 | def test_copyright(): 24 | rc = main(argv=['.', 'test']) 25 | assert rc == 0, 'Found errors' 26 | -------------------------------------------------------------------------------- /handeye_realsense/test/test_flake8.py: -------------------------------------------------------------------------------- 1 | # Copyright 2017 Open Source Robotics Foundation, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from ament_flake8.main import main_with_errors 16 | import pytest 17 | 18 | 19 | @pytest.mark.flake8 20 | @pytest.mark.linter 21 | def test_flake8(): 22 | rc, errors = main_with_errors(argv=[]) 23 | assert rc == 0, \ 24 | 'Found %d code style errors / warnings:\n' % len(errors) + \ 25 | '\n'.join(errors) 26 | -------------------------------------------------------------------------------- /handeye_realsense/test/test_pep257.py: -------------------------------------------------------------------------------- 1 | # Copyright 2015 Open Source Robotics Foundation, Inc. 2 | # 3 | # Licensed under the Apache License, Version 2.0 (the "License"); 4 | # you may not use this file except in compliance with the License. 5 | # You may obtain a copy of the License at 6 | # 7 | # http://www.apache.org/licenses/LICENSE-2.0 8 | # 9 | # Unless required by applicable law or agreed to in writing, software 10 | # distributed under the License is distributed on an "AS IS" BASIS, 11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | # See the License for the specific language governing permissions and 13 | # limitations under the License. 14 | 15 | from ament_pep257.main import main 16 | import pytest 17 | 18 | 19 | @pytest.mark.linter 20 | @pytest.mark.pep257 21 | def test_pep257(): 22 | rc = main(argv=['.', 'test']) 23 | assert rc == 0, 'Found code style errors / warnings' 24 | --------------------------------------------------------------------------------