├── ros2_trt_pose_hand
│   ├── resource
│   │   └── ros2_trt_pose_hand
│   ├── ros2_trt_pose_hand
│   │   ├── __init__.py
│   │   ├── input_dir
│   │   │   ├── svmmodel.sav
│   │   │   ├── gesture.json
│   │   │   └── hand_pose.json
│   │   ├── live_hand_pose.py
│   │   ├── preprocessdata.py
│   │   ├── utils.py
│   │   └── helper.py
│   ├── setup.cfg
│   ├── package.xml
│   ├── test
│   │   ├── test_flake8.py
│   │   ├── test_copyright.py
│   │   └── test_pep257.py
│   ├── setup.py
│   └── launch
│       ├── hand-pose-estimation.launch.py
│       └── hand_pose.rviz
├── hand_pose_msgs
│   ├── msg
│   │   ├── FingerPoints.msg
│   │   └── HandPoseDetection.msg
│   ├── package.xml
│   └── CMakeLists.txt
├── LICENSE.md
├── README.md
└── giistr-cla.md
/ros2_trt_pose_hand/resource/ros2_trt_pose_hand:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/ros2_trt_pose_hand/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/hand_pose_msgs/msg/FingerPoints.msg:
--------------------------------------------------------------------------------
1 | float32 x
2 | float32 y
3 | float32 confidence
4 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/setup.cfg:
--------------------------------------------------------------------------------
1 | [develop]
2 | script-dir=$base/lib/ros2_trt_pose_hand
3 | [install]
4 | install-scripts=$base/lib/ros2_trt_pose_hand
5 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/ros2_trt_pose_hand/input_dir/svmmodel.sav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NVIDIA-AI-IOT/ros2_trt_pose_hand/HEAD/ros2_trt_pose_hand/ros2_trt_pose_hand/input_dir/svmmodel.sav
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/ros2_trt_pose_hand/input_dir/gesture.json:
--------------------------------------------------------------------------------
1 | {"paint": ["clear","draw", "click", "line", "erase", "no hand", "no hand"], "mouse": ["drag","pan", "click", "zoom", "scroll", "no hand", "no hand"], "classes": ["fist","pan", "stop", "peace", "ok", "no hand", "no hand"]}
2 |
--------------------------------------------------------------------------------
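The `"classes"` entry above is the list the node actually publishes from. A minimal sketch (not part of the repository) of how the 1-based class id predicted by the SVM maps onto this list, mirroring `print_label()` in `preprocessdata.py`, where a label is only accepted after `num_frames` identical consecutive predictions:

```python
import json

# Load the gesture classes shipped in input_dir (path is an assumption).
with open('gesture.json') as f:
    gesture_type = json.load(f)['classes']  # ['fist', 'pan', 'stop', ...]

num_frames = 4              # same debounce window as preprocessdata
prev_queue = [3, 3, 3, 3]   # hypothetical last four SVM predictions
if prev_queue == [3] * num_frames:
    text = gesture_type[3 - 1]  # class id 3 -> 'stop'
    print(text)
```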
/ros2_trt_pose_hand/ros2_trt_pose_hand/input_dir/hand_pose.json:
--------------------------------------------------------------------------------
1 | {"supercategory": "hand", "id": 1, "name": "hand", "keypoints": ["palm","thumb_1", "thumb_2", "thumb_3", "thumb_4", "index_finger_1", "index_finger_2", "index_finger_3", "index_finger_4", "middle_finger_1", "middle_finger_2", "middle_finger_3", "middle_finger_4", "ring_finger_1", "ring_finger_2", "ring_finger_3", "ring_finger_4", "baby_finger_1", "baby_finger_2", "baby_finger_3", "baby_finger_4"], "skeleton": [[1, 5], [1, 9], [1, 13], [1, 17], [1, 21], [2, 3], [3, 4], [4, 5], [6, 7], [7, 8], [8, 9], [10, 11], [11, 12], [12, 13], [14, 15], [15, 16], [16, 17], [18, 19], [19, 20], [20, 21]]}
2 |
--------------------------------------------------------------------------------
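A minimal sketch of how `hand_pose.json` is consumed, mirroring `load_params()` in `utils.py`: the COCO-style category is converted into a `trt_pose` topology, and the keypoint/skeleton counts size the model heads and the parser:

```python
import json

import trt_pose.coco
from trt_pose.parse_objects import ParseObjects

# Path is an assumption; the node reads the file from base_dir.
with open('hand_pose.json') as f:
    hand_pose = json.load(f)

topology = trt_pose.coco.coco_category_to_topology(hand_pose)
num_parts = len(hand_pose['keypoints'])  # 21 keypoints, palm first
num_links = len(hand_pose['skeleton'])   # 20 links as 1-indexed keypoint pairs
parse_objects = ParseObjects(topology, cmap_threshold=0.15, link_threshold=0.15)
```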
/hand_pose_msgs/msg/HandPoseDetection.msg:
--------------------------------------------------------------------------------
1 | uint32 hand_id
2 | FingerPoints palm
3 | FingerPoints thumb_1
4 | FingerPoints thumb_2
5 | FingerPoints thumb_3
6 | FingerPoints thumb_4
7 | FingerPoints index_finger_1
8 | FingerPoints index_finger_2
9 | FingerPoints index_finger_3
10 | FingerPoints index_finger_4
11 | FingerPoints middle_finger_1
12 | FingerPoints middle_finger_2
13 | FingerPoints middle_finger_3
14 | FingerPoints middle_finger_4
15 | FingerPoints ring_finger_1
16 | FingerPoints ring_finger_2
17 | FingerPoints ring_finger_3
18 | FingerPoints ring_finger_4
19 | FingerPoints baby_finger_1
20 | FingerPoints baby_finger_2
21 | FingerPoints baby_finger_3
22 | FingerPoints baby_finger_4
23 |
--------------------------------------------------------------------------------
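Both message types are consumed like any other ROS 2 interface. A minimal sketch (not part of the repository) of a subscriber reading the `HandPoseDetection` messages that `helper.py` publishes on the `hand_pose_msgs` topic; undetected keypoints keep the NaN default set in `init_finger_points_msg()`:

```python
import math

import rclpy
from rclpy.node import Node
from hand_pose_msgs.msg import HandPoseDetection


class PalmListener(Node):
    def __init__(self):
        super().__init__('palm_listener')
        # Topic name taken from helper.py: create_publisher(..., 'hand_pose_msgs', 100)
        self.create_subscription(HandPoseDetection, 'hand_pose_msgs', self.callback, 10)

    def callback(self, msg):
        # Undetected keypoints are NaN, so filter before using them.
        if not math.isnan(msg.palm.x):
            self.get_logger().info('Palm at x={:.1f}, y={:.1f}'.format(msg.palm.x, msg.palm.y))


def main(args=None):
    rclpy.init(args=args)
    node = PalmListener()
    rclpy.spin(node)
    node.destroy_node()
    rclpy.shutdown()


if __name__ == '__main__':
    main()
```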
/ros2_trt_pose_hand/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
3 | <package format="3">
4 |   <name>ros2_trt_pose_hand</name>
5 |   <version>0.0.0</version>
6 |   <description>TODO: Package description</description>
7 |   <maintainer email="ameykulkarni@nvidia.com">ak-nv</maintainer>
8 |   <license>TODO: License declaration</license>
9 |   <exec_depend>hand_pose_msgs</exec_depend>
10 |   <exec_depend>image_tools</exec_depend>
11 |
12 |   <test_depend>ament_copyright</test_depend>
13 |   <test_depend>ament_flake8</test_depend>
14 |   <test_depend>ament_pep257</test_depend>
15 |   <test_depend>python3-pytest</test_depend>
16 |
17 |   <export>
18 |     <build_type>ament_python</build_type>
19 |   </export>
20 | </package>
21 |
--------------------------------------------------------------------------------
/hand_pose_msgs/package.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
3 | <package format="3">
4 |   <name>hand_pose_msgs</name>
5 |   <version>0.0.0</version>
6 |   <description>TODO: Package description</description>
7 |   <maintainer email="ameykulkarni@nvidia.com">ak-nv</maintainer>
8 |   <license>TODO: License declaration</license>
9 |
10 |   <buildtool_depend>ament_cmake</buildtool_depend>
11 |   <buildtool_depend>rosidl_default_generators</buildtool_depend>
12 |
13 |   <exec_depend>rosidl_default_runtime</exec_depend>
14 |
15 |   <member_of_group>rosidl_interface_packages</member_of_group>
16 |
17 |   <test_depend>ament_lint_auto</test_depend>
18 |   <test_depend>ament_lint_common</test_depend>
19 |
20 |   <export>
21 |     <build_type>ament_cmake</build_type>
22 |   </export>
23 | </package>
24 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/test/test_flake8.py:
--------------------------------------------------------------------------------
1 | # Copyright 2017 Open Source Robotics Foundation, Inc.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | from ament_flake8.main import main
16 | import pytest
17 |
18 |
19 | @pytest.mark.flake8
20 | @pytest.mark.linter
21 | def test_flake8():
22 | rc = main(argv=[])
23 | assert rc == 0, 'Found errors'
24 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/test/test_copyright.py:
--------------------------------------------------------------------------------
1 | # Copyright 2015 Open Source Robotics Foundation, Inc.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | from ament_copyright.main import main
16 | import pytest
17 |
18 |
19 | @pytest.mark.copyright
20 | @pytest.mark.linter
21 | def test_copyright():
22 | rc = main(argv=['.', 'test'])
23 | assert rc == 0, 'Found errors'
24 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/test/test_pep257.py:
--------------------------------------------------------------------------------
1 | # Copyright 2015 Open Source Robotics Foundation, Inc.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | from ament_pep257.main import main
16 | import pytest
17 |
18 |
19 | @pytest.mark.linter
20 | @pytest.mark.pep257
21 | def test_pep257():
22 | rc = main(argv=['.', 'test'])
23 | assert rc == 0, 'Found code style errors / warnings'
24 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 | from glob import glob
3 | import os
4 |
5 | package_name = 'ros2_trt_pose_hand'
6 |
7 | setup(
8 | name=package_name,
9 | version='0.0.0',
10 | packages=[package_name],
11 | data_files=[
12 | ('share/ament_index/resource_index/packages',
13 | ['resource/' + package_name]),
14 | ('share/' + package_name, ['package.xml']),
15 | (os.path.join('share', package_name), glob('launch/*.launch.py')),
16 | ],
17 | install_requires=['setuptools'],
18 | zip_safe=True,
19 | maintainer='ak-nv',
20 | maintainer_email='ameykulkarni@nvidia.com',
21 | description='TODO: Package description',
22 | license='TODO: License declaration',
23 | tests_require=['pytest'],
24 | entry_points={
25 | 'console_scripts': [
26 | 'hand-pose-estimation = ros2_trt_pose_hand.live_hand_pose:main',
27 | ],
28 | },
29 | )
30 |
--------------------------------------------------------------------------------
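Note that `glob('launch/*.launch.py')` installs only the launch script; `launch/hand_pose.rviz` is not installed, which is why the launch file below falls back to a hard-coded source-tree path for the RViz config. A sketch of the `data_files` list extended to install the config as well (an assumption about the intended layout, not what the repository currently does):

```python
data_files=[
    ('share/ament_index/resource_index/packages',
        ['resource/' + package_name]),
    ('share/' + package_name, ['package.xml']),
    (os.path.join('share', package_name), glob('launch/*.launch.py')),
    # Hypothetical addition: install the RViz config next to the launch file.
    (os.path.join('share', package_name), glob('launch/*.rviz')),
],
```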
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
2 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
3 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
4 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
5 |
--------------------------------------------------------------------------------
/hand_pose_msgs/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.5)
2 | project(hand_pose_msgs)
3 |
4 | # Default to C99
5 | if(NOT CMAKE_C_STANDARD)
6 | set(CMAKE_C_STANDARD 99)
7 | endif()
8 |
9 | # Default to C++14
10 | if(NOT CMAKE_CXX_STANDARD)
11 | set(CMAKE_CXX_STANDARD 14)
12 | endif()
13 |
14 | if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
15 | add_compile_options(-Wall -Wextra -Wpedantic)
16 | endif()
17 |
18 | # find dependencies
19 | find_package(ament_cmake REQUIRED)
20 | find_package(rosidl_default_generators REQUIRED)
21 |
22 | rosidl_generate_interfaces(${PROJECT_NAME}
23 | "msg/FingerPoints.msg"
24 | "msg/HandPoseDetection.msg"
25 | )
26 | # uncomment the following section in order to fill in
27 | # further dependencies manually.
28 | # find_package( REQUIRED)
29 |
30 | if(BUILD_TESTING)
31 | find_package(ament_lint_auto REQUIRED)
32 | # the following line skips the linter which checks for copyrights
33 | # uncomment the line when a copyright and license is not present in all source files
34 | #set(ament_cmake_copyright_FOUND TRUE)
35 | # the following line skips cpplint (only works in a git repo)
36 | # uncomment the line when this package is not in a git repo
37 | #set(ament_cmake_cpplint_FOUND TRUE)
38 | ament_lint_auto_find_test_dependencies()
39 | endif()
40 |
41 | ament_package()
42 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/ros2_trt_pose_hand/live_hand_pose.py:
--------------------------------------------------------------------------------
1 | # ---------------------------------------------------------------------------------------
2 | # Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
3 | # Permission is hereby granted, free of charge, to any person obtaining
4 | # a copy of this software and associated documentation files (the "Software"),
5 | # to deal in the Software without restriction, including without limitation
6 | # the rights to use, copy, modify, merge, publish, distribute, sublicense,
7 | # and/or sell copies of the Software, and to permit persons to whom the
8 | # Software is furnished to do so, subject to the following conditions:
9 | # The above copyright notice and this permission notice shall be included
10 | # in all copies or substantial portions of the Software.
11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
13 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
14 | # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
15 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 | # ---------------------------------------------------------------------------------------
17 | # ROS2 related
18 | import rclpy
19 | from ros2_trt_pose_hand.helper import TRTHandPose
20 |
21 | def main(args=None):
22 | rclpy.init(args=args)
23 | trt_pose_hand = TRTHandPose()
24 | trt_pose_hand.start()
25 | rclpy.spin(trt_pose_hand)
26 |
27 | trt_pose_hand.destroy_node()
28 | rclpy.shutdown()
29 |
30 | if __name__ == '__main__':
31 | main()
32 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/launch/hand-pose-estimation.launch.py:
--------------------------------------------------------------------------------
1 | # ---------------------------------------------------------------------------------------
2 | # Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
3 | # Permission is hereby granted, free of charge, to any person obtaining
4 | # a copy of this software and associated documentation files (the "Software"),
5 | # to deal in the Software without restriction, including without limitation
6 | # the rights to use, copy, modify, merge, publish, distribute, sublicense,
7 | # and/or sell copies of the Software, and to permit persons to whom the
8 | # Software is furnished to do so, subject to the following conditions:
9 | # The above copyright notice and this permission notice shall be included
10 | # in all copies or substantial portions of the Software.
11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
13 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
14 | # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
15 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 | # ---------------------------------------------------------------------------------------
17 |
18 | import launch
19 | import launch_ros
20 | from launch import LaunchDescription
21 | from launch_ros.actions import Node
22 | from launch.substitutions import LaunchConfiguration
23 | import os
24 | def generate_launch_description():
25 | pkg_share = launch_ros.substitutions.FindPackageShare(package='ros2_trt_pose_hand').find('ros2_trt_pose_hand')
26 | print(pkg_share)
27 | default_rviz_config_path = 'src/ros2_gesture_classification/ros2_trt_pose_hand/ros2_trt_pose_hand/launch/hand_pose.rviz'
28 |
29 | trt_pose_hand_node = Node(
30 | package="ros2_trt_pose_hand",
31 | node_executable="hand-pose-estimation",
32 | node_name="hand_pose_estimation",
33 | output="screen",
34 | parameters = [{
35 | 'base_dir':'/home/ak-nv/ros2_ws/src/ros2_gesture_classification/ros2_trt_pose_hand/ros2_trt_pose_hand/input_dir',
36 | 'point_range' : 10,
37 | 'show_image' : False,
38 | 'show_gesture' : True
39 | }],
40 | )
41 | cam2image_node = Node(
42 | package="image_tools",
43 | node_executable="cam2image",
44 | node_name="cam",
45 | )
46 |
47 | rviz_node = Node(
48 | package="rviz2",
49 | node_executable="rviz2",
50 | node_name="rviz2",
51 | arguments=['-d', LaunchConfiguration('rvizconfig')],
52 | )
53 |
54 | return LaunchDescription([
55 | launch.actions.DeclareLaunchArgument(name='rvizconfig', default_value=default_rviz_config_path),
56 | trt_pose_hand_node,
57 | cam2image_node,
58 | rviz_node
59 | ])
60 |
61 |
62 |
63 |
64 |
65 |
--------------------------------------------------------------------------------
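The `rvizconfig` argument declared above lets you override the hard-coded `default_rviz_config_path` at launch time, e.g. `ros2 launch ros2_trt_pose_hand hand-pose-estimation.launch.py rvizconfig:=/absolute/path/to/hand_pose.rviz`. A minimal sketch of resolving the config from the installed package share directory instead (assuming `hand_pose.rviz` is added to `data_files` in `setup.py`, as noted earlier):

```python
import os

from ament_index_python.packages import get_package_share_directory

# Resolve the installed share directory rather than a source-tree path.
pkg_share = get_package_share_directory('ros2_trt_pose_hand')
default_rviz_config_path = os.path.join(pkg_share, 'hand_pose.rviz')
```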
/README.md:
--------------------------------------------------------------------------------
1 | # ros2_trt_pose_hand
2 |
3 | This repository provides a ROS 2 wrapper for [trt_pose_hand](https://github.com/NVIDIA-AI-IOT/trt_pose_hand) for real-time hand pose estimation and gesture classification using TensorRT on the NVIDIA Jetson platform.
4 |
5 |
6 | ## Package outputs:
7 | - Hand Pose message with 21 key-points
8 | - Hand Pose detection image message
9 | - `std_msgs/String` message for gesture classification with 6 classes [fist, pan, stop, fine, peace, no hand]
10 | - Visualization markers
11 | - Launch file for RViz
12 |
13 | ## Requirements:
14 | - ROS 2 Eloquent:
15 | - [Install Instructions](https://index.ros.org/doc/ros2/Installation/Eloquent/Linux-Development-Setup/)
16 | - trt_pose
17 | - [Dependencies for trt_pose](https://github.com/NVIDIA-AI-IOT/trt_pose#step-1---install-dependencies)
18 | - [Install trt_pose](https://github.com/NVIDIA-AI-IOT/trt_pose#step-2---install-trt_pose)
19 | - Gesture Classification
20 | - scikit-learn ```$ pip3 install -U scikit-learn```
21 |
22 | ## Build:
23 | - Clone this repository into your ROS 2 workspace
24 | ```
25 | $ cd ros2_ws/src/
26 | $ git clone https://github.com/NVIDIA-AI-IOT/ros2_trt_pose_hand.git
27 | ```
28 | - Install requirements using ```rosdep```
29 | ```
30 | $ rosdep install --from-paths src --ignore-src --rosdistro eloquent -y
31 | ```
32 | - Build and install the ros2_trt_pose_hand package
33 | ```
34 | $ colcon build
35 | $ source install/local_setup.sh
36 | ```
37 |
38 | ## Run
39 | - Change the power mode of your Jetson
40 |   - ``` sudo nvpmodel -m2 ``` (for Jetson Xavier NX)
41 |   - ``` sudo nvpmodel -m0 ``` (for Jetson Xavier and Jetson Nano)
42 | - Keep the ```trt_pose_hand``` model files in ```base_dir```; it should include:
43 | - Model files for resnet18 [download link](https://drive.google.com/file/d/1NCVo0FiooWccDzY7hCc5MAKaoUpts3mo/view?usp=sharing)
44 | - Hand Pose points [json file]()
45 | - Gesture Classification classes [json file]()
46 | - Gesture Classification model [included in git]()
47 | - Method 1:
48 | - Input images are captured using the ```image_tools``` package
49 | ``` ros2 run image_tools cam2image ```
50 | - Run the ```ros2_trt_pose_hand``` node
51 | ```
52 | $ ros2 run ros2_trt_pose_hand hand-pose-estimation --ros-args -p base_dir:=''
53 | ```
54 | - Visualize markers
55 | ```
56 | $ ros2 run rviz2 rviz2 -d launch/hand_pose.rviz
57 | ```
58 | - Method 2: Use the launch file to start all nodes:
59 |
60 | - Run using Launch file
61 | ```
62 | $ ros2 launch ros2_trt_pose_hand hand-pose-estimation.launch.py
63 | ```
64 | *Note: Update the rviz file location in the launch file* ```launch/hand-pose-estimation.launch.py```
65 |
66 |
67 | - For the following, use a separate terminal for each:
68 | - See Pose message
69 | ```
70 | $ source install/local_setup.sh
71 | $ ros2 run rqt_topic rqt_topic
72 | ```
73 |
74 | ## Other related ROS 2 projects
75 | - [ros2_jetson webpage](https://nvidia-ai-iot.github.io/ros2_jetson/)
76 | - [ros2_trt_pose](https://github.com/NVIDIA-AI-IOT/ros2_trt_pose)
77 | - [ros2_torch_trt](https://github.com/NVIDIA-AI-IOT/ros2_torch_trt) : ROS2 Real Time Classification and Detection
78 | - [ros2_deepstream](https://github.com/NVIDIA-AI-IOT/ros2_deepstream) : ROS2 nodes for DeepStream applications
79 | - [ros2_jetson_stats](https://github.com/NVIDIA-AI-IOT/ros2_jetson_stats) : ROS 2 package for monitoring and controlling NVIDIA Jetson Platform resources
80 |
81 |
--------------------------------------------------------------------------------
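To inspect the node's published topics directly (topic names come from `helper.py`; a usage sketch assuming the nodes from the README are running):

```
$ ros2 topic echo /gesture_class
$ ros2 topic echo /hand_pose_msgs
```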
/giistr-cla.md:
--------------------------------------------------------------------------------
1 | ## Individual Contributor License Agreement (CLA)
2 |
3 | **Thank you for submitting your contributions to this project.**
4 |
5 | By signing this CLA, you agree that the following terms apply to all of your past, present and future contributions
6 | to the project.
7 |
8 | ### License.
9 |
10 | You hereby represent that all present, past and future contributions are governed by the
11 | [MIT License](https://opensource.org/licenses/MIT)
12 | copyright statement.
13 |
14 | This entails that to the extent possible under law, you transfer all copyright and related or neighboring rights
15 | of the code or documents you contribute to the project itself or its maintainers.
16 | Furthermore you also represent that you have the authority to perform the above waiver
17 | with respect to the entirety of your contributions.
18 |
19 | ### Moral Rights.
20 |
21 | To the fullest extent permitted under applicable law, you hereby waive, and agree not to
22 | assert, all of your “moral rights” in or relating to your contributions for the benefit of the project.
23 |
24 | ### Third Party Content.
25 |
26 | If your Contribution includes or is based on any source code, object code, bug fixes, configuration changes, tools,
27 | specifications, documentation, data, materials, feedback, information or other works of authorship that were not
28 | authored by you (“Third Party Content”) or if you are aware of any third party intellectual property or proprietary
29 | rights associated with your Contribution (“Third Party Rights”),
30 | then you agree to include with the submission of your Contribution full details respecting such Third Party
31 | Content and Third Party Rights, including, without limitation, identification of which aspects of your
32 | Contribution contain Third Party Content or are associated with Third Party Rights, the owner/author of the
33 | Third Party Content and Third Party Rights, where you obtained the Third Party Content, and any applicable
34 | third party license terms or restrictions respecting the Third Party Content and Third Party Rights. For greater
35 | certainty, the foregoing obligations respecting the identification of Third Party Content and Third Party Rights
36 | do not apply to any portion of a Project that is incorporated into your Contribution to that same Project.
37 |
38 | ### Representations.
39 |
40 | You represent that, other than the Third Party Content and Third Party Rights identified by
41 | you in accordance with this Agreement, you are the sole author of your Contributions and are legally entitled
42 | to grant the foregoing licenses and waivers in respect of your Contributions. If your Contributions were
43 | created in the course of your employment with your past or present employer(s), you represent that such
44 | employer(s) has authorized you to make your Contributions on behalf of such employer(s) or such employer
45 | (s) has waived all of their right, title or interest in or to your Contributions.
46 |
47 | ### Disclaimer.
48 |
49 | To the fullest extent permitted under applicable law, your Contributions are provided on an "as is"
50 | basis, without any warranties or conditions, express or implied, including, without limitation, any implied
51 | warranties or conditions of non-infringement, merchantability or fitness for a particular purpose. You are not
52 | required to provide support for your Contributions, except to the extent you desire to provide support.
53 |
54 | ### No Obligation.
55 |
56 | You acknowledge that the maintainers of this project are under no obligation to use or incorporate your contributions
57 | into the project. The decision to use or incorporate your contributions into the project will be made at the
58 | sole discretion of the maintainers or their authorized delegates.
59 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/ros2_trt_pose_hand/preprocessdata.py:
--------------------------------------------------------------------------------
1 | # ---------------------------------------------------------------------------------------
2 | # Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
3 | # Permission is hereby granted, free of charge, to any person obtaining
4 | # a copy of this software and associated documentation files (the "Software"),
5 | # to deal in the Software without restriction, including without limitation
6 | # the rights to use, copy, modify, merge, publish, distribute, sublicense,
7 | # and/or sell copies of the Software, and to permit persons to whom the
8 | # Software is furnished to do so, subject to the following conditions:
9 | # The above copyright notice and this permission notice shall be included
10 | # in all copies or substantial portions of the Software.
11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
13 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
14 | # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
15 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 | # ---------------------------------------------------------------------------------------
17 | import math
18 | import pickle
19 | import cv2
20 |
21 |
22 | class preprocessdata:
23 |
24 | def __init__(self, topology, num_parts):
25 | self.joints = []
26 | self.dist_bn_joints = []
27 | self.topology = topology
28 | self.num_parts = num_parts
29 | self.text = "no hand"
30 | self.num_frames = 4
31 | self.prev_queue = [self.num_frames] * self.num_frames
32 |
33 | def svm_accuracy(self, test_predicted, labels_test):
34 | """"
35 | This method calculates the accuracy of the model
36 | Input: test_predicted - predicted test classes
37 | labels_test
38 | Output: accuracy - of the model
39 | """
40 | predicted = []
41 | for i in range(len(labels_test)):
42 | if labels_test[i] == test_predicted[i]:
43 | predicted.append(0)
44 | else:
45 | predicted.append(1)
46 | accuracy = 1 - sum(predicted) / len(labels_test)
47 | return accuracy
48 |
49 | def trainsvm(self, clf, train_data, test_data, labels_train, labels_test):
50 | """
51 | This method trains the different gestures
52 | Input: clf - Sk-learn model pipeline to train, You can choose an SVM, linear regression, etc
53 | train_data - preprocessed training image data -in this case the distance between the joints
54 | test_data - preprocessed testing image data -in this case the distance between the joints
55 | labels_train - labels for training images
56 | labels_test - labels for testing images
57 | Output: trained model, predicted_test_classes
58 | """
59 | clf.fit(train_data, labels_train)
60 | predicted_test = clf.predict(test_data)
61 | return clf, predicted_test
62 | # def loadsvmweights():
63 |
64 | def joints_inference(self, image, counts, objects, peaks):
65 | """
66 | This method returns predicted joints from an image/frame
67 | Input: image, counts, objects, peaks
68 | Output: predicted joints
69 | """
70 | joints_t = []
71 | height = image.shape[0]
72 | width = image.shape[1]
73 | K = self.topology.shape[0]
74 | count = int(counts[0])
75 | for i in range(count):
76 | obj = objects[0][i]
77 | C = obj.shape[0]
78 | for j in range(C):
79 | k = int(obj[j])
80 | picked_peaks = peaks[0][j][k]
81 | joints_t.append([round(float(picked_peaks[1]) * width), round(float(picked_peaks[0]) * height)])
82 | joints_pt = joints_t[:self.num_parts]
83 | rest_of_joints_t = joints_t[self.num_parts:]
84 |
85 | # when it does not predict a particular joint in the same association it will try to find it in a different association
86 | for i in range(len(rest_of_joints_t)):
87 | l = i % self.num_parts
88 | if joints_pt[l] == [0, 0]:
89 | joints_pt[l] = rest_of_joints_t[i]
90 |
91 | # if nothing is predicted
92 | if count == 0:
93 | joints_pt = [[0, 0]] * self.num_parts
94 | return joints_pt
95 |
96 | def find_distance(self, joints):
97 | """
98 | This method finds the distance between each joints
99 | Input: a list that contains the [x,y] positions of the 21 joints
100 | Output: a list that contains the distance between the joints
101 | """
102 | joints_features = []
103 | for i in joints:
104 | for j in joints:
105 | dist_between_i_j = math.sqrt((i[0] - j[0]) ** 2 + (i[1] - j[1]) ** 2)
106 | joints_features.append(dist_between_i_j)
107 | return joints_features
108 |
109 | def print_label(self, gesture_joints, gesture_type):
110 | """
111 | This method prints the gesture class detected.
112 | Example: in case of the cursor control application, it shows whether your gesture is a click or another type of gesture.
113 |
114 | """
115 | if self.prev_queue == [1] * self.num_frames:
116 | self.text = gesture_type[0]
117 | elif self.prev_queue == [2] * self.num_frames:
118 | self.text = gesture_type[1]
119 | elif self.prev_queue == [3] * self.num_frames:
120 | self.text = gesture_type[2]
121 | elif self.prev_queue == [4] * self.num_frames:
122 | self.text = gesture_type[3]
123 | elif self.prev_queue == [5] * self.num_frames:
124 | self.text = gesture_type[4]
125 | elif self.prev_queue == [6] * self.num_frames:
126 | self.text = gesture_type[5]
127 | elif self.prev_queue == [7] * self.num_frames:
128 | self.text = gesture_type[6]
129 | return self.text
130 |
--------------------------------------------------------------------------------
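A minimal sketch of the feature pipeline this class implements, mirroring `parse_gesture()` in `helper.py`: the 21 `[x, y]` joints become a 21×21 vector of pairwise distances, which is the input the pickled SVM in `input_dir/svmmodel.sav` expects:

```python
from ros2_trt_pose_hand.preprocessdata import preprocessdata

num_parts = 21
# topology is only needed by joints_inference(); None is fine for find_distance().
pre = preprocessdata(topology=None, num_parts=num_parts)

# Hypothetical joint positions for a single frame.
joints = [[100 + 5 * i, 100] for i in range(num_parts)]
features = pre.find_distance(joints)
assert len(features) == num_parts * num_parts  # 441 pairwise distances
# parse_gesture() then calls: clf.predict([features, [0] * num_parts * num_parts])
```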
/ros2_trt_pose_hand/launch/hand_pose.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz_common/Displays
3 | Help Height: 78
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | - /Status1
9 | Splitter Ratio: 0.5
10 | Tree Height: 596
11 | - Class: rviz_common/Selection
12 | Name: Selection
13 | - Class: rviz_common/Tool Properties
14 | Expanded:
15 | - /2D Goal Pose1
16 | - /Publish Point1
17 | Name: Tool Properties
18 | Splitter Ratio: 0.5886790156364441
19 | - Class: rviz_common/Views
20 | Expanded:
21 | - /Current View1
22 | Name: Views
23 | Splitter Ratio: 0.5
24 | Visualization Manager:
25 | Class: ""
26 | Displays:
27 | - Alpha: 0.5
28 | Cell Size: 1
29 | Class: rviz_default_plugins/Grid
30 | Color: 160; 160; 164
31 | Enabled: true
32 | Line Style:
33 | Line Width: 0.029999999329447746
34 | Value: Lines
35 | Name: Grid
36 | Normal Cell Count: 0
37 | Offset:
38 | X: 0
39 | Y: 0
40 | Z: 0
41 | Plane: XY
42 | Plane Cell Count: 10
43 | Reference Frame:
44 | Value: true
45 | - Class: rviz_default_plugins/Image
46 | Enabled: true
47 | Max Value: 1
48 | Median window: 5
49 | Min Value: 0
50 | Name: Image
51 | Normalize Range: true
52 | Topic:
53 | Depth: 5
54 | Durability Policy: Volatile
55 | History Policy: Keep Last
56 | Reliability Policy: Reliable
57 | Value: /detections_image
58 | Value: true
59 | - Class: rviz_default_plugins/Marker
60 | Enabled: true
61 | Name: Marker
62 | Namespaces:
63 | joints: true
64 | Topic:
65 | Depth: 5
66 | Durability Policy: Volatile
67 | History Policy: Keep Last
68 | Reliability Policy: Reliable
69 | Value: /hand_joints
70 | Value: true
71 | - Class: rviz_default_plugins/Marker
72 | Enabled: true
73 | Name: Marker
74 | Namespaces:
75 | joint_line: true
76 | Topic:
77 | Depth: 5
78 | Durability Policy: Volatile
79 | History Policy: Keep Last
80 | Reliability Policy: Reliable
81 | Value: /hand_skeleton
82 | Value: true
83 | Enabled: true
84 | Global Options:
85 | Background Color: 48; 48; 48
86 | Fixed Frame: map
87 | Frame Rate: 30
88 | Name: root
89 | Tools:
90 | - Class: rviz_default_plugins/Interact
91 | Hide Inactive Objects: true
92 | - Class: rviz_default_plugins/MoveCamera
93 | - Class: rviz_default_plugins/Select
94 | - Class: rviz_default_plugins/FocusCamera
95 | - Class: rviz_default_plugins/Measure
96 | Line color: 128; 128; 0
97 | - Class: rviz_default_plugins/SetInitialPose
98 | Topic:
99 | Depth: 5
100 | Durability Policy: Volatile
101 | History Policy: Keep Last
102 | Reliability Policy: Reliable
103 | Value: /initialpose
104 | - Class: rviz_default_plugins/SetGoal
105 | Topic:
106 | Depth: 5
107 | Durability Policy: Volatile
108 | History Policy: Keep Last
109 | Reliability Policy: Reliable
110 | Value: /goal_pose
111 | - Class: rviz_default_plugins/PublishPoint
112 | Single click: true
113 | Topic:
114 | Depth: 5
115 | Durability Policy: Volatile
116 | History Policy: Keep Last
117 | Reliability Policy: Reliable
118 | Value: /clicked_point
119 | Transformation:
120 | Current:
121 | Class: rviz_default_plugins/TF
122 | Value: true
123 | Views:
124 | Current:
125 | Class: rviz_default_plugins/Orbit
126 | Distance: 48.87112045288086
127 | Enable Stereo Rendering:
128 | Stereo Eye Separation: 0.05999999865889549
129 | Stereo Focal Distance: 1
130 | Swap Stereo Eyes: false
131 | Value: false
132 | Focal Point:
133 | X: 0
134 | Y: 0
135 | Z: 0
136 | Focal Shape Fixed Size: true
137 | Focal Shape Size: 0.05000000074505806
138 | Invert Z Axis: false
139 | Name: Current View
140 | Near Clip Distance: 0.009999999776482582
141 | Pitch: 0.785398006439209
142 | Target Frame:
143 | Value: Orbit (rviz)
144 | Yaw: 0.785398006439209
145 | Saved: ~
146 | Window Geometry:
147 | Displays:
148 | collapsed: false
149 | Height: 1056
150 | Hide Left Dock: false
151 | Hide Right Dock: false
152 | Image:
153 | collapsed: false
154 | QMainWindow State: 000000ff00000000fd000000040000000000000156000003dcfc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000027000002df000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d006100670065010000030c000000f70000002800ffffff000000010000010f000003dcfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a005600690065007700730100000027000003dc000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004420000003efc0100000002fb0000000800540069006d00650100000000000004420000000000000000fb0000000800540069006d00650100000000000004500000000000000000000004ce000003dc00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
155 | Selection:
156 | collapsed: false
157 | Tool Properties:
158 | collapsed: false
159 | Views:
160 | collapsed: false
161 | Width: 1855
162 | X: 65
163 | Y: 24
164 |
--------------------------------------------------------------------------------
/ros2_trt_pose_hand/ros2_trt_pose_hand/utils.py:
--------------------------------------------------------------------------------
1 | # ---------------------------------------------------------------------------------------
2 | # Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
3 | # Permission is hereby granted, free of charge, to any person obtaining
4 | # a copy of this software and associated documentation files (the "Software"),
5 | # to deal in the Software without restriction, including without limitation
6 | # the rights to use, copy, modify, merge, publish, distribute, sublicense,
7 | # and/or sell copies of the Software, and to permit persons to whom the
8 | # Software is furnished to do so, subject to the following conditions:
9 | # The above copyright notice and this permission notice shall be included
10 | # in all copies or substantial portions of the Software.
11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
13 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
14 | # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
15 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 | # ---------------------------------------------------------------------------------------
17 |
18 | import json
19 | import cv2
20 | import matplotlib.pyplot as plt
21 | import matplotlib.image as mpimg
22 | import math
23 | import os
24 | import numpy as np
25 | import pickle
26 | import trt_pose.coco
27 | import trt_pose.models
28 | from trt_pose.parse_objects import ParseObjects
29 |
30 | import torch
31 | import torch2trt
32 | from torch2trt import TRTModule
33 |
34 | import torchvision.transforms as transforms
35 | import PIL.Image
36 | from ros2_trt_pose_hand.preprocessdata import preprocessdata
37 |
38 | from sklearn.pipeline import make_pipeline
39 | from sklearn.preprocessing import StandardScaler
40 | from sklearn.svm import SVC
41 |
42 | # Pre-process image message received from cam2image
43 | def preprocess(image, width, height):
44 | global device
45 | mean = torch.Tensor([0.485, 0.456, 0.406]).cuda()
46 | std = torch.Tensor([0.229, 0.224, 0.225]).cuda()
47 | device = torch.device('cuda')
48 | image = cv2.resize(image, (width, height))
49 | image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
50 | image = PIL.Image.fromarray(image)
51 | image = transforms.functional.to_tensor(image).to(device)
52 | image.sub_(mean[:, None, None]).div_(std[:, None, None])
53 | return image[None, ...]
54 |
55 | def load_image(base_dir, image_name):
56 | frame = cv2.imread(os.path.join(base_dir, image_name))
57 | image = cv2.resize(frame, (224, 224))
58 | print("Image {} Loaded".format(image_name))
59 | return image
60 |
61 |
62 | def load_params(base_dir, hand_pose_json):
63 | with open(hand_pose_json, 'r') as f:
64 | hand_pose = json.load(f)
65 |
66 | topology = trt_pose.coco.coco_category_to_topology(hand_pose)
67 | num_parts = len(hand_pose['keypoints'])
68 | num_links = len(hand_pose['skeleton'])
69 | #hand_pose_skeleton = hand_pose['skeleton']
70 | parse_objects = ParseObjects(topology, cmap_threshold=0.15, link_threshold=0.15)
71 | MODEL_WEIGHTS = 'hand_pose_resnet18_att_244_244.pth'
72 | model_weights = os.path.join(base_dir, MODEL_WEIGHTS)
73 |
74 |
75 | return num_parts, num_links, model_weights, parse_objects, topology
76 |
77 | def load_model(base_dir, num_parts, num_links, model_weights, height, width):
78 | model = trt_pose.models.resnet18_baseline_att(num_parts, 2 * num_links).cuda().eval()
79 | model.load_state_dict(torch.load(model_weights))
80 | # Note: the raw weights path is resolved in load_params(); only the TRT-optimized file is handled below.
81 | OPTIMIZED_MODEL = 'hand_pose_resnet18_att_244_244_trt.pth'
82 | model_file_path = os.path.join(base_dir, OPTIMIZED_MODEL)
83 | if not os.path.isfile(model_file_path):
84 | data = torch.zeros((1, 3, height, width)).cuda()
85 | model_trt = torch2trt.torch2trt(model, [data], fp16_mode=True, max_workspace_size=1 << 25)
86 | torch.save(model_trt.state_dict(), model_file_path)
87 | model_trt = TRTModule()
88 | model_trt.load_state_dict(torch.load(model_file_path))
89 | return model_trt
90 |
91 | def load_svm(base_dir):
92 | clf = make_pipeline(StandardScaler(), SVC(gamma='auto', kernel='rbf'))
93 | svm_train = False  # set True to retrain; the training branch expects data (joints_train, labels_train, ...) not shipped with this repo
94 | if svm_train:
95 | clf, predicted = preprocessdata.trainsvm(clf, joints_train, joints_test, hand.labels_train, hand.labels_test)
96 | filename = os.path.join(base_dir, 'svmmodel.sav')
97 | print("Filename:{}".format(filename))
98 | pickle.dump(clf, open(filename, 'wb'))
99 | else:
100 | filename = os.path.join(base_dir, 'svmmodel.sav')
101 | print("Filename:{}".format(filename))
102 | with open(filename, 'rb') as f:
103 | clf = pickle.load(f)
104 | return clf
105 |
106 | def draw_objects(image, object_counts, objects, normalized_peaks, topology):
107 | topology = topology
108 | height = image.shape[0]
109 | width = image.shape[1]
110 | count = int(object_counts[0])
111 | K = topology.shape[0]
112 | for i in range(count):
113 | color = (0, 255, 0)
114 | obj = objects[0][i]
115 | C = obj.shape[0]
116 | for j in range(C):
117 | k = int(obj[j])
118 | if k >= 0:
119 | peak = normalized_peaks[0][j][k]
120 | x = round(float(peak[1]) * width)
121 | y = round(float(peak[0]) * height)
122 | cv2.circle(image, (x, y), 3, color, 2)
123 |
124 | for k in range(K):
125 | c_a = topology[k][2]
126 | c_b = topology[k][3]
127 | if obj[c_a] >= 0 and obj[c_b] >= 0:
128 | peak0 = normalized_peaks[0][c_a][obj[c_a]]
129 | peak1 = normalized_peaks[0][c_b][obj[c_b]]
130 | x0 = round(float(peak0[1]) * width)
131 | y0 = round(float(peak0[0]) * height)
132 | x1 = round(float(peak1[1]) * width)
133 | y1 = round(float(peak1[0]) * height)
134 | cv2.line(image, (x0, y0), (x1, y1), color, 2)
135 |
136 | return image
137 |
138 | def draw_joints(image, joints, hand_pose_skeleton):
139 | count = 0
140 | for i in joints:
141 | if i==[0,0]:
142 | count+=1
143 | '''
144 | if count>= 3:
145 | return
146 | '''
147 | for i in joints:
148 | cv2.circle(image, (i[0],i[1]), 2, (0,0,255), 1)
149 | cv2.circle(image, (joints[0][0],joints[0][1]), 2, (255,0,255), 1)
150 | for i in hand_pose_skeleton:
151 | if joints[i[0]-1][0]==0 or joints[i[1]-1][0] == 0:
152 | break
153 | cv2.line(image, (joints[i[0]-1][0],joints[i[0]-1][1]), (joints[i[1]-1][0],joints[i[1]-1][1]), (0,255,0), 1)
154 |
155 |
156 | return image
157 |
158 |
--------------------------------------------------------------------------------
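A minimal offline sketch of the inference path these helpers form, i.e. what `TRTHandPose.execute()` in `helper.py` runs per frame (assumes a CUDA-capable Jetson and that `base_dir` holds the model weights and json files):

```python
import os

import cv2

from ros2_trt_pose_hand.utils import load_model, load_params, preprocess

base_dir = os.path.join(os.getenv('HOME'), 'gesture_models')  # hypothetical location
num_parts, num_links, model_weights, parse_objects, topology = load_params(
    base_dir=base_dir, hand_pose_json=os.path.join(base_dir, 'hand_pose.json'))
model = load_model(base_dir, num_parts, num_links, model_weights, 224, 224)

frame = cv2.imread('hand.jpg')  # hypothetical test image; preprocess() resizes it
data = preprocess(image=frame, width=224, height=224)
cmap, paf = model(data)
counts, objects, peaks = parse_objects(cmap.detach().cpu(), paf.detach().cpu())
print('hands detected:', int(counts[0]))
```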
/ros2_trt_pose_hand/ros2_trt_pose_hand/helper.py:
--------------------------------------------------------------------------------
1 | # ---------------------------------------------------------------------------------------
2 | # Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
3 | # Permission is hereby granted, free of charge, to any person obtaining
4 | # a copy of this software and associated documentation files (the "Software"),
5 | # to deal in the Software without restriction, including without limitation
6 | # the rights to use, copy, modify, merge, publish, distribute, sublicense,
7 | # and/or sell copies of the Software, and to permit persons to whom the
8 | # Software is furnished to do so, subject to the following conditions:
9 | # The above copyright notice and this permission notice shall be included
10 | # in all copies or substantial portions of the Software.
11 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
12 | # INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
13 | # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
14 | # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
15 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 | # ---------------------------------------------------------------------------------------
17 | # TRT_pose related
18 | import cv2
19 | import os
20 | import json
21 | import numpy as np
22 | import math
23 | from ros2_trt_pose_hand.utils import preprocess, load_params, load_model, draw_objects, draw_joints, load_image
24 | from ros2_trt_pose_hand.utils import load_svm
25 | from ros2_trt_pose_hand.preprocessdata import preprocessdata
26 | # Gesture Classification
27 | from sklearn.pipeline import make_pipeline
28 | from sklearn.preprocessing import StandardScaler
29 | from sklearn.svm import SVC
30 | # ROS2 related
31 | import rclpy
32 | from rclpy.node import Node
33 | from sensor_msgs.msg import Image as ImageMsg
34 | from visualization_msgs.msg import Marker
35 | from geometry_msgs.msg import Point
36 | from hand_pose_msgs.msg import FingerPoints, HandPoseDetection # For pose_msgs
37 | from rclpy.duration import Duration
38 | from std_msgs.msg import String
39 |
40 | class TRTHandPose(Node):
41 | def __init__(self):
42 | super().__init__('trt_pose_hand')
43 | self.height = 224
44 | self.width = 224
45 | self.num_parts = None
46 | self.num_links = None
47 | self.model_weights = None
48 | self.parse_objects = None
49 | self.counts = None
50 | self.peaks = None
51 | self.objects = None
52 | self.topology = None
53 | self.hand_pose_skeleton = None
54 | self.model = None
55 | self.clf = None # SVM Model
56 | self.preprocessdata = None
57 | self.image = None
58 | self.gesture_type = None
59 | # ROS2 parameters
60 | self.declare_parameter('base_dir', os.path.join(os.getenv("HOME"), 'gesture_models'))
61 | # Based Dir should contain: model_file resnet/densenet, human_pose json file
62 | self.declare_parameter('point_range', 10) # default range is 0 to 10
63 | self.declare_parameter('show_image', True) # Show image in cv2.imshow
64 | self.declare_parameter('show_gesture', True) # Shows gestures (fist, pan, stop, fine, peace, no hand).
65 | self.base_dir = self.get_parameter('base_dir')._value
66 | self.json_file = os.path.join(self.base_dir, 'hand_pose.json')
67 | self.point_range = self.get_parameter('point_range')._value
68 | self.show_image_param = self.get_parameter('show_image')._value
69 | self.show_gesture_param = self.get_parameter('show_gesture')._value
70 | # ROS2 related init
71 | # Image subscriber from cam2image
72 | self.subscriber_ = self.create_subscription(ImageMsg, 'image', self.read_cam_callback, 10)
73 | self.image_pub = self.create_publisher(ImageMsg, 'detections_image', 10)
74 | # Publisher for Body Joints and Skeleton
75 | self.hand_joints_pub = self.create_publisher(Marker, 'hand_joints', 1000)
76 | self.hand_skeleton_pub = self.create_publisher(Marker, 'hand_skeleton', 10)
77 | # Publishing pose Message
78 | self.publish_pose = self.create_publisher(HandPoseDetection, 'hand_pose_msgs', 100)
79 | # Publishing gesture classification
80 | self.publish_gesture = self.create_publisher(String, "gesture_class", 100)
81 |
82 | def start(self):
83 | self.get_logger().info("Loading Parameters\n")
84 | self.num_parts, self.num_links, self.model_weights, self.parse_objects, self.topology = load_params(base_dir=self.base_dir, hand_pose_json=self.json_file)
85 | with open(os.path.join(self.base_dir,'gesture.json'), 'r') as f:
86 | gesture = json.load(f)
87 | self.gesture_type = gesture["classes"]
88 | self.get_logger().info("Loading model weights\n")
89 | self.clf = load_svm(base_dir=self.base_dir)
90 | self.model = load_model(self.base_dir, self.num_parts, self.num_links, self.model_weights, self.height, self.width)
91 | self.get_logger().info("Model weights loaded...\n Waiting for images...\n")
92 | self.preprocessdata = preprocessdata(self.topology, self.num_parts)
93 |
94 |
95 | def execute(self):
96 | data = preprocess(image=self.image, width=self.width, height=self.height)
97 | cmap, paf = self.model(data)
98 | cmap, paf = cmap.detach().cpu(), paf.detach().cpu()
99 | self.counts, self.objects, self.peaks = self.parse_objects(cmap, paf)
100 | joints = self.preprocessdata.joints_inference(self.image, self.counts, self.objects, self.peaks)
101 | with open(self.json_file, 'r') as f:
102 | hand_pose = json.load(f)
103 | hand_pose_skeleton = hand_pose['skeleton']
104 | annotated_image = draw_joints(self.image, joints, hand_pose_skeleton)
105 | # cv2.imwrite('unname_pose.jpg', annotated_image)
106 |
107 | self.parse_k()
108 | print("Show gesture Parameter:{}".format(self.show_gesture_param))
109 | if self.show_gesture_param:
110 | self.parse_gesture(joints=joints)
111 | return annotated_image
112 |
113 | # Borrowed from OpenPose-ROS repo
114 | def image_np_to_image_msg(self, image_np):
115 | image_msg = ImageMsg()
116 | image_msg.height = image_np.shape[0]
117 | image_msg.width = image_np.shape[1]
118 | image_msg.encoding = 'bgr8'
119 | image_msg.data = image_np.tobytes()  # tostring() is a deprecated alias of tobytes()
120 | image_msg.step = len(image_msg.data) // image_msg.height
121 | image_msg.header.frame_id = 'map'
122 | return image_msg
123 |
124 | def init_finger_points_msg(self):
125 | finger_points = FingerPoints()
126 | finger_points.x = float('NaN')
127 | finger_points.y = float('NaN')
128 | finger_points.confidence = float('NaN')
129 | return finger_points
130 |
131 | # Subscribe and Publish to image topic
132 | def read_cam_callback(self, msg):
133 | img = np.asarray(msg.data)
134 | self.image = np.reshape(img, (msg.height, msg.width, 3))
135 | self.annotated_image = self.execute()
136 |
137 | #image_msg = self.image_np_to_image_msg(self.image)
138 | image_msg = self.image_np_to_image_msg(self.annotated_image)
139 | self.image_pub.publish(image_msg)
140 | if self.show_image_param:
141 | cv2.imshow('frame', self.annotated_image)
142 | cv2.waitKey(1)
143 |
144 | def write_finger_points_msg(self, pixel_location):
145 | finger_points = FingerPoints()
146 | finger_points.y = float(pixel_location[0] * self.height)
147 | finger_points.x = float(pixel_location[1] * self.width)
148 | return finger_points
149 |
150 | def parse_gesture(self, joints):
151 | dist_bn_joints = self.preprocessdata.find_distance(joints)
152 | gesture = self.clf.predict([dist_bn_joints,[0]*self.num_parts*self.num_parts])
153 | gesture_joints = gesture[0]
154 | self.preprocessdata.prev_queue.append(gesture_joints)
155 | self.preprocessdata.prev_queue.pop(0)
156 | gesture_label = self.preprocessdata.print_label(self.preprocessdata.prev_queue, self.gesture_type)
157 | msg = String()
158 | msg.data = gesture_label
159 | self.publish_gesture.publish(msg)
160 | self.get_logger().info('Hand Pose Classified as: "%s"' % msg.data)
161 |
162 | def init_markers_spheres(self):
163 | marker_joints = Marker()
164 | marker_joints.header.frame_id = '/map'
165 | marker_joints.id = 1
166 | marker_joints.ns = "joints"
167 | marker_joints.type = marker_joints.SPHERE_LIST
168 | marker_joints.action = marker_joints.ADD
169 | marker_joints.scale.x = 0.7
170 | marker_joints.scale.y = 0.7
171 | marker_joints.scale.z = 0.7
172 | marker_joints.color.a = 1.0
173 | marker_joints.color.r = 1.0
174 | marker_joints.color.g = 0.0
175 | marker_joints.color.b = 0.0
176 | marker_joints.lifetime = Duration(seconds=3, nanoseconds=5e2).to_msg()
177 | return marker_joints
178 |
179 | def init_markers_lines(self):
180 | marker_line = Marker()
181 | marker_line.header.frame_id = '/map'
182 | marker_line.id = 1
183 | marker_line.ns = "joint_line"
184 | marker_line.header.stamp = self.get_clock().now().to_msg()
185 | marker_line.type = marker_line.LINE_LIST
186 | marker_line.action = marker_line.ADD
187 | marker_line.scale.x = 0.1
188 | marker_line.scale.y = 0.1
189 | marker_line.scale.z = 0.1
190 | marker_line.color.a = 1.0
191 | marker_line.color.r = 0.0
192 | marker_line.color.g = 1.0
193 | marker_line.color.b = 0.0
194 | marker_line.lifetime = Duration(seconds=3, nanoseconds=5e2).to_msg()
195 | return marker_line
196 |
197 | def init_all_hand_msgs(self, _msg, count):
198 | _msg.hand_id = count
199 | _msg.palm = self.init_finger_points_msg()
200 | _msg.thumb_1 = self.init_finger_points_msg()
201 | _msg.thumb_2 = self.init_finger_points_msg()
202 | _msg.thumb_3 = self.init_finger_points_msg()
203 | _msg.thumb_4 = self.init_finger_points_msg()
204 | _msg.index_finger_1 = self.init_finger_points_msg()
205 | _msg.index_finger_2 = self.init_finger_points_msg()
206 | _msg.index_finger_3 = self.init_finger_points_msg()
207 | _msg.index_finger_4 = self.init_finger_points_msg()
208 | _msg.middle_finger_1 = self.init_finger_points_msg()
209 | _msg.middle_finger_2 = self.init_finger_points_msg()
210 | _msg.middle_finger_3 = self.init_finger_points_msg()
211 | _msg.middle_finger_4 = self.init_finger_points_msg()
212 | _msg.ring_finger_1 = self.init_finger_points_msg()
213 | _msg.ring_finger_2 = self.init_finger_points_msg()
214 | _msg.ring_finger_3 = self.init_finger_points_msg()
215 | _msg.ring_finger_4 = self.init_finger_points_msg()
216 | _msg.baby_finger_1 = self.init_finger_points_msg()
217 | _msg.baby_finger_2 = self.init_finger_points_msg()
218 | _msg.baby_finger_3 = self.init_finger_points_msg()
219 | _msg.baby_finger_4 = self.init_finger_points_msg()
220 | return _msg
221 |
222 | def add_point_to_marker(self, finger_points_msg):
223 | p = Point()
224 | p.x = float((finger_points_msg.x / self.width) * self.point_range)
225 | p.y = float((finger_points_msg.y / self.height) * self.point_range)
226 | p.z = 0.0
227 | return p
228 |
229 | def valid_marker_point(self, finger_points_msg):
230 | if math.isnan(finger_points_msg.x) or math.isnan(finger_points_msg.y):
231 | return False
232 | return True
233 |
238 |     def parse_k(self):
239 |         image_idx = 0
240 |         # Keypoint order mirrors hand_pose.json: the palm first, then four
241 |         # joints per finger (thumb, index, middle, ring, baby).
242 |         keypoint_names = ['palm'] + ['{}_{}'.format(finger, joint)
243 |                                      for finger in ('thumb', 'index_finger',
244 |                                                     'middle_finger',
245 |                                                     'ring_finger', 'baby_finger')
246 |                                      for joint in range(1, 5)]
247 |         try:
248 |             count = int(self.counts[image_idx])
249 |             primary_msg = HandPoseDetection()
250 |             for i in range(count):
251 |                 primary_msg.hand_id = i
252 |                 primary_msg = self.init_all_hand_msgs(_msg=primary_msg, count=i)
253 |                 marker_joints = self.init_markers_spheres()
254 |                 marker_skeleton = self.init_markers_lines()
255 |                 for k in range(21):
256 |                     _idx = self.objects[image_idx, i, k]
257 |                     if _idx < 0:
258 |                         continue
259 |                     _location = self.peaks[image_idx, k, _idx, :]
260 |                     name = keypoint_names[k]
261 |                     point_msg = self.write_finger_points_msg(_location)
262 |                     setattr(primary_msg, name, point_msg)
263 |                     marker_joints.points.append(self.add_point_to_marker(point_msg))
264 |                     self.get_logger().info(
265 |                         "Finger Point Detected: {} at X:{}, Y:{}".format(
266 |                             name, point_msg.x, point_msg.y))
267 |                     # Joints 2-4 of a finger connect back to the previous joint,
268 |                     # matching the "skeleton" pairs in hand_pose.json.
269 |                     if k > 1 and k % 4 != 1:
270 |                         prev_msg = getattr(primary_msg, keypoint_names[k - 1])
271 |                         if self.valid_marker_point(prev_msg):
272 |                             marker_skeleton.points.append(self.add_point_to_marker(prev_msg))
273 |                             marker_skeleton.points.append(self.add_point_to_marker(point_msg))
274 |                     # The outermost joint of each finger also connects to the palm.
275 |                     if k > 0 and k % 4 == 0 and self.valid_marker_point(primary_msg.palm):
276 |                         marker_skeleton.points.append(self.add_point_to_marker(primary_msg.palm))
277 |                         marker_skeleton.points.append(self.add_point_to_marker(point_msg))
278 |                 self.publish_pose.publish(primary_msg)
279 |                 self.hand_skeleton_pub.publish(marker_skeleton)
280 |                 self.hand_joints_pub.publish(marker_joints)
281 |
282 |         except Exception as err:
283 |             # Keep the node alive on frames that fail to parse, but log the
284 |             # error instead of silently swallowing it with a bare `except: pass`.
285 |             self.get_logger().warn("parse_k: could not parse detections: {}".format(err))
286 |
--------------------------------------------------------------------------------