├── .gitignore ├── CMakeLists.txt ├── Dockerfile ├── LICENSE ├── README.md ├── environment.yml ├── evaluation ├── evaluate_DREAM.ipynb └── evaluate_baxter_dataset.ipynb ├── imageloaders ├── DREAM.py └── baxter.py ├── images ├── baxter.png └── panda.jpg ├── inference_single_image.ipynb ├── models ├── BPnP.py ├── CtRNet.py ├── keypoint_seg_resnet.py ├── mesh_renderer.py └── robot_arm.py ├── package.xml ├── ros_nodes ├── baxter_leftarm_pose.py └── panda_pose.py ├── setup.py ├── train.py ├── urdfs ├── Baxter │ ├── E0 │ │ ├── E0.obj │ │ └── material.lib │ ├── E1 │ │ ├── E1.obj │ │ └── material.lib │ ├── S0 │ │ ├── S0.obj │ │ └── material.lib │ ├── S1 │ │ ├── S1.obj │ │ └── material.lib │ ├── W0 │ │ ├── W0.obj │ │ └── material.lib │ ├── W1 │ │ ├── W1.obj │ │ └── material.lib │ ├── W2 │ │ ├── W2.obj │ │ └── material.lib │ └── baxter_description │ │ ├── CMakeLists.txt │ │ ├── meshes │ │ ├── base │ │ │ ├── PEDESTAL.DAE │ │ │ ├── PEDESTAL.STL │ │ │ ├── pedestal_link_collision.DAE │ │ │ └── pedestal_link_collision.STL │ │ ├── head │ │ │ ├── H0.DAE │ │ │ ├── H0.STL │ │ │ ├── H1.DAE │ │ │ └── H1.STL │ │ ├── lower_elbow │ │ │ ├── E1.DAE │ │ │ └── E1.STL │ │ ├── lower_forearm │ │ │ ├── W1.DAE │ │ │ └── W1.STL │ │ ├── lower_shoulder │ │ │ ├── S1.DAE │ │ │ └── S1.STL │ │ ├── torso │ │ │ ├── base_link.DAE │ │ │ ├── base_link.STL │ │ │ ├── base_link_collision.DAE │ │ │ └── base_link_collision.STL │ │ ├── upper_elbow │ │ │ ├── E0.DAE │ │ │ └── E0.STL │ │ ├── upper_forearm │ │ │ ├── W0.DAE │ │ │ └── W0.STL │ │ ├── upper_shoulder │ │ │ ├── S0.DAE │ │ │ └── S0.STL │ │ └── wrist │ │ │ ├── W2.DAE │ │ │ └── W2.STL │ │ ├── package.xml │ │ └── urdf │ │ ├── baxter.urdf │ │ ├── baxter.urdf.xacro │ │ ├── baxter_base │ │ ├── baxter_base.gazebo.xacro │ │ └── baxter_base.urdf.xacro │ │ ├── left_end_effector.urdf.xacro │ │ ├── pedestal │ │ └── pedestal.xacro │ │ └── right_end_effector.urdf.xacro └── Panda │ ├── meshes │ ├── collision │ │ ├── finger.obj │ │ ├── hand.obj │ │ ├── link0.obj │ │ ├── 
link1.obj │ │ ├── link2.obj │ │ ├── link3.obj │ │ ├── link4.obj │ │ ├── link5.obj │ │ ├── link6.obj │ │ └── link7.obj │ └── visual │ │ ├── finger.dae │ │ ├── finger │ │ ├── finger.obj │ │ └── material.lib │ │ ├── hand.dae │ │ ├── hand │ │ ├── hand.obj │ │ └── material.lib │ │ ├── link0.dae │ │ ├── link0 │ │ ├── link0.obj │ │ └── material.lib │ │ ├── link1.dae │ │ ├── link1 │ │ ├── link1.obj │ │ └── material.lib │ │ ├── link2.dae │ │ ├── link2 │ │ ├── link2.obj │ │ └── material.lib │ │ ├── link3.dae │ │ ├── link3 │ │ ├── link3.obj │ │ └── material.lib │ │ ├── link4.dae │ │ ├── link4 │ │ ├── link4.obj │ │ └── material.lib │ │ ├── link5.dae │ │ ├── link5 │ │ ├── link5.obj │ │ └── material.lib │ │ ├── link6.dae │ │ ├── link6 │ │ ├── link6.obj │ │ └── material.lib │ │ ├── link7.dae │ │ └── link7 │ │ ├── link7.obj │ │ └── material.lib │ ├── panda.urdf │ ├── panda_dae.urdf │ └── panda_obj.urdf └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | 131 | # model weights 132 | weights/ 133 | outputs/ 134 | 135 | # dev codes 136 | dev_* 137 | runs 138 | 139 | # dataset 140 | data/ -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.0.2) 2 | project(ctrnet-robot-pose-estimation-ros) 3 | 4 | ## Compile as C++11, supported in ROS Kinetic and newer 5 | # add_compile_options(-std=c++11) 6 | 7 | ## Find catkin macros and libraries 8 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) 9 | ## is used, also find other catkin packages 10 | find_package(catkin REQUIRED COMPONENTS 11 | roscpp 12 | rospy 13 | std_msgs 14 | ) 15 | 16 | ## System dependencies are found with CMake's conventions 17 | # find_package(Boost REQUIRED COMPONENTS system) 18 | 19 | 20 | ## Uncomment this if the package has a setup.py. 
This macro ensures 21 | ## modules and global scripts declared therein get installed 22 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html 23 | catkin_python_setup() 24 | 25 | ################################################ 26 | ## Declare ROS messages, services and actions ## 27 | ################################################ 28 | 29 | ## To declare and build messages, services or actions from within this 30 | ## package, follow these steps: 31 | ## * Let MSG_DEP_SET be the set of packages whose message types you use in 32 | ## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). 33 | ## * In the file package.xml: 34 | ## * add a build_depend tag for "message_generation" 35 | ## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET 36 | ## * If MSG_DEP_SET isn't empty the following dependency has been pulled in 37 | ## but can be declared for certainty nonetheless: 38 | ## * add a exec_depend tag for "message_runtime" 39 | ## * In this file (CMakeLists.txt): 40 | ## * add "message_generation" and every package in MSG_DEP_SET to 41 | ## find_package(catkin REQUIRED COMPONENTS ...) 42 | ## * add "message_runtime" and every package in MSG_DEP_SET to 43 | ## catkin_package(CATKIN_DEPENDS ...) 44 | ## * uncomment the add_*_files sections below as needed 45 | ## and list every .msg/.srv/.action file to be processed 46 | ## * uncomment the generate_messages entry below 47 | ## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) 
48 | 49 | ## Generate messages in the 'msg' folder 50 | # add_message_files( 51 | # FILES 52 | # Message1.msg 53 | # Message2.msg 54 | # ) 55 | 56 | ## Generate services in the 'srv' folder 57 | # add_service_files( 58 | # FILES 59 | # Service1.srv 60 | # Service2.srv 61 | # ) 62 | 63 | ## Generate actions in the 'action' folder 64 | # add_action_files( 65 | # FILES 66 | # Action1.action 67 | # Action2.action 68 | # ) 69 | 70 | ## Generate added messages and services with any dependencies listed here 71 | # generate_messages( 72 | # DEPENDENCIES 73 | # std_msgs 74 | # ) 75 | 76 | ################################################ 77 | ## Declare ROS dynamic reconfigure parameters ## 78 | ################################################ 79 | 80 | ## To declare and build dynamic reconfigure parameters within this 81 | ## package, follow these steps: 82 | ## * In the file package.xml: 83 | ## * add a build_depend and a exec_depend tag for "dynamic_reconfigure" 84 | ## * In this file (CMakeLists.txt): 85 | ## * add "dynamic_reconfigure" to 86 | ## find_package(catkin REQUIRED COMPONENTS ...) 
87 | ## * uncomment the "generate_dynamic_reconfigure_options" section below 88 | ## and list every .cfg file to be processed 89 | 90 | ## Generate dynamic reconfigure parameters in the 'cfg' folder 91 | # generate_dynamic_reconfigure_options( 92 | # cfg/DynReconf1.cfg 93 | # cfg/DynReconf2.cfg 94 | # ) 95 | 96 | ################################### 97 | ## catkin specific configuration ## 98 | ################################### 99 | ## The catkin_package macro generates cmake config files for your package 100 | ## Declare things to be passed to dependent projects 101 | ## INCLUDE_DIRS: uncomment this if your package contains header files 102 | ## LIBRARIES: libraries you create in this project that dependent projects also need 103 | ## CATKIN_DEPENDS: catkin_packages dependent projects also need 104 | ## DEPENDS: system dependencies of this project that dependent projects also need 105 | catkin_package( 106 | # INCLUDE_DIRS include 107 | # LIBRARIES ctrnet-robot-pose-estimation-ros 108 | # CATKIN_DEPENDS roscpp rospy std_msgs 109 | # DEPENDS system_lib 110 | ) 111 | 112 | ########### 113 | ## Build ## 114 | ########### 115 | 116 | ## Specify additional locations of header files 117 | ## Your package locations should be listed before other locations 118 | include_directories( 119 | # include 120 | ${catkin_INCLUDE_DIRS} 121 | ) 122 | 123 | ## Declare a C++ library 124 | # add_library(${PROJECT_NAME} 125 | # src/${PROJECT_NAME}/ctrnet-robot-pose-estimation-ros.cpp 126 | # ) 127 | 128 | ## Add cmake target dependencies of the library 129 | ## as an example, code may need to be generated before libraries 130 | ## either from message generation or dynamic reconfigure 131 | # add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 132 | 133 | ## Declare a C++ executable 134 | ## With catkin_make all packages are built within a single CMake context 135 | ## The recommended prefix ensures that target names across packages don't 
collide 136 | # add_executable(${PROJECT_NAME}_node src/ctrnet-robot-pose-estimation-ros_node.cpp) 137 | 138 | ## Rename C++ executable without prefix 139 | ## The above recommended prefix causes long target names, the following renames the 140 | ## target back to the shorter version for ease of user use 141 | ## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" 142 | # set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") 143 | 144 | ## Add cmake target dependencies of the executable 145 | ## same as for the library above 146 | # add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) 147 | 148 | ## Specify libraries to link a library or executable target against 149 | # target_link_libraries(${PROJECT_NAME}_node 150 | # ${catkin_LIBRARIES} 151 | # ) 152 | 153 | ############# 154 | ## Install ## 155 | ############# 156 | 157 | # all install targets should use catkin DESTINATION variables 158 | # See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html 159 | 160 | ## Mark executable scripts (Python etc.) 
for installation 161 | ## in contrast to setup.py, you can choose the destination 162 | catkin_install_python(PROGRAMS 163 | ros_nodes/panda_pose.py 164 | ros_nodes/baxter_leftarm_pose.py 165 | DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} 166 | ) 167 | 168 | ## Mark executables for installation 169 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html 170 | # install(TARGETS ${PROJECT_NAME}_node 171 | # RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} 172 | # ) 173 | 174 | ## Mark libraries for installation 175 | ## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html 176 | # install(TARGETS ${PROJECT_NAME} 177 | # ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} 178 | # LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} 179 | # RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} 180 | # ) 181 | 182 | ## Mark cpp header files for installation 183 | # install(DIRECTORY include/${PROJECT_NAME}/ 184 | # DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} 185 | # FILES_MATCHING PATTERN "*.h" 186 | # PATTERN ".svn" EXCLUDE 187 | # ) 188 | 189 | ## Mark other files for installation (e.g. launch and bag files, etc.) 
190 | # install(FILES 191 | # # myfile1 192 | # # myfile2 193 | # DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} 194 | # ) 195 | 196 | ############# 197 | ## Testing ## 198 | ############# 199 | 200 | ## Add gtest based cpp test target and link libraries 201 | # catkin_add_gtest(${PROJECT_NAME}-test test/test_ctrnet-robot-pose-estimation-ros.cpp) 202 | # if(TARGET ${PROJECT_NAME}-test) 203 | # target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME}) 204 | # endif() 205 | 206 | ## Add folders to be run by python nosetests 207 | # catkin_add_nosetests(test) 208 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # 2 | # ref https://github.com/tebeka/pythonwise/blob/master/docker-miniconda/Dockerfile 3 | # 4 | # miniconda vers: http://repo.continuum.io/miniconda 5 | # sample variations: 6 | # Miniconda3-latest-Linux-armv7l.sh 7 | # Miniconda3-latest-Linux-x86_64.sh 8 | # Miniconda3-py38_4.10.3-Linux-x86_64.sh 9 | # Miniconda3-py37_4.10.3-Linux-x86_64.sh 10 | # 11 | # py vers: https://anaconda.org/anaconda/python/files 12 | # tf vers: https://anaconda.org/anaconda/tensorflow/files 13 | # tf-mkl vers: https://anaconda.org/anaconda/tensorflow-mkl/files 14 | # 15 | 16 | ARG UBUNTU_VER=20.04 17 | ARG CONDA_VER=latest 18 | ARG OS_TYPE=x86_64 19 | ARG PY_VER=3.8.11 20 | 21 | 22 | FROM ubuntu:${UBUNTU_VER} 23 | 24 | # System packages 25 | RUN apt-get -y update 26 | RUN apt-get -y upgrade 27 | RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata 28 | 29 | RUN apt-get install -yq curl build-essential 30 | RUN apt-get install -y git 31 | RUN apt-get install ffmpeg libsm6 libxext6 -y 32 | # Use the above args during building https://docs.docker.com/engine/reference/builder/#understand-how-arg-and-from-interact 33 | ARG CONDA_VER 34 | ARG OS_TYPE 35 | # Install miniconda to /miniconda 36 | RUN curl -LO 
"http://repo.continuum.io/miniconda/Miniconda3-${CONDA_VER}-Linux-${OS_TYPE}.sh" 37 | RUN bash Miniconda3-${CONDA_VER}-Linux-${OS_TYPE}.sh -p /miniconda -b 38 | RUN rm Miniconda3-${CONDA_VER}-Linux-${OS_TYPE}.sh 39 | ENV PATH=/miniconda/bin:${PATH} 40 | RUN conda update -y conda 41 | RUN conda install -c conda-forge jupyterlab 42 | RUN conda install -c conda-forge nb_conda_kernels 43 | 44 | ARG PY_VER 45 | 46 | 47 | RUN mkdir /home/robot_pose_estimation 48 | RUN mkdir /home/dataset 49 | 50 | COPY environment.yml environment.yml 51 | RUN conda env create -f environment.yml 52 | SHELL ["conda","run","-n","pytorch","/bin/bash","-c"] 53 | RUN python -m ipykernel install --name pytorch --display-name "pytorch" 54 | 55 | SHELL ["/bin/bash","-c"] 56 | RUN conda init 57 | RUN echo 'conda activate pytorch' >> ~/.bashrc 58 | 59 | WORKDIR /home 60 | 61 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Jingpei Lu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Markerless Camera-to-Robot Pose Estimation via Self-supervised Sim-to-Real Transfer 2 | 3 | PyTorch implementation of CtRNet https://arxiv.org/abs/2302.14332 4 | 5 | 6 | ### Dependencies 7 | We recommend setting up the environment using Anaconda. 8 | The code is developed and tested on Ubuntu 20.04. 9 | - Python(3.8) 10 | - Numpy(1.22.4) 11 | - PyTorch(1.10.0) 12 | - torchvision(0.11.1) 13 | - pytorch3d(0.6.2) 14 | - Kornia(0.6.3) 15 | - Transforms3d(0.3.1) 16 | 17 | For more details, see `environment.yml`. 18 | 19 | ## Usage 20 | - See `inference_single_image.ipynb` for an example of single-frame inference. 21 | - We provide a ROS node for CtRNet, which subscribes to image and joint state topics and publishes the robot pose. 22 | ``` 23 | python ros_nodes/panda_pose.py 24 | ``` 25 | 26 | ## Dataset 27 | 28 | 1. [DREAM dataset](https://github.com/NVlabs/DREAM/blob/master/data/DOWNLOAD.sh) 29 | 2. [Baxter dataset](https://drive.google.com/file/d/12bCv6GBuh-FdvLGKjlUx2jPN-DBRUqUn/view?usp=share_link) 30 | 31 | ## Weights 32 | 33 | Weights for Panda and Baxter can be found [here](https://drive.google.com/file/d/1OAamxl3_cMLdlpksNo0p-8K20fSMukbI/view?usp=share_link). 34 | 35 | 36 | ## Videos 37 | Using CtRNet for a visual servoing experiment with a moving camera.
38 | 39 | 40 | 41 | https://user-images.githubusercontent.com/22925036/230662315-5ca62a10-e6b5-4eee-8abc-c3bf2b0fbaae.mp4 42 | 43 | 44 | 45 | https://user-images.githubusercontent.com/22925036/230662322-89b90a32-ca7b-4f64-a6ad-f9b807ddc08d.mp4 46 | 47 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: pytorch_ros 2 | channels: 3 | - pytorch3d 4 | - iopath 5 | - fvcore 6 | - pytorch 7 | - conda-forge 8 | - defaults 9 | dependencies: 10 | - _libgcc_mutex=0.1=main 11 | - _openmp_mutex=4.5=1_gnu 12 | - blas=1.0=mkl 13 | - blosc=1.21.0=h8c45485_0 14 | - brotli=1.0.9=he6710b0_2 15 | - brunsli=0.1=h2531618_0 16 | - bzip2=1.0.8=h7b6447c_0 17 | - c-ares=1.18.1=h7f8727e_0 18 | - ca-certificates=2022.5.18.1=ha878542_0 19 | - cached-property=1.5.2=py_0 20 | - certifi=2022.6.15=py38h06a4308_0 21 | - cfitsio=3.470=hf0d0db6_6 22 | - charls=2.2.0=h2531618_0 23 | - cloudpickle=2.0.0=pyhd3eb1b0_0 24 | - colorama=0.4.4=pyh9f0ad1d_0 25 | - cudatoolkit=11.3.1=h2bc3f7f_2 26 | - cytoolz=0.11.0=py38h7b6447c_0 27 | - dask-core=2022.5.0=py38h06a4308_0 28 | - ffmpeg=4.3=hf484d3e_0 29 | - freetype=2.11.0=h70c0345_0 30 | - fsspec=2022.3.0=py38h06a4308_0 31 | - fvcore=0.1.5.post20210915=py38 32 | - giflib=5.2.1=h7b6447c_0 33 | - gmp=6.2.1=h2531618_2 34 | - gnutls=3.6.15=he1e5248_0 35 | - h5py=3.2.1=py38h6c542dc_0 36 | - hdf5=1.10.6=hb1b8bf9_0 37 | - imagecodecs=2021.8.26=py38h4cda21f_0 38 | - intel-openmp=2021.4.0=h06a4308_3561 39 | - iopath=0.1.9=py38 40 | - jpeg=9d=h7f8727e_0 41 | - jxrlib=1.1=h7b6447c_2 42 | - krb5=1.19.2=hac12032_0 43 | - lame=3.100=h7b6447c_0 44 | - lcms2=2.12=h3be6417_0 45 | - ld_impl_linux-64=2.35.1=h7274673_9 46 | - lerc=3.0=h295c915_0 47 | - libaec=1.0.4=he6710b0_1 48 | - libcurl=7.82.0=h0b77cf5_0 49 | - libdeflate=1.8=h7f8727e_5 50 | - libedit=3.1.20210910=h7f8727e_0 51 | - libev=4.33=h7f8727e_1 52 | - libffi=3.3=he6710b0_2 53 | 
- libgcc-ng=9.3.0=h5101ec6_17 54 | - libgfortran-ng=7.5.0=ha8ba4b0_17 55 | - libgfortran4=7.5.0=ha8ba4b0_17 56 | - libgomp=9.3.0=h5101ec6_17 57 | - libiconv=1.15=h63c8f33_5 58 | - libidn2=2.3.2=h7f8727e_0 59 | - libnghttp2=1.46.0=hce63b2e_0 60 | - libpng=1.6.37=hbc83047_0 61 | - libssh2=1.10.0=h8f2d780_0 62 | - libstdcxx-ng=9.3.0=hd4cf53a_17 63 | - libtasn1=4.16.0=h27cfd23_0 64 | - libtiff=4.2.0=h85742a9_0 65 | - libunistring=0.9.10=h27cfd23_0 66 | - libuv=1.40.0=h7b6447c_0 67 | - libwebp=1.2.0=h89dd481_0 68 | - libwebp-base=1.2.0=h27cfd23_0 69 | - libzopfli=1.0.3=he6710b0_0 70 | - locket=1.0.0=py38h06a4308_0 71 | - lz4-c=1.9.3=h295c915_1 72 | - mkl=2021.4.0=h06a4308_640 73 | - mkl-service=2.4.0=py38h7f8727e_0 74 | - mkl_fft=1.3.1=py38hd3c417c_0 75 | - mkl_random=1.2.2=py38h51133e4_0 76 | - ncurses=6.3=h7f8727e_0 77 | - nettle=3.7.3=hbbd107a_1 78 | - networkx=2.7.1=pyhd3eb1b0_0 79 | - olefile=0.46=pyhd3eb1b0_0 80 | - openh264=2.1.0=hd408876_0 81 | - openjpeg=2.4.0=h3ad879b_0 82 | - openssl=1.1.1o=h7f8727e_0 83 | - partd=1.2.0=pyhd3eb1b0_1 84 | - pillow=8.4.0=py38h5aabda8_0 85 | - pip=21.2.4=py38h06a4308_0 86 | - portalocker=2.4.0=py38h578d9bd_0 87 | - python=3.8.12=h12debd9_0 88 | - python_abi=3.8=2_cp38 89 | - pytorch=1.10.0=py3.8_cuda11.3_cudnn8.2.0_0 90 | - pytorch-mutex=1.0=cuda 91 | - pytorch3d=0.6.2=py38_cu113_pyt1100 92 | - pywavelets=1.3.0=py38h7f8727e_0 93 | - readline=8.1=h27cfd23_0 94 | - scikit-image=0.19.2=py38h51133e4_0 95 | - scipy=1.7.3=py38hc147768_0 96 | - setuptools=58.0.4=py38h06a4308_0 97 | - six=1.16.0=pyhd3eb1b0_0 98 | - snappy=1.1.9=h295c915_0 99 | - sqlite=3.36.0=hc218d9a_0 100 | - tabulate=0.8.9=pyhd8ed1ab_0 101 | - termcolor=1.1.0=py_2 102 | - tifffile=2021.7.2=pyhd3eb1b0_2 103 | - tk=8.6.11=h1ccaba5_0 104 | - toolz=0.11.2=pyhd3eb1b0_0 105 | - torchaudio=0.10.0=py38_cu113 106 | - torchvision=0.11.1=py38_cu113 107 | - tqdm=4.64.0=pyhd8ed1ab_0 108 | - typing_extensions=3.10.0.2=pyh06a4308_0 109 | - wheel=0.37.0=pyhd3eb1b0_1 110 | - 
xz=5.2.5=h7b6447c_0 111 | - yacs=0.1.8=pyhd8ed1ab_0 112 | - yaml=0.2.5=h7b6447c_0 113 | - zfp=0.5.5=h295c915_6 114 | - zlib=1.2.11=h7b6447c_3 115 | - zstd=1.4.9=haebb681_0 116 | - pip: 117 | - absl-py==1.1.0 118 | - ansitable==0.9.6 119 | - anyio==3.3.4 120 | - argon2-cffi==21.1.0 121 | - attrdict==2.0.1 122 | - attrs==21.2.0 123 | - av==9.2.0 124 | - babel==2.9.1 125 | - backcall==0.2.0 126 | - bleach==4.1.0 127 | - cachetools==5.2.0 128 | - catkin-pkg==0.5.2 129 | - cffi==1.15.0 130 | - charset-normalizer==2.0.7 131 | - colored==1.4.3 132 | - debugpy==1.5.1 133 | - decorator==5.1.0 134 | - defusedxml==0.7.1 135 | - distro==1.8.0 136 | - docutils==0.19 137 | - entrypoints==0.3 138 | - gnupg==2.3.1 139 | - google-auth==2.8.0 140 | - google-auth-oauthlib==0.4.6 141 | - grpcio==1.47.0 142 | - idna==3.3 143 | - imageio==2.10.1 144 | - importlib-metadata==4.11.4 145 | - imutils==0.5.4 146 | - ipykernel==6.5.0 147 | - ipython==7.29.0 148 | - ipython-genutils==0.2.0 149 | - jedi==0.18.0 150 | - jinja2==3.0.2 151 | - json5==0.9.6 152 | - jsonschema==4.1.2 153 | - jupyter-client==7.0.6 154 | - jupyter-core==4.9.1 155 | - jupyter-server==1.11.2 156 | - jupyterlab==3.2.1 157 | - jupyterlab-pygments==0.1.2 158 | - jupyterlab-server==2.8.2 159 | - kornia==0.6.3 160 | - markdown==3.3.7 161 | - markupsafe==2.0.1 162 | - matlabengineforpython==R2021a 163 | - matplotlib-inline==0.1.3 164 | - mistune==0.8.4 165 | - nbclassic==0.3.4 166 | - nbclient==0.5.4 167 | - nbconvert==6.2.0 168 | - nbformat==5.1.3 169 | - nest-asyncio==1.5.1 170 | - notebook==6.4.5 171 | - numpy==1.22.4 172 | - oauthlib==3.2.0 173 | - opencv-contrib-python==4.5.4.58 174 | - packaging==21.2 175 | - pandocfilters==1.5.0 176 | - parso==0.8.2 177 | - pexpect==4.8.0 178 | - pgraph-python==0.6.1 179 | - pickleshare==0.7.5 180 | - progress==1.6 181 | - progressbar==2.5 182 | - prometheus-client==0.12.0 183 | - prompt-toolkit==3.0.21 184 | - protobuf==3.19.4 185 | - psutil==5.9.3 186 | - ptyprocess==0.7.0 187 | - 
pyasn1==0.4.8 188 | - pyasn1-modules==0.2.8 189 | - pycparser==2.20 190 | - pycryptodomex==3.15.0 191 | - pygments==2.10.0 192 | - pymunk==6.2.1 193 | - pyparsing==2.4.7 194 | - pyrsistent==0.18.0 195 | - python-dateutil==2.8.2 196 | - pytz==2021.3 197 | - pyyaml==6.0 198 | - pyzmq==22.3.0 199 | - requests==2.26.0 200 | - requests-oauthlib==1.3.1 201 | - roboticstoolbox-python==1.0.1 202 | - rospkg==1.4.0 203 | - rsa==4.8 204 | - rtb-data==1.0.0 205 | - ruamel-yaml==0.17.21 206 | - ruamel-yaml-clib==0.2.6 207 | - scikit-fmm==2022.3.26 208 | - send2trash==1.8.0 209 | - sniffio==1.2.0 210 | - spatialgeometry==1.0.1 211 | - spatialmath-python==1.0.0 212 | - swift-sim==1.0.0 213 | - tensorboard==2.9.1 214 | - tensorboard-data-server==0.6.1 215 | - tensorboard-plugin-wit==1.8.1 216 | - terminado==0.12.1 217 | - testpath==0.5.0 218 | - tornado==6.1 219 | - traitlets==5.1.1 220 | - transforms3d==0.3.1 221 | - urllib3==1.26.7 222 | - wcwidth==0.2.5 223 | - webencodings==0.5.1 224 | - websocket-client==1.2.1 225 | - websockets==10.3 226 | - werkzeug==2.1.2 227 | - zipp==3.8.0 228 | prefix: /home/jingpei/anaconda3/envs/pytorch_ros 229 | -------------------------------------------------------------------------------- /evaluation/evaluate_DREAM.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import sys\n", 10 | "import os\n", 11 | "base_dir = \"/home/jingpei/Desktop/CtRNet-robot-pose-estimation\"\n", 12 | "sys.path.append(base_dir)\n", 13 | "\n", 14 | "import torch\n", 15 | "from torch.utils.data import Dataset, DataLoader\n", 16 | "import torchvision.transforms as transforms\n", 17 | "import numpy as np\n", 18 | "import matplotlib.pyplot as plt\n", 19 | "\n", 20 | "from utils import *\n", 21 | "from models.CtRNet import CtRNet\n", 22 | "from models.BPnP import batch_transform_3d\n", 23 | "\n" 24 | ] 
25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 2, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "import argparse\n", 33 | "from imageloaders.DREAM import ImageDataLoaderReal, load_camera_parameters\n", 34 | "parser = argparse.ArgumentParser()\n", 35 | "\n", 36 | "args = parser.parse_args(\"\")\n", 37 | "\n", 38 | "args.data_folder = \"/media/jingpei/DATA/DREAM/data/real/panda-3cam_realsense\"\n", 39 | "args.base_dir = \"/home/jingpei/Desktop/CtRNet-robot-pose-estimation\"\n", 40 | "args.use_gpu = True\n", 41 | "args.trained_on_multi_gpus = True\n", 42 | "args.keypoint_seg_model_path = os.path.join(args.base_dir,\"weights/panda/panda-3cam_realsense/net.pth\")\n", 43 | "args.urdf_file = os.path.join(args.base_dir,\"urdfs/Panda/panda.urdf\")\n", 44 | "\n", 45 | "args.robot_name = 'Panda' # \"Panda\" or \"Baxter_left_arm\"\n", 46 | "args.n_kp = 7\n", 47 | "args.scale = 0.5\n", 48 | "args.height = 480\n", 49 | "args.width = 640\n", 50 | "args.fx, args.fy, args.px, args.py = load_camera_parameters(args.data_folder)\n", 51 | "\n", 52 | "# scale the camera parameters\n", 53 | "args.width = int(args.width * args.scale)\n", 54 | "args.height = int(args.height * args.scale)\n", 55 | "args.fx = args.fx * args.scale\n", 56 | "args.fy = args.fy * args.scale\n", 57 | "args.px = args.px * args.scale\n", 58 | "args.py = args.py * args.scale" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 3, 64 | "metadata": {}, 65 | "outputs": [ 66 | { 67 | "name": "stdout", 68 | "output_type": "stream", 69 | "text": [ 70 | "Loading keypoint segmentation model from /home/jingpei/Desktop/CtRNet-robot-pose-estimation/weights/panda/panda-3cam_realsense/net.pth\n", 71 | "Camera intrinsics: [[307.76196289 0. 164.13032532]\n", 72 | " [ 0. 307.60958862 125.89585114]\n", 73 | " [ 0. 0. 1. 
]]\n", 74 | "Robot model: Panda\n" 75 | ] 76 | } 77 | ], 78 | "source": [ 79 | "CtRNet = CtRNet(args)" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 4, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "trans_to_tensor = transforms.Compose([\n", 89 | " transforms.ToTensor(),\n", 90 | " transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n", 91 | "])\n", 92 | "\n", 93 | "dataset = ImageDataLoaderReal(data_folder = args.data_folder, scale = args.scale, trans_to_tensor = trans_to_tensor)" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 5, 99 | "metadata": {}, 100 | "outputs": [ 101 | { 102 | "data": { 103 | "application/vnd.jupyter.widget-view+json": { 104 | "model_id": "9de685e4b97242bfa46d52994ff1ef06", 105 | "version_major": 2, 106 | "version_minor": 0 107 | }, 108 | "text/plain": [ 109 | " 0%| | 0/5944 [00:00 0: 74 | pts2d_flat.grad.zero_() 75 | P_6d_flat.grad.zero_() 76 | pts3d_flat.grad.zero_() 77 | K_flat.grad.zero_() 78 | 79 | R = kn.geometry.conversions.angle_axis_to_rotation_matrix(P_6d_flat[0:m-3].view(1,3)) 80 | 81 | P = torch.cat((R[0,0:3,0:3].view(3,3), P_6d_flat[m-3:m].view(3,1)),dim=-1) 82 | KP = torch.mm(K_flat.view(3,3), P) 83 | pts2d_i = pts2d_flat.view(n,2).transpose(0,1) 84 | pts3d_i = torch.cat((pts3d_flat.view(n,3),torch.ones(n,1,device=device)),dim=-1).t() 85 | proj_i = KP.mm(pts3d_i) 86 | Si = proj_i[2,:].view(1,n) 87 | 88 | r = pts2d_i*Si-proj_i[0:2,:] 89 | coefs = get_coefs(P_6d_flat.view(1,6), pts3d_flat.view(n,3), K_flat.view(3,3)) 90 | coef = coefs[:,:,j].transpose(0,1) # size: [2,n] 91 | fj = (coef*r).sum() 92 | fj.backward() 93 | J_fy[j,:] = P_6d_flat.grad.clone() 94 | J_fx[j,:] = pts2d_flat.grad.clone() 95 | J_fz[j,:] = pts3d_flat.grad.clone() 96 | J_fK[j,:] = K_flat.grad.clone() 97 | 98 | inv_J_fy = torch.inverse(J_fy) 99 | 100 | J_yx = (-1) * torch.mm(inv_J_fy, J_fx) 101 | J_yz = (-1) * torch.mm(inv_J_fy, J_fz) 102 | J_yK = (-1) * 
torch.mm(inv_J_fy, J_fK) 103 | 104 | grad_x[i] = grad_output[i].view(1,m).mm(J_yx).view(n,2) 105 | grad_z += grad_output[i].view(1,m).mm(J_yz).view(n,3) 106 | grad_K += grad_output[i].view(1,m).mm(J_yK).view(3,3) 107 | 108 | return grad_x, grad_z, grad_K, None 109 | 110 | 111 | class BPnP_m3d(torch.autograd.Function): 112 | """ 113 | BPnP_m3d supports mini-batch intputs of 3D keypoints, where the i-th set of 2D keypoints correspond to the i-th set of 3D keypoints. 114 | INPUTS: 115 | pts2d - the 2D keypoints coordinates of size [batch_size, num_keypoints, 2] 116 | pts3d - the 3D keypoints coordinates of size [batch_size, num_keypoints, 3] 117 | K - the camera intrinsic matrix of size [3, 3] 118 | OUTPUT: 119 | P_6d - the 6 DOF poses of size [batch_size, 6], where the first 3 elements of each row are the angle-axis rotation 120 | vector (Euler vector) and the last 3 elements are the translation vector. 121 | NOTE: 122 | For situations where all sets of 2D points in the mini-batch correspond to one common set of 3D points, use the BPnP class. 
123 | """ 124 | @staticmethod 125 | def forward(ctx, pts2d, pts3d, K, ini_pose=None): 126 | bs = pts2d.size(0) 127 | n = pts2d.size(1) 128 | device = pts2d.device 129 | K_np = np.array(K.detach().cpu()) 130 | P_6d = torch.zeros(bs,6,device=device) 131 | 132 | for i in range(bs): 133 | pts2d_i_np = np.ascontiguousarray(pts2d[i].detach().cpu()).reshape((n,1,2)) 134 | pts3d_i_np = np.ascontiguousarray(pts3d[i].detach().cpu()).reshape((n,3)) 135 | if ini_pose is None: 136 | #_, rvec0, T0, _ = cv.solvePnPRansac(objectPoints=pts3d_i_np, imagePoints=pts2d_i_np, cameraMatrix=K_np, distCoeffs=None, flags=cv.SOLVEPNP_ITERATIVE, confidence=0.9999 ,reprojectionError=1) 137 | _, rvec0, T0 = cv.solvePnP(objectPoints=pts3d_i_np, imagePoints=pts2d_i_np, cameraMatrix=K_np, distCoeffs=None, flags=cv.SOLVEPNP_EPNP) 138 | else: 139 | rvec0 = np.array(ini_pose[i, 0:3].cpu().view(3, 1)) 140 | T0 = np.array(ini_pose[i, 3:6].cpu().view(3, 1)) 141 | _, rvec, T = cv.solvePnP(objectPoints=pts3d_i_np, imagePoints=pts2d_i_np, cameraMatrix=K_np, distCoeffs=None, flags=cv.SOLVEPNP_ITERATIVE, useExtrinsicGuess=True, rvec=rvec0, tvec=T0) 142 | angle_axis = torch.tensor(rvec,device=device,dtype=torch.float).view(1, 3) 143 | T = torch.tensor(T,device=device,dtype=torch.float).view(1, 3) 144 | P_6d[i,:] = torch.cat((angle_axis,T),dim=-1) 145 | 146 | ctx.save_for_backward(pts2d,P_6d,pts3d,K) 147 | return P_6d 148 | 149 | @staticmethod 150 | def backward(ctx, grad_output): 151 | 152 | pts2d, P_6d, pts3d, K = ctx.saved_tensors 153 | device = pts2d.device 154 | bs = pts2d.size(0) 155 | n = pts2d.size(1) 156 | m = 6 157 | 158 | grad_x = torch.zeros_like(pts2d) 159 | grad_z = torch.zeros_like(pts3d) 160 | grad_K = torch.zeros_like(K) 161 | 162 | for i in range(bs): 163 | J_fy = torch.zeros(m,m, device=device) 164 | J_fx = torch.zeros(m,2*n, device=device) 165 | J_fz = torch.zeros(m,3*n, device=device) 166 | J_fK = torch.zeros(m, 9, device=device) 167 | 168 | torch.set_grad_enabled(True) 169 | pts2d_flat = 
pts2d[i].clone().view(-1).detach().requires_grad_() 170 | P_6d_flat = P_6d[i].clone().view(-1).detach().requires_grad_() 171 | pts3d_flat = pts3d[i].clone().view(-1).detach().requires_grad_() 172 | K_flat = K.clone().view(-1).detach().requires_grad_() 173 | 174 | for j in range(m): 175 | torch.set_grad_enabled(True) 176 | if j > 0: 177 | pts2d_flat.grad.zero_() 178 | P_6d_flat.grad.zero_() 179 | pts3d_flat.grad.zero_() 180 | K_flat.grad.zero_() 181 | 182 | R = kn.geometry.conversions.angle_axis_to_rotation_matrix(P_6d_flat[0:m-3].view(1,3)) 183 | 184 | P = torch.cat((R[0,0:3,0:3].view(3,3), P_6d_flat[m-3:m].view(3,1)),dim=-1) 185 | KP = torch.mm(K_flat.view(3,3), P) 186 | pts2d_i = pts2d_flat.view(n,2).transpose(0,1) 187 | pts3d_i = torch.cat((pts3d_flat.view(n,3),torch.ones(n,1,device=device)),dim=-1).t() 188 | proj_i = KP.mm(pts3d_i) 189 | Si = proj_i[2,:].view(1,n) 190 | 191 | r = pts2d_i*Si-proj_i[0:2,:] 192 | coefs = get_coefs(P_6d_flat.view(1,6), pts3d_flat.view(n,3), K_flat.view(3,3)) 193 | coef = coefs[:,:,j].transpose(0,1) # size: [2,n] 194 | fj = (coef*r).sum() 195 | fj.backward() 196 | J_fy[j,:] = P_6d_flat.grad.clone() 197 | J_fx[j,:] = pts2d_flat.grad.clone() 198 | J_fz[j,:] = pts3d_flat.grad.clone() 199 | J_fK[j,:] = K_flat.grad.clone() 200 | 201 | inv_J_fy = torch.inverse(J_fy) 202 | 203 | J_yx = (-1) * torch.mm(inv_J_fy, J_fx) 204 | J_yz = (-1) * torch.mm(inv_J_fy, J_fz) 205 | J_yK = (-1) * torch.mm(inv_J_fy, J_fK) 206 | 207 | grad_x[i] = grad_output[i].view(1,m).mm(J_yx).view(n,2) 208 | grad_z[i] = grad_output[i].view(1,m).mm(J_yz).view(n,3) 209 | grad_K += grad_output[i].view(1,m).mm(J_yK).view(3,3) 210 | 211 | return grad_x, grad_z, grad_K, None 212 | 213 | 214 | class BPnP_fast(torch.autograd.Function): 215 | """ 216 | BPnP_fast is the efficient version of the BPnP class which ignores the higher order dirivatives through the coefs' graph. This sacrifices 217 | negligible gradient accuracy yet saves significant runtime. 
218 | INPUTS: 219 | pts2d - the 2D keypoints coordinates of size [batch_size, num_keypoints, 2] 220 | pts3d - the 3D keypoints coordinates of size [num_keypoints, 3] 221 | K - the camera intrinsic matrix of size [3, 3] 222 | OUTPUT: 223 | P_6d - the 6 DOF poses of size [batch_size, 6], where the first 3 elements of each row are the angle-axis rotation 224 | vector (Euler vector) and the last 3 elements are the translation vector. 225 | NOTE: 226 | This BPnP function assumes that all sets of 2D points in the mini-batch correspond to one common set of 3D points. 227 | For situations where pts3d is also a mini-batch, use the BPnP_m3d class. 228 | """ 229 | @staticmethod 230 | def forward(ctx, pts2d, pts3d, K, ini_pose=None): 231 | bs = pts2d.size(0) 232 | n = pts2d.size(1) 233 | device = pts2d.device 234 | pts3d_np = np.array(pts3d.detach().cpu()) 235 | K_np = np.array(K.detach().cpu()) 236 | P_6d = torch.zeros(bs,6,device=device) 237 | 238 | for i in range(bs): 239 | pts2d_i_np = np.ascontiguousarray(pts2d[i].detach().cpu()).reshape((n,1,2)) 240 | if ini_pose is None: 241 | _, rvec0, T0, _ = cv.solvePnPRansac(objectPoints=pts3d_np, imagePoints=pts2d_i_np, cameraMatrix=K_np, distCoeffs=None, flags=cv.SOLVEPNP_ITERATIVE, confidence=0.9999 ,reprojectionError=3) 242 | else: 243 | rvec0 = np.array(ini_pose[i, 0:3].cpu().view(3, 1)) 244 | T0 = np.array(ini_pose[i, 3:6].cpu().view(3, 1)) 245 | _, rvec, T = cv.solvePnP(objectPoints=pts3d_np, imagePoints=pts2d_i_np, cameraMatrix=K_np, distCoeffs=None, flags=cv.SOLVEPNP_ITERATIVE, useExtrinsicGuess=True, rvec=rvec0, tvec=T0) 246 | angle_axis = torch.tensor(rvec,device=device,dtype=torch.float).view(1, 3) 247 | T = torch.tensor(T,device=device,dtype=torch.float).view(1, 3) 248 | P_6d[i,:] = torch.cat((angle_axis,T),dim=-1) 249 | 250 | ctx.save_for_backward(pts2d,P_6d,pts3d,K) 251 | return P_6d 252 | 253 | @staticmethod 254 | def backward(ctx, grad_output): 255 | 256 | pts2d, P_6d, pts3d, K = ctx.saved_tensors 257 | device = 
pts2d.device 258 | bs = pts2d.size(0) 259 | n = pts2d.size(1) 260 | m = 6 261 | 262 | grad_x = torch.zeros_like(pts2d) 263 | grad_z = torch.zeros_like(pts3d) 264 | grad_K = torch.zeros_like(K) 265 | 266 | for i in range(bs): 267 | J_fy = torch.zeros(m,m, device=device) 268 | J_fx = torch.zeros(m,2*n, device=device) 269 | J_fz = torch.zeros(m,3*n, device=device) 270 | J_fK = torch.zeros(m, 9, device=device) 271 | 272 | coefs = get_coefs(P_6d[i].view(1,6), pts3d, K, create_graph=False).detach() 273 | 274 | pts2d_flat = pts2d[i].clone().view(-1).detach().requires_grad_() 275 | P_6d_flat = P_6d[i].clone().view(-1).detach().requires_grad_() 276 | pts3d_flat = pts3d.clone().view(-1).detach().requires_grad_() 277 | K_flat = K.clone().view(-1).detach().requires_grad_() 278 | 279 | for j in range(m): 280 | torch.set_grad_enabled(True) 281 | if j > 0: 282 | pts2d_flat.grad.zero_() 283 | P_6d_flat.grad.zero_() 284 | pts3d_flat.grad.zero_() 285 | K_flat.grad.zero_() 286 | 287 | R = kn.geometry.conversions.angle_axis_to_rotation_matrix(P_6d_flat[0:m-3].view(1,3)) 288 | 289 | P = torch.cat((R[0,0:3,0:3].view(3,3), P_6d_flat[m-3:m].view(3,1)),dim=-1) 290 | KP = torch.mm(K_flat.view(3,3), P) 291 | pts2d_i = pts2d_flat.view(n,2).transpose(0,1) 292 | pts3d_i = torch.cat((pts3d_flat.view(n,3),torch.ones(n,1,device=device)),dim=-1).t() 293 | proj_i = KP.mm(pts3d_i) 294 | Si = proj_i[2,:].view(1,n) 295 | 296 | r = pts2d_i*Si-proj_i[0:2,:] 297 | coef = coefs[:,:,j].transpose(0,1) # size: [2,n] 298 | fj = (coef*r).sum() 299 | fj.backward() 300 | J_fy[j,:] = P_6d_flat.grad.clone() 301 | J_fx[j,:] = pts2d_flat.grad.clone() 302 | J_fz[j,:] = pts3d_flat.grad.clone() 303 | J_fK[j,:] = K_flat.grad.clone() 304 | 305 | inv_J_fy = torch.inverse(J_fy) 306 | 307 | J_yx = (-1) * torch.mm(inv_J_fy, J_fx) 308 | J_yz = (-1) * torch.mm(inv_J_fy, J_fz) 309 | J_yK = (-1) * torch.mm(inv_J_fy, J_fK) 310 | 311 | grad_x[i] = grad_output[i].view(1,m).mm(J_yx).view(n,2) 312 | grad_z += 
grad_output[i].view(1,m).mm(J_yz).view(n,3) 313 | grad_K += grad_output[i].view(1,m).mm(J_yK).view(3,3) 314 | 315 | return grad_x, grad_z, grad_K, None 316 | 317 | 318 | def get_coefs(P_6d, pts3d, K, create_graph=True): 319 | device = P_6d.device 320 | n = pts3d.size(0) 321 | m = P_6d.size(-1) 322 | coefs = torch.zeros(n,2,m,device=device) 323 | torch.set_grad_enabled(True) 324 | y = P_6d.repeat(n,1) 325 | proj = batch_project(y, pts3d, K).squeeze() 326 | vec = torch.diag(torch.ones(n,device=device).float()) 327 | for k in range(2): 328 | torch.set_grad_enabled(True) 329 | y_grad = torch.autograd.grad(proj[:,:,k],y,vec, retain_graph=True, create_graph=create_graph) 330 | coefs[:,k,:] = -2*y_grad[0].clone() 331 | return coefs 332 | 333 | def batch_project(P, pts3d, K, angle_axis=True): 334 | # pts3d.size = (N,3) 335 | # P.size = (B,6) 336 | n = pts3d.size(0) 337 | bs = P.size(0) 338 | device = P.device 339 | pts3d_h = torch.cat((pts3d, torch.ones(n, 1, device=device)), dim=-1) 340 | if angle_axis: 341 | R_out = kn.geometry.conversions.angle_axis_to_rotation_matrix(P[:, 0:3].view(bs, 3)) 342 | PM = torch.cat((R_out[:,0:3,0:3], P[:, 3:6].view(bs, 3, 1)), dim=-1) 343 | else: 344 | PM = P 345 | pts3d_cam = pts3d_h.matmul(PM.transpose(-2,-1)) 346 | pts2d_proj = pts3d_cam.matmul(K.t()) 347 | S = pts2d_proj[:,:, 2].view(bs, n, 1) 348 | pts2d_pro = pts2d_proj[:,:,0:2].div(S) 349 | 350 | return pts2d_pro 351 | 352 | def batch_transform_3d(P, pts3d, angle_axis=True): 353 | # pts3d.size = (N,3) 354 | # P.size = (B,6) 355 | n = pts3d.size(0) 356 | bs = P.size(0) 357 | device = P.device 358 | pts3d_h = torch.cat((pts3d, torch.ones(n, 1, device=device)), dim=-1) 359 | if angle_axis: 360 | R_out = kn.geometry.conversions.angle_axis_to_rotation_matrix(P[:, 0:3].view(bs, 3)) 361 | PM = torch.cat((R_out[:,0:3,0:3], P[:, 3:6].view(bs, 3, 1)), dim=-1) 362 | else: 363 | PM = P 364 | pts3d_cam = pts3d_h.matmul(PM.transpose(-2,-1)) 365 | 366 | return pts3d_cam 367 | 368 | 369 | 370 | 371 
# -------------------------------------------------------------------------------- /models/CtRNet.py: --------------------------------------------------------------------------------
import torch
import torch.nn.functional as F
import kornia
import numpy as np

from .keypoint_seg_resnet import KeyPointSegNet
from .BPnP import BPnP, BPnP_m3d, batch_project
from .mesh_renderer import RobotMeshRenderer


class CtRNet(torch.nn.Module):
    """Camera-to-robot pose estimation network.

    Combines a keypoint/segmentation predictor (KeyPointSegNet), a
    differentiable PnP layer (BPnP) and a differentiable silhouette renderer,
    exposing single-image / batch inference and a self-supervised training
    step.
    """

    def __init__(self, args):
        super(CtRNet, self).__init__()

        self.args = args

        if args.use_gpu:
            self.device = "cuda"
        else:
            self.device = "cpu"

        # load keypoint segmentation model
        self.keypoint_seg_predictor = KeyPointSegNet(args, use_gpu=args.use_gpu)

        if args.use_gpu:
            self.keypoint_seg_predictor = self.keypoint_seg_predictor.cuda()

        if args.trained_on_multi_gpus:
            # Checkpoints saved from a DataParallel model carry a "module."
            # prefix; wrapping here makes the state_dict keys line up.
            self.keypoint_seg_predictor = torch.nn.DataParallel(self.keypoint_seg_predictor, device_ids=[0])

        if args.keypoint_seg_model_path is not None:
            print("Loading keypoint segmentation model from {}".format(args.keypoint_seg_model_path))
            self.keypoint_seg_predictor.load_state_dict(torch.load(args.keypoint_seg_model_path))

        self.keypoint_seg_predictor.eval()

        # load BPnP (differentiable PnP layers)
        self.bpnp = BPnP.apply
        self.bpnp_m3d = BPnP_m3d.apply

        # set up camera intrinsics
        self.intrinsics = np.array([[ args.fx, 0.      , args.px ],
                                    [ 0.     , args.fy , args.py ],
                                    [ 0.     , 0.      , 1.      ]])
        print("Camera intrinsics: {}".format(self.intrinsics))

        self.K = torch.tensor(self.intrinsics, device=self.device, dtype=torch.float)

        # set up robot model
        if args.robot_name == "Panda":
            from .robot_arm import PandaArm
            self.robot = PandaArm(args.urdf_file)
        elif args.robot_name == "Baxter_left_arm":
            from .robot_arm import BaxterLeftArm
            self.robot = BaxterLeftArm(args.urdf_file)
        print("Robot model: {}".format(args.robot_name))

    def _get_points_3d(self, joint_angles):
        """Forward kinematics -> (num_keypoints, 3) keypoint positions.

        joint_angles: (7,) array of joint positions for one robot
        configuration. Returns a float tensor on self.device.
        """
        _, t_list = self.robot.get_joint_RT(joint_angles)
        points_3d = torch.from_numpy(np.array(t_list)).float().to(self.device)
        if self.args.robot_name == "Panda":
            # get_joint_RT returns 9 link frames for Panda; drop links 1 and 5
            # (rows, not columns) as they overlap with links 2 and 6.
            # BUGFIX: previously some call sites used points_3d[:, [0,2,3,4,6,7,8]],
            # which indexes the size-3 coordinate axis and raises IndexError.
            points_3d = points_3d[[0, 2, 3, 4, 6, 7, 8]]
        return points_3d

    def inference_single_image(self, img, joint_angles):
        """Estimate the camera-to-robot pose from one image.

        img: (3, H, W) tensor; joint_angles: (7,) array.
        Returns (cTr (1,6), points_2d (1,k,2), foreground_mask).
        """
        # detect 2d keypoints and segmentation masks
        points_2d, segmentation = self.keypoint_seg_predictor(img[None])
        foreground_mask = torch.sigmoid(segmentation)
        points_3d = self._get_points_3d(joint_angles)

        #init_pose = torch.tensor([[ 1.5497, 0.5420, -0.3909, -0.4698, -0.0211, 1.3243]])
        #cTr = bpnp(points_2d_pred, points_3d, K, init_pose)
        cTr = self.bpnp(points_2d, points_3d, self.K)

        return cTr, points_2d, foreground_mask

    def inference_batch_images(self, img, joint_angles):
        """Batched version of inference_single_image.

        img: (B, 3, H, W); joint_angles: (B, 7).
        Returns (cTr (B,6), points_2d (B,k,2), foreground_mask).
        """
        # detect 2d keypoints and segmentation masks
        points_2d, segmentation = self.keypoint_seg_predictor(img)
        foreground_mask = torch.sigmoid(segmentation)

        points_3d_batch = []
        for b in range(joint_angles.shape[0]):
            points_3d = self._get_points_3d(joint_angles[b])
            points_3d_batch.append(points_3d[None])

        points_3d_batch = torch.cat(points_3d_batch, dim=0)

        cTr = self.bpnp_m3d(points_2d, points_3d_batch, self.K)

        return cTr, points_2d, foreground_mask

    def cTr_to_pose_matrix(self, cTr):
        """
        cTr: (batch_size, 6)
        pose_matrix: (batch_size, 4, 4)
        """
        batch_size = cTr.shape[0]
        pose_matrix = torch.zeros((batch_size, 4, 4), device=self.device)
        pose_matrix[:, :3, :3] = kornia.geometry.conversions.angle_axis_to_rotation_matrix(cTr[:, :3])
        pose_matrix[:, :3, 3] = cTr[:, 3:]
        pose_matrix[:, 3, 3] = 1
        return pose_matrix

    def to_valid_R_batch(self, R):
        # R is a batch of 3x3 rotation matrices; project onto SO(3) via SVD.
        U, S, V = torch.svd(R)
        return torch.bmm(U, V.transpose(1,2))

    def setup_robot_renderer(self, mesh_files):
        # mesh_files: list of mesh files
        # Negative focal length converts between the OpenCV and PyTorch3D
        # camera conventions.
        focal_length = [-self.args.fx, -self.args.fy]
        principal_point = [self.args.px, self.args.py]
        image_size = [self.args.height, self.args.width]

        robot_renderer = RobotMeshRenderer(
            focal_length=focal_length, principal_point=principal_point, image_size=image_size,
            robot=self.robot, mesh_files=mesh_files, device=self.device)

        return robot_renderer

    def render_single_robot_mask(self, cTr, robot_mesh, robot_renderer):
        """Render the robot silhouette for one pose.

        cTr: (6,) pose; returns the alpha channel of the rendered image
        (1, H, W).
        """
        R = kornia.geometry.conversions.angle_axis_to_rotation_matrix(cTr[:3][None])  # (1, 3, 3)
        R = torch.transpose(R, 1, 2)
        #R = to_valid_R_batch(R)
        T = cTr[3:][None]  # (1, 3)

        if T[0, -1] < 0:
            # Negative depth: flip the pose so the robot is in front of the camera.
            rendered_image = robot_renderer.silhouette_renderer(meshes_world=robot_mesh, R = -R, T = -T)
        else:
            rendered_image = robot_renderer.silhouette_renderer(meshes_world=robot_mesh, R = R, T = T)

        if torch.isnan(rendered_image).any():
            rendered_image = torch.nan_to_num(rendered_image)

        return rendered_image[..., 3]  # alpha channel = soft silhouette

    def train_on_batch(self, img, joint_angles, robot_renderer, criterions, phase='train'):
        """One self-supervised training step; returns the scalar loss.

        img: (B, 3, H, W); joint_angles: (B, 7); criterions: dict with
        "mse_mean", "mse_sum" and "bce" losses.
        """
        with torch.set_grad_enabled(phase == 'train'):
            # detect 2d keypoints
            points_2d, segmentation = self.keypoint_seg_predictor(img)

            mask_list = list()
            seg_weight_list = list()

            for b in range(img.shape[0]):
                # get 3d points
                points_3d = self._get_points_3d(joint_angles[b])

                # get camera pose
                cTr = self.bpnp(points_2d[b][None], points_3d, self.K)

                # config robot mesh
                robot_mesh = robot_renderer.get_robot_mesh(joint_angles[b])

                # render robot mask
                rendered_image = self.render_single_robot_mask(cTr.squeeze(), robot_mesh, robot_renderer)

                mask_list.append(rendered_image)
                # Down-weight samples whose PnP reprojection error is large.
                points_2d_proj = batch_project(cTr, points_3d, self.K)
                reproject_error = criterions["mse_mean"](points_2d[b], points_2d_proj.squeeze())
                seg_weight = torch.exp(-reproject_error * self.args.reproj_err_scale)
                seg_weight_list.append(seg_weight)

            mask_batch = torch.cat(mask_list, 0)

            # BCE: predicted segmentation supervised by the rendered mask.
            loss_bce = 0
            for b in range(segmentation.shape[0]):
                loss_bce = loss_bce + seg_weight_list[b] * criterions["bce"](segmentation[b].squeeze(), mask_batch[b].detach())

            # MSE: rendered mask supervised by the (detached) predicted mask.
            img_ref = torch.sigmoid(segmentation).detach()
            #loss_reproj = 0.0005 * criterionMSE_mean(points_2d, points_2d_proj_batch)
            loss_mse = 0.001 * criterions["mse_sum"](mask_batch, img_ref.squeeze())
            loss = loss_mse + loss_bce

            return loss
# -------------------------------------------------------------------------------- /models/keypoint_seg_resnet.py:
# --------------------------------------------------------------------------------
import torch
from torch import nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import torchvision.models as models



class KeypointUpSample(nn.Module):
    # Transposed-conv head that maps backbone features to one heatmap per
    # keypoint, followed by a (currently 1x, i.e. no-op) bilinear upscale.
    def __init__(self, in_channels, num_keypoints):
        super().__init__()
        input_features = in_channels
        deconv_kernel = 4
        self.kps_score_lowres = nn.ConvTranspose2d(
            input_features,
            num_keypoints,
            deconv_kernel,
            stride=2,
            padding=deconv_kernel // 2 - 1,
        )
        nn.init.kaiming_normal_(self.kps_score_lowres.weight, mode="fan_out", nonlinearity="relu")
        nn.init.constant_(self.kps_score_lowres.bias, 0)
        #nn.init.uniform_(self.kps_score_lowres.weight)
        #nn.init.uniform_(self.kps_score_lowres.bias)
        self.up_scale = 1  # kept at 1: interpolate below is effectively identity
        self.out_channels = num_keypoints

    def forward(self, x):
        x = self.kps_score_lowres(x)
        return torch.nn.functional.interpolate(
            x, scale_factor=float(self.up_scale), mode="bilinear", align_corners=False, recompute_scale_factor=False
        )




class SpatialSoftArgmax(nn.Module):
    """
    The spatial softmax of each feature
    map is used to compute a weighted mean of the pixel
    locations, effectively performing a soft arg-max
    over the feature dimension.

    """

    def __init__(self, normalize=True):
        """Constructor.
        Args:
            normalize (bool): Whether to use normalized
                image coordinates, i.e. coordinates in
                the range `[-1, 1]`.
        """
        super().__init__()

        self.normalize = normalize

    def _coord_grid(self, h, w, device):
        # Returns a (2, h, w) grid of (y, x) coordinates, either normalized
        # to [-1, 1] or in integer pixel units.
        if self.normalize:
            return torch.stack(
                torch.meshgrid(
                    torch.linspace(-1, 1, h, device=device),
                    torch.linspace(-1, 1, w, device=device),
                    indexing='ij',
                )
            )
        return torch.stack(
            torch.meshgrid(
                torch.arange(0, h, device=device),
                torch.arange(0, w, device=device),
                indexing='ij',
            )
        )

    def forward(self, x):
        assert x.ndim == 4, "Expecting a tensor of shape (B, C, H, W)."

        # compute a spatial softmax over the input:
        # given an input of shape (B, C, H, W),
        # reshape it to (B*C, H*W) then apply
        # the softmax operator over the last dimension
        b, c, h, w = x.shape
        softmax = F.softmax(x.view(-1, h * w), dim=-1)

        # create a meshgrid of pixel coordinates
        # both in the x and y axes
        yc, xc = self._coord_grid(h, w, x.device)

        # element-wise multiply the x and y coordinates
        # with the softmax, then sum over the h*w dimension
        # this effectively computes the weighted mean of x
        # and y locations
        x_mean = (softmax * xc.flatten()).sum(dim=1, keepdims=True)
        y_mean = (softmax * yc.flatten()).sum(dim=1, keepdims=True)

        # concatenate and reshape the result
        # to (B, C, 2) where for every feature
        # we have the expected x and y pixel
        # locations
        return torch.cat([x_mean, y_mean], dim=1).view(-1, c, 2)


class KeyPointSegNet(nn.Module):
    # DeepLabV3-ResNet50 backbone with two heads: a soft-argmax keypoint
    # branch (k = args.n_kp keypoints) and a 1-channel segmentation branch.
    def __init__(self, args, lim=[-1., 1., -1., 1.], use_gpu=True):
        super(KeyPointSegNet, self).__init__()

        self.args = args
        self.lim = lim  # normalized coordinate range [x_min, x_max, y_min, y_max]

        k = args.n_kp

        if use_gpu:
            self.device = "cuda"
        else:
            self.device = "cpu"


        deeplabv3_resnet50 = models.segmentation.deeplabv3_resnet50(pretrained=True)
        deeplabv3_resnet50.classifier[4] = torch.nn.Conv2d(256, 1, kernel_size=(1, 1), stride=(1, 1)) # replace final layer with a single-channel (binary mask) output

        self.backbone = torch.nn.Sequential(list(deeplabv3_resnet50.children())[0])

        self.read_out = KeypointUpSample(2048, k)

        self.spatialsoftargmax = SpatialSoftArgmax()

        # NOTE: "classifer" (sic) is the DeepLabV3 segmentation head.
        self.classifer = torch.nn.Sequential((list(deeplabv3_resnet50.children())[1]))



    def forward(self, img):
        # Returns (keypoints (B, k, 2) in pixel coordinates, segout (B, 1, H, W) logits).
        input_shape = img.shape[-2:]

        resnet_out = self.backbone(img)['out']  # (B, 2048, H//8, W//8)

        # keypoint prediction branch
        heatmap = self.read_out(resnet_out)  # (B, k, H//4, W//4)
        keypoints = self.spatialsoftargmax(heatmap)
        # mapping back to original resolution from [-1,1]
        offset = torch.tensor([self.lim[0], self.lim[2]], device = resnet_out.device)
        scale = torch.tensor([self.args.width // 2, self.args.height // 2], device = resnet_out.device)
        keypoints = keypoints - offset   # shift [-1,1] -> [0,2]
        keypoints = keypoints * scale    # scale to pixel units

        # segmentation branch
        x = self.classifer(resnet_out)
        segout = F.interpolate(x, size=input_shape, mode='bilinear', align_corners=False)

        return keypoints, segout
# -------------------------------------------------------------------------------- /models/mesh_renderer.py: --------------------------------------------------------------------------------
import os
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F

# io utils
from pytorch3d.io import load_obj

# datastructures
from pytorch3d.structures import Meshes

# rendering components
from pytorch3d.renderer import (
    RasterizationSettings, MeshRenderer, MeshRasterizer, BlendParams,
    SoftSilhouetteShader, HardPhongShader, PointLights, TexturesVertex,
    PerspectiveCameras,Textures
)

from os.path import exists


class RobotMeshRenderer():
    """
    Class
that render robot mesh with differentiable renderer
    """
    def __init__(self, focal_length, principal_point, image_size, robot, mesh_files, device):

        self.focal_length = focal_length
        self.principal_point = principal_point
        self.image_size = image_size
        self.device = device
        self.robot = robot
        self.mesh_files = mesh_files
        self.preload_verts = []
        self.preload_faces = []


        # preload the mesh to save loading time
        for m_file in mesh_files:
            assert exists(m_file)
            preload_verts_i, preload_faces_idx_i, _ = load_obj(m_file)
            preload_faces_i = preload_faces_idx_i.verts_idx
            self.preload_verts.append(preload_verts_i)
            self.preload_faces.append(preload_faces_i)


        # set up differentiable renderer with given camera parameters
        self.cameras = PerspectiveCameras(focal_length = [focal_length],
                                          principal_point = [principal_point],
                                          device=device,
                                          in_ndc=False, image_size = [image_size])  # (height, width) !!!!!

        # Small sigma/gamma -> near-hard silhouette edges but still differentiable.
        blend_params = BlendParams(sigma=1e-8, gamma=1e-8)
        raster_settings = RasterizationSettings(
            image_size=image_size,
            blur_radius=np.log(1. / 1e-4 - 1.) * blend_params.sigma,
            faces_per_pixel=100,
            max_faces_per_bin=100000,  # max_faces_per_bin=1000000,
        )

        # Create a silhouette mesh renderer by composing a rasterizer and a shader.
        self.silhouette_renderer = MeshRenderer(
            rasterizer=MeshRasterizer(
                cameras=self.cameras,
                raster_settings=raster_settings
            ),
            shader=SoftSilhouetteShader(blend_params=blend_params)
        )


        # We will also create a Phong renderer. This is simpler and only needs to render one face per pixel.
        raster_settings = RasterizationSettings(
            image_size=image_size,
            blur_radius=0.0,
            faces_per_pixel=1,
            max_faces_per_bin=100000,
        )
        # We can add a point light in front of the object.
        lights = PointLights(device=device, location=((2.0, 2.0, -2.0),))
        self.phong_renderer = MeshRenderer(
            rasterizer=MeshRasterizer(
                cameras=self.cameras,
                raster_settings=raster_settings
            ),
            shader=HardPhongShader(device=device, cameras=self.cameras, lights=lights)
        )

    def get_robot_mesh(self, joint_angle):
        # Pose each preloaded link mesh with forward kinematics and merge
        # them into a single textured pytorch3d Meshes object.

        R_list, t_list = self.robot.get_joint_RT(joint_angle)
        assert len(self.mesh_files) == R_list.shape[0] and len(self.mesh_files) == t_list.shape[0]

        verts_list = []
        faces_list = []
        verts_rgb_list = []
        verts_count = 0
        for i in range(len(self.mesh_files)):
            verts_i = self.preload_verts[i]
            faces_i = self.preload_faces[i]

            # Transform the link mesh into the robot base frame.
            R = torch.tensor(R_list[i],dtype=torch.float32)
            t = torch.tensor(t_list[i],dtype=torch.float32)
            verts_i = verts_i @ R.T + t
            #verts_i = (R @ verts_i.T).T + t
            faces_i = faces_i + verts_count  # offset face indices into the merged vertex list

            verts_count+=verts_i.shape[0]

            verts_list.append(verts_i.to(self.device))
            faces_list.append(faces_i.to(self.device))

            # Assign a random uniform color to all vertices of this link.
            color = torch.rand(3)
            verts_rgb_i = torch.ones_like(verts_i) * color  # (V, 3)
            verts_rgb_list.append(verts_rgb_i.to(self.device))



        verts = torch.concat(verts_list, dim=0)
        faces = torch.concat(faces_list, dim=0)

        verts_rgb = torch.concat(verts_rgb_list,dim=0)[None]
        textures = Textures(verts_rgb=verts_rgb)

        # Create a Meshes object
        robot_mesh = Meshes(
            verts=[verts.to(self.device)],
            faces=[faces.to(self.device)],
            textures=textures
        )

        return robot_mesh


    def get_robot_verts_and_faces(self, joint_angle):
        # Same kinematic posing as get_robot_mesh, but returns raw (verts,
        # faces) tensors without colors/textures.

        R_list, t_list = self.robot.get_joint_RT(joint_angle)
        assert len(self.mesh_files) == R_list.shape[0] and len(self.mesh_files) == t_list.shape[0]

        verts_list = []
        faces_list = []
        verts_rgb_list = []
        verts_count = 0
        for i in range(len(self.mesh_files)):
            verts_i = self.preload_verts[i]
            faces_i = self.preload_faces[i]

            R = torch.tensor(R_list[i],dtype=torch.float32)
            t = torch.tensor(t_list[i],dtype=torch.float32)
            verts_i = verts_i @ R.T + t
            #verts_i = (R @ verts_i.T).T + t
            faces_i = faces_i + verts_count

            verts_count+=verts_i.shape[0]

            verts_list.append(verts_i.to(self.device))
            faces_list.append(faces_i.to(self.device))

            #color = torch.rand(3)
            #verts_rgb_i = torch.ones_like(verts_i) * color # (V, 3)
            #verts_rgb_list.append(verts_rgb_i.to(self.device))

        verts = torch.concat(verts_list, dim=0)
        faces = torch.concat(faces_list, dim=0)


        return verts, faces
# -------------------------------------------------------------------------------- /models/robot_arm.py: --------------------------------------------------------------------------------
import numpy as np
from roboticstoolbox.robot.ERobot import ERobot

class BaxterLeftArm():
    # Thin wrapper around a roboticstoolbox URDF model of Baxter, exposing
    # per-link rotations/translations for the left arm.
    def __init__(self, urdf_file):

        self.robot = self.Baxter(urdf_file)

    def get_joint_RT(self, joint_angle):
        # joint_angle: (7,) left-arm joint positions.
        # Returns (R_list (7,3,3), t_list (7,3)) of link poses relative to link 30.

        assert joint_angle.shape[0] == 7
        joint_angle_all = np.zeros(15)
        joint_angle_all[-7:] = joint_angle  # left-arm joints occupy the last 7 slots

        link_idx_list = [30,31,32,33,34,36,37]
        R_list = []
        t_list = []
        # base:30, J1:30, J2:31, J3:32, J4:33, J5:34, J6:36, J7:37

        for i in range(joint_angle.shape[0]):
            link_idx = link_idx_list[i]
            T = self.robot.fkine(joint_angle_all, end = self.robot.links[link_idx], start = self.robot.links[30])
            R_list.append(T.R)
            t_list.append(T.t)



        return np.array(R_list),np.array(t_list)


    class Baxter(ERobot):
        """
        Class that imports a URDF model
        """

        def __init__(self, urdf_file):

            links, name, urdf_string, urdf_filepath = self.URDF_read(urdf_file)

            super().__init__(
                links,
                name=name,
                manufacturer="Rethink",
                urdf_string=urdf_string,
                urdf_filepath=urdf_filepath,
                # gripper_links=elinks[9]
            )

            # self.qdlim = np.array([
            #     2.1750, 2.1750, 2.1750, 2.1750, 2.6100, 2.6100, 2.6100, 3.0, 3.0])


class PandaArm():
    # Thin wrapper around a roboticstoolbox URDF model of the Franka Panda.
    def __init__(self, urdf_file):

        self.robot = self.Panda(urdf_file)

    def get_joint_RT(self, joint_angle):
        # joint_angle: (7,) Panda joint positions.
        # Returns (R_list (9,3,3), t_list (9,3)) for links 0-7 plus the hand.

        assert joint_angle.shape[0] == 7
        link_idx_list = [0,1,2,3,4,5,6,7,9]
        # link 0,1,2,3,4,5,6,7, and hand
        R_list = []
        t_list = []


        for i in range(len(link_idx_list)):
            link_idx = link_idx_list[i]
            T = self.robot.fkine(joint_angle, end = self.robot.links[link_idx], start = self.robot.links[0])
            R_list.append(T.R)
            t_list.append(T.t)



        return np.array(R_list),np.array(t_list)


    class Panda(ERobot):
        """
        Class that imports a URDF model
        """

        def __init__(self, urdf_file):

            links, name, urdf_string, urdf_filepath = self.URDF_read(urdf_file)

            super().__init__(
                links,
                name=name,
                manufacturer="Franka",
                urdf_string=urdf_string,
                urdf_filepath=urdf_filepath,
                # gripper_links=elinks[9]
            )



# -------------------------------------------------------------------------------- /package.xml: --------------------------------------------------------------------------------
# NOTE(review): the XML markup of package.xml was stripped by the extraction;
# only the text content survives below.
ctrnet-robot-pose-estimation-ros
0.0.0
The ctrnet-robot-pose-estimation-ros package
arclab
TODO
catkin
roscpp
rospy
std_msgs
roscpp
rospy
std_msgs
roscpp
rospy
std_msgs
# -------------------------------------------------------------------------------- /ros_nodes/baxter_leftarm_pose.py: --------------------------------------------------------------------------------
# ROS node: subscribes to a synchronized camera image + joint-state pair,
# runs CtRNet single-image inference, and publishes the camera-to-robot pose.
import sys
import os
base_dir = os.path.abspath(".")
sys.path.append(base_dir)

import numpy as np
import time
import rospy
from cv_bridge import CvBridge, CvBridgeError
from message_filters import ApproximateTimeSynchronizer, Subscriber
import sensor_msgs
import geometry_msgs
import kornia

import torch
import torchvision.transforms as transforms

from PIL import Image as PILImage
from utils import *
from models.CtRNet import CtRNet

import cv2
bridge = CvBridge()

#os.environ['ROS_MASTER_URI']='http://192.168.1.116:11311'
#os.environ['ROS_IP']='192.168.1.186'

################################################################
import argparse
parser = argparse.ArgumentParser()

args = parser.parse_args("")

# Hard-coded runtime configuration for this node.
args.base_dir = "/home/jingpei/Desktop/CtRNet-robot-pose-estimation"
args.use_gpu = True
args.trained_on_multi_gpus = False
args.keypoint_seg_model_path = os.path.join(args.base_dir,"weights/baxter/net.pth")
args.urdf_file = os.path.join(args.base_dir,"urdfs/Baxter/baxter_description/urdf/baxter.urdf")
args.robot_name = 'Baxter_left_arm' # "Panda" or "Baxter_left_arm"
args.n_kp = 7
args.scale = 0.3125
args.height = 1536
args.width = 2048
args.fx, args.fy, args.px, args.py = 960.41357421875, 960.22314453125, 1021.7171020507812, 776.2381591796875
# scale the camera parameters
args.width = int(args.width * args.scale)
args.height = int(args.height * args.scale)
args.fx = args.fx * args.scale
args.fy = args.fy * args.scale
args.px = args.px * args.scale
args.py = args.py * args.scale

if args.use_gpu:
    device = "cuda"
else:
    device = "cpu"

trans_to_tensor = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
])

CtRNet = CtRNet(args)

def preprocess_img(cv_img,args):
    # Resize by args.scale and convert to a normalized tensor (ImageNet stats).
    image_pil = PILImage.fromarray(cv_img)
    width, height = image_pil.size
    new_size = (int(width*args.scale),int(height*args.scale))
    image_pil = image_pil.resize(new_size)
    image = trans_to_tensor(image_pil)
    return image


#############################################################################3

#start = time.time()
def gotData(img_msg, joint_msg):
    # Synchronized callback: image + joint state -> pose estimate -> publish.
    #global start
    print("Received data!")
    try:
        # Convert your ROS Image message to OpenCV2
        cv2_img = bridge.imgmsg_to_cv2(img_msg, "bgr8")
        cv_img = cv2.cvtColor(cv2_img, cv2.COLOR_BGR2RGB)
        image = preprocess_img(cv_img,args)

        # Reorder /joint_states into the 7 left-arm joints CtRNet expects.
        joint_angles = np.array(joint_msg.position)[[4,5,2,3,6,7,8]]

        if args.use_gpu:
            image = image.cuda()

        cTr, points_2d, segmentation = CtRNet.inference_single_image(image, joint_angles)

        print(cTr)
        qua = kornia.geometry.conversions.angle_axis_to_quaternion(cTr[:,:3]).detach().cpu().numpy().squeeze() # xyzw
        #print(qua)
        T = cTr[:,3:].detach().cpu().numpy().squeeze()
        p = geometry_msgs.msg.PoseStamped()
        p.header = img_msg.header
        p.pose.position.x = T[0]
        p.pose.position.y = T[1]
        p.pose.position.z = T[2]
        p.pose.orientation.x = qua[0]
        p.pose.orientation.y = qua[1]
        p.pose.orientation.z = qua[2]
        p.pose.orientation.w = qua[3]
        #print(p)
        pose_pub.publish(p)


        #### visualization code ####
        #points_2d = points_2d.detach().cpu().numpy()
        #img_np = to_numpy_img(image)
        #img_np = overwrite_image(img_np,points_2d[0].astype(int), color=(1,0,0))
        #plt.imsave("test.png",img_np)
        ####


    except CvBridgeError as e:
        print(e)



rospy.init_node('baxter_leftarm_pose')
# Define your image topic
image_topic = "/rgb/image_raw"
robot_joint_topic = "/joint_states"
robot_pose_topic = "robot_pose"
# Set up your subscriber and define its callback
#rospy.Subscriber(image_topic, sensor_msgs.msg.Image, gotData)

image_sub = Subscriber(image_topic, sensor_msgs.msg.Image)
robot_j_sub = Subscriber(robot_joint_topic, sensor_msgs.msg.JointState)
pose_pub = rospy.Publisher(robot_pose_topic, geometry_msgs.msg.PoseStamped, queue_size=1)

ats = ApproximateTimeSynchronizer([image_sub, robot_j_sub], queue_size=10, slop=5)
ats.registerCallback(gotData)


# Main loop:
rate = rospy.Rate(30) # 30hz

while not rospy.is_shutdown():
    rate.sleep()
# -------------------------------------------------------------------------------- /ros_nodes/panda_pose.py: --------------------------------------------------------------------------------
#!/usr/bin/env python3
# ROS node: same pipeline as baxter_leftarm_pose.py but for the Franka Panda.
import sys
import os
base_dir = os.path.abspath(".")
sys.path.append(base_dir)

import numpy as np
import time
import rospy
from cv_bridge import CvBridge, CvBridgeError
from message_filters import ApproximateTimeSynchronizer, Subscriber
import sensor_msgs
import geometry_msgs
import kornia

import torch
import torchvision.transforms as transforms

from PIL import Image as PILImage
from utils import *
from models.CtRNet import CtRNet

import cv2
bridge = CvBridge()

import transforms3d as t3d
import tf2_ros
#os.environ['ROS_MASTER_URI']='http://192.168.1.116:11311'
#os.environ['ROS_IP']='192.168.1.186'

################################################################
import argparse
parser = argparse.ArgumentParser()

args = parser.parse_args("")

# Hard-coded runtime configuration for this node.
args.base_dir = "/home/workspace/src/ctrnet-robot-pose-estimation-ros/"
args.use_gpu = True
args.trained_on_multi_gpus = True
args.keypoint_seg_model_path = os.path.join(args.base_dir,"weights/panda/panda-3cam_azure/net.pth")
args.urdf_file = os.path.join(args.base_dir,"urdfs/Panda/panda.urdf")
args.robot_name = 'Panda' # "Panda" or "Baxter_left_arm"
args.n_kp = 7
args.scale = 0.15625
args.height = 1536
args.width = 2048
args.fx, args.fy, args.px, args.py = 960.41357421875, 960.22314453125, 1021.7171020507812, 776.2381591796875
# scale the camera intrinsics to match the downsampled input image
args.width = int(args.width * args.scale)
args.height = int(args.height * args.scale)
args.fx = args.fx * args.scale
args.fy = args.fy * args.scale
args.px = args.px * args.scale
args.py = args.py * args.scale

device = "cuda" if args.use_gpu else "cpu"

# ImageNet normalization, matching the backbone's training statistics.
trans_to_tensor = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
])

# NOTE(review): the instance previously shadowed the CtRNet *class*
# (`CtRNet = CtRNet(args)`); renamed so the class stays importable/usable.
ctrnet = CtRNet(args)


def preprocess_img(cv_img, args):
    """Downscale an RGB numpy image by args.scale and return a normalized CHW tensor."""
    image_pil = PILImage.fromarray(cv_img)
    width, height = image_pil.size
    new_size = (int(width * args.scale), int(height * args.scale))
    image_pil = image_pil.resize(new_size)
    return trans_to_tensor(image_pil)


#############################################################################


def gotData(img_msg, joint_msg):
    """Synchronized image + joint-state callback.

    Runs CtRNet single-image inference, publishes the camera-to-robot pose
    as a PoseStamped, and broadcasts the camera_base -> panda_link0 TF.
    """
    print("Received data!")
    try:
        # Convert the ROS Image message to an OpenCV RGB array.
        cv2_img = bridge.imgmsg_to_cv2(img_msg, "bgr8")
        cv_img = cv2.cvtColor(cv2_img, cv2.COLOR_BGR2RGB)
        image = preprocess_img(cv_img, args)

        # Panda publishes its 7 arm joints first in /joint_states.
        joint_angles = np.array(joint_msg.position)[[0, 1, 2, 3, 4, 5, 6]]

        if args.use_gpu:
            image = image.cuda()

        cTr, points_2d, segmentation = ctrnet.inference_single_image(image, joint_angles)
        print(cTr)

        # NOTE(review): the original comment claimed "xyzw", but kornia's
        # angle_axis_to_quaternion has historically returned (w, x, y, z);
        # verify the coefficient order for the installed kornia version.
        qua = kornia.geometry.conversions.angle_axis_to_quaternion(cTr[:, :3]).detach().cpu().numpy().squeeze()
        T = cTr[:, 3:].detach().cpu().numpy().squeeze()

        p = geometry_msgs.msg.PoseStamped()
        p.header = img_msg.header
        p.pose.position.x = T[0]
        p.pose.position.y = T[1]
        p.pose.position.z = T[2]
        p.pose.orientation.x = qua[0]
        p.pose.orientation.y = qua[1]
        p.pose.orientation.z = qua[2]
        p.pose.orientation.w = qua[3]
        pose_pub.publish(p)

        # Build the 4x4 pose in the OpenCV camera frame, then rotate it into
        # the ROS camera convention before broadcasting.
        cvTr = np.eye(4)
        cvTr[:3, :3] = kornia.geometry.conversions.angle_axis_to_rotation_matrix(cTr[:, :3]).detach().cpu().numpy().squeeze()
        cvTr[:3, 3] = np.array(cTr[:, 3:].detach().cpu())

        # ROS camera to CV camera transform (axis permutation only).
        cTcv = np.array([[0, 0, 1, 0], [-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 0, 1]])
        T = cTcv @ cvTr
        qua = t3d.quaternions.mat2quat(T[:3, :3])  # wxyz

        t = geometry_msgs.msg.TransformStamped()
        t.header.stamp = rospy.Time.now()
        t.header.frame_id = "camera_base"
        t.child_frame_id = "panda_link0"
        t.transform.translation.x = T[0, 3]
        t.transform.translation.y = T[1, 3]
        t.transform.translation.z = T[2, 3]
        t.transform.rotation.x = qua[1]
        t.transform.rotation.y = qua[2]
        t.transform.rotation.z = qua[3]
        t.transform.rotation.w = qua[0]
        # Uses the module-level broadcaster created once at startup (see below).
        tf_broadcaster.sendTransform(t)

        #### visualization code ####
        #points_2d = points_2d.detach().cpu().numpy()
        #img_np = to_numpy_img(image)
        #img_np = overwrite_image(img_np,points_2d[0].astype(int), color=(1,0,0))
        #plt.imsave("test.png",img_np)
        ####

    except CvBridgeError as e:
        print(e)


rospy.init_node('panda_pose')
# Topics: input image + joint states, output estimated pose.
image_topic = "/rgb/image_raw"
robot_joint_topic = "/joint_states"
robot_pose_topic = "robot_pose"

# FIX(review): create the TF broadcaster once. The original constructed a new
# tf2_ros.TransformBroadcaster() inside every callback, which registers a new
# publisher per message and can silently drop transforms.
tf_broadcaster = tf2_ros.TransformBroadcaster()

image_sub = Subscriber(image_topic, sensor_msgs.msg.Image)
robot_j_sub = Subscriber(robot_joint_topic, sensor_msgs.msg.JointState)
pose_pub = rospy.Publisher(robot_pose_topic, geometry_msgs.msg.PoseStamped, queue_size=1)

# Approximate sync: image and joint-state timestamps may differ by up to `slop` s.
ats = ApproximateTimeSynchronizer([image_sub, robot_j_sub], queue_size=10, slop=5)
ats.registerCallback(gotData)

# Main loop: all work happens in the synchronized callback.
rate = rospy.Rate(30)  # 30hz
while not rospy.is_shutdown():
    rate.sleep()


# ---------------- setup.py ----------------
## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup

# fetch values from package.xml
setup_args = generate_distutils_setup(
    packages=['models', 'utils', 'urdfs', 'weights'],
    package_dir={'': '.'},
)
setup(**setup_args)


# ---------------- train.py ----------------
import sys
import os
base_dir = os.path.abspath(".")
sys.path.append(base_dir)

import numpy as np
import matplotlib.pyplot as plt
import torch
from torch.utils.data import Dataset, DataLoader
import torchvision.transforms as transforms
import torch.optim as optim
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.utils.tensorboard import SummaryWriter
from tqdm import tqdm
from pathlib import Path

from utils import *

import argparse


def get_args():
    """Build the hard-coded training configuration Namespace for Baxter."""
    parser = argparse.ArgumentParser()
    args = parser.parse_args("")

    args.data_folder = '/home/jingpei/Desktop/robot_pose_estimation/data_generation/baxter_data'
    args.base_dir = "/home/jingpei/Desktop/CtRNet-robot-pose-estimation"
    args.use_gpu = True
    args.trained_on_multi_gpus = False
    args.keypoint_seg_model_path = os.path.join(args.base_dir, "weights/pretrain/baxter/net.pth")
    #args.keypoint_seg_model_path = os.path.join(args.base_dir, "weights/baxter/net.pth")
    args.urdf_file = os.path.join(args.base_dir, "urdfs/Baxter/baxter_description/urdf/baxter.urdf")

    ##### training parameters #####
    args.batch_size = 6
    args.num_workers = 6
    args.lr = 1e-6
    args.beta1 = 0.9
    args.n_epoch = 500
    args.out_dir = 'outputs/Baxter_arm/weights'
    args.ckp_per_epoch = 10
    args.reproj_err_scale = 1.0 / 100.0
    ################################

    args.robot_name = 'Baxter_left_arm'  # "Panda" or "Baxter_left_arm"
    args.n_kp = 7
    args.scale = 0.3125
    args.height = 1536
    args.width = 2048
    args.fx, args.fy, args.px, args.py = 960.41357421875, 960.22314453125, 1021.7171020507812, 776.2381591796875

    # scale the camera intrinsics to match the downsampled input image
    args.width = int(args.width * args.scale)
    args.height = int(args.height * args.scale)
    args.fx = args.fx * args.scale
    args.fy = args.fy * args.scale
    args.px = args.px * args.scale
    args.py = args.py * args.scale

    return args


def main(args):
    """Train the CtRNet keypoint/segmentation predictor on real Baxter data."""
    ######## setup CtRNet ########
    from models.CtRNet import CtRNet

    # NOTE(review): renamed from `CtRNet = CtRNet(args)` to stop shadowing the class.
    ctrnet = CtRNet(args)

    # Visual meshes for the seven left-arm links, in kinematic order.
    mesh_files = [os.path.join(args.base_dir, "urdfs/Baxter/S0/S0.obj"),
                  os.path.join(args.base_dir, "urdfs/Baxter/S1/S1.obj"),
                  os.path.join(args.base_dir, "urdfs/Baxter/E0/E0.obj"),
                  os.path.join(args.base_dir, "urdfs/Baxter/E1/E1.obj"),
                  os.path.join(args.base_dir, "urdfs/Baxter/W0/W0.obj"),
                  os.path.join(args.base_dir, "urdfs/Baxter/W1/W1.obj"),
                  os.path.join(args.base_dir, "urdfs/Baxter/W2/W2.obj")]

    robot_renderer = ctrnet.setup_robot_renderer(mesh_files)

    ######## setup dataset ########
    from imageloaders.baxter import ImageDataLoaderReal

    trans_to_tensor = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ])

    datasets = {}
    dataloaders = {}
    data_n_batches = {}
    # NOTE(review): 'train' and 'eval' load the *same* data_folder, so the
    # eval loss is measured on training data — confirm this is intentional.
    for phase in ['train', 'eval']:
        datasets[phase] = ImageDataLoaderReal(data_folder=args.data_folder, scale=args.scale, trans_to_tensor=trans_to_tensor)

        dataloaders[phase] = DataLoader(
            datasets[phase], batch_size=args.batch_size,
            shuffle=True if phase == 'train' else False,
            num_workers=args.num_workers)

        data_n_batches[phase] = len(dataloaders[phase])

    ######## setup optimizer and criterions ########
    criterionMSE_sum = torch.nn.MSELoss(reduction='sum')
    criterionMSE_mean = torch.nn.MSELoss(reduction='mean')
    criterionBCE = torch.nn.BCEWithLogitsLoss()
    criterions = {"mse_sum": criterionMSE_sum, "mse_mean": criterionMSE_mean, "bce": criterionBCE}

    optimizer = optim.Adam(ctrnet.keypoint_seg_predictor.parameters(), lr=args.lr, betas=(args.beta1, 0.999))
    scheduler = ReduceLROnPlateau(optimizer, 'min', factor=0.1, patience=5, verbose=True)

    ######## training loop ########
    epoch_writer = SummaryWriter(comment="_writter")

    best_valid_loss = np.inf

    for epoch in range(0, args.n_epoch):
        for phase in ['train', 'eval']:
            iter_writer = SummaryWriter(comment="_epoch_" + str(epoch) + "_" + phase)

            # set model to train/eval mode
            ctrnet.keypoint_seg_predictor.train(phase == 'train')
            print("model training: " + str(ctrnet.keypoint_seg_predictor.training))

            meter_loss = AverageMeter()
            loader = dataloaders[phase]

            for i, data in tqdm(enumerate(loader), total=data_n_batches[phase]):
                if args.use_gpu:
                    if isinstance(data, list):
                        data = [d.cuda() for d in data]
                    else:
                        data = data.cuda()

                # load data
                img, joint_angles = data

                # forward
                loss = ctrnet.train_on_batch(img, joint_angles.cpu().squeeze(), robot_renderer, criterions, phase)
                meter_loss.update(loss.item(), n=img.size(0))

                if phase == 'train':
                    optimizer.zero_grad()
                    loss.backward()
                    # clip to keep the low-LR training numerically stable
                    torch.nn.utils.clip_grad_value_(ctrnet.keypoint_seg_predictor.parameters(), 10)
                    optimizer.step()

                # write to log
                iter_writer.add_scalar('loss_all', loss.item(), i)

                log = '%s [%d/%d] Loss: %.6f, LR: %f' % (
                    phase, epoch, args.n_epoch,
                    meter_loss.avg,
                    get_lr(optimizer))

            iter_writer.close()

            print(log)
            if phase == 'train':
                epoch_writer.add_scalar('loss_train', meter_loss.avg, epoch)
            else:
                epoch_writer.add_scalar('loss_eval', meter_loss.avg, epoch)

            if phase == 'eval':
                # LR schedule and best-checkpoint tracking use the eval loss.
                scheduler.step(meter_loss.avg)
                if meter_loss.avg < best_valid_loss:
                    best_valid_loss = meter_loss.avg
                    torch.save(ctrnet.keypoint_seg_predictor.state_dict(), '%s/net_best.pth' % (args.out_dir))

                log = 'Best eval: %.6f' % (best_valid_loss)
                print(log)
                # last-epoch weights are always saved alongside the best ones
                torch.save(ctrnet.keypoint_seg_predictor.state_dict(), '%s/net_last.pth' % (args.out_dir))

    epoch_writer.close()


if __name__ == '__main__':
    args = get_args()
    if args.out_dir:
        Path(args.out_dir).mkdir(parents=True, exist_ok=True)
    main(args)


# ---------------- urdfs/Baxter/E0/material.lib (lines 1-2) ----------------
# Aspose.3D Wavefront OBJ Exporter
# Copyright 2004-2022 Aspose Pty Ltd.
3 | # File created: 06/09/2022 21:18:11 4 | 5 | newmtl Material_001 6 | Ka 0.1 0.1 0.1 7 | Kd 0.512 0.01365332 0.01365332 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.25 0.25 0.25 12 | Ns 50 13 | -------------------------------------------------------------------------------- /urdfs/Baxter/E1/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 06/09/2022 21:19:01 4 | 5 | newmtl Material_001 6 | Ka 0.1 0.1 0.1 7 | Kd 0.512 0 0 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.25 0.25 0.25 12 | Ns 50 13 | -------------------------------------------------------------------------------- /urdfs/Baxter/S0/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 06/08/2022 20:09:18 4 | 5 | newmtl Material_002 6 | Ka 0.1 0.1 0.1 7 | Kd 0.09748456 0.09748456 0.09748456 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.25 0.25 0.25 12 | Ns 50 13 | -------------------------------------------------------------------------------- /urdfs/Baxter/S1/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 06/09/2022 21:16:20 4 | 5 | newmtl Material_001 6 | Ka 0.1 0.1 0.1 7 | Kd 0.64 0 0 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.5 0.5 0.5 12 | Ns 50 13 | -------------------------------------------------------------------------------- /urdfs/Baxter/W0/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 
3 | # File created: 06/09/2022 21:17:56 4 | 5 | newmtl Material_001 6 | Ka 0.1 0.1 0.1 7 | Kd 0.512 0 0 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.25 0.25 0.25 12 | Ns 50 13 | -------------------------------------------------------------------------------- /urdfs/Baxter/W1/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 06/09/2022 21:18:31 4 | 5 | newmtl Material_001 6 | Ka 0.1 0.1 0.1 7 | Kd 0.09748472 0.09748472 0.09748472 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.25 0.25 0.25 12 | Ns 50 13 | -------------------------------------------------------------------------------- /urdfs/Baxter/W2/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 06/09/2022 21:17:39 4 | 5 | newmtl Material_001 6 | Ka 0.1 0.1 0.1 7 | Kd 0.01630304 0.01630304 0.01630304 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.25 0.25 0.25 12 | Ns 50 13 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 2.8.3) 2 | project(baxter_description) 3 | 4 | find_package(catkin REQUIRED) 5 | 6 | catkin_package() 7 | 8 | foreach(dir meshes urdf) 9 | install(DIRECTORY ${dir}/ 10 | DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}/${dir}) 11 | endforeach(dir) 12 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/base/PEDESTAL.STL: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/base/PEDESTAL.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/base/pedestal_link_collision.DAE: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Blender User 6 | Blender 2.67.0 r57141 7 | 8 | 2013-08-05T13:45:40 9 | 2013-08-05T13:45:40 10 | 11 | Z_UP 12 | 13 | 14 | 15 | 16 | 17 | 18 | 0 -0.1922612 -0.7516 -0.2904722 -0.2632086 -0.7516 -0.2904722 -0.1922612 -0.7516 0 0.1922612 -0.7516 0.2440557 0.2558969 -0.7516 0.4170677 0.218715 -0.7516 0.4170677 -0.218715 -0.7516 0.2440557 -0.2558969 -0.7516 0.2196846 -0.08665293 -0.7516 -0.2904722 0.1922612 -0.7516 -0.2904722 0.2632086 -0.7516 0.2196846 0.08665293 -0.7516 -0.5019719 0.1922612 -0.7516 -0.2991126 0.06803309 -0.7516 -0.302941 0 -0.7516 -0.2991126 -0.06803309 -0.7516 -0.5019719 -0.1922612 -0.7516 0.1556287 -0.04569667 -0.06152498 0.1621989 0 -0.6466 0.1621989 0 -0.06152498 0.1556287 0.04569667 -0.6466 0.1556287 0.04569667 -0.06152498 0.1364504 0.08769136 -0.6466 0.1364504 0.08769136 -0.06152498 0.1062177 0.1225817 -0.6466 0.1062177 0.1225817 -0.06152498 0.06737983 0.1475413 -0.6466 0.06737983 0.1475413 -0.06152498 0.02308326 0.160548 -0.6466 0.02308326 0.160548 -0.06152498 -0.02308326 0.160548 -0.6466 -0.02308326 0.160548 -0.06152498 -0.06737983 0.1475413 -0.6466 -0.06737983 0.1475413 -0.06152498 -0.1062177 0.1225817 -0.6466 -0.1062177 0.1225817 -0.06152498 -0.1364504 0.08769136 -0.6466 -0.1364504 0.08769136 -0.06152498 -0.1556287 0.04569667 -0.6466 -0.1556287 0.04569667 -0.06152498 -0.1621989 0 -0.6466 -0.1621989 0 -0.06152498 -0.1556287 -0.04569667 -0.6466 -0.1556287 -0.04569667 -0.06152498 -0.1364504 -0.08769136 -0.6466 -0.1364504 -0.08769136 -0.06152498 -0.1062177 -0.1225817 -0.6466 -0.1062177 -0.1225817 -0.06152498 -0.06737983 
-0.1475413 -0.6466 -0.06737983 -0.1475413 -0.06152498 -0.02308326 -0.160548 -0.6466 -0.02308326 -0.160548 -0.06152498 0.02308326 -0.160548 -0.6466 0.02308326 -0.160548 -0.06152498 0.06737983 -0.1475413 -0.6466 0.06737983 -0.1475413 -0.06152498 0.1062177 -0.1225817 -0.6466 0.1062177 -0.1225817 -0.06152498 0.1364504 -0.08769136 -0.6466 0.1364504 -0.08769136 -0.06152498 0.1556287 -0.04569667 -0.6466 -0.1621989 0 -0.06152498 0.2196846 0.08665293 -0.6466 0.2196846 -0.08665293 -0.6466 -0.2991126 0.06803309 -0.6466 0 0.1922612 -0.6466 -0.2991126 -0.06803309 -0.6466 0 -0.1922612 -0.6466 -0.3381437 -0.2748523 -0.6466 -0.5019719 -0.1922612 -0.6466 -0.385532 -0.4156633 -0.6466 -0.5019719 -0.4156633 -0.6466 -0.302941 0 -0.6466 -0.385532 0.4156633 -0.6466 -0.5019719 0.4156633 -0.6466 -0.3381437 0.2748523 -0.6466 -0.5019719 0.1922612 -0.6466 0.2440557 -0.4156633 -0.6466 0.4170677 -0.4156633 -0.6466 0.2440557 -0.2558969 -0.6466 0.4170677 -0.218715 -0.6466 0.2440557 0.2558969 -0.6466 0.4170677 0.218715 -0.6466 0.2440557 0.4156633 -0.6466 0.4170677 0.4156633 -0.6466 -0.5019719 -0.1922612 -0.92418 -0.5019719 -0.4156633 -0.92418 -0.385532 -0.4156633 -0.92418 -0.3381437 -0.2748523 -0.92418 -0.2904722 -0.2632086 -0.92418 0.2440557 -0.2558969 -0.92418 0.2440557 -0.4156633 -0.92418 0.4170677 -0.4156633 -0.92418 0.4170677 -0.218715 -0.92418 0.4170677 0.218715 -0.92418 0.4170677 0.4156633 -0.92418 0.2440557 0.4156633 -0.92418 0.2440557 0.2558969 -0.92418 -0.2904722 0.2632086 -0.92418 -0.3381437 0.2748523 -0.92418 -0.385532 0.4156633 -0.92418 -0.5019719 0.4156633 -0.92418 -0.5019719 0.1922612 -0.92418 -0.2904722 0.1922612 -0.92418 -0.2904722 -0.1922612 -0.92418 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0.9898214 -0.1423147 0 0.9898214 0.1423147 0 0.9898214 0.1423147 0 0.909632 0.4154151 0 0.909632 0.4154151 0 0.7557495 0.6548608 0 0.7557495 0.6548608 0 0.5406409 0.8412535 0 
0.5406409 0.8412535 0 0.2817326 0.9594931 0 0.2817326 0.9594931 0 0 1 0 0 1 0 -0.2817326 0.9594931 0 -0.2817326 0.9594931 0 -0.5406409 0.8412535 0 -0.5406409 0.8412535 0 -0.7557495 0.6548608 0 -0.7557495 0.6548608 0 -0.909632 0.4154151 0 -0.909632 0.4154151 0 -0.9898214 0.1423147 0 -0.9898214 0.1423147 0 -0.9898214 -0.1423147 0 -0.9898214 -0.1423147 0 -0.909632 -0.4154151 0 -0.909632 -0.4154151 0 -0.7557495 -0.6548608 0 -0.7557495 -0.6548608 0 -0.5406409 -0.8412535 0 -0.5406409 -0.8412535 0 -0.2817326 -0.9594931 0 -0.2817326 -0.9594931 0 0 -1 0 0 -1 0 0.2817326 -0.9594931 0 0.2817326 -0.9594931 0 0.5406409 -0.8412535 0 0.5406409 -0.8412535 0 0.7557495 -0.6548608 0 0.7557495 -0.6548608 0 0.909632 -0.4154151 0 0.909632 -0.4154151 0 0.9898214 -0.1423147 0 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 0 0 1 -0.9984205 -0.05618375 0 -0.9984205 -0.05618375 0 -0.522241 0.8527979 0 -0.522241 0.8527979 0 -1 0 0 -1 0 0 -1 0 0 0 -1 0 0 -1 0 0.947768 -0.3189606 0 0.947768 -0.3189606 0 0.2372734 -0.9714429 0 0.2372734 -0.971443 0 0.2372732 -0.971443 0 0.2372732 -0.9714429 0 -0.252307 -0.9676472 0 -0.252307 -0.9676472 0 -1 0 0 -1 0 0 -1 0 0 0 -1 0 0 -1 0 1 0 0 1 0 0 1 0 0 0.5560793 0.8311293 0 0.5560793 0.8311293 0 1 0 0 1 0 0 0.5560793 -0.8311293 0 0.5560793 -0.8311293 0 1 0 0 1 0 0 1 0 0 0 1 0 0 1 0 -1 0 0 -1 0 0 -1 0 0 -0.252307 0.9676472 0 -0.252307 0.9676472 0 0.2372732 0.971443 0 0.2372732 0.9714429 0 0.2372734 0.9714429 0 0.2372734 0.971443 0 0.947768 0.3189606 0 0.947768 0.3189606 0 0 1 0 0 1 0 -1 0 0 -1 0 0 -1 0 0 -0.522241 -0.8527979 0 -0.522241 -0.8527979 0 -0.9984205 0.05618375 0 -0.9984205 0.05618375 0 1 0 0 1 0 0 0 -1 0 0 -1 0 0 0 -1 
0 0 -1 0 0 -1 0 0 -1 -0.2101123 -0.9776773 0 -0.2101123 -0.9776773 0 0 0 -1 0 0 -1 -0.2101123 0.9776773 0 -0.2101123 0.9776773 0 0 0 -1 0 0 -1 0 1 0 0 1 0 1 0 0 1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 3 44 |

0 0 1 0 2 0 3 1 4 1 5 1 6 2 7 2 8 2 8 3 7 3 9 3 9 4 10 4 8 4 8 5 10 5 3 5 8 6 3 6 11 6 11 7 3 7 5 7 12 8 9 8 13 8 13 9 9 9 7 9 13 10 7 10 14 10 14 11 7 11 0 11 14 12 0 12 15 12 15 13 0 13 2 13 15 14 2 14 16 14 17 15 18 15 19 15 19 16 18 16 20 16 19 17 20 17 21 17 21 18 20 18 22 18 21 19 22 19 23 19 23 20 22 20 24 20 23 21 24 21 25 21 25 22 24 22 26 22 25 23 26 23 27 23 27 24 26 24 28 24 27 25 28 25 29 25 29 26 28 26 30 26 29 27 30 27 31 27 31 28 30 28 32 28 31 29 32 29 33 29 33 30 32 30 34 30 33 31 34 31 35 31 35 32 34 32 36 32 35 33 36 33 37 33 37 34 36 34 38 34 37 35 38 35 39 35 39 36 38 36 40 36 39 37 40 37 41 37 41 38 40 38 42 38 41 39 42 39 43 39 43 40 42 40 44 40 43 41 44 41 45 41 45 42 44 42 46 42 45 43 46 43 47 43 47 44 46 44 48 44 47 45 48 45 49 45 49 46 48 46 50 46 49 47 50 47 51 47 51 48 50 48 52 48 51 49 52 49 53 49 53 50 52 50 54 50 53 51 54 51 55 51 55 52 54 52 56 52 55 53 56 53 57 53 57 54 56 54 58 54 57 55 58 55 59 55 59 56 58 56 60 56 59 57 60 57 17 57 17 58 60 58 18 58 27 59 29 59 47 59 39 60 61 60 43 60 29 61 31 61 47 61 47 62 31 62 33 62 47 63 33 63 45 63 27 64 47 64 25 64 25 65 47 65 49 65 25 66 49 66 51 66 51 67 53 67 25 67 25 68 53 68 55 68 25 69 55 69 57 69 39 70 43 70 37 70 45 71 33 71 43 71 43 72 33 72 35 72 43 73 35 73 37 73 17 74 19 74 21 74 57 75 59 75 25 75 25 76 59 76 17 76 25 77 17 77 23 77 23 78 17 78 21 78 20 79 18 79 62 79 58 80 56 80 63 80 64 81 36 81 34 81 32 82 30 82 65 82 65 83 30 83 28 83 48 84 46 84 66 84 54 85 52 85 67 85 67 86 52 86 50 86 50 87 48 87 67 87 67 88 48 88 66 88 67 89 66 89 68 89 68 90 66 90 69 90 68 91 69 91 70 91 70 92 69 92 71 92 46 93 44 93 66 93 66 94 44 94 42 94 66 95 42 95 72 95 72 96 42 96 40 96 72 97 40 97 64 97 64 98 40 98 38 98 64 99 38 99 36 99 73 100 74 100 75 100 75 101 74 101 76 101 75 102 76 102 65 102 65 103 76 103 64 103 65 104 64 104 32 104 32 105 64 105 34 105 62 106 18 106 63 106 63 107 18 107 60 107 63 108 60 108 58 108 77 109 78 109 79 109 79 110 78 110 80 110 79 111 80 111 67 111 67 112 
80 112 63 112 67 113 63 113 54 113 54 114 63 114 56 114 26 115 24 115 62 115 62 116 24 116 22 116 62 117 22 117 20 117 28 118 26 118 65 118 65 119 26 119 62 119 65 120 62 120 81 120 81 121 62 121 82 121 81 122 82 122 83 122 83 123 82 123 84 123 66 124 72 124 15 124 15 125 72 125 14 125 69 126 66 126 16 126 16 127 66 127 15 127 85 128 86 128 16 128 16 129 86 129 71 129 16 130 71 130 69 130 87 131 70 131 86 131 86 132 70 132 71 132 88 133 68 133 87 133 87 134 68 134 70 134 0 135 67 135 1 135 1 136 67 136 68 136 1 137 68 137 89 137 89 138 68 138 88 138 79 139 67 139 7 139 7 140 67 140 0 140 90 141 91 141 7 141 7 142 91 142 77 142 7 143 77 143 79 143 92 144 78 144 91 144 91 145 78 145 77 145 80 146 78 146 6 146 6 147 78 147 92 147 6 148 92 148 93 148 63 149 80 149 8 149 8 150 80 150 6 150 62 151 63 151 11 151 11 152 63 152 8 152 82 153 62 153 5 153 5 154 62 154 11 154 94 155 95 155 5 155 5 156 95 156 84 156 5 157 84 157 82 157 96 158 83 158 95 158 95 159 83 159 84 159 96 160 97 160 83 160 83 161 97 161 4 161 83 162 4 162 81 162 65 163 81 163 3 163 3 164 81 164 4 164 98 165 99 165 10 165 10 166 99 166 75 166 10 167 75 167 3 167 3 168 75 168 65 168 100 169 73 169 99 169 99 170 73 170 75 170 101 171 74 171 100 171 100 172 74 172 73 172 76 173 74 173 12 173 12 174 74 174 101 174 12 175 101 175 102 175 64 176 76 176 13 176 13 177 76 177 12 177 72 178 64 178 14 178 14 179 64 179 13 179 10 180 9 180 98 180 98 181 9 181 103 181 9 182 12 182 103 182 103 183 12 183 102 183 98 184 103 184 99 184 99 185 103 185 102 185 99 186 102 186 100 186 100 187 102 187 101 187 5 188 4 188 94 188 94 189 4 189 97 189 96 190 95 190 97 190 97 191 95 191 94 191 7 192 6 192 90 192 90 193 6 193 93 193 92 194 91 194 93 194 93 195 91 195 90 195 16 196 2 196 85 196 85 197 2 197 104 197 2 198 1 198 104 198 104 199 1 199 89 199 87 200 86 200 88 200 88 201 86 201 85 201 88 202 85 202 89 202 89 203 85 203 104 203

45 |
46 |
47 | 1 48 |
49 |
50 | 51 | 52 | 53 | 54 | 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 |
63 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/base/pedestal_link_collision.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/base/pedestal_link_collision.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/head/H0.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/head/H0.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/head/H1.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/head/H1.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/lower_elbow/E1.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/lower_elbow/E1.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/lower_forearm/W1.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/lower_forearm/W1.STL 
-------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/lower_shoulder/S1.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/lower_shoulder/S1.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/torso/base_link.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/torso/base_link.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/torso/base_link_collision.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/torso/base_link_collision.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/upper_elbow/E0.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/upper_elbow/E0.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/upper_forearm/W0.STL: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/upper_forearm/W0.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/upper_shoulder/S0.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/upper_shoulder/S0.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/meshes/wrist/W2.STL: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ucsdarclab/CtRNet-robot-pose-estimation/05156c5ed5ac4bd67e96b51dc72238ff2743c388/urdfs/Baxter/baxter_description/meshes/wrist/W2.STL -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/package.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | baxter_description 4 | 1.2.0 5 | 6 | Description of Baxter Research Robot from Rethink Robotics. 7 | This package contains the URDF and meshes describing Baxter. 8 | 9 | 10 | 11 | Rethink Robotics Inc. 12 | 13 | BSD 14 | http://sdk.rethinkrobotics.com 15 | 16 | https://github.com/RethinkRobotics/baxter_common 17 | 18 | 19 | https://github.com/RethinkRobotics/baxter_common/issues 20 | 21 | Rethink Robotics Inc. 
22 | 23 | catkin 24 | rethink_ee_description 25 | 26 | 27 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/urdf/baxter.urdf.xacro: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/urdf/baxter_base/baxter_base.gazebo.xacro: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | /robot 7 | 8 | 9 | 10 | 11 | true 12 | 13 | 14 | 1 15 | 16 | 17 | 1 18 | 19 | 20 | 1 21 | 22 | 23 | 1 24 | 25 | 26 | 1 27 | 28 | 29 | 1 30 | 31 | 32 | 1 33 | 34 | 35 | 1 36 | 37 | 38 | 1 39 | 40 | 41 | 1 42 | 43 | 44 | 1 45 | 46 | 47 | 1 48 | 49 | 50 | 1 51 | 52 | 53 | 1 54 | 55 | 56 | 1 57 | 58 | 59 | 60 | 61 | 30.0 62 | 63 | 0.0 0.21 0.0 0.0 -0.8 -1.570796327 64 | 1.3962634 65 | 66 | 800 67 | 800 68 | R8G8B8 69 | 70 | 71 | 0.02 72 | 300 73 | 74 | 75 | gaussian 76 | 0.0 77 | 0.007 78 | 79 | 80 | 81 | true 82 | 0.0 83 | head_camera 84 | image 85 | camera_info 86 | head_camera 87 | 0.07 88 | 0.0 89 | 0.0 90 | 0.0 91 | 0.0 92 | 0.0 93 | /cameras 94 | 95 | 96 | 97 | 98 | 99 | 30.0 100 | 101 | 0.0 0.0 0.0 0.0 -1.570796327 1.570796327 102 | 1.3962634 103 | 104 | 800 105 | 800 106 | R8G8B8 107 | 108 | 109 | 0.02 110 | 300 111 | 112 | 113 | gaussian 114 | 0.0 115 | 0.007 116 | 117 | 118 | 119 | true 120 | 0.0 121 | right_hand_camera 122 | image 123 | camera_info 124 | right_hand_camera 125 | 0.07 126 | 0.0 127 | 0.0 128 | 0.0 129 | 0.0 130 | 0.0 131 | /cameras 132 | 133 | 134 | 135 | 136 | 137 | 30.0 138 | 139 | 0.0 0.0 0.0 0.0 -1.570796327 1.570796327 140 | 1.3962634 141 | 142 | 800 143 | 800 144 | R8G8B8 145 | 146 | 147 | 0.02 148 | 300 149 | 150 | 151 | gaussian 152 | 0.0 153 | 0.007 154 | 155 | 156 | 157 | true 158 | 0.0 
159 | left_hand_camera 160 | image 161 | camera_info 162 | left_hand_camera 163 | 0.07 164 | 0.0 165 | 0.0 166 | 0.0 167 | 0.0 168 | 0.0 169 | /cameras 170 | 171 | 172 | 173 | 174 | 175 | 176 | 600 177 | 1024 178 | /robot/xdisplay 179 | 180 | 181 | 182 | 183 | 184 | 0.0 0.0 0.0 0.0 0.0 0.0 185 | 186 | 187 | 188 | 12 189 | 1.0 190 | -3.14 191 | 3.14 192 | 193 | 194 | 2 195 | 1.0 196 | -0.001 197 | 0 198 | 199 | 200 | 201 | 0.05 202 | 50.0 203 | 204 | 205 | 206 | 0.00 207 | true 208 | 100.0 209 | /robot/sonar/head_sonar/state 210 | sonar_ring 211 | 212 | true 213 | 100.0 214 | 215 | 216 | 217 | 218 | 0.0 0.0 0.0 0.0 0.0 0.0 219 | 220 | 221 | 222 | 1 223 | 1.0 224 | -0.5 225 | 0.5 226 | 227 | 228 | 229 | 0.004 230 | 0.4 231 | 232 | 233 | 234 | 0.005 235 | true 236 | 100 237 | /sim/laserscan/right_hand_range/state 238 | right_hand_range 239 | 240 | true 241 | 100.0 242 | 243 | 244 | 245 | 246 | 0.0 0.0 0.0 0.0 0.0 0.0 247 | 248 | 249 | 250 | 1 251 | 1.0 252 | -0.5 253 | 0.5 254 | 255 | 256 | 257 | 0.004 258 | 0.4 259 | 260 | 261 | 262 | 0.005 263 | true 264 | 100 265 | /sim/laserscan/left_hand_range/state 266 | left_hand_range 267 | 268 | true 269 | 100.0 270 | 271 | 272 | 273 | 274 | 275 | 276 | transmission_interface/SimpleTransmission 277 | 278 | EffortJointInterface 279 | 280 | 281 | EffortJointInterface 282 | 1 283 | 284 | 285 | 286 | transmission_interface/SimpleTransmission 287 | 288 | EffortJointInterface 289 | 290 | 291 | EffortJointInterface 292 | 1 293 | 294 | 295 | 296 | transmission_interface/SimpleTransmission 297 | 298 | EffortJointInterface 299 | 300 | 301 | EffortJointInterface 302 | 1 303 | 304 | 305 | 306 | transmission_interface/SimpleTransmission 307 | 308 | EffortJointInterface 309 | 310 | 311 | EffortJointInterface 312 | 1 313 | 314 | 315 | 316 | transmission_interface/SimpleTransmission 317 | 318 | EffortJointInterface 319 | 320 | 321 | EffortJointInterface 322 | 1 323 | 324 | 325 | 326 | transmission_interface/SimpleTransmission 327 | 328 | 
EffortJointInterface 329 | 330 | 331 | EffortJointInterface 332 | 1 333 | 334 | 335 | 336 | transmission_interface/SimpleTransmission 337 | 338 | EffortJointInterface 339 | 340 | 341 | EffortJointInterface 342 | 1 343 | 344 | 345 | 346 | 347 | 348 | transmission_interface/SimpleTransmission 349 | 350 | EffortJointInterface 351 | 352 | 353 | EffortJointInterface 354 | 1 355 | 356 | 357 | 358 | transmission_interface/SimpleTransmission 359 | 360 | EffortJointInterface 361 | 362 | 363 | EffortJointInterface 364 | 1 365 | 366 | 367 | 368 | transmission_interface/SimpleTransmission 369 | 370 | EffortJointInterface 371 | 372 | 373 | EffortJointInterface 374 | 1 375 | 376 | 377 | 378 | transmission_interface/SimpleTransmission 379 | 380 | EffortJointInterface 381 | 382 | 383 | EffortJointInterface 384 | 1 385 | 386 | 387 | 388 | transmission_interface/SimpleTransmission 389 | 390 | EffortJointInterface 391 | 392 | 393 | EffortJointInterface 394 | 1 395 | 396 | 397 | 398 | transmission_interface/SimpleTransmission 399 | 400 | EffortJointInterface 401 | 402 | 403 | EffortJointInterface 404 | 1 405 | 406 | 407 | 408 | transmission_interface/SimpleTransmission 409 | 410 | EffortJointInterface 411 | 412 | 413 | EffortJointInterface 414 | 1 415 | 416 | 417 | 418 | 419 | transmission_interface/SimpleTransmission 420 | 421 | EffortJointInterface 422 | 423 | 424 | EffortJointInterface 425 | 1 426 | 427 | 428 | 429 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/urdf/left_end_effector.urdf.xacro: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 13 | 14 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/urdf/pedestal/pedestal.xacro: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 
| 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /urdfs/Baxter/baxter_description/urdf/right_end_effector.urdf.xacro: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 13 | 14 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/collision/finger.obj: -------------------------------------------------------------------------------- 1 | # File produced by Open Asset Import Library (http://www.assimp.sf.net) 2 | # (assimp v3.0.1262) 3 | 4 | mtllib finger.stl.obj.mtl 5 | 6 | # 96 vertex positions 7 | v 0.01036 0.0264034 0.000154629 8 | v 0.0104486 0.0025833 0.000146801 9 | v -0.0103872 0.00253418 0.000131696 10 | v -0.0103872 0.00253418 0.000131696 11 | v -0.0104795 0.0161016 0.0189882 12 | v -0.0104013 0.0263094 0.00016651 13 | v -0.0104013 0.0263094 0.00016651 14 | v -0.0104795 0.0161016 0.0189882 15 | v -0.0103889 0.0252203 0.0191876 16 | v 0.0104486 0.0025833 0.000146801 17 | v -0.0087304 -2.35252e-05 0.0361648 18 | v -0.0103872 0.00253418 0.000131696 19 | v 0.0104005 0.0252534 0.0190366 20 | v -0.0103889 0.0252203 0.0191876 21 | v 0.00583983 0.0142743 0.0538035 22 | v -0.0103872 0.00253418 0.000131696 23 | v -0.0104013 0.0263094 0.00016651 24 | v 0.01036 0.0264034 0.000154629 25 | v 0.00861608 0.0139887 0.0513279 26 | v 0.0104948 0.0151026 0.0184356 27 | v 0.0104005 0.0252534 0.0190366 28 | v 0.0104948 0.0151026 0.0184356 29 | v 0.00869277 -0.000132643 0.0501662 30 | v 0.0104486 0.0025833 0.000146801 31 | v 0.00861608 0.0139887 0.0513279 32 | v 0.00869277 -0.000132643 0.0501662 33 | v 0.0104948 0.0151026 0.0184356 34 | v -0.00862294 -5.68019e-05 0.0509528 35 | v -0.0103872 0.00253418 0.000131696 36 | v -0.0087304 -2.35252e-05 0.0361648 37 | v 0.0104486 0.0025833 0.000146801 38 | v 0.00869277 -0.000132643 0.0501662 39 | v -0.0087304 
-2.35252e-05 0.0361648 40 | v 0.00869277 -0.000132643 0.0501662 41 | v -0.00548142 -9.11208e-05 0.0537247 42 | v -0.0087304 -2.35252e-05 0.0361648 43 | v 0.0104005 0.0252534 0.0190366 44 | v 0.0104948 0.0151026 0.0184356 45 | v 0.0104486 0.0025833 0.000146801 46 | v 0.0104005 0.0252534 0.0190366 47 | v 0.0104486 0.0025833 0.000146801 48 | v 0.01036 0.0264034 0.000154629 49 | v 0.0104005 0.0252534 0.0190366 50 | v 0.01036 0.0264034 0.000154629 51 | v -0.0103889 0.0252203 0.0191876 52 | v -0.0103889 0.0252203 0.0191876 53 | v -0.00527792 0.0142931 0.053849 54 | v 0.00583983 0.0142743 0.0538035 55 | v -0.00777838 0.0142182 0.0523659 56 | v -0.00527792 0.0142931 0.053849 57 | v -0.0103889 0.0252203 0.0191876 58 | v -0.00862294 -5.68019e-05 0.0509528 59 | v -0.00548142 -9.11208e-05 0.0537247 60 | v -0.00777838 0.0142182 0.0523659 61 | v -0.0103872 0.00253418 0.000131696 62 | v -0.00862294 -5.68019e-05 0.0509528 63 | v -0.0104795 0.0161016 0.0189882 64 | v 0.0104005 0.0252534 0.0190366 65 | v 0.00583983 0.0142743 0.0538035 66 | v 0.00861608 0.0139887 0.0513279 67 | v 0.01036 0.0264034 0.000154629 68 | v -0.0104013 0.0263094 0.00016651 69 | v -0.0103889 0.0252203 0.0191876 70 | v -0.0104795 0.0161016 0.0189882 71 | v -0.00884117 0.0139176 0.0505894 72 | v -0.0103889 0.0252203 0.0191876 73 | v -0.00884117 0.0139176 0.0505894 74 | v -0.00777838 0.0142182 0.0523659 75 | v -0.0103889 0.0252203 0.0191876 76 | v 0.00583983 0.0142743 0.0538035 77 | v -0.00548142 -9.11208e-05 0.0537247 78 | v 0.00613802 -2.06026e-05 0.0535776 79 | v -0.00527792 0.0142931 0.053849 80 | v -0.00548142 -9.11208e-05 0.0537247 81 | v 0.00583983 0.0142743 0.0538035 82 | v 0.00869277 -0.000132643 0.0501662 83 | v 0.00613802 -2.06026e-05 0.0535776 84 | v -0.00548142 -9.11208e-05 0.0537247 85 | v 0.00613802 -2.06026e-05 0.0535776 86 | v 0.00869277 -0.000132643 0.0501662 87 | v 0.00861608 0.0139887 0.0513279 88 | v -0.00548142 -9.11208e-05 0.0537247 89 | v -0.00862294 -5.68019e-05 0.0509528 90 | v 
-0.0087304 -2.35252e-05 0.0361648 91 | v -0.00777838 0.0142182 0.0523659 92 | v -0.00548142 -9.11208e-05 0.0537247 93 | v -0.00527792 0.0142931 0.053849 94 | v -0.00862294 -5.68019e-05 0.0509528 95 | v -0.00884117 0.0139176 0.0505894 96 | v -0.0104795 0.0161016 0.0189882 97 | v 0.00613802 -2.06026e-05 0.0535776 98 | v 0.00861608 0.0139887 0.0513279 99 | v 0.00583983 0.0142743 0.0538035 100 | v -0.00777838 0.0142182 0.0523659 101 | v -0.00884117 0.0139176 0.0505894 102 | v -0.00862294 -5.68019e-05 0.0509528 103 | 104 | # 0 UV coordinates 105 | 106 | # 96 vertex normals 107 | vn 0.000724164 0.000331308 -1 108 | vn 0.000724164 0.000331308 -1 109 | vn 0.000724164 0.000331308 -1 110 | vn -0.99999 -0.000585782 -0.00447174 111 | vn -0.99999 -0.000585782 -0.00447174 112 | vn -0.99999 -0.000585782 -0.00447174 113 | vn -0.99995 0.00990875 0.00122006 114 | vn -0.99995 0.00990875 0.00122006 115 | vn -0.99995 0.00990875 0.00122006 116 | vn 0.00240304 -0.997479 -0.0709137 117 | vn 0.00240304 -0.997479 -0.0709137 118 | vn 0.00240304 -0.997479 -0.0709137 119 | vn 0.000668274 0.953556 0.301214 120 | vn 0.000668274 0.953556 0.301214 121 | vn 0.000668274 0.953556 0.301214 122 | vn -0.0005789 0.00146393 -0.999999 123 | vn -0.0005789 0.00146393 -0.999999 124 | vn -0.0005789 0.00146393 -0.999999 125 | vn 0.998344 0.00589157 0.0572229 126 | vn 0.998344 0.00589157 0.0572229 127 | vn 0.998344 0.00589157 0.0572229 128 | vn 0.998185 -0.050838 0.0322792 129 | vn 0.998185 -0.050838 0.0322792 130 | vn 0.998185 -0.050838 0.0322792 131 | vn 0.998371 0.000729252 0.0570496 132 | vn 0.998371 0.000729252 0.0570496 133 | vn 0.998371 0.000729252 0.0570496 134 | vn -0.871287 -0.490747 0.00522707 135 | vn -0.871287 -0.490747 0.00522707 136 | vn -0.871287 -0.490747 0.00522707 137 | vn 0.0362712 -0.99794 -0.0529128 138 | vn 0.0362712 -0.99794 -0.0529128 139 | vn 0.0362712 -0.99794 -0.0529128 140 | vn -0.00372285 -0.999988 -0.00316058 141 | vn -0.00372285 -0.999988 -0.00316058 142 | vn -0.00372285 -0.999988 
-0.00316058 143 | vn 0.999909 0.00984192 -0.00926313 144 | vn 0.999909 0.00984192 -0.00926313 145 | vn 0.999909 0.00984192 -0.00926313 146 | vn 0.999991 0.00372285 -0.00191858 147 | vn 0.999991 0.00372285 -0.00191858 148 | vn 0.999991 0.00372285 -0.00191858 149 | vn -0.0011495 0.99815 0.0607926 150 | vn -0.0011495 0.99815 0.0607926 151 | vn -0.0011495 0.99815 0.0607926 152 | vn 0.00284562 0.953846 0.300284 153 | vn 0.00284562 0.953846 0.300284 154 | vn 0.00284562 0.953846 0.300284 155 | vn -0.218924 0.920873 0.32259 156 | vn -0.218924 0.920873 0.32259 157 | vn -0.218924 0.920873 0.32259 158 | vn -0.661425 -0.0350314 0.749193 159 | vn -0.661425 -0.0350314 0.749193 160 | vn -0.661425 -0.0350314 0.749193 161 | vn -0.998169 -0.0513123 0.0320353 162 | vn -0.998169 -0.0513123 0.0320353 163 | vn -0.998169 -0.0513123 0.0320353 164 | vn 0.377717 0.867563 0.323518 165 | vn 0.377717 0.867563 0.323518 166 | vn 0.377717 0.867563 0.323518 167 | vn -0.00448618 0.998355 0.0571685 168 | vn -0.00448618 0.998355 0.0571685 169 | vn -0.00448618 0.998355 0.0571685 170 | vn -0.998589 0.0087761 0.0523757 171 | vn -0.998589 0.0087761 0.0523757 172 | vn -0.998589 0.0087761 0.0523757 173 | vn -0.665951 0.690851 0.281485 174 | vn -0.665951 0.690851 0.281485 175 | vn -0.665951 0.690851 0.281485 176 | vn 0.0127505 -0.0155288 0.999798 177 | vn 0.0127505 -0.0155288 0.999798 178 | vn 0.0127505 -0.0155288 0.999798 179 | vn 0.00408502 -0.00870048 0.999954 180 | vn 0.00408502 -0.00870048 0.999954 181 | vn 0.00408502 -0.00870048 0.999954 182 | vn 0.006542 -0.999267 0.0377181 183 | vn 0.006542 -0.999267 0.0377181 184 | vn 0.006542 -0.999267 0.0377181 185 | vn 0.798906 -0.0450007 0.59977 186 | vn 0.798906 -0.0450007 0.59977 187 | vn 0.798906 -0.0450007 0.59977 188 | vn -0.00899609 -0.999957 -0.00218479 189 | vn -0.00899609 -0.999957 -0.00218479 190 | vn -0.00899609 -0.999957 -0.00218479 191 | vn -0.510144 -0.000216662 0.860089 192 | vn -0.510144 -0.000216662 0.860089 193 | vn -0.510144 -0.000216662 
0.860089 194 | vn -0.998608 -0.0142745 0.0507836 195 | vn -0.998608 -0.0142745 0.0507836 196 | vn -0.998608 -0.0142745 0.0507836 197 | vn 0.665648 0.00209596 0.746263 198 | vn 0.665648 0.00209596 0.746263 199 | vn 0.665648 0.00209596 0.746263 200 | vn -0.858159 -4.90786e-05 0.513384 201 | vn -0.858159 -4.90786e-05 0.513384 202 | vn -0.858159 -4.90786e-05 0.513384 203 | 204 | # Mesh '' with 32 faces 205 | g 206 | usemtl DefaultMaterial 207 | f 1//1 2//2 3//3 208 | f 4//4 5//5 6//6 209 | f 7//7 8//8 9//9 210 | f 10//10 11//11 12//12 211 | f 13//13 14//14 15//15 212 | f 16//16 17//17 18//18 213 | f 19//19 20//20 21//21 214 | f 22//22 23//23 24//24 215 | f 25//25 26//26 27//27 216 | f 28//28 29//29 30//30 217 | f 31//31 32//32 33//33 218 | f 34//34 35//35 36//36 219 | f 37//37 38//38 39//39 220 | f 40//40 41//41 42//42 221 | f 43//43 44//44 45//45 222 | f 46//46 47//47 48//48 223 | f 49//49 50//50 51//51 224 | f 52//52 53//53 54//54 225 | f 55//55 56//56 57//57 226 | f 58//58 59//59 60//60 227 | f 61//61 62//62 63//63 228 | f 64//64 65//65 66//66 229 | f 67//67 68//68 69//69 230 | f 70//70 71//71 72//72 231 | f 73//73 74//74 75//75 232 | f 76//76 77//77 78//78 233 | f 79//79 80//80 81//81 234 | f 82//82 83//83 84//84 235 | f 85//85 86//86 87//87 236 | f 88//88 89//89 90//90 237 | f 91//91 92//92 93//93 238 | f 94//94 95//95 96//96 239 | 240 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/collision/link0.obj: -------------------------------------------------------------------------------- 1 | # File produced by Open Asset Import Library (http://www.assimp.sf.net) 2 | # (assimp v3.1.187496374) 3 | 4 | mtllib link0.obj.mtl 5 | 6 | # 102 vertex positions 7 | v -0.00122674 -0.0946137 -3.24928e-05 8 | v 0.0710809 0.00844602 0.00133993 9 | v 0.071567 -0.00127396 0.0013494 10 | v -0.00051723 0.0946704 -6.42576e-06 11 | v 0.0706646 -0.0115626 0.00133336 12 | v -0.135131 -0.0658707 0.00751985 13 | v -0.136305 -0.0659457 
3.26395e-07 14 | v -0.131625 -0.0687992 -5.27309e-07 15 | v 0.0543353 -0.0469489 0.00104792 16 | v 0.0619449 -0.0359785 0.00118116 17 | v 0.0673593 -0.0245271 0.00127621 18 | v 0.0526517 -0.0163561 0.14 19 | v 0.0548547 0.00447165 0.14 20 | v -0.0549666 0.00345713 0.14 21 | v 0.0391015 0.0387489 0.14 22 | v 0.00777804 -0.0910717 0.00846191 23 | v 0.0395665 -0.0384696 0.140003 24 | v -0.029337 -0.0466451 0.140001 25 | v 0.000349224 -0.0550819 0.14 26 | v 0.0686186 0.0208178 0.00129774 27 | v 0.0635871 0.0329931 0.00120988 28 | v 0.0525783 0.048771 0.00101755 29 | v 0.05859 0.0412413 0.00112268 30 | v 0.0509475 0.0207886 0.14 31 | v -0.0218528 0.0506849 0.14 32 | v -0.0317059 0.0450099 0.14 33 | v 0.0317161 0.0450507 0.14 34 | v -0.0400853 0.0378337 0.14 35 | v -0.0477037 0.0275963 0.14 36 | v 0.00485769 0.0549582 0.14 37 | v 0.0474762 -0.0279259 0.14 38 | v -0.00886536 0.0543586 0.140002 39 | v 0.0276805 -0.0476799 0.14 40 | v -0.054766 -0.0054659 0.14 41 | v -0.0528838 -0.015585 0.14 42 | v -0.122615 0.00563594 0.0611487 43 | v -0.122711 -0.00614431 0.060957 44 | v 0.0143701 -0.0532489 0.139999 45 | v -0.071675 -0.0756205 0.0575204 46 | v -0.111235 -0.0474773 0.0590551 47 | v -0.137824 -0.0624363 0.00755269 48 | v -0.133699 0.0669321 0.00741502 49 | v -0.13583 0.0663151 8.33834e-07 50 | v -0.136293 0.0646171 0.00756638 51 | v -0.131612 0.0688472 7.4968e-07 52 | v 0.0031716 0.0933497 0.00863144 53 | v 0.00408648 0.0939818 0.000170711 54 | v 0.0462474 0.0300326 0.14 55 | v 0.0548768 -0.00456585 0.14 56 | v -0.129159 0.0686836 0.00744486 57 | v -0.0528609 0.0157105 0.14 58 | v 0.0200202 0.0514462 0.139999 59 | v 0.00648893 0.0735192 0.08108 60 | v 0.00826788 -0.0918247 0.00024289 61 | v -0.0464371 -0.0299037 0.14 62 | v -0.140043 0.0607227 -1.6486e-07 63 | v 0.00769447 0.0922715 0.000234291 64 | v -0.00159938 0.0936322 0.00849181 65 | v 0.00734398 0.0914466 0.00831775 66 | v -0.148808 -0.0420547 -7.21858e-08 67 | v -0.149244 -0.0371861 0.00781712 68 | v -0.152578 
-0.0248176 1.68373e-07 69 | v 0.00907648 0.0737151 0.0784549 70 | v -0.121673 -0.0147367 0.0614178 71 | v 0.00678171 -0.0735316 0.0809757 72 | v -0.130951 -0.0681401 0.00766805 73 | v -0.0189172 -0.051755 0.14 74 | v -0.00908423 -0.0542971 0.14 75 | v -0.0658985 -0.0765477 0.0589793 76 | v -0.0376956 -0.0401377 0.14 77 | v -0.143472 -0.0548617 -4.38612e-08 78 | v -0.145452 -0.0485961 0.00768247 79 | v -0.1203 -0.0235449 0.0615099 80 | v -0.118609 -0.03105 0.06134 81 | v -0.114761 -0.0419663 0.0601784 82 | v -0.154079 -0.00554168 -3.75503e-07 83 | v -0.152725 -0.0137992 0.00819143 84 | v -0.153166 -0.00203576 0.00835078 85 | v -0.142117 0.0555469 0.00762839 86 | v 0.0535544 0.0128185 0.14 87 | v -0.109983 0.0486112 0.0588796 88 | v 0.00366946 -0.0932492 0.00847566 89 | v 0.00349566 -0.0942157 0.000177738 90 | v 0.00906588 -0.073708 0.0785962 91 | v -0.117004 -0.0366378 0.0608364 92 | v -0.151587 -0.0248333 0.00804774 93 | v -0.15241 0.0258275 -1.09161e-08 94 | v -0.153841 0.0105757 0 95 | v -0.15202 0.0217804 0.00810954 96 | v -0.121149 0.0181378 0.0615703 97 | v -0.113238 0.0447149 0.0596848 98 | v -0.000866581 -0.0937369 0.00817823 99 | v -0.141842 -0.0559592 0.00761703 100 | v -0.149634 0.0355816 0.00786878 101 | v -0.14929 0.0402499 3.01802e-07 102 | v -0.152949 0.0100689 0.00821985 103 | v -0.145377 0.0508924 -1.27878e-07 104 | v -0.11571 0.0395945 0.0607111 105 | v -0.0231112 -0.0759725 0.0689683 106 | v -0.117233 0.035881 0.060957 107 | v -0.146172 0.0467529 0.00770437 108 | v -0.119414 0.0287903 0.0611397 109 | 110 | # 0 UV coordinates 111 | 112 | # 200 vertex normals 113 | vn 0.0190098 -2.30831e-05 -0.999819 114 | vn 0.0188816 6.69136e-05 -0.999822 115 | vn 0.0191344 -0.000120273 -0.999817 116 | vn -0.518479 -0.850405 0.0893954 117 | vn 0.0222843 -0.00331509 -0.999746 118 | vn 0.0205047 -0.00139704 -0.999789 119 | vn 7.25974e-07 -5.81537e-06 1 120 | vn 9.85433e-07 -5.68784e-06 1 121 | vn 0.991829 0.0497189 0.11749 122 | vn 0.692904 -0.711464 0.11706 123 | 
vn 4.45103e-05 3.61862e-05 1 124 | vn 0.0193277 0.000437566 -0.999813 125 | vn 0.0199677 0.0010368 -0.9998 126 | vn 0.0232843 0.0046319 -0.999718 127 | vn 0.0210964 0.0022106 -0.999775 128 | vn 0.0197075 -0.000616531 -0.999806 129 | vn 6.42989e-05 2.18738e-05 1 130 | vn 0.91755 0.380023 0.116978 131 | vn 3.35708e-05 -1.87847e-05 1 132 | vn -7.63878e-06 1.53928e-05 1 133 | vn -8.50133e-06 1.24836e-05 1 134 | vn 8.62742e-06 2.71648e-06 1 135 | vn 4.31916e-06 1.10982e-05 1 136 | vn 0.000248513 -0.00151077 0.999999 137 | vn -0.000100683 -0.000160507 1 138 | vn 0.000441559 2.26903e-05 1 139 | vn -0.759017 -0.00436697 0.651056 140 | vn -0.758153 -0.0170388 0.651854 141 | vn -4.08147e-05 -6.51499e-05 1 142 | vn 3.63574e-05 2.96612e-05 1 143 | vn -0.488489 -0.714003 0.501575 144 | vn -0.779928 -0.612675 0.127831 145 | vn -0.6563 0.74377 0.126791 146 | vn -0.51319 0.854867 0.0764032 147 | vn 0.143998 0.98555 0.0891981 148 | vn 6.6605e-05 0.00021454 1 149 | vn 0.794096 -0.595678 0.120749 150 | vn 0.815277 -0.566949 0.117862 151 | vn 0.897326 -0.425249 0.118194 152 | vn 0.906239 -0.40539 0.119958 153 | vn 0.848855 0.515505 0.117047 154 | vn 0.992823 0.00242477 0.11957 155 | vn 0.000137166 -1.78568e-05 1 156 | vn -0.256878 0.90791 0.331229 157 | vn -4.88056e-05 2.05051e-05 1 158 | vn 1.50367e-05 -7.30032e-06 1 159 | vn -3.22116e-05 6.85885e-05 1 160 | vn -0.0415945 0.953296 0.29916 161 | vn 0.0339572 -0.0169327 -0.99928 162 | vn -9.01903e-05 -3.23136e-05 1 163 | vn -0.795774 0.599474 0.0858742 164 | vn 0.423964 0.898605 0.112979 165 | vn 0.047941 0.0640793 -0.996793 166 | vn 0.0548988 0.990257 0.12797 167 | vn 0.0494285 0.963916 0.261577 168 | vn 0.0344071 0.0175091 -0.999255 169 | vn 0.691328 0.715168 0.102958 170 | vn 0.691451 0.712649 0.118438 171 | vn -0.973898 -0.213048 0.0783147 172 | vn 7.42399e-05 3.32698e-05 1 173 | vn 0.884937 0.449942 0.120157 174 | vn 0.989315 -0.0869511 0.117026 175 | vn 0.962281 -0.245852 0.116503 176 | vn -0.192564 0.977601 0.0849446 177 | vn 
-0.191831 0.976828 0.0949088 178 | vn -0.123193 0.952732 0.277713 179 | vn -0.441485 0.766549 0.466362 180 | vn 0.467327 0.85467 0.22615 181 | vn 0.692847 -0.713157 0.106629 182 | vn -0.756883 -0.0565346 0.651101 183 | vn 0.123333 -0.943265 0.308285 184 | vn -0.493923 -0.720807 0.48629 185 | vn -0.477017 -0.8711 0.116787 186 | vn -0.44611 -0.794944 0.411157 187 | vn -6.11448e-05 -0.000331755 1 188 | vn -1.43156e-05 -8.03482e-05 1 189 | vn -0.228986 -0.885719 0.403816 190 | vn -0.608389 -0.519659 0.599848 191 | vn -0.920452 -0.383492 0.0755124 192 | vn -0.736949 -0.180808 0.651318 193 | vn -0.611742 -0.514007 0.601306 194 | vn 0.419302 0.901141 0.110141 195 | vn -0.991437 -0.0387918 0.124694 196 | vn -0.993276 -0.0773118 0.0861707 197 | vn -0.833082 0.535892 0.137092 198 | vn 8.17296e-05 2.2162e-05 1 199 | vn 0.943552 0.308615 0.120276 200 | vn 0.981039 0.152834 0.119183 201 | vn 0.973953 0.194242 0.116982 202 | vn 0.767761 0.629431 0.119836 203 | vn 0.775121 0.620511 0.118969 204 | vn 0.975511 -0.184098 0.120356 205 | vn -0.334118 0.859344 0.38716 206 | vn -0.186878 0.944088 0.271613 207 | vn -0.461627 0.746886 0.478605 208 | vn 0.418676 0.882839 0.21285 209 | vn 0.516842 0.832916 0.197804 210 | vn 0.465813 -0.878241 0.10822 211 | vn 0.444917 -0.890581 0.0944139 212 | vn 0.0505572 -0.0737673 -0.995993 213 | vn 0.458814 -0.864421 0.205587 214 | vn 0.516064 -0.83361 0.196908 215 | vn -0.387155 -0.789577 0.476109 216 | vn -0.244693 -0.878338 0.410666 217 | vn -0.208468 -0.972704 0.101918 218 | vn -0.193464 -0.977355 0.0857284 219 | vn -0.182203 -0.978184 0.0997879 220 | vn -0.519874 -0.667682 0.532852 221 | vn -0.000155222 -6.31553e-05 1 222 | vn -4.55781e-05 -2.14887e-05 1 223 | vn -0.773654 -0.387569 0.501247 224 | vn -0.836842 -0.541097 0.0831258 225 | vn -0.938711 -0.313668 0.142944 226 | vn 0.642112 0.752523 0.146293 227 | vn -0.975003 -0.187197 0.119691 228 | vn -0.98721 -0.103388 0.121358 229 | vn -0.991212 0.0930191 0.0940533 230 | vn -0.745994 0.128207 
0.653495 231 | vn -0.357321 0.923722 0.138052 232 | vn -2.44058e-05 1.82076e-05 1 233 | vn -0.562279 0.66248 0.494937 234 | vn -0.609088 0.612822 0.50345 235 | vn 0.183429 0.950262 0.251704 236 | vn 0.21534 0.929691 0.298837 237 | vn 0.229004 0.927667 0.294942 238 | vn 0.604019 -0.779541 0.165763 239 | vn 0.230026 -0.929068 0.289689 240 | vn 0.372231 -0.889715 0.264293 241 | vn -0.171199 -0.978844 0.112047 242 | vn 0.0790207 -0.991596 0.102437 243 | vn -0.774998 -0.384377 0.501629 244 | vn -0.890148 -0.437507 0.127373 245 | vn -0.67661 -0.535887 0.505 246 | vn -0.844574 -0.525041 0.105012 247 | vn -0.695542 -0.313148 0.646653 248 | vn -0.714122 -0.263551 0.648514 249 | vn -0.974069 0.210719 0.082385 250 | vn -0.866667 0.0208868 0.498449 251 | vn -0.993956 0.0146296 0.108802 252 | vn -0.994723 0.0188777 0.100843 253 | vn -0.990466 0.0795982 0.112429 254 | vn -0.875827 0.475205 0.0843008 255 | vn -0.648594 0.437886 0.622561 256 | vn -0.631999 0.470323 0.615934 257 | vn -0.542549 0.633495 0.551656 258 | vn -0.569254 0.593159 0.569309 259 | vn -0.854631 -0.127937 0.503228 260 | vn 0.187376 -0.949866 0.250288 261 | vn -0.0299751 -0.962324 0.270247 262 | vn -0.733474 -0.46004 0.500378 263 | vn -0.819413 -0.278244 0.501142 264 | vn -0.818885 -0.280329 0.500842 265 | vn -0.840131 -0.200724 0.503876 266 | vn -0.865477 -0.0010483 0.500948 267 | vn -0.861767 -0.077237 0.501391 268 | vn -0.734892 0.455474 0.502471 269 | vn -0.789187 0.35685 0.499842 270 | vn -0.899886 0.416094 0.130652 271 | vn -0.934904 0.343772 0.0881805 272 | vn -0.750321 -0.110129 0.651836 273 | vn -0.744296 -0.138475 0.653336 274 | vn -0.0486186 -0.96337 0.263731 275 | vn -0.0569915 -0.952558 0.298973 276 | vn -0.0786935 -0.945844 0.314939 277 | vn 0.0865387 -0.962264 0.257989 278 | vn 0.0988769 -0.988653 0.113084 279 | vn -0.040165 -0.954927 0.294112 280 | vn -0.679455 -0.364641 0.636692 281 | vn -0.676925 -0.535527 0.504959 282 | vn -0.860556 -0.0952615 0.500368 283 | vn -0.866101 -0.0391639 0.498333 
284 | vn -0.848661 -0.170347 0.500757 285 | vn -0.976331 0.171097 0.132301 286 | vn -0.784297 0.365988 0.500931 287 | vn -0.743675 0.147478 0.652072 288 | vn -0.75636 0.0667582 0.650741 289 | vn -0.726694 0.470061 0.500957 290 | vn -0.946042 0.295169 0.133715 291 | vn -0.699993 0.303725 0.646344 292 | vn -0.690574 0.326044 0.645603 293 | vn -0.722223 0.238827 0.64912 294 | vn -0.862635 0.0731211 0.500513 295 | vn -0.860436 0.0839585 0.502594 296 | vn -0.823144 0.266065 0.501641 297 | vn -0.850508 0.155797 0.502359 298 | vn -0.850275 0.158766 0.501823 299 | vn -0.824425 0.262885 0.501214 300 | vn -0.000104827 0.000138107 -1 301 | vn -6.4345e-05 4.8788e-05 -1 302 | vn -0.000255153 -5.05889e-05 -1 303 | vn 8.56579e-06 1.35717e-06 -1 304 | vn 3.45225e-06 -5.63502e-06 -1 305 | vn 0.000304041 -4.54974e-06 -1 306 | vn 0.0004587 -0.000166978 -1 307 | vn -4.30629e-05 3.84989e-05 -1 308 | vn -0.000206776 -3.99625e-05 -1 309 | vn -7.87823e-05 -3.27836e-06 -1 310 | vn -0.000409956 0.000110369 -1 311 | vn 6.31049e-05 7.3988e-06 -1 312 | vn -0.0003035 2.77655e-05 -1 313 | 314 | # Mesh '' with 200 faces 315 | g 316 | usemtl DefaultMaterial 317 | f 1//1 2//1 3//1 318 | f 4//2 2//2 1//2 319 | f 3//3 5//3 1//3 320 | f 6//4 7//4 8//4 321 | f 9//5 1//5 10//5 322 | f 1//6 11//6 10//6 323 | f 12//7 13//7 14//7 324 | f 13//7 15//8 14//7 325 | f 2//9 13//9 3//9 326 | f 16//10 9//10 17//10 327 | f 14//11 18//11 19//11 328 | f 20//12 2//12 4//12 329 | f 21//13 20//13 4//13 330 | f 22//14 23//14 4//14 331 | f 23//15 21//15 4//15 332 | f 1//16 5//16 11//16 333 | f 13//17 24//17 15//17 334 | f 20//18 21//18 24//18 335 | f 25//19 26//19 14//19 336 | f 15//20 27//20 14//20 337 | f 28//21 29//21 14//20 338 | f 14//22 30//22 25//22 339 | f 14//22 31//23 12//23 340 | f 32//24 25//24 30//24 341 | f 33//25 17//25 14//25 342 | f 34//26 35//26 14//26 343 | f 36//27 37//27 14//27 344 | f 14//28 37//28 34//28 345 | f 38//29 33//29 14//29 346 | f 19//30 38//30 14//30 347 | f 39//31 18//31 40//31 348 | f 
7//32 6//32 41//32 349 | f 42//33 43//33 44//33 350 | f 45//34 43//34 42//34 351 | f 4//35 46//35 47//35 352 | f 17//36 31//36 14//36 353 | f 9//37 31//37 17//37 354 | f 10//38 31//38 9//38 355 | f 11//39 31//39 10//39 356 | f 12//40 31//40 11//40 357 | f 21//41 23//41 48//41 358 | f 13//42 49//42 3//42 359 | f 12//43 49//43 13//43 360 | f 25//44 32//44 50//44 361 | f 14//45 29//45 51//45 362 | f 52//46 30//46 14//46 363 | f 27//47 52//47 14//47 364 | f 53//48 32//48 30//48 365 | f 9//49 54//49 1//49 366 | f 35//50 55//50 14//50 367 | f 56//51 44//51 43//51 368 | f 57//52 47//52 46//52 369 | f 4//53 47//53 57//53 370 | f 58//54 46//54 4//54 371 | f 53//55 46//55 58//55 372 | f 57//56 22//56 4//56 373 | f 57//57 59//57 22//57 374 | f 59//58 15//58 22//58 375 | f 60//59 61//59 62//59 376 | f 24//60 48//60 15//60 377 | f 24//61 21//61 48//61 378 | f 49//62 5//62 3//62 379 | f 5//63 12//63 11//63 380 | f 50//64 4//64 45//64 381 | f 4//65 50//65 58//65 382 | f 32//66 53//66 58//66 383 | f 42//67 26//67 25//67 384 | f 52//68 27//68 63//68 385 | f 9//69 16//69 54//69 386 | f 37//70 64//70 34//70 387 | f 38//71 19//71 65//71 388 | f 40//72 6//72 39//72 389 | f 8//73 66//73 6//73 390 | f 6//74 66//74 39//74 391 | f 18//75 67//75 19//75 392 | f 68//76 19//76 67//76 393 | f 67//77 69//77 68//77 394 | f 40//78 70//78 55//78 395 | f 71//79 72//79 60//79 396 | f 73//80 74//80 35//80 397 | f 55//81 75//81 40//81 398 | f 57//82 46//82 59//82 399 | f 76//83 77//83 78//83 400 | f 62//84 77//84 76//84 401 | f 79//85 44//85 56//85 402 | f 80//86 24//86 13//86 403 | f 24//87 80//87 20//87 404 | f 2//88 80//88 13//88 405 | f 20//89 80//89 2//89 406 | f 15//90 48//90 22//90 407 | f 22//91 48//91 23//91 408 | f 49//92 12//92 5//92 409 | f 25//93 50//93 42//93 410 | f 58//94 50//94 32//94 411 | f 42//95 81//95 26//95 412 | f 59//96 46//96 63//96 413 | f 59//97 63//97 27//97 414 | f 82//98 54//98 16//98 415 | f 82//99 83//99 54//99 416 | f 54//100 83//100 1//100 417 | f 16//101 84//101 
82//101 418 | f 33//102 84//102 16//102 419 | f 39//103 67//103 18//103 420 | f 69//104 67//104 39//104 421 | f 8//105 39//105 66//105 422 | f 1//106 39//106 8//106 423 | f 69//107 39//107 1//107 424 | f 40//108 18//108 70//108 425 | f 70//109 18//109 14//109 426 | f 55//110 70//110 14//110 427 | f 72//111 75//111 85//111 428 | f 7//112 41//112 71//112 429 | f 60//113 72//113 61//113 430 | f 59//114 27//114 15//114 431 | f 62//115 61//115 86//115 432 | f 86//116 77//116 62//116 433 | f 87//117 88//117 89//117 434 | f 51//118 90//118 14//118 435 | f 42//119 50//119 45//119 436 | f 14//120 26//120 28//120 437 | f 42//121 44//121 81//121 438 | f 44//122 91//122 81//122 439 | f 53//123 63//123 46//123 440 | f 52//124 53//124 30//124 441 | f 63//125 53//125 52//125 442 | f 33//126 16//126 17//126 443 | f 65//127 84//127 38//127 444 | f 33//128 38//128 84//128 445 | f 69//129 1//129 92//129 446 | f 83//130 92//130 1//130 447 | f 75//131 72//131 93//131 448 | f 93//132 72//132 71//132 449 | f 6//133 40//133 75//133 450 | f 41//134 93//134 71//134 451 | f 85//135 55//135 35//135 452 | f 74//136 85//136 35//136 453 | f 87//137 94//137 95//137 454 | f 36//138 96//138 78//138 455 | f 76//139 96//139 88//139 456 | f 78//140 96//140 76//140 457 | f 88//141 96//141 89//141 458 | f 97//142 79//142 56//142 459 | f 29//143 91//143 98//143 460 | f 28//144 91//144 29//144 461 | f 26//145 81//145 28//145 462 | f 91//146 28//146 81//146 463 | f 73//147 64//147 86//147 464 | f 65//148 82//148 84//148 465 | f 92//149 65//149 99//149 466 | f 93//150 41//150 75//150 467 | f 72//151 85//151 61//151 468 | f 61//152 85//152 74//152 469 | f 61//153 74//153 73//153 470 | f 36//154 78//154 37//154 471 | f 37//155 77//155 64//155 472 | f 98//156 91//156 79//156 473 | f 79//157 100//157 98//157 474 | f 97//158 101//158 79//158 475 | f 95//159 101//159 97//159 476 | f 64//160 73//160 34//160 477 | f 73//161 35//161 34//161 478 | f 69//162 92//162 99//162 479 | f 19//163 69//163 99//163 480 | f 
68//164 69//164 19//164 481 | f 92//165 82//165 65//165 482 | f 92//166 83//166 82//166 483 | f 99//167 65//167 19//167 484 | f 85//168 75//168 55//168 485 | f 41//169 6//169 75//169 486 | f 64//170 77//170 86//170 487 | f 77//171 37//171 78//171 488 | f 86//172 61//172 73//172 489 | f 87//173 89//173 94//173 490 | f 79//174 101//174 100//174 491 | f 102//175 90//175 51//175 492 | f 36//176 14//176 90//176 493 | f 91//177 44//177 79//177 494 | f 95//178 94//178 101//178 495 | f 29//179 98//179 51//179 496 | f 100//180 51//180 98//180 497 | f 102//181 51//181 100//181 498 | f 89//182 96//182 36//182 499 | f 36//183 90//183 89//183 500 | f 101//184 102//184 100//184 501 | f 94//185 89//185 102//185 502 | f 90//186 102//186 89//186 503 | f 101//187 94//187 102//187 504 | f 4//188 1//188 87//188 505 | f 4//189 87//189 45//189 506 | f 1//190 8//190 87//190 507 | f 60//191 87//191 71//191 508 | f 56//192 87//191 97//192 509 | f 76//193 87//193 62//193 510 | f 56//194 43//194 87//194 511 | f 45//195 87//195 43//195 512 | f 8//196 7//196 87//196 513 | f 62//197 87//197 60//197 514 | f 97//198 87//198 95//198 515 | f 71//199 87//199 7//199 516 | f 88//200 87//200 76//200 517 | 518 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/finger/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 
3 | # File created: 08/01/2022 23:32:00 4 | 5 | newmtl Part__Feature_007 6 | Ka 0 0 0 7 | Kd 0.2509804 0.2509804 0.2509804 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.5 0.5 0.5 12 | Ns 0 13 | newmtl Part__Feature001_006 14 | Ka 0 0 0 15 | Kd 0.9019608 0.9215686 0.9294118 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.5 0.5 0.5 20 | Ns 0 21 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/hand/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 08/01/2022 23:31:49 4 | 5 | newmtl Part__Feature002_005_005 6 | Ka 0 0 0 7 | Kd 0.9019608 0.9215686 0.9294118 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.03125 0.03125 0.03125 12 | Ns 0 13 | newmtl Part__Feature005_001_005 14 | Ka 0 0 0 15 | Kd 1 1 1 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.03125 0.03125 0.03125 20 | Ns 0 21 | newmtl Part__Feature_009_005 22 | Ka 0 0 0 23 | Kd 0.2509804 0.2509804 0.2509804 24 | d 1 25 | Tr 0 26 | illum 2 27 | Ks 0.03125 0.03125 0.03125 28 | Ns 0 29 | newmtl Part__Feature001_008_005 30 | Ka 0 0 0 31 | Kd 0.2509804 0.2509804 0.2509804 32 | d 1 33 | Tr 0 34 | illum 2 35 | Ks 0.015625 0.015625 0.015625 36 | Ns 0 37 | newmtl Part__Feature005_001_005_001 38 | Ka 0 0 0 39 | Kd 0.9019608 0.9215686 0.9294118 40 | d 1 41 | Tr 0 42 | illum 2 43 | Ks 0.03125 0.03125 0.03125 44 | Ns 0 45 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link0/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 
3 | # File created: 08/01/2022 23:31:31 4 | 5 | newmtl Part__Feature017_001 6 | Ka 0 0 0 7 | Kd 1 1 1 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 1 1 1 12 | Ns 0 13 | newmtl Part__Feature022_001 14 | Ka 0 0 0 15 | Kd 0.9019608 0.9215686 0.9294118 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.25 0.25 0.25 20 | Ns 0 21 | newmtl Part__Feature023_001 22 | Ka 0 0 0 23 | Kd 0.2509804 0.2509804 0.2509804 24 | d 1 25 | Tr 0 26 | illum 2 27 | Ks 0.25 0.25 0.25 28 | Ns 0 29 | newmtl Part__Feature019_001 30 | Ka 0 0 0 31 | Kd 1 1 1 32 | d 1 33 | Tr 0 34 | illum 2 35 | Ks 0.25 0.25 0.25 36 | Ns 0 37 | newmtl Part__Feature018_001 38 | Ka 0 0 0 39 | Kd 1 1 1 40 | d 1 41 | Tr 0 42 | illum 2 43 | Ks 1 1 1 44 | Ns 0 45 | newmtl Face636_001 46 | Ka 0 0 0 47 | Kd 0.9019608 0.9215686 0.9294118 48 | d 1 49 | Tr 0 50 | illum 2 51 | Ks 0.25 0.25 0.25 52 | Ns 0 53 | newmtl Shell001_001 54 | Ka 0 0 0 55 | Kd 0.2509804 0.2509804 0.2509804 56 | d 1 57 | Tr 0 58 | illum 2 59 | Ks 0.25 0.25 0.25 60 | Ns 0 61 | newmtl Shell002_001 62 | Ka 0 0 0 63 | Kd 0.9019608 0.9215686 0.9294118 64 | d 1 65 | Tr 0 66 | illum 2 67 | Ks 0.25 0.25 0.25 68 | Ns 0 69 | newmtl Shell_001 70 | Ka 0 0 0 71 | Kd 0.2509804 0.2509804 0.2509804 72 | d 1 73 | Tr 0 74 | illum 2 75 | Ks 0.25 0.25 0.25 76 | Ns 0 77 | newmtl Shell003_001 78 | Ka 0 0 0 79 | Kd 0.9019608 0.9215686 0.9294118 80 | d 1 81 | Tr 0 82 | illum 2 83 | Ks 0.25 0.25 0.25 84 | Ns 0 85 | newmtl Shell009_001 86 | Ka 0 0 0 87 | Kd 0.2509804 0.2509804 0.2509804 88 | d 1 89 | Tr 0 90 | illum 2 91 | Ks 0.25 0.25 0.25 92 | Ns 0 93 | newmtl Shell010_001 94 | Ka 0 0 0 95 | Kd 0.9019608 0.9215686 0.9294118 96 | d 1 97 | Tr 0 98 | illum 2 99 | Ks 0.25 0.25 0.25 100 | Ns 0 101 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link1/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 
3 | # File created: 08/01/2022 23:31:16 4 | 5 | newmtl Part__Feature_001 6 | Ka 0 0 0 7 | Kd 1 1 1 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.125 0.125 0.125 12 | Ns 0 13 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link2/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 08/01/2022 23:31:03 4 | 5 | newmtl Part__Feature024 6 | Ka 0 0 0 7 | Kd 1 1 1 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.25 0.25 0.25 12 | Ns 0 13 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link3/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 08/01/2022 23:30:47 4 | 5 | newmtl Part__Feature_001_001_001_002 6 | Ka 0 0 0 7 | Kd 0.2509804 0.2509804 0.2509804 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.015625 0.015625 0.015625 12 | Ns 0 13 | newmtl Part__Feature001_010_001_002 14 | Ka 0 0 0 15 | Kd 1 1 1 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.015625 0.015625 0.015625 20 | Ns 0 21 | newmtl Part__Feature002_007_001_002 22 | Ka 0 0 0 23 | Kd 1 1 1 24 | d 1 25 | Tr 0 26 | illum 2 27 | Ks 0.015625 0.015625 0.015625 28 | Ns 0 29 | newmtl Part__Feature003_004_001_002 30 | Ka 0 0 0 31 | Kd 1 1 1 32 | d 1 33 | Tr 0 34 | illum 2 35 | Ks 0.015625 0.015625 0.015625 36 | Ns 0 37 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link4/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 
3 | # File created: 08/01/2022 23:30:31 4 | 5 | newmtl Part__Feature_002_003_001 6 | Ka 0 0 0 7 | Kd 1 1 1 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.015625 0.015625 0.015625 12 | Ns 0 13 | newmtl Part__Feature002_001_003_001 14 | Ka 0 0 0 15 | Kd 0.2509804 0.2509804 0.2509804 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.015625 0.015625 0.015625 20 | Ns 0 21 | newmtl Part__Feature003_001_003_001 22 | Ka 0 0 0 23 | Kd 1 1 1 24 | d 1 25 | Tr 0 26 | illum 2 27 | Ks 0.015625 0.015625 0.015625 28 | Ns 0 29 | newmtl Part__Feature001_001_003_001 30 | Ka 0 0 0 31 | Kd 1 1 1 32 | d 1 33 | Tr 0 34 | illum 2 35 | Ks 0.015625 0.015625 0.015625 36 | Ns 0 37 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link5/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 3 | # File created: 08/01/2022 23:30:10 4 | 5 | newmtl Part__Feature_002_004_003 6 | Ka 0 0 0 7 | Kd 1 1 1 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.03125 0.03125 0.03125 12 | Ns 0 13 | newmtl Shell_001_001_003 14 | Ka 0 0 0 15 | Kd 1 1 1 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.03125 0.03125 0.03125 20 | Ns 0 21 | newmtl Shell001_001_001_003 22 | Ka 0 0 0 23 | Kd 0.25 0.25 0.25 24 | d 1 25 | Tr 0 26 | illum 2 27 | Ks 0.03125 0.03125 0.03125 28 | Ns 0 29 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link6/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 
3 | # File created: 08/01/2022 23:29:58 4 | 5 | newmtl Shell012_002_002_001 6 | Ka 0 0 0 7 | Kd 1 1 1 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.0078125 0.0078125 0.0078125 12 | Ns 0 13 | newmtl Part__Feature001_009_001_002_001 14 | Ka 0 0 0 15 | Kd 0.2509804 0.2509804 0.2509804 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.0078125 0.0078125 0.0078125 20 | Ns 0 21 | newmtl Part__Feature002_006_001_002_001 22 | Ka 0 0 0 23 | Kd 0.2509804 0.2509804 0.2509804 24 | d 1 25 | Tr 0 26 | illum 2 27 | Ks 0.0078125 0.0078125 0.0078125 28 | Ns 0 29 | newmtl Face064_002_001_002_001 30 | Ka 0 0 0 31 | Kd 1 0 0 32 | d 1 33 | Tr 0 34 | illum 2 35 | Ks 0.0078125 0.0078125 0.0078125 36 | Ns 0 37 | newmtl Face065_002_001_002_001 38 | Ka 0 0 0 39 | Kd 0 1 0 40 | d 1 41 | Tr 0 42 | illum 2 43 | Ks 0.0078125 0.0078125 0.0078125 44 | Ns 0 45 | newmtl Face374_002_001_002_001 46 | Ka 0 0 0 47 | Kd 1 1 1 48 | d 1 49 | Tr 0 50 | illum 2 51 | Ks 0.0078125 0.0078125 0.0078125 52 | Ns 0 53 | newmtl Shell_003_001_002_001 54 | Ka 0 0 0 55 | Kd 0.2509804 0.2509804 0.2509804 56 | d 1 57 | Tr 0 58 | illum 2 59 | Ks 0.0078125 0.0078125 0.0078125 60 | Ns 0 61 | newmtl Face539_002_001_002_001 62 | Ka 0 0 0 63 | Kd 0.2509804 0.2509804 0.2509804 64 | d 1 65 | Tr 0 66 | illum 2 67 | Ks 0.0078125 0.0078125 0.0078125 68 | Ns 0 69 | newmtl Union_001_001_002_001 70 | Ka 0 0 0 71 | Kd 0.03921568999999999 0.54117649999999992 0.7803922 72 | d 1 73 | Tr 0 74 | illum 2 75 | Ks 0.0078125 0.0078125 0.0078125 76 | Ns 0 77 | newmtl Union001_001_001_002_001 78 | Ka 0 0 0 79 | Kd 0.03921568999999999 0.54117649999999992 0.7803922 80 | d 1 81 | Tr 0 82 | illum 2 83 | Ks 0.0078125 0.0078125 0.0078125 84 | Ns 0 85 | newmtl Shell002_002_001_002_001 86 | Ka 0 0 0 87 | Kd 1 1 1 88 | d 1 89 | Tr 0 90 | illum 2 91 | Ks 0.0078125 0.0078125 0.0078125 92 | Ns 0 93 | newmtl Shell003_002_001_002_001 94 | Ka 0 0 0 95 | Kd 1 1 1 96 | d 1 97 | Tr 0 98 | illum 2 99 | Ks 0.0078125 0.0078125 0.0078125 100 | Ns 0 101 | newmtl 
Shell004_001_001_002_001 102 | Ka 0 0 0 103 | Kd 1 1 1 104 | d 1 105 | Tr 0 106 | illum 2 107 | Ks 0.0078125 0.0078125 0.0078125 108 | Ns 0 109 | newmtl Shell005_001_001_002_001 110 | Ka 0 0 0 111 | Kd 1 1 1 112 | d 1 113 | Tr 0 114 | illum 2 115 | Ks 0.0078125 0.0078125 0.0078125 116 | Ns 0 117 | newmtl Shell007_002_002_001 118 | Ka 0 0 0 119 | Kd 0.25 0.25 0.25 120 | d 1 121 | Tr 0 122 | illum 2 123 | Ks 0.0078125 0.0078125 0.0078125 124 | Ns 0 125 | newmtl Shell011_002_002_001 126 | Ka 0 0 0 127 | Kd 1 1 1 128 | d 1 129 | Tr 0 130 | illum 2 131 | Ks 0.0078125 0.0078125 0.0078125 132 | Ns 0 133 | newmtl Shell006_003_002_001 134 | Ka 0 0 0 135 | Kd 0.9019608 0.9215686 0.9294118 136 | d 1 137 | Tr 0 138 | illum 2 139 | Ks 0.03125 0.03125 0.03125 140 | Ns 0 141 | -------------------------------------------------------------------------------- /urdfs/Panda/meshes/visual/link7/material.lib: -------------------------------------------------------------------------------- 1 | # Aspose.3D Wavefront OBJ Exporter 2 | # Copyright 2004-2022 Aspose Pty Ltd. 
3 | # File created: 08/01/2022 23:28:53 4 | 5 | newmtl Part__Mirroring_004_001 6 | Ka 0 0 0 7 | Kd 0.8980392 0.9176471 0.9294118 8 | d 1 9 | Tr 0 10 | illum 2 11 | Ks 0.0625 0.0625 0.0625 12 | Ns 0 13 | newmtl Part__Mirroring002_004_001 14 | Ka 0 0 0 15 | Kd 0.2509804 0.2509804 0.2509804 16 | d 1 17 | Tr 0 18 | illum 2 19 | Ks 0.0625 0.0625 0.0625 20 | Ns 0 21 | newmtl Part__Mirroring003_004_001 22 | Ka 0 0 0 23 | Kd 0.2509804 0.2509804 0.2509804 24 | d 1 25 | Tr 0 26 | illum 2 27 | Ks 0.0625 0.0625 0.0625 28 | Ns 0 29 | newmtl Part__Mirroring005_004_001 30 | Ka 0 0 0 31 | Kd 0.2509804 0.2509804 0.2509804 32 | d 1 33 | Tr 0 34 | illum 2 35 | Ks 0.0625 0.0625 0.0625 36 | Ns 0 37 | newmtl Part__Mirroring006_004_001 38 | Ka 0 0 0 39 | Kd 0.2509804 0.2509804 0.2509804 40 | d 1 41 | Tr 0 42 | illum 2 43 | Ks 0.0625 0.0625 0.0625 44 | Ns 0 45 | newmtl Part__Mirroring007_004_001 46 | Ka 0 0 0 47 | Kd 0.2509804 0.2509804 0.2509804 48 | d 1 49 | Tr 0 50 | illum 2 51 | Ks 0.0625 0.0625 0.0625 52 | Ns 0 53 | newmtl Part__Mirroring001_004_002 54 | Ka 0 0 0 55 | Kd 0.2509804 0.2509804 0.2509804 56 | d 1 57 | Tr 0 58 | illum 2 59 | Ks 0.03125 0.03125 0.03125 60 | Ns 0 61 | newmtl Part__Mirroring004_004_002 62 | Ka 0 0 0 63 | Kd 1 1 1 64 | d 1 65 | Tr 0 66 | illum 2 67 | Ks 0.0625 0.0625 0.0625 68 | Ns 0 69 | -------------------------------------------------------------------------------- /urdfs/Panda/panda.urdf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 
| 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | -------------------------------------------------------------------------------- /urdfs/Panda/panda_dae.urdf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 
171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | -------------------------------------------------------------------------------- /urdfs/Panda/panda_obj.urdf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 232 | 233 | 234 | 235 | 236 | 237 | 238 | 239 | 
240 | 241 | 242 | 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | --------------------------------------------------------------------------------