├── .gitignore
├── LICENSE
├── README.md
├── assets
├── datainfo
│ ├── multiple_models_data_split_dict_1.json
│ ├── multiple_models_data_split_dict_2.json
│ └── multiple_models_data_split_dict_3.json
├── interbotix_descriptions
│ ├── CMakeLists.txt
│ ├── README.md
│ ├── images
│ │ └── descriptions_flowchart.png
│ ├── launch
│ │ └── description.launch
│ ├── meshes
│ │ ├── interbotix_black.png
│ │ ├── meshes_px100
│ │ │ ├── PXA-100-M-1-Base.stl
│ │ │ ├── PXA-100-M-2-Shoulder.stl
│ │ │ ├── PXA-100-M-3-UA.stl
│ │ │ ├── PXA-100-M-4-Forearm.stl
│ │ │ ├── PXA-100-M-5-Wrist-Gripper.stl
│ │ │ ├── PXA-100-M-6-Gripper-Prop.stl
│ │ │ ├── PXA-100-M-7-Gripper-Bar.stl
│ │ │ └── PXA-100-M-8-Finger.stl
│ │ ├── meshes_px150
│ │ │ ├── PXA-150-M-1-Base.stl
│ │ │ ├── PXA-150-M-2-Shoulder.stl
│ │ │ ├── PXA-150-M-3-UA.stl
│ │ │ ├── PXA-150-M-4-Forearm.stl
│ │ │ ├── PXA-150-M-5-Wrist.stl
│ │ │ ├── PXA-150-M-6-Gripper.stl
│ │ │ ├── PXA-150-M-7-Gripper-Prop.stl
│ │ │ ├── PXA-150-M-8-Gripper-Bar.stl
│ │ │ └── PXA-150-M-9-Finger.stl
│ │ ├── meshes_pxxls
│ │ │ ├── PXT-XLS-M-1-Base.stl
│ │ │ ├── PXT-XLS-M-2-Shoulder.stl
│ │ │ └── PXT-XLS-M-3-Top.stl
│ │ ├── meshes_rx150
│ │ │ ├── RXA-150-M-1-Base.stl
│ │ │ ├── RXA-150-M-2-Shoulder.stl
│ │ │ ├── RXA-150-M-3-UA.stl
│ │ │ ├── RXA-150-M-4-Forearm.stl
│ │ │ ├── RXA-150-M-5-Wrist.stl
│ │ │ ├── RXA-150-M-6-Gripper.stl
│ │ │ ├── RXA-150-M-7-Gripper-Prop.stl
│ │ │ ├── RXA-150-M-8-Gripper-Bar.stl
│ │ │ └── RXA-150-M-9-Finger.stl
│ │ ├── meshes_rx200
│ │ │ ├── RXA-200-M-1-Base.stl
│ │ │ ├── RXA-200-M-2-Shoulder.stl
│ │ │ ├── RXA-200-M-3-UA.stl
│ │ │ ├── RXA-200-M-4-Forearm.stl
│ │ │ ├── RXA-200-M-5-Wrist.stl
│ │ │ ├── RXA-200-M-6-Gripper.stl
│ │ │ ├── RXA-200-M-7-Gripper-Prop.stl
│ │ │ ├── RXA-200-M-8-Gripper-Bar.stl
│ │ │ └── RXA-200-M-9-Finger.stl
│ │ ├── meshes_vx250
│ │ │ ├── VXA-250-M-1-Base.stl
│ │ │ ├── VXA-250-M-2-Shoulder.stl
│ │ │ ├── VXA-250-M-3-UA.stl
│ │ │ ├── VXA-250-M-4-Forearm.stl
│ │ │ ├── VXA-250-M-5-Wrist.stl
│ │ │ ├── VXA-250-M-6-Gripper.stl
│ │ │ ├── VXA-250-M-7-Gripper-Prop.stl
│ │ │ ├── VXA-250-M-8-Gripper-Bar.stl
│ │ │ └── VXA-250-M-9-Finger.stl
│ │ ├── meshes_vx300
│ │ │ ├── VXA-300-M-1-Base.stl
│ │ │ ├── VXA-300-M-2-Shoulder.stl
│ │ │ ├── VXA-300-M-3-UA.stl
│ │ │ ├── VXA-300-M-4-Forearm.stl
│ │ │ ├── VXA-300-M-5-Wrist.stl
│ │ │ ├── VXA-300-M-6-Gripper.stl
│ │ │ ├── VXA-300-M-7-Gripper-Prop.stl
│ │ │ ├── VXA-300-M-8-Gripper-Bar.stl
│ │ │ └── VXA-300-M-9-Finger.stl
│ │ ├── meshes_vx300s
│ │ │ ├── VXSA-300-M-1-Base.stl
│ │ │ ├── VXSA-300-M-10-Finger.stl
│ │ │ ├── VXSA-300-M-2-Shoulder.stl
│ │ │ ├── VXSA-300-M-3-UA.stl
│ │ │ ├── VXSA-300-M-4-UF.stl
│ │ │ ├── VXSA-300-M-5-LF.stl
│ │ │ ├── VXSA-300-M-6-Wrist.stl
│ │ │ ├── VXSA-300-M-7-Gripper.stl
│ │ │ ├── VXSA-300-M-8-Gripper-Prop.stl
│ │ │ └── VXSA-300-M-9-Gripper-Bar.stl
│ │ ├── meshes_vxxmd
│ │ │ ├── VXT-XMD-M-1-Base.stl
│ │ │ ├── VXT-XMD-M-2-Shoulder.stl
│ │ │ └── VXT-XMD-M-3-Top.stl
│ │ ├── meshes_vxxms
│ │ │ ├── VXT-XMS-M-1-Base.stl
│ │ │ ├── VXT-XMS-M-2-Shoulder.stl
│ │ │ └── VXT-XMS-M-3-Top.stl
│ │ ├── meshes_wx200
│ │ │ ├── WXA-200-M-1-Base.stl
│ │ │ ├── WXA-200-M-2-Shoulder.stl
│ │ │ ├── WXA-200-M-3-UA.stl
│ │ │ ├── WXA-200-M-4-Forearm.stl
│ │ │ ├── WXA-200-M-5-Wrist.stl
│ │ │ ├── WXA-200-M-6-Gripper.stl
│ │ │ ├── WXA-200-M-7-Gripper-Prop.stl
│ │ │ ├── WXA-200-M-8-Gripper-Bar.stl
│ │ │ ├── WXA-200-M-9-Finger.stl
│ │ │ └── grippers.STL
│ │ ├── meshes_wx250
│ │ │ ├── WXA-250-M-1-Base.stl
│ │ │ ├── WXA-250-M-2-Shoulder.stl
│ │ │ ├── WXA-250-M-3-UA.stl
│ │ │ ├── WXA-250-M-4-Forearm.stl
│ │ │ ├── WXA-250-M-5-Wrist.stl
│ │ │ ├── WXA-250-M-6-Gripper.stl
│ │ │ ├── WXA-250-M-7-Gripper-Prop.stl
│ │ │ ├── WXA-250-M-8-Gripper-Bar.stl
│ │ │ └── WXA-250-M-9-Finger.stl
│ │ ├── meshes_wx250s
│ │ │ ├── WXSA-250-M-1-Base.stl
│ │ │ ├── WXSA-250-M-10-Finger.stl
│ │ │ ├── WXSA-250-M-2-Shoulder.stl
│ │ │ ├── WXSA-250-M-3-UA.stl
│ │ │ ├── WXSA-250-M-4-UF.stl
│ │ │ ├── WXSA-250-M-5-LF.stl
│ │ │ ├── WXSA-250-M-6-Wrist.stl
│ │ │ ├── WXSA-250-M-7-Gripper.stl
│ │ │ ├── WXSA-250-M-8-Gripper-Prop.stl
│ │ │ └── WXSA-250-M-9-Gripper-Bar.stl
│ │ ├── meshes_wxxmd
│ │ │ ├── WXT-XMD-M-1-Base.stl
│ │ │ ├── WXT-XMD-M-2-Shoulder.stl
│ │ │ └── WXT-XMD-M-3-Top.stl
│ │ └── meshes_wxxms
│ │ │ ├── WXT-XMS-M-1-Base.stl
│ │ │ ├── WXT-XMS-M-2-Shoulder.stl
│ │ │ └── WXT-XMS-M-3-Top.stl
│ ├── package.xml
│ ├── rviz
│ │ └── description.rviz
│ ├── setup.py
│ ├── src
│ │ └── interbotix_descriptions
│ │ │ ├── __init__.py
│ │ │ └── interbotix_mr_descriptions.py
│ └── urdf
│ │ ├── px100.urdf.xacro
│ │ ├── px150.urdf.xacro
│ │ ├── pxxls.urdf.xacro
│ │ ├── rx150.urdf.xacro
│ │ ├── rx200.urdf.xacro
│ │ ├── vx250.urdf.xacro
│ │ ├── vx300.urdf.xacro
│ │ ├── vx300s.urdf.xacro
│ │ ├── vxxmd.urdf.xacro
│ │ ├── vxxms.urdf.xacro
│ │ ├── wx200.urdf
│ │ ├── wx200.urdf.xacro
│ │ ├── wx200_gripper.urdf
│ │ ├── wx200_gripper_record.urdf
│ │ ├── wx250.urdf.xacro
│ │ ├── wx250s.urdf.xacro
│ │ ├── wxxmd.urdf.xacro
│ │ └── wxxms.urdf.xacro
└── widowx_arm_description
│ ├── meshes
│ ├── base_link.stl
│ ├── biceps_link.new.stl
│ ├── biceps_link.stl
│ ├── forearm_link.new.stl
│ ├── forearm_link.stl
│ ├── gripper_hand_fixed_link.left.stl
│ ├── gripper_hand_fixed_link.new.stl
│ ├── gripper_hand_fixed_link.stl
│ ├── gripper_rail_link.new.stl
│ ├── gripper_rail_link.stl
│ ├── sensors
│ │ ├── sr300.stl
│ │ └── sr300_stand.stl
│ ├── shoulder_link.stl
│ ├── wrist_1_link.stl
│ ├── wrist_2_link.new.stl
│ └── wrist_2_link.stl
│ └── urdf
│ ├── block.obj
│ ├── block.urdf
│ ├── common_colours.xacro
│ ├── common_materials.xacro
│ ├── sensor_sr300.urdf.xacro
│ ├── textured_sphere_smooth.obj
│ ├── weightless_sphere.urdf
│ ├── weightless_sphere_record.urdf
│ ├── widowx.urdf
│ ├── widowx.urdf.xacro
│ └── widowx_gripper.xacro
├── configs
├── state_condition
│ └── config1.yaml
├── state_condition_kinematic
│ └── config1.yaml
└── state_condition_kinematic_scratch
│ └── config1.yaml
├── dataset.py
├── eval.py
├── figures
└── teaser.gif
├── fusion.py
├── main.py
├── model_utils.py
├── models.py
├── requirements.txt
├── scripts
├── reconstruction.sh
├── reconstruction_eval.sh
├── state_condition.sh
└── state_condition_eval.sh
├── setup.py
├── sim.py
└── utils
├── __init__.py
├── common.py
├── eval_mesh.py
├── libkdtree
├── .gitignore
├── LICENSE.txt
├── MANIFEST.in
├── README
├── README.rst
├── __init__.py
├── pykdtree
│ ├── __init__.py
│ ├── _kdtree_core.c
│ ├── _kdtree_core.c.mako
│ ├── kdtree.c
│ ├── kdtree.cpython-36m-x86_64-linux-gnu.so
│ ├── kdtree.pyx
│ ├── render_template.py
│ └── test_tree.py
└── setup.cfg
└── libmesh
├── .gitignore
├── __init__.py
├── inside_mesh.py
├── triangle_hash.cpython-36m-x86_64-linux-gnu.so
└── triangle_hash.pyx
/.gitignore:
--------------------------------------------------------------------------------
1 | *.*~
2 | *~
3 | saved_meshes*
4 | __pycache__
5 | .DS_Store
6 | */logs_*
7 | build
8 | *.zip
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Boyuan Chen
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Full-Body Visual Self-Modeling of Robot Morphologies
2 |
3 | [Boyuan Chen](http://boyuanchen.com/),
4 | [Robert Kwiatkowski](https://www.linkedin.com/in/robert-kwiatkowski-7a5266201),
5 | [Carl Vondrick](http://www.cs.columbia.edu/~vondrick/),
6 | [Hod Lipson](https://www.hodlipson.com/)
7 |
8 | Columbia University
9 |
10 |
11 | ### [Project Website](https://robot-morphology.cs.columbia.edu/) | [Video](https://youtu.be/aoCAplokoWE) | [Paper](http://arxiv.org/abs/2111.06389)
12 |
13 | ## Overview
14 | This repo contains the PyTorch implementation for paper "Full-Body Visual Self-Modeling of Robot Morphologies".
15 |
16 | 
17 |
18 | ## Citation
19 |
20 | If you find our paper or codebase helpful, please consider citing:
21 |
22 | ```
23 | @article{chen2021morphology,
24 | title={Full-Body Visual Self-Modeling of Robot Morphologies},
25 | author={Chen, Boyuan and Kwiatkowski, Robert and Vondrick, Carl and Lipson, Hod},
26 | journal={arXiv preprint arXiv:2111.06389},
27 | year={2021}
28 | }
29 | ```
30 |
31 | ## Content
32 |
33 | - [Installation](#installation)
34 | - [Data Preparation](#data-preparation)
35 | - [About Configs and Logs](#about-configs-and-logs)
36 | - [Training](#training)
37 | - [Evaluation](#evaluation)
38 |
39 | ## Installation
40 |
41 | Our code has been tested on Ubuntu 18.04 with CUDA 11.0. Create a python3.6 virtual environment and install the dependencies.
42 |
43 | ```
44 | virtualenv -p /usr/bin/python3.6 env-visual-selfmodeling
45 | source env-visual-selfmodeling/bin/activate
46 | cd visual-selfmodeling
47 | pip install -r requirements.txt
48 | ```
49 |
50 | You may also need to run the following two lines to specify the correct cuda path for pycuda and nvcc.
51 |
52 | ```
53 | export PATH=/usr/local/cuda-11.0/bin:$PATH
54 | export LD_LIBRARY_PATH=/usr/local/cuda-11.0/lib64:${LD_LIBRARY_PATH:+${LD_LIBRARY_PATH}}
55 | ```
56 |
57 | To run the evaluation metrics, please install the additional package with the following line.
58 |
59 | ```
60 | python setup.py build_ext --inplace
61 | ```
62 |
63 | ## Data Preparation
64 |
65 | Run the following commands to generate the simulated data in Pybullet.
66 |
67 | ```
68 | cd visual-selfmodeling
69 | python sim.py
70 | ```
71 |
72 | This will generate the mesh files in a folder named `saved_meshes` under current directory. A `robot_state.json` file will also be generated in `saved_meshes` folder to store the corresponding joint angles.
73 |
74 | Then generate the pointcloud with normals.
75 |
76 | ```
77 | ipython3
78 | from utils import common
79 | common.convert_ply_to_xyzn(folder='./saved_meshes')
80 | ```
81 |
82 | ## About Configs and Logs
83 |
84 | Before training and evaluation, we first introduce the configuration and logging structure.
85 |
86 | **Configs:** all the specific parameters used for training and evaluation are indicated in `./configs/state_condition/config1.yaml`. If you would like to play with other parameters, feel free to copy the existing config file and modify it. You will then just need to change the config file path in the following training steps to point to the new configuration file.
87 |
88 | To train the self-model which also predicts the end effector position together with our visual self-model, please use `./configs/state_condition_kinematic/config1.yaml`.
89 |
90 | To train the self-model which only predicts the end effector from scratch, without our visual self-model, please use `./configs/state_condition_kinematic_scratch/config1.yaml`.
91 |
92 | If you save the data to other directories, please make sure the `data_filepath` argument in each config file points to the correct path.
93 |
94 | **Logs:** both the training and evaluation results will be saved in the log folder for each experiment. The log folders will be located under `./scripts` folder. The last digit in the logs folder indicates the random seed. Inside the logs folder, the structure and contents are:
95 |
96 | ```
97 | \logs_True_False_False_image_conv2d-encoder-decoder_True_{output_representation}_{seed}
98 | \lightning_logs
99 | \checkpoints [saved checkpoint]
100 | \version_0 [training stats]
101 | \predictions [complete predicted meshes before normalization]
102 | \predictions_denormalized [complete predicted meshes after denormalization]
103 | ```
104 |
105 | ## Training
106 |
107 | To train our visual self-model, run the following command.
108 |
109 | ```
110 | cd scripts;
111 | CUDA_VISIBLE_DEVICES=0 python ../main.py ../configs/state_condition/config1.yaml NA;
112 | ```
113 |
114 | To use our pre-trained self-model to train a small network to predict end-effector position, run the following command. For this step, please uncomment the validation code in `models.py` (line 143-158, line 202-204, and line 225-231). Please only uncomment them for this particular step.
115 |
116 | ```
117 | cd scripts;
118 | CUDA_VISIBLE_DEVICES=0 python ../main.py ../configs/state_condition_kinematic/config1.yaml kinematic ./logs_state-condition_new-global-siren-sdf_1/lightning_logs/version_0/checkpoints/;
119 | ```
120 |
121 | To train the baseline model that predicts end-effector position from scratch, without using our visual self-model, run the following command. For this step, please uncomment the validation code in `models.py` (line 143-158, line 202-204, and line 225-231). Please only uncomment them for this particular step.
122 |
123 | ```
124 | CUDA_VISIBLE_DEVICES=0 python ../main.py ../configs/state_condition_kinematic_scratch/config1.yaml kinematic-scratch NA;
125 | ```
126 |
127 | ## Evaluation
128 |
129 | To evaluate the predicted meshes and compare with baselines, run the following commands.
130 |
131 | ```
132 | cd scripts;
133 | CUDA_VISIBLE_DEVICES=0 python ../eval.py ../configs/state_condition/config1.yaml ./logs_state-condition_new-global-siren-sdf_1/lightning_logs/version_0/checkpoints/ eval-state-condition;
134 |
135 | cd utils;
136 | python eval_mesh.py ../configs/state_condition/config1.yaml model;
137 | python eval_mesh.py ../configs/state_condition/config1.yaml nearest-neighbor;
138 | python eval_mesh.py ../configs/state_condition/config1.yaml random;
139 |
140 | CUDA_VISIBLE_DEVICES=0 python ../eval.py ../configs/state_condition_kinematic/config1.yaml ./logs_state-condition-kinematic_new-global-siren-sdf_1/lightning_logs/version_0/checkpoints/ eval-kinematic ./logs_state-condition_new-global-siren-sdf_1/lightning_logs/version_0/checkpoints/;
141 |
142 | CUDA_VISIBLE_DEVICES=4 python ../eval.py ../configs/state_condition_kinematic_scratch/config1.yaml ./logs_state-condition-kinematic-scratch_new-global-siren-sdf_1/lightning_logs/version_0/checkpoints/ eval-kinematic;
143 | ```
144 |
145 | ## License
146 |
147 | This repository is released under the MIT license. See [LICENSE](LICENSE) for additional details.
148 |
149 | ## Reference
150 |
151 | - https://github.com/vsitzmann/siren
152 | - https://github.com/autonomousvision/occupancy_networks/
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 2.8.3)
2 | project(interbotix_descriptions)
3 |
4 | ## Find catkin macros and libraries
5 | ## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
6 | ## is used, also find other catkin packages
7 | find_package(catkin REQUIRED COMPONENTS
8 | joint_state_publisher
9 | joint_state_publisher_gui
10 | robot_state_publisher
11 | roslaunch
12 | rviz
13 | xacro
14 | )
15 |
16 | ## Uncomment this if the package has a setup.py. This macro ensures
17 | ## modules and global scripts declared therein get installed
18 | ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
19 | catkin_python_setup()
20 |
21 | ###################################
22 | ## catkin specific configuration ##
23 | ###################################
24 | ## The catkin_package macro generates cmake config files for your package
25 | ## Declare things to be passed to dependent projects
26 | catkin_package(
27 | CATKIN_DEPENDS joint_state_publisher joint_state_publisher_gui robot_state_publisher roslaunch rviz xacro
28 | )
29 |
30 | ###########
31 | ## Build ##
32 | ###########
33 |
34 | ## Specify additional locations of header files
35 | ## Your package locations should be listed before other locations
36 | include_directories(
37 | ${catkin_INCLUDE_DIRS}
38 | )
39 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/README.md:
--------------------------------------------------------------------------------
1 | # interbotix_descriptions
2 |
3 | ## Overview
4 | This package contains the URDFs and meshes for the many X-Series Interbotix arms and turrets. The STL files for each robot are located in a unique folder inside the [meshes](meshes/) directory. Also in the 'meshes' directory is the [interbotix_black.png](meshes/interbotix_black.png) picture. The appearance and texture of the robots come from this picture. Next, the URDFs for the robot are located in the [urdf](urdf/) directory. They are written in 'xacro' format so that users have the ability to customize what parts of the URDF get loaded to the parameter server (see the 'Usage' section below for details). Note that all the other ROS packages in the repo reference this package to launch the robot model.
5 |
6 | ## Structure
7 | 
8 | This package contains the [description.launch](launch/description.launch) file responsible for loading parts or all of the robot model. It launches up to four nodes as described below:
9 | - **joint_state_publisher** - responsible for parsing the 'robot_description' parameter to find all non-fixed joints and publish a JointState message with those joints defined.
10 | - **joint_state_publisher_gui** - does the same thing as the 'joint_state_publisher' node but with a GUI that allows a user to easily manipulate the joints.
11 | - **robot_state_publisher** - uses the URDF specified by the parameter robot_description and the joint positions from the joint_states topic to calculate the forward kinematics of the robot and publish the results via tf.
12 | - **rviz** - displays the virtual robot model using the transforms in the 'tf' topic.
13 |
14 | ## Usage
15 | To run this package, type the line below in a terminal. Note that the `robot_name` argument must be specified as the name of one of the URDF files located in the [urdf](/urdf) directory (excluding the '.urdf.xacro' part). For example, to launch the ReactorX 150 arm, type:
16 | ```
17 | $ roslaunch interbotix_descriptions description.launch robot_name:=rx150 jnt_pub_gui:=true
18 | ```
19 | This is the bare minimum needed to get up and running. Take a look at the table below to see how to further customize with other launch file arguments.
20 |
21 | | Argument | Description | Default Value |
22 | | -------- | ----------- | :-----------: |
23 | | robot_name | name of a robot (ex. 'arm1/wx200' or 'wx200') | "" |
24 | | robot_model | only used when launching multiple robots or if `robot_name` contains more than the model type; if that's the case, this should be set to the robot model type (ex. 'wx200'); `robot_name` should then be set to a unique name followed by '$(arg robot_model)' - such as 'arm1/wx200' | '$(arg robot_name)' |
25 | | use_default_gripper_bar | if true, the gripper_bar link is also loaded to the 'robot_description' parameter; if false, the gripper_bar link and any other link past it in the kinematic chain is not loaded to the parameter server. Set to 'false' if you have a custom gripper attachment | true |
26 | | use_default_gripper_fingers | if true, the gripper fingers are also loaded to the 'robot_description' parameter; if false, the gripper fingers and any other link past it in the kinematic chain is not loaded to the parameter server. Set to 'false' if you have custom gripper fingers | true |
27 | | use_world_frame | set this to 'true' if you would like to load a 'world' frame to the 'robot_description' parameter which is located exactly at the 'base_link' frame of the robot; if using multiple robots or if you would like to attach the 'base_link' frame of the robot to a different frame, set this to False | true |
28 | | external_urdf_loc | the file path to the custom urdf.xacro file that you would like to include in the Interbotix robot's urdf.xacro file| "" |
29 | | load_gazebo_configs | set this to 'true' if Gazebo is being used; it makes sure to also load Gazebo related configs to the 'robot_description' parameter so that the robot models show up black in Gazebo | false |
30 | | jnt_pub_gui | launches the joint_state_publisher GUI | false |
31 | | use_joint_pub | launches the joint_state_publisher node | false |
32 | | use_default_rviz | launches the rviz and static_transform_publisher nodes | true |
33 | | rvizconfig | file path to the config file Rviz should load | refer to [description.launch](launch/description.launch) |
34 | | model | file path to the robot-specific URDF including arguments to be passed in | refer to [description.launch](launch/description.launch) |
35 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/images/descriptions_flowchart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/images/descriptions_flowchart.png
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/launch/description.launch:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
21 |
22 |
23 |
24 |
29 |
30 |
31 |
36 |
37 |
38 |
43 |
44 |
45 |
51 |
52 |
53 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/interbotix_black.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/interbotix_black.png
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-5-Wrist-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-5-Wrist-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-6-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-6-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-7-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-7-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-8-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px100/PXA-100-M-8-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-5-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-5-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-6-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-6-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-7-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-7-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-8-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-8-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-9-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_px150/PXA-150-M-9-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_pxxls/PXT-XLS-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_pxxls/PXT-XLS-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_pxxls/PXT-XLS-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_pxxls/PXT-XLS-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_pxxls/PXT-XLS-M-3-Top.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_pxxls/PXT-XLS-M-3-Top.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-5-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-5-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-6-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-6-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-7-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-7-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-8-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-8-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-9-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx150/RXA-150-M-9-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-5-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-5-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-6-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-6-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-7-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-7-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-8-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-8-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-9-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_rx200/RXA-200-M-9-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-5-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-5-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-6-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-6-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-7-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-7-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-8-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-8-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-9-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx250/VXA-250-M-9-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-5-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-5-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-6-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-6-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-7-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-7-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-8-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-8-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-9-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300/VXA-300-M-9-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-10-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-10-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-4-UF.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-4-UF.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-5-LF.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-5-LF.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-6-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-6-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-7-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-7-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-8-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-8-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-9-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vx300s/VXSA-300-M-9-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vxxmd/VXT-XMD-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vxxmd/VXT-XMD-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vxxmd/VXT-XMD-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vxxmd/VXT-XMD-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vxxmd/VXT-XMD-M-3-Top.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vxxmd/VXT-XMD-M-3-Top.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vxxms/VXT-XMS-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vxxms/VXT-XMS-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vxxms/VXT-XMS-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vxxms/VXT-XMS-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_vxxms/VXT-XMS-M-3-Top.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_vxxms/VXT-XMS-M-3-Top.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-5-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-5-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-6-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-6-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-7-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-7-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-8-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-8-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-9-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/WXA-200-M-9-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx200/grippers.STL:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx200/grippers.STL
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-4-Forearm.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-4-Forearm.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-5-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-5-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-6-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-6-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-7-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-7-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-8-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-8-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-9-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250/WXA-250-M-9-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-10-Finger.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-10-Finger.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-3-UA.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-3-UA.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-4-UF.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-4-UF.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-5-LF.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-5-LF.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-6-Wrist.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-6-Wrist.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-7-Gripper.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-7-Gripper.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-8-Gripper-Prop.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-8-Gripper-Prop.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-9-Gripper-Bar.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wx250s/WXSA-250-M-9-Gripper-Bar.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wxxmd/WXT-XMD-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wxxmd/WXT-XMD-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wxxmd/WXT-XMD-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wxxmd/WXT-XMD-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wxxmd/WXT-XMD-M-3-Top.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wxxmd/WXT-XMD-M-3-Top.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wxxms/WXT-XMS-M-1-Base.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wxxms/WXT-XMS-M-1-Base.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wxxms/WXT-XMS-M-2-Shoulder.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wxxms/WXT-XMS-M-2-Shoulder.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/meshes/meshes_wxxms/WXT-XMS-M-3-Top.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/meshes/meshes_wxxms/WXT-XMS-M-3-Top.stl
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/package.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | interbotix_descriptions
4 | 0.0.0
5 | The interbotix_descriptions package
6 | Solomon Wiznitzer
7 | BSD
8 |
9 | Solomon Wiznitzer
10 |
11 |
12 |
13 | catkin
14 | joint_state_publisher
15 | joint_state_publisher_gui
16 | robot_state_publisher
17 | roslaunch
18 | rviz
19 | xacro
20 | joint_state_publisher
21 | joint_state_publisher_gui
22 | robot_state_publisher
23 | roslaunch
24 | rviz
25 | xacro
26 | joint_state_publisher
27 | joint_state_publisher_gui
28 | robot_state_publisher
29 | roslaunch
30 | rviz
31 | xacro
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/rviz/description.rviz:
--------------------------------------------------------------------------------
1 | Panels:
2 | - Class: rviz/Displays
3 | Help Height: 78
4 | Name: Displays
5 | Property Tree Widget:
6 | Expanded:
7 | - /Global Options1
8 | Splitter Ratio: 0.577777803
9 | Tree Height: 775
10 | - Class: rviz/Selection
11 | Name: Selection
12 | - Class: rviz/Tool Properties
13 | Expanded:
14 | - /2D Pose Estimate1
15 | - /2D Nav Goal1
16 | - /Publish Point1
17 | Name: Tool Properties
18 | Splitter Ratio: 0.588679016
19 | - Class: rviz/Views
20 | Expanded:
21 | - /Current View1
22 | Name: Views
23 | Splitter Ratio: 0.5
24 | - Class: rviz/Time
25 | Experimental: false
26 | Name: Time
27 | SyncMode: 0
28 | SyncSource: ""
29 | Toolbars:
30 | toolButtonStyle: 2
31 | Visualization Manager:
32 | Class: ""
33 | Displays:
34 | - Alpha: 0.5
35 | Cell Size: 0.25
36 | Class: rviz/Grid
37 | Color: 160; 160; 164
38 | Enabled: true
39 | Line Style:
40 | Line Width: 0.0299999993
41 | Value: Lines
42 | Name: Grid
43 | Normal Cell Count: 0
44 | Offset:
45 | X: 0
46 | Y: 0
47 | Z: 0
48 | Plane: XY
49 | Plane Cell Count: 10
50 | Reference Frame:
51 | Value: true
52 | - Alpha: 1
53 | Class: rviz/RobotModel
54 | Collision Enabled: false
55 | Enabled: true
56 | Links:
57 | All Links Enabled: true
58 | Expand Joint Details: false
59 | Expand Link Details: false
60 | Expand Tree: false
61 | Link Tree Style: Links in Alphabetic Order
62 | arm1/wx200/base_link:
63 | Alpha: 1
64 | Show Axes: false
65 | Show Trail: false
66 | Value: true
67 | arm1/wx200/ee_arm_link:
68 | Alpha: 1
69 | Show Axes: false
70 | Show Trail: false
71 | arm1/wx200/ee_gripper_link:
72 | Alpha: 1
73 | Show Axes: false
74 | Show Trail: false
75 | arm1/wx200/fingers_link:
76 | Alpha: 1
77 | Show Axes: false
78 | Show Trail: false
79 | arm1/wx200/forearm_link:
80 | Alpha: 1
81 | Show Axes: false
82 | Show Trail: false
83 | Value: true
84 | arm1/wx200/gripper_bar_link:
85 | Alpha: 1
86 | Show Axes: false
87 | Show Trail: false
88 | Value: true
89 | arm1/wx200/gripper_link:
90 | Alpha: 1
91 | Show Axes: false
92 | Show Trail: false
93 | Value: true
94 | arm1/wx200/gripper_prop_link:
95 | Alpha: 1
96 | Show Axes: false
97 | Show Trail: false
98 | Value: true
99 | arm1/wx200/left_finger_link:
100 | Alpha: 1
101 | Show Axes: false
102 | Show Trail: false
103 | Value: true
104 | arm1/wx200/right_finger_link:
105 | Alpha: 1
106 | Show Axes: false
107 | Show Trail: false
108 | Value: true
109 | arm1/wx200/shoulder_link:
110 | Alpha: 1
111 | Show Axes: false
112 | Show Trail: false
113 | Value: true
114 | arm1/wx200/upper_arm_link:
115 | Alpha: 1
116 | Show Axes: false
117 | Show Trail: false
118 | Value: true
119 | arm1/wx200/wrist_link:
120 | Alpha: 1
121 | Show Axes: false
122 | Show Trail: false
123 | Value: true
124 | Name: RobotModel
125 | Robot Description: robot_description
126 | TF Prefix: ""
127 | Update Interval: 0
128 | Value: true
129 | Visual Enabled: true
130 | - Class: rviz/TF
131 | Enabled: true
132 | Frame Timeout: 15
133 | Frames:
134 | All Enabled: false
135 | arm1/wx200/base_link:
136 | Value: true
137 | arm1/wx200/ee_arm_link:
138 | Value: true
139 | arm1/wx200/ee_gripper_link:
140 | Value: true
141 | arm1/wx200/fingers_link:
142 | Value: true
143 | arm1/wx200/forearm_link:
144 | Value: true
145 | arm1/wx200/gripper_bar_link:
146 | Value: true
147 | arm1/wx200/gripper_link:
148 | Value: true
149 | arm1/wx200/gripper_prop_link:
150 | Value: true
151 | arm1/wx200/left_finger_link:
152 | Value: true
153 | arm1/wx200/right_finger_link:
154 | Value: true
155 | arm1/wx200/shoulder_link:
156 | Value: true
157 | arm1/wx200/upper_arm_link:
158 | Value: true
159 | arm1/wx200/wrist_link:
160 | Value: true
161 | Marker Scale: 0.200000003
162 | Name: TF
163 | Show Arrows: true
164 | Show Axes: true
165 | Show Names: true
166 | Tree:
167 | arm1/wx200/base_link:
168 | arm1/wx200/shoulder_link:
169 | arm1/wx200/upper_arm_link:
170 | arm1/wx200/forearm_link:
171 | arm1/wx200/wrist_link:
172 | arm1/wx200/gripper_link:
173 | arm1/wx200/ee_arm_link:
174 | arm1/wx200/gripper_bar_link:
175 | arm1/wx200/fingers_link:
176 | arm1/wx200/ee_gripper_link:
177 | {}
178 | arm1/wx200/left_finger_link:
179 | {}
180 | arm1/wx200/right_finger_link:
181 | {}
182 | arm1/wx200/gripper_prop_link:
183 | {}
184 | Update Interval: 0
185 | Value: true
186 | Enabled: true
187 | Global Options:
188 | Background Color: 0; 60; 90
189 | Default Light: true
190 | Fixed Frame: world
191 | Frame Rate: 30
192 | Name: root
193 | Tools:
194 | - Class: rviz/Interact
195 | Hide Inactive Objects: true
196 | - Class: rviz/MoveCamera
197 | - Class: rviz/Select
198 | - Class: rviz/FocusCamera
199 | - Class: rviz/Measure
200 | - Class: rviz/SetInitialPose
201 | Topic: /initialpose
202 | - Class: rviz/SetGoal
203 | Topic: /move_base_simple/goal
204 | - Class: rviz/PublishPoint
205 | Single click: true
206 | Topic: /clicked_point
207 | Value: true
208 | Views:
209 | Current:
210 | Class: rviz/Orbit
211 | Distance: 1.69209588
212 | Enable Stereo Rendering:
213 | Stereo Eye Separation: 0.0599999987
214 | Stereo Focal Distance: 1
215 | Swap Stereo Eyes: false
216 | Value: false
217 | Focal Point:
218 | X: 0
219 | Y: 0
220 | Z: 0
221 | Focal Shape Fixed Size: false
222 | Focal Shape Size: 0.0500000007
223 | Invert Z Axis: false
224 | Name: Current View
225 | Near Clip Distance: 0.00999999978
226 | Pitch: 0.785398185
227 | Target Frame:
228 | Value: Orbit (rviz)
229 | Yaw: 0.785398185
230 | Saved: ~
231 | Window Geometry:
232 | Displays:
233 | collapsed: false
234 | Height: 1056
235 | Hide Left Dock: false
236 | Hide Right Dock: true
237 | QMainWindow State: 000000ff00000000fd00000004000000000000016a00000396fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000002800000396000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f00000396fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002800000396000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f0000003efc0100000002fb0000000800540069006d006501000000000000073f0000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000005cf0000039600000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
238 | Selection:
239 | collapsed: false
240 | Time:
241 | collapsed: false
242 | Tool Properties:
243 | collapsed: false
244 | Views:
245 | collapsed: true
246 | Width: 1855
247 | X: 65
248 | Y: 24
249 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/setup.py:
--------------------------------------------------------------------------------
## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
"""Catkin-managed setup script for the interbotix_descriptions package.

Catkin invokes this during the build; the package metadata is pulled
from package.xml rather than duplicated here.
"""

# setuptools replaces the deprecated distutils.core (PEP 632; distutils
# was removed from the standard library in Python 3.12). setuptools.setup
# accepts the same arguments, so behavior is unchanged.
from setuptools import setup

from catkin_pkg.python_setup import generate_distutils_setup

# fetch values from package.xml
setup_args = generate_distutils_setup(
    packages=['interbotix_descriptions'],
    package_dir={'': 'src'},
)

setup(**setup_args)
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/src/interbotix_descriptions/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/interbotix_descriptions/src/interbotix_descriptions/__init__.py
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/src/interbotix_descriptions/interbotix_mr_descriptions.py:
--------------------------------------------------------------------------------
1 | # Modern Robotics Descriptions for all eight Interbotix Arms.
2 | # Note that the end-effector is positioned at '/ee_arm_link'
3 | # and that the Space frame is positioned at '/base_link'.
4 |
5 | import numpy as np
6 |
class px100:
    """Modern Robotics description of the PincherX-100 (4-DOF).

    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],          # joint 1
            [0.0, 1.0, 0.0, -0.09305, 0.0, 0.0],     # joint 2
            [0.0, -1.0, 0.0, 0.19305, 0.0, -0.035],  # joint 3
            [0.0, -1.0, 0.0, 0.19305, 0.0, -0.135],  # joint 4
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.248575],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.19305],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
17 |
class px150:
    """Modern Robotics description of the PincherX-150 (5-DOF).

    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],          # joint 1
            [0.0, 1.0, 0.0, -0.10391, 0.0, 0.0],     # joint 2
            [0.0, -1.0, 0.0, 0.25391, 0.0, -0.05],   # joint 3
            [0.0, -1.0, 0.0, 0.25391, 0.0, -0.2],    # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.25391, 0.0],      # joint 5
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.358575],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.25391],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
29 |
class rx150:
    """Modern Robotics description of the ReactorX-150 (5-DOF).

    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],          # joint 1
            [0.0, 1.0, 0.0, -0.10391, 0.0, 0.0],     # joint 2
            [0.0, -1.0, 0.0, 0.25391, 0.0, -0.05],   # joint 3
            [0.0, -1.0, 0.0, 0.25391, 0.0, -0.2],    # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.25391, 0.0],      # joint 5
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    # FIX: M[2, 3] was 0.25931 — a digit transposition.  In every other
    # arm class in this module M[2, 3] equals the wrist-height constant
    # that appears three times in its own Slist (here 0.25391, identical
    # to px150, whose Slist matches rx150's exactly).
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.358575],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.25391],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
41 |
class rx200:
    """Modern Robotics description of the ReactorX-200 (5-DOF).

    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],          # joint 1
            [0.0, 1.0, 0.0, -0.10391, 0.0, 0.0],     # joint 2
            [0.0, -1.0, 0.0, 0.30391, 0.0, -0.05],   # joint 3
            [0.0, -1.0, 0.0, 0.30391, 0.0, -0.25],   # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.30391, 0.0],      # joint 5
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.408575],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.30391],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
53 |
class vx250:
    """Modern Robotics description of the ViperX-250 (5-DOF).

    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],          # joint 1
            [0.0, 1.0, 0.0, -0.12675, 0.0, 0.0],     # joint 2
            [0.0, -1.0, 0.0, 0.37675, 0.0, -0.06],   # joint 3
            [0.0, -1.0, 0.0, 0.37675, 0.0, -0.31],   # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.37675, 0.0],      # joint 5
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.468575],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.37675],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
65 |
class vx300:
    """Modern Robotics description of the ViperX-300 (5-DOF).

    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],            # joint 1
            [0.0, 1.0, 0.0, -0.12675, 0.0, 0.0],       # joint 2
            [0.0, -1.0, 0.0, 0.42675, 0.0, -0.05955],  # joint 3
            [0.0, -1.0, 0.0, 0.42675, 0.0, -0.35955],  # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.42675, 0.0],        # joint 5
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.536494],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.42675],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
77 |
class vx300s:
    """Modern Robotics description of the ViperX-300s (6-DOF).

    Same geometry as :class:`vx300` plus an extra roll joint.
    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],            # joint 1
            [0.0, 1.0, 0.0, -0.12675, 0.0, 0.0],       # joint 2
            [0.0, -1.0, 0.0, 0.42675, 0.0, -0.05955],  # joint 3
            [1.0, 0.0, 0.0, 0.0, 0.42675, 0.0],        # joint 4
            [0.0, -1.0, 0.0, 0.42675, 0.0, -0.35955],  # joint 5
            [1.0, 0.0, 0.0, 0.0, 0.42675, 0.0],        # joint 6
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.536494],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.42675],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
90 |
class wx200:
    """Modern Robotics description of the WidowX-200 (5-DOF).

    ``Slist`` holds screw axes (one per *column*, [w; v] order) in the
    Space frame at '/base_link'; ``Blist`` holds screw axes in the Body
    frame.  ``M`` is the zero-configuration pose of the end-effector at
    '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],          # joint 1
            [0.0, 1.0, 0.0, -0.11025, 0.0, 0.0],     # joint 2
            [0.0, -1.0, 0.0, 0.31025, 0.0, -0.05],   # joint 3
            [0.0, -1.0, 0.0, 0.31025, 0.0, -0.25],   # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.31025, 0.0],      # joint 5
        ]
    ).T

    # Body-frame screw axes.
    # NOTE(review): the 0.358 x-offset used here does not match
    # M[0, 3] (0.408575); Blist may be expressed about a different
    # reference point than '/ee_arm_link' — confirm before relying
    # on it for body-frame kinematics.
    Blist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.358, 0.0],    # joint 1
            [0.0, 1.0, 0.0, 0.2, 0.0, -0.358],   # joint 2
            [0.0, -1.0, 0.0, 0.0, 0.0, 0.308],   # joint 3
            [0.0, -1.0, 0.0, 0.0, 0.0, 0.108],   # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.0, 0.0],      # joint 5
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.408575],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.31025],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
108 |
class wx250:
    """Modern Robotics description of the WidowX-250 (5-DOF).

    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],            # joint 1
            [0.0, 1.0, 0.0, -0.11025, 0.0, 0.0],       # joint 2
            [0.0, -1.0, 0.0, 0.36025, 0.0, -0.04975],  # joint 3
            [0.0, -1.0, 0.0, 0.36025, 0.0, -0.29975],  # joint 4
            [1.0, 0.0, 0.0, 0.0, 0.36025, 0.0],        # joint 5
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.458325],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.36025],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
120 |
class wx250s:
    """Modern Robotics description of the WidowX-250s (6-DOF).

    Same geometry as :class:`wx250` plus an extra roll joint.
    ``Slist`` holds one screw axis per *column* ([w; v] order),
    expressed in the Space frame at '/base_link'.  ``M`` is the
    zero-configuration pose of the end-effector at '/ee_arm_link'.
    """

    # One row per joint, transposed so each column is a screw axis.
    Slist = np.array(
        [
            [0.0, 0.0, 1.0, 0.0, 0.0, 0.0],            # joint 1
            [0.0, 1.0, 0.0, -0.11025, 0.0, 0.0],       # joint 2
            [0.0, -1.0, 0.0, 0.36025, 0.0, -0.04975],  # joint 3
            [1.0, 0.0, 0.0, 0.0, 0.36025, 0.0],        # joint 4
            [0.0, -1.0, 0.0, 0.36025, 0.0, -0.29975],  # joint 5
            [1.0, 0.0, 0.0, 0.0, 0.36025, 0.0],        # joint 6
        ]
    ).T

    # Home pose of '/ee_arm_link' in the Space frame.
    M = np.array(
        [
            [1.0, 0.0, 0.0, 0.458325],
            [0.0, 1.0, 0.0, 0.0],
            [0.0, 0.0, 1.0, 0.36025],
            [0.0, 0.0, 0.0, 1.0],
        ]
    )
133 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/urdf/pxxls.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/urdf/vxxmd.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 | >
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/urdf/vxxms.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/urdf/wx200_gripper.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 | transmission_interface/SimpleTransmission
40 |
41 | hardware_interface/EffortJointInterface
42 |
43 |
44 | 1
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 | transmission_interface/SimpleTransmission
76 |
77 | hardware_interface/EffortJointInterface
78 |
79 |
80 | 1
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 | transmission_interface/SimpleTransmission
112 |
113 | hardware_interface/EffortJointInterface
114 |
115 |
116 | 1
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 | transmission_interface/SimpleTransmission
148 |
149 | hardware_interface/EffortJointInterface
150 |
151 |
152 | 1
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 | transmission_interface/SimpleTransmission
184 |
185 | hardware_interface/EffortJointInterface
186 |
187 |
188 | 1
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/urdf/wx200_gripper_record.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 | transmission_interface/SimpleTransmission
40 |
41 | hardware_interface/EffortJointInterface
42 |
43 |
44 | 1
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 | transmission_interface/SimpleTransmission
76 |
77 | hardware_interface/EffortJointInterface
78 |
79 |
80 | 1
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 | transmission_interface/SimpleTransmission
112 |
113 | hardware_interface/EffortJointInterface
114 |
115 |
116 | 1
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 | transmission_interface/SimpleTransmission
148 |
149 | hardware_interface/EffortJointInterface
150 |
151 |
152 | 1
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 | transmission_interface/SimpleTransmission
184 |
185 | hardware_interface/EffortJointInterface
186 |
187 |
188 | 1
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/urdf/wxxmd.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
--------------------------------------------------------------------------------
/assets/interbotix_descriptions/urdf/wxxms.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/base_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/base_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/biceps_link.new.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/biceps_link.new.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/biceps_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/biceps_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/forearm_link.new.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/forearm_link.new.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/forearm_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/forearm_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/gripper_hand_fixed_link.left.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/gripper_hand_fixed_link.left.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/gripper_hand_fixed_link.new.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/gripper_hand_fixed_link.new.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/gripper_hand_fixed_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/gripper_hand_fixed_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/gripper_rail_link.new.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/gripper_rail_link.new.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/gripper_rail_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/gripper_rail_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/sensors/sr300.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/sensors/sr300.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/sensors/sr300_stand.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/sensors/sr300_stand.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/shoulder_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/shoulder_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/wrist_1_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/wrist_1_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/wrist_2_link.new.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/wrist_2_link.new.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/meshes/wrist_2_link.stl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/assets/widowx_arm_description/meshes/wrist_2_link.stl
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/block.obj:
--------------------------------------------------------------------------------
1 | # block.obj
2 | #
3 |
4 | o block
5 | mtllib block.mtl
6 |
7 | v -0.500000 -0.500000 0.500000
8 | v 0.500000 -0.500000 0.500000
9 | v -0.500000 0.500000 0.500000
10 | v 0.500000 0.500000 0.500000
11 | v -0.500000 0.500000 -0.500000
12 | v 0.500000 0.500000 -0.500000
13 | v -0.500000 -0.500000 -0.500000
14 | v 0.500000 -0.500000 -0.500000
15 |
16 | vt 0.000000 0.000000
17 | vt 1.000000 0.000000
18 | vt 0.000000 1.000000
19 | vt 1.000000 1.000000
20 |
21 | vn 0.000000 0.000000 1.000000
22 | vn 0.000000 1.000000 0.000000
23 | vn 0.000000 0.000000 -1.000000
24 | vn 0.000000 -1.000000 0.000000
25 | vn 1.000000 0.000000 0.000000
26 | vn -1.000000 0.000000 0.000000
27 |
28 | g block
29 | usemtl block
30 | s 1
31 | f 1/1/1 2/2/1 3/3/1
32 | f 3/3/1 2/2/1 4/4/1
33 | s 2
34 | f 3/1/2 4/2/2 5/3/2
35 | f 5/3/2 4/2/2 6/4/2
36 | s 3
37 | f 5/4/3 6/3/3 7/2/3
38 | f 7/2/3 6/3/3 8/1/3
39 | s 4
40 | f 7/1/4 8/2/4 1/3/4
41 | f 1/3/4 8/2/4 2/4/4
42 | s 5
43 | f 2/1/5 8/2/5 4/3/5
44 | f 4/3/5 8/2/5 6/4/5
45 | s 6
46 | f 7/1/6 1/2/6 5/3/6
47 | f 5/3/6 1/2/6 3/4/6
48 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/block.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/common_colours.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/common_materials.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/sensor_sr300.urdf.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
55 |
56 |
57 |
58 | Gazebo/Blue
59 | true
60 | true
61 |
62 |
63 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
95 |
96 |
97 |
98 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 | true
139 | 20.0
140 |
141 | ${60.0*M_PI/180.0}
142 |
143 | B8G8R8
144 | 640
145 | 480
146 |
147 |
148 | 0.05
149 | 8.0
150 |
151 |
152 |
153 | camera_sr300
154 | true
155 | 10
156 | rgb/image_raw
157 | depth/image_raw
158 | depth/points
159 | rgb/camera_info
160 | depth/camera_info
161 | camera_depth_optical_frame
162 | 0.1
163 | 0.0
164 | 0.0
165 | 0.0
166 | 0.0
167 | 0.0
168 | 0.2
169 |
170 |
171 |
172 |
173 |
174 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/weightless_sphere.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/weightless_sphere_record.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/widowx.urdf:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
137 |
139 |
177 |
178 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
225 |
232 |
247 |
248 |
256 |
257 |
258 | transmission_interface/SimpleTransmission
259 |
260 |
261 | EffortJointInterface
262 | 1
263 |
264 |
265 |
266 |
267 | transmission_interface/SimpleTransmission
268 |
269 |
270 | EffortJointInterface
271 | 1
272 |
273 |
274 |
275 |
276 | transmission_interface/SimpleTransmission
277 |
278 |
279 | EffortJointInterface
280 | 1
281 |
282 |
283 |
284 |
285 | transmission_interface/SimpleTransmission
286 |
287 |
288 | EffortJointInterface
289 | 1
290 |
291 |
292 |
293 |
294 | transmission_interface/SimpleTransmission
295 |
296 |
297 | EffortJointInterface
298 | 1
299 |
300 |
301 |
302 |
303 | transmission_interface/SimpleTransmission
304 |
305 |
306 | EffortJointInterface
307 | 1
308 |
309 |
310 |
311 |
312 |
--------------------------------------------------------------------------------
/assets/widowx_arm_description/urdf/widowx_gripper.xacro:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
--------------------------------------------------------------------------------
/configs/state_condition/config1.yaml:
--------------------------------------------------------------------------------
1 | lr: 0.00005 # more_layers: 0.00005, one layer: 0.0001
2 | seed: 1
3 | if_cuda: True
4 | gamma: 0.5
5 | log_dir: 'logs'
6 | train_batch: 4800
7 | val_batch: 4800
8 | test_batch: 4800
9 | task_batch: 9600
10 | num_workers: 8
11 | model_name: 'state-condition'
12 | data_filepath: './saved_meshes'
13 | lr_schedule: [10000000]
14 | num_gpus: 1
15 | epochs: 2000
16 | loss_type: 'siren_sdf'
17 | dof: 4
18 | coord_system: 'cartesian'
19 | tag: 'new-global-siren-sdf'
--------------------------------------------------------------------------------
/configs/state_condition_kinematic/config1.yaml:
--------------------------------------------------------------------------------
1 | lr: 0.0009 # more_layers: 0.00005, one layer: 0.0001
2 | seed: 1
3 | if_cuda: True
4 | gamma: 0.5
5 | log_dir: 'logs'
6 | train_batch: 4500
7 | val_batch: 4500
8 | test_batch: 4500
9 | task_batch: 9600
10 | num_workers: 8
11 | model_name: 'state-condition-kinematic'
12 | data_filepath: './saved_meshes'
13 | lr_schedule: [10000000]
14 | num_gpus: 1
15 | epochs: 1000
16 | loss_type: 'siren_sdf_kinematic'
17 | dof: 4
18 | coord_system: 'cartesian'
19 | tag: 'new-global-siren-sdf'
--------------------------------------------------------------------------------
/configs/state_condition_kinematic_scratch/config1.yaml:
--------------------------------------------------------------------------------
1 | lr: 0.0005 # more_layers: 0.00005, one layer: 0.0001
2 | seed: 1
3 | if_cuda: True
4 | gamma: 0.5
5 | log_dir: 'logs'
6 | train_batch: 4500
7 | val_batch: 4500
8 | test_batch: 4500
9 | task_batch: 9600
10 | num_workers: 8
11 | model_name: 'state-condition-kinematic-scratch'
12 | data_filepath: './saved_meshes'
13 | lr_schedule: [100000000]
14 | num_gpus: 1
15 | epochs: 1000
16 | loss_type: 'siren_sdf_kinematic_scratch'
17 | dof: 4
18 | coord_system: 'cartesian'
19 | tag: 'new-global-siren-sdf'
--------------------------------------------------------------------------------
/dataset.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import glob
4 | import math
5 | import json
6 | import torch
7 | import numpy as np
8 | from torch.utils.data import Dataset
9 |
class MultipleModel(Dataset):
    """SDF point-cloud dataset conditioned on a 4-DoF robot joint state.

    Each item draws `on_surface_points` surface samples (with normals) from one
    mesh point cloud plus an equal number of uniform off-surface samples, and
    pairs them with the normalized joint state of the robot pose that produced
    the mesh.
    """

    def __init__(self, flag, seed, pointcloud_folder, on_surface_points):
        super().__init__()

        self.flag = flag                          # data split key: 'train' / 'val' / 'test'
        self.seed = seed                          # which split-dict json to read
        self.pointcloud_folder = pointcloud_folder
        self.on_surface_points = on_surface_points
        self.all_filelist = self.get_all_filelist()
        self.robot_state_dict = self.load_robot_state()

    def get_all_filelist(self):
        """Return the .xyzn file paths belonging to this split."""
        filelist = []
        with open(os.path.join('../assets', 'datainfo', f'multiple_models_data_split_dict_{self.seed}.json'), 'r') as file:
            seq_dict = json.load(file)
        id_lst = seq_dict[self.flag]

        for idx in id_lst:
            filepath = os.path.join(self.pointcloud_folder, f'mesh_{idx}.xyzn')
            filelist.append(filepath)
        return filelist

    def __len__(self):
        return len(self.all_filelist)

    def __getitem__(self, idx):

        # =====> sdf samples
        coords, normals = self.load_pcd(idx)

        off_surface_samples = self.on_surface_points  # one off-surface sample per surface sample
        total_samples = self.on_surface_points + off_surface_samples

        # random subset of the surface point cloud
        point_cloud_size = coords.shape[0]
        rand_idcs = np.random.choice(point_cloud_size, size=self.on_surface_points)

        on_surface_coords = coords[rand_idcs, :]
        on_surface_normals = normals[rand_idcs, :]

        # uniform samples in the unit cube; normals of -1 mark "no normal available"
        off_surface_coords = np.random.uniform(-1, 1, size=(off_surface_samples, 3))
        off_surface_normals = np.ones((off_surface_samples, 3)) * -1

        sdf = np.zeros((total_samples, 1))  # on-surface = 0
        sdf[self.on_surface_points:, :] = -1  # off-surface = -1 (unknown sign/distance)

        final_coords = np.concatenate((on_surface_coords, off_surface_coords), axis=0)
        final_normals = np.concatenate((on_surface_normals, off_surface_normals), axis=0)

        # =====> robot state; mesh id is encoded in the file name 'mesh_<id>.xyzn'.
        # FIX: use os.path.basename instead of split('/') so the lookup also
        # works with OS-specific path separators.
        index = os.path.basename(self.all_filelist[idx]).split('.')[0].split('_')[1]
        robot_state = self.robot_state_dict[index]
        sel_robot_state = np.array([robot_state[0][0], robot_state[1][0], robot_state[2][0], robot_state[3][0]])
        sel_robot_state = sel_robot_state / np.pi  # joint angles normalized to roughly (-1, 1)
        sel_robot_state = sel_robot_state.reshape(1, -1)
        final_robot_states = np.tile(sel_robot_state, (total_samples, 1))

        return {'coords': torch.from_numpy(final_coords).float(), 'states': torch.from_numpy(final_robot_states).float()},{'sdf': torch.from_numpy(sdf).float(), 'normals': torch.from_numpy(final_normals).float()}

    def load_pcd(self, idx):
        """Load an .xyzn point cloud and normalize coordinates into (-1, 1)."""
        point_cloud = np.genfromtxt(self.all_filelist[idx])
        coords = point_cloud[:, :3]
        normals = point_cloud[:, 3:]

        # reshape point cloud such that it lies in bounding box of (-1, 1)
        # (distorts geometry, but makes for high sample efficiency)
        coords[:, 0] = coords[:, 0] / 0.45  # (-1, 1)
        coords[:, 1] = coords[:, 1] / 0.45  # (-1, 1)
        coords[:, 2] = coords[:, 2] - 0.13  # zero centering (-0.13, 0.51)
        coords[:, 2] = (coords[:, 2] + 0.13) / (0.51 + 0.13)  # (0, 1)
        coords[:, 2] = coords[:, 2] - 0.5  # (-0.5, 0.5)
        coords[:, 2] = coords[:, 2] / 0.5  # (-1, 1)
        return coords, normals

    def load_robot_state(self):
        """Load the {mesh id: joint state} mapping stored next to the meshes."""
        robot_state_filepath = os.path.join(self.pointcloud_folder, 'robot_state.json')
        with open(robot_state_filepath, 'r') as file:
            robot_state_dict = json.load(file)
        return robot_state_dict
88 |
class MultipleModelLink(Dataset):
    """Maps normalized joint states to a target link position.

    For the 'val' split a fixed held-out range of mesh ids (10000-10999) is
    used together with a dedicated kinematic-validation state file; otherwise
    the ids come from the shared split-dict json.
    """

    def __init__(self, flag, seed, pointcloud_folder):
        super().__init__()

        self.flag = flag
        self.seed = seed
        self.pointcloud_folder = pointcloud_folder
        self.all_filelist = self.get_all_filelist()
        self.robot_state_dict = self.load_robot_state()

    def get_all_filelist(self):
        """Return the .xyzn paths whose ids belong to this split."""
        if self.flag == 'val':
            ids = range(10000, 11000)
        else:
            with open(os.path.join('../assets', 'datainfo', f'multiple_models_data_split_dict_{self.seed}.json'), 'r') as fp:
                split_dict = json.load(fp)
            ids = split_dict[self.flag]
        return [os.path.join(self.pointcloud_folder, f'mesh_{i}.xyzn') for i in ids]

    def __len__(self):
        return len(self.all_filelist)

    def __getitem__(self, idx):
        # mesh id is encoded in the file name: mesh_<id>.xyzn
        filename = self.all_filelist[idx].split('/')[-1]
        key = filename.split('.')[0].split('_')[1]
        state = self.robot_state_dict[key]

        joint_state = np.array([state[0][0], state[1][0], state[2][0], state[3][0]])
        joint_state = joint_state / np.pi  # normalize joint angles
        link_target = np.array([state[5][0], state[5][1], state[5][2]])

        return ({'states': torch.from_numpy(joint_state).float()},
                {'target_states': torch.from_numpy(link_target).float()})

    def load_robot_state(self):
        """Load the {mesh id: robot state} mapping for this split."""
        name = 'robot_state_kinematic_val.json' if self.flag == 'val' else 'robot_state.json'
        with open(os.path.join(self.pointcloud_folder, name), 'r') as fp:
            return json.load(fp)
--------------------------------------------------------------------------------
/figures/teaser.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/figures/teaser.gif
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import sys
4 | import yaml
5 | import torch
6 | import pprint
7 | from munch import munchify
8 | from models import VisModelingModel
9 | from pytorch_lightning.plugins import DDPPlugin
10 | from pytorch_lightning import Trainer, seed_everything
11 | from pytorch_lightning.loggers import TensorBoardLogger
12 | from pytorch_lightning.callbacks import ModelCheckpoint
13 |
14 |
def load_config(filepath):
    """Load a YAML config file and return its contents as a dict.

    :param filepath: path to the YAML file.
    :raises yaml.YAMLError: if the file is not valid YAML.

    FIX: previously a YAML error was printed and swallowed, so the function
    returned None and callers crashed later inside munchify with a confusing
    TypeError; now the error is printed and re-raised at the point of failure.
    """
    with open(filepath, 'r') as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as exc:
            print(exc)
            raise
22 |
def seed(cfg):
    """Seed torch's CPU RNG (and the CUDA RNG when cfg.if_cuda is set)."""
    seed_value = cfg.seed
    torch.manual_seed(seed_value)
    if not cfg.if_cuda:
        return
    torch.cuda.manual_seed(seed_value)
27 |
28 |
def main():
    """Entry point for standard self-model (SDF) training.

    Reads the YAML config path from argv[1], seeds all RNGs, builds the
    model and a DDP trainer, and runs the fit loop.
    """
    config_filepath = str(sys.argv[1])
    raw_cfg = load_config(filepath=config_filepath)
    pprint.pprint(raw_cfg)
    cfg = munchify(raw_cfg)
    seed(cfg)
    seed_everything(cfg.seed)

    # run directory name encodes model, tag and seed
    log_dir = '_'.join([cfg.log_dir, cfg.model_name, cfg.tag, str(cfg.seed)])

    model_kwargs = dict(lr=cfg.lr,
                        seed=cfg.seed,
                        dof=cfg.dof,
                        if_cuda=cfg.if_cuda,
                        if_test=False,
                        gamma=cfg.gamma,
                        log_dir=log_dir,
                        train_batch=cfg.train_batch,
                        val_batch=cfg.val_batch,
                        test_batch=cfg.test_batch,
                        num_workers=cfg.num_workers,
                        model_name=cfg.model_name,
                        data_filepath=cfg.data_filepath,
                        loss_type=cfg.loss_type,
                        coord_system=cfg.coord_system,
                        lr_schedule=cfg.lr_schedule)
    model = VisModelingModel(**model_kwargs)

    # DDP trainer; deterministic so runs are reproducible given the seed
    trainer = Trainer(gpus=cfg.num_gpus,
                      max_epochs=cfg.epochs,
                      deterministic=True,
                      plugins=DDPPlugin(find_unused_parameters=False),
                      amp_backend='native',
                      default_root_dir=log_dir)

    trainer.fit(model)
68 |
def main_kinematic():
    """Train the kinematic head on top of a pretrained self-model.

    Expects argv[1] = config path and argv[3] = checkpoint directory of the
    pretrained model whose encoder weights are loaded before training.
    """
    config_filepath = str(sys.argv[1])
    raw_cfg = load_config(filepath=config_filepath)
    pprint.pprint(raw_cfg)
    cfg = munchify(raw_cfg)
    seed(cfg)
    seed_everything(cfg.seed)

    # run directory name encodes model, tag and seed
    log_dir = '_'.join([cfg.log_dir, cfg.model_name, cfg.tag, str(cfg.seed)])

    model_kwargs = dict(lr=cfg.lr,
                        seed=cfg.seed,
                        dof=cfg.dof,
                        if_cuda=cfg.if_cuda,
                        if_test=False,
                        gamma=cfg.gamma,
                        log_dir=log_dir,
                        train_batch=cfg.train_batch,
                        val_batch=cfg.val_batch,
                        test_batch=cfg.test_batch,
                        num_workers=cfg.num_workers,
                        model_name=cfg.model_name,
                        data_filepath=cfg.data_filepath,
                        loss_type=cfg.loss_type,
                        coord_system=cfg.coord_system,
                        lr_schedule=cfg.lr_schedule)
    model = VisModelingModel(**model_kwargs)

    # keep the checkpoint with the lowest validation loss
    checkpoint_callback = ModelCheckpoint(filename=log_dir + "{epoch}_{val_loss}",
                                          verbose=True,
                                          monitor='val_loss',
                                          mode='min',
                                          prefix='')

    trainer = Trainer(gpus=cfg.num_gpus,
                      max_epochs=cfg.epochs,
                      deterministic=True,
                      plugins=DDPPlugin(find_unused_parameters=False),
                      amp_backend='native',
                      default_root_dir=log_dir,
                      val_check_interval=1.0,
                      checkpoint_callback=checkpoint_callback)

    # load the pretrained encoder weights before fitting the kinematic head
    model.extract_kinematic_encoder_model(sys.argv[3])
    trainer.fit(model)
119 |
def main_kinematic_scratch():
    """Train the kinematic model from scratch (no pretrained encoder).

    Same setup as main_kinematic, minus loading pretrained weights.
    """
    config_filepath = str(sys.argv[1])
    raw_cfg = load_config(filepath=config_filepath)
    pprint.pprint(raw_cfg)
    cfg = munchify(raw_cfg)
    seed(cfg)
    seed_everything(cfg.seed)

    # run directory name encodes model, tag and seed
    log_dir = '_'.join([cfg.log_dir, cfg.model_name, cfg.tag, str(cfg.seed)])

    model_kwargs = dict(lr=cfg.lr,
                        seed=cfg.seed,
                        dof=cfg.dof,
                        if_cuda=cfg.if_cuda,
                        if_test=False,
                        gamma=cfg.gamma,
                        log_dir=log_dir,
                        train_batch=cfg.train_batch,
                        val_batch=cfg.val_batch,
                        test_batch=cfg.test_batch,
                        num_workers=cfg.num_workers,
                        model_name=cfg.model_name,
                        data_filepath=cfg.data_filepath,
                        loss_type=cfg.loss_type,
                        coord_system=cfg.coord_system,
                        lr_schedule=cfg.lr_schedule)
    model = VisModelingModel(**model_kwargs)

    # keep the checkpoint with the lowest validation loss
    checkpoint_callback = ModelCheckpoint(filename=log_dir + "{epoch}_{val_loss}",
                                          verbose=True,
                                          monitor='val_loss',
                                          mode='min',
                                          prefix='')

    trainer = Trainer(gpus=cfg.num_gpus,
                      max_epochs=cfg.epochs,
                      deterministic=True,
                      plugins=DDPPlugin(find_unused_parameters=False),
                      amp_backend='native',
                      default_root_dir=log_dir,
                      val_check_interval=1.0,
                      checkpoint_callback=checkpoint_callback)

    trainer.fit(model)
169 |
170 |
if __name__ == '__main__':
    # FIX: guard against a missing argv[2] — scripts/reconstruction.sh invokes
    # main.py with only the config path, which previously raised IndexError
    # here before main() could even run.
    mode = sys.argv[2] if len(sys.argv) > 2 else ''
    if mode == 'kinematic':
        main_kinematic()
    elif mode == 'kinematic-scratch':
        main_kinematic_scratch()
    else:
        main()
--------------------------------------------------------------------------------
/model_utils.py:
--------------------------------------------------------------------------------
1 |
2 | import torch
3 | import numpy as np
4 | import torch.nn as nn
5 | import torch.nn.functional as F
6 |
7 |
class SirenLayer(nn.Module):
    """One SIREN layer: a linear map followed by sin(w0 * x).

    The final layer of a SIREN network skips the sine activation and returns
    the raw linear output. Weight initialization follows the SIREN paper:
    U(-1/n, 1/n) for the first layer, U(-sqrt(6/n)/w0, sqrt(6/n)/w0) otherwise.
    """

    def __init__(self, in_f, out_f, w0=30, is_first=False, is_last=False):
        super().__init__()
        self.in_f = in_f
        self.w0 = w0
        self.linear = nn.Linear(in_f, out_f)
        self.is_first = is_first
        self.is_last = is_last
        self.init_weights()

    def init_weights(self):
        if self.is_first:
            bound = 1 / self.in_f
        else:
            bound = np.sqrt(6 / self.in_f) / self.w0
        with torch.no_grad():
            self.linear.weight.uniform_(-bound, bound)

    def forward(self, x):
        out = self.linear(x)
        if self.is_last:
            return out
        return torch.sin(self.w0 * out)
26 |
class StateConditionMLPQueryModel(torch.nn.Module):
    """SIREN MLP mapping (query point, robot state) -> scalar SDF value.

    The first 3 input channels are the xyz query coordinate; the remaining
    `in_channels - 3` channels are the robot state. Each half is embedded into
    `hidden_features / 2` features, concatenated, then decoded by a shared trunk.
    """

    def __init__(self, in_channels=4, out_channels=1, hidden_features=256):
        super(StateConditionMLPQueryModel, self).__init__()

        half_hidden_features = int(hidden_features / 2)
        # one-layer embedding of the xyz query point
        self.layerq1 = SirenLayer(3, half_hidden_features, is_first=True)
        # deeper embedding of the robot state
        self.layers1 = SirenLayer(in_channels - 3, half_hidden_features, is_first=True)
        self.layers2 = SirenLayer(half_hidden_features, half_hidden_features)
        self.layers3 = SirenLayer(half_hidden_features, half_hidden_features)
        self.layers4 = SirenLayer(half_hidden_features, half_hidden_features)
        # shared trunk over the concatenated embeddings
        self.layer2 = SirenLayer(hidden_features, hidden_features)
        self.layer3 = SirenLayer(hidden_features, hidden_features)
        self.layer4 = SirenLayer(hidden_features, hidden_features)
        self.layer5 = SirenLayer(hidden_features, out_channels, is_last=True)

    def query_encoder(self, x):
        """Embed the xyz query coordinates."""
        return self.layerq1(x)

    def state_encoder(self, x):
        """Embed the robot state through four SIREN layers."""
        for layer in (self.layers1, self.layers2, self.layers3, self.layers4):
            x = layer(x)
        return x

    def forward(self, x):
        query_feat = self.query_encoder(x[:, :3])
        state_feat = self.state_encoder(x[:, 3:])
        features = torch.cat((query_feat, state_feat), dim=1)
        for layer in (self.layer2, self.layer3, self.layer4, self.layer5):
            features = layer(features)
        return features
62 |
class KinematicFeatToLinkModel(torch.nn.Module):
    """Small two-layer SIREN head mapping kinematic features to a 3D link position."""

    def __init__(self, in_channels=128, out_channels=3, hidden_features=64):
        super(KinematicFeatToLinkModel, self).__init__()

        self.layer1 = SirenLayer(in_channels, hidden_features)
        self.layer2 = SirenLayer(hidden_features, out_channels, is_last=True)

    def forward(self, x):
        return self.layer2(self.layer1(x))
74 |
class KinematicScratchModel(torch.nn.Module):
    """From-scratch kinematic model: joint state -> 3D link position.

    Layers 1-4 mirror the self-model's kinematic branch; layers 5-6 are the
    additional head for the X_link prediction task.
    """

    def __init__(self, in_channels=4, out_channels=3, hidden_features=128, hidden_hidden_features=64):
        super(KinematicScratchModel, self).__init__()

        # original self-model's kinematic branch
        self.layer1 = SirenLayer(in_channels, hidden_features, is_first=True)
        self.layer2 = SirenLayer(hidden_features, hidden_features)
        self.layer3 = SirenLayer(hidden_features, hidden_features)
        self.layer4 = SirenLayer(hidden_features, hidden_features)
        # newly added branches for X_link tasks
        self.layer5 = SirenLayer(hidden_features, hidden_hidden_features)
        self.layer6 = SirenLayer(hidden_hidden_features, out_channels, is_last=True)

    def forward(self, x):
        for layer in (self.layer1, self.layer2, self.layer3,
                      self.layer4, self.layer5, self.layer6):
            x = layer(x)
        return x
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | absl-py==0.12.0
2 | addict==2.4.0
3 | aiohttp==3.7.4.post0
4 | anyio==3.2.1
5 | appdirs==1.4.4
6 | argon2-cffi==20.1.0
7 | async-generator==1.10
8 | async-timeout==3.0.1
9 | attrs==20.3.0
10 | Babel==2.9.1
11 | backcall==0.2.0
12 | bleach==3.3.0
13 | cachetools==4.2.1
14 | certifi==2020.12.5
15 | cffi==1.14.5
16 | chardet==4.0.0
17 | cloudpickle==1.6.0
18 | contextvars==2.4
19 | cycler==0.10.0
20 | Cython==0.29.23
21 | dataclasses==0.8
22 | decorator==4.4.2
23 | defusedxml==0.7.1
24 | deprecation==2.1.0
25 | entrypoints==0.3
26 | freetype-py==2.2.0
27 | fsspec==2021.4.0
28 | future==0.18.2
29 | google-auth==1.29.0
30 | google-auth-oauthlib==0.4.4
31 | grpcio==1.37.0
32 | idna==2.10
33 | idna-ssl==1.1.0
34 | imageio==2.9.0
35 | immutables==0.15
36 | importlib-metadata==4.0.0
37 | ipykernel==5.5.5
38 | ipython==7.16.1
39 | ipython-genutils==0.2.0
40 | ipywidgets==7.6.3
41 | jedi==0.17.2
42 | Jinja2==3.0.1
43 | joblib==1.0.1
44 | json5==0.9.6
45 | jsonschema==3.2.0
46 | jupyter-client==6.1.12
47 | jupyter-core==4.7.1
48 | jupyter-packaging==0.10.2
49 | jupyter-server==1.8.0
50 | jupyterlab==3.0.16
51 | jupyterlab-pygments==0.1.2
52 | jupyterlab-server==2.6.0
53 | jupyterlab-widgets==1.0.0
54 | kiwisolver==1.3.1
55 | llvmlite==0.36.0
56 | Mako==1.1.4
57 | Markdown==3.3.4
58 | MarkupSafe==2.0.1
59 | matplotlib==3.3.4
60 | mesh-to-sdf==0.0.14
61 | meshio==4.4.6
62 | mistune==0.8.4
63 | multidict==5.1.0
64 | munch==2.5.0
65 | nbclassic==0.3.1
66 | nbclient==0.5.3
67 | nbconvert==6.0.7
68 | nbformat==5.1.3
69 | nest-asyncio==1.5.1
70 | networkx==2.5.1
71 | notebook==6.4.0
72 | numba==0.53.1
73 | numpy==1.19.5
74 | oauthlib==3.1.0
75 | opencv-python==4.5.1.48
76 | packaging==20.9
77 | pandas==1.1.5
78 | pandocfilters==1.4.3
79 | parso==0.7.1
80 | pexpect==4.8.0
81 | pickleshare==0.7.5
82 | Pillow==8.2.0
83 | plyfile==0.7.3
84 | prometheus-client==0.11.0
85 | prompt-toolkit==3.0.18
86 | protobuf==3.15.8
87 | ptyprocess==0.7.0
88 | pyasn1==0.4.8
89 | pyasn1-modules==0.2.8
90 | pybullet==3.1.2
91 | pycparser==2.20
92 | pycuda==2021.1
93 | pyglet==1.5.0
94 | Pygments==2.8.1
95 | pyparsing==2.4.7
96 | pyrender==0.1.45
97 | pyrsistent==0.17.3
98 | PySimpleGUI==4.45.0
99 | python-dateutil==2.8.1
100 | pytools==2021.2.3
101 | pytorch-lightning==1.2.8
102 | pytz==2021.1
103 | pyvista==0.30.1
104 | PyWavelets==1.1.1
105 | pyzmq==22.1.0
106 | requests==2.25.1
107 | requests-oauthlib==1.3.0
108 | rsa==4.7.2
109 | scikit-image==0.17.2
110 | scikit-learn==0.24.2
111 | scipy==1.5.4
112 | scooby==0.5.7
113 | Send2Trash==1.7.1
114 | six==1.15.0
115 | sklearn==0.0
116 | sniffio==1.2.0
117 | tensorboard==2.4.1
118 | tensorboard-plugin-wit==1.8.0
119 | terminado==0.10.1
120 | testpath==0.5.0
121 | threadpoolctl==2.1.0
122 | tifffile==2020.9.3
123 | tomlkit==0.7.2
124 | --find-links https://download.pytorch.org/whl/torch_stable.html
125 | torch==1.7.0
126 | torchfile==0.1.0
127 | torchmetrics==0.3.2
128 | torchvision==0.8.1+cu92
129 | tornado==6.1
130 | tqdm==4.60.0
131 | traitlets==4.3.3
132 | transforms3d==0.3.1
133 | trimesh==3.9.20
134 | typing-extensions==3.7.4.3
135 | urllib3==1.26.4
136 | vtk==9.0.1
137 | wcwidth==0.2.5
138 | webencodings==0.5.1
139 | websocket-client==1.1.0
140 | Werkzeug==1.0.1
141 | widgetsnbextension==3.5.1
142 | yarl==1.6.3
143 | zipp==3.4.1
144 |
--------------------------------------------------------------------------------
/scripts/reconstruction.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Launch single-mesh reconstruction training in a detached screen session.
# FIX: pass the two placeholder args ("NA NA") that main.py expects —
# it reads sys.argv[2] to select the training mode, and invoking it with
# only the config path raised IndexError. This matches state_condition.sh.
screen -S train -dm bash -c "CUDA_VISIBLE_DEVICES=0 python ../main.py ../configs/single_reconstruct/config1.yaml NA NA; \
exec sh";
--------------------------------------------------------------------------------
/scripts/reconstruction_eval.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Evaluate the single-reconstruction model in a detached screen session.
# FIX: the shebang must be the very first line of the file — it was preceded
# by a blank line, so the kernel ignored the interpreter directive.
screen -S train -dm bash -c "CUDA_VISIBLE_DEVICES=1 python ../eval.py ../configs/single_reconstruct/config1.yaml ./logs_reconstruction_1/lightning_logs/version_0/checkpoints eval-single; \
exec sh";
--------------------------------------------------------------------------------
/scripts/state_condition.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | screen -S train -dm bash -c "CUDA_VISIBLE_DEVICES=1 python ../main.py ../configs/state_condition/config1.yaml NA NA; \
4 | exec sh";
--------------------------------------------------------------------------------
/scripts/state_condition_eval.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Evaluate the state-conditioned model in a detached screen session.
# FIX: the shebang must be the very first line of the file — it was preceded
# by a blank line, so the kernel ignored the interpreter directive.
screen -S train -dm bash -c "CUDA_VISIBLE_DEVICES=3 python ../eval.py ../configs/state_condition/config1.yaml ./logs_state-condition_1/lightning_logs/version_0/checkpoints eval-state-condition; \
exec sh";
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
"""Build script for the project's Cython/C extension modules (kd-tree, triangle hash)."""
try:
    from setuptools import setup
except ImportError:
    # fall back to distutils when setuptools is unavailable
    from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
from setuptools.command.build_ext import build_ext
import numpy

# Get the numpy include directory.
numpy_include_dir = numpy.get_include()

# Extensions
# pykdtree (kd tree)
# NOTE: -fopenmp / -lgomp are gcc-specific flags; building with another
# compiler (clang, MSVC) would need different options.
pykdtree = Extension(
    'utils.libkdtree.pykdtree.kdtree',
    sources=[
        'utils/libkdtree/pykdtree/kdtree.c',
        'utils/libkdtree/pykdtree/_kdtree_core.c'
    ],
    language='c',
    extra_compile_args=['-std=c99', '-O3', '-fopenmp'],
    extra_link_args=['-lgomp'],
    include_dirs=[numpy_include_dir]
)


# triangle hash (efficient mesh intersection)
triangle_hash_module = Extension(
    'utils.libmesh.triangle_hash',
    sources=[
        'utils/libmesh/triangle_hash.pyx'
    ],
    libraries=['m'],  # Unix-like specific
    include_dirs=[numpy_include_dir]
)


# Gather all extension modules
ext_modules = [
    pykdtree,
    triangle_hash_module,
]

setup(
    ext_modules=cythonize(ext_modules),
    include_dirs=[numpy.get_include()],
    cmdclass={
        'build_ext': build_ext
    }
)
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/utils/__init__.py
--------------------------------------------------------------------------------
/utils/common.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import glob
4 | import time
5 | import json
6 | import torch
7 | import random
8 | import shutil
9 | import logging
10 | import plyfile
11 | import trimesh
12 | import numpy as np
13 | import pyvista as pv
14 | from tqdm import tqdm
15 | import skimage.measure
16 | from torch.autograd import grad
17 | from mesh_to_sdf import sample_sdf_near_surface, get_surface_point_cloud, scale_to_unit_sphere
18 |
def set_visible(client, visual_data, visible=True):
    """Show or hide every recorded link color through the given client.

    `visual_data` maps body id -> {link index -> rgba color}; None is a no-op.
    Hiding paints each link fully transparent black (0, 0, 0, 0).
    """
    if visual_data is None:
        return
    hidden = (0, 0, 0, 0)
    for body_id, link_colors in visual_data.items():
        for link_index, rgba in link_colors.items():
            client.changeVisualShape(
                body_id, link_index,
                rgbaColor=rgba if visible else hidden
            )
28 |
def get_body_colors(client, body_id):
    """Return {link index: rgba color} for `body_id` from the client's visual shape data.

    Each visual-shape record carries the link index at position 1 and the rgba
    color at position 7.
    """
    return {info[1]: info[7] for info in client.getVisualShapeData(body_id)}
34 |
def mkdir(folder):
    """Create an empty directory at `folder`.

    WARNING: destructive — any existing directory (and its contents) at this
    path is deleted first, so the result is always a fresh, empty directory.
    """
    if os.path.exists(folder):
        shutil.rmtree(folder)  # wipe any previous contents
    os.makedirs(folder)
39 |
def gradient(y, x, grad_outputs=None):
    """Compute dy/dx with create_graph=True so higher-order derivatives work.

    reference: https://github.com/vsitzmann/siren

    :param y: output tensor.
    :param x: input tensor (requires_grad) to differentiate with respect to.
    :param grad_outputs: optional cotangent; defaults to ones_like(y).
    :return: gradient tensor with the shape of `x`.
    """
    if grad_outputs is None:
        grad_outputs = torch.ones_like(y)
    # FIX: named `dydx` (was `grad`) so we no longer shadow the
    # `torch.autograd.grad` name imported at the top of this module
    dydx = torch.autograd.grad(y, [x], grad_outputs=grad_outputs, create_graph=True)[0]
    return dydx
46 |
def convert_sdf_samples_to_ply(
    pytorch_3d_sdf_tensor,
    voxel_grid_origin,
    voxel_size,
    ply_filename_out,
    offset=None,
    scale=None,
):
    """
    Convert sdf samples to .ply
    :param pytorch_3d_sdf_tensor: a torch.FloatTensor of shape (n,n,n)
    :voxel_grid_origin: a list of three floats: the bottom, left, down origin of the voxel grid
    :voxel_size: float, the size of the voxels
    :ply_filename_out: string, path of the filename to save to
    This function adapted from: https://github.com/RobotLocomotion/spartan
    """

    start_time = time.time()

    numpy_3d_sdf_tensor = pytorch_3d_sdf_tensor.numpy()

    # default to empty geometry so a valid (empty) .ply is still written when
    # marching cubes fails (e.g. the volume contains no zero crossing)
    verts, faces, normals, values = np.zeros((0, 3)), np.zeros((0, 3)), np.zeros((0, 3)), np.zeros(0)
    try:
        verts, faces, normals, values = skimage.measure.marching_cubes_lewiner(
            numpy_3d_sdf_tensor, level=0.0, spacing=[voxel_size] * 3
        )
    except:
        # NOTE(review): bare except swallows every error, not just the
        # "surface not found" ValueError — consider narrowing; confirm intent
        pass

    # transform from voxel coordinates to camera coordinates
    # note x and y are flipped in the output of marching_cubes
    mesh_points = np.zeros_like(verts)
    mesh_points[:, 0] = voxel_grid_origin[0] + verts[:, 0]
    mesh_points[:, 1] = voxel_grid_origin[1] + verts[:, 1]
    mesh_points[:, 2] = voxel_grid_origin[2] + verts[:, 2]

    # apply additional offset and scale
    if scale is not None:
        mesh_points = mesh_points / scale
    if offset is not None:
        mesh_points = mesh_points - offset

    # try writing to the ply file

    num_verts = verts.shape[0]
    num_faces = faces.shape[0]

    # pack vertices into the structured dtype plyfile expects
    verts_tuple = np.zeros((num_verts,), dtype=[("x", "f4"), ("y", "f4"), ("z", "f4")])

    for i in range(0, num_verts):
        verts_tuple[i] = tuple(mesh_points[i, :])

    faces_building = []
    for i in range(0, num_faces):
        faces_building.append(((faces[i, :].tolist(),)))
    faces_tuple = np.array(faces_building, dtype=[("vertex_indices", "i4", (3,))])

    el_verts = plyfile.PlyElement.describe(verts_tuple, "vertex")
    el_faces = plyfile.PlyElement.describe(faces_tuple, "face")

    ply_data = plyfile.PlyData([el_verts, el_faces])
    logging.debug("saving mesh to %s" % (ply_filename_out))
    ply_data.write(ply_filename_out)

    logging.debug(
        "converting to ply format and writing to file took {} s".format(
            time.time() - start_time
        )
    )
117 |
118 |
119 |
def create_mesh(
    decoder, filename, N=1600, max_batch=64 ** 3, offset=None, scale=None
):
    """Evaluate *decoder* on an N^3 grid over [-1, 1]^3 and save the zero
    level set as ``<filename>.ply``.

    :param decoder: callable mapping (num_points, 3) cuda tensors to SDF values
    :param filename: output path without the '.ply' suffix
    :param N: grid resolution per axis
    :param max_batch: number of grid points evaluated per forward pass
    :param offset: forwarded to convert_sdf_samples_to_ply
    :param scale: forwarded to convert_sdf_samples_to_ply
    """
    start = time.time()
    ply_filename = filename

    decoder.eval()

    # NOTE: the voxel_origin is actually the (bottom, left, down) corner, not the middle
    voxel_origin = [-1, -1, -1]
    voxel_size = 2.0 / (N - 1)

    overall_index = torch.arange(0, N ** 3, 1, out=torch.LongTensor())
    samples = torch.zeros(N ** 3, 4)

    # transform first 3 columns to be the x, y, z index.
    # Floor division is required here: plain `/` on integer tensors performs
    # true (float) division on modern PyTorch, which corrupted the index math.
    samples[:, 2] = overall_index % N
    samples[:, 1] = (overall_index // N) % N
    samples[:, 0] = (overall_index // (N * N)) % N

    # transform first 3 columns
    # to be the x, y, z coordinate
    samples[:, 0] = (samples[:, 0] * voxel_size) + voxel_origin[2]
    samples[:, 1] = (samples[:, 1] * voxel_size) + voxel_origin[1]
    samples[:, 2] = (samples[:, 2] * voxel_size) + voxel_origin[0]

    num_samples = N ** 3

    samples.requires_grad = False

    # Evaluate the decoder in batches to bound GPU memory use; column 3
    # accumulates the SDF value for each grid point.
    head = 0
    while head < num_samples:
        print(head)
        tail = min(head + max_batch, num_samples)
        sample_subset = samples[head:tail, 0:3].cuda()

        samples[head:tail, 3] = (
            decoder(sample_subset)
            .squeeze()
            .detach()
            .cpu()
        )
        head += max_batch

    sdf_values = samples[:, 3]
    sdf_values = sdf_values.reshape(N, N, N)

    end = time.time()
    print("sampling takes: %f" % (end - start))

    convert_sdf_samples_to_ply(
        sdf_values.data.cpu(),
        voxel_origin,
        voxel_size,
        ply_filename + ".ply",
        offset,
        scale,
    )
179 |
def create_random_seed_split(seed=1, ratio=0.9):
    """Shuffle ids 0..9999 into a train/test split and dump it to json.

    :param seed: RNG seed; also embedded in the output filename so the file
        name identifies the split it contains.
    :param ratio: fraction of ids assigned to the train split.
    """
    # Seed the RNG so the split named by `seed` is reproducible. Previously
    # `seed` only appeared in the filename and the shuffle was nondeterministic.
    random.seed(seed)

    total_num = 10000
    ids = list(range(total_num))
    random.shuffle(ids)

    split_at = int(total_num * ratio)
    seq_dict = {
        'test': ids[split_at:],
        'train': ids[:split_at],
    }

    with open(f'../assets/datainfo/multiple_models_data_split_dict_{seed}.json', 'w') as file:
        json.dump(seq_dict, file, indent=4)
196 |
def convert_ply_to_xyzn(folder='./saved_meshes'):
    """Convert every mesh_*.ply in *folder* to a point cloud file (.xyzn).

    The cloud reuses the mesh vertices, colors and normals unchanged.
    """
    import open3d as o3d
    for ply_path in tqdm(glob.glob(os.path.join(folder, 'mesh_*.ply'))):
        triangle_mesh = o3d.io.read_triangle_mesh(ply_path)
        cloud = o3d.geometry.PointCloud()
        # Carry the vertex attributes over as-is.
        cloud.points = triangle_mesh.vertices
        cloud.colors = triangle_mesh.vertex_colors
        cloud.normals = triangle_mesh.vertex_normals
        o3d.io.write_point_cloud(ply_path.replace('ply', 'xyzn'), cloud)
207 |
def convert_ply_to_sdf_old(folder='./saved_meshes'):
    """Sample SDF values near each mesh_*.ply surface and save them as .npz.

    Legacy variant: samples live in unit-sphere coordinates (no transform
    back to the original mesh frame).
    """
    os.environ['PYOPENGL_PLATFORM'] = 'egl'
    for ply_path in tqdm(glob.glob(os.path.join(folder, 'mesh_*.ply'))):
        unit_mesh = scale_to_unit_sphere(trimesh.load(ply_path))
        cloud = get_surface_point_cloud(unit_mesh)
        points, sdf = cloud.sample_sdf_near_surface(number_of_points=250000)
        np.savez(ply_path.replace('ply', 'npz'), points=points, sdf=sdf)
217 |
def convert_ply_to_sdf(p_file):
    """Sample SDF values near the surface of mesh *p_file*, map them back to
    the mesh's original coordinate frame, and save them next to it as .npz.
    """
    mesh = trimesh.load(p_file)
    points, sdf = sample_sdf_near_surface(mesh, 250000)
    # sample_sdf_near_surface operates on the unit-sphere-normalized mesh;
    # undo that normalization so samples live in the original mesh frame.
    translation, scale = compute_unit_sphere_transform(mesh)
    points = (points / scale) - translation
    sdf /= scale
    # Swap only the extension ('.ply' -> '.npz'): str.replace('ply', 'npz')
    # would also rewrite a 'ply' substring anywhere else in the path.
    np.savez(os.path.splitext(p_file)[0] + '.npz', points=points, sdf=sdf)
225 |
def convert_ply_to_sdf_parallel(folder='./saved_meshes'):
    """Run convert_ply_to_sdf over every mesh_*.ply in *folder* with a pool
    of 8 worker processes, showing a progress bar.
    """
    os.environ['PYOPENGL_PLATFORM'] = 'egl'
    from multiprocessing import Pool

    all_ply_files = glob.glob(os.path.join(folder, 'mesh_*.ply'))
    progress = tqdm(total=len(all_ply_files))

    def on_complete(*_):
        progress.update()

    def on_error(exc):
        # apply_async without an error_callback silently drops worker
        # exceptions and leaves the progress bar short; log and keep counting.
        logging.error('convert_ply_to_sdf failed: %s', exc)
        progress.update()

    with Pool(8) as pool:
        for p_file in all_ply_files:
            pool.apply_async(convert_ply_to_sdf, args=(p_file,),
                             callback=on_complete, error_callback=on_error)
        pool.close()
        pool.join()
240 |
def compute_unit_sphere_transform(mesh):
    """
    returns translation and scale, which is applied to meshes before computing their SDF cloud
    """
    # Mirrors the transform of mesh_to_sdf.scale_to_unit_sphere(mesh):
    # recenter on the bounding-box centroid, then shrink so every vertex
    # lies within the unit sphere.
    shift = -mesh.bounding_box.centroid
    radius = np.max(np.linalg.norm(mesh.vertices + shift, axis=1))
    return shift, 1 / radius
249 |
250 | # cmake -GNinja -DVTK_BUILD_TESTING=OFF -DVTK_WHEEL_BUILD=ON -DVTK_PYTHON_VERSION=3 -DVTK_WRAP_PYTHON=ON -DVTK_OPENGL_HAS_EGL=True -DVTK_USE_X=False -DPython3_EXECUTABLE=$PYBIN ../
def render_screenshot_for_multiple_conditional_angles(folder, save_to_folder):
    """Render each <angle>.ply mesh in *folder* to a labelled screenshot in
    *save_to_folder*, processing files in increasing angle order.
    """
    mkdir(save_to_folder)
    pv.set_plot_theme("document")

    # Sort by the numeric angle but keep the original filenames on disk:
    # reconstructing the name from str(float(...)) breaks for spellings like
    # '0.50.ply' whose float repr differs from the on-disk name.
    filenames = sorted(os.listdir(folder),
                       key=lambda name: float(name.replace('.ply', '')))

    for filename in tqdm(filenames):
        p_angle = float(filename.replace('.ply', ''))
        mesh = pv.read(os.path.join(folder, filename))

        plotter = pv.Plotter(off_screen=True)
        plotter.add_mesh(mesh, color='#454545')
        plotter.add_text(str(p_angle), name='angle')
        plotter.show(screenshot=os.path.join(save_to_folder, f'{p_angle}.png'))
        plotter.close()
271 |
def render_saved_screenshot_to_video(folder='angles_rendered_angle_0_pi_pi'):
    """Rename the per-angle screenshots in *folder* into a 0..n-1 frame
    sequence (sorted by angle) and encode them into <folder>.mp4 via ffmpeg.
    """
    save_to_folder = folder + '_renamed'
    mkdir(save_to_folder)

    # Sort by the numeric angle but move the files under their original
    # names: round-tripping through float() breaks for spellings like
    # '0.50.png' whose float repr differs from the on-disk name.
    filenames = sorted(os.listdir(folder),
                       key=lambda name: float(name.replace('.png', '')))

    for idx, filename in enumerate(filenames):
        os.rename(os.path.join(folder, filename),
                  os.path.join(save_to_folder, f'{idx}.png'))

    # NOTE(review): folder names are interpolated into a shell command; keep
    # them free of spaces/shell metacharacters (or switch to subprocess.run).
    os.system(f'ffmpeg -r 1 -i {save_to_folder}/%01d.png -vcodec mpeg4 -y {folder}.mp4')
--------------------------------------------------------------------------------
/utils/libkdtree/.gitignore:
--------------------------------------------------------------------------------
1 | build
2 |
--------------------------------------------------------------------------------
/utils/libkdtree/LICENSE.txt:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007, 2015 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 | You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 | a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 | is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
166 |
--------------------------------------------------------------------------------
/utils/libkdtree/MANIFEST.in:
--------------------------------------------------------------------------------
1 | exclude pykdtree/render_template.py
2 | include LICENSE.txt
3 |
--------------------------------------------------------------------------------
/utils/libkdtree/README:
--------------------------------------------------------------------------------
1 | README.rst
--------------------------------------------------------------------------------
/utils/libkdtree/README.rst:
--------------------------------------------------------------------------------
1 | .. image:: https://travis-ci.org/storpipfugl/pykdtree.svg?branch=master
2 | :target: https://travis-ci.org/storpipfugl/pykdtree
3 | .. image:: https://ci.appveyor.com/api/projects/status/ubo92368ktt2d25g/branch/master
4 | :target: https://ci.appveyor.com/project/storpipfugl/pykdtree
5 |
6 | ========
7 | pykdtree
8 | ========
9 |
10 | Objective
11 | ---------
12 | pykdtree is a kd-tree implementation for fast nearest neighbour search in Python.
13 | The aim is to be the fastest implementation around for common use cases (low dimensions and low number of neighbours) for both tree construction and queries.
14 |
15 | The implementation is based on scipy.spatial.cKDTree and libANN by combining the best features from both and focus on implementation efficiency.
16 |
17 | The interface is similar to that of scipy.spatial.cKDTree except only Euclidean distance measure is supported.
18 |
19 | Queries are optionally multithreaded using OpenMP.
20 |
21 | Installation
22 | ------------
23 | Default build of pykdtree with OpenMP enabled queries using libgomp
24 |
25 | .. code-block:: bash
26 |
27 | $ cd
28 | $ python setup.py install
29 |
30 | If it fails with undefined compiler flags or you want to use another OpenMP implementation please modify setup.py at the indicated point to match your system.
31 |
32 | Building without OpenMP support is controlled by the USE_OMP environment variable
33 |
34 | .. code-block:: bash
35 |
36 | $ cd
37 | $ export USE_OMP=0
38 | $ python setup.py install
39 |
40 | Note environment variables are by default not exported when using sudo so in this case do
41 |
42 | .. code-block:: bash
43 |
44 | $ USE_OMP=0 sudo -E python setup.py install
45 |
46 | Usage
47 | -----
48 | The usage of pykdtree is similar to scipy.spatial.cKDTree so for now refer to its documentation
49 |
50 | >>> from pykdtree.kdtree import KDTree
51 | >>> kd_tree = KDTree(data_pts)
52 | >>> dist, idx = kd_tree.query(query_pts, k=8)
53 |
54 | The number of threads to be used in OpenMP enabled queries can be controlled with the standard OpenMP environment variable OMP_NUM_THREADS.
55 |
56 | The **leafsize** argument (number of data points per leaf) for the tree creation can be used to control the memory overhead of the kd-tree. pykdtree uses a default **leafsize=16**.
57 | Increasing **leafsize** will reduce the memory overhead and construction time but increase query time.
58 |
59 | pykdtree accepts data in double precision (numpy.float64) or single precision (numpy.float32) floating point. If data of another type is used an internal copy in double precision is made resulting in a memory overhead. If the kd-tree is constructed on single precision data the query points must be single precision as well.
60 |
61 | Benchmarks
62 | ----------
63 | Comparison with scipy.spatial.cKDTree and libANN. This benchmark is on geospatial 3D data with 10053632 data points and 4276224 query points. The results are indexed relative to the construction time of scipy.spatial.cKDTree. A leafsize of 10 (scipy.spatial.cKDTree default) is used.
64 |
65 | Note: libANN is *not* thread safe. In this benchmark libANN is compiled with "-O3 -funroll-loops -ffast-math -fprefetch-loop-arrays" in order to achieve optimum performance.
66 |
67 | ================== ===================== ====== ======== ==================
68 | Operation scipy.spatial.cKDTree libANN pykdtree pykdtree 4 threads
69 | ------------------ --------------------- ------ -------- ------------------
70 |
71 | Construction 100 304 96 96
72 |
73 | query 1 neighbour 1267 294 223 70
74 |
75 | Total 1 neighbour 1367 598 319 166
76 |
77 | query 8 neighbours 2193 625 449 143
78 |
79 | Total 8 neighbours 2293 929 545 293
80 | ================== ===================== ====== ======== ==================
81 |
82 | Looking at the combined construction and query this gives the following performance improvement relative to scipy.spatial.cKDTree
83 |
84 | ========== ====== ======== ==================
85 | Neighbours libANN pykdtree pykdtree 4 threads
86 | ---------- ------ -------- ------------------
87 | 1 129% 329% 723%
88 |
89 | 8 147% 320% 682%
90 | ========== ====== ======== ==================
91 |
92 | Note: mileage will vary with the dataset at hand and computer architecture.
93 |
94 | Test
95 | ----
96 | Run the unit tests using nosetest
97 |
98 | .. code-block:: bash
99 |
100 | $ cd
101 | $ python setup.py nosetests
102 |
103 | Installing on AppVeyor
104 | ----------------------
105 |
106 | Pykdtree requires the "stdint.h" header file which is not available on certain
107 | versions of Windows or certain Windows compilers including those on the
108 | continuous integration platform AppVeyor. To get around this the header file(s)
109 | can be downloaded and placed in the correct "include" directory. This can
110 | be done by adding the `anaconda/missing-headers.ps1` script to your repository
111 | and running it the install step of `appveyor.yml`:
112 |
113 | # install missing headers that aren't included with MSVC 2008
114 | # https://github.com/omnia-md/conda-recipes/pull/524
115 | - "powershell ./appveyor/missing-headers.ps1"
116 |
117 | In addition to this, AppVeyor does not support OpenMP so this feature must be
118 | turned off by adding the following to `appveyor.yml` in the
119 | `environment` section:
120 |
121 | environment:
122 | global:
123 | # Don't build with openmp because it isn't supported in appveyor's compilers
124 | USE_OMP: "0"
125 |
126 | Changelog
127 | ---------
128 | v1.3.1 : Fix masking in the "query" method introduced in 1.3.0
129 |
130 | v1.3.0 : Keyword argument "mask" added to "query" method. OpenMP compilation now works for MS Visual Studio compiler
131 |
132 | v1.2.2 : Build process fixes
133 |
134 | v1.2.1 : Fixed OpenMP thread safety issue introduced in v1.2.0
135 |
136 | v1.2.0 : 64 and 32 bit MSVC Windows support added
137 |
138 | v1.1.1 : Same as v1.1 release due to incorrect pypi release
139 |
140 | v1.1 : Build process improvements. Add data attribute to kdtree class for scipy interface compatibility
141 |
142 | v1.0 : Switched license from GPLv3 to LGPLv3
143 |
144 | v0.3 : Avoid zipping of installed egg
145 |
146 | v0.2 : Reduced memory footprint. Can now handle single precision data internally avoiding copy conversion to double precision. Default leafsize changed from 10 to 16 as this reduces the memory footprint and makes it a cache line multiple (negligible if any query performance observed in benchmarks). Reduced memory allocation for leaf nodes. Applied patch for building on OS X.
147 |
148 | v0.1 : Initial version.
149 |
--------------------------------------------------------------------------------
/utils/libkdtree/__init__.py:
--------------------------------------------------------------------------------
from .pykdtree.kdtree import KDTree


# __all__ entries must be strings; listing the class object itself makes
# `from <package> import *` raise a TypeError.
__all__ = [
    "KDTree",
]
7 |
--------------------------------------------------------------------------------
/utils/libkdtree/pykdtree/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/utils/libkdtree/pykdtree/__init__.py
--------------------------------------------------------------------------------
/utils/libkdtree/pykdtree/kdtree.cpython-36m-x86_64-linux-gnu.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/utils/libkdtree/pykdtree/kdtree.cpython-36m-x86_64-linux-gnu.so
--------------------------------------------------------------------------------
/utils/libkdtree/pykdtree/kdtree.pyx:
--------------------------------------------------------------------------------
1 | #pykdtree, Fast kd-tree implementation with OpenMP-enabled queries
2 | #
3 | #Copyright (C) 2013 - present Esben S. Nielsen
4 | #
5 | # This program is free software: you can redistribute it and/or modify it under
6 | # the terms of the GNU Lesser General Public License as published by the Free
7 | # Software Foundation, either version 3 of the License, or
8 | #(at your option) any later version.
9 | #
10 | # This program is distributed in the hope that it will be useful, but WITHOUT
11 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12 | # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
13 | # details.
14 | #
15 | # You should have received a copy of the GNU Lesser General Public License along
16 | # with this program. If not, see <https://www.gnu.org/licenses/>.
17 |
18 | import numpy as np
19 | cimport numpy as np
20 | from libc.stdint cimport uint32_t, int8_t, uint8_t
21 | cimport cython
22 |
23 |
24 | # Node structure
25 | cdef struct node_float:
26 | float cut_val
27 | int8_t cut_dim
28 | uint32_t start_idx
29 | uint32_t n
30 | float cut_bounds_lv
31 | float cut_bounds_hv
32 | node_float *left_child
33 | node_float *right_child
34 |
35 | cdef struct tree_float:
36 | float *bbox
37 | int8_t no_dims
38 | uint32_t *pidx
39 | node_float *root
40 |
41 | cdef struct node_double:
42 | double cut_val
43 | int8_t cut_dim
44 | uint32_t start_idx
45 | uint32_t n
46 | double cut_bounds_lv
47 | double cut_bounds_hv
48 | node_double *left_child
49 | node_double *right_child
50 |
51 | cdef struct tree_double:
52 | double *bbox
53 | int8_t no_dims
54 | uint32_t *pidx
55 | node_double *root
56 |
57 | cdef extern tree_float* construct_tree_float(float *pa, int8_t no_dims, uint32_t n, uint32_t bsp) nogil
58 | cdef extern void search_tree_float(tree_float *kdtree, float *pa, float *point_coords, uint32_t num_points, uint32_t k, float distance_upper_bound, float eps_fac, uint8_t *mask, uint32_t *closest_idxs, float *closest_dists) nogil
59 | cdef extern void delete_tree_float(tree_float *kdtree)
60 |
61 | cdef extern tree_double* construct_tree_double(double *pa, int8_t no_dims, uint32_t n, uint32_t bsp) nogil
62 | cdef extern void search_tree_double(tree_double *kdtree, double *pa, double *point_coords, uint32_t num_points, uint32_t k, double distance_upper_bound, double eps_fac, uint8_t *mask, uint32_t *closest_idxs, double *closest_dists) nogil
63 | cdef extern void delete_tree_double(tree_double *kdtree)
64 |
65 | cdef class KDTree:
66 | """kd-tree for fast nearest-neighbour lookup.
67 | The interface is made to resemble the scipy.spatial kd-tree except
68 | only Euclidean distance measure is supported.
69 |
70 | :Parameters:
71 | data_pts : numpy array
72 | Data points with shape (n , dims)
73 | leafsize : int, optional
74 | Maximum number of data points in tree leaf
75 | """
76 |
77 | cdef tree_float *_kdtree_float
78 | cdef tree_double *_kdtree_double
79 | cdef readonly np.ndarray data_pts
80 | cdef readonly np.ndarray data
81 | cdef float *_data_pts_data_float
82 | cdef double *_data_pts_data_double
83 | cdef readonly uint32_t n
84 | cdef readonly int8_t ndim
85 | cdef readonly uint32_t leafsize
86 |
87 | def __cinit__(KDTree self):
88 | self._kdtree_float = NULL
89 | self._kdtree_double = NULL
90 |
91 | def __init__(KDTree self, np.ndarray data_pts not None, int leafsize=16):
92 |
93 | # Check arguments
94 | if leafsize < 1:
95 | raise ValueError('leafsize must be greater than zero')
96 |
97 | # Get data content
98 | cdef np.ndarray[float, ndim=1] data_array_float
99 | cdef np.ndarray[double, ndim=1] data_array_double
100 |
101 | if data_pts.dtype == np.float32:
102 | data_array_float = np.ascontiguousarray(data_pts.ravel(), dtype=np.float32)
103 | self._data_pts_data_float = data_array_float.data
104 | self.data_pts = data_array_float
105 | else:
106 | data_array_double = np.ascontiguousarray(data_pts.ravel(), dtype=np.float64)
107 | self._data_pts_data_double = data_array_double.data
108 | self.data_pts = data_array_double
109 |
110 | # scipy interface compatibility
111 | self.data = self.data_pts
112 |
113 | # Get tree info
114 | self.n = data_pts.shape[0]
115 | self.leafsize = leafsize
116 | if data_pts.ndim == 1:
117 | self.ndim = 1
118 | else:
119 | self.ndim = data_pts.shape[1]
120 |
121 | # Release GIL and construct tree
122 | if data_pts.dtype == np.float32:
123 | with nogil:
124 | self._kdtree_float = construct_tree_float(self._data_pts_data_float, self.ndim,
125 | self.n, self.leafsize)
126 | else:
127 | with nogil:
128 | self._kdtree_double = construct_tree_double(self._data_pts_data_double, self.ndim,
129 | self.n, self.leafsize)
130 |
131 |
132 | def query(KDTree self, np.ndarray query_pts not None, k=1, eps=0,
133 | distance_upper_bound=None, sqr_dists=False, mask=None):
134 | """Query the kd-tree for nearest neighbors
135 |
136 | :Parameters:
137 | query_pts : numpy array
138 | Query points with shape (m, dims)
139 | k : int
140 | The number of nearest neighbours to return
141 | eps : non-negative float
142 | Return approximate nearest neighbours; the k-th returned value
143 | is guaranteed to be no further than (1 + eps) times the distance
144 | to the real k-th nearest neighbour
145 | distance_upper_bound : non-negative float
146 | Return only neighbors within this distance.
147 | This is used to prune tree searches.
148 | sqr_dists : bool, optional
149 | Internally pykdtree works with squared distances.
150 | Determines if the squared or Euclidean distances are returned.
151 | mask : numpy array, optional
152 | Array of booleans where neighbors are considered invalid and
153 | should not be returned. A mask value of True represents an
154 | invalid pixel. Mask should have shape (n,) to match data points.
155 | By default all points are considered valid.
156 |
157 | """
158 |
159 | # Check arguments
160 | if k < 1:
161 | raise ValueError('Number of neighbours must be greater than zero')
162 | elif eps < 0:
163 | raise ValueError('eps must be non-negative')
164 | elif distance_upper_bound is not None:
165 | if distance_upper_bound < 0:
166 | raise ValueError('distance_upper_bound must be non negative')
167 |
168 | # Check dimensions
169 | if query_pts.ndim == 1:
170 | q_ndim = 1
171 | else:
172 | q_ndim = query_pts.shape[1]
173 |
174 | if self.ndim != q_ndim:
175 | raise ValueError('Data and query points must have same dimensions')
176 |
177 | if self.data_pts.dtype == np.float32 and query_pts.dtype != np.float32:
178 | raise TypeError('Type mismatch. query points must be of type float32 when data points are of type float32')
179 |
180 | # Get query info
181 | cdef uint32_t num_qpoints = query_pts.shape[0]
182 | cdef uint32_t num_n = k
183 | cdef np.ndarray[uint32_t, ndim=1] closest_idxs = np.empty(num_qpoints * k, dtype=np.uint32)
184 | cdef np.ndarray[float, ndim=1] closest_dists_float
185 | cdef np.ndarray[double, ndim=1] closest_dists_double
186 |
187 |
188 | # Set up return arrays
189 | cdef uint32_t *closest_idxs_data = closest_idxs.data
190 | cdef float *closest_dists_data_float
191 | cdef double *closest_dists_data_double
192 |
193 | # Get query points data
194 | cdef np.ndarray[float, ndim=1] query_array_float
195 | cdef np.ndarray[double, ndim=1] query_array_double
196 | cdef float *query_array_data_float
197 | cdef double *query_array_data_double
198 | cdef np.ndarray[np.uint8_t, ndim=1] query_mask
199 | cdef np.uint8_t *query_mask_data
200 |
201 | if mask is not None and mask.size != self.n:
202 | raise ValueError('Mask must have the same size as data points')
203 | elif mask is not None:
204 | query_mask = np.ascontiguousarray(mask.ravel(), dtype=np.uint8)
205 | query_mask_data = query_mask.data
206 | else:
207 | query_mask_data = NULL
208 |
209 |
210 | if query_pts.dtype == np.float32 and self.data_pts.dtype == np.float32:
211 | closest_dists_float = np.empty(num_qpoints * k, dtype=np.float32)
212 | closest_dists = closest_dists_float
213 | closest_dists_data_float = closest_dists_float.data
214 | query_array_float = np.ascontiguousarray(query_pts.ravel(), dtype=np.float32)
215 | query_array_data_float = query_array_float.data
216 | else:
217 | closest_dists_double = np.empty(num_qpoints * k, dtype=np.float64)
218 | closest_dists = closest_dists_double
219 | closest_dists_data_double = closest_dists_double.data
220 | query_array_double = np.ascontiguousarray(query_pts.ravel(), dtype=np.float64)
221 | query_array_data_double = query_array_double.data
222 |
223 | # Setup distance_upper_bound
224 | cdef float dub_float
225 | cdef double dub_double
226 | if distance_upper_bound is None:
227 | if self.data_pts.dtype == np.float32:
228 | dub_float = np.finfo(np.float32).max
229 | else:
230 | dub_double = np.finfo(np.float64).max
231 | else:
232 | if self.data_pts.dtype == np.float32:
233 | dub_float = (distance_upper_bound * distance_upper_bound)
234 | else:
235 | dub_double = (distance_upper_bound * distance_upper_bound)
236 |
237 | # Set epsilon
238 | cdef double epsilon_float = eps
239 | cdef double epsilon_double = eps
240 |
241 | # Release GIL and query tree
242 | if self.data_pts.dtype == np.float32:
243 | with nogil:
244 | search_tree_float(self._kdtree_float, self._data_pts_data_float,
245 | query_array_data_float, num_qpoints, num_n, dub_float, epsilon_float,
246 | query_mask_data, closest_idxs_data, closest_dists_data_float)
247 |
248 | else:
249 | with nogil:
250 | search_tree_double(self._kdtree_double, self._data_pts_data_double,
251 | query_array_data_double, num_qpoints, num_n, dub_double, epsilon_double,
252 | query_mask_data, closest_idxs_data, closest_dists_data_double)
253 |
254 | # Shape result
255 | if k > 1:
256 | closest_dists_res = closest_dists.reshape(num_qpoints, k)
257 | closest_idxs_res = closest_idxs.reshape(num_qpoints, k)
258 | else:
259 | closest_dists_res = closest_dists
260 | closest_idxs_res = closest_idxs
261 |
262 | if distance_upper_bound is not None: # Mark out of bounds results
263 | if self.data_pts.dtype == np.float32:
264 | idx_out = (closest_dists_res >= dub_float)
265 | else:
266 | idx_out = (closest_dists_res >= dub_double)
267 |
268 | closest_dists_res[idx_out] = np.Inf
269 | closest_idxs_res[idx_out] = self.n
270 |
271 | if not sqr_dists: # Return actual cartesian distances
272 | closest_dists_res = np.sqrt(closest_dists_res)
273 |
274 | return closest_dists_res, closest_idxs_res
275 |
    def __dealloc__(KDTree self):
        """Free the underlying C k-d tree when this object is collected.

        Only one of the two tree pointers is deleted: presumably exactly one
        tree is ever allocated, matching the per-dtype dispatch seen in
        query (float32 data -> float tree, otherwise double tree) --
        NOTE(review): confirm against the constructor, which is outside
        this view.
        """
        if self._kdtree_float != NULL:
            delete_tree_float(self._kdtree_float)
        elif self._kdtree_double != NULL:
            delete_tree_double(self._kdtree_double)
281 |
--------------------------------------------------------------------------------
/utils/libkdtree/pykdtree/render_template.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
"""Render ``_kdtree_core.c`` from its Mako template.

Must be run from the pykdtree source directory, where
``_kdtree_core.c.mako`` lives; writes ``_kdtree_core.c`` next to it.
"""

from mako.template import Template


def main():
    """Render the template and write the generated C source file."""
    template = Template(filename='_kdtree_core.c.mako')
    with open('_kdtree_core.c', 'w') as fp:
        fp.write(template.render())


# Guarding the entry point avoids rendering/writing the file as a side
# effect of a mere import; running the script behaves exactly as before.
if __name__ == '__main__':
    main()
8 |
--------------------------------------------------------------------------------
/utils/libkdtree/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_rpm]
2 | requires=numpy
3 | release=1
4 |
5 |
6 |
--------------------------------------------------------------------------------
/utils/libmesh/.gitignore:
--------------------------------------------------------------------------------
1 | triangle_hash.cpp
2 | build
3 |
--------------------------------------------------------------------------------
/utils/libmesh/__init__.py:
--------------------------------------------------------------------------------
from .inside_mesh import (
    check_mesh_contains, MeshIntersector, TriangleIntersector2d
)


# __all__ must contain the *names* of the public objects as strings, not
# the objects themselves; the original listed the objects, which breaks
# `from utils.libmesh import *` semantics and any string-based tooling.
__all__ = [
    'check_mesh_contains', 'MeshIntersector', 'TriangleIntersector2d'
]
9 |
--------------------------------------------------------------------------------
/utils/libmesh/inside_mesh.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from .triangle_hash import TriangleHash as _TriangleHash
3 |
4 |
def check_mesh_contains(mesh, points, hash_resolution=512):
    """Return a boolean mask telling which of `points` lie inside `mesh`.

    Convenience wrapper: builds a MeshIntersector for `mesh` with the
    given spatial-hash resolution and queries it once with `points`.
    """
    return MeshIntersector(mesh, hash_resolution).query(points)
9 |
10 |
class MeshIntersector:
    """Tests whether 3d points lie inside a closed triangle mesh.

    Triangles are rescaled into a ``[0.5, resolution - 0.5]^3`` grid and
    their x/y projections indexed in a 2d spatial hash.  A containment
    query casts a vertical (z-axis) ray through each point and compares
    the parity of triangle crossings on both sides of the point; a point
    is reported inside only when both parities agree and are odd.
    """

    def __init__(self, mesh, resolution=512):
        """Build the intersector for `mesh` (needs .vertices and .faces)."""
        # Gather per-face vertex coordinates, shape (n_tri, 3, 3).
        triangles = mesh.vertices[mesh.faces].astype(np.float64)
        n_tri = triangles.shape[0]

        self.resolution = resolution
        self.bbox_min = triangles.reshape(3 * n_tri, 3).min(axis=0)
        self.bbox_max = triangles.reshape(3 * n_tri, 3).max(axis=0)
        # Translate and scale it to [0.5, self.resolution - 0.5]^3
        self.scale = (resolution - 1) / (self.bbox_max - self.bbox_min)
        self.translate = 0.5 - self.scale * self.bbox_min

        self._triangles = triangles = self.rescale(triangles)

        # Index the x/y projections of the triangles for candidate lookup.
        triangles2d = triangles[:, :, :2]
        self._tri_intersector2d = TriangleIntersector2d(
            triangles2d, resolution)

    def query(self, points):
        """Return a bool array of len(points): True where inside the mesh."""
        # Rescale points into the grid coordinate frame.
        points = self.rescale(points)

        # Placeholder result with no hits we'll fill in later.
        # (np.bool was removed in NumPy 1.24; builtin bool is the
        # supported spelling of the same dtype.)
        contains = np.zeros(len(points), dtype=bool)

        # Cull points outside of the axis-aligned bounding box; this
        # avoids running ray tests unless points are close.
        inside_aabb = np.all(
            (0 <= points) & (points <= self.resolution), axis=1)
        if not inside_aabb.any():
            return contains

        # Only consider points inside bounding box
        mask = inside_aabb
        points = points[mask]

        # Candidate (point, triangle) pairs whose x/y projections overlap.
        points_indices, tri_indices = self._tri_intersector2d.query(points[:, :2])

        triangles_intersect = self._triangles[tri_indices]
        points_intersect = points[points_indices]

        depth_intersect, abs_n_2 = self.compute_intersection_depth(
            points_intersect, triangles_intersect)

        # Count number of intersections in both directions along the ray.
        smaller_depth = depth_intersect >= points_intersect[:, 2] * abs_n_2
        bigger_depth = depth_intersect < points_intersect[:, 2] * abs_n_2
        points_indices_0 = points_indices[smaller_depth]
        points_indices_1 = points_indices[bigger_depth]

        nintersect0 = np.bincount(points_indices_0, minlength=points.shape[0])
        nintersect1 = np.bincount(points_indices_1, minlength=points.shape[0])

        # Inside iff the crossing count on each side is odd; disagreement
        # indicates a degenerate or non-watertight configuration.
        contains1 = (np.mod(nintersect0, 2) == 1)
        contains2 = (np.mod(nintersect1, 2) == 1)
        if (contains1 != contains2).any():
            print('Warning: contains1 != contains2 for some points.')
        contains[mask] = (contains1 & contains2)
        return contains

    def compute_intersection_depth(self, points, triangles):
        """Return (depth * |n_z|, |n_z|) per (point, triangle) pair.

        Depth is the z coordinate of the vertical-ray/plane intersection,
        scaled by |n_z| so no division is needed; entries for triangles
        with n_z == 0 (parallel to the ray) are left as NaN.
        """
        t1 = triangles[:, 0, :]
        t2 = triangles[:, 1, :]
        t3 = triangles[:, 2, :]

        v1 = t3 - t1
        v2 = t2 - t1

        normals = np.cross(v1, v2)
        alpha = np.sum(normals[:, :2] * (t1[:, :2] - points[:, :2]), axis=1)

        n_2 = normals[:, 2]
        t1_2 = t1[:, 2]
        s_n_2 = np.sign(n_2)
        abs_n_2 = np.abs(n_2)

        # Triangles whose plane contains the z direction have no unique
        # intersection depth; keep their entries NaN.
        mask = (abs_n_2 != 0)

        depth_intersect = np.full(points.shape[0], np.nan)
        depth_intersect[mask] = \
            t1_2[mask] * abs_n_2[mask] + alpha[mask] * s_n_2[mask]
        return depth_intersect, abs_n_2

    def rescale(self, array):
        """Map mesh-space coordinates into the grid coordinate frame."""
        array = self.scale * array + self.translate
        return array
111 |
112 |
class TriangleIntersector2d:
    """Finds (point, triangle) pairs whose 2d projections intersect.

    A spatial hash (_TriangleHash) proposes candidate pairs cheaply; an
    exact barycentric test then keeps only true containments.
    """

    def __init__(self, triangles, resolution=128):
        """Index `triangles` (shape (n, 3, 2)) in a spatial hash."""
        self.triangles = triangles
        self.tri_hash = _TriangleHash(triangles, resolution)

    def query(self, points):
        """Return (point_indices, tri_indices) for strict containments."""
        point_indices, tri_indices = self.tri_hash.query(points)
        point_indices = np.array(point_indices, dtype=np.int64)
        tri_indices = np.array(tri_indices, dtype=np.int64)
        points = points[point_indices]
        triangles = self.triangles[tri_indices]
        mask = self.check_triangles(points, triangles)
        point_indices = point_indices[mask]
        tri_indices = tri_indices[mask]
        return point_indices, tri_indices

    def check_triangles(self, points, triangles):
        """Return a bool mask: points[i] strictly inside triangles[i].

        Solves the 2x2 barycentric system without division by scaling
        both sides with sign(det A); zero-area triangles test False.
        """
        # np.bool was removed in NumPy 1.24; builtin bool is the same dtype.
        contains = np.zeros(points.shape[0], dtype=bool)
        A = triangles[:, :2] - triangles[:, 2:]
        A = A.transpose([0, 2, 1])
        y = points - triangles[:, 2]

        detA = A[:, 0, 0] * A[:, 1, 1] - A[:, 0, 1] * A[:, 1, 0]

        # Degenerate triangles (det == 0) cannot contain any point.
        mask = (np.abs(detA) != 0.)
        A = A[mask]
        y = y[mask]
        detA = detA[mask]

        s_detA = np.sign(detA)
        abs_detA = np.abs(detA)

        u = (A[:, 1, 1] * y[:, 0] - A[:, 0, 1] * y[:, 1]) * s_detA
        v = (-A[:, 1, 0] * y[:, 0] + A[:, 0, 0] * y[:, 1]) * s_detA

        # Strict inequalities: boundary points are not counted as inside.
        sum_uv = u + v
        contains[mask] = (
            (0 < u) & (u < abs_detA) & (0 < v) & (v < abs_detA)
            & (0 < sum_uv) & (sum_uv < abs_detA)
        )
        return contains
154 |
155 |
--------------------------------------------------------------------------------
/utils/libmesh/triangle_hash.cpython-36m-x86_64-linux-gnu.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BoyuanChen/visual-selfmodeling/67b6df654f12c6ad8cc3778eabe217bbe9ef915c/utils/libmesh/triangle_hash.cpython-36m-x86_64-linux-gnu.so
--------------------------------------------------------------------------------
/utils/libmesh/triangle_hash.pyx:
--------------------------------------------------------------------------------
1 |
2 | # distutils: language=c++
3 | import numpy as np
4 | cimport numpy as np
5 | cimport cython
6 | from libcpp.vector cimport vector
7 | from libc.math cimport floor, ceil
8 |
cdef class TriangleHash:
    """2d spatial hash over a regular grid.

    Each cell of a resolution x resolution grid stores the indices of all
    triangles whose (clamped) bounding box overlaps that cell; `query`
    then returns candidate (point, triangle) pairs for exact testing.
    """
    # Flattened grid: bucket for cell (x, y) lives at resolution * x + y.
    cdef vector[vector[int]] spatial_hash
    cdef int resolution

    def __cinit__(self, double[:, :, :] triangles, int resolution):
        # One bucket per grid cell.
        self.spatial_hash.resize(resolution * resolution)
        self.resolution = resolution
        self._build_hash(triangles)

    @cython.boundscheck(False) # Deactivate bounds checking
    @cython.wraparound(False)  # Deactivate negative indexing.
    cdef int _build_hash(self, double[:, :, :] triangles):
        # Expects triangles of shape (n_tri, 3, 2): 2d vertices only.
        assert(triangles.shape[1] == 3)
        assert(triangles.shape[2] == 2)

        cdef int n_tri = triangles.shape[0]
        cdef int bbox_min[2]
        cdef int bbox_max[2]

        cdef int i_tri, j, x, y
        cdef int spatial_idx

        for i_tri in range(n_tri):
            # Compute bounding box
            # NOTE(review): the min/max are doubles assigned into int
            # arrays, so both bounds are truncated toward zero here
            # (C cast), then clamped to the grid below.
            for j in range(2):
                bbox_min[j] = min(
                    triangles[i_tri, 0, j], triangles[i_tri, 1, j], triangles[i_tri, 2, j]
                )
                bbox_max[j] = max(
                    triangles[i_tri, 0, j], triangles[i_tri, 1, j], triangles[i_tri, 2, j]
                )
                bbox_min[j] = min(max(bbox_min[j], 0), self.resolution - 1)
                bbox_max[j] = min(max(bbox_max[j], 0), self.resolution - 1)

            # Find all voxels where bounding box intersects
            for x in range(bbox_min[0], bbox_max[0] + 1):
                for y in range(bbox_min[1], bbox_max[1] + 1):
                    spatial_idx = self.resolution * x + y
                    self.spatial_hash[spatial_idx].push_back(i_tri)

    @cython.boundscheck(False) # Deactivate bounds checking
    @cython.wraparound(False)  # Deactivate negative indexing.
    cpdef query(self, double[:, :] points):
        """Return (point_indices, tri_indices) int32 arrays pairing every
        query point with every triangle hashed into the point's cell.
        Points falling outside the grid produce no pairs."""
        assert(points.shape[1] == 2)
        cdef int n_points = points.shape[0]

        cdef vector[int] points_indices
        cdef vector[int] tri_indices
        # cdef int[:] points_indices_np
        # cdef int[:] tri_indices_np

        # NOTE(review): i_tri is not included in this cdef, so the inner
        # loop over the bucket iterates at Python-object speed.
        cdef int i_point, k, x, y
        cdef int spatial_idx

        for i_point in range(n_points):
            # Cell coordinates via truncation; skip out-of-grid points.
            x = int(points[i_point, 0])
            y = int(points[i_point, 1])
            if not (0 <= x < self.resolution and 0 <= y < self.resolution):
                continue

            spatial_idx = self.resolution * x + y
            for i_tri in self.spatial_hash[spatial_idx]:
                points_indices.push_back(i_point)
                tri_indices.push_back(i_tri)

        # Copy the C++ vectors into numpy arrays for the Python caller.
        points_indices_np = np.zeros(points_indices.size(), dtype=np.int32)
        tri_indices_np = np.zeros(tri_indices.size(), dtype=np.int32)

        cdef int[:] points_indices_view = points_indices_np
        cdef int[:] tri_indices_view = tri_indices_np

        for k in range(points_indices.size()):
            points_indices_view[k] = points_indices[k]

        for k in range(tri_indices.size()):
            tri_indices_view[k] = tri_indices[k]

        return points_indices_np, tri_indices_np
87 |
--------------------------------------------------------------------------------