├── .gitignore ├── LICENSE ├── README.md ├── config_files ├── smpl2smplh.yaml ├── smpl2smplx.yaml ├── smplh2smpl.yaml ├── smplh2smplx.yaml ├── smplh2smplx_as.yaml ├── smplh2smplx_onepose.yaml ├── smplx2smpl.yaml └── smplx2smplh.yaml ├── examples ├── demo.py ├── demo_layers.py ├── vis_flame_vertices.py └── vis_mano_vertices.py ├── images ├── example.png └── teaser_fig.png ├── optional-requirements.txt ├── requirements.txt ├── setup.py ├── smplx ├── __init__.py ├── body_models.py ├── joint_names.py ├── lbs.py ├── utils.py ├── vertex_ids.py └── vertex_joint_selector.py ├── tools ├── README.md ├── __init__.py ├── clean_ch.py └── merge_smplh_mano.py ├── transfer_data └── support_data │ └── github_data │ └── amass_sample.npz └── transfer_model ├── README.md ├── __init__.py ├── __main__.py ├── config ├── __init__.py ├── body_model_defaults.py ├── cmd_parser.py ├── dataset_defaults.py ├── defaults.py ├── loss_defaults.py ├── optim_defaults.py └── utils_cfg.py ├── data ├── __init__.py ├── build.py └── datasets │ ├── __init__.py │ └── mesh.py ├── docs ├── images │ └── smpl_smplx_correspondence.png └── transfer.md ├── losses ├── __init__.py ├── losses.py └── utils.py ├── merge_output.py ├── optimizers ├── __init__.py ├── minimize.py └── optim_factory.py ├── requirements.txt ├── transfer_model.py ├── utils ├── __init__.py ├── def_transfer.py ├── mesh_utils.py ├── metrics.py ├── np_utils.py ├── o3d_utils.py ├── pose_utils.py ├── timer.py ├── torch_utils.py └── typing.py ├── view_pkl.py └── write_obj.py /.gitignore: -------------------------------------------------------------------------------- 1 | #### joe made this: http://goel.io/joe 2 | 3 | #####=== Python ===##### 4 | 5 | # Byte-compiled / optimized / DLL files 6 | __pycache__/ 7 | *.py[cod] 8 | *$py.class 9 | 10 | # C extensions 11 | *.so 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 
26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # pyenv 80 | .python-version 81 | 82 | # celery beat schedule file 83 | celerybeat-schedule 84 | 85 | # SageMath parsed files 86 | *.sage.py 87 | 88 | # Environments 89 | .env 90 | .venv 91 | env/ 92 | venv/ 93 | ENV/ 94 | env.bak/ 95 | venv.bak/ 96 | 97 | # Spyder project settings 98 | .spyderproject 99 | .spyproject 100 | 101 | # Rope project settings 102 | .ropeproject 103 | 104 | # mkdocs documentation 105 | /site 106 | 107 | # mypy 108 | .mypy_cache/ 109 | 110 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | License 2 | 3 | Software Copyright License for non-commercial scientific research purposes 4 | Please read carefully the following terms and conditions and any accompanying documentation before you download and/or use the SMPL-X/SMPLify-X model, data and software, (the "Model & Software"), including 3D meshes, blend weights, blend shapes, textures, software, scripts, and animations. 
By downloading and/or using the Model & Software (including downloading, cloning, installing, and any other use of this github repository), you acknowledge that you have read these terms and conditions, understand them, and agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use the Model & Software. Any infringement of the terms of this agreement will automatically terminate your rights under this License 5 | 6 | Ownership / Licensees 7 | The Software and the associated materials has been developed at the 8 | 9 | Max Planck Institute for Intelligent Systems (hereinafter "MPI"). 10 | 11 | Any copyright or patent right is owned by and proprietary material of the 12 | 13 | Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (hereinafter “MPG”; MPI and MPG hereinafter collectively “Max-Planck”) 14 | 15 | hereinafter the “Licensor”. 16 | 17 | License Grant 18 | Licensor grants you (Licensee) personally a single-user, non-exclusive, non-transferable, free of charge right: 19 | 20 | To install the Model & Software on computers owned, leased or otherwise controlled by you and/or your organization; 21 | To use the Model & Software for the sole purpose of performing non-commercial scientific research, non-commercial education, or non-commercial artistic projects; 22 | Any other use, in particular any use for commercial, pornographic, military, or surveillance, purposes is prohibited. This includes, without limitation, incorporation in a commercial product, use in a commercial service, or production of other artifacts for commercial purposes. The Data & Software may not be used to create fake, libelous, misleading, or defamatory content of any kind excluding analyses in peer-reviewed scientific research. The Data & Software may not be reproduced, modified and/or made available in any form to any third party without Max-Planck’s prior written permission. 
23 | 24 | The Data & Software may not be used for pornographic purposes or to generate pornographic material whether commercial or not. This license also prohibits the use of the Software to train methods/algorithms/neural networks/etc. for commercial, pornographic, military, surveillance, or defamatory use of any kind. By downloading the Data & Software, you agree not to reverse engineer it. 25 | 26 | No Distribution 27 | The Model & Software and the license herein granted shall not be copied, shared, distributed, re-sold, offered for re-sale, transferred or sub-licensed in whole or in part except that you may make one copy for archive purposes only. 28 | 29 | Disclaimer of Representations and Warranties 30 | You expressly acknowledge and agree that the Model & Software results from basic research, is provided “AS IS”, may contain errors, and that any use of the Model & Software is at your sole risk. LICENSOR MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE MODEL & SOFTWARE, NEITHER EXPRESS NOR IMPLIED, AND THE ABSENCE OF ANY LEGAL OR ACTUAL DEFECTS, WHETHER DISCOVERABLE OR NOT. Specifically, and not to limit the foregoing, licensor makes no representations or warranties (i) regarding the merchantability or fitness for a particular purpose of the Model & Software, (ii) that the use of the Model & Software will not infringe any patents, copyrights or other intellectual property rights of a third party, and (iii) that the use of the Model & Software will not cause any damage of any kind to you or a third party. 31 | 32 | Limitation of Liability 33 | Because this Model & Software License Agreement qualifies as a donation, according to Section 521 of the German Civil Code (Bürgerliches Gesetzbuch – BGB) Licensor as a donor is liable for intent and gross negligence only. If the Licensor fraudulently conceals a legal or material defect, they are obliged to compensate the Licensee for the resulting damage. 
34 | Licensor shall be liable for loss of data only up to the amount of typical recovery costs which would have arisen had proper and regular data backup measures been taken. For the avoidance of doubt Licensor shall be liable in accordance with the German Product Liability Act in the event of product liability. The foregoing applies also to Licensor’s legal representatives or assistants in performance. Any further liability shall be excluded. 35 | Patent claims generated through the usage of the Model & Software cannot be directed towards the copyright holders. 36 | The Model & Software is provided in the state of development the licensor defines. If modified or extended by Licensee, the Licensor makes no claims about the fitness of the Model & Software and is not responsible for any problems such modifications cause. 37 | 38 | No Maintenance Services 39 | You understand and agree that Licensor is under no obligation to provide either maintenance services, update services, notices of latent defects, or corrections of defects with regard to the Model & Software. Licensor nevertheless reserves the right to update, modify, or discontinue the Model & Software at any time. 40 | 41 | Defects of the Model & Software must be notified in writing to the Licensor with a comprehensible description of the error symptoms. The notification of the defect should enable the reproduction of the error. The Licensee is encouraged to communicate any use, results, modification or publication. 42 | 43 | Publications using the Model & Software 44 | You acknowledge that the Model & Software is a valuable scientific resource and agree to appropriately reference the following paper in any publication making use of the Model & Software. 
45 | 46 | Citation: 47 | 48 | 49 | @inproceedings{SMPL-X:2019, 50 | title = {Expressive Body Capture: 3D Hands, Face, and Body from a Single Image}, 51 | author = {Pavlakos, Georgios and Choutas, Vasileios and Ghorbani, Nima and Bolkart, Timo and Osman, Ahmed A. A. and Tzionas, Dimitrios and Black, Michael J.}, 52 | booktitle = {Proceedings IEEE Conf. on Computer Vision and Pattern Recognition (CVPR)}, 53 | year = {2019} 54 | } 55 | Commercial licensing opportunities 56 | For commercial uses of the Software, please send email to ps-license@tue.mpg.de 57 | 58 | This Agreement shall be governed by the laws of the Federal Republic of Germany except for the UN Sales Convention. 59 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## SMPL-X: A new joint 3D model of the human body, face and hands together 2 | 3 | [[Paper Page](https://smpl-x.is.tue.mpg.de)] [[Paper](https://ps.is.tuebingen.mpg.de/uploads_file/attachment/attachment/497/SMPL-X.pdf)] 4 | [[Supp. 
Mat.](https://ps.is.tuebingen.mpg.de/uploads_file/attachment/attachment/498/SMPL-X-supp.pdf)] 5 | 6 | ![SMPL-X Examples](./images/teaser_fig.png) 7 | 8 | ## Table of Contents 9 | * [License](#license) 10 | * [Description](#description) 11 | * [News](#news) 12 | * [Installation](#installation) 13 | * [Downloading the model](#downloading-the-model) 14 | * [Loading SMPL-X, SMPL+H and SMPL](#loading-smpl-x-smplh-and-smpl) 15 | * [SMPL and SMPL+H setup](#smpl-and-smplh-setup) 16 | * [Model loading](https://github.com/vchoutas/smplx#model-loading) 17 | * [MANO and FLAME correspondences](#mano-and-flame-correspondences) 18 | * [Example](#example) 19 | * [Modifying the global pose of the model](#modifying-the-global-pose-of-the-model) 20 | * [Citation](#citation) 21 | * [Acknowledgments](#acknowledgments) 22 | * [Contact](#contact) 23 | 24 | ## License 25 | 26 | Software Copyright License for **non-commercial scientific research purposes**. 27 | Please read carefully the [terms and conditions](https://github.com/vchoutas/smplx/blob/master/LICENSE) and any accompanying documentation before you download and/or use the SMPL-X/SMPLify-X model, data and software, (the "Model & Software"), including 3D meshes, blend weights, blend shapes, textures, software, scripts, and animations. By downloading and/or using the Model & Software (including downloading, cloning, installing, and any other use of this github repository), you acknowledge that you have read these terms and conditions, understand them, and agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use the Model & Software. Any infringement of the terms of this agreement will automatically terminate your rights under this [License](./LICENSE). 28 | 29 | ## Disclaimer 30 | 31 | The original images used for the figures 1 and 2 of the paper can be found in this link. 32 | The images in the paper are used under license from gettyimages.com. 
33 | We have acquired the right to use them in the publication, but redistribution is not allowed. 34 | Please follow the instructions on the given link to acquire right of usage. 35 | Our results are obtained on the 483 × 724 pixels resolution of the original images. 36 | 37 | ## Description 38 | 39 | *SMPL-X* (SMPL eXpressive) is a unified body model with shape parameters trained jointly for the 40 | face, hands and body. *SMPL-X* uses standard vertex based linear blend skinning with learned corrective blend 41 | shapes, has N = 10, 475 vertices and K = 54 joints, 42 | which include joints for the neck, jaw, eyeballs and fingers. 43 | SMPL-X is defined by a function M(θ, β, ψ), where θ is the pose parameters, β the shape parameters and 44 | ψ the facial expression parameters. 45 | 46 | ## News 47 | 48 | - 3 November 2020: We release the code to transfer between the models in the 49 | SMPL family. For more details on the code, go to this [readme 50 | file](./transfer_model/README.md). A detailed explanation on how the mappings 51 | were extracted can be found [here](./transfer_model/docs/transfer.md). 52 | - 23 September 2020: A UV map is now available for SMPL-X, please check the 53 | Downloads section of the website. 54 | - 20 August 2020: The full shape and expression space of SMPL-X are now available. 55 | 56 | ## Installation 57 | 58 | To install the model please follow the next steps in the specified order: 59 | 1. To install from PyPi simply run: 60 | ```Shell 61 | pip install smplx[all] 62 | ``` 63 | 2. Clone this repository and install it using the *setup.py* script: 64 | ```Shell 65 | git clone https://github.com/vchoutas/smplx 66 | python setup.py install 67 | ``` 68 | 69 | ## Downloading the model 70 | 71 | To download the *SMPL-X* model go to [this project website](https://smpl-x.is.tue.mpg.de) and register to get access to the downloads section. 
72 | 73 | To download the *SMPL+H* model go to [this project website](http://mano.is.tue.mpg.de) and register to get access to the downloads section. 74 | 75 | To download the *SMPL* model go to [this](http://smpl.is.tue.mpg.de) (male and female models) and [this](http://smplify.is.tue.mpg.de) (gender neutral model) project website and register to get access to the downloads section. 76 | 77 | ## Loading SMPL-X, SMPL+H and SMPL 78 | 79 | ### SMPL and SMPL+H setup 80 | 81 | The loader gives the option to use any of the SMPL-X, SMPL+H, SMPL, and MANO models. Depending on the model you want to use, please follow the respective download instructions. To switch between MANO, SMPL, SMPL+H and SMPL-X just change the *model_path* or *model_type* parameters. For more details please check the docs of the model classes. 82 | Before using SMPL and SMPL+H you should follow the instructions in [tools/README.md](./tools/README.md) to remove the 83 | Chumpy objects from both model pkls, as well as merge the MANO parameters with SMPL+H. 84 | 85 | ### Model loading 86 | 87 | You can either use the [create](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L54) 88 | function from [body_models](./smplx/body_models.py) or directly call the constructor for the 89 | [SMPL](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L106), 90 | [SMPL+H](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L395) and 91 | [SMPL-X](https://github.com/vchoutas/smplx/blob/c63c02b478c5c6f696491ed9167e3af6b08d89b1/smplx/body_models.py#L628) model. 
The path to the model can either be the path to the file with the parameters or a directory with the following structure: 92 | ```bash 93 | models 94 | ├── smpl 95 | │   ├── SMPL_FEMALE.pkl 96 | │   └── SMPL_MALE.pkl 97 | │   └── SMPL_NEUTRAL.pkl 98 | ├── smplh 99 | │   ├── SMPLH_FEMALE.pkl 100 | │   └── SMPLH_MALE.pkl 101 | ├── mano 102 | | ├── MANO_RIGHT.pkl 103 | | └── MANO_LEFT.pkl 104 | └── smplx 105 | ├── SMPLX_FEMALE.npz 106 | ├── SMPLX_FEMALE.pkl 107 | ├── SMPLX_MALE.npz 108 | ├── SMPLX_MALE.pkl 109 | ├── SMPLX_NEUTRAL.npz 110 | └── SMPLX_NEUTRAL.pkl 111 | ``` 112 | 113 | 114 | ## MANO and FLAME correspondences 115 | 116 | The vertex correspondences between SMPL-X and MANO, FLAME can be downloaded 117 | from [the project website](https://smpl-x.is.tue.mpg.de). If you have extracted 118 | the correspondence data in the folder *correspondences*, then use the following 119 | scripts to visualize them: 120 | 121 | 1. To view MANO correspondences run the following command: 122 | 123 | ``` 124 | python examples/vis_mano_vertices.py --model-folder $SMPLX_FOLDER --corr-fname correspondences/MANO_SMPLX_vertex_ids.pkl 125 | ``` 126 | 127 | 2. To view FLAME correspondences run the following command: 128 | 129 | ``` 130 | python examples/vis_flame_vertices.py --model-folder $SMPLX_FOLDER --corr-fname correspondences/SMPL-X__FLAME_vertex_ids.npy 131 | ``` 132 | 133 | ## Example 134 | 135 | After installing the *smplx* package and downloading the model parameters you should be able to run the *demo.py* 136 | script to visualize the results. For this step you have to install the [pyrender](https://pyrender.readthedocs.io/en/latest/index.html) and [trimesh](https://trimsh.org/) packages. 137 | 138 | `python examples/demo.py --model-folder $SMPLX_FOLDER --plot-joints=True --gender="neutral"` 139 | 140 | ![SMPL-X Examples](./images/example.png) 141 | 142 | ## Modifying the global pose of the model 143 | 144 | If you want to modify the global pose of the model, i.e. 
the root rotation and 145 | translation, to a new coordinate system for example, you need to take into 146 | account that the model rotation uses the pelvis as the center of rotation. A 147 | more detailed description can be found in the following 148 | [link](https://www.dropbox.com/scl/fi/zkatuv5shs8d4tlwr8ecc/Change-parameters-to-new-coordinate-system.paper?dl=0&rlkey=lotq1sh6wzkmyttisc05h0in0). 149 | If something is not clear, please let me know so that I can update the 150 | description. 151 | 152 | ## Citation 153 | 154 | Depending on which model is loaded for your project, i.e. SMPL-X or SMPL+H or SMPL, please cite the most relevant work below, listed in the same order: 155 | 156 | ``` 157 | @inproceedings{SMPL-X:2019, 158 | title = {Expressive Body Capture: 3D Hands, Face, and Body from a Single Image}, 159 | author = {Pavlakos, Georgios and Choutas, Vasileios and Ghorbani, Nima and Bolkart, Timo and Osman, Ahmed A. A. and Tzionas, Dimitrios and Black, Michael J.}, 160 | booktitle = {Proceedings IEEE Conf. on Computer Vision and Pattern Recognition (CVPR)}, 161 | year = {2019} 162 | } 163 | ``` 164 | 165 | ``` 166 | @article{MANO:SIGGRAPHASIA:2017, 167 | title = {Embodied Hands: Modeling and Capturing Hands and Bodies Together}, 168 | author = {Romero, Javier and Tzionas, Dimitrios and Black, Michael J.}, 169 | journal = {ACM Transactions on Graphics, (Proc. SIGGRAPH Asia)}, 170 | volume = {36}, 171 | number = {6}, 172 | series = {245:1--245:17}, 173 | month = nov, 174 | year = {2017}, 175 | month_numeric = {11} 176 | } 177 | ``` 178 | 179 | ``` 180 | @article{SMPL:2015, 181 | author = {Loper, Matthew and Mahmood, Naureen and Romero, Javier and Pons-Moll, Gerard and Black, Michael J.}, 182 | title = {{SMPL}: A Skinned Multi-Person Linear Model}, 183 | journal = {ACM Transactions on Graphics, (Proc. 
SIGGRAPH Asia)}, 184 | month = oct, 185 | number = {6}, 186 | pages = {248:1--248:16}, 187 | publisher = {ACM}, 188 | volume = {34}, 189 | year = {2015} 190 | } 191 | ``` 192 | 193 | This repository was originally developed for SMPL-X / SMPLify-X (CVPR 2019), you might be interested in having a look: [https://smpl-x.is.tue.mpg.de](https://smpl-x.is.tue.mpg.de). 194 | 195 | ## Acknowledgments 196 | 197 | ### Facial Contour 198 | 199 | Special thanks to [Soubhik Sanyal](https://github.com/soubhiksanyal) for sharing the Tensorflow code used for the facial 200 | landmarks. 201 | 202 | ## Contact 203 | The code of this repository was implemented by [Vassilis Choutas](vassilis.choutas@tuebingen.mpg.de). 204 | 205 | For questions, please contact [smplx@tue.mpg.de](smplx@tue.mpg.de). 206 | 207 | For commercial licensing (and all related questions for business applications), please contact [ps-licensing@tue.mpg.de](ps-licensing@tue.mpg.de). 208 | -------------------------------------------------------------------------------- /config_files/smpl2smplh.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 'transfer_data/meshes/smpl' 4 | deformation_transfer_path: 'transfer_data/smpl2smplh_def_transfer.pkl' 5 | mask_ids_fname: '' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 'trust-ncg' 13 | maxiters: 100 14 | gtol: 1e-06 15 | 16 | body_model: 17 | model_type: "smplh" 18 | # SMPL+H has no neutral model, so we have to manually select the gender 19 | gender: "female" 20 | # gender: "male" 21 | folder: "transfer_data/body_models" 22 | use_compressed: False 23 | smplh: 24 | betas: 25 | num: 10 26 | -------------------------------------------------------------------------------- /config_files/smpl2smplx.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 
'transfer_data/meshes/smpl' 4 | deformation_transfer_path: 'transfer_data/smpl2smplx_deftrafo_setup.pkl' 5 | mask_ids_fname: 'smplx_mask_ids.npy' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 'trust-ncg' 13 | maxiters: 100 14 | gtol: 1e-06 15 | 16 | body_model: 17 | model_type: "smplx" 18 | gender: "neutral" 19 | folder: "transfer_data/body_models" 20 | use_compressed: False 21 | use_face_contour: True 22 | smplx: 23 | betas: 24 | num: 10 25 | expression: 26 | num: 10 27 | -------------------------------------------------------------------------------- /config_files/smplh2smpl.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 'transfer_data/meshes/smplh' 4 | deformation_transfer_path: 'transfer_data/smplh2smpl_def_transfer.pkl' 5 | mask_ids_fname: '' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 'trust-ncg' 13 | maxiters: 100 14 | gtol: 1e-06 15 | 16 | body_model: 17 | model_type: "smpl" 18 | gender: "neutral" 19 | folder: "transfer_data/body_models" 20 | use_compressed: False 21 | use_face_contour: True 22 | smpl: 23 | betas: 24 | num: 10 25 | -------------------------------------------------------------------------------- /config_files/smplh2smplx.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 'transfer_data/meshes/smplh' 4 | deformation_transfer_path: 'transfer_data/smplh2smplx_deftrafo_setup.pkl' 5 | mask_ids_fname: 'smplx_mask_ids.npy' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 'trust-ncg' 13 | maxiters: 100 14 | gtol: 1e-06 15 | 16 | body_model: 17 | model_type: "smplx" 18 | gender: "neutral" 19 | folder: "transfer_data/body_models" 20 | use_compressed: False 21 | use_face_contour: True 22 | smplx: 23 | betas: 24 | num: 10 25 | expression: 26 | 
num: 10 27 | -------------------------------------------------------------------------------- /config_files/smplh2smplx_as.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 'transfer_data/meshes/amass_sample' 4 | deformation_transfer_path: 'transfer_data/smplh2smplx_deftrafo_setup.pkl' 5 | mask_ids_fname: 'smplx_mask_ids.npy' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 'trust-ncg' 13 | maxiters: 100 14 | gtol: 1e-06 15 | 16 | body_model: 17 | model_type: "smplx" 18 | gender: "neutral" 19 | folder: "models" 20 | use_compressed: False 21 | use_face_contour: True 22 | smplx: 23 | betas: 24 | num: 10 25 | expression: 26 | num: 10 27 | -------------------------------------------------------------------------------- /config_files/smplh2smplx_onepose.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 'transfer_data/meshes/amass_onepose' 4 | deformation_transfer_path: 'transfer_data/smplh2smplx_deftrafo_setup.pkl' 5 | mask_ids_fname: 'smplx_mask_ids.npy' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 'adam' 13 | lr: 0.1 14 | maxiters: 10000 15 | gtol: 1e-06 16 | 17 | body_model: 18 | model_type: "smplx" 19 | gender: "neutral" 20 | folder: "models" 21 | use_compressed: False 22 | use_face_contour: True 23 | smplx: 24 | betas: 25 | num: 10 26 | expression: 27 | num: 10 28 | -------------------------------------------------------------------------------- /config_files/smplx2smpl.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 'meshes/smplx' 4 | deformation_transfer_path: 'transfer_data/smplx2smpl_deftrafo_setup.pkl' 5 | mask_ids_fname: '' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 
'lbfgs' 13 | maxiters: 200 14 | gtol: 1e-06 15 | 16 | body_model: 17 | model_type: "smpl" 18 | gender: "neutral" 19 | ext: 'pkl' 20 | folder: "transfer_data/body_models" 21 | use_compressed: False 22 | use_face_contour: True 23 | smpl: 24 | betas: 25 | num: 10 26 | -------------------------------------------------------------------------------- /config_files/smplx2smplh.yaml: -------------------------------------------------------------------------------- 1 | datasets: 2 | mesh_folder: 3 | data_folder: 'meshes/smplx' 4 | deformation_transfer_path: 'transfer_data/smplx2smplh_deftrafo_setup.pkl' 5 | mask_ids_fname: '' 6 | summary_steps: 100 7 | 8 | edge_fitting: 9 | per_part: False 10 | 11 | optim: 12 | type: 'lbfgs' 13 | maxiters: 200 14 | gtol: 1e-06 15 | 16 | body_model: 17 | model_type: "smplh" 18 | # SMPL+H has no neutral model, so we have to manually select the gender 19 | gender: "female" 20 | # gender: "male" 21 | ext: 'pkl' 22 | folder: "transfer_data/body_models" 23 | use_compressed: False 24 | use_face_contour: True 25 | smplh: 26 | betas: 27 | num: 10 28 | -------------------------------------------------------------------------------- /examples/demo.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: ps-license@tuebingen.mpg.de

import os.path as osp
import argparse

import numpy as np
import torch

import smplx


def _plot_pyrender(vertices, joints, faces, plot_joints):
    # Render the mesh (and optionally the joints) with pyrender / trimesh.
    import pyrender
    import trimesh

    vertex_colors = np.ones([vertices.shape[0], 4]) * [0.3, 0.3, 0.3, 0.8]
    tri_mesh = trimesh.Trimesh(vertices, faces,
                               vertex_colors=vertex_colors)

    mesh = pyrender.Mesh.from_trimesh(tri_mesh)

    scene = pyrender.Scene()
    scene.add(mesh)

    if plot_joints:
        # One small red sphere per joint, instanced through a stack of
        # 4x4 transforms whose translation column holds the joint position.
        sm = trimesh.creation.uv_sphere(radius=0.005)
        sm.visual.vertex_colors = [0.9, 0.1, 0.1, 1.0]
        tfs = np.tile(np.eye(4), (len(joints), 1, 1))
        tfs[:, :3, 3] = joints
        joints_pcl = pyrender.Mesh.from_trimesh(sm, poses=tfs)
        scene.add(joints_pcl)

    pyrender.Viewer(scene, use_raymond_lighting=True)


def _plot_matplotlib(vertices, joints, faces, plot_joints):
    # Render the mesh (and optionally the joints) with matplotlib.
    from matplotlib import pyplot as plt
    # Importing Axes3D registers the '3d' projection on older matplotlib
    # versions; the name itself is intentionally unused.
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401
    from mpl_toolkits.mplot3d.art3d import Poly3DCollection

    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')

    mesh = Poly3DCollection(vertices[faces], alpha=0.1)
    face_color = (1.0, 1.0, 0.9)
    edge_color = (0, 0, 0)
    mesh.set_edgecolor(edge_color)
    mesh.set_facecolor(face_color)
    ax.add_collection3d(mesh)

    # FIX: the original scattered the joints unconditionally (and a second
    # time, semi-transparent, when plot_joints was set). Joints are now only
    # drawn when requested, consistent with the other plotting backends.
    if plot_joints:
        ax.scatter(joints[:, 0], joints[:, 1], joints[:, 2], color='r')
    plt.show()


def _plot_open3d(vertices, joints, faces, plot_joints):
    # Render the mesh (and optionally the joints) with Open3D.
    import open3d as o3d

    mesh = o3d.geometry.TriangleMesh()
    mesh.vertices = o3d.utility.Vector3dVector(vertices)
    mesh.triangles = o3d.utility.Vector3iVector(faces)
    mesh.compute_vertex_normals()
    mesh.paint_uniform_color([0.3, 0.3, 0.3])

    geometry = [mesh]
    if plot_joints:
        joints_pcl = o3d.geometry.PointCloud()
        joints_pcl.points = o3d.utility.Vector3dVector(joints)
        joints_pcl.paint_uniform_color([0.7, 0.3, 0.3])
        geometry.append(joints_pcl)

    o3d.visualization.draw_geometries(geometry)


def main(model_folder,
         model_type='smplx',
         ext='npz',
         gender='neutral',
         plot_joints=False,
         num_betas=10,
         sample_shape=True,
         sample_expression=True,
         num_expression_coeffs=10,
         plotting_module='pyrender',
         use_face_contour=False):
    """Load a body model, run one forward pass and visualize the result.

    Args:
        model_folder: Path to the folder (or file) with the model parameters.
        model_type: One of 'smpl', 'smplh', 'smplx', 'mano', 'flame'.
        ext: Extension of the parameter files to load ('npz' or 'pkl').
        gender: Gender of the model to load.
        plot_joints: If True, also render the model joints.
        num_betas: Number of shape coefficients.
        sample_shape: If True, draw a random shape vector.
        sample_expression: If True, draw a random expression vector.
        num_expression_coeffs: Number of expression coefficients.
        plotting_module: Visualization backend, one of 'pyrender',
            'matplotlib' or 'open3d'.
        use_face_contour: If True, compute the facial contour landmarks.

    Raises:
        ValueError: If ``plotting_module`` is not a known backend.
    """
    model = smplx.create(model_folder, model_type=model_type,
                         gender=gender, use_face_contour=use_face_contour,
                         num_betas=num_betas,
                         num_expression_coeffs=num_expression_coeffs,
                         ext=ext)
    print(model)

    betas, expression = None, None
    if sample_shape:
        betas = torch.randn([1, model.num_betas], dtype=torch.float32)
    if sample_expression:
        expression = torch.randn(
            [1, model.num_expression_coeffs], dtype=torch.float32)

    output = model(betas=betas, expression=expression,
                   return_verts=True)
    vertices = output.vertices.detach().cpu().numpy().squeeze()
    joints = output.joints.detach().cpu().numpy().squeeze()

    print('Vertices shape =', vertices.shape)
    print('Joints shape =', joints.shape)

    # Dispatch to the requested backend; each helper imports its own
    # (optional) third-party dependencies lazily.
    plotters = {
        'pyrender': _plot_pyrender,
        'matplotlib': _plot_matplotlib,
        'open3d': _plot_open3d,
    }
    try:
        plotter = plotters[plotting_module]
    except KeyError:
        raise ValueError('Unknown plotting_module: {}'.format(plotting_module))
    plotter(vertices, joints, model.faces, plot_joints)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='SMPL-X Demo')

    parser.add_argument('--model-folder', required=True, type=str,
                        help='The path to the model folder')
    parser.add_argument('--model-type', default='smplx', type=str,
                        choices=['smpl', 'smplh', 'smplx', 'mano', 'flame'],
                        help='The type of model to load')
    parser.add_argument('--gender', type=str, default='neutral',
                        help='The gender of the model')
    parser.add_argument('--num-betas', default=10, type=int,
                        dest='num_betas',
                        help='Number of shape coefficients.')
    parser.add_argument('--num-expression-coeffs', default=10, type=int,
                        dest='num_expression_coeffs',
                        help='Number of expression coefficients.')
    parser.add_argument('--plotting-module', type=str, default='pyrender',
                        dest='plotting_module',
                        choices=['pyrender', 'matplotlib', 'open3d'],
                        help='The module to use for plotting the result')
    parser.add_argument('--ext', type=str, default='npz',
                        help='Which extension to use for loading')
    # FIX: help text was a copy-paste of the --model-folder help.
    parser.add_argument('--plot-joints', default=False,
                        type=lambda arg: arg.lower() in ['true', '1'],
                        help='Whether to plot the joints of the body model')
    parser.add_argument('--sample-shape', default=True,
                        dest='sample_shape',
                        type=lambda arg: arg.lower() in ['true', '1'],
                        help='Sample a random shape')
    parser.add_argument('--sample-expression', default=True,
                        dest='sample_expression',
                        type=lambda arg: arg.lower() in ['true', '1'],
                        help='Sample a random expression')
    parser.add_argument('--use-face-contour', default=False,
                        type=lambda arg: arg.lower() in ['true', '1'],
                        help='Compute the contour of the face')

    args = parser.parse_args()

    model_folder = osp.expanduser(osp.expandvars(args.model_folder))
    model_type = args.model_type
    plot_joints = args.plot_joints
    use_face_contour = args.use_face_contour
    gender = args.gender
    ext = args.ext
    plotting_module = args.plotting_module
    num_betas = args.num_betas
    num_expression_coeffs = args.num_expression_coeffs
    sample_shape = args.sample_shape
    sample_expression = args.sample_expression

    main(model_folder, model_type, ext=ext,
         gender=gender, plot_joints=plot_joints,
         num_betas=num_betas,
         num_expression_coeffs=num_expression_coeffs,
         sample_shape=sample_shape,
         sample_expression=sample_expression,
         plotting_module=plotting_module,
         use_face_contour=use_face_contour)
use_face_contour=use_face_contour) 181 | -------------------------------------------------------------------------------- /examples/demo_layers.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: ps-license@tuebingen.mpg.de 16 | 17 | import os.path as osp 18 | import argparse 19 | 20 | import numpy as np 21 | import torch 22 | 23 | import smplx 24 | 25 | 26 | def main(model_folder, 27 | model_type='smplx', 28 | ext='npz', 29 | gender='neutral', 30 | plot_joints=False, 31 | num_betas=10, 32 | sample_shape=True, 33 | sample_expression=True, 34 | num_expression_coeffs=10, 35 | plotting_module='pyrender', 36 | use_face_contour=False): 37 | 38 | model = smplx.build_layer( 39 | model_folder, model_type=model_type, 40 | gender=gender, use_face_contour=use_face_contour, 41 | num_betas=num_betas, 42 | num_expression_coeffs=num_expression_coeffs, 43 | ext=ext) 44 | print(model) 45 | 46 | betas, expression = None, None 47 | if sample_shape: 48 | betas = torch.randn([1, model.num_betas], dtype=torch.float32) 49 | if sample_expression: 50 | expression = torch.randn( 51 | [1, model.num_expression_coeffs], dtype=torch.float32) 52 | 53 | output = model(betas=betas, expression=expression, 54 | return_verts=True) 55 | vertices = 
output.vertices.detach().cpu().numpy().squeeze() 56 | joints = output.joints.detach().cpu().numpy().squeeze() 57 | 58 | print('Vertices shape =', vertices.shape) 59 | print('Joints shape =', joints.shape) 60 | 61 | if plotting_module == 'pyrender': 62 | import pyrender 63 | import trimesh 64 | vertex_colors = np.ones([vertices.shape[0], 4]) * [0.3, 0.3, 0.3, 0.8] 65 | tri_mesh = trimesh.Trimesh(vertices, model.faces, 66 | vertex_colors=vertex_colors) 67 | 68 | mesh = pyrender.Mesh.from_trimesh(tri_mesh) 69 | 70 | scene = pyrender.Scene() 71 | scene.add(mesh) 72 | 73 | if plot_joints: 74 | sm = trimesh.creation.uv_sphere(radius=0.005) 75 | sm.visual.vertex_colors = [0.9, 0.1, 0.1, 1.0] 76 | tfs = np.tile(np.eye(4), (len(joints), 1, 1)) 77 | tfs[:, :3, 3] = joints 78 | joints_pcl = pyrender.Mesh.from_trimesh(sm, poses=tfs) 79 | scene.add(joints_pcl) 80 | 81 | pyrender.Viewer(scene, use_raymond_lighting=True) 82 | elif plotting_module == 'matplotlib': 83 | from matplotlib import pyplot as plt 84 | from mpl_toolkits.mplot3d import Axes3D 85 | from mpl_toolkits.mplot3d.art3d import Poly3DCollection 86 | 87 | fig = plt.figure() 88 | ax = fig.add_subplot(111, projection='3d') 89 | 90 | mesh = Poly3DCollection(vertices[model.faces], alpha=0.1) 91 | face_color = (1.0, 1.0, 0.9) 92 | edge_color = (0, 0, 0) 93 | mesh.set_edgecolor(edge_color) 94 | mesh.set_facecolor(face_color) 95 | ax.add_collection3d(mesh) 96 | ax.scatter(joints[:, 0], joints[:, 1], joints[:, 2], color='r') 97 | 98 | if plot_joints: 99 | ax.scatter(joints[:, 0], joints[:, 1], joints[:, 2], alpha=0.1) 100 | plt.show() 101 | elif plotting_module == 'open3d': 102 | import open3d as o3d 103 | 104 | mesh = o3d.geometry.TriangleMesh() 105 | mesh.vertices = o3d.utility.Vector3dVector( 106 | vertices) 107 | mesh.triangles = o3d.utility.Vector3iVector(model.faces) 108 | mesh.compute_vertex_normals() 109 | mesh.paint_uniform_color([0.3, 0.3, 0.3]) 110 | 111 | geometry = [mesh] 112 | if plot_joints: 113 | joints_pcl = 
o3d.geometry.PointCloud() 114 | joints_pcl.points = o3d.utility.Vector3dVector(joints) 115 | joints_pcl.paint_uniform_color([0.7, 0.3, 0.3]) 116 | geometry.append(joints_pcl) 117 | 118 | o3d.visualization.draw_geometries(geometry) 119 | else: 120 | raise ValueError('Unknown plotting_module: {}'.format(plotting_module)) 121 | 122 | 123 | if __name__ == '__main__': 124 | parser = argparse.ArgumentParser(description='SMPL-X Demo') 125 | 126 | parser.add_argument('--model-folder', required=True, type=str, 127 | help='The path to the model folder') 128 | parser.add_argument('--model-type', default='smplx', type=str, 129 | choices=['smpl', 'smplh', 'smplx', 'mano', 'flame'], 130 | help='The type of model to load') 131 | parser.add_argument('--gender', type=str, default='neutral', 132 | help='The gender of the model') 133 | parser.add_argument('--num-betas', default=10, type=int, 134 | dest='num_betas', 135 | help='Number of shape coefficients.') 136 | parser.add_argument('--num-expression-coeffs', default=10, type=int, 137 | dest='num_expression_coeffs', 138 | help='Number of expression coefficients.') 139 | parser.add_argument('--plotting-module', type=str, default='pyrender', 140 | dest='plotting_module', 141 | choices=['pyrender', 'matplotlib', 'open3d'], 142 | help='The module to use for plotting the result') 143 | parser.add_argument('--ext', type=str, default='npz', 144 | help='Which extension to use for loading') 145 | parser.add_argument('--plot-joints', default=False, 146 | type=lambda arg: arg.lower() in ['true', '1'], 147 | help='The path to the model folder') 148 | parser.add_argument('--sample-shape', default=True, 149 | dest='sample_shape', 150 | type=lambda arg: arg.lower() in ['true', '1'], 151 | help='Sample a random shape') 152 | parser.add_argument('--sample-expression', default=True, 153 | dest='sample_expression', 154 | type=lambda arg: arg.lower() in ['true', '1'], 155 | help='Sample a random expression') 156 | 
parser.add_argument('--use-face-contour', default=False, 157 | type=lambda arg: arg.lower() in ['true', '1'], 158 | help='Compute the contour of the face') 159 | 160 | args = parser.parse_args() 161 | 162 | model_folder = osp.expanduser(osp.expandvars(args.model_folder)) 163 | model_type = args.model_type 164 | plot_joints = args.plot_joints 165 | use_face_contour = args.use_face_contour 166 | gender = args.gender 167 | ext = args.ext 168 | plotting_module = args.plotting_module 169 | num_betas = args.num_betas 170 | num_expression_coeffs = args.num_expression_coeffs 171 | sample_shape = args.sample_shape 172 | sample_expression = args.sample_expression 173 | 174 | main(model_folder, model_type, ext=ext, 175 | gender=gender, plot_joints=plot_joints, 176 | num_betas=num_betas, 177 | num_expression_coeffs=num_expression_coeffs, 178 | sample_shape=sample_shape, 179 | sample_expression=sample_expression, 180 | plotting_module=plotting_module, 181 | use_face_contour=use_face_contour) 182 | -------------------------------------------------------------------------------- /examples/vis_flame_vertices.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
14 | # 15 | # Contact: ps-license@tuebingen.mpg.de 16 | 17 | import os.path as osp 18 | import argparse 19 | import pickle 20 | 21 | import numpy as np 22 | import torch 23 | import open3d as o3d 24 | 25 | import smplx 26 | 27 | 28 | def main(model_folder, corr_fname, ext='npz', 29 | head_color=(0.3, 0.3, 0.6), 30 | gender='neutral'): 31 | 32 | head_idxs = np.load(corr_fname) 33 | 34 | model = smplx.create(model_folder, model_type='smplx', 35 | gender=gender, 36 | ext=ext) 37 | betas = torch.zeros([1, 10], dtype=torch.float32) 38 | expression = torch.zeros([1, 10], dtype=torch.float32) 39 | 40 | output = model(betas=betas, expression=expression, 41 | return_verts=True) 42 | vertices = output.vertices.detach().cpu().numpy().squeeze() 43 | joints = output.joints.detach().cpu().numpy().squeeze() 44 | 45 | print('Vertices shape =', vertices.shape) 46 | print('Joints shape =', joints.shape) 47 | 48 | mesh = o3d.geometry.TriangleMesh() 49 | mesh.vertices = o3d.utility.Vector3dVector(vertices) 50 | mesh.triangles = o3d.utility.Vector3iVector(model.faces) 51 | mesh.compute_vertex_normals() 52 | 53 | colors = np.ones_like(vertices) * [0.3, 0.3, 0.3] 54 | colors[head_idxs] = head_color 55 | 56 | mesh.vertex_colors = o3d.utility.Vector3dVector(colors) 57 | 58 | o3d.visualization.draw_geometries([mesh]) 59 | 60 | 61 | if __name__ == '__main__': 62 | parser = argparse.ArgumentParser(description='SMPL-X Demo') 63 | 64 | parser.add_argument('--model-folder', required=True, type=str, 65 | help='The path to the model folder') 66 | parser.add_argument('--corr-fname', required=True, type=str, 67 | dest='corr_fname', 68 | help='Filename with the head correspondences') 69 | parser.add_argument('--gender', type=str, default='neutral', 70 | help='The gender of the model') 71 | parser.add_argument('--ext', type=str, default='npz', 72 | help='Which extension to use for loading') 73 | parser.add_argument('--head', default='right', 74 | choices=['right', 'left'], 75 | type=str, help='Which 
head to plot') 76 | parser.add_argument('--head-color', type=float, nargs=3, dest='head_color', 77 | default=(0.3, 0.3, 0.6), 78 | help='Color for the head vertices') 79 | 80 | args = parser.parse_args() 81 | 82 | model_folder = osp.expanduser(osp.expandvars(args.model_folder)) 83 | corr_fname = args.corr_fname 84 | gender = args.gender 85 | ext = args.ext 86 | head = args.head 87 | head_color = args.head_color 88 | 89 | main(model_folder, corr_fname, ext=ext, 90 | head_color=head_color, 91 | gender=gender 92 | ) 93 | -------------------------------------------------------------------------------- /examples/vis_mano_vertices.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
14 | # 15 | # Contact: ps-license@tuebingen.mpg.de 16 | 17 | import os.path as osp 18 | import argparse 19 | import pickle 20 | 21 | import numpy as np 22 | import torch 23 | import open3d as o3d 24 | 25 | import smplx 26 | 27 | 28 | def main(model_folder, corr_fname, ext='npz', 29 | hand_color=(0.3, 0.3, 0.6), 30 | gender='neutral', hand='right'): 31 | 32 | with open(corr_fname, 'rb') as f: 33 | idxs_data = pickle.load(f) 34 | if hand == 'both': 35 | hand_idxs = np.concatenate( 36 | [idxs_data['left_hand'], idxs_data['right_hand']] 37 | ) 38 | else: 39 | hand_idxs = idxs_data[f'{hand}_hand'] 40 | 41 | model = smplx.create(model_folder, model_type='smplx', 42 | gender=gender, 43 | ext=ext) 44 | betas = torch.zeros([1, 10], dtype=torch.float32) 45 | expression = torch.zeros([1, 10], dtype=torch.float32) 46 | 47 | output = model(betas=betas, expression=expression, 48 | return_verts=True) 49 | vertices = output.vertices.detach().cpu().numpy().squeeze() 50 | joints = output.joints.detach().cpu().numpy().squeeze() 51 | 52 | print('Vertices shape =', vertices.shape) 53 | print('Joints shape =', joints.shape) 54 | 55 | mesh = o3d.geometry.TriangleMesh() 56 | mesh.vertices = o3d.utility.Vector3dVector(vertices) 57 | mesh.triangles = o3d.utility.Vector3iVector(model.faces) 58 | mesh.compute_vertex_normals() 59 | 60 | colors = np.ones_like(vertices) * [0.3, 0.3, 0.3] 61 | colors[hand_idxs] = hand_color 62 | 63 | mesh.vertex_colors = o3d.utility.Vector3dVector(colors) 64 | 65 | o3d.visualization.draw_geometries([mesh]) 66 | 67 | 68 | if __name__ == '__main__': 69 | parser = argparse.ArgumentParser(description='SMPL-X Demo') 70 | 71 | parser.add_argument('--model-folder', required=True, type=str, 72 | help='The path to the model folder') 73 | parser.add_argument('--corr-fname', required=True, type=str, 74 | dest='corr_fname', 75 | help='Filename with the hand correspondences') 76 | parser.add_argument('--gender', type=str, default='neutral', 77 | help='The gender of the 
model') 78 | parser.add_argument('--ext', type=str, default='npz', 79 | help='Which extension to use for loading') 80 | parser.add_argument('--hand', default='right', 81 | choices=['right', 'left', 'both'], 82 | type=str, help='Which hand to plot') 83 | parser.add_argument('--hand-color', type=float, nargs=3, dest='hand_color', 84 | default=(0.3, 0.3, 0.6), 85 | help='Color for the hand vertices') 86 | 87 | args = parser.parse_args() 88 | 89 | model_folder = osp.expanduser(osp.expandvars(args.model_folder)) 90 | corr_fname = args.corr_fname 91 | gender = args.gender 92 | ext = args.ext 93 | hand = args.hand 94 | hand_color = args.hand_color 95 | 96 | main(model_folder, corr_fname, ext=ext, 97 | hand_color=hand_color, 98 | gender=gender, hand=hand 99 | ) 100 | -------------------------------------------------------------------------------- /images/example.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vchoutas/smplx/1265df7ba545e8b00f72e7c557c766e15c71632f/images/example.png -------------------------------------------------------------------------------- /images/teaser_fig.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vchoutas/smplx/1265df7ba545e8b00f72e7c557c766e15c71632f/images/teaser_fig.png -------------------------------------------------------------------------------- /optional-requirements.txt: -------------------------------------------------------------------------------- 1 | pyrender>=0.1.23 2 | shapely 3 | trimesh>=2.37.6 4 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy>=1.16.2 2 | torch>=1.0.1.post2 3 | dataclasses>=0.6 4 | -------------------------------------------------------------------------------- /setup.py: 
-------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems and the Max Planck Institute for Biological 14 | # Cybernetics. All rights reserved. 15 | # 16 | # Contact: ps-license@tuebingen.mpg.de 17 | 18 | import io 19 | import os 20 | 21 | from setuptools import setup 22 | 23 | # Package meta-data. 24 | NAME = 'smplx' 25 | DESCRIPTION = 'PyTorch module for loading the SMPLX body model' 26 | URL = 'http://smpl-x.is.tuebingen.mpg.de' 27 | EMAIL = 'vassilis.choutas@tuebingen.mpg.de' 28 | AUTHOR = 'Vassilis Choutas' 29 | REQUIRES_PYTHON = '>=3.6.0' 30 | VERSION = '0.1.28' 31 | 32 | here = os.path.abspath(os.path.dirname(__file__)) 33 | 34 | try: 35 | FileNotFoundError 36 | except NameError: 37 | FileNotFoundError = IOError 38 | 39 | # Import the README and use it as the long-description. 40 | # Note: this will only work if 'README.md' is present in your MANIFEST.in file! 41 | try: 42 | with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f: 43 | long_description = '\n' + f.read() 44 | except FileNotFoundError: 45 | long_description = DESCRIPTION 46 | 47 | # Load the package's __version__.py module as a dictionary. 
48 | about = {} 49 | if not VERSION: 50 | with open(os.path.join(here, NAME, '__version__.py')) as f: 51 | exec(f.read(), about) 52 | else: 53 | about['__version__'] = VERSION 54 | 55 | pyrender_reqs = ['pyrender>=0.1.23', 'trimesh>=2.37.6', 'shapely'] 56 | matplotlib_reqs = ['matplotlib'] 57 | open3d_reqs = ['open3d-python'] 58 | 59 | setup(name=NAME, 60 | version=about['__version__'], 61 | description=DESCRIPTION, 62 | long_description=long_description, 63 | long_description_content_type='text/markdown', 64 | author=AUTHOR, 65 | author_email=EMAIL, 66 | python_requires=REQUIRES_PYTHON, 67 | url=URL, 68 | install_requires=[ 69 | 'numpy>=1.16.2', 70 | 'torch>=1.0.1.post2', 71 | ], 72 | extras_require={ 73 | 'pyrender': pyrender_reqs, 74 | 'open3d': open3d_reqs, 75 | 'matplotlib': matplotlib_reqs, 76 | 'all': pyrender_reqs + matplotlib_reqs + open3d_reqs 77 | }, 78 | packages=['smplx']) 79 | -------------------------------------------------------------------------------- /smplx/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
14 | # 15 | # Contact: ps-license@tuebingen.mpg.de 16 | 17 | from .body_models import ( 18 | create, 19 | SMPL, 20 | SMPLH, 21 | SMPLX, 22 | MANO, 23 | FLAME, 24 | build_layer, 25 | SMPLLayer, 26 | SMPLHLayer, 27 | SMPLXLayer, 28 | MANOLayer, 29 | FLAMELayer, 30 | ) 31 | -------------------------------------------------------------------------------- /smplx/joint_names.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: ps-license@tuebingen.mpg.de

import numpy as np

# Names of the joints produced by the full SMPL-X model output, in
# order: body, hands, face landmarks and (last) the face contour.
JOINT_NAMES = [
    "pelvis",
    "left_hip",
    "right_hip",
    "spine1",
    "left_knee",
    "right_knee",
    "spine2",
    "left_ankle",
    "right_ankle",
    "spine3",
    "left_foot",
    "right_foot",
    "neck",
    "left_collar",
    "right_collar",
    "head",
    "left_shoulder",
    "right_shoulder",
    "left_elbow",
    "right_elbow",
    "left_wrist",
    "right_wrist",
    "jaw",
    "left_eye_smplhf",
    "right_eye_smplhf",
    "left_index1",
    "left_index2",
    "left_index3",
    "left_middle1",
    "left_middle2",
    "left_middle3",
    "left_pinky1",
    "left_pinky2",
    "left_pinky3",
    "left_ring1",
    "left_ring2",
    "left_ring3",
    "left_thumb1",
    "left_thumb2",
    "left_thumb3",
    "right_index1",
    "right_index2",
    "right_index3",
    "right_middle1",
    "right_middle2",
    "right_middle3",
    "right_pinky1",
    "right_pinky2",
    "right_pinky3",
    "right_ring1",
    "right_ring2",
    "right_ring3",
    "right_thumb1",
    "right_thumb2",
    "right_thumb3",
    "nose",
    "right_eye",
    "left_eye",
    "right_ear",
    "left_ear",
    "left_big_toe",
    "left_small_toe",
    "left_heel",
    "right_big_toe",
    "right_small_toe",
    "right_heel",
    "left_thumb",
    "left_index",
    "left_middle",
    "left_ring",
    "left_pinky",
    "right_thumb",
    "right_index",
    "right_middle",
    "right_ring",
    "right_pinky",
    "right_eye_brow1",
    "right_eye_brow2",
    "right_eye_brow3",
    "right_eye_brow4",
    "right_eye_brow5",
    "left_eye_brow5",
    "left_eye_brow4",
    "left_eye_brow3",
    "left_eye_brow2",
    "left_eye_brow1",
    "nose1",
    "nose2",
    "nose3",
    "nose4",
    "right_nose_2",
    "right_nose_1",
    "nose_middle",
    "left_nose_1",
    "left_nose_2",
    "right_eye1",
    "right_eye2",
    "right_eye3",
    "right_eye4",
    "right_eye5",
    "right_eye6",
    "left_eye4",
    "left_eye3",
    "left_eye2",
    "left_eye1",
    "left_eye6",
    "left_eye5",
    "right_mouth_1",
    "right_mouth_2",
    "right_mouth_3",
    "mouth_top",
    "left_mouth_3",
    "left_mouth_2",
    "left_mouth_1",
    "left_mouth_5",  # 59 in OpenPose output
    "left_mouth_4",  # 58 in OpenPose output
    "mouth_bottom",
    "right_mouth_4",
    "right_mouth_5",
    "right_lip_1",
    "right_lip_2",
    "lip_top",
    "left_lip_2",
    "left_lip_1",
    "left_lip_3",
    "lip_bottom",
    "right_lip_3",
    # Face contour
    "right_contour_1",
    "right_contour_2",
    "right_contour_3",
    "right_contour_4",
    "right_contour_5",
    "right_contour_6",
    "right_contour_7",
    "right_contour_8",
    "contour_middle",
    "left_contour_8",
    "left_contour_7",
    "left_contour_6",
    "left_contour_5",
    "left_contour_4",
    "left_contour_3",
    "left_contour_2",
    "left_contour_1",
]


# SMPL+H joints: 22 body joints, 15 joints per hand, then extra
# surface landmarks (face points, toes, heels, fingertips).
SMPLH_JOINT_NAMES = [
    "pelvis",
    "left_hip",
    "right_hip",
    "spine1",
    "left_knee",
    "right_knee",
    "spine2",
    "left_ankle",
    "right_ankle",
    "spine3",
    "left_foot",
    "right_foot",
    "neck",
    "left_collar",
    "right_collar",
    "head",
    "left_shoulder",
    "right_shoulder",
    "left_elbow",
    "right_elbow",
    "left_wrist",
    "right_wrist",
    "left_index1",
    "left_index2",
    "left_index3",
    "left_middle1",
    "left_middle2",
    "left_middle3",
    "left_pinky1",
    "left_pinky2",
    "left_pinky3",
    "left_ring1",
    "left_ring2",
    "left_ring3",
    "left_thumb1",
    "left_thumb2",
    "left_thumb3",
    "right_index1",
    "right_index2",
    "right_index3",
    "right_middle1",
    "right_middle2",
    "right_middle3",
    "right_pinky1",
    "right_pinky2",
    "right_pinky3",
    "right_ring1",
    "right_ring2",
    "right_ring3",
    "right_thumb1",
    "right_thumb2",
    "right_thumb3",
    "nose",
    "right_eye",
    "left_eye",
    "right_ear",
    "left_ear",
    "left_big_toe",
    "left_small_toe",
    "left_heel",
    "right_big_toe",
    "right_small_toe",
    "right_heel",
    "left_thumb",
    "left_index",
    "left_middle",
    "left_ring",
    "left_pinky",
    "right_thumb",
    "right_index",
    "right_middle",
    "right_ring",
    "right_pinky",
]

# The 24 joints of the basic SMPL kinematic tree.
SMPL_JOINT_NAMES = [
    "pelvis",
    "left_hip",
    "right_hip",
    "spine1",
    "left_knee",
    "right_knee",
    "spine2",
    "left_ankle",
    "right_ankle",
    "spine3",
    "left_foot",
    "right_foot",
    "neck",
    "left_collar",
    "right_collar",
    "head",
    "left_shoulder",
    "right_shoulder",
    "left_elbow",
    "right_elbow",
    "left_wrist",
    "right_wrist",
    "left_hand",
    "right_hand",
]


class Body:
    """A single body pose stored as a mapping from joint name to data.

    ``joints`` is an array of shape ``(num_joints, ...)`` whose first
    axis lines up with ``joint_names``; each row is kept under its name
    so poses can be converted between joint-name conventions.
    """

    def __init__(self, joints, joint_names):
        assert joints.ndim > 1
        assert joints.shape[0] == len(joint_names)
        # joints: name -> per-joint data row.
        self.joints = {}
        for i, j in enumerate(joint_names):
            self.joints[j] = joints[i]

    @staticmethod
    def from_smpl(joints):
        """Create a Body object from SMPL joints."""
        return Body(joints, SMPL_JOINT_NAMES)

    @staticmethod
    def from_smplh(joints):
        """Create a Body object from SMPLH joints."""
        return Body(joints, SMPLH_JOINT_NAMES)

    def _as(self, joint_names):
        """Stack the stored joints in ``joint_names`` order.

        Joints missing from this body are filled with zeros of the same
        shape as the stored joints.
        """
        # BUGFIX/robustness: the zero template used to be hard-coded to
        # self.joints["spine1"], which raised KeyError for any skeleton
        # without that joint; any stored joint now provides the shape.
        template = next(iter(self.joints.values()))
        joint_list = []
        for j in joint_names:
            if j not in self.joints:
                joint_list.append(np.zeros_like(template))
            else:
                joint_list.append(self.joints[j])
        return np.stack(joint_list, axis=0)

    def as_smpl(self):
        """Convert the body to SMPL joints."""
        return self._as(SMPL_JOINT_NAMES)

    def as_smplh(self):
        """Convert the body to SMPLH joints."""
        return self._as(SMPLH_JOINT_NAMES)

# ============================ smplx/lbs.py ============================

# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
14 | # 15 | # Contact: ps-license@tuebingen.mpg.de 16 | 17 | from __future__ import absolute_import 18 | from __future__ import print_function 19 | from __future__ import division 20 | 21 | from typing import Tuple, List 22 | import numpy as np 23 | 24 | import torch 25 | import torch.nn.functional as F 26 | 27 | from .utils import rot_mat_to_euler, Tensor 28 | 29 | 30 | def find_dynamic_lmk_idx_and_bcoords( 31 | vertices: Tensor, 32 | pose: Tensor, 33 | dynamic_lmk_faces_idx: Tensor, 34 | dynamic_lmk_b_coords: Tensor, 35 | neck_kin_chain: List[int], 36 | pose2rot: bool = True, 37 | ) -> Tuple[Tensor, Tensor]: 38 | ''' Compute the faces, barycentric coordinates for the dynamic landmarks 39 | 40 | 41 | To do so, we first compute the rotation of the neck around the y-axis 42 | and then use a pre-computed look-up table to find the faces and the 43 | barycentric coordinates that will be used. 44 | 45 | Special thanks to Soubhik Sanyal (soubhik.sanyal@tuebingen.mpg.de) 46 | for providing the original TensorFlow implementation and for the LUT. 47 | 48 | Parameters 49 | ---------- 50 | vertices: torch.tensor BxVx3, dtype = torch.float32 51 | The tensor of input vertices 52 | pose: torch.tensor Bx(Jx3), dtype = torch.float32 53 | The current pose of the body model 54 | dynamic_lmk_faces_idx: torch.tensor L, dtype = torch.long 55 | The look-up table from neck rotation to faces 56 | dynamic_lmk_b_coords: torch.tensor Lx3, dtype = torch.float32 57 | The look-up table from neck rotation to barycentric coordinates 58 | neck_kin_chain: list 59 | A python list that contains the indices of the joints that form the 60 | kinematic chain of the neck. 61 | dtype: torch.dtype, optional 62 | 63 | Returns 64 | ------- 65 | dyn_lmk_faces_idx: torch.tensor, dtype = torch.long 66 | A tensor of size BxL that contains the indices of the faces that 67 | will be used to compute the current dynamic landmarks. 
def vertices2landmarks(
    vertices: Tensor,
    faces: Tensor,
    lmk_faces_idx: Tensor,
    lmk_bary_coords: Tensor
) -> Tensor:
    ''' Calculates landmarks by barycentric interpolation

        Parameters
        ----------
        vertices: torch.tensor BxVx3, dtype = torch.float32
            The tensor of input vertices
        faces: torch.tensor Fx3, dtype = torch.long
            The faces of the mesh
        lmk_faces_idx: torch.tensor L, dtype = torch.long
            The tensor with the indices of the faces used to calculate the
            landmarks.
        lmk_bary_coords: torch.tensor Lx3, dtype = torch.float32
            The tensor of barycentric coordinates that are used to interpolate
            the landmarks

        Returns
        -------
        landmarks: torch.tensor BxLx3, dtype = torch.float32
            The coordinates of the landmarks for each mesh in the batch
    '''
    # Extract the indices of the vertices for each face
    # BxLx3
    batch_size, num_verts = vertices.shape[:2]
    device = vertices.device

    # The `.to(torch.long)` cast keeps the traced graph usable from C++,
    # where index_select raises
    # 'index_select(): Expected dtype int32 or int64 for index'.
    lmk_faces = torch.index_select(
        faces, 0, lmk_faces_idx.view(-1).to(torch.long)).view(
            batch_size, -1, 3)

    # Offset each batch element's vertex indices so that a single flattened
    # (B*V)x3 vertex tensor can be gathered below.
    lmk_faces += torch.arange(
        batch_size, dtype=torch.long, device=device).view(-1, 1, 1) * num_verts

    lmk_vertices = vertices.view(-1, 3)[lmk_faces].view(
        batch_size, -1, 3, 3)

    # Barycentric combination of the three face vertices per landmark
    landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords])
    return landmarks


def lbs(
    betas: Tensor,
    pose: Tensor,
    v_template: Tensor,
    shapedirs: Tensor,
    posedirs: Tensor,
    J_regressor: Tensor,
    parents: Tensor,
    lbs_weights: Tensor,
    pose2rot: bool = True,
) -> Tuple[Tensor, Tensor]:
    ''' Performs Linear Blend Skinning with the given shape and pose parameters

        Parameters
        ----------
        betas : torch.tensor BxNB
            The tensor of shape parameters
        pose : torch.tensor Bx(J + 1) * 3
            The pose parameters in axis-angle format
        v_template torch.tensor BxVx3
            The template mesh that will be deformed
        shapedirs : torch.tensor Vx3xNB
            The tensor of PCA shape displacements
        posedirs : torch.tensor Px(V * 3)
            The pose blend shape basis
        J_regressor : torch.tensor JxV
            The regressor array that is used to calculate the joints from
            the position of the vertices
        parents: torch.tensor J
            The array that describes the kinematic tree for the model
        lbs_weights: torch.tensor N x V x (J + 1)
            The linear blend skinning weights that represent how much the
            rotation matrix of each part affects each vertex
        pose2rot: bool, optional
            Flag on whether to convert the input pose tensor to rotation
            matrices. The default value is True. If False, then the pose tensor
            should already contain rotation matrices and have a size of
            Bx(J + 1)x9

        Returns
        -------
        verts: torch.tensor BxVx3
            The vertices of the mesh after applying the shape and pose
            displacements.
        joints: torch.tensor BxJx3
            The joints of the model
    '''
    # NOTE(review): betas and pose are assumed to share a batch size or be
    # broadcastable to it — confirm with callers before mixing batch sizes.
    batch_size = max(betas.shape[0], pose.shape[0])
    device, dtype = betas.device, betas.dtype

    # 1. Add the shape contribution to the template
    v_shaped = v_template + blend_shapes(betas, shapedirs)

    # 2. Regress the joint locations from the shaped mesh
    # NxJx3 array
    J = vertices2joints(J_regressor, v_shaped)

    # 3. Add pose blend shapes
    # N x J x 3 x 3
    ident = torch.eye(3, dtype=dtype, device=device)
    if pose2rot:
        rot_mats = batch_rodrigues(pose.view(-1, 3)).view(
            [batch_size, -1, 3, 3])

        # The root orientation (index 0) does not drive pose blend shapes,
        # hence the [:, 1:] slice.
        pose_feature = (rot_mats[:, 1:, :, :] - ident).view([batch_size, -1])
        # (N x P) x (P, V * 3) -> N x V x 3
        pose_offsets = torch.matmul(
            pose_feature, posedirs).view(batch_size, -1, 3)
    else:
        pose_feature = pose[:, 1:].view(batch_size, -1, 3, 3) - ident
        rot_mats = pose.view(batch_size, -1, 3, 3)

        pose_offsets = torch.matmul(pose_feature.view(batch_size, -1),
                                    posedirs).view(batch_size, -1, 3)

    v_posed = pose_offsets + v_shaped
    # 4. Get the global joint location
    J_transformed, A = batch_rigid_transform(rot_mats, J, parents, dtype=dtype)

    # 5. Do skinning:
    # W is N x V x (J + 1)
    W = lbs_weights.unsqueeze(dim=0).expand([batch_size, -1, -1])
    # (N x V x (J + 1)) x (N x (J + 1) x 16)
    num_joints = J_regressor.shape[0]
    T = torch.matmul(W, A.view(batch_size, num_joints, 16)) \
        .view(batch_size, -1, 4, 4)

    # Apply the per-vertex transforms in homogeneous coordinates
    homogen_coord = torch.ones([batch_size, v_posed.shape[1], 1],
                               dtype=dtype, device=device)
    v_posed_homo = torch.cat([v_posed, homogen_coord], dim=2)
    v_homo = torch.matmul(T, torch.unsqueeze(v_posed_homo, dim=-1))

    verts = v_homo[:, :, :3, 0]

    return verts, J_transformed


def vertices2joints(J_regressor: Tensor, vertices: Tensor) -> Tensor:
    ''' Calculates the 3D joint locations from the vertices

    Parameters
    ----------
    J_regressor : torch.tensor JxV
        The regressor array that is used to calculate the joints from the
        position of the vertices
    vertices : torch.tensor BxVx3
        The tensor of mesh vertices

    Returns
    -------
    torch.tensor BxJx3
        The location of the joints
    '''
    # joints[b, j, k] = sum_i J_regressor[j, i] * vertices[b, i, k]
    return torch.einsum('bik,ji->bjk', [vertices, J_regressor])


def blend_shapes(betas: Tensor, shape_disps: Tensor) -> Tensor:
    ''' Calculates the per vertex displacement due to the blend shapes


    Parameters
    ----------
    betas : torch.tensor Bx(num_betas)
        Blend shape coefficients
    shape_disps: torch.tensor Vx3x(num_betas)
        Blend shapes

    Returns
    -------
    torch.tensor BxVx3
        The per-vertex displacement due to shape deformation
    '''
    # Displacement[b, m, k] = sum_{l} betas[b, l] * shape_disps[m, k, l]
    # i.e. Multiply each shape displacement by its corresponding beta and
    # then sum them.
    blend_shape = torch.einsum('bl,mkl->bmk', [betas, shape_disps])
    return blend_shape


def batch_rodrigues(
    rot_vecs: Tensor,
    epsilon: float = 1e-8,
) -> Tensor:
    ''' Calculates the rotation matrices for a batch of rotation vectors

    Parameters
    ----------
    rot_vecs: torch.tensor Nx3
        array of N axis-angle vectors
    epsilon: float, optional
        Small offset added before taking the norm so that zero rotation
        vectors do not produce a division by zero (and NaN gradients).

    Returns
    -------
    R: torch.tensor Nx3x3
        The rotation matrices for the given axis-angle parameters
    '''
    batch_size = rot_vecs.shape[0]
    device, dtype = rot_vecs.device, rot_vecs.dtype

    # Use the `epsilon` parameter instead of a hard-coded 1e-8 so callers can
    # actually control the numerical guard (previously the argument was dead).
    angle = torch.norm(rot_vecs + epsilon, dim=1, keepdim=True)
    rot_dir = rot_vecs / angle

    cos = torch.unsqueeze(torch.cos(angle), dim=1)
    sin = torch.unsqueeze(torch.sin(angle), dim=1)

    # Bx1 arrays
    rx, ry, rz = torch.split(rot_dir, 1, dim=1)

    # K is the skew-symmetric cross-product matrix of the rotation axis.
    # (The previous dead `K = torch.zeros(...)` assignment was removed.)
    zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device)
    K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \
        .view((batch_size, 3, 3))

    # Rodrigues' formula: R = I + sin(a) * K + (1 - cos(a)) * K^2
    ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0)
    rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K)
    return rot_mat


def transform_mat(R: Tensor, t: Tensor) -> Tensor:
    ''' Creates a batch of transformation matrices
    Args:
        - R: Bx3x3 array of a batch of rotation matrices
        - t: Bx3x1 array of a batch of translation vectors
    Returns:
        - T: Bx4x4 Transformation matrix
    '''
    # No padding left or right, only add an extra row; the translation column
    # is padded with 1 to form the homogeneous [R t; 0 1] matrix.
    return torch.cat([F.pad(R, [0, 0, 0, 1]),
                      F.pad(t, [0, 0, 0, 1], value=1)], dim=2)


def batch_rigid_transform(
    rot_mats: Tensor,
    joints: Tensor,
    parents: Tensor,
    dtype=torch.float32
) -> Tuple[Tensor, Tensor]:
    """
    Applies a batch of rigid transformations to the joints

    Parameters
    ----------
    rot_mats : torch.tensor BxNx3x3
        Tensor of rotation matrices
    joints : torch.tensor BxNx3
        Locations of joints
    parents : torch.tensor BxN
        The kinematic tree of each object
    dtype : torch.dtype, optional
        Unused; kept only for backward compatibility with existing callers.

    Returns
    -------
    posed_joints : torch.tensor BxNx3
        The locations of the joints after applying the pose rotations
    rel_transforms : torch.tensor BxNx4x4
        The relative (with respect to the root joint) rigid transformations
        for all the joints
    """

    joints = torch.unsqueeze(joints, dim=-1)

    # Express every non-root joint relative to its parent
    rel_joints = joints.clone()
    rel_joints[:, 1:] -= joints[:, parents[1:]]

    transforms_mat = transform_mat(
        rot_mats.reshape(-1, 3, 3),
        rel_joints.reshape(-1, 3, 1)).reshape(-1, joints.shape[1], 4, 4)

    # Walk down the kinematic chain, composing every joint's local transform
    # with its parent's global one. Assumes `parents` is topologically
    # ordered (parent index < child index), with the root at index 0.
    transform_chain = [transforms_mat[:, 0]]
    for i in range(1, parents.shape[0]):
        # Subtract the joint location at the rest pose
        # No need for rotation, since it's identity when at rest
        curr_res = torch.matmul(transform_chain[parents[i]],
                                transforms_mat[:, i])
        transform_chain.append(curr_res)

    transforms = torch.stack(transform_chain, dim=1)

    # The last column of the transformations contains the posed joints
    posed_joints = transforms[:, :, :3, 3]

    joints_homogen = F.pad(joints, [0, 0, 0, 1])

    # Remove the rest-pose joint locations from the translation part, so the
    # result maps rest-pose points to posed points.
    rel_transforms = transforms - F.pad(
        torch.matmul(transforms, joints_homogen), [3, 0, 0, 0, 0, 0, 0, 0])

    return posed_joints, rel_transforms
from typing import NewType, Union, Optional
from dataclasses import dataclass, asdict, fields
import numpy as np
import torch

Tensor = NewType('Tensor', torch.Tensor)
Array = NewType('Array', np.ndarray)


@dataclass
class ModelOutput:
    ''' Container for the tensors produced by a body model's forward pass.

    All fields default to None; each model fills in only what it computes.
    The helper methods provide a read-only, dict-like view (``keys`` /
    ``values`` / ``items`` / ``[]``) over the dataclass fields.
    '''
    vertices: Optional[Tensor] = None
    joints: Optional[Tensor] = None
    full_pose: Optional[Tensor] = None
    global_orient: Optional[Tensor] = None
    transl: Optional[Tensor] = None
    v_shaped: Optional[Tensor] = None

    def __getitem__(self, key):
        # Dict-style access; raises AttributeError for unknown keys,
        # mirroring attribute access.
        return getattr(self, key)

    def get(self, key, default=None):
        # Dict-style access with a fallback for unknown keys.
        return getattr(self, key, default)

    def __iter__(self):
        # Iterating the output yields its field names, like a dict.
        return self.keys()

    def keys(self):
        keys = [t.name for t in fields(self)]
        return iter(keys)

    def values(self):
        values = [getattr(self, t.name) for t in fields(self)]
        return iter(values)

    def items(self):
        data = [(t.name, getattr(self, t.name)) for t in fields(self)]
        return iter(data)


@dataclass
class SMPLOutput(ModelOutput):
    betas: Optional[Tensor] = None
    body_pose: Optional[Tensor] = None


@dataclass
class SMPLHOutput(SMPLOutput):
    # NOTE: a redundant re-declaration of `transl` (already a field of
    # ModelOutput with the same default) was removed here; dataclasses keep a
    # re-declared field in its original slot, so field order, defaults and
    # behavior are unchanged.
    left_hand_pose: Optional[Tensor] = None
    right_hand_pose: Optional[Tensor] = None


@dataclass
class SMPLXOutput(SMPLHOutput):
    expression: Optional[Tensor] = None
    jaw_pose: Optional[Tensor] = None


@dataclass
class MANOOutput(ModelOutput):
    betas: Optional[Tensor] = None
    hand_pose: Optional[Tensor] = None


@dataclass
class FLAMEOutput(ModelOutput):
    betas: Optional[Tensor] = None
    expression: Optional[Tensor] = None
    jaw_pose: Optional[Tensor] = None
    neck_pose: Optional[Tensor] = None


def find_joint_kin_chain(joint_id, kinematic_tree):
    ''' Returns the chain of joint indices from `joint_id` up to the root.

    `kinematic_tree` maps each joint index to its parent index, with -1
    marking the root. The returned list starts at `joint_id` and ends at
    the root.
    '''
    kin_chain = []
    curr_idx = joint_id
    while curr_idx != -1:
        kin_chain.append(curr_idx)
        curr_idx = kinematic_tree[curr_idx]
    return kin_chain


def to_tensor(
    array: Union[Array, Tensor], dtype=torch.float32
) -> Tensor:
    ''' Converts a numpy array to a torch tensor of the given dtype.

    If `array` is already a tensor it is returned unchanged — note that in
    that case `dtype` is intentionally NOT applied.
    '''
    if torch.is_tensor(array):
        return array
    else:
        return torch.tensor(array, dtype=dtype)


class Struct(object):
    ''' Minimal attribute bag: exposes every keyword argument as a field. '''

    def __init__(self, **kwargs):
        for key, val in kwargs.items():
            setattr(self, key, val)


def to_np(array, dtype=np.float32):
    ''' Converts array-like data (including scipy sparse matrices) to a dense
    numpy array of the given dtype.
    '''
    # Duck-typed check avoids a hard dependency on scipy being importable.
    if 'scipy.sparse' in str(type(array)):
        array = array.todense()
    return np.array(array, dtype=dtype)


def rot_mat_to_euler(rot_mats):
    ''' Extracts the rotation angle around the Y axis (in radians) from a
    batch of Bx3x3 rotation matrices.
    '''
    # Careful for extreme cases of euler angles like [0.0, pi, 0.0]
    sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] +
                    rot_mats[:, 1, 0] * rot_mats[:, 1, 0])
    return torch.atan2(-rot_mats[:, 2, 0], sy)
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: ps-license@tuebingen.mpg.de

from __future__ import print_function
from __future__ import absolute_import
from __future__ import division

# Joint name to vertex mapping. SMPL/SMPL-H/SMPL-X vertices that correspond to
# MSCOCO and OpenPose joints
vertex_ids = {
    'smplh': {
        # Face keypoints
        'nose': 332,
        'reye': 6260,
        'leye': 2800,
        'rear': 4071,
        'lear': 583,
        # Finger tips (right hand, then left hand)
        'rthumb': 6191,
        'rindex': 5782,
        'rmiddle': 5905,
        'rring': 6016,
        'rpinky': 6133,
        'lthumb': 2746,
        'lindex': 2319,
        'lmiddle': 2445,
        'lring': 2556,
        'lpinky': 2673,
        # Foot keypoints
        'LBigToe': 3216,
        'LSmallToe': 3226,
        'LHeel': 3387,
        'RBigToe': 6617,
        'RSmallToe': 6624,
        'RHeel': 6787
    },
    'smplx': {
        # Face keypoints
        'nose': 9120,
        'reye': 9929,
        'leye': 9448,
        'rear': 616,
        'lear': 6,
        # Finger tips (right hand, then left hand)
        'rthumb': 8079,
        'rindex': 7669,
        'rmiddle': 7794,
        'rring': 7905,
        'rpinky': 8022,
        'lthumb': 5361,
        'lindex': 4933,
        'lmiddle': 5058,
        'lring': 5169,
        'lpinky': 5286,
        # Foot keypoints
        'LBigToe': 5770,
        'LSmallToe': 5780,
        'LHeel': 8846,
        'RBigToe': 8463,
        'RSmallToe': 8474,
        'RHeel': 8635
    },
    # MANO is a single hand, so the tip names carry no side prefix
    'mano': {
        'thumb': 744,
        'index': 320,
        'middle': 443,
        'ring': 554,
        'pinky': 671,
    }
}
class VertexJointSelector(nn.Module):
    ''' Appends extra "joints" (face, feet and finger-tip keypoints) that are
    picked directly from mesh vertices to a model's regressed joints.

    Parameters
    ----------
    vertex_ids: dict
        Maps keypoint names (e.g. 'nose', 'LHeel', 'rthumb') to vertex
        indices of the body model.
    use_hands: bool, optional
        Whether to append the ten finger-tip vertices.
    use_feet_keypoints: bool, optional
        Whether to append the six foot keypoint vertices.
    '''

    def __init__(self, vertex_ids=None,
                 use_hands=True,
                 use_feet_keypoints=True, **kwargs):
        super(VertexJointSelector, self).__init__()

        # Collect index arrays and concatenate once at the end. Previously
        # the face indices used np.int64 while the feet used np.int32, and
        # concatenating with an empty Python list produced a float64
        # intermediate array; all indices are now int64 throughout.
        index_groups = []

        # Face keypoints are always included
        face_keyp_idxs = np.array([
            vertex_ids['nose'],
            vertex_ids['reye'],
            vertex_ids['leye'],
            vertex_ids['rear'],
            vertex_ids['lear']], dtype=np.int64)
        index_groups.append(face_keyp_idxs)

        if use_feet_keypoints:
            feet_keyp_idxs = np.array([vertex_ids['LBigToe'],
                                       vertex_ids['LSmallToe'],
                                       vertex_ids['LHeel'],
                                       vertex_ids['RBigToe'],
                                       vertex_ids['RSmallToe'],
                                       vertex_ids['RHeel']], dtype=np.int64)
            index_groups.append(feet_keyp_idxs)

        if use_hands:
            self.tip_names = ['thumb', 'index', 'middle', 'ring', 'pinky']

            tips_idxs = [vertex_ids[hand_id + tip_name]
                         for hand_id in ('l', 'r')
                         for tip_name in self.tip_names]
            index_groups.append(np.array(tips_idxs, dtype=np.int64))

        extra_joints_idxs = np.concatenate(index_groups)
        self.register_buffer('extra_joints_idxs',
                             torch.from_numpy(extra_joints_idxs).to(torch.long))

    def forward(self, vertices, joints):
        # The '.to(torch.long)' is kept so the traced graph stays valid in
        # C++, where index_select requires an int32/int64 index tensor.
        extra_joints = torch.index_select(
            vertices, 1, self.extra_joints_idxs.to(torch.long))
        joints = torch.cat([joints, extra_joints], dim=1)

        return joints
21 | 22 | 23 | ### SMPL-H version used in AMASS 24 | 25 | For AMASS, you should download the body with 16 betas, here is the process: 26 | 27 | ``` 28 | - Download the zip folder from "Models & Code" and extract it to get the folder `mano_v1_2` 29 | - Download the zip folder from "Extended SMPL+H model" and extract it to get the folder `smplh` 30 | 31 | $ git clone https://github.com/vchoutas/smplx.git 32 | $ cd smplx 33 | $ python tools/merge_smplh_mano.py \ 34 | --smplh-fn /path/to/smplh/female/model.npz \ 35 | --mano-left-fn /path/to/mano_v1_2/models/MANO_LEFT.pkl \ 36 | --mano-right-fn /path/to/mano_v1_2/models/MANO_RIGHT.pkl \ 37 | --output-folder /path/to/smplh/merged 38 | 39 | cp /path/to/smplh/merged/model.pkl /path/to/smplx_models/smplh/SMPLH_FEMALE.pkl 40 | ``` 41 | 42 | In the end you get the smplh model required by smplx 'smplx_models/smplh/SMPLH_FEMALE.pkl' 43 | -------------------------------------------------------------------------------- /tools/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2019 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems and the Max Planck Institute for Biological 14 | # Cybernetics. All rights reserved. 
# Use explicit relative imports: the bare `import clean_ch` form relied on
# Python 2's implicit relative imports, which were removed in Python 3
# (PEP 328) and raise ModuleNotFoundError there.
from . import clean_ch
from . import merge_smplh_mano
def clean_fn(fn, output_folder='output'):
    ''' Strips Chumpy objects from a pickled model file.

    Loads the pickle at `fn`, converts every Chumpy-typed value to a plain
    numpy array, and writes the cleaned dictionary to `output_folder` under
    the same file name.

    Parameters
    ----------
    fn: str
        Path to the input pickle file.
    output_folder: str, optional
        Directory where the cleaned pickle is written.
    '''
    with open(fn, 'rb') as body_file:
        body_data = pickle.load(body_file)

    output_dict = {}
    # `.items()` works on both Python 2 and 3; the previous `.iteritems()`
    # raises AttributeError on Python 3 dicts.
    for key, data in body_data.items():
        # Duck-typed check avoids importing chumpy here.
        if 'chumpy' in str(type(data)):
            output_dict[key] = np.array(data)
        else:
            output_dict[key] = data

    out_fn = osp.split(fn)[1]

    out_path = osp.join(output_folder, out_fn)
    with open(out_path, 'wb') as out_file:
        pickle.dump(output_dict, out_file)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--input-models', dest='input_models', nargs='+',
                        required=True, type=str,
                        help='The path to the model that will be processed')
    parser.add_argument('--output-folder', dest='output_folder',
                        required=True, type=str,
                        help='The path to the output folder')

    args = parser.parse_args()

    input_models = args.input_models
    output_folder = args.output_folder
    if not osp.exists(output_folder):
        print('Creating directory: {}'.format(output_folder))
        os.makedirs(output_folder)

    for input_model in input_models:
        clean_fn(input_model, output_folder=output_folder)
def merge_models(smplh_fn, mano_left_fn, mano_right_fn,
                 output_folder='output'):
    ''' Merges SMPL-H body parameters with MANO hand parameters.

    Loads the SMPL-H body model (from a .pkl or .npz file) and the left and
    right MANO hand models, copies the hand PCA components, coefficients and
    mean poses into the body dictionary, strips any Chumpy objects, and
    writes the merged model as a pickle in `output_folder` (an input .npz
    name is saved with a .pkl extension).

    Parameters
    ----------
    smplh_fn: str
        Path to the SMPL-H model (.pkl or .npz).
    mano_left_fn: str
        Path to the left-hand MANO model pickle.
    mano_right_fn: str
        Path to the right-hand MANO model pickle.
    output_folder: str, optional
        Directory where the merged model is written.
    '''
    if smplh_fn.endswith('.pkl'):
        with open(smplh_fn, 'rb') as body_file:
            # latin1 encoding lets Python 3 read Python 2 pickles
            body_data = pickle.load(body_file, encoding='latin1')
    elif smplh_fn.endswith('.npz'):
        body_data_np = np.load(smplh_fn)
        body_data = {}
        for key in body_data_np:
            body_data[key] = body_data_np[key]
    else:
        raise ValueError('The body model file should be either a .pkl or a .npz file.')

    with open(mano_left_fn, 'rb') as lhand_file:
        lhand_data = pickle.load(lhand_file, encoding='latin1')

    with open(mano_right_fn, 'rb') as rhand_file:
        rhand_data = pickle.load(rhand_file, encoding='latin1')

    out_fn = osp.split(smplh_fn)[1]
    if out_fn.endswith('.npz'):
        out_fn = out_fn.replace('.npz', '.pkl')

    output_data = body_data.copy()
    # Hand PCA bases, coefficients and mean poses; 'l'/'r' suffixes mark side
    output_data['hands_componentsl'] = lhand_data['hands_components']
    output_data['hands_componentsr'] = rhand_data['hands_components']

    output_data['hands_coeffsl'] = lhand_data['hands_coeffs']
    output_data['hands_coeffsr'] = rhand_data['hands_coeffs']

    output_data['hands_meanl'] = lhand_data['hands_mean']
    output_data['hands_meanr'] = rhand_data['hands_mean']

    # Convert any Chumpy objects to plain numpy arrays (duck-typed check)
    for key, data in output_data.items():
        if 'chumpy' in str(type(data)):
            output_data[key] = np.array(data)
        else:
            output_data[key] = data

    out_path = osp.join(output_folder, out_fn)
    # (A duplicate bare `print(out_path)` debug line was removed here.)
    print('Saving to {}'.format(out_path))
    with open(out_path, 'wb') as output_file:
        pickle.dump(output_data, output_file)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--smplh-fn', dest='smplh_fn', required=True,
                        type=str, help='The path to the SMPLH model')
    parser.add_argument('--mano-left-fn', dest='mano_left_fn', required=True,
                        type=str, help='The path to the left hand MANO model')
    parser.add_argument('--mano-right-fn', dest='mano_right_fn', required=True,
                        type=str, help='The path to the right hand MANO model')
    parser.add_argument('--output-folder', dest='output_folder',
                        required=True, type=str,
                        help='The path to the output folder')

    args = parser.parse_args()

    smplh_fn = args.smplh_fn
    mano_left_fn = args.mano_left_fn
    mano_right_fn = args.mano_right_fn
    output_folder = args.output_folder

    if not osp.exists(output_folder):
        print('Creating directory: {}'.format(output_folder))
        os.makedirs(output_folder)

    merge_models(smplh_fn, mano_left_fn, mano_right_fn, output_folder)
-------------------------------------------------------------------------------- 1 | # Model parameter transfer 2 | 3 | ## Table of Contents 4 | * [License](#license) 5 | * [Description](#description) 6 | * [Using the code](#using-the-code) 7 | * [Data](#data) 8 | * [Steps](#steps) 9 | * [SMPL to SMPL-X](#smpl-to-smpl-x) 10 | * [SMPL-X to SMPL](#smpl-x-to-smpl) 11 | * [SMPL+H to SMPL](#smpl%2Bh-to-smpl) 12 | * [SMPL to SMPL+H](#smpl-to-smpl%2Bh) 13 | * [SMPL+H to SMPL-X](#smpl%2Bh-to-smpl-x) 14 | * [SMPL-X to SMPL+H](#smpl-x-to-smpl%2Bh) 15 | * [Visualize correspondences](visualize-correspondences) 16 | * [Citation](#citation) 17 | * [Acknowledgments](#acknowledgments) 18 | * [Contact](#contact) 19 | 20 | ## License 21 | 22 | Software Copyright License for **non-commercial scientific research purposes**. 23 | Please read carefully the [terms and conditions](https://github.com/vchoutas/smplx/blob/master/LICENSE) and any accompanying documentation before you download and/or use the SMPL-X/SMPLify-X model, data and software, (the "Model & Software"), including 3D meshes, blend weights, blend shapes, textures, software, scripts, and animations. By downloading and/or using the Model & Software (including downloading, cloning, installing, and any other use of this github repository), you acknowledge that you have read these terms and conditions, understand them, and agree to be bound by them. If you do not agree with these terms and conditions, you must not download and/or use the Model & Software. Any infringement of the terms of this agreement will automatically terminate your rights under this [License](./LICENSE). 24 | 25 | ## Description 26 | 27 | The repository contains code for converting model parameters of one model to 28 | another. **Never** copy parameters between the models. You will not get the 29 | same poses. SMPL, SMPL+H and SMPL-X shape spaces are **NOT** compatible, since 30 | each model is the result of a different training process. 
31 | A more detailed explanation on how we extract correspondences 32 | between the models and the loss function used to estimate the parameters can be 33 | found [here](./docs/transfer.md). 34 | 35 | ## Requirements 36 | 37 | 1. Install [mesh](https://github.com/MPI-IS/mesh) 38 | 2. Start by cloning the SMPL-X repo: 39 | ```Shell 40 | git clone https://github.com/vchoutas/smplx.git 41 | ``` 42 | 3. Run the following command to install all necessary requirements 43 | ```Shell 44 | pip install -r requirements.txt 45 | ``` 46 | 4. Install the Torch Trust Region optimizer by following the instructions [here](https://github.com/vchoutas/torch-trust-ncg) 47 | 5. Install loguru 48 | 6. Install open3d 49 | 7. Install omegaconf 50 | 51 | ## Using the code 52 | 53 | ### Data 54 | 55 | Register on the [SMPL-X website](http://smpl-x.is.tue.mpg.de/), go to the 56 | downloads section to get the correspondences and sample data, 57 | by clicking on the *Model correspondences* button. 58 | Create a folder 59 | named `transfer_data` and extract the downloaded zip there. You should have the 60 | following folder structure now: 61 | 62 | ```bash 63 | transfer_data 64 | ├── meshes 65 | │   ├── smpl 66 | │   ├── smplx 67 | ├── smpl2smplh_def_transfer.pkl 68 | ├── smpl2smplx_deftrafo_setup.pkl 69 | ├── smplh2smpl_def_transfer.pkl 70 | ├── smplh2smplx_deftrafo_setup.pkl 71 | ├── smplx2smpl_deftrafo_setup.pkl 72 | ├── smplx2smplh_deftrafo_setup.pkl 73 | ├── smplx_mask_ids.npy 74 | ``` 75 | 76 | ### Steps 77 | 78 | First, break the motion into a set of pose `.obj` files. Depending on how the 79 | SMPL-* parameters are stored this code will differ. 
For the example AMASS data 80 | in this repository you can use the example code here: 81 | 82 | ``` 83 | python write_obj.py --model-folder ../models/ --motion-file ../transfer_data/support_data/github_data/amass_sample.npz --output-folder ../transfer_data/meshes/amass_sample/ 84 | ``` 85 | 86 | To run the `transfer_model` utility you will require a `.yaml` config file, 87 | which can point to the location the output `.obj` files have been saved. Use the 88 | templates in `config_files` in the root of this repository. To convert the 89 | sample AMASS code to SMPL-X: 90 | 91 | ``` 92 | python -m transfer_model --exp-cfg config_files/smplh2smplx_as.yaml 93 | ``` 94 | 95 | Finally, the output `.obj` files have to be merged into a single motion 96 | sequence. Example code to do this in a way that matches `SMPL-X` AMASS archives 97 | can be found in `merge_output.py` and run as follows: 98 | 99 | ``` 100 | python merge_output.py --gender neutral ../output 101 | ``` 102 | 103 | Debug notes describing common problems encountered during this can be found 104 | [here](https://github.com/gngdb/smplx/blob/debug/transfer_model/DEBUG_NOTES.md). 105 | Problems are also discussed in 106 | [two](https://github.com/vchoutas/smplx/issues/82) 107 | [issues](https://github.com/vchoutas/smplx/issues/75). 108 | 109 | ### SMPL to SMPL-X 110 | 111 | To run the code to convert SMPL meshes to SMPL-X parameters use the following command: 112 | ```Shell 113 | python -m transfer_model --exp-cfg config_files/smpl2smplx.yaml 114 | ``` 115 | This should be run from the top directory of the repository. 116 | 117 | The file *smpl2smplx.yaml* contains a sample configuration that reads meshes from a folder, 118 | processes them and returns pkl files with SMPL-X parameters. To run on your own data create a folder 119 | with SMPL meshes, in either ply or obj format, change the path in the config file and run the code. 
120 | 
121 | ### SMPL-X to SMPL
122 | 
123 | To run the code to convert SMPL-X meshes to SMPL parameters use the following command:
124 | ```Shell
125 | python -m transfer_model --exp-cfg config_files/smplx2smpl.yaml
126 | ```
127 | 
128 | The file *smplx2smpl.yaml* contains a sample configuration that reads meshes from a folder,
129 | processes them and returns pkl files with SMPL parameters. To run on your own data create a folder
130 | with SMPL-X meshes, in either ply or obj format, change the path in the config file and run the code.
131 | When creating the SMPL-X meshes, do not use the hand and face parameters.
132 | Naturally, you will lose all hand and face information if you choose this, since
133 | SMPL cannot model them.
134 | 
135 | 
136 | ### SMPL+H to SMPL
137 | 
138 | To run the code to convert SMPL+H meshes to SMPL parameters use the following command from the root `smplx` directory:
139 | ```Shell
140 | python -m transfer_model --exp-cfg config_files/smplh2smpl.yaml
141 | ```
142 | This should be run from the top directory of the repository.
143 | 
144 | The file *smplh2smpl.yaml* contains a sample configuration that reads meshes from a folder,
145 | processes them and returns pkl files with SMPL parameters. To run on your own data create a folder
146 | with SMPL+H meshes, in either ply or obj format, change the path in the config file and run the code.
147 | Note that using this direction means that you will lose information on the
148 | hands.
149 | 
150 | 
151 | ### SMPL to SMPL+H
152 | 
153 | To run the code to convert SMPL meshes to SMPL+H parameters use the following command:
154 | ```Shell
155 | python -m transfer_model --exp-cfg config_files/smpl2smplh.yaml
156 | ```
157 | This should be run from the top directory of the repository.
158 | 
159 | The file *smpl2smplh.yaml* contains a sample configuration that reads meshes from a folder,
160 | processes them and returns pkl files with SMPL+H parameters.
To run on your own data create a folder
161 | with SMPL meshes, in either ply or obj format, change the path in the config file and run the code.
162 | 
163 | ### SMPL+H to SMPL-X
164 | 
165 | To run the code to convert SMPL+H meshes to SMPL-X parameters use the following command:
166 | ```Shell
167 | python -m transfer_model --exp-cfg config_files/smplh2smplx.yaml
168 | ```
169 | This should be run from the top directory of the repository.
170 | 
171 | The file *smplh2smplx.yaml* contains a sample configuration that reads meshes from a folder,
172 | processes them and returns pkl files with SMPL-X parameters. To run on your own data create a folder
173 | with SMPL+H meshes, in either ply or obj format, change the path in the config file and run the code.
174 | 
175 | 
176 | ### SMPL-X to SMPL+H
177 | 
178 | To run the code to convert SMPL-X meshes to SMPL+H parameters use the following command:
179 | ```Shell
180 | python -m transfer_model --exp-cfg config_files/smplx2smplh.yaml
181 | ```
182 | This should be run from the top directory of the repository.
183 | 
184 | The file *smplx2smplh.yaml* contains a sample configuration that reads meshes from a folder,
185 | processes them and returns pkl files with SMPL+H parameters. To run on your own data create a folder
186 | with SMPL-X meshes, in either ply or obj format, change the path in the config file and run the code.
187 | Make sure that you do not use the jaw pose and expression parameters to generate
188 | the meshes.
189 | 
190 | 
191 | ## Visualize correspondences
192 | 
193 | To visualize correspondences:
194 | ```Shell
195 | python vis_correspondences.py --exp-cfg config_files/smpl2smplx.yaml --exp-opts colors_path PATH_TO_SMPL_COLORS
196 | ```
197 | You should then see the following image. Points with similar color are in
198 | correspondence.
199 | ![Correspondence example](./docs/images/smpl_smplx_correspondence.png)
200 | 
201 | ## Citation
202 | 
203 | Depending on which model is loaded for your project, i.e.
SMPL-X or SMPL+H or SMPL, please cite the most relevant work: 204 | 205 | ``` 206 | @article{SMPL:2015, 207 | author = {Loper, Matthew and Mahmood, Naureen and Romero, Javier and Pons-Moll, Gerard and Black, Michael J.}, 208 | title = {{SMPL}: A Skinned Multi-Person Linear Model}, 209 | journal = {ACM Transactions on Graphics, (Proc. SIGGRAPH Asia)}, 210 | month = oct, 211 | number = {6}, 212 | pages = {248:1--248:16}, 213 | publisher = {ACM}, 214 | volume = {34}, 215 | year = {2015} 216 | } 217 | ``` 218 | 219 | ``` 220 | @article{MANO:SIGGRAPHASIA:2017, 221 | title = {Embodied Hands: Modeling and Capturing Hands and Bodies Together}, 222 | author = {Romero, Javier and Tzionas, Dimitrios and Black, Michael J.}, 223 | journal = {ACM Transactions on Graphics, (Proc. SIGGRAPH Asia)}, 224 | volume = {36}, 225 | number = {6}, 226 | pages = {245:1--245:17}, 227 | series = {245:1--245:17}, 228 | publisher = {ACM}, 229 | month = nov, 230 | year = {2017}, 231 | url = {http://doi.acm.org/10.1145/3130800.3130883}, 232 | month_numeric = {11} 233 | } 234 | ``` 235 | 236 | 237 | ``` 238 | @inproceedings{SMPL-X:2019, 239 | title = {Expressive Body Capture: 3D Hands, Face, and Body from a Single Image}, 240 | author = {Pavlakos, Georgios and Choutas, Vasileios and Ghorbani, Nima and Bolkart, Timo and Osman, Ahmed A. A. and Tzionas, Dimitrios and Black, Michael J.}, 241 | booktitle = {Proceedings IEEE Conf. on Computer Vision and Pattern Recognition (CVPR)}, 242 | year = {2019} 243 | } 244 | ``` 245 | 246 | 247 | ## Acknowledgments 248 | The code of this repository was implemented by [Vassilis Choutas](vassilis.choutas@tuebingen.mpg.de), 249 | based on a Chumpy implementation from [Timo Bolkart](timo.bolkart@tuebingen.mpg.de). 250 | 251 | ## Contact 252 | 253 | For questions, please contact [smplx@tue.mpg.de](smplx@tue.mpg.de). 
254 | -------------------------------------------------------------------------------- /transfer_model/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | -------------------------------------------------------------------------------- /transfer_model/__main__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | import os 18 | import os.path as osp 19 | import sys 20 | import pickle 21 | 22 | import numpy as np 23 | import open3d as o3d 24 | import torch 25 | from loguru import logger 26 | from tqdm import tqdm 27 | 28 | from smplx import build_layer 29 | 30 | from .config import parse_args 31 | from .data import build_dataloader 32 | from .transfer_model import run_fitting 33 | from .utils import read_deformation_transfer, np_mesh_to_o3d 34 | 35 | 36 | def main() -> None: 37 | exp_cfg = parse_args() 38 | 39 | if torch.cuda.is_available() and exp_cfg["use_cuda"]: 40 | device = torch.device('cuda') 41 | else: 42 | device = torch.device('cpu') 43 | if exp_cfg["use_cuda"]: 44 | if input("use_cuda=True and GPU is not available, using CPU instead," 45 | " would you like to continue? (y/n)") != "y": 46 | sys.exit(3) 47 | 48 | logger.remove() 49 | logger.add( 50 | lambda x: tqdm.write(x, end=''), level=exp_cfg.logger_level.upper(), 51 | colorize=True) 52 | 53 | output_folder = osp.expanduser(osp.expandvars(exp_cfg.output_folder)) 54 | logger.info(f'Saving output to: {output_folder}') 55 | os.makedirs(output_folder, exist_ok=True) 56 | 57 | model_path = exp_cfg.body_model.folder 58 | body_model = build_layer(model_path, **exp_cfg.body_model) 59 | logger.info(body_model) 60 | body_model = body_model.to(device=device) 61 | 62 | deformation_transfer_path = exp_cfg.get('deformation_transfer_path', '') 63 | def_matrix = read_deformation_transfer( 64 | deformation_transfer_path, device=device) 65 | 66 | # Read mask for valid vertex ids 67 | mask_ids_fname = osp.expandvars(exp_cfg.mask_ids_fname) 68 | mask_ids = None 69 | if osp.exists(mask_ids_fname): 70 | logger.info(f'Loading mask ids from: {mask_ids_fname}') 71 | mask_ids = np.load(mask_ids_fname) 72 | mask_ids = torch.from_numpy(mask_ids).to(device=device) 73 | else: 74 | logger.warning(f'Mask ids fname not found: {mask_ids_fname}') 75 | 76 | 
data_obj_dict = build_dataloader(exp_cfg) 77 | 78 | dataloader = data_obj_dict['dataloader'] 79 | 80 | for ii, batch in enumerate(tqdm(dataloader)): 81 | for key in batch: 82 | if torch.is_tensor(batch[key]): 83 | batch[key] = batch[key].to(device=device) 84 | var_dict = run_fitting( 85 | exp_cfg, batch, body_model, def_matrix, mask_ids) 86 | paths = batch['paths'] 87 | 88 | for ii, path in enumerate(paths): 89 | _, fname = osp.split(path) 90 | 91 | output_path = osp.join( 92 | output_folder, f'{osp.splitext(fname)[0]}.pkl') 93 | with open(output_path, 'wb') as f: 94 | pickle.dump(var_dict, f) 95 | 96 | output_path = osp.join( 97 | output_folder, f'{osp.splitext(fname)[0]}.obj') 98 | mesh = np_mesh_to_o3d( 99 | var_dict['vertices'][ii], var_dict['faces']) 100 | o3d.io.write_triangle_mesh(output_path, mesh) 101 | 102 | 103 | if __name__ == '__main__': 104 | main() 105 | -------------------------------------------------------------------------------- /transfer_model/config/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from .cmd_parser import parse_args 18 | -------------------------------------------------------------------------------- /transfer_model/config/body_model_defaults.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from omegaconf import OmegaConf 18 | from loguru import logger 19 | from dataclasses import dataclass 20 | from .utils_cfg import Variable, Pose 21 | 22 | 23 | @dataclass 24 | class PCA: 25 | num_comps: int = 12 26 | flat_hand_mean: bool = False 27 | 28 | 29 | @dataclass 30 | class PoseWithPCA(Pose): 31 | pca: PCA = PCA() 32 | 33 | 34 | @dataclass 35 | class Shape(Variable): 36 | num: int = 10 37 | 38 | 39 | @dataclass 40 | class Expression(Variable): 41 | num: int = 10 42 | 43 | 44 | @dataclass 45 | class SMPL: 46 | betas: Shape = Shape() 47 | global_rot: Pose = Pose() 48 | body_pose: Pose = Pose() 49 | translation: Variable = Variable() 50 | 51 | 52 | @dataclass 53 | class SMPLH(SMPL): 54 | left_hand_pose: PoseWithPCA = PoseWithPCA() 55 | right_hand_pose: PoseWithPCA = PoseWithPCA() 56 | 57 | 58 | @dataclass 59 | class SMPLX(SMPLH): 60 | expression: Expression = Expression() 61 | jaw_pose: Pose = Pose() 62 | leye_pose: Pose = Pose() 63 | reye_pose: Pose = Pose() 64 | 65 | 66 | @dataclass 67 | class MANO: 68 | betas: Shape = Shape() 69 | wrist_pose: Pose = Pose() 70 | hand_pose: PoseWithPCA = PoseWithPCA() 71 | translation: Variable = Variable() 72 | 73 | 74 | @dataclass 75 | class FLAME: 76 | betas: Shape = Shape() 77 | expression: Expression = Expression() 78 | global_rot: Pose = Pose() 79 | neck_pose: Pose = Pose() 80 | jaw_pose: Pose = Pose() 81 | leye_pose: Pose = Pose() 82 | reye_pose: Pose = Pose() 83 | 84 | 85 | @dataclass 86 | class BodyModelConfig: 87 | model_type: str = 'smplx' 88 | use_compressed: bool = True 89 | folder: str = 'models' 90 | gender: str = 'neutral' 91 | extra_joint_path: str = '' 92 | ext: str = 'npz' 93 | 94 | num_expression_coeffs: int = 10 95 | 96 | use_face_contour: bool = True 97 | joint_regressor_path: str = '' 98 | 99 | smpl: SMPL = SMPL() 100 | star: SMPL = SMPL() 101 | smplh: SMPLH = SMPLH() 102 | smplx: SMPLX = SMPLX() 103 | mano: MANO = 
MANO() 104 | flame: FLAME = FLAME() 105 | 106 | 107 | conf = OmegaConf.structured(BodyModelConfig) 108 | -------------------------------------------------------------------------------- /transfer_model/config/cmd_parser.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | 20 | import sys 21 | import os 22 | 23 | import argparse 24 | from loguru import logger 25 | 26 | from omegaconf import OmegaConf 27 | from .defaults import conf as default_conf 28 | 29 | 30 | def parse_args(argv=None) -> OmegaConf: 31 | arg_formatter = argparse.ArgumentDefaultsHelpFormatter 32 | 33 | description = 'Model transfer script' 34 | parser = argparse.ArgumentParser(formatter_class=arg_formatter, 35 | description=description) 36 | 37 | parser.add_argument('--exp-cfg', type=str, dest='exp_cfg', 38 | help='The configuration of the experiment') 39 | parser.add_argument('--exp-opts', default=[], dest='exp_opts', 40 | nargs='*', 41 | help='Command line arguments') 42 | 43 | cmd_args = parser.parse_args() 44 | 45 | cfg = default_conf.copy() 46 | if cmd_args.exp_cfg: 47 | 
cfg.merge_with(OmegaConf.load(cmd_args.exp_cfg)) 48 | if cmd_args.exp_opts: 49 | cfg.merge_with(OmegaConf.from_cli(cmd_args.exp_opts)) 50 | 51 | return cfg 52 | -------------------------------------------------------------------------------- /transfer_model/config/dataset_defaults.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from omegaconf import OmegaConf 18 | from dataclasses import dataclass 19 | 20 | 21 | @dataclass 22 | class MeshFolder: 23 | data_folder: str = 'data/meshes' 24 | 25 | 26 | @dataclass 27 | class DatasetConfig: 28 | num_workers: int = 0 29 | name: str = 'mesh-folder' 30 | mesh_folder: MeshFolder = MeshFolder() 31 | 32 | 33 | conf = OmegaConf.structured(DatasetConfig) 34 | -------------------------------------------------------------------------------- /transfer_model/config/defaults.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 
5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from typing import Tuple, Optional 18 | from copy import deepcopy 19 | # from yacs.config import CfgNode as CN 20 | from dataclasses import dataclass 21 | from omegaconf import OmegaConf 22 | 23 | from .loss_defaults import conf as loss_cfg, LossConfig 24 | from .dataset_defaults import conf as dataset_cfg, DatasetConfig 25 | from .optim_defaults import conf as optim_cfg, OptimConfig 26 | from .body_model_defaults import conf as body_model_cfg, BodyModelConfig 27 | 28 | 29 | @dataclass 30 | class EdgeFitting: 31 | per_part: bool = False 32 | reduction: str = 'mean' 33 | 34 | 35 | @dataclass 36 | class VertexFitting: 37 | per_part: bool = False 38 | reduction: str = 'mean' 39 | type: str = 'l2' 40 | 41 | 42 | @dataclass 43 | class Config: 44 | use_cuda: bool = True 45 | log_file: str = '/tmp/logs' 46 | output_folder: str = 'output' 47 | save_verts: bool = True 48 | save_joints: bool = True 49 | save_mesh: bool = False 50 | save_img_summaries: bool = True 51 | summary_steps: int = 5 52 | degrees: Tuple[float] = (90,) 53 | float_type: str = 'float' 54 | logger_level: str = 'INFO' 55 | interactive: bool = True 56 | batch_size: Optional[int] = 1 57 | color_path: str = 'data/smpl_with_colors.ply' 58 | 59 | optim: OptimConfig = optim_cfg 60 | datasets: DatasetConfig = dataset_cfg 61 | losses: LossConfig = loss_cfg 62 | body_model: BodyModelConfig = body_model_cfg 63 | 64 | 
deformation_transfer_path: str = '' 65 | mask_ids_fname: str = '' 66 | 67 | per_part: bool = True 68 | edge_fitting: EdgeFitting = EdgeFitting() 69 | 70 | 71 | conf = OmegaConf.structured(Config) 72 | -------------------------------------------------------------------------------- /transfer_model/config/loss_defaults.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de
# from yacs.config import CfgNode as CN

from typing import List, Tuple, Union
from omegaconf import OmegaConf
from loguru import logger
from dataclasses import dataclass, make_dataclass


@dataclass
class LossTemplate:
    # Template for a single loss term of the fitting objective.
    type: str = 'l2'  # loss flavor, e.g. 'l2'
    active: bool = False  # whether this term is currently enabled
    # Weight(s) applied to this term; presumably one entry per
    # optimization stage — TODO confirm against losses/losses.py.
    weight: Tuple[float] = (0.0,)
    requires_grad: bool = True  # whether the term's variables get gradients
    # Step/stage at which the term is switched on — presumably; confirm.
    enable: int = 0


@dataclass
class LossConfig:
    # Name of the loss preset used by the fitting code.
    type: str = 'smplify-x'


# Structured (typed) OmegaConf node built from the dataclass above.
conf = OmegaConf.structured(LossConfig)
--------------------------------------------------------------------------------
/transfer_model/config/optim_defaults.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from typing import Tuple 18 | from omegaconf import OmegaConf 19 | from dataclasses import dataclass 20 | 21 | 22 | @dataclass 23 | class LBFGS: 24 | line_search_fn: str = 'strong_wolfe' 25 | max_iter: int = 50 26 | 27 | 28 | @dataclass 29 | class SGD: 30 | momentum: float = 0.9 31 | nesterov: bool = True 32 | 33 | 34 | @dataclass 35 | class ADAM: 36 | betas: Tuple[float, float] = (0.9, 0.999) 37 | eps: float = 1e-08 38 | amsgrad: bool = False 39 | 40 | 41 | @dataclass 42 | class RMSProp: 43 | alpha: float = 0.99 44 | 45 | 46 | @dataclass 47 | class TrustRegionNewtonCG: 48 | max_trust_radius: float = 1000 49 | initial_trust_radius: float = 0.05 50 | eta: float = 0.15 51 | gtol: float = 1e-05 52 | 53 | 54 | @dataclass 55 | class OptimConfig: 56 | type: str = 'trust-ncg' 57 | lr: float = 1.0 58 | gtol: float = 1e-8 59 | ftol: float = -1.0 60 | maxiters: int = 100 61 | 62 | lbfgs: LBFGS = LBFGS() 63 | sgd: SGD = SGD() 64 | adam: ADAM = ADAM() 65 | trust_ncg: TrustRegionNewtonCG = TrustRegionNewtonCG() 66 | 67 | 68 | conf = OmegaConf.structured(OptimConfig) 69 | -------------------------------------------------------------------------------- /transfer_model/config/utils_cfg.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). 
# acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

from typing import Tuple
from dataclasses import dataclass


@dataclass
class Variable:
    # Base config for one optimizable quantity of a body model:
    create: bool = True  # whether to create the corresponding parameter
    requires_grad: bool = True  # whether it is optimized (receives grads)


@dataclass
class Pose(Variable):
    # Rotation parameterization; 'aa' is presumably axis-angle —
    # TODO confirm against utils/pose_utils.py.
    type: str = 'aa'
--------------------------------------------------------------------------------
/transfer_model/data/__init__.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

from .build import build_dataloader
--------------------------------------------------------------------------------
/transfer_model/data/build.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from typing import List, Tuple 18 | import sys 19 | 20 | import torch 21 | import torch.utils.data as dutils 22 | from .datasets import MeshFolder 23 | 24 | from loguru import logger 25 | 26 | 27 | def build_dataloader(exp_cfg): 28 | dset_name = exp_cfg.datasets.name 29 | if dset_name == 'mesh-folder': 30 | mesh_folder_cfg = exp_cfg.datasets.mesh_folder 31 | key, *_ = mesh_folder_cfg.keys() 32 | value = mesh_folder_cfg[key] 33 | logger.info(f'{key}: {value}\n') 34 | dataset = MeshFolder(**mesh_folder_cfg) 35 | else: 36 | raise ValueError(f'Unknown dataset: {dset_name}') 37 | 38 | batch_size = exp_cfg.batch_size 39 | num_workers = exp_cfg.datasets.num_workers 40 | 41 | logger.info( 42 | f'Creating dataloader with B={batch_size}, workers={num_workers}') 43 | dataloader = dutils.DataLoader(dataset, 44 | batch_size=batch_size, 45 | num_workers=num_workers, 46 | shuffle=False) 47 | 48 | return {'dataloader': dataloader, 'dataset': dataset} 49 | -------------------------------------------------------------------------------- /transfer_model/data/datasets/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 
5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from .mesh import MeshFolder 18 | -------------------------------------------------------------------------------- /transfer_model/data/datasets/mesh.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

from typing import Optional, Tuple

import sys
import os
import os.path as osp

import numpy as np
from psbody.mesh import Mesh
import trimesh

import torch
from torch.utils.data import Dataset
from loguru import logger


class MeshFolder(Dataset):
    def __init__(
        self,
        data_folder: str,
        transforms=None,
        exts: Optional[Tuple] = None
    ) -> None:
        ''' Dataset similar to ImageFolder that reads meshes with the same
            topology.

            Parameters
            ----------
            data_folder:
                Folder scanned (non-recursively) for mesh files.
            transforms:
                Unused; kept for interface compatibility.
            exts:
                Accepted file extensions. Defaults to ('.obj', '.ply').
        '''
        if exts is None:
            exts = ('.obj', '.ply')

        self.data_folder = osp.expandvars(data_folder)

        logger.info(
            f'Building mesh folder dataset for folder: {self.data_folder}')

        # Sort the directory listing so that the dataset ordering is
        # deterministic across platforms and runs; os.listdir returns
        # entries in arbitrary order.
        self.data_paths = np.array([
            osp.join(self.data_folder, fname)
            for fname in sorted(os.listdir(self.data_folder))
            if any(fname.endswith(ext) for ext in exts)
        ])
        self.num_items = len(self.data_paths)

    def __len__(self) -> int:
        return self.num_items

    def __getitem__(self, index):
        mesh_path = self.data_paths[index]

        # process=False keeps the original vertex ordering, which is required
        # to preserve correspondences between meshes of the same topology.
        mesh = trimesh.load(mesh_path, process=False)

        return {
            'vertices': np.asarray(mesh.vertices, dtype=np.float32),
            'faces': np.asarray(mesh.faces, dtype=np.int32),
            'indices': index,
            'paths': mesh_path,
        }

# ---------------------------------------------------------------------------
# transfer_model/docs/images/smpl_smplx_correspondence.png (binary, linked):
# https://raw.githubusercontent.com/vchoutas/smplx/1265df7ba545e8b00f72e7c557c766e15c71632f/transfer_model/docs/images/smpl_smplx_correspondence.png
# ---------------------------------------------------------------------------
# transfer_model/docs/transfer.md:
-------------------------------------------------------------------------------- 1 | # Converting SMPL to SMPL-X 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | The SMPL body model [1] is in wide use in computer vision and graphics for both 15 | research and industrial applications. While widely used, SMPL lacks details like 16 | articulated hands and an expressive face. The SMPL-X model [3] addresses this 17 | and includes both the face and hands. 18 | 19 | Many legacy applications and datasets are built on SMPL and people want to 20 | "upgrade" them to SMPL-X. While SMPL-X is based on the SMPL technology, they are 21 | not completely interchangeable. 22 | 23 | Importantly the shape and pose parameters of SMPL and SMPL-X seem tantalizingly 24 | similar. Sadly, you can't just take them from one model and use them with the 25 | other. In particular, the joint locations in SMPL-X differ from those in SMPL, 26 | meaning that the pose (theta) parameters are not interchangeable. 27 | 28 | Here we describe a tool to convert back and forth between the models. This 29 | involves fitting one model to the other to recover the right parameters. 30 | 31 | The first step in this process is to establish a mapping between SMPL and 32 | SMPL-X, since their topologies differ. For this, we assume we have a SMPL-X 33 | template mesh registered to the SMPL template. Now that the two surfaces match, 34 | we compute and store the following quantities: 35 | 36 | * For each SMPL-X vertex find the nearest point on the SMPL mesh and store: 37 | * The index $t_i$ of the triangle where the nearest point is located. 38 | * Store the barycentric coordinates of the nearest point with respect to 39 | the SMPL triangle $\left[a_i, b_i, c_i\right]$. 40 | 41 | 42 | SMPL-X and SMPL share the same topology up to the neck, therefore the Barycentric coordinates of 43 | these points are a permutation of `[1.0, 0.0, 0.0]`. 
We also store a mask of 44 | valid vertices, to remove points that have no match between the two meshes, 45 | such as the eyeballs or the inner mouth. If we color-code the correspondences 46 | we end up with the following image, where the left mesh is SMPL and the right 47 | one is SMPL-X: 48 | 49 | ![Correspondences](./images/smpl_smplx_correspondence.png) 50 | 51 | Now that we have established the correspondences between the models, we can fit 52 | SMPL-X to the SMPL annotations. 53 | 1. The first step is to build a mesh with the SMPL-X topology from the posed 54 | SMPL annotations. 55 | 56 | 1. If $t_i$ is the index of the corresponding SMPL triangle for the i-th SMPL-X 57 | vertex, then let $f_i \in \mathbb{N}^3$ be the 3 indices of the SMPL vertices that 58 | form the triangle. 59 | 2. Let $m_i$ be the binary mask value for the validity of this vertex. 60 | 2. The i-th vertex is computed using the barycentrics $\left[a_i, b_i, c_i\right]$ as: 61 | 62 | $v_i^{SMPL-X} = a_i * v_{f_i^0}^{SMPL} + b_i * v_{f_i^1}^{SMPL} + c_i * v_{f_i^2}^{SMPL}$ 63 | 64 | 2. Now that we have a mesh in SMPL-X topology, we need to find the SMPL-X 65 | parameters, i.e. pose $\theta$, shape $\beta$, expression $\psi$ and translation $\gamma$, that best explain it. 66 | We use an iterative optimization scheme to 67 | recover the parameters: 68 | 69 | 1. Optimize over the pose with a 3D edge term. Make sure that we only use 70 | the valid edges, i.e. those whose both end points are found on both 71 | meshes: 72 | 73 | $L_1\left(\theta\right) = \sum_{(i, j) \in \mathcal{E}} m_i m_j \left\lVert(v_i - v_j) - (\hat{v}_i - \hat{v}_j) \right\rVert_2^2$ 74 | 75 | 2. Optimize over the translation vector $\gamma$ to align the two models: 76 | 77 | $L_2\left({\gamma}\right) = \sum_{i} m_i \left\lVert v_i - \hat{v}_i \right\rVert$ 78 | 79 | 3. 
Optimize over all parameters, to get the tightest possible fit: 80 | 81 | $L_3\left((\theta, \beta, \psi, \gamma)\right) = \sum_{i} m_i \left\lVert v_i - \hat{v}_i \right\rVert_2^2$ 82 | 83 | 84 | So now, if you have data in SMPL format, you can convert it to SMPL-X. This 85 | should allow you to use it for training. 86 | 87 | For the inverse mapping, from SMPL-X to 88 | SMPL, we follow a similar process to generate the correspondences and then optimize 89 | over the SMPL parameters that best fit the 90 | transferred mesh. Of course, if you choose to do this, you will lose all 91 | information about the hands and the face, since SMPL is not able to model this. 92 | 93 | For SMPL and SMPL+H [2], the process is easier, since they share the same 94 | topology. We can therefore skip the first step, since we already know the 95 | correspondences, compute a SMPL or SMPL+H mesh and estimate the parameters of 96 | the other model. If we wish to transfer SMPL+H annotations, such as the AMASS 97 | motion capture data [4], to SMPL-X, then we can use the correspondences of the 98 | SMPL to SMPL-X mapping. 99 | 100 | ## Bibliography 101 | 102 | [1]: Loper, M., Mahmood, N., Romero, J., Pons-Moll, G., Black, M.J.: SMPL: A 103 | skinned multi-person linear model. ACM Transactions on Graphics (TOG) - Proceedings of ACM SIGGRAPH Asia 34(6), 248:1–248:16 (2015) 104 | 105 | [2]: Romero, J., Tzionas, D., Black, M.J.: Embodied hands: Modeling and capturing 106 | hands and bodies together. ACM Transactions on Graphics (TOG) - Proceedings 107 | of ACM SIGGRAPH Asia 36(6), 245:1–245:17 (2017) 108 | 109 | [3]: Pavlakos, G., Choutas, V., Ghorbani, N., Bolkart, T., Osman, A.A.A., Tzionas, 110 | D., Black, M.J.: Expressive body capture: 3D hands, face, and body from a single 111 | image. In: Proceedings of the IEEE Conference on Computer Vision and Pattern 112 | Recognition (CVPR). pp. 
10967–10977 (2019) 113 | 114 | [4]: Mahmood, N., Ghorbani, N., Troje, N.F., Pons-Moll, G., Black, M.J.: Amass: 115 | Archive of motion capture as surface shapes. ICCV (2019) 116 | -------------------------------------------------------------------------------- /transfer_model/losses/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from .losses import * 18 | -------------------------------------------------------------------------------- /transfer_model/losses/losses.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). 
acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | from __future__ import print_function 17 | from __future__ import absolute_import 18 | from __future__ import division 19 | 20 | import sys 21 | import time 22 | from typing import Callable, Iterator, Union, Optional, List 23 | 24 | import os.path as osp 25 | import yaml 26 | from loguru import logger 27 | 28 | import pickle 29 | 30 | import numpy as np 31 | 32 | import torch 33 | import torch.autograd as autograd 34 | import torch.nn as nn 35 | import torch.nn.functional as F 36 | 37 | from .utils import get_reduction_method 38 | 39 | __all__ = [ 40 | 'VertexEdgeLoss', 41 | 'build_loss', 42 | ] 43 | 44 | 45 | def build_loss(type='l2', reduction='mean', **kwargs) -> nn.Module: 46 | logger.debug(f'Building loss: {type}') 47 | if type == 'l2': 48 | return WeightedMSELoss(reduction=reduction, **kwargs) 49 | elif type == 'vertex-edge': 50 | return VertexEdgeLoss(reduction=reduction, **kwargs) 51 | elif type == 'l1': 52 | return nn.L1Loss() 53 | else: 54 | raise ValueError(f'Unknown loss type: {type}') 55 | 56 | 57 | class WeightedMSELoss(nn.Module): 58 | def __init__(self, reduction='mean', **kwargs): 59 | super(WeightedMSELoss, self).__init__() 60 | self.reduce_str = reduction 61 | self.reduce = get_reduction_method(reduction) 62 | 63 | def forward(self, input, target, weights=None): 64 | diff = input - target 65 | if weights is None: 66 | return diff.pow(2).sum() / diff.shape[0] 67 | else: 68 | return ( 69 | weights.unsqueeze(dim=-1) * diff.pow(2)).sum() / diff.shape[0] 70 | 71 | 72 | class VertexEdgeLoss(nn.Module): 73 | def __init__(self, norm_type='l2', 74 | gt_edges=None, 75 | gt_edge_path='', 76 | est_edges=None, 77 | est_edge_path='', 78 | robustifier=None, 79 | edge_thresh=0.0, epsilon=1e-8, 80 | reduction='sum', 81 | **kwargs): 82 | super(VertexEdgeLoss, self).__init__() 83 | 84 | 
assert norm_type in ['l1', 'l2'], 'Norm type must be [l1, l2]' 85 | self.norm_type = norm_type 86 | self.epsilon = epsilon 87 | self.reduction = reduction 88 | assert self.reduction in ['sum', 'mean'] 89 | logger.info(f'Building edge loss with' 90 | f' norm_type={norm_type},' 91 | f' reduction={reduction},' 92 | ) 93 | 94 | gt_edge_path = osp.expandvars(gt_edge_path) 95 | est_edge_path = osp.expandvars(est_edge_path) 96 | assert osp.exists(gt_edge_path) or gt_edges is not None, ( 97 | 'gt_edges must not be None or gt_edge_path must exist' 98 | ) 99 | assert osp.exists(est_edge_path) or est_edges is not None, ( 100 | 'est_edges must not be None or est_edge_path must exist' 101 | ) 102 | if osp.exists(gt_edge_path) and gt_edges is None: 103 | gt_edges = np.load(gt_edge_path) 104 | if osp.exists(est_edge_path) and est_edges is None: 105 | est_edges = np.load(est_edge_path) 106 | 107 | self.register_buffer( 108 | 'gt_connections', torch.tensor(gt_edges, dtype=torch.long)) 109 | self.register_buffer( 110 | 'est_connections', torch.tensor(est_edges, dtype=torch.long)) 111 | 112 | def extra_repr(self): 113 | msg = [ 114 | f'Norm type: {self.norm_type}', 115 | ] 116 | if self.has_connections: 117 | msg.append( 118 | f'GT Connections shape: {self.gt_connections.shape}' 119 | ) 120 | msg.append( 121 | f'Est Connections shape: {self.est_connections.shape}' 122 | ) 123 | return '\n'.join(msg) 124 | 125 | def compute_edges(self, points, connections): 126 | edge_points = torch.index_select( 127 | points, 1, connections.view(-1)).reshape(points.shape[0], -1, 2, 3) 128 | return edge_points[:, :, 1] - edge_points[:, :, 0] 129 | 130 | def forward(self, gt_vertices, est_vertices, weights=None): 131 | gt_edges = self.compute_edges( 132 | gt_vertices, connections=self.gt_connections) 133 | est_edges = self.compute_edges( 134 | est_vertices, connections=self.est_connections) 135 | 136 | raw_edge_diff = (gt_edges - est_edges) 137 | 138 | batch_size = gt_vertices.shape[0] 139 | if 
self.norm_type == 'l2': 140 | edge_diff = raw_edge_diff.pow(2) 141 | elif self.norm_type == 'l1': 142 | edge_diff = raw_edge_diff.abs() 143 | else: 144 | raise NotImplementedError( 145 | f'Loss type not implemented: {self.loss_type}') 146 | if self.reduction == 'sum': 147 | return edge_diff.sum() 148 | elif self.reduction == 'mean': 149 | return edge_diff.sum() / batch_size 150 | -------------------------------------------------------------------------------- /transfer_model/losses/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import torch


def get_reduction_method(reduction='mean'):
    ''' Maps a reduction name to the corresponding torch function.

        'none' returns the identity so callers can skip reduction entirely;
        unknown names raise ValueError.
    '''
    if reduction == 'mean':
        return torch.mean
    elif reduction == 'sum':
        return torch.sum
    elif reduction == 'none':
        return lambda x: x
    else:
        raise ValueError('Unknown reduction method: {}'.format(reduction))

# ---------------------------------------------------------------------------
# transfer_model/merge_output.py
# ---------------------------------------------------------------------------
# Merges the per-frame output of the main transfer_model script into a
# single motion-sequence pickle.

import torch
from pathlib import Path
import pickle
from scipy.spatial.transform import Rotation as R

# All keys a transfer_model output pickle may contain.
KEYS = [
    "transl",
    "global_orient",
    "body_pose",
    "betas",
    "left_hand_pose",
    "right_hand_pose",
    "jaw_pose",
    "leye_pose",
    "reye_pose",
    "expression",
    "vertices",
    "joints",
    "full_pose",
    "v_shaped",
    "faces"
]

# Per-vertex quantities are not merged: they would blow up the output file
# and can be regenerated from the merged parameters.
IGNORED_KEYS = [
    "vertices",
    "faces",
    "v_shaped"
]


def aggregate_rotmats(x):
    ''' Concatenates a list of batched rotation matrices along dim 0 and
        converts them to flattened axis-angle vectors, one row per frame.
    '''
    x = torch.cat(x, dim=0).detach().numpy()
    s = x.shape[:-2]
    x = R.from_matrix(x.reshape(-1, 3, 3)).as_rotvec()
    x = x.reshape(s[0], -1)
    return x


# Default aggregation: concatenate frames and convert to numpy.
aggregate_function = {k: lambda x: torch.cat(x, 0).detach().numpy() for k in KEYS}
# Shape parameters are averaged over the sequence instead of concatenated.
aggregate_function["betas"] = lambda x: torch.cat(x, 0).mean(0).detach().numpy()

# Pose parameters are stored as rotation matrices; convert to axis-angle.
for k in ["global_orient", "body_pose", "left_hand_pose", "right_hand_pose", "jaw_pose", "full_pose"]:
    aggregate_function[k] = aggregate_rotmats


def merge(output_dir, gender):
    ''' Merges all numerically-named .pkl frames in output_dir into a
        single merged.pkl containing the aggregated sequence.

        Parameters
        ----------
        output_dir:
            Directory produced by the transfer_model script; frame files
            must be named <int>.pkl.
        gender:
            Gender string stored verbatim in the merged output.
    '''
    output_dir = Path(output_dir)
    assert output_dir.exists()
    assert output_dir.is_dir()

    # Collect frame files, sorted numerically by stem; the previous
    # "merged.pkl" (stem 'merged') is excluded by the filter.
    pkl_files = [f for f in output_dir.glob("*.pkl") if f.stem != "merged"]
    pkl_files = sorted(pkl_files, key=lambda x: int(x.stem))
    assert pkl_files, f'No frame .pkl files found in {output_dir}'

    merged = {}
    # Iterate over keys and put all values in lists.
    keys = set(KEYS) - set(IGNORED_KEYS)
    for k in keys:
        merged[k] = []
    for pkl_file in pkl_files:
        with open(pkl_file, "rb") as f:
            data = pickle.load(f)
        for k in keys:
            if k in data:
                merged[k].append(data[k])

    # Drop keys that never appeared in any frame (e.g. hand/face parameters
    # when the source model has none); torch.cat on an empty list fails.
    merged = {k: v for k, v in merged.items() if v}

    if "betas" in merged:
        b = torch.cat(merged["betas"], 0)
        print("betas:")
        for mu, sigma in zip(b.mean(0), b.std(0)):
            print(" {:.3f} +/- {:.3f}".format(mu, sigma))

    # Aggregate all collected values.
    for k in list(merged.keys()):
        merged[k] = aggregate_function[k](merged[k])

    # Add gender.
    merged["gender"] = gender

    # Save merged data to the same output_dir.
    with open(output_dir / "merged.pkl", "wb") as f:
        pickle.dump(merged, f)


if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Merge output of transfer_model script')
    parser.add_argument('output_dir', type=str, help='output directory of transfer_model script')
    parser.add_argument('--gender', type=str, choices=['male', 'female', 'neutral'], help='gender of actor in motion sequence')
    args = parser.parse_args()
    merge(args.output_dir, args.gender)

# ---------------------------------------------------------------------------
# transfer_model/optimizers/__init__.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from .optim_factory import build_optimizer 18 | from .minimize import minimize 19 | -------------------------------------------------------------------------------- /transfer_model/optimizers/minimize.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

from typing import List, Union, Callable, Optional, Dict
import torch
from loguru import logger
from tqdm import tqdm

from transfer_model.utils import (
    from_torch, Tensor, Array, rel_change)


def minimize(
    optimizer: torch.optim,
    closure,
    params: List[Tensor],
    summary_closure: Optional[Callable[[], Dict[str, float]]] = None,
    maxiters=100,
    ftol=-1.0,
    gtol=1e-9,
    interactive=True,
    summary_steps=10,
    **kwargs
):
    ''' Helper function for running an optimization process
        Args:
            - optimizer: The PyTorch optimizer object
            - closure: The function used to calculate the gradients
            - params: a list containing the parameters that will be optimized
        Keyword arguments:
            - summary_closure: Optional callable that returns extra metrics
                to log as a name -> float dict
            - maxiters (100): The maximum number of iterations for the
                optimizer
            - ftol: The tolerance for the relative change in the loss
                function.
                If it is lower than this value, then the process stops
            - gtol: The tolerance for the maximum change in the gradient.
                If the maximum absolute values of the all gradient tensors
                are less than this, then the process will stop.
            - interactive: If True, logs progress every summary_steps steps
        Returns:
            The final loss value, or None if no iteration ran.
    '''
    prev_loss = None
    loss = None
    for n in tqdm(range(maxiters), desc='Fitting iterations'):
        loss = optimizer.step(closure)

        # Relative-change stopping criterion on the loss value.
        if n > 0 and prev_loss is not None and ftol > 0:
            loss_rel_change = rel_change(prev_loss, loss.item())

            if loss_rel_change <= ftol:
                prev_loss = loss.item()
                break

        # Gradient-magnitude stopping criterion. Fixed: require at least one
        # parameter with a gradient — all(...) over an empty sequence is
        # trivially True, so the original stopped at iteration 0 whenever no
        # parameter had a .grad yet.
        grads = [var.grad for var in params if var.grad is not None]
        if gtol > 0 and grads and all(
                g.view(-1).abs().max().item() < gtol for g in grads):
            prev_loss = loss.item()
            break

        if interactive and n % summary_steps == 0:
            logger.info(f'[{n:05d}] Loss: {loss.item():.4f}')
            if summary_closure is not None:
                summaries = summary_closure()
                for key, val in summaries.items():
                    logger.info(f'[{n:05d}] {key}: {val:.4f}')

        prev_loss = loss.item()

    # Log the final step. Fixed: skip when the loop never ran
    # (maxiters <= 0), in which case `n` and `loss` are undefined and the
    # original raised NameError.
    if interactive and loss is not None:
        logger.info(f'[{n + 1:05d}] Loss: {loss.item():.4f}')
        if summary_closure is not None:
            summaries = summary_closure()
            for key, val in summaries.items():
                logger.info(f'[{n + 1:05d}] {key}: {val:.4f}')

    return prev_loss

# ---------------------------------------------------------------------------
# transfer_model/optimizers/optim_factory.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems.
# All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import sys

from typing import NewType, List, Dict

import torch
import torch.optim as optim
from loguru import logger
from torchtrustncg import TrustRegion

Tensor = NewType('Tensor', torch.Tensor)


def build_optimizer(parameters: List[Tensor],
                    optim_cfg: Dict
                    ) -> Dict:
    ''' Creates the optimizer selected by the config.

        Parameters
        ----------
        parameters:
            Tensors to optimize; entries with requires_grad == False are
            filtered out.
        optim_cfg:
            Configuration mapping; 'type' selects the optimizer and the
            key of the same name holds its keyword arguments.

        Returns
        -------
        A dict with the 'optimizer' and a 'create_graph' flag, which is
        True only for trust-ncg, the second-order method that needs the
        gradient graph for Hessian-vector products.
    '''
    optim_type = optim_cfg.get('type', 'sgd')
    logger.info(f'Building: {optim_type.title()}')

    num_params = len(parameters)
    parameters = list(filter(lambda x: x.requires_grad, parameters))
    if num_params != len(parameters):
        logger.info('Some parameters have requires_grad off')

    if optim_type == 'adam':
        optimizer = optim.Adam(parameters, **optim_cfg.get('adam', {}))
        create_graph = False
    elif optim_type == 'lbfgs' or optim_type == 'lbfgsls':
        optimizer = optim.LBFGS(parameters, **optim_cfg.get('lbfgs', {}))
        create_graph = False
    elif optim_type == 'trust_ncg' or optim_type == 'trust-ncg':
        optimizer = TrustRegion(
            parameters, **optim_cfg.get('trust_ncg', {}))
        create_graph = True
    elif optim_type == 'rmsprop':
        optimizer = optim.RMSprop(parameters, **optim_cfg.get('rmsprop', {}))
        create_graph = False
    elif optim_type == 'sgd':
        optimizer = optim.SGD(parameters, **optim_cfg.get('sgd', {}))
        create_graph = False
    else:
        raise ValueError(f'Optimizer {optim_type} not supported!')
    return {'optimizer': optimizer, 'create_graph': create_graph}


def build_scheduler(optimizer, sched_type='exp',
                    lr_lambda=0.1, **kwargs):
    ''' Creates a learning-rate scheduler; returns None when disabled
        (lr_lambda <= 0).
    '''
    if lr_lambda <= 0.0:
        return None

    if sched_type == 'exp':
        return optim.lr_scheduler.ExponentialLR(optimizer, lr_lambda)
    else:
        # Fixed: the original built the message as
        # 'Unknown learning rate' + ' scheduler: '.format(sched_type) —
        # str.format with no placeholder silently dropped the name.
        raise ValueError(f'Unknown learning rate scheduler: {sched_type}')
-------------------------------------------------------------------------------- /transfer_model/requirements.txt: -------------------------------------------------------------------------------- 1 | numpy>=1.16.2 2 | torch>=1.0.1.post2 3 | dataclasses>=0.6 4 | pyrender>=0.1.23 5 | shapely 6 | trimesh>=2.37.6 7 | open3d 8 | smplx 9 | omegaconf 10 | loguru 11 | -------------------------------------------------------------------------------- /transfer_model/transfer_model.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

from typing import Optional, Dict, Callable
import sys
import numpy as np
import torch
import torch.nn as nn

from tqdm import tqdm

from loguru import logger
from .utils import get_vertices_per_edge

from .optimizers import build_optimizer, minimize
from .utils import (
    Tensor, batch_rodrigues, apply_deformation_transfer)
from .losses import build_loss


def summary_closure(gt_vertices, var_dict, body_model, mask_ids=None):
    ''' Runs the body model with the current variables and reports the mean
        vertex-to-vertex distance against gt_vertices.

        The value is scaled by 1000 — presumably meters to millimeters;
        TODO confirm the unit of the input vertices.
    '''
    param_dict = {}
    for key, var in var_dict.items():
        # Decode the axis-angles
        if 'pose' in key or 'orient' in key:
            param_dict[key] = batch_rodrigues(
                var.reshape(-1, 3)).reshape(len(var), -1, 3, 3)
        else:
            # Simply pass the variable
            param_dict[key] = var
    body_model_output = body_model(
        return_full_pose=True, get_skin=True, **param_dict)
    est_vertices = body_model_output['vertices']
    # Restrict the comparison to the valid vertices when a mask is given.
    if mask_ids is not None:
        est_vertices = est_vertices[:, mask_ids]
        gt_vertices = gt_vertices[:, mask_ids]

    v2v = (est_vertices - gt_vertices).pow(2).sum(dim=-1).sqrt().mean()
    return {
        'Vertex-to-Vertex': v2v * 1000}


def build_model_forward_closure(
    body_model: nn.Module,
    var_dict: Dict[str, Tensor],
    per_part: bool = True,
    part_key: Optional[str] = None,
    jidx: Optional[int] = None,
    part: Optional[Tensor] = None
) -> Callable:
    ''' Builds a closure that runs the body model with the current variables.

        When per_part is True, joint `jidx` of the pose variable named
        `part_key` is overridden by the separately-optimized tensor `part`;
        all three must then be provided.
    '''
    if per_part:
        cond = part is not None and part_key is not None and jidx is not None
        assert cond, (
            'When per-part is True, "part", "part_key", "jidx" must not be'
            ' None.'
        )

        def model_forward():
            param_dict = {}
            for key, var in var_dict.items():
                if part_key == key:
                    # Decode the whole variable, then splice in the
                    # per-part rotation at joint jidx.
                    param_dict[key] = batch_rodrigues(
                        var.reshape(-1, 3)).reshape(len(var), -1, 3, 3)
                    param_dict[key][:, jidx] = batch_rodrigues(
                        part.reshape(-1, 3)).reshape(-1, 3, 3)
                else:
                    # Decode the axis-angles
                    if 'pose' in key or 'orient' in key:
                        param_dict[key] = batch_rodrigues(
                            var.reshape(-1, 3)).reshape(len(var), -1, 3, 3)
                    else:
                        # Simply pass the variable
                        param_dict[key] = var

            return body_model(
                return_full_pose=True, get_skin=True, **param_dict)
    else:
        def model_forward():
            param_dict = {}
            for key, var in var_dict.items():
                # Decode the axis-angles
                if 'pose' in key or 'orient' in key:
                    param_dict[key] = batch_rodrigues(
                        var.reshape(-1, 3)).reshape(len(var), -1, 3, 3)
                else:
                    # Simply pass the variable
                    param_dict[key] = var

            return body_model(return_full_pose=True, get_skin=True,
                              **param_dict)
    return model_forward


def build_edge_closure(
    body_model: nn.Module,
    var_dict: Dict[str, Tensor],
    edge_loss: nn.Module,
    optimizer_dict,
    gt_vertices: Tensor,
    per_part: bool = True,
    part_key: Optional[str] = None,
    jidx: Optional[int] = None,
    part: Optional[Tensor] = None
) -> Callable:
    ''' Builds the closure for the edge objective
    '''
    optimizer = optimizer_dict['optimizer']
    create_graph = optimizer_dict['create_graph']

    # Only pose parameters are optimized by the edge term.
    if per_part:
        params_to_opt = [part]
    else:
        params_to_opt = [p for key, p in var_dict.items() if 'pose' in key]

    model_forward = build_model_forward_closure(
        body_model, var_dict, per_part=per_part, part_key=part_key,
        jidx=jidx, part=part)

    def closure(backward=True):
        if backward:
            optimizer.zero_grad()

        body_model_output = model_forward()
        est_vertices = body_model_output['vertices']

        loss = edge_loss(est_vertices, gt_vertices)
        if backward:
            if create_graph:
                # Use this instead of .backward to avoid GPU memory leaks
                grads = torch.autograd.grad(
                    loss, params_to_opt, create_graph=True)
                torch.autograd.backward(
                    params_to_opt, grads, create_graph=True)
            else:
                loss.backward()

        return loss
    return closure


def build_vertex_closure(
    body_model: nn.Module,
    var_dict: Dict[str, Tensor],
    optimizer_dict,
    gt_vertices: Tensor,
    vertex_loss: nn.Module,
    mask_ids=None,
    per_part: bool = True,
    part_key: Optional[str] = None,
    jidx: Optional[int] = None,
    part: Optional[Tensor] = None,
    params_to_opt: Optional[Tensor] = None,
) -> Callable:
    ''' Builds the closure for the vertex objective
    '''
    optimizer = optimizer_dict['optimizer']
    create_graph = optimizer_dict['create_graph']

    model_forward = build_model_forward_closure(
        body_model, var_dict, per_part=per_part, part_key=part_key,
        jidx=jidx, part=part)

    # By default every variable is optimized by the vertex term.
    if params_to_opt is None:
        params_to_opt = [p for key, p in var_dict.items()]

    def closure(backward=True):
        if backward:
            optimizer.zero_grad()

        body_model_output = model_forward()
        est_vertices = body_model_output['vertices']

        # Compare only the masked (valid) vertices when mask_ids is given.
        loss = vertex_loss(
            est_vertices[:, mask_ids] if mask_ids is not None else
            est_vertices,
            gt_vertices[:, mask_ids] if mask_ids is not None else gt_vertices)
        if backward:
            if create_graph:
                # Use this instead of .backward to avoid GPU memory leaks
                grads = torch.autograd.grad(
                    loss, params_to_opt, create_graph=True)
                torch.autograd.backward(
                    params_to_opt, grads, create_graph=True)
            else:
                loss.backward()

        return loss
    return closure


def get_variables(
    batch_size: int,
    body_model: nn.Module,
    dtype: torch.dtype = torch.float32
) -> Dict[str, Tensor]:
    ''' Creates the zero-initialized, grad-enabled optimization variables
        appropriate for the given body model (SMPL / SMPL+H / SMPL-X).
    '''
    var_dict = {}

    # Place the variables on the same device as the model buffers.
    device = next(body_model.buffers()).device

    if (body_model.name() == 'SMPL' or body_model.name() == 'SMPL+H' or
            body_model.name() == 'SMPL-X'):
        var_dict.update({
            'transl': torch.zeros(
                [batch_size, 3], device=device, dtype=dtype),
            'global_orient': torch.zeros(
                [batch_size, 1, 3], device=device, dtype=dtype),
            'body_pose': torch.zeros(
                [batch_size, body_model.NUM_BODY_JOINTS, 3],
                device=device, dtype=dtype),
            'betas': torch.zeros([batch_size, body_model.num_betas],
                                 dtype=dtype, device=device),
        })

    if body_model.name() == 'SMPL+H' or body_model.name() == 'SMPL-X':
        var_dict.update(
            left_hand_pose=torch.zeros(
                [batch_size, body_model.NUM_HAND_JOINTS, 3], device=device,
                dtype=dtype),
            right_hand_pose=torch.zeros(
                [batch_size, body_model.NUM_HAND_JOINTS, 3], device=device,
                dtype=dtype),
        )

    if body_model.name() == 'SMPL-X':
        var_dict.update(
            jaw_pose=torch.zeros([batch_size, 1, 3],
                                 device=device, dtype=dtype),
            leye_pose=torch.zeros([batch_size, 1, 3],
                                  device=device, dtype=dtype),
            reye_pose=torch.zeros([batch_size, 1, 3],
                                  device=device, dtype=dtype),
            expression=torch.zeros(
                [batch_size, body_model.num_expression_coeffs],
                device=device, dtype=dtype),
        )

    # Toggle gradients to True
    for key, val in var_dict.items():
        val.requires_grad_(True)

    return var_dict


def run_fitting(
    exp_cfg,
    batch: Dict[str, Tensor],
    body_model: nn.Module,
    def_matrix: Tensor,
    mask_ids: Optional = None
) -> Dict[str, Tensor]:
    ''' Runs fitting
    '''
    vertices = batch['vertices']
    faces = 
batch['faces'] 268 | 269 | batch_size = len(vertices) 270 | dtype, device = vertices.dtype, vertices.device 271 | summary_steps = exp_cfg.get('summary_steps') 272 | interactive = exp_cfg.get('interactive') 273 | 274 | # Get the parameters from the model 275 | var_dict = get_variables(batch_size, body_model) 276 | 277 | # Build the optimizer object for the current batch 278 | optim_cfg = exp_cfg.get('optim', {}) 279 | 280 | def_vertices = apply_deformation_transfer(def_matrix, vertices, faces) 281 | 282 | if mask_ids is None: 283 | f_sel = np.ones_like(body_model.faces[:, 0], dtype=np.bool_) 284 | else: 285 | f_per_v = [[] for _ in range(body_model.get_num_verts())] 286 | [f_per_v[vv].append(iff) for iff, ff in enumerate(body_model.faces) 287 | for vv in ff] 288 | f_sel = list(set(tuple(sum([f_per_v[vv] for vv in mask_ids], [])))) 289 | vpe = get_vertices_per_edge( 290 | body_model.v_template.detach().cpu().numpy(), body_model.faces[f_sel]) 291 | 292 | def log_closure(): 293 | return summary_closure(def_vertices, var_dict, body_model, 294 | mask_ids=mask_ids) 295 | 296 | edge_fitting_cfg = exp_cfg.get('edge_fitting', {}) 297 | edge_loss = build_loss(type='vertex-edge', gt_edges=vpe, est_edges=vpe, 298 | **edge_fitting_cfg) 299 | edge_loss = edge_loss.to(device=device) 300 | 301 | vertex_fitting_cfg = exp_cfg.get('vertex_fitting', {}) 302 | vertex_loss = build_loss(**vertex_fitting_cfg) 303 | vertex_loss = vertex_loss.to(device=device) 304 | 305 | per_part = edge_fitting_cfg.get('per_part', True) 306 | logger.info(f'Per-part: {per_part}') 307 | # Optimize edge-based loss to initialize pose 308 | if per_part: 309 | for key, var in tqdm(var_dict.items(), desc='Parts'): 310 | if 'pose' not in key: 311 | continue 312 | 313 | for jidx in tqdm(range(var.shape[1]), desc='Joints'): 314 | part = torch.zeros( 315 | [batch_size, 3], dtype=dtype, device=device, 316 | requires_grad=True) 317 | # Build the optimizer for the current part 318 | optimizer_dict = 
build_optimizer([part], optim_cfg) 319 | closure = build_edge_closure( 320 | body_model, var_dict, edge_loss, optimizer_dict, 321 | def_vertices, per_part=per_part, part_key=key, jidx=jidx, 322 | part=part) 323 | 324 | minimize(optimizer_dict['optimizer'], closure, 325 | params=[part], 326 | summary_closure=log_closure, 327 | summary_steps=summary_steps, 328 | interactive=interactive, 329 | **optim_cfg) 330 | with torch.no_grad(): 331 | var[:, jidx] = part 332 | else: 333 | optimizer_dict = build_optimizer(list(var_dict.values()), optim_cfg) 334 | closure = build_edge_closure( 335 | body_model, var_dict, edge_loss, optimizer_dict, 336 | def_vertices, per_part=per_part) 337 | 338 | minimize(optimizer_dict['optimizer'], closure, 339 | params=var_dict.values(), 340 | summary_closure=log_closure, 341 | summary_steps=summary_steps, 342 | interactive=interactive, 343 | **optim_cfg) 344 | 345 | if 'translation' in var_dict: 346 | optimizer_dict = build_optimizer([var_dict['translation']], optim_cfg) 347 | closure = build_vertex_closure( 348 | body_model, var_dict, 349 | optimizer_dict, 350 | def_vertices, 351 | vertex_loss=vertex_loss, 352 | mask_ids=mask_ids, 353 | per_part=False, 354 | params_to_opt=[var_dict['translation']], 355 | ) 356 | # Optimize translation 357 | minimize(optimizer_dict['optimizer'], 358 | closure, 359 | params=[var_dict['translation']], 360 | summary_closure=log_closure, 361 | summary_steps=summary_steps, 362 | interactive=interactive, 363 | **optim_cfg) 364 | 365 | # Optimize all model parameters with vertex-based loss 366 | optimizer_dict = build_optimizer(list(var_dict.values()), optim_cfg) 367 | closure = build_vertex_closure( 368 | body_model, var_dict, 369 | optimizer_dict, 370 | def_vertices, 371 | vertex_loss=vertex_loss, 372 | per_part=False, 373 | mask_ids=mask_ids) 374 | minimize(optimizer_dict['optimizer'], closure, 375 | params=list(var_dict.values()), 376 | summary_closure=log_closure, 377 | summary_steps=summary_steps, 378 | 
interactive=interactive, 379 | **optim_cfg) 380 | 381 | param_dict = {} 382 | for key, var in var_dict.items(): 383 | # Decode the axis-angles 384 | if 'pose' in key or 'orient' in key: 385 | param_dict[key] = batch_rodrigues( 386 | var.reshape(-1, 3)).reshape(len(var), -1, 3, 3) 387 | else: 388 | # Simply pass the variable 389 | param_dict[key] = var 390 | 391 | body_model_output = body_model( 392 | return_full_pose=True, get_skin=True, **param_dict) 393 | var_dict.update(body_model_output) 394 | var_dict['faces'] = body_model.faces 395 | 396 | return var_dict 397 | -------------------------------------------------------------------------------- /transfer_model/utils/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
14 | # 15 | # Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de 16 | 17 | from .np_utils import to_np, rel_change 18 | from .torch_utils import from_torch 19 | from .timer import Timer, timer_decorator 20 | from .typing import * 21 | from .pose_utils import batch_rodrigues, batch_rot2aa 22 | from .metrics import v2v 23 | from .def_transfer import read_deformation_transfer, apply_deformation_transfer 24 | from .mesh_utils import get_vertices_per_edge 25 | from .o3d_utils import np_mesh_to_o3d 26 | -------------------------------------------------------------------------------- /transfer_model/utils/def_transfer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is 4 | # holder of all proprietary rights on this computer program. 5 | # You can only use this computer program if you have closed 6 | # a license agreement with MPG or you get the right to use the computer 7 | # program from someone who is authorized to grant you that right. 8 | # Any use of the computer program without a valid license is prohibited and 9 | # liable to prosecution. 10 | # 11 | # Copyright©2020 Max-Planck-Gesellschaft zur Förderung 12 | # der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute 13 | # for Intelligent Systems. All rights reserved. 
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import os
import os.path as osp
import pickle

import numpy as np
import torch
from loguru import logger

from .typing import Tensor


def read_deformation_transfer(
    deformation_transfer_path: str,
    device=None,
    use_normal: bool = False,
) -> Tensor:
    ''' Reads a deformation transfer matrix from a pickle file.

    Parameters
    ----------
    deformation_transfer_path: str
        Path to a pickle that contains either a 'mtx' entry (possibly a
        scipy sparse matrix whose columns stack vertices and normals) or a
        plain 'matrix' entry.
    device: torch.device, optional
        Device of the returned tensor; defaults to CPU.
    use_normal: bool
        When False and the setup stores a 'mtx', the normal columns are
        dropped and only the vertex part is kept.

    Returns
    -------
    Tensor
        The deformation transfer matrix as a float32 tensor.

    Raises
    ------
    FileNotFoundError
        If ``deformation_transfer_path`` does not exist.
    KeyError
        If neither 'mtx' nor 'matrix' is present in the pickle.
    '''
    if device is None:
        device = torch.device('cpu')
    # Raise an explicit error instead of using ``assert``, which is
    # stripped when Python runs with -O.
    if not osp.exists(deformation_transfer_path):
        raise FileNotFoundError(
            'Deformation transfer path does not exist:'
            f' {deformation_transfer_path}')
    logger.info(
        f'Loading deformation transfer from: {deformation_transfer_path}')
    # Read the deformation transfer matrix
    with open(deformation_transfer_path, 'rb') as f:
        def_transfer_setup = pickle.load(f, encoding='latin1')
    if 'mtx' in def_transfer_setup:
        def_matrix = def_transfer_setup['mtx']
        if hasattr(def_matrix, 'todense'):
            # scipy sparse matrix -> dense
            def_matrix = def_matrix.todense()
        def_matrix = np.array(def_matrix, dtype=np.float32)
        if not use_normal:
            # Columns store [vertices | normals]; keep only the vertex part.
            num_verts = def_matrix.shape[1] // 2
            def_matrix = def_matrix[:, :num_verts]
    elif 'matrix' in def_transfer_setup:
        def_matrix = def_transfer_setup['matrix']
    else:
        valid_keys = ['mtx', 'matrix']
        raise KeyError(f'Deformation transfer setup must contain {valid_keys}')

    def_matrix = torch.tensor(def_matrix, device=device, dtype=torch.float32)
    return def_matrix


def apply_deformation_transfer(
    def_matrix: Tensor,
    vertices: Tensor,
    faces: Tensor,
    use_normals=False
) -> Tensor:
    ''' Applies the deformation transfer on the given meshes.

    ``def_matrix`` maps source vertices (n) to target vertices (m);
    ``vertices`` is a batch of shape (b, n, 3).  Returns the transferred
    vertices of shape (b, m, 3).  ``faces`` is accepted for interface
    compatibility; it is only needed by the (unimplemented) normal path.
    '''
    if use_normals:
        raise NotImplementedError
    else:
        def_vertices = torch.einsum('mn,bni->bmi', [def_matrix, vertices])
    return def_vertices
# ---------------------------------------------------------------------------
# transfer_model/utils/mesh_utils.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Code from Chumpy and OpenDR. Placed here to avoid chumpy dependency
# The original code can be found in https://github.com/MPI-IS/mesh
import numpy as np
import scipy.sparse as sp


def row(A):
    """Reshape *A* into a single-row 2D array of shape (1, -1)."""
    return A.reshape((1, -1))


def col(A):
    """Reshape *A* into a single-column 2D array of shape (-1, 1)."""
    return A.reshape((-1, 1))


def get_vert_connectivity(mesh_v, mesh_f):
    """Returns a sparse matrix (of size #verts x #verts) where each nonzero
    element indicates a neighborhood relation. For example, if there is a
    nonzero element in position (15,12), that means vertex 15 is connected
    by an edge to vertex 12."""

    num_verts = len(mesh_v)
    connectivity = sp.csc_matrix((num_verts, num_verts))

    # Walk the three directed edges of every triangle (column i -> column
    # i+1, wrapping around) and accumulate both directions symmetrically.
    for corner in range(3):
        src = mesh_f[:, corner]
        dst = mesh_f[:, (corner + 1) % 3]
        weights = np.ones(len(src))
        ij = np.vstack((row(src.flatten()), row(dst.flatten())))
        edges = sp.csc_matrix((weights, ij), shape=connectivity.shape)
        connectivity = connectivity + edges + edges.T

    return connectivity


def get_vertices_per_edge(mesh_v, mesh_f):
    """Returns an Ex2 array of adjacencies between vertices, where
    each element in the array is a vertex index. Each edge is included
    only once. If output of get_faces_per_edge is provided, this is used to
    avoid call to get_vert_connectivity()"""

    conn = sp.coo_matrix(get_vert_connectivity(mesh_v, mesh_f))
    pairs = np.hstack((col(conn.row), col(conn.col)))
    # Keep only (low, high) ordered pairs so each undirected edge appears once.
    return pairs[pairs[:, 0] < pairs[:, 1]]


# ---------------------------------------------------------------------------
# transfer_model/utils/metrics.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import numpy as np
import torch


def v2v(x, y):
    ''' Mean per-vertex Euclidean (L2) distance between two point sets.

    ``x`` and ``y`` are arrays/tensors of matching shape (..., 3); the
    distance is taken along the last axis and averaged over the rest.
    Accepts either torch tensors or numpy arrays.
    '''
    if torch.is_tensor(x):
        return (x - y).pow(2).sum(dim=-1).sqrt().mean()
    else:
        # BUGFIX: the original applied np.sqrt element-wise BEFORE summing
        # (sum of |dx|, not the L2 norm), which disagreed with the torch
        # branch above. The sum must happen inside the square root.
        return np.sqrt(np.power(x - y, 2).sum(axis=-1)).mean()


# ---------------------------------------------------------------------------
# transfer_model/utils/np_utils.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

import numpy as np


def rel_change(prev_val, curr_val):
    """Relative change between two successive scalar values.

    The denominator is clamped to at least 1 so the result is well-defined
    when both values are (near) zero.
    """
    denom = max([np.abs(prev_val), np.abs(curr_val), 1])
    return (prev_val - curr_val) / denom


def max_grad_change(grad_arr):
    """Largest absolute entry of a gradient tensor/array exposing .abs()/.max()."""
    return grad_arr.abs().max()


def to_np(array, dtype=np.float32):
    """Convert *array* to a dense numpy array of the given dtype.

    Scipy sparse inputs (anything exposing ``todense``) are densified first.
    """
    dense = array.todense() if hasattr(array, 'todense') else array
    return np.array(dense, dtype=dtype)


# ---------------------------------------------------------------------------
# transfer_model/utils/o3d_utils.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import open3d as o3d
import torch

Vector3d = o3d.utility.Vector3dVector
Vector3i = o3d.utility.Vector3iVector

Mesh = o3d.geometry.TriangleMesh


def np_mesh_to_o3d(vertices, faces):
    """Build an Open3D TriangleMesh from vertex and face arrays.

    Torch tensors are detached and moved to CPU/numpy first; numpy inputs
    are used as-is.
    """
    def as_numpy(x):
        # Open3D vectors require numpy input, not torch tensors.
        return x.detach().cpu().numpy() if torch.is_tensor(x) else x

    mesh = Mesh()
    mesh.vertices = Vector3d(as_numpy(vertices))
    mesh.triangles = Vector3i(as_numpy(faces))
    return mesh


# ---------------------------------------------------------------------------
# transfer_model/utils/pose_utils.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import sys
from typing import NewType, List, Dict, Optional
import os
import os.path as osp

import pickle

import torch
import torch.nn as nn
import torch.nn.functional as F

from omegaconf import OmegaConf
from loguru import logger

from .typing import Tensor


def rotation_matrix_to_cont_repr(x: Tensor) -> Tensor:
    ''' Keeps the first two columns of each 3x3 rotation matrix.

    Inverse of cont_repr_to_rotation_matrix below: the dropped third column
    is recoverable as the cross product of the first two.
    '''
    assert len(x.shape) == 3, (
        f'Expects an array of size Bx3x3, but received {x.shape}')
    return x[:, :3, :2]


def cont_repr_to_rotation_matrix(
    x: Tensor
) -> Tensor:
    ''' Converts tensor in continous representation to rotation matrices

    Performs a Gram-Schmidt orthonormalization of the two stored columns
    and completes the basis with their cross product.
    '''
    batch_size = x.shape[0]
    reshaped_input = x.view(-1, 3, 2)

    # Normalize the first vector
    b1 = F.normalize(reshaped_input[:, :, 0].clone(), dim=1)

    dot_prod = torch.sum(
        b1 * reshaped_input[:, :, 1].clone(), dim=1, keepdim=True)
    # Compute the second vector by finding the orthogonal complement to it
    b2 = F.normalize(reshaped_input[:, :, 1] - dot_prod * b1, dim=1)
    # Finish building the basis by taking the cross product
    b3 = torch.cross(b1, b2, dim=1)
    rot_mats = torch.stack([b1, b2, b3], dim=-1)

    return rot_mats.view(batch_size, -1, 3, 3)


def batch_rodrigues(
    rot_vecs: Tensor,
    epsilon: float = 1e-8
) -> Tensor:
    ''' Calculates the rotation matrices for a batch of rotation vectors
        Parameters
        ----------
        rot_vecs: torch.tensor Nx3
            array of N axis-angle vectors
        epsilon: float
            small offset guarding against the ||rot_vec|| == 0 singularity
        Returns
        -------
        R: torch.tensor Nx3x3
            The rotation matrices for the given axis-angle parameters
    '''
    assert len(rot_vecs.shape) == 2, (
        f'Expects an array of size Bx3, but received {rot_vecs.shape}')

    batch_size = rot_vecs.shape[0]
    device = rot_vecs.device
    dtype = rot_vecs.dtype

    # Rotation angle = vector norm; epsilon avoids division by zero below.
    angle = torch.norm(rot_vecs + epsilon, dim=1, keepdim=True, p=2)
    rot_dir = rot_vecs / angle

    cos = torch.unsqueeze(torch.cos(angle), dim=1)
    sin = torch.unsqueeze(torch.sin(angle), dim=1)

    # Bx1 arrays
    rx, ry, rz = torch.split(rot_dir, 1, dim=1)
    # NOTE(review): this K is immediately overwritten by the torch.cat
    # assignment below; the zeros allocation here is dead code.
    K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device)

    zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device)
    # Skew-symmetric cross-product matrix K of the (unit) rotation axis.
    K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \
        .view((batch_size, 3, 3))

    ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0)
    # Rodrigues' formula: R = I + sin(theta) K + (1 - cos(theta)) K^2
    rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K)
    return rot_mat


def batch_rot2aa(
    Rs: Tensor, epsilon: float = 1e-7
) -> Tensor:
    """
    Rs is B x 3 x 3
    void cMathUtil::RotMatToAxisAngle(const tMatrix& mat, tVector& out_axis,
                                      double& out_theta)
    {
        double c = 0.5 * (mat(0, 0) + mat(1, 1) + mat(2, 2) - 1);
        c = cMathUtil::Clamp(c, -1.0, 1.0);

        out_theta = std::acos(c);

        if (std::abs(out_theta) < 0.00001)
        {
            out_axis = tVector(0, 0, 1, 0);
        }
        else
        {
            double m21 = mat(2, 1) - mat(1, 2);
            double m02 = mat(0, 2) - mat(2, 0);
            double m10 = mat(1, 0) - mat(0, 1);
            double denom = std::sqrt(m21 * m21 + m02 * m02 + m10 * m10);
            out_axis[0] = m21 / denom;
            out_axis[1] = m02 / denom;
            out_axis[2] = m10 / denom;
            out_axis[3] = 0;
        }
    }
    """

    # trace(R) = 1 + 2 cos(theta); clamp keeps acos inside its domain.
    cos = 0.5 * (torch.einsum('bii->b', [Rs]) - 1)
    cos = torch.clamp(cos, -1 + epsilon, 1 - epsilon)

    theta = torch.acos(cos)

    # Off-diagonal differences are proportional to the rotation axis.
    m21 = Rs[:, 2, 1] - Rs[:, 1, 2]
    m02 = Rs[:, 0, 2] - Rs[:, 2, 0]
    m10 = Rs[:, 1, 0] - Rs[:, 0, 1]
    denom = torch.sqrt(m21 * m21 + m02 * m02 + m10 * m10 + epsilon)

    # Near theta == 0 the axis is ill-defined; fall back to the raw
    # (approximately zero) differences instead of dividing by denom.
    axis0 = torch.where(torch.abs(theta) < 0.00001, m21, m21 / denom)
    axis1 = torch.where(torch.abs(theta) < 0.00001, m02, m02 / denom)
    axis2 = torch.where(torch.abs(theta) < 0.00001, m10, m10 / denom)

    return theta.unsqueeze(1) * torch.stack([axis0, axis1, axis2], 1)


# ---------------------------------------------------------------------------
# transfer_model/utils/timer.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import time
import numpy as np
import torch

from loguru import logger


class Timer(object):
    ''' Context manager that logs the running mean of the wrapped block's
        wall-clock duration.
    '''

    def __init__(self, name='', sync=False):
        super().__init__()
        # All measured durations; the logged value is their running mean.
        self.elapsed = []
        self.name = name
        # When True, synchronize CUDA around the measurement so pending
        # asynchronous GPU work is included in the timing.
        self.sync = sync

    def __enter__(self):
        if self.sync:
            torch.cuda.synchronize()
        self.start = time.perf_counter()

    def __exit__(self, type, value, traceback):
        if self.sync:
            torch.cuda.synchronize()
        self.elapsed.append(time.perf_counter() - self.start)
        logger.info(f'[{self.name}]: {np.mean(self.elapsed):.3f}')


def timer_decorator(sync=False, name=''):
    ''' Decorator factory mirroring Timer for plain functions.

    Each call of the decorated function is timed and the running mean of
    all calls is logged under *name*.
    '''
    def wrapper(method):
        # Closed over by ``timed`` so durations accumulate across calls.
        history = []

        def timed(*args, **kw):
            if sync:
                torch.cuda.synchronize()
            start = time.perf_counter()
            result = method(*args, **kw)
            if sync:
                torch.cuda.synchronize()
            history.append(time.perf_counter() - start)
            logger.info(f'[{name}]: {np.mean(history):.3f}')
            return result
        return timed
    return wrapper


# ---------------------------------------------------------------------------
# transfer_model/utils/torch_utils.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG).
# acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

import numpy as np
import torch


def from_torch(x, dtype=np.float32):
    """Convert *x* to a numpy array of the given dtype.

    Torch tensors are detached and moved to CPU first; any other input is
    expected to expose ``astype`` (e.g. a numpy array).
    """
    arr = x.detach().cpu().numpy() if torch.is_tensor(x) else x
    return arr.astype(dtype)


# ---------------------------------------------------------------------------
# transfer_model/utils/typing.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-

# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
# holder of all proprietary rights on this computer program.
# You can only use this computer program if you have closed
# a license agreement with MPG or you get the right to use the computer
# program from someone who is authorized to grant you that right.
# Any use of the computer program without a valid license is prohibited and
# liable to prosecution.
#
# Copyright©2020 Max-Planck-Gesellschaft zur Förderung
# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
# for Intelligent Systems. All rights reserved.
#
# Contact: Vassilis Choutas, vassilis.choutas@tuebingen.mpg.de

from typing import NewType, List, Union
import numpy as np
import torch

# Names re-exported via `from .typing import *`.
__all__ = [
    'Tensor',
    'Array',
]

# Lightweight annotation aliases used throughout the transfer_model
# package; NewType keeps them distinct for static type checkers while
# behaving as plain torch.Tensor / np.ndarray at runtime.
Tensor = NewType('Tensor', torch.Tensor)
Array = NewType('Array', np.ndarray)


# ---------------------------------------------------------------------------
# transfer_model/view_pkl.py
# ---------------------------------------------------------------------------
import os.path as osp
import argparse

import numpy as np
import torch

import pyrender
import trimesh

import smplx

from tqdm.auto import tqdm, trange

from pathlib import Path

# NOTE(review): the definition of main() continues past this chunk; only
# the visible prefix is shown here.
def main(model_folder,
         motion_file,
         model_type='smplx',
         ext='npz',
         gender='neutral',
         plot_joints=False,
         num_betas=10,
         sample_expression=True,
         num_expression_coeffs=10,
         use_face_contour=False):

    # open motion file
    motion = np.load(motion_file, allow_pickle=True)
    _motion = {}
    for k,v in motion.items():
        if isinstance(v, np.ndarray):
            print(k, motion[k].shape, motion[k].dtype)
            if motion[k].dtype in ("