├── .env-files
│   ├── Dockerfile.github
│   ├── Dockerfile.gitlab
│   ├── Gemfile.github
│   └── Gemfile.gitlab
├── .gitignore
├── .gitlab-ci.yml
├── CNAME
├── Gemfile
├── Gemfile.lock
├── README.md
├── _config.yml
├── _includes
│   └── sidebar.html
├── _posts
│   └── 2020-07-07-MONO-model.md
├── assets
│   ├── .gitkeep
│   ├── FAQs
│   │   ├── PAMPJPE.png
│   │   ├── dmpl.gif
│   │   ├── internal-geo1.png
│   │   ├── internal-geo2.png
│   │   ├── internal-geo3.png
│   │   ├── neutral.png
│   │   └── preg-flickr.png
│   ├── SMPLX_OpenPose_mapping
│   │   ├── README.md
│   │   ├── smplx_openpose25.json
│   │   └── smplx_openpose_test.py
│   ├── SMPL_body_segmentation
│   │   ├── smpl
│   │   │   ├── smpl_segmentation_on_template.png
│   │   │   └── smpl_vert_segmentation.json
│   │   └── smplx
│   │       ├── smplx_segmentation.png
│   │       └── smplx_vert_segmentation.json
│   ├── SMPL_formulation.png
│   ├── clipart-under-construction.png
│   ├── flame.png
│   ├── images_digidoppel
│   │   ├── clothing-problems.png
│   │   ├── internal-geometry-problems.png
│   │   └── measurements
│   │       ├── BodyVis-Web-ankle_girth.png
│   │       ├── BodyVis-Web-arm_scye_girth.png
│   │       ├── BodyVis-Web-armlength_shoulder_elbow.pdf
│   │       ├── BodyVis-Web-armlength_shoulder_elbow.png
│   │       ├── BodyVis-Web-armlength_shoulder_wrist.png
│   │       ├── BodyVis-Web-armlength_spine_wrist.pdf
│   │       ├── BodyVis-Web-armlength_spine_wrist.png
│   │       ├── BodyVis-Web-chest_max_girth.png
│   │       ├── BodyVis-Web-crotch_length.png
│   │       ├── BodyVis-Web-foot_length.png
│   │       ├── BodyVis-Web-height.png
│   │       ├── BodyVis-Web-hip_height.png
│   │       ├── BodyVis-Web-hip_max_girth.png
│   │       ├── BodyVis-Web-inseam.png
│   │       ├── BodyVis-Web-neck_base_girth.png
│   │       ├── BodyVis-Web-shoulder_breadth.png
│   │       ├── BodyVis-Web-thigh_max_girth.png
│   │       ├── BodyVis-Web-waist_height.png
│   │       └── BodyVis-Web-waist_min_girth.png
│   ├── mano.png
│   ├── meshcapade-logo.png
│   ├── smal.png
│   ├── smil.png
│   ├── smpl_fbx.png
│   ├── smpl_textured_female.png
│   ├── smpl_textured_male.png
│   ├── smplx.png
│   ├── star.png
│   └── vibe.png
├── docker-compose.yml
└── wiki
    ├── .gitkeep
    ├── FAQs.md
    ├── SMPL.md
    ├── body_and_pose.md
    ├── digidoppel.md
    └── quick-links.md
--------------------------------------------------------------------------------
/.env-files/Dockerfile.github:
--------------------------------------------------------------------------------
FROM ruby:2.7.0

ENV LC_ALL=C.UTF-8

ADD . /srv/jekyll

WORKDIR /srv/jekyll

RUN bundle install --gemfile=.env-files/Gemfile.github

EXPOSE 4000
--------------------------------------------------------------------------------
/.env-files/Dockerfile.gitlab:
--------------------------------------------------------------------------------
FROM ruby:2.7.0

ENV LC_ALL=C.UTF-8

ADD . /srv/jekyll
WORKDIR /srv/jekyll

RUN bundle install --gemfile=.env-files/Gemfile.gitlab

EXPOSE 4000
--------------------------------------------------------------------------------
/.env-files/Gemfile.github:
--------------------------------------------------------------------------------
source 'https://rubygems.org'
gem 'github-pages', group: :jekyll_plugins
gem "jekyll-gitlab-metadata" # for cross-compatibility
--------------------------------------------------------------------------------
/.env-files/Gemfile.gitlab:
--------------------------------------------------------------------------------
source 'https://rubygems.org'
gem "jekyll-avatar"
gem "jekyll-coffeescript"
gem "jekyll-default-layout"
gem "jekyll-feed"
gem "jekyll-gist"
gem "jekyll-paginate"
gem "jekyll-mentions"
gem "jekyll-optional-front-matter"
gem "jekyll-readme-index"
gem "jekyll-redirect-from"
gem "jekyll-remote-theme"
gem "jekyll-relative-links"
gem "jekyll-seo-tag"
gem "jekyll-sitemap"
gem "jekyll-titles-from-headings"
gem "jemoji"
gem "jekyll-gitlab-metadata"
gem "kramdown-parser-gfm"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
_site
Gemfile.*.lock
.sass-cache
.vscode
.jekyll-cache
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
image: ruby:2.7.0

variables:
  JEKYLL_ENV: production
  LC_ALL: C.UTF-8
  BUNDLE_GEMFILE: .env-files/Gemfile.gitlab

before_script:
  - bundle install --gemfile=.env-files/Gemfile.gitlab

pages:
  stage: deploy
  script:
    - bundle exec jekyll build -d public
  artifacts:
    paths:
      - public
  only:
    - master
--------------------------------------------------------------------------------
/CNAME:
--------------------------------------------------------------------------------
meshcapade.wiki
--------------------------------------------------------------------------------
/Gemfile:
--------------------------------------------------------------------------------
# frozen_string_literal: true

source "https://rubygems.org"

git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }

# gem "rails"

gem "jekyll", "~> 3.6"
gem "jekyll-avatar", "~> 0.7.0"
gem "jekyll-coffeescript", "~> 2.0"
gem "jekyll-default-layout", "~> 0.1.5"
gem "jekyll-feed", "~> 0.15.1"
gem "jekyll-gist", "~> 1.5"
gem "jekyll-paginate", "~> 1.1"
gem "jekyll-mentions", "~> 1.6"
gem "jekyll-optional-front-matter", "~> 0.3.2"
gem "jekyll-readme-index", "~> 0.3.0"
gem "jekyll-redirect-from", "~> 0.16.0"
gem "jekyll-remote-theme", "~> 0.4.2"
gem "jekyll-relative-links", "~> 0.6.1"
gem "jekyll-seo-tag", "~> 2.7"
gem "jekyll-sitemap", "~> 1.4"
gem "jekyll-titles-from-headings", "~> 0.5.3"
gem "jemoji", "~> 0.12.0"
gem "jekyll-gitlab-metadata", "~> 0.8.0"
gem "kramdown-parser-gfm", "~> 1.1"
--------------------------------------------------------------------------------
/Gemfile.lock:
-------------------------------------------------------------------------------- 1 | GEM 2 | remote: https://rubygems.org/ 3 | specs: 4 | activesupport (6.0.3.4) 5 | concurrent-ruby (~> 1.0, >= 1.0.2) 6 | i18n (>= 0.7, < 2) 7 | minitest (~> 5.1) 8 | tzinfo (~> 1.1) 9 | zeitwerk (~> 2.2, >= 2.2.2) 10 | addressable (2.7.0) 11 | public_suffix (>= 2.0.2, < 5.0) 12 | coffee-script (2.4.1) 13 | coffee-script-source 14 | execjs 15 | coffee-script-source (1.12.2) 16 | colorator (1.1.0) 17 | concurrent-ruby (1.1.8) 18 | em-websocket (0.5.2) 19 | eventmachine (>= 0.12.9) 20 | http_parser.rb (~> 0.6.0) 21 | eventmachine (1.2.7) 22 | execjs (2.7.0) 23 | faraday (1.3.0) 24 | faraday-net_http (~> 1.0) 25 | multipart-post (>= 1.2, < 3) 26 | ruby2_keywords 27 | faraday-net_http (1.0.1) 28 | ffi (1.14.2) 29 | forwardable-extended (2.6.0) 30 | gemoji (3.0.1) 31 | html-pipeline (2.14.0) 32 | activesupport (>= 2) 33 | nokogiri (>= 1.4) 34 | http_parser.rb (0.6.0) 35 | i18n (0.9.5) 36 | concurrent-ruby (~> 1.0) 37 | jekyll (3.9.0) 38 | addressable (~> 2.4) 39 | colorator (~> 1.0) 40 | em-websocket (~> 0.5) 41 | i18n (~> 0.7) 42 | jekyll-sass-converter (~> 1.0) 43 | jekyll-watch (~> 2.0) 44 | kramdown (>= 1.17, < 3) 45 | liquid (~> 4.0) 46 | mercenary (~> 0.3.3) 47 | pathutil (~> 0.9) 48 | rouge (>= 1.7, < 4) 49 | safe_yaml (~> 1.0) 50 | jekyll-avatar (0.7.0) 51 | jekyll (>= 3.0, < 5.0) 52 | jekyll-coffeescript (2.0.0) 53 | coffee-script (~> 2.2) 54 | coffee-script-source (~> 1.12) 55 | jekyll-default-layout (0.1.5) 56 | jekyll (>= 3.0, < 5.0) 57 | jekyll-feed (0.15.1) 58 | jekyll (>= 3.7, < 5.0) 59 | jekyll-gist (1.5.0) 60 | octokit (~> 4.2) 61 | jekyll-gitlab-metadata (0.8.0) 62 | jekyll (~> 3.6) 63 | jekyll-mentions (1.6.0) 64 | html-pipeline (~> 2.3) 65 | jekyll (>= 3.7, < 5.0) 66 | jekyll-optional-front-matter (0.3.2) 67 | jekyll (>= 3.0, < 5.0) 68 | jekyll-paginate (1.1.0) 69 | jekyll-readme-index (0.3.0) 70 | jekyll (>= 3.0, < 5.0) 71 | jekyll-redirect-from (0.16.0) 72 | jekyll (>= 3.3, < 5.0) 73 | jekyll-relative-links (0.6.1) 74 | jekyll (>= 3.3, < 5.0) 75 | jekyll-remote-theme (0.4.2) 76 | addressable (~> 2.0) 77 | jekyll (>= 3.5, < 5.0) 78 | jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0) 79 | rubyzip (>= 1.3.0, < 3.0) 80 | jekyll-sass-converter (1.5.2) 81 | sass (~> 3.4) 82 | jekyll-seo-tag (2.7.1) 83 | jekyll (>= 3.8, < 5.0) 84 | jekyll-sitemap (1.4.0) 85 | jekyll (>= 3.7, < 5.0) 86 | jekyll-titles-from-headings (0.5.3) 87 | jekyll (>= 3.3, < 5.0) 88 | jekyll-watch (2.2.1) 89 | listen (~> 3.0) 90 | jemoji (0.12.0) 91 | gemoji (~> 3.0) 92 | html-pipeline (~> 2.2) 93 | jekyll (>= 3.0, < 5.0) 94 | kramdown (2.3.0) 95 | rexml 96 | kramdown-parser-gfm (1.1.0) 97 | kramdown (~> 2.0) 98 | liquid (4.0.3) 99 | listen (3.4.1) 100 | rb-fsevent (~> 0.10, >= 0.10.3) 101 | rb-inotify (~> 0.9, >= 0.9.10) 102 | mercenary (0.3.6) 103 | mini_portile2 (2.5.0) 104 | minitest (5.14.3) 105 | multipart-post (2.1.1) 106 | nokogiri (1.11.1) 107 | mini_portile2 (~> 2.5.0) 108 | racc (~> 1.4) 109 | octokit (4.20.0) 110 | faraday (>= 0.9) 111 | sawyer (~> 0.8.0, >= 0.5.3) 112 | pathutil (0.16.2) 113 | forwardable-extended (~> 2.6) 114 | public_suffix (4.0.6) 115 | racc (1.5.2) 116 | rb-fsevent (0.10.4) 117 | rb-inotify (0.10.1) 118 | ffi (~> 1.0) 119 | rexml (3.2.4) 120 | rouge (3.26.0) 121 | ruby2_keywords (0.0.4) 122 | rubyzip (2.3.0) 123 | safe_yaml (1.0.5) 124 | sass (3.7.4) 125 | sass-listen (~> 4.0.0) 126 | sass-listen (4.0.0) 127 | rb-fsevent (~> 0.9, >= 0.9.4) 128 | rb-inotify (~> 0.9, >= 0.9.7) 129 | sawyer 
(0.8.2) 130 | addressable (>= 2.3.5) 131 | faraday (> 0.8, < 2.0) 132 | thread_safe (0.3.6) 133 | tzinfo (1.2.9) 134 | thread_safe (~> 0.1) 135 | zeitwerk (2.4.2) 136 | 137 | PLATFORMS 138 | ruby 139 | 140 | DEPENDENCIES 141 | jekyll (~> 3.6) 142 | jekyll-avatar (~> 0.7.0) 143 | jekyll-coffeescript (~> 2.0) 144 | jekyll-default-layout (~> 0.1.5) 145 | jekyll-feed (~> 0.15.1) 146 | jekyll-gist (~> 1.5) 147 | jekyll-gitlab-metadata (~> 0.8.0) 148 | jekyll-mentions (~> 1.6) 149 | jekyll-optional-front-matter (~> 0.3.2) 150 | jekyll-paginate (~> 1.1) 151 | jekyll-readme-index (~> 0.3.0) 152 | jekyll-redirect-from (~> 0.16.0) 153 | jekyll-relative-links (~> 0.6.1) 154 | jekyll-remote-theme (~> 0.4.2) 155 | jekyll-seo-tag (~> 2.7) 156 | jekyll-sitemap (~> 1.4) 157 | jekyll-titles-from-headings (~> 0.5.3) 158 | jemoji (~> 0.12.0) 159 | kramdown-parser-gfm (~> 1.1) 160 | 161 | BUNDLED WITH 162 | 2.1.4 163 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

Meshcapade Wiki
===============

Welcome to the Meshcapade Wiki!

Meshcapade is revolutionizing the world of human body modeling, sizing, and animation. We employ state-of-the-art statistical models that encode realistic body shapes, sizes, and motions.

This wiki is dedicated to providing more in-depth knowledge about our parametric models, body and shape estimation technologies, and our automated processing services.


Automated Processing Platform: [digidoppel](https://digidoppel.com/)
-----------------------------------------------------------
`digidoppel` is a one-stop shop where users can input complex data from many different sources, ranging from expensive high-end 3D scanning systems to photos and videos from anyone's smartphone, and instantly receive an accurate 3D avatar. This avatar can easily be imported into a vast array of commercial tools, from apparel design and animation to generating AI training data.

* Avatars from 3D scans (scan alignment)
* Avatars from measurements
* Avatars from RGB-D (Lidar, Kinect, RealSense)
* Avatars from images/video


Parametric Models
-----------------

### What is a parametric model?
To understand humans and their behavior we utilize 3D parametric models of the body and its movement. Such models facilitate reasoning about the different ways in which we all interact with the world around us, such as human-object interaction, human-to-human contact, effects of compression due to external forces, as well as posture, gait and emotion related effects. These models allow us to explain the relationship of the 3D body to the 3D world around us. We learn realistic 3D models of the human body that can be parametrized according to shape or pose independently. Specifically, we factorize the changes in the body due to body shape differences from the changes due to pose, using thousands of high-resolution 3D scans.

Among these methods the [SMPL Model](./wiki/SMPL.md) has become the standard for research on human body shape and pose. This model is especially helpful for research because the model's body shape and pose parameters can be easily optimized to fit the model to data from many different sources, e.g. 3D scans, RGB-D, MOCAP and videos, and at the same time the model output itself can be easily used in any standard 3D graphics software for animation, gaming or visualization (see the short code sketch at the end of this page). Going beyond SMPL, we also have **MANO**, a 3D hand model learned from around 2000 hand scans of different people in many poses, and **FLAME**, which uses a novel dataset of 4D facial sequences to capture realistic 3D head shape, jaw articulation, eye movement, blinking, and facial expressions. Combining all these parametric models into a single full-body, face and hand model is **SMPL-X**. Finally, the latest iteration in the SMPL model family is **STAR**, which introduces a new parametrization for the SMPL model, making it more compact and even easier to use for AI-based pipelines, while retaining all the useful usability features of the earlier models.

More details about all the parametric models within the "SMPL family" are available on the [SMPL wiki](./wiki/SMPL.md).

### Human Body Models
The human body is certainly central to our lives and is commonly depicted in images and video. We are developing the world's most realistic models of the body by learning their shape and how they move from data.
Our goal is to make 3D models of the body look and move in ways that make them indistinguishable from real humans. Such virtual humans can be used in special effects and will play an important role in emerging virtual reality systems. They can also be used in computer vision to generate training data for learning methods, or can be fit directly to sensor data. What makes this hard is that the human body is highly articulated, deforms with kinematic changes, and exhibits large shape variability across subjects.

#### 1. Hand Model
*MANO* is created from the SMPL hand topology, and has analogous components to those in SMPL: a template shape, kinematic tree, shape and pose blend shapes, blend weights and a joint regressor.

#### 2. Face Model
*FLAME* adapts the SMPL body model formulation to create a statistical head model that is significantly more accurate and expressive than existing head and face models, while remaining compatible with standard graphics software. In contrast to existing models, FLAME explicitly models head pose and eyeball rotation.

#### 3. Infant Model
*SMIL* is a statistical model learned for 3D shape and 3D pose estimation of infants. It is the first work on learning a statistical 3D body model from low-quality, incomplete RGB-D data of freely moving humans.


### Animal Models
The detection, tracking, and analysis of animals has many applications in biology, neuroscience, ecology, farming, and entertainment. Despite this wide applicability, the computer vision community has focused more heavily on modeling humans, estimating human pose, and analyzing human behavior. Can we take the best practices learned from the analysis of humans and apply them directly to animals? To address this, the *SMAL* model takes the approach used for 3D human pose and shape modeling and extends it to modeling animals.


Body & Pose Estimation Methods
------------------------

We influence the world around us through our bodies. We express our emotions through our facial expressions and body posture. We manipulate and change the world with our hands. For computers to be full partners with humans, they have to see us and understand our behavior. They have to recognize our facial expressions, our gestures, our movements and our actions. This means that we need robust algorithms and expressive representations that can capture human pose, motion, and behavior.

Representing and extracting 3D body shape and pose has not been the dominant paradigm in the field, but this is now changing. The introduction of our SMPL body model helped change this. SMPL is accurate, easy to use, compatible with game engines, differentiable, and is now widely used both in research and industry. It can be easily fit to image data "top down" or integrated into the end-to-end training of neural networks.

In the last few years many publications have shown how to fit SMPL to image data and how to train deep networks end-to-end to extract full-body shape and pose from single images or video. The ultimate goal in this space is to understand behavior. To do so we first want to capture it at scale. This means robustly and efficiently tracking human behavior in natural settings and relating that behavior to the 3D world around the person.

More details about shape and pose estimation methods designed to work around SMPL are available on the [Body & Pose wiki](./wiki/body_and_pose.md).
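As referenced above, here is a minimal sketch of loading and posing SMPL in Python using the open-source `smplx` package. The model path, the zero parameter values, and the variable names are illustrative assumptions, not part of this wiki:

```python
import torch
import smplx  # pip install smplx

# 'models' is a placeholder path to the downloaded SMPL model files
model = smplx.create('models', model_type='smpl', gender='neutral')

betas = torch.zeros(1, 10)       # shape parameters
body_pose = torch.zeros(1, 69)   # axis-angle pose for the 23 body joints
global_orient = torch.zeros(1, 3)

output = model(betas=betas, body_pose=body_pose, global_orient=global_orient)
vertices = output.vertices  # (1, 6890, 3) mesh vertices
joints = output.joints      # 3D joint locations
```

Because shape (`betas`) and pose are separate inputs, the same body shape can be re-posed, or the same pose applied to a different body, which is exactly the shape/pose factorization described above.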
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
remote_theme: Drassil/git-wiki-theme@master
# (string) Title of your wiki
title: "Meshcapade Wiki"
# (string) if you've installed your wiki in a subfolder, you must change this configuration
# to your folder name, otherwise leave it empty
# baseurl: "/wiki"
# (string) Description of your wiki
description: A Wiki on Body-Modelling Technology, maintained by Meshcapade GmbH.
# (boolean) Enable/disable wiki page list in sidebar
show_wiki_pages: true
# (integer) Maximum number of wiki pages to show in sidebar
show_wiki_pages_limit: 3
# (boolean) Enable/disable blog feature
blog_feature: false
# (boolean) Enable/disable wiki posts list in sidebar (needs blog_feature enabled)
show_wiki_posts: false
# (integer) Maximum number of wiki posts to show in sidebar
show_wiki_posts_limit: 0
# from jekyll (read the jekyll docs)
paginate: 5
paginate_path: "/blog/page:num"
permalink: /blog/posts/:year/:month/:day/:title:output_ext
# (boolean) Enable/disable download buttons in sidebar
show_downloads: false
# (string) Specify the branch rendered by GitHub Pages, allowing wiki tool buttons to work
git_branch: "main"
# (string) URL of the logo image; it can be full, absolute or relative.
logo_url: /assets/meshcapade-logo.png
# (string) The UA-XXXXX-Y code from Google Analytics to enable GA on your wiki
google_analytics:
# (string) folder where wiki pages are stored; it's needed for tool buttons
wiki_folder: "wiki"
# (boolean) if you're using the github wiki as a submodule then this config
# must be enabled to allow tool buttons to work properly
use_github_wiki: false
# (boolean) Enable "Edit with Prose.io" button in tools; it's a 3rd-party
# service to edit github markdown pages easily
use_prose_io: false
# Select search_engine component from:
# - js: uses a built-in javascript component that uses a generated js object
# - js_rss: uses a built-in javascript component that uses the generated sitemap_full.xml to search inside your wiki with the lunr library (slow and experimental)
# - github: uses the internal github repository search
# - google: uses the cse search bar; you need to configure google_cse_token
#
search_engine : "js"
# Setting google custom search engine for google
# cse search bar (https://cse.google.it/cse/)
google_cse_token:

# (string) path of site root. Normally it must be empty because _config.yml resides in the root of your repository.
# If you have _config.yml and your site in a subfolder, then change this config accordingly
site_root:

#
# Jekyll configurations
#

# You can customize it by changing the default layout for all pages
# More info: https://jekyllrb.com/docs/configuration/
#
# git-wiki includes some internal themes that you can choose;
# check the _layouts folder
#
markdown: kramdown
highlighter: rouge
kramdown:
  input: GFM
  syntax_highlighter: rouge

defaults:
  -
    scope:
      path: "wiki"
    values:
      permalink: /:basename
  -
    scope:
      path: "" # an empty string here means all files in the project
    values:
      layout: "git-wiki-default"
  -
    scope:
      path: ""
      type: "pages"
    values:
      layout: "git-wiki-default"
  -
    scope:
      path: ""
      type: "posts"
    values:
      layout: "git-wiki-post"
  -
    scope:
      path: blog
    values:
      layout: "git-wiki-blog"
sass:
  style: compressed
plugins:
  - jekyll-avatar
  - jekyll-coffeescript
  - jekyll-default-layout
  - jekyll-feed
  - jekyll-gist
  - jekyll-paginate
  - jekyll-mentions
  - jekyll-optional-front-matter
  - jekyll-readme-index
  - jekyll-redirect-from
  - jekyll-remote-theme
  - jekyll-relative-links
  - jekyll-seo-tag
  - jekyll-sitemap
  - jekyll-titles-from-headings
  - jemoji
  - jekyll-gitlab-metadata


#
# INCLUDING HOOKS
# They are optional; change them only if you need to
# Check the wiki documentation to learn how they work
#

inc_before_toc :
inc_after_toc :
inc_before_content :
inc_after_content :
inc_before_footer :
inc_after_footer :
inc_before_head :
inc_after_head :
inc_before_meta :
inc_after_meta :
inc_before_scripts :
inc_after_scripts :
inc_before_styles :
inc_after_styles :
inc_before_header :
inc_after_header :
inc_before_tail :
inc_after_tail :
inc_before_tools :
inc_after_tools :

inc_before_page_list :
inc_after_page_list : sidebar.html
inc_before_post_list :
inc_after_post_list :
--------------------------------------------------------------------------------
/_includes/sidebar.html:
--------------------------------------------------------------------------------
{% if page.usemathjax %}
<!-- MathJax script include (HTML markup stripped in this dump) -->
{% endif %}

Menu (Edit):
<!-- menu list markup stripped in this dump -->
--------------------------------------------------------------------------------
/_posts/2020-07-07-MONO-model.md:
--------------------------------------------------------------------------------
---
published: true
tags: SMPL-model, Technology-licensing
---

## MANO Model

The MANO Model is a realistic representation of the human hand that supports realistic changes in hand shape and pose. MANO is learned from around 1000 high-resolution 3D scans in a wide variety of hand poses.
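For orientation, the snippet below shows one way to load MANO with the open-source `smplx` Python package; the model path, the PCA settings, and the zero parameters are assumptions for illustration, not part of the original post:

```python
import torch
import smplx  # pip install smplx

# 'models' is a placeholder path to the downloaded MANO model files
mano = smplx.create('models', model_type='mano', is_rhand=True,
                    use_pca=True, num_pca_comps=6)

betas = torch.zeros(1, 10)     # hand shape parameters
hand_pose = torch.zeros(1, 6)  # pose in the 6-component PCA space

output = mano(betas=betas, hand_pose=hand_pose,
              global_orient=torch.zeros(1, 3))
vertices = output.vertices     # (1, 778, 3) hand mesh vertices
```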
Find out more about the MANO publication here:
http://mano.is.tue.mpg.de/
Find out more about licensing MANO and other Meshcapade technologies here:
https://lnkd.in/etNCDNb

![](https://1.bp.blogspot.com/-EplrYAJyqTM/XwTlczb_LTI/AAAAAAAAACg/2vEWMlCMKwQ4ZjaXtl6d_AQvnG2_7DKhgCK4BGAsYHg/s480/MANO%2BModel%2BGIF.gif)
--------------------------------------------------------------------------------
/assets/.gitkeep:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/assets/FAQs/PAMPJPE.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/FAQs/PAMPJPE.png
--------------------------------------------------------------------------------
/assets/FAQs/dmpl.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/FAQs/dmpl.gif
--------------------------------------------------------------------------------
/assets/FAQs/internal-geo1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/FAQs/internal-geo1.png
--------------------------------------------------------------------------------
/assets/FAQs/internal-geo2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/FAQs/internal-geo2.png
--------------------------------------------------------------------------------
/assets/FAQs/internal-geo3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/FAQs/internal-geo3.png
--------------------------------------------------------------------------------
/assets/FAQs/neutral.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/FAQs/neutral.png
--------------------------------------------------------------------------------
/assets/FAQs/preg-flickr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/FAQs/preg-flickr.png
--------------------------------------------------------------------------------
/assets/SMPLX_OpenPose_mapping/README.md:
--------------------------------------------------------------------------------
Information about the smplx_openpose25.json file:
-------------------------------------------------

The JSON file contains the following keys:

- smplx_keypoint_names: The names of the SMPL-X keypoints
- openpose25_keypoint_names: The names of the OpenPose keypoints. The order for OpenPose here is:
  - 25 body keypoints
  - 21 left hand keypoints
  - 21 right hand keypoints
  - 51 facial landmarks
  - 17 contour landmarks
- openpose_idxs: The indices of the OpenPose keypoint array.
- smplx_idxs: The corresponding SMPL-X indices.
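For example, the two index arrays can be used to reorder SMPL-X joints into OpenPose order. This is a hedged sketch; the `smplx_joints` array is a hypothetical input for illustration:

```python
import json
import numpy as np

with open('smplx_openpose25.json') as f:
    mapping = json.load(f)

# Hypothetical input: one row of 3D coordinates per SMPL-X joint/landmark
smplx_joints = np.zeros((len(mapping['smplx_keypoint_names']), 3))

# Gather the SMPL-X rows corresponding to each OpenPose keypoint,
# in OpenPose order (body, left hand, right hand, face, contour)
openpose_joints = smplx_joints[mapping['smplx_idxs']]
assert len(openpose_joints) == len(mapping['openpose_idxs'])
```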
15 | 16 | The attached script shows how to access the SMPLX keypoint corresponding to each OpenPose keypoint. 17 | 18 | Note, some SMPLX keypoints do not match with the OpenPose keypoints, this is the list of keypoints that will not have a mapping: 19 | 20 | spine1 21 | spine2 22 | spine3 23 | left_foot 24 | right_foot 25 | left_collar 26 | right_collar 27 | head 28 | left_eye_smplx 29 | right_eye_smplx -------------------------------------------------------------------------------- /assets/SMPLX_OpenPose_mapping/smplx_openpose25.json: -------------------------------------------------------------------------------- 1 | { 2 | "smplx_idxs": [ 3 | 55, 4 | 12, 5 | 17, 6 | 19, 7 | 21, 8 | 16, 9 | 18, 10 | 20, 11 | 0, 12 | 2, 13 | 5, 14 | 8, 15 | 1, 16 | 4, 17 | 7, 18 | 56, 19 | 57, 20 | 58, 21 | 59, 22 | 60, 23 | 61, 24 | 62, 25 | 63, 26 | 64, 27 | 65, 28 | 20, 29 | 37, 30 | 38, 31 | 39, 32 | 66, 33 | 25, 34 | 26, 35 | 27, 36 | 67, 37 | 28, 38 | 29, 39 | 30, 40 | 68, 41 | 34, 42 | 35, 43 | 36, 44 | 69, 45 | 31, 46 | 32, 47 | 33, 48 | 70, 49 | 21, 50 | 52, 51 | 53, 52 | 54, 53 | 71, 54 | 40, 55 | 41, 56 | 42, 57 | 72, 58 | 43, 59 | 44, 60 | 45, 61 | 73, 62 | 49, 63 | 50, 64 | 51, 65 | 74, 66 | 46, 67 | 47, 68 | 48, 69 | 75, 70 | 76, 71 | 77, 72 | 78, 73 | 79, 74 | 80, 75 | 81, 76 | 82, 77 | 83, 78 | 84, 79 | 85, 80 | 86, 81 | 87, 82 | 88, 83 | 89, 84 | 90, 85 | 91, 86 | 92, 87 | 93, 88 | 94, 89 | 95, 90 | 96, 91 | 97, 92 | 98, 93 | 99, 94 | 100, 95 | 101, 96 | 102, 97 | 103, 98 | 104, 99 | 105, 100 | 106, 101 | 107, 102 | 108, 103 | 109, 104 | 110, 105 | 111, 106 | 112, 107 | 113, 108 | 114, 109 | 115, 110 | 116, 111 | 117, 112 | 118, 113 | 119, 114 | 120, 115 | 121, 116 | 122, 117 | 123, 118 | 124, 119 | 125, 120 | 126, 121 | 127, 122 | 128, 123 | 129, 124 | 130, 125 | 131, 126 | 132, 127 | 133, 128 | 134, 129 | 135, 130 | 136, 131 | 137, 132 | 138, 133 | 139, 134 | 140, 135 | 141, 136 | 142, 137 | 143 138 | ], 139 | "smplx_keypoint_names": [ 140 | "pelvis", 141 | "left_hip", 142 | "right_hip", 143 | "spine1", 144 | "left_knee", 145 | "right_knee", 146 | "spine2", 147 | "left_ankle", 148 | "right_ankle", 149 | "spine3", 150 | "left_foot", 151 | "right_foot", 152 | "neck", 153 | "left_collar", 154 | "right_collar", 155 | "head", 156 | "left_shoulder", 157 | "right_shoulder", 158 | "left_elbow", 159 | "right_elbow", 160 | "left_wrist", 161 | "right_wrist", 162 | "jaw", 163 | "left_eye_smplx", 164 | "right_eye_smplx", 165 | "left_index1", 166 | "left_index2", 167 | "left_index3", 168 | "left_middle1", 169 | "left_middle2", 170 | "left_middle3", 171 | "left_pinky1", 172 | "left_pinky2", 173 | "left_pinky3", 174 | "left_ring1", 175 | "left_ring2", 176 | "left_ring3", 177 | "left_thumb1", 178 | "left_thumb2", 179 | "left_thumb3", 180 | "right_index1", 181 | "right_index2", 182 | "right_index3", 183 | "right_middle1", 184 | "right_middle2", 185 | "right_middle3", 186 | "right_pinky1", 187 | "right_pinky2", 188 | "right_pinky3", 189 | "right_ring1", 190 | "right_ring2", 191 | "right_ring3", 192 | "right_thumb1", 193 | "right_thumb2", 194 | "right_thumb3", 195 | "nose", 196 | "right_eye", 197 | "left_eye", 198 | "right_ear", 199 | "left_ear", 200 | "left_big_toe", 201 | "left_small_toe", 202 | "left_heel", 203 | "right_big_toe", 204 | "right_small_toe", 205 | "right_heel", 206 | "left_thumb", 207 | "left_index", 208 | "left_middle", 209 | "left_ring", 210 | "left_pinky", 211 | "right_thumb", 212 | "right_index", 213 | "right_middle", 214 | "right_ring", 215 | "right_pinky", 216 | "right_eye_brow1", 217 
| "right_eye_brow2", 218 | "right_eye_brow3", 219 | "right_eye_brow4", 220 | "right_eye_brow5", 221 | "left_eye_brow5", 222 | "left_eye_brow4", 223 | "left_eye_brow3", 224 | "left_eye_brow2", 225 | "left_eye_brow1", 226 | "nose1", 227 | "nose2", 228 | "nose3", 229 | "nose4", 230 | "right_nose_2", 231 | "right_nose_1", 232 | "nose_middle", 233 | "left_nose_1", 234 | "left_nose_2", 235 | "right_eye1", 236 | "right_eye2", 237 | "right_eye3", 238 | "right_eye4", 239 | "right_eye5", 240 | "right_eye6", 241 | "left_eye4", 242 | "left_eye3", 243 | "left_eye2", 244 | "left_eye1", 245 | "left_eye6", 246 | "left_eye5", 247 | "right_mouth_1", 248 | "right_mouth_2", 249 | "right_mouth_3", 250 | "mouth_top", 251 | "left_mouth_3", 252 | "left_mouth_2", 253 | "left_mouth_1", 254 | "left_mouth_5", 255 | "left_mouth_4", 256 | "mouth_bottom", 257 | "right_mouth_4", 258 | "right_mouth_5", 259 | "right_lip_1", 260 | "right_lip_2", 261 | "lip_top", 262 | "left_lip_2", 263 | "left_lip_1", 264 | "left_lip_3", 265 | "lip_bottom", 266 | "right_lip_3", 267 | "right_contour_1", 268 | "right_contour_2", 269 | "right_contour_3", 270 | "right_contour_4", 271 | "right_contour_5", 272 | "right_contour_6", 273 | "right_contour_7", 274 | "right_contour_8", 275 | "contour_middle", 276 | "left_contour_8", 277 | "left_contour_7", 278 | "left_contour_6", 279 | "left_contour_5", 280 | "left_contour_4", 281 | "left_contour_3", 282 | "left_contour_2", 283 | "left_contour_1" 284 | ], 285 | "openpose25_keypoint_names": [ 286 | "nose", 287 | "neck", 288 | "right_shoulder", 289 | "right_elbow", 290 | "right_wrist", 291 | "left_shoulder", 292 | "left_elbow", 293 | "left_wrist", 294 | "pelvis", 295 | "right_hip", 296 | "right_knee", 297 | "right_ankle", 298 | "left_hip", 299 | "left_knee", 300 | "left_ankle", 301 | "right_eye", 302 | "left_eye", 303 | "right_ear", 304 | "left_ear", 305 | "left_big_toe", 306 | "left_small_toe", 307 | "left_heel", 308 | "right_big_toe", 309 | "right_small_toe", 310 | "right_heel", 311 | "left_wrist", 312 | "left_thumb1", 313 | "left_thumb2", 314 | "left_thumb3", 315 | "left_thumb", 316 | "left_index1", 317 | "left_index2", 318 | "left_index3", 319 | "left_index", 320 | "left_middle1", 321 | "left_middle2", 322 | "left_middle3", 323 | "left_middle", 324 | "left_ring1", 325 | "left_ring2", 326 | "left_ring3", 327 | "left_ring", 328 | "left_pinky1", 329 | "left_pinky2", 330 | "left_pinky3", 331 | "left_pinky", 332 | "right_wrist", 333 | "right_thumb1", 334 | "right_thumb2", 335 | "right_thumb3", 336 | "right_thumb", 337 | "right_index1", 338 | "right_index2", 339 | "right_index3", 340 | "right_index", 341 | "right_middle1", 342 | "right_middle2", 343 | "right_middle3", 344 | "right_middle", 345 | "right_ring1", 346 | "right_ring2", 347 | "right_ring3", 348 | "right_ring", 349 | "right_pinky1", 350 | "right_pinky2", 351 | "right_pinky3", 352 | "right_pinky", 353 | "right_eye_brow1", 354 | "right_eye_brow2", 355 | "right_eye_brow3", 356 | "right_eye_brow4", 357 | "right_eye_brow5", 358 | "left_eye_brow5", 359 | "left_eye_brow4", 360 | "left_eye_brow3", 361 | "left_eye_brow2", 362 | "left_eye_brow1", 363 | "nose1", 364 | "nose2", 365 | "nose3", 366 | "nose4", 367 | "right_nose_2", 368 | "right_nose_1", 369 | "nose_middle", 370 | "left_nose_1", 371 | "left_nose_2", 372 | "right_eye1", 373 | "right_eye2", 374 | "right_eye3", 375 | "right_eye4", 376 | "right_eye5", 377 | "right_eye6", 378 | "left_eye4", 379 | "left_eye3", 380 | "left_eye2", 381 | "left_eye1", 382 | "left_eye6", 383 | "left_eye5", 384 | 
"right_mouth_1", 385 | "right_mouth_2", 386 | "right_mouth_3", 387 | "mouth_top", 388 | "left_mouth_3", 389 | "left_mouth_2", 390 | "left_mouth_1", 391 | "left_mouth_5", 392 | "left_mouth_4", 393 | "mouth_bottom", 394 | "right_mouth_4", 395 | "right_mouth_5", 396 | "right_lip_1", 397 | "right_lip_2", 398 | "lip_top", 399 | "left_lip_2", 400 | "left_lip_1", 401 | "left_lip_3", 402 | "lip_bottom", 403 | "right_lip_3", 404 | "right_contour_1", 405 | "right_contour_2", 406 | "right_contour_3", 407 | "right_contour_4", 408 | "right_contour_5", 409 | "right_contour_6", 410 | "right_contour_7", 411 | "right_contour_8", 412 | "contour_middle", 413 | "left_contour_8", 414 | "left_contour_7", 415 | "left_contour_6", 416 | "left_contour_5", 417 | "left_contour_4", 418 | "left_contour_3", 419 | "left_contour_2", 420 | "left_contour_1" 421 | ], 422 | "openpose_idxs": [ 423 | 0, 424 | 1, 425 | 2, 426 | 3, 427 | 4, 428 | 5, 429 | 6, 430 | 7, 431 | 8, 432 | 9, 433 | 10, 434 | 11, 435 | 12, 436 | 13, 437 | 14, 438 | 15, 439 | 16, 440 | 17, 441 | 18, 442 | 19, 443 | 20, 444 | 21, 445 | 22, 446 | 23, 447 | 24, 448 | 25, 449 | 26, 450 | 27, 451 | 28, 452 | 29, 453 | 30, 454 | 31, 455 | 32, 456 | 33, 457 | 34, 458 | 35, 459 | 36, 460 | 37, 461 | 38, 462 | 39, 463 | 40, 464 | 41, 465 | 42, 466 | 43, 467 | 44, 468 | 45, 469 | 46, 470 | 47, 471 | 48, 472 | 49, 473 | 50, 474 | 51, 475 | 52, 476 | 53, 477 | 54, 478 | 55, 479 | 56, 480 | 57, 481 | 58, 482 | 59, 483 | 60, 484 | 61, 485 | 62, 486 | 63, 487 | 64, 488 | 65, 489 | 66, 490 | 67, 491 | 68, 492 | 69, 493 | 70, 494 | 71, 495 | 72, 496 | 73, 497 | 74, 498 | 75, 499 | 76, 500 | 77, 501 | 78, 502 | 79, 503 | 80, 504 | 81, 505 | 82, 506 | 83, 507 | 84, 508 | 85, 509 | 86, 510 | 87, 511 | 88, 512 | 89, 513 | 90, 514 | 91, 515 | 92, 516 | 93, 517 | 94, 518 | 95, 519 | 96, 520 | 97, 521 | 98, 522 | 99, 523 | 100, 524 | 101, 525 | 102, 526 | 103, 527 | 104, 528 | 105, 529 | 106, 530 | 107, 531 | 108, 532 | 109, 533 | 110, 534 | 111, 535 | 112, 536 | 113, 537 | 114, 538 | 115, 539 | 116, 540 | 117, 541 | 118, 542 | 119, 543 | 120, 544 | 121, 545 | 122, 546 | 123, 547 | 124, 548 | 125, 549 | 126, 550 | 127, 551 | 128, 552 | 129, 553 | 130, 554 | 131, 555 | 132, 556 | 133, 557 | 134 558 | ] 559 | } -------------------------------------------------------------------------------- /assets/SMPLX_OpenPose_mapping/smplx_openpose_test.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | with open('./smplx_openpose25_200519.json') as f: 4 | data = json.load(f) 5 | 6 | wrong_idxs = [] 7 | print 'OP \t| SMPLX\n____________________' 8 | for idx in range(len(data['openpose_idxs'])): 9 | opose_keyname = data['openpose25_keypoint_names'][idx] 10 | smplx_keyname = data['smplx_keypoint_names'][data['smplx_idxs'][idx]] 11 | if opose_keyname != smplx_keyname: 12 | wrong_idxs.append(idx) 13 | else: 14 | print "%s \t| %s" % (opose_keyname, smplx_keyname) 15 | 16 | if wrong_idxs: 17 | print "ERROR! The following keypoint names didn't match up: \n", wrong_idxs 18 | else: 19 | print "No mismatched keypoints found found!" 
-------------------------------------------------------------------------------- /assets/SMPL_body_segmentation/smpl/smpl_segmentation_on_template.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/SMPL_body_segmentation/smpl/smpl_segmentation_on_template.png -------------------------------------------------------------------------------- /assets/SMPL_body_segmentation/smplx/smplx_segmentation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/SMPL_body_segmentation/smplx/smplx_segmentation.png -------------------------------------------------------------------------------- /assets/SMPL_formulation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/SMPL_formulation.png -------------------------------------------------------------------------------- /assets/clipart-under-construction.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/clipart-under-construction.png -------------------------------------------------------------------------------- /assets/flame.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/flame.png -------------------------------------------------------------------------------- /assets/images_digidoppel/clothing-problems.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/clothing-problems.png -------------------------------------------------------------------------------- /assets/images_digidoppel/internal-geometry-problems.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/internal-geometry-problems.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-ankle_girth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-ankle_girth.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-arm_scye_girth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-arm_scye_girth.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-armlength_shoulder_elbow.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-armlength_shoulder_elbow.pdf -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-armlength_shoulder_elbow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-armlength_shoulder_elbow.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-armlength_shoulder_wrist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-armlength_shoulder_wrist.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-armlength_spine_wrist.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-armlength_spine_wrist.pdf -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-armlength_spine_wrist.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-armlength_spine_wrist.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-chest_max_girth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-chest_max_girth.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-crotch_length.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-crotch_length.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-foot_length.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-foot_length.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-height.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-height.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-hip_height.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-hip_height.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-hip_max_girth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-hip_max_girth.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-inseam.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-inseam.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-neck_base_girth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-neck_base_girth.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-shoulder_breadth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-shoulder_breadth.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-thigh_max_girth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-thigh_max_girth.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-waist_height.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-waist_height.png -------------------------------------------------------------------------------- /assets/images_digidoppel/measurements/BodyVis-Web-waist_min_girth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/images_digidoppel/measurements/BodyVis-Web-waist_min_girth.png -------------------------------------------------------------------------------- /assets/mano.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/mano.png -------------------------------------------------------------------------------- /assets/meshcapade-logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/meshcapade-logo.png -------------------------------------------------------------------------------- /assets/smal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/smal.png -------------------------------------------------------------------------------- /assets/smil.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/smil.png -------------------------------------------------------------------------------- /assets/smpl_fbx.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/smpl_fbx.png -------------------------------------------------------------------------------- /assets/smpl_textured_female.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/smpl_textured_female.png -------------------------------------------------------------------------------- /assets/smpl_textured_male.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/smpl_textured_male.png -------------------------------------------------------------------------------- /assets/smplx.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/smplx.png -------------------------------------------------------------------------------- /assets/star.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/star.png -------------------------------------------------------------------------------- /assets/vibe.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Meshcapade/wiki/23b9518fc1492c52c0bfba2e3623c95f1c5aafb6/assets/vibe.png -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | github-wiki-skeleton: 4 | build: 5 | context: . 6 | dockerfile: .env-files/Dockerfile.github 7 | ports: 8 | - 4000:4000 9 | - 35729:35729 10 | environment: 11 | - BUNDLE_GEMFILE=.env-files/Gemfile.github 12 | volumes: 13 | - .:/srv/jekyll 14 | - github_site:/srv/jekyll/_site 15 | command: bundle exec jekyll serve --host 0.0.0.0 --force_polling --livereload 16 | gitlab-wiki-skeleton: 17 | build: 18 | context: . 
      dockerfile: .env-files/Dockerfile.gitlab
    ports:
      - 4000:4000
      - 35729:35729
    environment:
      - BUNDLE_GEMFILE=.env-files/Gemfile.gitlab
    volumes:
      - .:/srv/jekyll
      - gitlab_site:/srv/jekyll/_site
    command: 'bundle exec jekyll serve --host 0.0.0.0 --force_polling --livereload'
volumes:
  github_site:
  gitlab_site:
--------------------------------------------------------------------------------
/wiki/.gitkeep:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/wiki/FAQs.md:
--------------------------------------------------------------------------------
---
usemathjax: true
---

___
# FAQs

## Scan Alignment

> How long does a typical model-fit to scan data (500k-1000k vertices, covering most of the scanned body) take, if one would perform the calculations on a normal offline PC (let's say i7, 32GB RAM)?

The processing for meshcapade.me is online, so your PC setup is not relevant. However, we have created a downloadable version of the software which can run on a PC. For a PC like the one you described (i7, 32GB RAM), the runtime will be between 15-25 minutes. If you would like to find out details of the downloadable software, we can share the licensing details with you.

> Do you recommend specific hardware like graphics cards to speed up the process?

Not at the moment. We are working on a GPU-enabled version of the software which will allow much faster processing. We will announce this later this year.

> Do you have different "fit-stages" where the parametric model is generated first and other fine-fit procedures are added consecutively?

Yes, as you might have already seen on meshcapade.me, when you upload a 3D scan there is an option called "Refinement". This option lets you set whether additional refinement stages are run. If this is set to:

- "none": the output will be directly from the SMPL model's shape space. Best to use when the input scan is extremely sparse or noisy (e.g. scans that have multiple layers of point-cloud surfaces).
- "low": it will run a refinement stage which allows the vertices of the output mesh to move more freely away from the SMPL model shape parameters. This is best to use if the input 3D scan is noisy (e.g. scans from multi-sensor systems, but also scans containing spurious points that are not expected in our model, like hair, clothing etc.).
- "standard": allows higher refinement. Use this setting when the input 3D scan is not too noisy (e.g. high-resolution 3D scanning systems).
- "high": highest refinement level. Use this setting when the input 3D scan is extremely clean with no spurious noise (e.g. if the scan has been cleaned up through a separate process).

> My scan alignment doesn't look right, what went wrong?

#### Problems with Internal Geometry

The scan alignment process is designed to work with 3D meshes acquired through a 3D scanning process.

![Noisy Scan](/assets/FAQs/internal-geo1.png)

Scans from a 3D scanner can be noisy; this noise is expected and our scan alignment process can handle it.

However, the 3D scans are expected to only contain an outer surface of the body, and no internal surfaces.
Generally, when artists create 3D body models in a 3D animation program, they might create additional geometry on the inside of the body surface. For example, an artist might create internal geometry for the eyes and mouth of the face, as shown below:

![Face Holes](/assets/FAQs/internal-geo2.png)

Or the limbs and neck joints might be created as separate geometry objects, so they will have additional geometry where the limb closes off in the 3D model.
These internal geometry pieces have to be removed before the mesh can be sent for processing on meshcapade.me.

![Noisy Body](/assets/FAQs/internal-geo3.png)

## meshcapade.me API

> What exactly is the API?

The API is a digital human cloud platform for creating animation-ready avatars from scans, hand measurements, and more. You can find out more details about it [here](https://meshcapade.com/infopages/api.html).

> How can I license the API?

We offer different pricing options and packages for the API. For more information, please check our licensing page. If you have higher volumes of data, please contact us at info@meshcapade.com.

> What am I allowed to do with the generated avatars? Can I share them with my users?

The output files are free for all uses under the SMPL Body license. For details, please contact us at info@meshcapade.com.

> How is security being handled for the API?

You can review our Privacy Policy [here](https://info.meshcapade.com/faqs/api-privacy-policy).

> Is your API available as an offline solution/SDK?

Yes. Contact us at info@meshcapade.com.

> Is your API available in a mobile application?

Yes. Contact us at info@meshcapade.com.

> Is there a trial for the API?

When you choose the pay-as-you-go option, you receive 10 free credits on initial sign-up that you can use to try out the API.

> Is there a sample output for the API?

Yes, we have some sample outputs for our bodies-from-scans and bodies-from-measurements APIs. Please contact us at info@meshcapade.com and we will share these sample files with you.


## MOCAP Processing: IMU Data

> How does MoSh work with IMU data?

Short answer: IMU mocap isn't ideal, but if you have the 3D marker trajectories from the IMU system, send us a sample of the data and we'll test how well it works.

Long answer: MoSh is designed to work with optical mocap. It uses the 3D locations of the optical markers to fit the SMPL model to each frame of data. The problem with IMU-based mocap systems is that you might get the 3D locations of the markers, but there is a general drift associated with IMUs that can cause huge errors over time. Therefore, usually the best option to fit SMPL to IMU data is to use one of the IMU-based works (e.g. Deep Inertial Poser - this is a bit old, but there are newer methods out there as well). At Meshcapade we don't have a license for any of these IMU-based works, though, since most are exclusively for academic use only. But we can still try to use MoSh with certain stronger constraints to make it work in spite of the drift and noise in the 3D marker locations from the IMU. We can't guarantee how well it will work, but we are happy to give it a try.
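If you want to sanity-check marker data before sending it to us, a quick way to inspect an .npz export is shown below. This is a generic numpy sketch; the file path is a placeholder and the actual key names are defined by the sample files linked next:

```python
import numpy as np

# 'markers.npz' is a placeholder path; compare the keys you see
# against the sample files linked below
data = np.load('markers.npz', allow_pickle=True)
for key in data.files:
    print(key, getattr(data[key], 'shape', type(data[key])))
```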
96 |
97 | Here is a sample file that shows the format for marker data that the MoSh code expects:
98 | - [NPZ format](https://drive.google.com/file/d/11tEf-WMwhpWI0fokznz3AqhTPz93S1z6/view?usp=sharing)
99 | - [C3D format](https://drive.google.com/file/d/1yMon2dTiQO-mP2a4_XpYQ_skWjgxOO0h/view?usp=sharing)
100 |
101 |
102 | ## SMPL - Technical Details
103 |
104 |
105 | > I want to use AMASS but I only want SMPL parameters, not SMPL-H. How do I get that?
106 |
107 | SMPL-H is exactly SMPL with the addition of articulated hand parameters. The hand parameters can be ignored by ignoring joints 22 and above. The remaining parameters, ignoring the hands, are exactly the same as SMPL (i.e. betas and thetas).
108 |
109 | > What is PA-MPJPE? What is a Procrustes Problem?
110 |
111 | MPJPE stands for Mean Per Joint Position Error. The per-joint position error is the Euclidean distance between the ground truth and the prediction for a joint. MPJPE is the mean of the per-joint position errors over all joints. Most methods estimate 3D pose and shape in the camera coordinate frame, not in the world frame. Often the bodies are tilted relative to the world, and sometimes flipped. This results in high errors even if the pose is roughly right. MPJPE is therefore calculated after translating the root ('pelvis') of the estimated body to the ground-truth root. But this does not get rid of the rotation issues.
112 |
113 | PA-MPJPE uses Procrustes alignment (PA) to solve for the translation, scale and rotation between the estimated body and the ground truth. MPJPE is then calculated after the estimated body is aligned to the ground truth by the Procrustes method, i.e. by a similarity transformation.
114 |
115 |
116 |
117 | Procrustes Problem: Given correspondences of points $$ A_i \in \mathbb{R}^3 $$ and $$ B_i \in \mathbb{R}^3 $$, find the scaling, rotation, and translation, together called a *similitude* transformation, that satisfy
118 |
119 | $$ A_i = sRB_i + T $$
120 |
121 | for $$ R \in SO(3) $$, $$ T \in \mathbb{R}^3 $$, and $$ s \in \mathbb{R}^+ $$.
122 |
123 | How do we solve for $$ R, T $$ from $$ n $$ point correspondences?
124 |
125 | Three non-collinear points suffice: the triangles $$ A_{i=1...3} $$ and $$ B_{i=1...3} $$ each define an orthogonal basis (with edge vectors $$ A_{21} = A_2 - A_1 $$ and $$ A_{31} = A_3 - A_1 $$, and similarly for $$ B $$):
126 |
127 |
128 |
129 | $$ \left( A_{21},\; (A_{21} \times A_{31}) \times A_{21},\; A_{21} \times A_{31} \right) $$
130 |
131 | and
132 |
133 | $$ \left( B_{21},\; (B_{21} \times B_{31}) \times B_{21},\; B_{21} \times B_{31} \right) $$
134 |
135 | The rotation between two orthogonal bases (one for the B points and one for the A points) is unique.
136 |
137 | For $$ n > 3 $$ point correspondences we instead solve a minimization problem. The optimal translation maps the rotated centroid of the B points onto the centroid of the A points, so we first subtract the centroids, obtaining centered points $$ \textbf{x}_i $$ (from B) and $$ \textbf{y}_i $$ (from A).
138 |
139 | We compute the $$ d \times d $$ covariance matrix
140 |
141 | $$ S = XWY^T $$
142 |
143 | where $$ X $$ and $$ Y $$ are the $$ d \times n $$ matrices that have $$ \textbf{x}_i $$ and $$ \textbf{y}_i $$ as their columns, respectively, and $$ W = \mathrm{diag}(w_1, w_2, \ldots, w_n) $$ holds per-point weights.
144 |
145 | We finally compute the [singular value decomposition](https://en.wikipedia.org/wiki/Singular_value_decomposition) $$ S = U\Sigma V^T $$. The optimal rotation is then $$ R = VU^T $$ (with the sign of the last column of $$ V $$ flipped if $$ \det(VU^T) < 0 $$, to avoid a reflection), and the optimal translation follows from the centroids.
146 |
147 |
148 | Please refer to [this document](https://igl.ethz.ch/projects/ARAP/svd_rot.pdf) for a more detailed derivation.
149 |
150 | Sample code for such a transformation is available [here](https://github.com/nkolot/SPIN/blob/5c796852ca7ca7373e104e8489aa5864323fbf84/utils/pose_utils.py).
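To make this concrete, here is a minimal NumPy sketch of the SVD-based similarity alignment described above, plus PA-MPJPE computed with it. It uses uniform weights, and the function names are our own:

```
import numpy as np

def similarity_align(B, A, with_scale=True):
    """Solve A_i ~= s * R @ B_i + T in the least-squares sense and
    return the aligned copy of B. B, A: (n, 3) corresponding points."""
    mu_B, mu_A = B.mean(axis=0), A.mean(axis=0)
    X, Y = B - mu_B, A - mu_A   # centered source (B) and target (A) points

    S = X.T @ Y                 # 3x3 covariance matrix (uniform weights)
    U, Sigma, Vt = np.linalg.svd(S)
    D = np.eye(3)
    D[2, 2] = np.sign(np.linalg.det(Vt.T @ U.T))  # avoid reflections
    R = Vt.T @ D @ U.T

    s = (Sigma * np.diag(D)).sum() / (X ** 2).sum() if with_scale else 1.0
    T = mu_A - s * R @ mu_B
    return s * (R @ B.T).T + T

def pa_mpjpe(pred_joints, gt_joints):
    """Mean per-joint position error after Procrustes alignment."""
    aligned = similarity_align(pred_joints, gt_joints)
    return np.linalg.norm(aligned - gt_joints, axis=1).mean()
```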
151 |
152 | ![PAMPJPE](/assets/FAQs/PAMPJPE.png)
153 |
154 | > How do I add soft-tissue dynamics to SMPL?
155 |
156 | You can animate soft-tissue dynamics with DMPL parameters. AMASS uses 8 DMPL parameters to realistically extract soft-tissue motion from a sparse set of markers.
157 |
158 |
159 | ```
160 | import os.path as osp
161 | import torch
162 | from human_body_prior.body_model.body_model import BodyModel
163 | from human_body_prior.tools.omni_tools import copy2cpu as c2c
164 | comp_device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
165 | # support_dir points to the downloaded model files; subject_gender is e.g. 'male'
166 | bm_fname = osp.join(support_dir, 'body_models/smplh/{}/model.npz'.format(subject_gender))
167 | dmpl_fname = osp.join(support_dir, 'body_models/dmpls/{}/model.npz'.format(subject_gender))
168 | num_betas = 16 # number of body shape parameters
169 | num_dmpls = 8  # number of DMPL soft-tissue parameters
170 | bm = BodyModel(bm_fname=bm_fname, num_betas=num_betas, num_dmpls=num_dmpls, dmpl_fname=dmpl_fname).to(comp_device)
171 | faces = c2c(bm.f)
172 | ```
173 | Please refer to [this notebook](https://github.com/nghorbani/amass/blob/08ca36ce9b37969f72d7251eb61564a7fd421e15/notebooks/04-AMASS_DMPL.ipynb) from the AMASS repository for visualization of DMPL bodies.
174 |
175 | With (left) and without (right) DMPL parameters:
176 |
177 | ![DMPL](/assets/FAQs/dmpl.gif)
178 |
179 |
180 | > How do I get a gender-neutral model?
181 |
182 | ![Neutral](/assets/FAQs/neutral.png)
183 |
184 | Gender-neutral models are available for SMPL, SMPL+H, SMPL-X and STAR. They are created by training on both male and female subjects together.
185 |
186 | > How do I convert between SMPL and SMPL-X (and STAR)?
187 |
188 | Please use [`transfer_model`](https://github.com/vchoutas/smplx/tree/master/transfer_model) for this.
189 |
190 |
191 | > How do I get SMPL with 300 shape components?
192 |
193 | SMPL, SMPL-X and STAR are currently available with a "full shape space" of 300 shape components. Contact us for licensing.
194 |
195 |
196 | > How do I get just the face (or hand) vertices in SMPL-X?
197 |
198 | We provide FLAME and MANO vertex indices of the SMPL-X body as downloadable vertex index lists. Indexing the SMPL-X body with these index lists returns the vertices that correspond to the MANO and FLAME body parts. This can be done in Python as follows:
199 |
200 | ```
201 |
202 | import pickle
203 | import numpy as np
204 | from psbody.mesh import Mesh
205 |
206 | # Load the SMPL-X template mesh
207 | SMPLX_mesh = Mesh(filename='./smplx_template.obj')
208 |
209 | # Load FLAME vertex ids
210 | SMPLX_FLAME_vertex_ids = np.load('./SMPLX_FLAME_vertex_ids.npy')
211 | # Load MANO vertex ids (a dict with 'left_hand' and 'right_hand' entries)
212 | with open('./MANO_SMPLX_vertex_ids.pkl', 'rb') as f:
213 |     SMPLX_MANO_vertex_ids = pickle.load(f)
214 | # Extract FLAME vertices from SMPLX_mesh
215 | verts_FLAME = SMPLX_mesh.v[SMPLX_FLAME_vertex_ids]
216 |
217 | # Extract MANO vertices from SMPLX_mesh
218 | verts_MANO_left = SMPLX_mesh.v[SMPLX_MANO_vertex_ids['left_hand']]
219 | verts_MANO_right = SMPLX_mesh.v[SMPLX_MANO_vertex_ids['right_hand']]
220 |
221 | ```
222 |
223 |
224 | > How do I get the joints out of SMPL?
225 |
226 |
227 | The easiest way to get joints from SMPL is to use the SMPL class in `body_models.py` from our SMPLX package.
228 |
229 | ```
230 | import torch
231 | from smplx import SMPL
232 | # Initialize the SMPL model with the path to the downloaded model files.
233 | smpl_object = SMPL(model_path='')
234 | # Zero/mean parameters: 10 betas, 23 body joints x 3, root orientation
235 | betas, body_pose = torch.zeros(1, 10), torch.zeros(1, 69)
236 | global_orient = torch.zeros(1, 3)
237 | # Run the forward function, then extract joints and vertices
238 | smpl_output = smpl_object(betas=betas, body_pose=body_pose, global_orient=global_orient)
239 | joints, vertices = smpl_output.joints, smpl_output.vertices
240 | ```
241 |
242 | > How do I visualize SMPL in Blender?
243 |
244 | Blender accepts `.obj` and `.fbx` file types, so once you have the vertices, export them as a mesh and open it in Blender. We also offer a Blender plugin for more sophisticated use cases.
245 |
246 | ```
247 | import trimesh
248 | # vertices: an (N, 3) numpy array, e.g. smpl_output.vertices[0].detach().cpu().numpy()
249 | mesh = trimesh.Trimesh(vertices=vertices, faces=smpl_object.faces,
250 |                        process=False, maintain_order=True)
251 | mesh_fname = 'my_mesh.obj'
252 | mesh.export(mesh_fname)
253 | ```
254 |
255 |
256 |
257 | > How do I sample body shapes?
258 |
259 | As [mentioned](/SMPL.html), the shape space of SMPL is defined by PCA (Principal Component Analysis). PCA returns vectors $$ B_i $$ of unit norm; for convenience we scale them by $$ \sigma $$. So to sample according to the Gaussian over body shapes, you can simply sample each beta from a standard normal distribution (a Gaussian with identity covariance matrix).
260 |
261 | To sample shapes within the range containing 95% of the training subjects ($$ [-2\sigma, 2\sigma] $$) in Python:
262 |
263 | ```
264 | betas_nb = len(model.betas)
265 | amplitude = 2  # betas are in units of sigma, so sample uniformly in [-2, 2]
266 | model.betas[:] = (np.random.rand(betas_nb) - 0.5) * 2 * amplitude
267 | ```
268 |
269 | > How do I keep plausible shapes while optimizing for shape parameters?
270 |
271 | You can use a shape prior by regularizing the shape vector. This can be done by scaling each dimension by its corresponding variance, i.e.
272 |
273 | $$ L(\beta) = \sum_i \frac{\beta_i^2}{\sigma_i^2} $$
274 |
275 |
276 | Note that in practice, since the betas are already scaled by $$ \sigma $$, an L2 loss is applied on the betas directly.
277 |
278 |
279 | > I need a pose prior for my application. How do I do that?
280 |
281 | Do check out [VPoser](https://github.com/nghorbani/human_body_prior). VPoser:
282 |
283 | - is a SMPL body pose prior, represented as the latent code of a variational autoencoder
284 | - is trained on AMASS
285 | - is end-to-end differentiable
286 | - provides a way to penalize impossible poses while admitting valid ones
287 | - effectively models correlations among the joints of the body
288 | - can be used to generate valid 3D human poses for data-dependent tasks
289 | - enables inverse kinematics in batch mode without requiring initialization
290 |
291 |
311 |
312 |
313 |
314 | > I ran SMPLify and the body shape is not right. What happened?
315 |
316 | This is expected; estimating a full 3D shape from only a sparse set of 2D joints is highly ambiguous.
317 | Think of a pregnant woman: her shape changes drastically, while her skeletal joints stay nearly the same.
318 |
319 | ![Preg](/assets/FAQs/preg-flickr.png)
320 |
321 | To estimate 3D shape, we need information beyond 2D joints. SMPL is also limited to the distribution of its training data: it can't represent babies, bodybuilders, amputees, and so on.
322 |
323 |
324 | > Can I get the SMPL training data?
325 |
326 | Sorry, no.
327 |
328 | The SMPL shape space is trained from CAESAR, which we cannot distribute.
329 | STAR uses CAESAR and SizeUSA, and the same applies.
330 | Our pose dataset is also not available, due to human-subjects restrictions.
331 | But the FAUST dataset does provide registered scans in varied poses.
332 |
333 |
334 | > How can I train my own SMPL model?
335 |
336 | Currently we do not have training code online.
337 |
338 | Good (well-registered) training data is key, and it is very hard to produce.
339 | It is not easy to train a SMPL model from scratch:
340 | curating the data, evaluating intermediate models, fixing problems, adding more data, etc. are all necessary to avoid spurious long-range correlations and artifacts.
341 |
342 | > How does the UV Map work? Do you have UV maps for SMPL, SMPL-X, etc?
343 |
344 | UV maps provide a mapping of each 3D mesh vertex into 2D image space by unwrapping the 3D mesh surface. This makes it possible to map image color information onto the 3D model at high quality, even on low-resolution meshes.
345 |
346 | - UV maps for SMPL and SMPL-X are provided on the SMPL/SMPL-X websites.
347 | - The Blender SMPL-X add-on ([GitHub page](https://github.com/Meshcapade/SMPL_blender_addon)) already has UV maps set up.
348 | - UV maps are identical between the male/female/neutral models, so you can easily swap textures.
349 |
350 | ## Blender SMPL Plugin Questions
351 |
352 | > Where can I get the latest Blender plugin?
353 |
354 | It is available on our public [GitHub page](https://github.com/Meshcapade/SMPL_blender_addon).
355 |
356 |
357 | > I want to pose SMPL myself. How can I do it?
358 |
359 | Please use our Blender plugin for this. With the plugin you can:
360 |
361 | - bring SMPL/SMPL-X models into Blender.
362 | - pose SMPL/SMPL-X models.
363 | - export the current full-body pose in Rodrigues format to the console, for later use in Python code.
364 | - auto-calculate pose corrective weights for the current pose.
365 |
366 | > How do I generate animations in FBX format?
367 |
368 | - Use the SMPL-X for Blender add-on.
369 | - Keyframe the desired motions with the Blender pose tools.
370 | - For each individual frame, keyframe the pose corrective weights using the SMPL-X Blender add-on functionality. This can be automated with the Blender Python API.
371 | Future versions of the SMPL-X Blender add-on will help automate this process.
372 | - Export to FBX.
373 |
374 |
-------------------------------------------------------------------------------- /wiki/SMPL.md: --------------------------------------------------------------------------------
1 | **SMPL: A Skinned Multi-Person Linear Model** is a realistic 3D model of the human body that is based on skinning and blend shapes and is learned from thousands of 3D body scans.
2 |
3 | The human body is central to our lives and is commonly depicted in images and video. We are developing the world's most realistic models of the body by learning their shape and how they move from data. Our goal is to make 3D models of the body look and move in ways that make them indistinguishable from real humans. Such virtual humans can be used in special effects and will play an important role in emerging virtual reality systems. They can also be used in computer vision, to generate training data for learning methods or to be fit directly to sensor data. What makes this hard is that the human body is highly articulated, deforms with kinematic changes, and exhibits large shape variability across subjects.
4 |
5 | SMPL makes the human body model as simple and standard as possible. The simplicity of our formulation means that SMPL can be trained from large amounts of data. On top of that, with its low polygon count, a simple vertex topology (shared by the male and female models), a clean quad structure, and a standard rig, SMPL makes a realistic learned model accessible to animators as well as computer vision researchers. This means the SMPL model can realistically represent a wide range of human body shapes, can be posed with natural pose-dependent deformations, exhibits soft-tissue dynamics, is efficient to animate, and is compatible with existing rendering engines.
6 |
7 |
8 | # Definition
9 | ![](../assets/SMPL_formulation.png)
10 | *SMPL model. (a) Template mesh with blend weights indicated by color and joints shown in white.
(b) With the identity-driven blend-shape contribution only; vertex and joint locations are linear in the shape vector ß. (c) With the addition of pose blend shapes in preparation for the split pose; note the expansion of the hips. (d) Deformed vertices reposed by dual quaternion skinning for the split pose.*
11 |
12 | The SMPL model decomposes body shape into identity-dependent shape and non-rigid pose-dependent shape. We take a vertex-based skinning approach that uses corrective blend shapes. A single blend shape is represented as a vector of concatenated vertex offsets. We begin with an artist-created rigged mesh with `N = 6890` vertices and `K = 23` joints. The mesh has the same topology for men and women, spatially varying resolution, a clean quad structure, a segmentation into parts, initial blend weights, and a skeletal rig. These components are further defined below:
13 |
14 | ## Template Mesh `T`
15 | A 3D mesh that defines the 3D topology (e.g. number of vertices, polygons, skeleton joints) used by the SMPL model.
16 |
17 | ## Shape Components `ß`
18 | Identity-dependent mesh surface descriptors, represented as vectors of concatenated vertex offsets from the Template Mesh. Consider these as an array of deltas that can be added as a layer on top of the template mesh `(T + ß)` in order to create different human bodies of varying body shape. Essentially, this layer creates different, realistic human identities. The SMPL body models contain up to 300 shape components, but just 10 shape components already capture the majority of variation in body shape: height, weight, waist size, shoulder breadth, etc. Using more components provides more granular control over specific body features, such as dimples, skin folds and facial feature variations.
19 |
20 | ## Pose Components `θ`
21 | Pose-dependent mesh surface descriptors, represented as vectors of concatenated vertex offsets from the Template Mesh. Consider these as another array of deltas that can be added as a layer on top of the template shape and the shape components `(T + ß + θ)` in order to represent the muscle-based deformations produced on our bodies whenever we rotate any joints. For example, this layer creates the bulging effect around the elbows of the 3D body whenever the elbows of the template mesh are folded in.
22 |
23 | ## Dynamics Components `φ`
24 | Soft-tissue-dependent mesh surface descriptors, represented as vectors of concatenated vertex offsets from the Template Mesh. Consider these as another array of deltas that can be added as a layer on top of the other layers `(T + ß + θ + φ)` in order to represent the soft-tissue deformations produced during fast motions, such as the jiggling of fat tissue during running.
25 |
26 | ## Model Software
27 | This is the core software that provides the functionality to load the model components, plus a parametric function that uses these components to generate 3D human meshes with varying identities in different poses. A minimal usage sketch is shown below, right after the introduction to the SMPL family.
28 |
29 |
30 | # Related Models: The SMPL Family
31 | The formulation of the SMPL model has given rise to much research and further development of related models that use the same formulation to focus on specific segments of the body (e.g. hands, faces), to represent a new body type (infants), or even to create a similar representation for animals.
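To make the layered formulation above concrete, here is a minimal sketch of how the components combine in code. It assumes the `smplx` Python package and a downloaded SMPL model file; the path and the parameter values are placeholders, and the dynamics components `φ` (DMPL) are loaded separately and not shown here.

```
import torch
from smplx import SMPL

# Loads the template mesh T, the shape and pose components, and the rig
# from a downloaded SMPL model file (the path is a placeholder).
model = SMPL(model_path='path/to/smpl')

betas = 0.5 * torch.randn(1, 10)    # ß: identity-dependent shape coefficients
body_pose = torch.zeros(1, 23 * 3)  # θ: one axis-angle triplet per joint
global_orient = torch.zeros(1, 3)   # orientation of the root joint

# The parametric function: roughly skinning(T + B_S(ß) + B_P(θ)), posed by θ.
output = model(betas=betas, body_pose=body_pose, global_orient=global_orient)
vertices = output.vertices          # (1, 6890, 3) posed mesh vertices
```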
32 |
33 | ## SMIL
34 | ![](../assets/smil.png)
35 |
36 | [SMIL](https://ps.is.mpg.de/code/skinned-multi-infant-linear-model-smil) is the first work on 3D shape and 3D pose estimation of infants, as well as the first work on learning a statistical 3D body model from low-quality, incomplete RGB-D data of freely moving humans. It provides a fundamental tool that can form a component in a fully automatic system for General Movement Assessment for the early detection of neurodevelopmental disorders.
37 |
38 | Other statistical body models within the SMPL family aim to describe the surface of humans or animals in a low-dimensional space. They rely on dense surface data captured from cooperative, easy-to-instruct subjects. Infants present a major challenge in terms of data acquisition, as they are not cooperative and cannot be instructed. Unlike for previous human body models, there are hardly any repositories of high-quality scans of infants. Therefore, SMIL is a 3D body model learned from RGB-D sequences of freely moving infants.
39 |
40 | ## SMAL
41 | ![](../assets/smal.png)
42 |
43 | [SMAL](https://smal.is.tue.mpg.de/): Skinned Multi-Animal Linear Model of 3D Animal Shape is a 3D articulated model that can represent animals including lions, tigers, horses, cows, hippos and dogs.
44 |
45 | While the SMPL model is learned from thousands of 3D scans of people in specific poses, this is infeasible with live animals. They are clearly much less cooperative than humans! SMAL therefore introduces the idea of learning the model from a small set of 3D scans of toy figurines in arbitrary poses, including lions, cats, tigers, dogs, horses, cows, foxes, deer, zebras, and hippos. This approach requires new tools for aligning scans of animals of different shape and size to a common template. With the alignment to a common template, a shape space representing the training animals is learned. New animal shapes can be sampled from the model, posed, animated, and fit to data.
46 |
47 |
48 | ## MANO and SMPL+H
49 | ![](../assets/mano.png)
50 |
51 | **MANO:** *Embodied Hands: Modeling and Capturing Hands and Bodies Together*
52 |
53 | [MANO](https://mano.is.tue.mpg.de/) is created from the SMPL hand topology, and has analogous components to those in SMPL: a template shape, a kinematic tree, shape and pose blend shapes, blend weights and a joint regressor.
54 |
55 | Bodies and hands are literally inseparable. Yet, despite this, research on modeling bodies and hands has progressed separately. Significant advances have been made in learning realistic 3D statistical shape models of full bodies, but these models typically have limited, or no, hand articulation. Similarly, there is significant work on tracking hands using depth sensors and video sequences, but these hands are modeled and tracked in isolation from the body. Hands and body together are important for communication, and a complete picture of our actions, emotions, and intentions is not possible without the joint analysis of hands and bodies. The growth of interest in virtual and augmented reality has increased the need for characters and avatars that combine realistic bodies and hands in motion. MANO provides a new approach to capturing the 4D motion of the hands and body together.
56 |
57 | ## FLAME
58 | ![](../assets/flame.png)
59 |
60 | **FLAME model:** *variations for shape, expression, pose, and appearance. For shape, expression, and appearance variations, the first 3 principal components are visualized at ±3 standard deviations.
The pose variations are visualized at ±π/6 (head pose) and 0, π/8 (jaw articulation).*
61 |
62 | [FLAME](https://flame.is.tue.mpg.de/) adapts the SMPL body model formulation to create a statistical head model that is significantly more accurate and expressive than existing head and face models, while remaining compatible with standard graphics software. In contrast to existing models, FLAME explicitly models head pose and eyeball rotation.
63 |
64 | There is a significant gap in the field of 3D face modeling. At one end of the spectrum are highly accurate, photo-realistic 3D models of individuals that are learned from scans or images of that individual and/or involve significant input from a 3D artist. At the other end are simple generic face models that can be fit to images, video, or RGB-D data but that lack realism. What is missing are generic 3D face models that are compact, can be fit to data, capture realistic 3D face details, and enable animation. The goal of the FLAME model is to move the "low end" models towards the "high end" by learning a model of facial shape and expression from 4D scans (sequences of 3D scans).
65 |
66 | ## SMPL-X
67 | ![](../assets/smplx.png)
68 |
69 | **SMPL vs SMPL+H vs SMPL-X:** *Comparison of SMPL (left), SMPL+H (middle) and SMPL-X (right). The results show a clear increase in expressiveness from left to right, as the model gets richer from body-only (SMPL) to include hands (SMPL+H) or hands and face (SMPL-X).*
70 |
71 | [SMPL-X](https://smpl-x.is.tue.mpg.de/) combines the developments from the *SMPL, MANO and FLAME* models to create a unified model, called SMPL-X, for SMPL eXpressive, with shape parameters trained jointly for the face, hands and body.
72 |
73 | Starting with an artist-designed 3D template, whose face and hands match the templates of [FLAME](#FLAME) and [MANO](#MANO-and-SMPL+H), SMPL-X is learned from four datasets of 3D human scans. The shape space parameters are trained on 3800 alignments in an A-pose, capturing variation across identities. The body pose space parameters are trained on 1786 alignments in diverse poses. Since the full-body scans have limited resolution for the hands and face, SMPL-X also leverages the parameters of MANO and FLAME, learned from 1500 hand and 3800 head high-resolution scans respectively. More specifically, SMPL-X uses the pose space and pose-corrective blend shapes of MANO for the hands, and the expression space E of FLAME.
74 |
75 | ## STAR
76 | ![](../assets/star.png)
77 |
78 | **SMPL vs STAR:** *Examples of some SMPL limitations. The heat maps illustrate the magnitude of the pose-corrective offsets, highlighting the spurious long-range correlations learned by the SMPL pose-corrective blend shapes: bending one elbow results in a visible bulge in the other elbow.*
79 |
80 | [STAR](https://star.is.tue.mpg.de/): Sparse Trained Articulated Regressor is a new, compact human body model that is more accurate than SMPL yet has sparse and spatially local blend shapes, such that a joint only influences a sparse set of vertices that are geodesically close to it.
81 |
82 | The original SMPL model has several limitations. First, SMPL has a huge number of parameters resulting from its use of global blend shapes. These dense pose-corrective offsets relate every vertex on the mesh to all the joints in the kinematic tree, capturing spurious long-range correlations. To address this, STAR introduces per-joint pose correctives and learns the subset of mesh vertices that are influenced by each joint's movement.
This sparse formulation results in more realistic deformations and significantly reduces the number of model parameters, to 20% of SMPL's. When trained on the same data as SMPL, STAR generalizes better despite having many fewer parameters. Second, SMPL factors pose-dependent deformations from body shape while, in reality, people with different shapes deform differently. Consequently, STAR introduces shape-dependent pose-corrective blend shapes that depend on both body pose and BMI. Third, the shape space of SMPL was trained on a relatively small set of around 4000 subjects, so it is not rich enough to capture the variation in the human population. STAR addresses this by training the shape space with an additional 10,000 scans of male and female subjects, and shows that this results in better model generalization. STAR is compact, generalizes better to new bodies, and is a drop-in replacement for SMPL.
83 |
84 |
85 | # Download Models
86 |
87 | ## Academic Use
88 | Each of the SMPL model variants is freely available to download and use for academic purposes. You can find information on downloading the model and related code at the respective academic pages listed below:
89 |
90 | | **Model** | **Description** | **Link** |
91 | |-----------|-------------------------------------------------------|----------------------------- |
92 | | SMPL | Human body model (without hand and face articulation)| https://smpl.is.tue.mpg.de/ |
93 | | SMPL+H | Human body + hand model (without face articulation) | https://mano.is.tue.mpg.de/ |
94 | | MANO | Hand model | https://mano.is.tue.mpg.de/ |
95 | | SMPL-X | Human body + hand + face model | https://smpl-x.is.tue.mpg.de/|
96 | | STAR | Human body model (more compact & faster SMPL) | https://star.is.tue.mpg.de/ |
97 | | SMIL | SMPL infant model | https://ps.is.mpg.de/publications/hesse-micai-2018|
98 | | SMAL | SMPL animal model | https://ps.is.mpg.de/code/smal |
99 |
100 |
101 |
102 | ## Commercial Use
103 | For commercial use, the SMPL model and its body model variants can all be sublicensed through Meshcapade's licensing options listed here:
104 |
105 | https://meshcapade.com/infopages/licensing.html
106 |
107 | Meshcapade's commercial licensing for the SMPL model includes access to all the adult model variants of SMPL (SMPL+H, SMPL-X, STAR), while the infant model [SMIL](#SMIL) and the animal model [SMAL](#SMAL) are expected to be available as separate licenses soon!
108 |
109 |
110 | # Model Tools
111 |
112 | ## Mesh Templates & Samples
113 | You can find various samples of template meshes, animated outputs and textures for the SMPL and related models below.
114 |
115 | ### Sample OBJs with textures
116 | ![](../assets/smpl_textured_female.png) ![](../assets/smpl_textured_male.png)
117 |
118 | a) **[Sample texture with SMPL topology](https://app.box.com/s/mdx2m368j9m0jgkkjnf67l6blrwrt20f)**:
119 | Made available under the [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/) License. Male & female models in OBJ format, with sample textures. These samples represent the mesh topology used in the [SMPL](#SMPL), [SMPL+H](#MANO-and-SMPL+H) and [STAR](#STAR) models.
120 |
121 | b) **[Sample texture with SMPL-X topology](https://app.box.com/s/ei0gk8295o3pu7qugrisgucnpfqoghsm)**:
122 | Made available under the [CC-BY 4.0](https://creativecommons.org/licenses/by/4.0/) License. Male & female models in OBJ format, with sample textures. These samples represent the mesh topology used in the [SMPL-X](#SMPL-X) model.
123 |
124 | ### Sample FBX with animation
125 |
126 | *[SMPL animated mesh with textures](https://app.box.com/s/732qw6hcxqnprdqeya2s8zcr6gzvw2ub)*
127 | Rigged male SMPL model with default texture, animated using motion capture.
128 |
129 | ### Body-part segmentation
130 |
131 | a) **[SMPL / SMPLH / STAR part segmentation](../assets/SMPL_body_segmentation/smpl/smpl_vert_segmentation.json)**
132 | This body-part segmentation defines the part segmentation information for the SMPL, SMPL+H and STAR models.
133 |
134 |
135 |
136 |
137 | b) **[SMPL-X / SUPR part segmentation](../assets/SMPL_body_segmentation/smplx/smplx_vert_segmentation.json)**
138 | This body-part segmentation defines the part segmentation information for the SMPL-X & SUPR models.
139 |
140 |
141 |
142 | ### Skeleton layout
143 |
144 | The SMPL model's pose parameters in the Python model files are defined in the Rodrigues (axis-angle) formulation. Each triplet of pose parameters corresponds to one skeleton joint. The joint name for each pose-parameter triplet is defined in the mappings below for all variations of the SMPL model. Note that these layouts also show the kinematic tree of the model as used in the SMPL FBX files.
145 |
146 |
147 | a) **SMPL and STAR skeletons**:
148 |
149 | ```
150 | 0: 'pelvis',
151 | 1: 'left_hip',
152 | 2: 'right_hip',
153 | 3: 'spine1',
154 | 4: 'left_knee',
155 | 5: 'right_knee',
156 | 6: 'spine2',
157 | 7: 'left_ankle',
158 | 8: 'right_ankle',
159 | 9: 'spine3',
160 | 10: 'left_foot',
161 | 11: 'right_foot',
162 | 12: 'neck',
163 | 13: 'left_collar',
164 | 14: 'right_collar',
165 | 15: 'head',
166 | 16: 'left_shoulder',
167 | 17: 'right_shoulder',
168 | 18: 'left_elbow',
169 | 19: 'right_elbow',
170 | 20: 'left_wrist',
171 | 21: 'right_wrist',
172 | 22: 'left_hand',
173 | 23: 'right_hand'
174 | ```
175 |
176 | b) **SMPL-H skeleton**:
177 |
178 | ```
179 | 0: 'pelvis',
180 | 1: 'left_hip',
181 | 2: 'right_hip',
182 | 3: 'spine1',
183 | 4: 'left_knee',
184 | 5: 'right_knee',
185 | 6: 'spine2',
186 | 7: 'left_ankle',
187 | 8: 'right_ankle',
188 | 9: 'spine3',
189 | 10: 'left_foot',
190 | 11: 'right_foot',
191 | 12: 'neck',
192 | 13: 'left_collar',
193 | 14: 'right_collar',
194 | 15: 'head',
195 | 16: 'left_shoulder',
196 | 17: 'right_shoulder',
197 | 18: 'left_elbow',
198 | 19: 'right_elbow',
199 | 20: 'left_wrist',
200 | 21: 'right_wrist',
201 | 22: 'left_index1',
202 | 23: 'left_index2',
203 | 24: 'left_index3',
204 | 25: 'left_middle1',
205 | 26: 'left_middle2',
206 | 27: 'left_middle3',
207 | 28: 'left_pinky1',
208 | 29: 'left_pinky2',
209 | 30: 'left_pinky3',
210 | 31: 'left_ring1',
211 | 32: 'left_ring2',
212 | 33: 'left_ring3',
213 | 34: 'left_thumb1',
214 | 35: 'left_thumb2',
215 | 36: 'left_thumb3',
216 | 37: 'right_index1',
217 | 38: 'right_index2',
218 | 39: 'right_index3',
219 | 40: 'right_middle1',
220 | 41: 'right_middle2',
221 | 42: 'right_middle3',
222 | 43: 'right_pinky1',
223 | 44: 'right_pinky2',
224 | 45: 'right_pinky3',
225 | 46: 'right_ring1',
226 | 47: 'right_ring2',
227 | 48: 'right_ring3',
228 | 49: 'right_thumb1',
229 | 50: 'right_thumb2',
230 | 51: 'right_thumb3'
231 | ```
232 |
233 | c) **SMPL-X skeleton**:
234 |
235 | ```
236 | 0: 'pelvis',
237 | 1: 'left_hip',
238 | 2: 'right_hip',
239 | 3: 'spine1',
240 | 4: 'left_knee',
241 | 5: 'right_knee',
242 | 6: 'spine2',
243 | 7: 'left_ankle',
244 | 8: 'right_ankle',
245 | 9: 'spine3',
246 | 10: 'left_foot',
247 | 11: 'right_foot',
248 | 12: 'neck',
249 | 13: 'left_collar',
250 | 14: 'right_collar',
251 | 15: 'head',
252 | 16: 'left_shoulder',
253 | 17: 'right_shoulder',
254 | 18: 'left_elbow',
255 | 19: 'right_elbow',
256 | 20: 'left_wrist',
257 | 21: 'right_wrist',
258 | 22: 'jaw',
259 | 23: 'left_eye',
260 | 24: 'right_eye',
261 | 25: 'left_index1',
262 | 26: 'left_index2',
263 | 27: 'left_index3',
264 | 28: 'left_middle1',
265 | 29: 'left_middle2',
266 | 30: 'left_middle3',
267 | 31: 'left_pinky1',
268 | 32: 'left_pinky2',
269 | 33: 'left_pinky3',
270 | 34: 'left_ring1',
271 | 35: 'left_ring2',
272 | 36: 'left_ring3',
273 | 37: 'left_thumb1',
274 | 38: 'left_thumb2',
275 | 39: 'left_thumb3',
276 | 40: 'right_index1',
277 | 41: 'right_index2',
278 | 42: 'right_index3',
279 | 43: 'right_middle1',
280 | 44: 'right_middle2',
281 | 45: 'right_middle3',
282 | 46: 'right_pinky1',
283 | 47: 'right_pinky2',
284 | 48: 'right_pinky3',
285 | 49: 'right_ring1',
286 | 50: 'right_ring2',
287 | 51: 'right_ring3',
288 | 52: 'right_thumb1',
289 | 53: 'right_thumb2',
290 | 54: 'right_thumb3'
291 | ```
292 |
293 | d) **SUPR skeleton**:
294 | ```
295 | 0: 'pelvis',
296 | 1: 'left_hip',
297 | 2: 'right_hip',
298 | 3: 'spine1',
299 | 4: 'left_knee',
300 | 5: 'right_knee',
301 | 6: 'spine2',
302 | 7: 'left_ankle',
303 | 8: 'right_ankle',
304 | 9: 'spine3',
305 | 10: 'left_foot',
306 | 11: 'right_foot',
307 | 12: 'neck',
308 | 13: 'left_collar',
309 | 14: 'right_collar',
310 | 15: 'head',
311 | 16: 'left_shoulder',
312 | 17: 'right_shoulder',
313 | 18: 'left_elbow',
314 | 19: 'right_elbow',
315 | 20: 'left_wrist',
316 | 21: 'right_wrist',
317 | 22: 'jaw',
318 | 23: 'left_eye',
319 | 24: 'right_eye',
320 | 25: 'left_index1',
321 | 26: 'left_index2',
322 | 27: 'left_index3',
323 | 28: 'left_middle1',
324 | 29: 'left_middle2',
325 | 30: 'left_middle3',
326 | 31: 'left_pinky1',
327 | 32: 'left_pinky2',
328 | 33: 'left_pinky3',
329 | 34: 'left_ring1',
330 | 35: 'left_ring2',
331 | 36: 'left_ring3',
332 | 37: 'left_thumb1',
333 | 38: 'left_thumb2',
334 | 39: 'left_thumb3',
335 | 40: 'right_index1',
336 | 41: 'right_index2',
337 | 42: 'right_index3',
338 | 43: 'right_middle1',
339 | 44: 'right_middle2',
340 | 45: 'right_middle3',
341 | 46: 'right_pinky1',
342 | 47: 'right_pinky2',
343 | 48: 'right_pinky3',
344 | 49: 'right_ring1',
345 | 50: 'right_ring2',
346 | 51: 'right_ring3',
347 | 52: 'right_thumb1',
348 | 53: 'right_thumb2',
349 | 54: 'right_thumb3',
350 | 55: 'left_bigtoe1',
351 | 56: 'left_bigtoe2',
352 | 57: 'left_indextoe1',
353 | 58: 'left_indextoe2',
354 | 59: 'left_middletoe1',
355 | 60: 'left_middletoe2',
356 | 61: 'left_ringtoe1',
357 | 62: 'left_ringtoe2',
358 | 63: 'left_pinkytoe1',
359 | 64: 'left_pinkytoe2',
360 | 65: 'right_bigtoe1',
361 | 66: 'right_bigtoe2',
362 | 67: 'right_indextoe1',
363 | 68: 'right_indextoe2',
364 | 69: 'right_middletoe1',
365 | 70: 'right_middletoe2',
366 | 71: 'right_ringtoe1',
367 | 72: 'right_ringtoe2',
368 | 73: 'right_pinkytoe1',
369 | 74: 'right_pinkytoe2'
370 | ```
371 |
372 | > Note: The [Meshcapade Me](https://me.meshcapade.com/) platform returns pose parameters in SMPL-X format as [`.smpl`](https://github.com/Meshcapade/smplcodec) files.
373 |
374 |
375 | ### Using SMPL pose parameters
376 | To convert the SMPL pose parameters from Rodrigues triplets to rotation-matrix notation (3×3 matrices), you can use OpenCV's method [cv2.Rodrigues()](https://docs.opencv.org/3.4/d9/d0c/group__calib3d.html#ga61585db663d9da06b68e70cfbf6a1eac).
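For example, here is a short sketch converting a full 72-dimensional SMPL pose vector (24 joints × 3 Rodrigues values) into per-joint rotation matrices; the zero pose below is just a placeholder:

```
import cv2
import numpy as np

# Placeholder: a full-body SMPL pose of 24 joints x 3 Rodrigues values.
pose = np.zeros(72)

# cv2.Rodrigues returns (rotation matrix, Jacobian); we keep only the matrix.
rotmats = np.stack([cv2.Rodrigues(aa.reshape(3, 1))[0]
                    for aa in pose.reshape(24, 3)])
print(rotmats.shape)  # (24, 3, 3)
```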
377 |
378 |
379 | ## SMPL Software, Plugins & Scripts
380 |
381 | ### SMPL in Python
382 | * [SMPL, SMPL+H, SMPL-X](https://github.com/vchoutas/smplx)
383 | * [STAR](https://github.com/ahmedosman/STAR)
384 | * [smplcodec](https://github.com/Meshcapade/smplcodec)
385 |
386 | ### SMPL in Rust
387 | * [smpl-rs](https://github.com/Meshcapade/smpl-rs)
388 |
389 | ### SMPL for Animation
390 | * [SMPL in Maya](https://github.com/Meshcapade/SMPL_maya_plugin)
391 | * [SMPL in Blender](https://www.youtube.com/watch?v=DY2k29Jef94)
392 | * [SMPL in Unity](https://app.box.com/s/31zc131kaql0epmi5012xlmyk4lwsbat)
393 | * [SMPL in Unreal](https://www.youtube.com/watch?v=fuB4OWxOhME)
394 |
395 | Also check out this video for more on SMPL in Blender, Unity and Unreal Engine:
396 | [https://www.youtube.com/watch?v=m8i00zG6mZI](https://www.youtube.com/watch?v=m8i00zG6mZI)
397 |
398 | ### Inter-model operability
399 | * [MANO & FLAME correspondences](https://github.com/vchoutas/smplx#mano-and-flame-correspondences)
400 | * [SMPL-SMPLX-STAR conversions](https://github.com/vchoutas/smplx/tree/master/transfer_model)
401 | * [SMPLX OpenPose mapping](../assets/SMPLX_OpenPose_mapping/README.md)
402 |
403 |
404 |
405 |
406 |
-------------------------------------------------------------------------------- /wiki/body_and_pose.md: --------------------------------------------------------------------------------
1 | ### Body Shape and Pose Estimation Methods
2 |
3 | Details of the methods we offer can be found in these academic publications:
4 |
5 | * SMPLify-X: [https://smpl-x.is.tue.mpg.de/](https://smpl-x.is.tue.mpg.de/)
6 | * SHAPY: [https://shapy.is.tue.mpg.de/](https://shapy.is.tue.mpg.de/)
7 | * PIXIE: [https://pixie.is.tue.mpg.de/](https://pixie.is.tue.mpg.de/)
8 | * VIBE: [https://ps.is.mpg.de/publications/vibe-cvpr-2020](https://ps.is.mpg.de/publications/vibe-cvpr-2020)
9 | * DECA: [https://ps.is.mpg.de/publications/feng-siggraph-2021](https://ps.is.mpg.de/publications/feng-siggraph-2021)
10 | * EMOCA: [https://emoca.is.tue.mpg.de/](https://emoca.is.tue.mpg.de/)
11 | * MICA: [https://zielon.github.io/mica/](https://zielon.github.io/mica/)
12 |
-------------------------------------------------------------------------------- /wiki/digidoppel.md: --------------------------------------------------------------------------------
1 | # digidoppel
2 |
3 | [digidoppel](https://digidoppel.com/) is our online platform which allows users to convert different kinds of inputs into realistic, animation-ready 3D avatars. Users can start from 3D scans or body measurements (more options for avatars from mocap, images & video coming soon!).
4 |
5 | ## Option 1: Avatars from Scans
6 | On digidoppel, you can create an avatar using 3D scans.
7 |
8 | [![Watch on YouTube](https://img.youtube.com/vi/0vRPJdf-RlQ/0.jpg)](https://www.youtube.com/watch?v=0vRPJdf-RlQ)
9 |
10 | ### Input files and mesh sizes
11 | The platform accepts OBJ and PLY files as input. At the moment, only static 3D scans can be processed automatically on the digidoppel platform; for 4D scans, please contact us at [support@meshcapade.com](mailto:support@meshcapade.com) with a small sample of your 4D data.
12 |
13 | We limit the input mesh size to under 1 million vertices, so please make sure your input file does not have more than 1M vertices when you upload the scan.
14 |
15 | ### Scans with textures
16 | The digidoppel platform can automatically transfer the texture from a scan to the output we create. You can upload a scan with texture as a zipped file.
Make sure the zipped file contains ONLY the scan and the texture image file (there should be no hidden files in the zip archive).
17 |
18 | The output created will include the processed mesh (in the OBJ, FBX, or other format you choose) plus a texture file mapped to our body topology.
19 |
20 |
21 | ### Problems with Internal Geometry
22 |
23 | The scan alignment process is designed to work with 3D meshes acquired through a 3D scanning process.
24 |
25 | Scans from a 3D scanner can be noisy; this noise is expected, and our scan alignment process can handle it.
26 |
27 | However, the 3D scans are expected to contain only the outer surface of the body, with no internal surfaces. Generally, when artists create 3D body models in a 3D animation program, they might create additional geometry on the inside of the body surface.
28 | For example, an artist might create internal geometry for the eyes and mouth of the face, as shown below:
29 |
30 | ![internal-geometry-problems](../assets/images_digidoppel/internal-geometry-problems.png)
31 |
32 | Or the limbs and neck may be created as separate geometry objects, with additional geometry where each limb closes off in the 3D model.
33 | These internal geometry pieces have to be removed before the scan can be sent for processing on digidoppel.
34 |
35 | ### Problems with long hair
36 |
37 | Input meshes with hair occluding the neck will not align well. Please make sure that hair is not falling onto the shoulders; it is best if it is tied up in a bun or covered by a haircap.
38 |
39 |
40 | ### Problems with loose clothing
41 |
42 | Input meshes with clothing do work, but the tighter the clothing is, the better. Loose clothing or accessories may cause the alignment to fail, take a very long time, or produce subpar results. Keep in mind that the model was trained on data from subjects in tight-fitting clothing.
43 |
44 | ![clothing-problems](../assets/images_digidoppel/clothing-problems.png)
45 |
46 |
47 | ### Creating a thigh gap
48 |
49 | For apparel simulation, users often want a gap in the thigh area so that the vertices of the left and right thighs are not touching or intersecting. To help with this, we have created a 'thigh gap' option which lets users create an artificial gap in the thigh area for OBJ output files.
50 |
51 | [![Watch on YouTube](https://img.youtube.com/vi/Jurq4H5vxJs/0.jpg)](https://www.youtube.com/watch?v=Jurq4H5vxJs)
52 |
53 | Back to top
54 |
55 |
56 | ## Option 2: Avatars from measurements
57 | On digidoppel, you can create an avatar using body measurements. Below is a list of the body measurements currently available on the digidoppel platform:
58 |
59 | [![Watch on YouTube](https://img.youtube.com/vi/MZD4actpeDw/0.jpg)](https://www.youtube.com/watch?v=MZD4actpeDw)
60 |
61 | How does it work? We use machine learning to convert body measurements into an accurate 3D avatar.
62 |
63 | ### Measurement descriptions
64 |
65 |
66 | | **Measurement** | **Description** | **Preview** |
67 | |--------------------------- |-------------------------------------------------------|-------------|
68 | | Height | The line segment from the lowest center point to the highest point on the mesh. | |
69 | | Chest circumference at maximum | The loop resulting from slicing a torso mesh segment transversally at a fixed "nipple level" vertex. | |
70 | | Shoulder Breadth | The length of the greatest extents within a defined "shoulder geometry" set of vertices from the sagittal axis. | |
71 | | Neck circumference at base | The loop segment of edges across the cervicale landmark at the juncture of the neck and the shoulders of the mesh. | |
72 | | Arm length (spine to wrist) | The line segment of edges of the mesh measured from the cervicale through the acromion to the wrist at the ulnar styloid landmark. | |
73 | | Arm length (shoulder to wrist) | The line segment of edges of the mesh measured by subtracting the spine-shoulder length measurement from the spine-wrist length measurement. | |
74 | | Arm length (shoulder to elbow) | The line segment of edges of the mesh measured by subtracting the spine-shoulder length measurement from the spine-elbow length measurement. | |
75 | | Arm circumference at scye | The loop resulting from slicing an arm segment with a sagittal plane at a fixed vertex for the armpit. | |
76 | | Waist Circumference | Given a range of vertices roughly representing the region at and below the navel and above the hip bone, slice the mesh transversally at each of those specified points, and pick the SMALLEST slice for the waist measurement. | |
77 | | Waist Height | The length from the bottom of the mesh up to the height of the waist circumference measurement. | |
78 | | Hip circumference | Given a range of vertices roughly representing the region at and below the navel and above the hip bone, slice the mesh transversally at each of those specified points, and pick the LARGEST slice for the hip measurement. | |
79 | | Hip Height | The length from the bottom of the mesh up to the height of the hip circumference measurement. | |
80 | | Thigh Circumference | Given a range of fixed thigh vertices, transversally slice the mesh at these locations and use the LARGEST slice. | |
81 | | Inseam | The height measured from the bottom of the mesh to a specified crotch vertex. | |
82 | | Crotch length | The line segment of vertices from the top of the navel, down along a sagittal segment to the opposing side. | |
83 | | Foot Length | A line segment from the rearmost vertex on the foot to the foremost. | |
84 | | Ankle circumference | The loop resulting from slicing a leg segment with a transverse plane at a fixed vertex for the ankle. | |
85 |
86 |
87 | ## Output file options
88 |
89 | ### File formats
90 |
91 | All avatars created on digidoppel can be exported as:
92 | - OBJ (static, posed file): available on web & API
93 | - FBX (animated or static file): available on web & API
94 | - PC2 (animated or static file): available only on API
95 |
96 | All OBJ and FBX files are fully compatible with game engines and cloth-simulation programs.
97 |
98 | ### Using Meshcapade avatars in CLO3D
99 | [![Watch on YouTube](https://img.youtube.com/vi/qVYelkFkkTM/0.jpg)](https://www.youtube.com/watch?v=qVYelkFkkTM)
100 |
101 | ### Using Meshcapade avatars in Browzwear's VStitcher
102 | [![Watch on YouTube](https://img.youtube.com/vi/l5MlWFmWRts/0.jpg)](https://www.youtube.com/watch?v=l5MlWFmWRts)
103 |
104 | Read more about it here: [Bring Meshcapade's Capabilities Directly to VStitcher](https://browzwear.com/vizualize-your-designs-on-animation-ready-avatars-from-body-measurements-and-scans-with-meshcapades-vstitcher-integration-2/)
105 |
106 | ### Using Meshcapade avatars in Optitex
107 |
108 | Create your animatable avatar from measurements or 3D scans on our platform and select 'Optitex' compatibility mode.
109 | Read more about this mode in our [help guide](https://meshcapade.notion.site/Optitex-Export-Compatibility-Help-Guide-1cb5a280c9884481b96075b8b5793a75).
110 |
111 | To enable Optitex export compatibility, first select ".fbx" as the output file format, select "animation" as the output composition, and then select the desired animation sequence:
112 |
113 | ![image](https://user-images.githubusercontent.com/2546603/194862067-c55ba2c7-9efb-4749-af78-c0b2368b50a2.png)
114 |
115 | Then, select "Optitex" as the compatibility mode:
116 |
117 | ![image](https://user-images.githubusercontent.com/2546603/194862281-d348c624-1c70-4a1b-98c5-ebfd13c5fea4.png)
118 |
119 | Back to top
120 |
121 | ## Animations with digidoppel
122 |
123 | For all avatars created on digidoppel, users can add a static pose or animations (for the FBX output option).
124 | Below is our Animation Guide showing which motions are possible with digidoppel. The animation guide is also available in the digidoppel documentation, [here](https://digidoppel.com/documentation/#animation-guide):
125 |
126 | | **Name** | **Visualization**|
127 | |-----------|--------------------|
128 | |A to Bodybuilder | A to Bodybuilder |
129 | |A to Catwalk | A to Catwalk |
130 | |A to Dancing in Rain | A to Dancing in Rain |
131 | |A to Hands Front | A to Hands Front |
132 | |A to Hip Hop | A to Hip Hop |
133 | |A to Irish Dance | A to Irish Dance |
134 | |A to Model | A to Model |
135 | |A to Salsa | A to Salsa |
136 | |A to Stretches | A to Stretches |
137 | |A to Walk | A to Walk |
138 | |Attention | Attention |
139 | |Contra Pose | Contra Pose |
140 | |Sidestep | Sidestep |
141 | |Wide to A Pose | Wide to A Pose |
142 | |Wide to Arms Retracted | Wide to Arms Retracted |
143 | |Wide to Catwalk | Wide to Catwalk |
144 | |Wide to I Pose | Wide to I Pose |
145 | |Wide to Squat | Wide to Squat |
146 | |Wide to Toe Touch | Wide to Toe Touch |
147 |
148 | Back to top
149 |
150 | ## FAQs
151 |
152 | > What exactly is digidoppel?
153 |
154 | digidoppel is a cloud platform for creating animation-ready avatars from scans, hand measurements, and more. You can sign up for free [here](https://digidoppel.com/).
155 |
156 | > Can I use it through an API?
157 |
158 | Yes, you can find the API documentation on the digidoppel page here: [https://digidoppel.com/documentation/](https://digidoppel.com/documentation/)
159 |
160 | > What am I allowed to do with the generated avatars? Can I share them with my users?
161 |
162 | The output files are made available to users under the [SMPL Body license](https://smpl.is.tue.mpg.de/bodylicense.html). The output files created with digidoppel are free to share and use in research and commercial projects, with the exception of uses
163 |
164 | - to generate defamatory, harassing, pornographic, obscene, or racist material, whether commercial or not; or
165 | - for purposes of mass surveillance, or for creating a digital double of any person in avatar or other form of digital replication without such person's explicit consent.
166 |
167 | > What are the usage rights and terms for the platform?
168 |
169 | The platform is made available under the Terms of Service available [here](https://digidoppel.com/terms-of-service/#terms-go-3).
170 |
171 | > Are there any restrictions on what kind of usage is not allowed on the platform?
172 |
173 | The platform must not be used to:
174 | - Sublicense any APIs for use by a third party, or create an API Client that substantially replicates or competes with the APIs.
175 | - Attempt to interfere with the normal functioning of the APIs or the servers or networks providing the APIs.
176 | - Attempt to use our APIs in a manner that exceeds or circumvents rate limits, or that constitutes excessive or harmful usage.
177 | - Attempt to access the APIs in a manner that compromises, circumvents, or tests the vulnerability of any of our security measures, except with express prior written approval by Meshcapade.
178 | - Attempt to reverse engineer or extract source code, trade secrets, or know-how from our APIs.
179 |
180 |
181 | > How is security being handled for the API?
182 |
183 | You can review our Privacy Policy [here](https://info.meshcapade.com/faqs/api-privacy-policy).
184 |
185 | > Is your API available as an offline solution/SDK?
186 |
187 | Yes. Please contact us at [info@meshcapade.com](mailto:info@meshcapade.com) for help.
188 |
189 | > Is there a trial for the API?
190 |
191 | You don't need a trial; sign-up is free, and you receive 20 free credits at sign-up. You can find out more about pricing here: [https://digidoppel.com/pricing/](https://digidoppel.com/pricing/).
192 |
193 | Back to top
194 |
195 |
-------------------------------------------------------------------------------- /wiki/quick-links.md: --------------------------------------------------------------------------------
1 | # Quick Links
2 |
3 | ## Academic links for SMPL and related Parametric 3D Models
4 |
5 | ____________________________________________________________________________________________________
6 | | **Model** | **Description** | **Link** |
7 | |-----------|-------------------------------------------------------|----------------------------- |
8 | | SMPL | Human body model (without hand and face articulation)| https://smpl.is.tue.mpg.de/ |
9 | | SMPL+H | Human body + hand model (without face articulation) | https://mano.is.tue.mpg.de/ |
10 | | MANO | Hand model | https://mano.is.tue.mpg.de/ |
11 | | SMPL-X | Human body + hand + face model | https://smpl-x.is.tue.mpg.de/|
12 | | STAR | Human body model (more compact & faster SMPL) | https://star.is.tue.mpg.de/ |
13 | | SMIL | SMPL infant model | https://ps.is.mpg.de/publications/hesse-micai-2018|
14 | | SMAL | SMPL animal model | https://ps.is.mpg.de/code/smal |
15 |
16 |
--------------------------------------------------------------------------------