├── .gitignore
├── Pictures_for_Github_only
├── Online Result.png
└── mainframework.png
├── README.md
├── cldetection_utils.py
├── configs
└── CLdetection2023
│ └── srpose_s2.py
├── inference_single_image.py
├── install_env.sh
├── mmpose_package
└── mmpose
│ ├── .gitignore
│ ├── .owners.yml
│ ├── .pre-commit-config.yaml
│ ├── .pylintrc
│ ├── .readthedocs.yml
│ ├── CITATION.cff
│ ├── LICENSE
│ ├── MANIFEST.in
│ ├── README.md
│ ├── README_CN.md
│ ├── configs
│ ├── _base_
│ │ ├── datasets
│ │ │ ├── 300w.py
│ │ │ ├── aflw.py
│ │ │ ├── aic.py
│ │ │ ├── animalpose.py
│ │ │ ├── ap10k.py
│ │ │ ├── atrw.py
│ │ │ ├── campus.py
│ │ │ ├── cephalometric.py
│ │ │ ├── cephalometric_isbi2015.py
│ │ │ ├── coco.py
│ │ │ ├── coco_aic.py
│ │ │ ├── coco_openpose.py
│ │ │ ├── coco_wholebody.py
│ │ │ ├── coco_wholebody_face.py
│ │ │ ├── coco_wholebody_hand.py
│ │ │ ├── cofw.py
│ │ │ ├── crowdpose.py
│ │ │ ├── deepfashion2.py
│ │ │ ├── deepfashion_full.py
│ │ │ ├── deepfashion_lower.py
│ │ │ ├── deepfashion_upper.py
│ │ │ ├── fly.py
│ │ │ ├── freihand2d.py
│ │ │ ├── h36m.py
│ │ │ ├── halpe.py
│ │ │ ├── horse10.py
│ │ │ ├── interhand2d.py
│ │ │ ├── interhand3d.py
│ │ │ ├── jhmdb.py
│ │ │ ├── lapa.py
│ │ │ ├── locust.py
│ │ │ ├── macaque.py
│ │ │ ├── mhp.py
│ │ │ ├── mpi_inf_3dhp.py
│ │ │ ├── mpii.py
│ │ │ ├── mpii_trb.py
│ │ │ ├── ochuman.py
│ │ │ ├── onehand10k.py
│ │ │ ├── panoptic_body3d.py
│ │ │ ├── panoptic_hand2d.py
│ │ │ ├── posetrack18.py
│ │ │ ├── rhd2d.py
│ │ │ ├── shelf.py
│ │ │ ├── wflw.py
│ │ │ └── zebra.py
│ │ └── default_runtime.py
│ ├── animal_2d_keypoint
│ │ ├── README.md
│ │ ├── rtmpose
│ │ │ ├── README.md
│ │ │ └── ap10k
│ │ │ │ ├── rtmpose-m_8xb64-210e_ap10k-256x256.py
│ │ │ │ ├── rtmpose_ap10k.md
│ │ │ │ └── rtmpose_ap10k.yml
│ │ └── topdown_heatmap
│ │ │ ├── README.md
│ │ │ ├── animalpose
│ │ │ ├── hrnet_animalpose.md
│ │ │ ├── hrnet_animalpose.yml
│ │ │ ├── resnet_animalpose.md
│ │ │ ├── resnet_animalpose.yml
│ │ │ ├── td-hm_hrnet-w32_8xb64-210e_animalpose-256x256.py
│ │ │ ├── td-hm_hrnet-w48_8xb64-210e_animalpose-256x256.py
│ │ │ ├── td-hm_res101_8xb64-210e_animalpose-256x256.py
│ │ │ ├── td-hm_res152_8xb32-210e_animalpose-256x256.py
│ │ │ └── td-hm_res50_8xb64-210e_animalpose-256x256.py
│ │ │ ├── ap10k
│ │ │ ├── cspnext-m_udp_8xb64-210e_ap10k-256x256.py
│ │ │ ├── cspnext_udp_ap10k.md
│ │ │ ├── cspnext_udp_ap10k.yml
│ │ │ ├── hrnet_ap10k.md
│ │ │ ├── hrnet_ap10k.yml
│ │ │ ├── resnet_ap10k.md
│ │ │ ├── resnet_ap10k.yml
│ │ │ ├── td-hm_hrnet-w32_8xb64-210e_ap10k-256x256.py
│ │ │ ├── td-hm_hrnet-w48_8xb64-210e_ap10k-256x256.py
│ │ │ ├── td-hm_res101_8xb64-210e_ap10k-256x256.py
│ │ │ └── td-hm_res50_8xb64-210e_ap10k-256x256.py
│ │ │ ├── locust
│ │ │ ├── resnet_locust.md
│ │ │ ├── resnet_locust.yml
│ │ │ ├── td-hm_res101_8xb64-210e_locust-160x160.py
│ │ │ ├── td-hm_res152_8xb32-210e_locust-160x160.py
│ │ │ └── td-hm_res50_8xb64-210e_locust-160x160.py
│ │ │ └── zebra
│ │ │ ├── resnet_zebra.md
│ │ │ ├── resnet_zebra.yml
│ │ │ ├── td-hm_res101_8xb64-210e_zebra-160x160.py
│ │ │ ├── td-hm_res152_8xb32-210e_zebra-160x160.py
│ │ │ └── td-hm_res50_8xb64-210e_zebra-160x160.py
│ ├── body_2d_keypoint
│ │ ├── README.md
│ │ ├── associative_embedding
│ │ │ ├── README.md
│ │ │ └── coco
│ │ │ │ └── ae_hrnet-w32_8xb24-300e_coco-512x512.py
│ │ ├── cid
│ │ │ └── coco
│ │ │ │ ├── cid_hrnet-w32_8xb20-140e_coco-512x512.py
│ │ │ │ ├── cid_hrnet-w48_8xb20-140e_coco-512x512.py
│ │ │ │ ├── hrnet_coco.md
│ │ │ │ └── hrnet_coco.yml
│ │ ├── dekr
│ │ │ ├── README.md
│ │ │ ├── coco
│ │ │ │ ├── dekr_hrnet-w32_8xb10-140e_coco-512x512.py
│ │ │ │ ├── dekr_hrnet-w48_8xb10-140e_coco-640x640.py
│ │ │ │ ├── hrnet_coco.md
│ │ │ │ └── hrnet_coco.yml
│ │ │ └── crowdpose
│ │ │ │ ├── dekr_hrnet-w32_8xb10-300e_crowdpose-512x512.py
│ │ │ │ ├── dekr_hrnet-w48_8xb5-300e_crowdpose-640x640.py
│ │ │ │ ├── hrnet_crowdpose.md
│ │ │ │ └── hrnet_crowdpose.yml
│ │ ├── integral_regression
│ │ │ ├── README.md
│ │ │ └── coco
│ │ │ │ ├── ipr_res50_8xb64-210e_coco-256x256.py
│ │ │ │ ├── ipr_res50_debias-8xb64-210e_coco-256x256.py
│ │ │ │ ├── ipr_res50_dsnt-8xb64-210e_coco-256x256.py
│ │ │ │ ├── resnet_debias_coco.md
│ │ │ │ ├── resnet_debias_coco.yml
│ │ │ │ ├── resnet_dsnt_coco.md
│ │ │ │ ├── resnet_dsnt_coco.yml
│ │ │ │ ├── resnet_ipr_coco.md
│ │ │ │ └── resnet_ipr_coco.yml
│ │ ├── rtmpose
│ │ │ ├── README.md
│ │ │ ├── body8
│ │ │ │ ├── rtmpose-l_8xb256-210e_body8-256x192.py
│ │ │ │ ├── rtmpose-l_8xb256-210e_body8-384x288.py
│ │ │ │ ├── rtmpose-m_8xb256-210e_body8-256x192.py
│ │ │ │ ├── rtmpose-m_8xb256-210e_body8-384x288.py
│ │ │ │ ├── rtmpose-s_8xb256-210e_body8-256x192.py
│ │ │ │ ├── rtmpose-t_8xb256-210e_body8-256x192.py
│ │ │ │ ├── rtmpose_body8.md
│ │ │ │ └── rtmpose_body8.yml
│ │ │ ├── coco
│ │ │ │ ├── rtmpose-l_8xb256-420e_aic-coco-256x192.py
│ │ │ │ ├── rtmpose-l_8xb256-420e_aic-coco-384x288.py
│ │ │ │ ├── rtmpose-l_8xb256-420e_coco-256x192.py
│ │ │ │ ├── rtmpose-m_8xb256-420e_aic-coco-256x192.py
│ │ │ │ ├── rtmpose-m_8xb256-420e_aic-coco-384x288.py
│ │ │ │ ├── rtmpose-m_8xb256-420e_coco-256x192.py
│ │ │ │ ├── rtmpose-s_8xb256-420e_aic-coco-256x192.py
│ │ │ │ ├── rtmpose-s_8xb256-420e_coco-256x192.py
│ │ │ │ ├── rtmpose-t_8xb256-420e_aic-coco-256x192.py
│ │ │ │ ├── rtmpose-t_8xb256-420e_coco-256x192.py
│ │ │ │ ├── rtmpose_coco.md
│ │ │ │ └── rtmpose_coco.yml
│ │ │ ├── crowdpose
│ │ │ │ ├── rtmpose-m_8xb64-210e_crowdpose-256x192.py
│ │ │ │ ├── rtmpose_crowdpose.md
│ │ │ │ └── rtmpose_crowdpose.yml
│ │ │ └── mpii
│ │ │ │ ├── rtmpose-m_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── rtmpose_mpii.md
│ │ │ │ └── rtmpose_mpii.yml
│ │ ├── simcc
│ │ │ ├── README.md
│ │ │ ├── coco
│ │ │ │ ├── mobilenetv2_coco.md
│ │ │ │ ├── mobilenetv2_coco.yml
│ │ │ │ ├── resnet_coco.md
│ │ │ │ ├── resnet_coco.yml
│ │ │ │ ├── simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192.py
│ │ │ │ ├── simcc_res50_8xb32-140e_coco-384x288.py
│ │ │ │ ├── simcc_res50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── simcc_vipnas-mbv3_8xb64-210e_coco-256x192.py
│ │ │ │ ├── vipnas_coco.md
│ │ │ │ └── vipnas_coco.yml
│ │ │ └── mpii
│ │ │ │ └── simcc_res50_wo-deconv-8xb64-210e_mpii-256x256.py
│ │ ├── topdown_heatmap
│ │ │ ├── README.md
│ │ │ ├── aic
│ │ │ │ ├── hrnet_aic.md
│ │ │ │ ├── hrnet_aic.yml
│ │ │ │ ├── resnet_aic.md
│ │ │ │ ├── resnet_aic.yml
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-210e_aic-256x192.py
│ │ │ │ └── td-hm_res101_8xb64-210e_aic-256x192.py
│ │ │ ├── coco
│ │ │ │ ├── alexnet_coco.md
│ │ │ │ ├── alexnet_coco.yml
│ │ │ │ ├── cpm_coco.md
│ │ │ │ ├── cpm_coco.yml
│ │ │ │ ├── cspnext-l_udp_8xb256-210e_aic-coco-256x192.py
│ │ │ │ ├── cspnext-l_udp_8xb256-210e_coco-256x192.py
│ │ │ │ ├── cspnext-m_udp_8xb256-210e_aic-coco-256x192.py
│ │ │ │ ├── cspnext-m_udp_8xb256-210e_coco-256x192.py
│ │ │ │ ├── cspnext-s_udp_8xb256-210e_aic-coco-256x192.py
│ │ │ │ ├── cspnext-s_udp_8xb256-210e_coco-256x192.py
│ │ │ │ ├── cspnext-tiny_udp_8xb256-210e_aic-coco-256x192.py
│ │ │ │ ├── cspnext-tiny_udp_8xb256-210e_coco-256x192.py
│ │ │ │ ├── cspnext_udp_coco.md
│ │ │ │ ├── cspnext_udp_coco.yml
│ │ │ │ ├── hourglass_coco.md
│ │ │ │ ├── hourglass_coco.yml
│ │ │ │ ├── hrformer_coco.md
│ │ │ │ ├── hrformer_coco.yml
│ │ │ │ ├── hrnet_augmentation_coco.md
│ │ │ │ ├── hrnet_augmentation_coco.yml
│ │ │ │ ├── hrnet_coco.md
│ │ │ │ ├── hrnet_coco.yml
│ │ │ │ ├── hrnet_coco_aic.md
│ │ │ │ ├── hrnet_dark_coco.md
│ │ │ │ ├── hrnet_dark_coco.yml
│ │ │ │ ├── hrnet_fp16_coco.md
│ │ │ │ ├── hrnet_udp_coco.md
│ │ │ │ ├── hrnet_udp_coco.yml
│ │ │ │ ├── litehrnet_coco.md
│ │ │ │ ├── litehrnet_coco.yml
│ │ │ │ ├── mobilenetv2_coco.md
│ │ │ │ ├── mobilenetv2_coco.yml
│ │ │ │ ├── mspn_coco.md
│ │ │ │ ├── mspn_coco.yml
│ │ │ │ ├── pvt_coco.md
│ │ │ │ ├── pvt_coco.yml
│ │ │ │ ├── resnest_coco.md
│ │ │ │ ├── resnest_coco.yml
│ │ │ │ ├── resnet_coco.md
│ │ │ │ ├── resnet_coco.yml
│ │ │ │ ├── resnet_dark_coco.md
│ │ │ │ ├── resnet_dark_coco.yml
│ │ │ │ ├── resnet_fp16_coco.md
│ │ │ │ ├── resnetv1d_coco.md
│ │ │ │ ├── resnetv1d_coco.yml
│ │ │ │ ├── resnext_coco.md
│ │ │ │ ├── resnext_coco.yml
│ │ │ │ ├── rsn_coco.md
│ │ │ │ ├── rsn_coco.yml
│ │ │ │ ├── scnet_coco.md
│ │ │ │ ├── scnet_coco.yml
│ │ │ │ ├── seresnet_coco.md
│ │ │ │ ├── seresnet_coco.yml
│ │ │ │ ├── shufflenetv1_coco.md
│ │ │ │ ├── shufflenetv1_coco.yml
│ │ │ │ ├── shufflenetv2_coco.md
│ │ │ │ ├── shufflenetv2_coco.yml
│ │ │ │ ├── swin_coco.md
│ │ │ │ ├── swin_coco.yml
│ │ │ │ ├── td-hm_2xmspn50_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_2xrsn50_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_3xmspn50_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_3xrsn50_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_4xmspn50_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-base-simple_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-base_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-huge-simple_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-huge_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-large-simple_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-large_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-small-simple_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_ViTPose-small_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_alexnet_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_cpm_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_cpm_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hourglass52_8xb32-210e_coco-256x256.py
│ │ │ │ ├── td-hm_hourglass52_8xb32-210e_coco-384x384.py
│ │ │ │ ├── td-hm_hrformer-base_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrformer-base_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_hrformer-small_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrformer-small_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-210e_coco-aic-256x192-combine.py
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-210e_coco-aic-256x192-merge.py
│ │ │ │ ├── td-hm_hrnet-w32_coarsedropout-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_dark-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_dark-8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_hrnet-w32_fp16-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_gridmask-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_photometric-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_udp-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_udp-8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_hrnet-w32_udp-regress-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w48_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w48_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_hrnet-w48_dark-8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w48_dark-8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_hrnet-w48_udp-8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_hrnet-w48_udp-8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_litehrnet-18_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_litehrnet-18_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_litehrnet-30_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_litehrnet-30_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_mobilenetv2_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_mobilenetv2_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_mspn50_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_pvt-s_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_pvtv2-b2_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_res101_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_res101_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_res101_dark-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_res101_dark-8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_res152_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_res152_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_res152_dark-8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_res152_dark-8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_res50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_res50_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_res50_dark-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_res50_dark-8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_res50_fp16-8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnest101_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnest101_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnest200_8xb16-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnest200_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnest269_8xb16-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnest269_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnest50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnest50_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnetv1d101_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnetv1d101_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnetv1d152_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnetv1d152_8xb48-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnetv1d50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnetv1d50_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnext101_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnext101_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnext152_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnext152_8xb48-210e_coco-384x288.py
│ │ │ │ ├── td-hm_resnext50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_resnext50_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_rsn18_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_rsn50_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_scnet101_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_scnet101_8xb48-210e_coco-384x288.py
│ │ │ │ ├── td-hm_scnet50_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_scnet50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_seresnet101_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_seresnet101_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_seresnet152_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_seresnet152_8xb48-210e_coco-384x288.py
│ │ │ │ ├── td-hm_seresnet50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_seresnet50_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_shufflenetv1_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_shufflenetv1_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_shufflenetv2_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_shufflenetv2_8xb64-210e_coco-384x288.py
│ │ │ │ ├── td-hm_swin-b-p4-w7_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_swin-b-p4-w7_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_swin-l-p4-w7_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_swin-l-p4-w7_8xb32-210e_coco-384x288.py
│ │ │ │ ├── td-hm_swin-t-p4-w7_8xb32-210e_coco-256x192.py
│ │ │ │ ├── td-hm_vgg16-bn_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_vipnas-mbv3_8xb64-210e_coco-256x192.py
│ │ │ │ ├── td-hm_vipnas-res50_8xb64-210e_coco-256x192.py
│ │ │ │ ├── vgg_coco.md
│ │ │ │ ├── vgg_coco.yml
│ │ │ │ ├── vipnas_coco.md
│ │ │ │ ├── vipnas_coco.yml
│ │ │ │ ├── vitpose_coco.md
│ │ │ │ └── vitpose_coco.yml
│ │ │ ├── crowdpose
│ │ │ │ ├── cspnext-m_udp_8xb64-210e_crowpose-256x192.py
│ │ │ │ ├── cspnext_udp_crowdpose.md
│ │ │ │ ├── cspnext_udp_crowdpose.yml
│ │ │ │ ├── hrnet_crowdpose.md
│ │ │ │ ├── hrnet_crowdpose.yml
│ │ │ │ ├── resnet_crowdpose.md
│ │ │ │ ├── resnet_crowdpose.yml
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-210e_crowdpose-256x192.py
│ │ │ │ ├── td-hm_res101_8xb64-210e_crowdpose-256x192.py
│ │ │ │ ├── td-hm_res101_8xb64-210e_crowdpose-320x256.py
│ │ │ │ ├── td-hm_res152_8xb64-210e_crowdpose-256x192.py
│ │ │ │ └── td-hm_res50_8xb64-210e_crowdpose-256x192.py
│ │ │ ├── jhmdb
│ │ │ │ ├── cpm_jhmdb.md
│ │ │ │ ├── cpm_jhmdb.yml
│ │ │ │ ├── resnet_jhmdb.md
│ │ │ │ ├── resnet_jhmdb.yml
│ │ │ │ ├── td-hm_cpm_8xb32-40e_jhmdb-sub1-368x368.py
│ │ │ │ ├── td-hm_cpm_8xb32-40e_jhmdb-sub2-368x368.py
│ │ │ │ ├── td-hm_cpm_8xb32-40e_jhmdb-sub3-368x368.py
│ │ │ │ ├── td-hm_res50-2deconv_8xb64-40e_jhmdb-sub1-256x256.py
│ │ │ │ ├── td-hm_res50-2deconv_8xb64-40e_jhmdb-sub2-256x256.py
│ │ │ │ ├── td-hm_res50-2deconv_8xb64-40e_jhmdb-sub3-256x256.py
│ │ │ │ ├── td-hm_res50_8xb64-20e_jhmdb-sub1-256x256.py
│ │ │ │ ├── td-hm_res50_8xb64-20e_jhmdb-sub2-256x256.py
│ │ │ │ └── td-hm_res50_8xb64-20e_jhmdb-sub3-256x256.py
│ │ │ ├── mpii
│ │ │ │ ├── cpm_mpii.md
│ │ │ │ ├── cpm_mpii.yml
│ │ │ │ ├── cspnext-m_udp_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── cspnext_udp_mpii.md
│ │ │ │ ├── cspnext_udp_mpii.yml
│ │ │ │ ├── hourglass_mpii.md
│ │ │ │ ├── hourglass_mpii.yml
│ │ │ │ ├── hrnet_dark_mpii.md
│ │ │ │ ├── hrnet_dark_mpii.yml
│ │ │ │ ├── hrnet_mpii.md
│ │ │ │ ├── hrnet_mpii.yml
│ │ │ │ ├── litehrnet_mpii.md
│ │ │ │ ├── litehrnet_mpii.yml
│ │ │ │ ├── mobilenetv2_mpii.md
│ │ │ │ ├── mobilenetv2_mpii.yml
│ │ │ │ ├── resnet_mpii.md
│ │ │ │ ├── resnet_mpii.yml
│ │ │ │ ├── resnetv1d_mpii.md
│ │ │ │ ├── resnetv1d_mpii.yml
│ │ │ │ ├── resnext_mpii.md
│ │ │ │ ├── resnext_mpii.yml
│ │ │ │ ├── scnet_mpii.md
│ │ │ │ ├── scnet_mpii.yml
│ │ │ │ ├── seresnet_mpii.md
│ │ │ │ ├── seresnet_mpii.yml
│ │ │ │ ├── shufflenetv1_mpii.md
│ │ │ │ ├── shufflenetv1_mpii.yml
│ │ │ │ ├── shufflenetv2_mpii.md
│ │ │ │ ├── shufflenetv2_mpii.yml
│ │ │ │ ├── td-hm_cpm_8xb64-210e_mpii-368x368.py
│ │ │ │ ├── td-hm_hourglass52_8xb32-210e_mpii-384x384.py
│ │ │ │ ├── td-hm_hourglass52_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_hrnet-w32_dark-8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_hrnet-w48_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_hrnet-w48_dark-8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_litehrnet-18_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_litehrnet-30_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_mobilenetv2_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_res101_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_res152_8xb32-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_res50_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_resnetv1d101_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_resnetv1d152_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_resnetv1d50_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_resnext152_8xb32-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_scnet101_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_scnet50_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_seresnet101_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_seresnet152_8xb32-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_seresnet50_8xb64-210e_mpii-256x256.py
│ │ │ │ ├── td-hm_shufflenetv1_8xb64-210e_mpii-256x256.py
│ │ │ │ └── td-hm_shufflenetv2_8xb64-210e_mpii-256x256.py
│ │ │ └── posetrack18
│ │ │ │ ├── hrnet_posetrack18.md
│ │ │ │ ├── hrnet_posetrack18.yml
│ │ │ │ ├── resnet_posetrack18.md
│ │ │ │ ├── resnet_posetrack18.yml
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-20e_posetrack18-256x192.py
│ │ │ │ ├── td-hm_hrnet-w32_8xb64-20e_posetrack18-384x288.py
│ │ │ │ ├── td-hm_hrnet-w48_8xb64-20e_posetrack18-256x192.py
│ │ │ │ ├── td-hm_hrnet-w48_8xb64-20e_posetrack18-384x288.py
│ │ │ │ └── td-hm_res50_8xb64-20e_posetrack18-256x192.py
│ │ └── topdown_regression
│ │ │ ├── README.md
│ │ │ ├── coco
│ │ │ ├── mobilenetv2_rle_coco.md
│ │ │ ├── mobilenetv2_rle_coco.yml
│ │ │ ├── resnet_coco.md
│ │ │ ├── resnet_coco.yml
│ │ │ ├── resnet_rle_coco.md
│ │ │ ├── resnet_rle_coco.yml
│ │ │ ├── td-reg_mobilenetv2_rle-pretrained-8xb64-210e_coco-256x192.py
│ │ │ ├── td-reg_res101_8xb64-210e_coco-256x192.py
│ │ │ ├── td-reg_res101_rle-8xb64-210e_coco-256x192.py
│ │ │ ├── td-reg_res152_8xb64-210e_coco-256x192.py
│ │ │ ├── td-reg_res152_rle-8xb64-210e_coco-256x192.py
│ │ │ ├── td-reg_res152_rle-8xb64-210e_coco-384x288.py
│ │ │ ├── td-reg_res50_8xb64-210e_coco-256x192.py
│ │ │ ├── td-reg_res50_rle-8xb64-210e_coco-256x192.py
│ │ │ └── td-reg_res50_rle-pretrained-8xb64-210e_coco-256x192.py
│ │ │ └── mpii
│ │ │ ├── resnet_mpii.md
│ │ │ ├── resnet_mpii.yml
│ │ │ ├── resnet_rle_mpii.md
│ │ │ ├── resnet_rle_mpii.yml
│ │ │ ├── td-reg_res101_8xb64-210e_mpii-256x256.py
│ │ │ ├── td-reg_res152_8xb64-210e_mpii-256x256.py
│ │ │ ├── td-reg_res50_8xb64-210e_mpii-256x256.py
│ │ │ └── td-reg_res50_rle-8xb64-210e_mpii-256x256.py
│ ├── body_3d_keypoint
│ │ └── README.md
│ ├── face_2d_keypoint
│ │ ├── README.md
│ │ ├── rtmpose
│ │ │ ├── README.md
│ │ │ ├── coco_wholebody_face
│ │ │ │ ├── rtmpose-m_8xb32-60e_coco-wholebody-face-256x256.py
│ │ │ │ ├── rtmpose_coco_wholebody_face.md
│ │ │ │ └── rtmpose_coco_wholebody_face.yml
│ │ │ ├── lapa
│ │ │ │ ├── rtmpose-m_8xb64-120e_lapa-256x256.py
│ │ │ │ ├── rtmpose_lapa.md
│ │ │ │ └── rtmpose_lapa.yml
│ │ │ └── wflw
│ │ │ │ ├── rtmpose-m_8xb64-60e_wflw-256x256.py
│ │ │ │ ├── rtmpose_wflw.md
│ │ │ │ └── rtmpose_wflw.yml
│ │ ├── topdown_heatmap
│ │ │ ├── 300w
│ │ │ │ ├── hrnetv2_300w.md
│ │ │ │ ├── hrnetv2_300w.yml
│ │ │ │ └── td-hm_hrnetv2-w18_8xb64-60e_300w-256x256.py
│ │ │ ├── README.md
│ │ │ ├── aflw
│ │ │ │ ├── hrnetv2_aflw.md
│ │ │ │ ├── hrnetv2_aflw.yml
│ │ │ │ ├── hrnetv2_dark_aflw.md
│ │ │ │ ├── hrnetv2_dark_aflw.yml
│ │ │ │ ├── td-hm_hrnetv2-w18_8xb64-60e_aflw-256x256.py
│ │ │ │ └── td-hm_hrnetv2-w18_dark-8xb64-60e_aflw-256x256.py
│ │ │ ├── coco_wholebody_face
│ │ │ │ ├── hourglass_coco_wholebody_face.md
│ │ │ │ ├── hourglass_coco_wholebody_face.yml
│ │ │ │ ├── hrnetv2_coco_wholebody_face.md
│ │ │ │ ├── hrnetv2_coco_wholebody_face.yml
│ │ │ │ ├── hrnetv2_dark_coco_wholebody_face.md
│ │ │ │ ├── hrnetv2_dark_coco_wholebody_face.yml
│ │ │ │ ├── mobilenetv2_coco_wholebody_face.md
│ │ │ │ ├── mobilenetv2_coco_wholebody_face.yml
│ │ │ │ ├── resnet_coco_wholebody_face.md
│ │ │ │ ├── resnet_coco_wholebody_face.yml
│ │ │ │ ├── scnet_coco_wholebody_face.md
│ │ │ │ ├── scnet_coco_wholebody_face.yml
│ │ │ │ ├── td-hm_hourglass52_8xb32-60e_coco-wholebody-face-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_8xb32-60e_coco-wholebody-face-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_dark-8xb32-60e_coco-wholebody-face-256x256.py
│ │ │ │ ├── td-hm_mobilenetv2_8xb32-60e_coco-wholebody-face-256x256.py
│ │ │ │ ├── td-hm_res50_8xb32-60e_coco-wholebody-face-256x256.py
│ │ │ │ └── td-hm_scnet50_8xb32-60e_coco-wholebody-face-256x256.py
│ │ │ ├── cofw
│ │ │ │ ├── hrnetv2_cofw.md
│ │ │ │ ├── hrnetv2_cofw.yml
│ │ │ │ └── td-hm_hrnetv2-w18_8xb64-60e_cofw-256x256.py
│ │ │ └── wflw
│ │ │ │ ├── hrnetv2_awing_wflw.md
│ │ │ │ ├── hrnetv2_awing_wflw.yml
│ │ │ │ ├── hrnetv2_dark_wflw.md
│ │ │ │ ├── hrnetv2_dark_wflw.yml
│ │ │ │ ├── hrnetv2_wflw.md
│ │ │ │ ├── hrnetv2_wflw.yml
│ │ │ │ ├── td-hm_hrnetv2-w18_8xb64-60e_wflw-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_awing-8xb64-60e_wflw-256x256.py
│ │ │ │ └── td-hm_hrnetv2-w18_dark-8xb64-60e_wflw-256x256.py
│ │ └── topdown_regression
│ │ │ ├── README.md
│ │ │ └── wflw
│ │ │ ├── resnet_softwingloss_wflw.md
│ │ │ ├── resnet_softwingloss_wflw.yml
│ │ │ ├── resnet_wflw.md
│ │ │ ├── resnet_wflw.yml
│ │ │ ├── resnet_wingloss_wflw.md
│ │ │ ├── resnet_wingloss_wflw.yml
│ │ │ ├── td-reg_res50_8xb64-210e_wflw-256x256.py
│ │ │ ├── td-reg_res50_softwingloss_8xb64-210e_wflw-256x256.py
│ │ │ └── td-reg_res50_wingloss_8xb64-210e_wflw-256x256.py
│ ├── fashion_2d_keypoint
│ │ ├── README.md
│ │ └── topdown_heatmap
│ │ │ └── deepfashion2
│ │ │ ├── res50_deepfashion2.md
│ │ │ ├── res50_deepfasion2.yml
│ │ │ ├── td-hm_res50_1xb64-210e_deepfasion2-long-sleeved-dress-256x192.py
│ │ │ ├── td-hm_res50_1xb64-210e_deepfasion2-skirt-256x192.py
│ │ │ ├── td-hm_res50_1xb64-210e_deepfasion2-vest-dress-256x192.py
│ │ │ ├── td-hm_res50_2xb64-210e_deepfasion2-trousers-256x192.py
│ │ │ ├── td-hm_res50_3xb64-210e_deepfasion2-shorts-256x192.py
│ │ │ ├── td-hm_res50_4xb64-210e_deepfasion2-short-sleeved-dress-256x192.py
│ │ │ ├── td-hm_res50_4xb64-210e_deepfasion2-sling-256x192.py
│ │ │ ├── td-hm_res50_4xb64-210e_deepfasion2-sling-dress-256x192.py
│ │ │ ├── td-hm_res50_4xb64-210e_deepfasion2-vest-256x192.py
│ │ │ ├── td-hm_res50_6xb64-210e_deepfasion2-short-sleeved-shirt-256x192.py
│ │ │ ├── td-hm_res50_8xb64-210e_deepfasion2-long-sleeved-outwear-256x192.py
│ │ │ ├── td-hm_res50_8xb64-210e_deepfasion2-long-sleeved-shirt-256x192.py
│ │ │ └── td-hm_res50_8xb64-210e_deepfasion2-short-sleeved-outwear-256x192.py
│ ├── hand_2d_keypoint
│ │ ├── README.md
│ │ ├── rtmpose
│ │ │ ├── README.md
│ │ │ ├── coco_wholebody_hand
│ │ │ │ ├── rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ │ ├── rtmpose_coco_wholebody_hand.md
│ │ │ │ └── rtmpose_coco_wholebody_hand.yml
│ │ │ └── hand5
│ │ │ │ ├── rtmpose-m_8xb256-210e_hand5-256x256.py
│ │ │ │ ├── rtmpose_hand5.md
│ │ │ │ └── rtmpose_hand5.yml
│ │ ├── topdown_heatmap
│ │ │ ├── README.md
│ │ │ ├── coco_wholebody_hand
│ │ │ │ ├── hourglass_coco_wholebody_hand.md
│ │ │ │ ├── hourglass_coco_wholebody_hand.yml
│ │ │ │ ├── hrnetv2_coco_wholebody_hand.md
│ │ │ │ ├── hrnetv2_coco_wholebody_hand.yml
│ │ │ │ ├── hrnetv2_dark_coco_wholebody_hand.md
│ │ │ │ ├── hrnetv2_dark_coco_wholebody_hand.yml
│ │ │ │ ├── litehrnet_coco_wholebody_hand.md
│ │ │ │ ├── litehrnet_coco_wholebody_hand.yml
│ │ │ │ ├── mobilenetv2_coco_wholebody_hand.md
│ │ │ │ ├── mobilenetv2_coco_wholebody_hand.yml
│ │ │ │ ├── resnet_coco_wholebody_hand.md
│ │ │ │ ├── resnet_coco_wholebody_hand.yml
│ │ │ │ ├── scnet_coco_wholebody_hand.md
│ │ │ │ ├── scnet_coco_wholebody_hand.yml
│ │ │ │ ├── td-hm_hourglass52_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_dark-8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ │ ├── td-hm_litehrnet-w18_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ │ ├── td-hm_mobilenetv2_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ │ ├── td-hm_res50_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ │ └── td-hm_scnet50_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ ├── freihand2d
│ │ │ │ ├── resnet_freihand2d.md
│ │ │ │ ├── resnet_freihand2d.yml
│ │ │ │ └── td-hm_res50_8xb64-100e_freihand2d-224x224.py
│ │ │ ├── onehand10k
│ │ │ │ ├── hrnetv2_dark_onehand10k.md
│ │ │ │ ├── hrnetv2_dark_onehand10k.yml
│ │ │ │ ├── hrnetv2_onehand10k.md
│ │ │ │ ├── hrnetv2_onehand10k.yml
│ │ │ │ ├── hrnetv2_udp_onehand10k.md
│ │ │ │ ├── hrnetv2_udp_onehand10k.yml
│ │ │ │ ├── mobilenetv2_onehand10k.md
│ │ │ │ ├── mobilenetv2_onehand10k.yml
│ │ │ │ ├── resnet_onehand10k.md
│ │ │ │ ├── resnet_onehand10k.yml
│ │ │ │ ├── td-hm_hrnetv2-w18_8xb64-210e_onehand10k-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_dark-8xb64-210e_onehand10k-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_udp-8xb64-210e_onehand10k-256x256.py
│ │ │ │ ├── td-hm_mobilenetv2_8xb64-210e_onehand10k-256x256.py
│ │ │ │ └── td-hm_res50_8xb32-210e_onehand10k-256x256.py
│ │ │ └── rhd2d
│ │ │ │ ├── hrnetv2_dark_rhd2d.md
│ │ │ │ ├── hrnetv2_dark_rhd2d.yml
│ │ │ │ ├── hrnetv2_rhd2d.md
│ │ │ │ ├── hrnetv2_rhd2d.yml
│ │ │ │ ├── hrnetv2_udp_rhd2d.md
│ │ │ │ ├── hrnetv2_udp_rhd2d.yml
│ │ │ │ ├── mobilenetv2_rhd2d.md
│ │ │ │ ├── mobilenetv2_rhd2d.yml
│ │ │ │ ├── resnet_rhd2d.md
│ │ │ │ ├── resnet_rhd2d.yml
│ │ │ │ ├── td-hm_hrnetv2-w18_8xb64-210e_rhd2d-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_dark-8xb64-210e_rhd2d-256x256.py
│ │ │ │ ├── td-hm_hrnetv2-w18_udp-8xb64-210e_rhd2d-256x256.py
│ │ │ │ ├── td-hm_mobilenetv2_8xb64-210e_rhd2d-256x256.py
│ │ │ │ └── td-hm_res50_8xb64-210e_rhd2d-256x256.py
│ │ └── topdown_regression
│ │ │ ├── README.md
│ │ │ ├── onehand10k
│ │ │ ├── resnet_onehand10k.md
│ │ │ ├── resnet_onehand10k.yml
│ │ │ └── td-reg_res50_8xb64-210e_onehand10k-256x256.py
│ │ │ └── rhd2d
│ │ │ ├── resnet_rhd2d.md
│ │ │ ├── resnet_rhd2d.yml
│ │ │ └── td-reg_res50_8xb64-210e_rhd2d-256x256.py
│ ├── hand_3d_keypoint
│ │ └── README.md
│ ├── hand_gesture
│ │ └── README.md
│ └── wholebody_2d_keypoint
│ │ ├── README.md
│ │ ├── rtmpose
│ │ ├── README.md
│ │ └── coco-wholebody
│ │ │ ├── rtmpose-l_8xb32-270e_coco-wholebody-384x288.py
│ │ │ ├── rtmpose-l_8xb64-270e_coco-wholebody-256x192.py
│ │ │ ├── rtmpose-m_8xb64-270e_coco-wholebody-256x192.py
│ │ │ ├── rtmpose_coco-wholebody.md
│ │ │ └── rtmpose_coco-wholebody.yml
│ │ └── topdown_heatmap
│ │ ├── README.md
│ │ └── coco-wholebody
│ │ ├── cspnext-l_udp_8xb64-210e_coco-wholebody-256x192.py
│ │ ├── cspnext-m_udp_8xb64-210e_coco-wholebody-256x192.py
│ │ ├── cspnext_udp_coco-wholebody.md
│ │ ├── cspnext_udp_coco-wholebody.yml
│ │ ├── hrnet_coco-wholebody.md
│ │ ├── hrnet_coco-wholebody.yml
│ │ ├── hrnet_dark_coco-wholebody.md
│ │ ├── hrnet_dark_coco-wholebody.yml
│ │ ├── resnet_coco-wholebody.md
│ │ ├── resnet_coco-wholebody.yml
│ │ ├── td-hm_hrnet-w32_8xb64-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_hrnet-w32_8xb64-210e_coco-wholebody-384x288.py
│ │ ├── td-hm_hrnet-w32_dark-8xb64-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_hrnet-w48_8xb32-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_hrnet-w48_8xb32-210e_coco-wholebody-384x288.py
│ │ ├── td-hm_hrnet-w48_dark-8xb32-210e_coco-wholebody-384x288.py
│ │ ├── td-hm_res101_8xb32-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_res101_8xb32-210e_coco-wholebody-384x288.py
│ │ ├── td-hm_res152_8xb32-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_res152_8xb32-210e_coco-wholebody-384x288.py
│ │ ├── td-hm_res50_8xb64-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_res50_8xb64-210e_coco-wholebody-384x288.py
│ │ ├── td-hm_vipnas-mbv3_8xb64-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_vipnas-mbv3_dark-8xb64-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_vipnas-res50_8xb64-210e_coco-wholebody-256x192.py
│ │ ├── td-hm_vipnas-res50_dark-8xb64-210e_coco-wholebody-256x192.py
│ │ ├── vipnas_coco-wholebody.md
│ │ ├── vipnas_coco-wholebody.yml
│ │ ├── vipnas_dark_coco-wholebody.md
│ │ └── vipnas_dark_coco-wholebody.yml
│ ├── demo
│ ├── MMPose_Tutorial.ipynb
│ ├── bottomup_demo.py
│ ├── docs
│ │ ├── en
│ │ │ ├── 2d_animal_demo.md
│ │ │ ├── 2d_face_demo.md
│ │ │ ├── 2d_hand_demo.md
│ │ │ ├── 2d_human_pose_demo.md
│ │ │ ├── 2d_wholebody_pose_demo.md
│ │ │ ├── mmdet_modelzoo.md
│ │ │ └── webcam_api_demo.md
│ │ └── zh_cn
│ │ │ ├── 2d_animal_demo.md
│ │ │ ├── 2d_face_demo.md
│ │ │ ├── 2d_hand_demo.md
│ │ │ ├── 2d_human_pose_demo.md
│ │ │ ├── 2d_wholebody_pose_demo.md
│ │ │ ├── mmdet_modelzoo.md
│ │ │ └── webcam_api_demo.md
│ ├── image_demo.py
│ ├── inferencer_demo.py
│ ├── mmdetection_cfg
│ │ ├── cascade_rcnn_x101_64x4d_fpn_1class.py
│ │ ├── cascade_rcnn_x101_64x4d_fpn_coco.py
│ │ ├── faster_rcnn_r50_fpn_1class.py
│ │ ├── faster_rcnn_r50_fpn_coco.py
│ │ ├── mask_rcnn_r50_fpn_2x_coco.py
│ │ ├── ssdlite_mobilenetv2-scratch_8xb24-600e_coco.py
│ │ ├── ssdlite_mobilenetv2_scratch_600e_onehand.py
│ │ ├── yolov3_d53_320_273e_coco.py
│ │ └── yolox-s_8xb8-300e_coco-face.py
│ ├── mmtracking_cfg
│ │ ├── deepsort_faster-rcnn_fpn_4e_mot17-private-half.py
│ │ └── tracktor_faster-rcnn_r50_fpn_4e_mot17-private.py
│ ├── resources
│ │ ├── demo.mp4
│ │ ├── demo_coco.gif
│ │ └── sunglasses.jpg
│ ├── topdown_demo_with_mmdet.py
│ ├── webcam_api_demo.py
│ └── webcam_cfg
│ │ ├── human_animal_pose.py
│ │ ├── human_pose.py
│ │ └── test_camera.py
│ ├── docker
│ ├── Dockerfile
│ └── serve
│ │ ├── Dockerfile
│ │ ├── config.properties
│ │ └── entrypoint.sh
│ ├── docs
│ ├── en
│ │ ├── Makefile
│ │ ├── _static
│ │ │ ├── css
│ │ │ │ └── readthedocs.css
│ │ │ └── images
│ │ │ │ └── mmpose-logo.png
│ │ ├── advanced_guides
│ │ │ ├── codecs.md
│ │ │ ├── customize_datasets.md
│ │ │ ├── customize_logging.md
│ │ │ ├── customize_optimizer.md
│ │ │ ├── customize_transforms.md
│ │ │ ├── dataflow.md
│ │ │ ├── how_to_deploy.md
│ │ │ ├── implement_new_models.md
│ │ │ └── model_analysis.md
│ │ ├── api.rst
│ │ ├── collect_modelzoo.py
│ │ ├── collect_projects.py
│ │ ├── conf.py
│ │ ├── contribution_guide.md
│ │ ├── dataset_zoo
│ │ │ ├── 2d_animal_keypoint.md
│ │ │ ├── 2d_body_keypoint.md
│ │ │ ├── 2d_face_keypoint.md
│ │ │ ├── 2d_fashion_landmark.md
│ │ │ ├── 2d_hand_keypoint.md
│ │ │ ├── 2d_wholebody_keypoint.md
│ │ │ ├── 3d_body_keypoint.md
│ │ │ ├── 3d_body_mesh.md
│ │ │ ├── 3d_hand_keypoint.md
│ │ │ └── dataset_tools.md
│ │ ├── faq.md
│ │ ├── guide_to_framework.md
│ │ ├── index.rst
│ │ ├── installation.md
│ │ ├── make.bat
│ │ ├── merge_docs.sh
│ │ ├── migration.md
│ │ ├── notes
│ │ │ ├── benchmark.md
│ │ │ ├── changelog.md
│ │ │ ├── ecosystem.md
│ │ │ └── pytorch_2.md
│ │ ├── overview.md
│ │ ├── projects
│ │ │ └── projects.md
│ │ ├── quick_run.md
│ │ ├── stats.py
│ │ ├── switch_language.md
│ │ ├── user_guides
│ │ │ ├── configs.md
│ │ │ ├── inference.md
│ │ │ ├── mixed_datasets.md
│ │ │ ├── prepare_datasets.md
│ │ │ └── train_and_test.md
│ │ ├── visualization.md
│ │ └── webcam_api.rst
│ ├── src
│ │ └── papers
│ │ │ ├── algorithms
│ │ │ ├── associative_embedding.md
│ │ │ ├── awingloss.md
│ │ │ ├── cid.md
│ │ │ ├── cpm.md
│ │ │ ├── dark.md
│ │ │ ├── debias_ipr.md
│ │ │ ├── deeppose.md
│ │ │ ├── dekr.md
│ │ │ ├── dsnt.md
│ │ │ ├── higherhrnet.md
│ │ │ ├── hmr.md
│ │ │ ├── hourglass.md
│ │ │ ├── hrnet.md
│ │ │ ├── hrnetv2.md
│ │ │ ├── internet.md
│ │ │ ├── ipr.md
│ │ │ ├── litehrnet.md
│ │ │ ├── mspn.md
│ │ │ ├── posewarper.md
│ │ │ ├── rle.md
│ │ │ ├── rsn.md
│ │ │ ├── rtmpose.md
│ │ │ ├── scnet.md
│ │ │ ├── simcc.md
│ │ │ ├── simplebaseline2d.md
│ │ │ ├── simplebaseline3d.md
│ │ │ ├── softwingloss.md
│ │ │ ├── udp.md
│ │ │ ├── videopose3d.md
│ │ │ ├── vipnas.md
│ │ │ ├── vitpose.md
│ │ │ ├── voxelpose.md
│ │ │ └── wingloss.md
│ │ │ ├── backbones
│ │ │ ├── alexnet.md
│ │ │ ├── cpm.md
│ │ │ ├── higherhrnet.md
│ │ │ ├── hourglass.md
│ │ │ ├── hrformer.md
│ │ │ ├── hrnet.md
│ │ │ ├── hrnetv2.md
│ │ │ ├── litehrnet.md
│ │ │ ├── mobilenetv2.md
│ │ │ ├── mspn.md
│ │ │ ├── pvt.md
│ │ │ ├── pvtv2.md
│ │ │ ├── resnest.md
│ │ │ ├── resnet.md
│ │ │ ├── resnetv1d.md
│ │ │ ├── resnext.md
│ │ │ ├── rsn.md
│ │ │ ├── scnet.md
│ │ │ ├── seresnet.md
│ │ │ ├── shufflenetv1.md
│ │ │ ├── shufflenetv2.md
│ │ │ ├── swin.md
│ │ │ ├── vgg.md
│ │ │ └── vipnas.md
│ │ │ ├── datasets
│ │ │ ├── 300w.md
│ │ │ ├── aflw.md
│ │ │ ├── aic.md
│ │ │ ├── animalpose.md
│ │ │ ├── ap10k.md
│ │ │ ├── atrw.md
│ │ │ ├── campus_and_shelf.md
│ │ │ ├── coco.md
│ │ │ ├── coco_wholebody.md
│ │ │ ├── coco_wholebody_face.md
│ │ │ ├── coco_wholebody_hand.md
│ │ │ ├── cofw.md
│ │ │ ├── crowdpose.md
│ │ │ ├── deepfashion.md
│ │ │ ├── fly.md
│ │ │ ├── freihand.md
│ │ │ ├── h36m.md
│ │ │ ├── halpe.md
│ │ │ ├── horse10.md
│ │ │ ├── interhand.md
│ │ │ ├── jhmdb.md
│ │ │ ├── locust.md
│ │ │ ├── macaque.md
│ │ │ ├── mhp.md
│ │ │ ├── mpi_inf_3dhp.md
│ │ │ ├── mpii.md
│ │ │ ├── mpii_trb.md
│ │ │ ├── ochuman.md
│ │ │ ├── onehand10k.md
│ │ │ ├── panoptic.md
│ │ │ ├── panoptic_body3d.md
│ │ │ ├── posetrack18.md
│ │ │ ├── rhd.md
│ │ │ ├── wflw.md
│ │ │ └── zebra.md
│ │ │ └── techniques
│ │ │ ├── albumentations.md
│ │ │ ├── awingloss.md
│ │ │ ├── dark.md
│ │ │ ├── fp16.md
│ │ │ ├── fpn.md
│ │ │ ├── rle.md
│ │ │ ├── smoothnet.md
│ │ │ ├── softwingloss.md
│ │ │ ├── udp.md
│ │ │ └── wingloss.md
│ └── zh_cn
│ │ ├── Makefile
│ │ ├── _static
│ │ ├── css
│ │ │ └── readthedocs.css
│ │ └── images
│ │ │ └── mmpose-logo.png
│ │ ├── advanced_guides
│ │ ├── codecs.md
│ │ ├── customize_datasets.md
│ │ ├── customize_logging.md
│ │ ├── customize_optimizer.md
│ │ ├── customize_transforms.md
│ │ ├── dataflow.md
│ │ ├── how_to_deploy.md
│ │ ├── implement_new_models.md
│ │ └── model_analysis.md
│ │ ├── api.rst
│ │ ├── collect_modelzoo.py
│ │ ├── collect_projects.py
│ │ ├── conf.py
│ │ ├── contribution_guide.md
│ │ ├── dataset_zoo
│ │ ├── 2d_animal_keypoint.md
│ │ ├── 2d_body_keypoint.md
│ │ ├── 2d_face_keypoint.md
│ │ ├── 2d_fashion_landmark.md
│ │ ├── 2d_hand_keypoint.md
│ │ ├── 2d_wholebody_keypoint.md
│ │ ├── 3d_body_keypoint.md
│ │ ├── 3d_body_mesh.md
│ │ ├── 3d_hand_keypoint.md
│ │ └── dataset_tools.md
│ │ ├── faq.md
│ │ ├── guide_to_framework.md
│ │ ├── index.rst
│ │ ├── installation.md
│ │ ├── make.bat
│ │ ├── merge_docs.sh
│ │ ├── migration.md
│ │ ├── notes
│ │ ├── changelog.md
│ │ ├── ecosystem.md
│ │ ├── projects.md
│ │ └── pytorch_2.md
│ │ ├── overview.md
│ │ ├── quick_run.md
│ │ ├── stats.py
│ │ ├── switch_language.md
│ │ ├── user_guides
│ │ ├── advanced_training.md
│ │ ├── configs.md
│ │ ├── inference.md
│ │ ├── mixed_datasets.md
│ │ ├── prepare_datasets.md
│ │ ├── train_and_test.md
│ │ ├── useful_tools.md
│ │ └── visualization.md
│ │ └── webcam_api.rst
│ ├── mmpose
│ ├── __init__.py
│ ├── apis
│ │ ├── __init__.py
│ │ ├── inference.py
│ │ ├── inferencers
│ │ │ ├── __init__.py
│ │ │ ├── base_mmpose_inferencer.py
│ │ │ ├── mmpose_inferencer.py
│ │ │ ├── pose2d_inferencer.py
│ │ │ └── utils
│ │ │ │ ├── __init__.py
│ │ │ │ ├── default_det_models.py
│ │ │ │ └── get_model_alias.py
│ │ └── webcam
│ │ │ ├── __init__.py
│ │ │ ├── nodes
│ │ │ ├── __init__.py
│ │ │ ├── base_visualizer_node.py
│ │ │ ├── helper_nodes
│ │ │ │ ├── __init__.py
│ │ │ │ ├── monitor_node.py
│ │ │ │ ├── object_assigner_node.py
│ │ │ │ └── recorder_node.py
│ │ │ ├── model_nodes
│ │ │ │ ├── __init__.py
│ │ │ │ ├── detector_node.py
│ │ │ │ └── pose_estimator_node.py
│ │ │ ├── node.py
│ │ │ ├── registry.py
│ │ │ └── visualizer_nodes
│ │ │ │ ├── __init__.py
│ │ │ │ ├── bigeye_effect_node.py
│ │ │ │ ├── notice_board_node.py
│ │ │ │ ├── object_visualizer_node.py
│ │ │ │ └── sunglasses_effect_node.py
│ │ │ ├── utils
│ │ │ ├── __init__.py
│ │ │ ├── buffer.py
│ │ │ ├── event.py
│ │ │ ├── image_capture.py
│ │ │ ├── message.py
│ │ │ ├── misc.py
│ │ │ └── pose.py
│ │ │ └── webcam_executor.py
│ ├── codecs
│ │ ├── __init__.py
│ │ ├── associative_embedding.py
│ │ ├── base.py
│ │ ├── decoupled_heatmap.py
│ │ ├── integral_regression_label.py
│ │ ├── megvii_heatmap.py
│ │ ├── msra_heatmap.py
│ │ ├── regression_label.py
│ │ ├── simcc_label.py
│ │ ├── spr.py
│ │ ├── udp_heatmap.py
│ │ └── utils
│ │ │ ├── __init__.py
│ │ │ ├── gaussian_heatmap.py
│ │ │ ├── instance_property.py
│ │ │ ├── offset_heatmap.py
│ │ │ ├── post_processing.py
│ │ │ └── refinement.py
│ ├── datasets
│ │ ├── __init__.py
│ │ ├── builder.py
│ │ ├── dataset_wrappers.py
│ │ ├── datasets
│ │ │ ├── __init__.py
│ │ │ ├── animal
│ │ │ │ ├── __init__.py
│ │ │ │ ├── animalpose_dataset.py
│ │ │ │ ├── ap10k_dataset.py
│ │ │ │ ├── atrw_dataset.py
│ │ │ │ ├── fly_dataset.py
│ │ │ │ ├── horse10_dataset.py
│ │ │ │ ├── locust_dataset.py
│ │ │ │ ├── macaque_dataset.py
│ │ │ │ └── zebra_dataset.py
│ │ │ ├── base
│ │ │ │ ├── __init__.py
│ │ │ │ └── base_coco_style_dataset.py
│ │ │ ├── body
│ │ │ │ ├── __init__.py
│ │ │ │ ├── aic_dataset.py
│ │ │ │ ├── coco_dataset.py
│ │ │ │ ├── crowdpose_dataset.py
│ │ │ │ ├── jhmdb_dataset.py
│ │ │ │ ├── mhp_dataset.py
│ │ │ │ ├── mpii_dataset.py
│ │ │ │ ├── mpii_trb_dataset.py
│ │ │ │ ├── ochuman_dataset.py
│ │ │ │ ├── posetrack18_dataset.py
│ │ │ │ └── posetrack18_video_dataset.py
│ │ │ ├── face
│ │ │ │ ├── __init__.py
│ │ │ │ ├── aflw_dataset.py
│ │ │ │ ├── coco_wholebody_face_dataset.py
│ │ │ │ ├── cofw_dataset.py
│ │ │ │ ├── face_300w_dataset.py
│ │ │ │ ├── lapa_dataset.py
│ │ │ │ └── wflw_dataset.py
│ │ │ ├── fashion
│ │ │ │ ├── __init__.py
│ │ │ │ ├── deepfashion2_dataset.py
│ │ │ │ └── deepfashion_dataset.py
│ │ │ ├── hand
│ │ │ │ ├── __init__.py
│ │ │ │ ├── coco_wholebody_hand_dataset.py
│ │ │ │ ├── freihand_dataset.py
│ │ │ │ ├── onehand10k_dataset.py
│ │ │ │ ├── panoptic_hand2d_dataset.py
│ │ │ │ └── rhd2d_dataset.py
│ │ │ ├── head
│ │ │ │ ├── __init__.py
│ │ │ │ ├── cephalometric_dataset.py
│ │ │ │ └── cephalometric_dataset_ISBI2015.py
│ │ │ ├── utils.py
│ │ │ └── wholebody
│ │ │ │ ├── __init__.py
│ │ │ │ ├── coco_wholebody_dataset.py
│ │ │ │ └── halpe_dataset.py
│ │ ├── samplers.py
│ │ └── transforms
│ │ │ ├── __init__.py
│ │ │ ├── bottomup_transforms.py
│ │ │ ├── common_transforms.py
│ │ │ ├── converting.py
│ │ │ ├── formatting.py
│ │ │ ├── loading.py
│ │ │ └── topdown_transforms.py
│ ├── engine
│ │ ├── __init__.py
│ │ ├── hooks
│ │ │ ├── __init__.py
│ │ │ ├── ema_hook.py
│ │ │ └── visualization_hook.py
│ │ └── optim_wrappers
│ │ │ ├── __init__.py
│ │ │ └── layer_decay_optim_wrapper.py
│ ├── evaluation
│ │ ├── __init__.py
│ │ ├── functional
│ │ │ ├── __init__.py
│ │ │ ├── keypoint_eval.py
│ │ │ └── nms.py
│ │ └── metrics
│ │ │ ├── __init__.py
│ │ │ ├── cephalometric_metric.py
│ │ │ ├── coco_metric.py
│ │ │ ├── coco_wholebody_metric.py
│ │ │ ├── keypoint_2d_metrics.py
│ │ │ ├── keypoint_partition_metric.py
│ │ │ └── posetrack18_metric.py
│ ├── models
│ │ ├── __init__.py
│ │ ├── backbones
│ │ │ ├── __init__.py
│ │ │ ├── alexnet.py
│ │ │ ├── base_backbone.py
│ │ │ ├── cpm.py
│ │ │ ├── hourglass.py
│ │ │ ├── hourglass_ae.py
│ │ │ ├── hrformer.py
│ │ │ ├── hrnet.py
│ │ │ ├── litehrnet.py
│ │ │ ├── mobilenet_v2.py
│ │ │ ├── mobilenet_v3.py
│ │ │ ├── monai_unet.py
│ │ │ ├── mspn.py
│ │ │ ├── pvt.py
│ │ │ ├── regnet.py
│ │ │ ├── resnest.py
│ │ │ ├── resnet.py
│ │ │ ├── resnext.py
│ │ │ ├── rsn.py
│ │ │ ├── scnet.py
│ │ │ ├── seresnet.py
│ │ │ ├── seresnext.py
│ │ │ ├── shufflenet_v1.py
│ │ │ ├── shufflenet_v2.py
│ │ │ ├── swin.py
│ │ │ ├── tcn.py
│ │ │ ├── utils
│ │ │ │ ├── __init__.py
│ │ │ │ ├── channel_shuffle.py
│ │ │ │ ├── ckpt_convert.py
│ │ │ │ ├── inverted_residual.py
│ │ │ │ ├── make_divisible.py
│ │ │ │ ├── se_layer.py
│ │ │ │ └── utils.py
│ │ │ ├── v2v_net.py
│ │ │ ├── vgg.py
│ │ │ ├── vipnas_mbv3.py
│ │ │ └── vipnas_resnet.py
│ │ ├── builder.py
│ │ ├── data_preprocessors
│ │ │ ├── __init__.py
│ │ │ └── data_preprocessor.py
│ │ ├── heads
│ │ │ ├── __init__.py
│ │ │ ├── base_head.py
│ │ │ ├── coord_cls_heads
│ │ │ │ ├── __init__.py
│ │ │ │ ├── rtmcc_head.py
│ │ │ │ └── simcc_head.py
│ │ │ ├── heatmap_heads
│ │ │ │ ├── __init__.py
│ │ │ │ ├── ae_head.py
│ │ │ │ ├── cid_head.py
│ │ │ │ ├── cpm_head.py
│ │ │ │ ├── heatmap_head.py
│ │ │ │ ├── mspn_head.py
│ │ │ │ ├── srpose_head.py
│ │ │ │ └── vipnas_head.py
│ │ │ ├── hybrid_heads
│ │ │ │ ├── __init__.py
│ │ │ │ └── dekr_head.py
│ │ │ └── regression_heads
│ │ │ │ ├── __init__.py
│ │ │ │ ├── dsnt_head.py
│ │ │ │ ├── integral_regression_head.py
│ │ │ │ ├── regression_head.py
│ │ │ │ └── rle_head.py
│ │ ├── losses
│ │ │ ├── __init__.py
│ │ │ ├── ae_loss.py
│ │ │ ├── classification_loss.py
│ │ │ ├── heatmap_loss.py
│ │ │ ├── loss_wrappers.py
│ │ │ └── regression_loss.py
│ │ ├── necks
│ │ │ ├── __init__.py
│ │ │ ├── fmap_proc_neck.py
│ │ │ ├── fpn.py
│ │ │ ├── gap_neck.py
│ │ │ └── posewarper_neck.py
│ │ ├── pose_estimators
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── bottomup.py
│ │ │ └── topdown.py
│ │ └── utils
│ │ │ ├── __init__.py
│ │ │ ├── check_and_update_config.py
│ │ │ ├── ckpt_convert.py
│ │ │ ├── geometry.py
│ │ │ ├── ops.py
│ │ │ ├── realnvp.py
│ │ │ ├── regularizations.py
│ │ │ ├── rtmcc_block.py
│ │ │ ├── transformer.py
│ │ │ └── tta.py
│ ├── registry.py
│ ├── structures
│ │ ├── __init__.py
│ │ ├── bbox
│ │ │ ├── __init__.py
│ │ │ └── transforms.py
│ │ ├── keypoint
│ │ │ ├── __init__.py
│ │ │ └── transforms.py
│ │ ├── multilevel_pixel_data.py
│ │ ├── pose_data_sample.py
│ │ └── utils.py
│ ├── testing
│ │ ├── __init__.py
│ │ └── _utils.py
│ ├── utils
│ │ ├── __init__.py
│ │ ├── camera.py
│ │ ├── collect_env.py
│ │ ├── config_utils.py
│ │ ├── hooks.py
│ │ ├── logger.py
│ │ ├── setup_env.py
│ │ ├── tensor_utils.py
│ │ ├── timer.py
│ │ └── typing.py
│ ├── version.py
│ └── visualization
│ │ ├── __init__.py
│ │ ├── local_visualizer.py
│ │ ├── opencv_backend_visualizer.py
│ │ └── simcc_vis.py
│ ├── model-index.yml
│ ├── projects
│ ├── README.md
│ ├── awesome-mmpose
│ │ └── README.md
│ ├── example_project
│ │ ├── README.md
│ │ ├── configs
│ │ │ └── example-head-loss_hrnet-w32_8xb64-210e_coco-256x192.py
│ │ └── models
│ │ │ ├── __init__.py
│ │ │ ├── example_head.py
│ │ │ └── example_loss.py
│ ├── faq.md
│ ├── mmpose4aigc
│ │ ├── README.md
│ │ ├── README_CN.md
│ │ ├── download_models.sh
│ │ ├── install_posetracker_linux.sh
│ │ ├── mmpose_openpose.sh
│ │ ├── mmpose_style_skeleton.sh
│ │ └── openpose_visualization.py
│ ├── rtmpose
│ │ ├── README.md
│ │ ├── README_CN.md
│ │ ├── benchmark
│ │ │ ├── README.md
│ │ │ └── README_CN.md
│ │ ├── examples
│ │ │ ├── README.md
│ │ │ ├── RTMPose-Deploy
│ │ │ │ ├── README.md
│ │ │ │ ├── README_CN.md
│ │ │ │ └── Windows
│ │ │ │ │ └── OnnxRumtime-CPU
│ │ │ │ │ └── src
│ │ │ │ │ └── RTMPoseOnnxRuntime
│ │ │ │ │ ├── characterset_convert.h
│ │ │ │ │ ├── main.cpp
│ │ │ │ │ ├── rtmdet_onnxruntime.cpp
│ │ │ │ │ ├── rtmdet_onnxruntime.h
│ │ │ │ │ ├── rtmpose_onnxruntime.cpp
│ │ │ │ │ ├── rtmpose_onnxruntime.h
│ │ │ │ │ ├── rtmpose_tracker_onnxruntime.cpp
│ │ │ │ │ ├── rtmpose_tracker_onnxruntime.h
│ │ │ │ │ └── rtmpose_utils.h
│ │ │ └── onnxruntime
│ │ │ │ ├── README.md
│ │ │ │ ├── README_CN.md
│ │ │ │ ├── human-pose.jpeg
│ │ │ │ ├── main.py
│ │ │ │ └── requirements.txt
│ │ ├── rtmdet
│ │ │ ├── hand
│ │ │ │ └── rtmdet_nano_320-8xb32_hand.py
│ │ │ └── person
│ │ │ │ ├── rtmdet_m_640-8xb32_coco-person.py
│ │ │ │ └── rtmdet_nano_320-8xb32_coco-person.py
│ │ └── rtmpose
│ │ │ ├── animal_2d_keypoint
│ │ │ └── rtmpose-m_8xb64-210e_ap10k-256x256.py
│ │ │ ├── body_2d_keypoint
│ │ │ ├── rtmpose-l_8xb256-420e_coco-256x192.py
│ │ │ ├── rtmpose-l_8xb256-420e_coco-384x288.py
│ │ │ ├── rtmpose-m_8xb256-420e_coco-256x192.py
│ │ │ ├── rtmpose-m_8xb256-420e_coco-384x288.py
│ │ │ ├── rtmpose-s_8xb256-420e_coco-256x192.py
│ │ │ └── rtmpose-t_8xb256-420e_coco-256x192.py
│ │ │ ├── face_2d_keypoint
│ │ │ └── rtmpose-m_8xb64-120e_lapa-256x256.py
│ │ │ ├── hand_2d_keypoint
│ │ │ └── rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py
│ │ │ ├── pretrain_cspnext_udp
│ │ │ ├── cspnext-l_udp_8xb256-210e_coco-256x192.py
│ │ │ ├── cspnext-m_udp_8xb256-210e_coco-256x192.py
│ │ │ ├── cspnext-s_udp_8xb256-210e_coco-256x192.py
│ │ │ └── cspnext-tiny_udp_8xb256-210e_coco-256x192.py
│ │ │ ├── pruning
│ │ │ ├── README.md
│ │ │ ├── README_CN.md
│ │ │ ├── group_fisher_deploy_rtmpose-s_8xb256-420e_aic-coco-256x192.py
│ │ │ ├── group_fisher_deploy_rtmpose-s_8xb256-420e_coco-256x192.py
│ │ │ ├── group_fisher_finetune_rtmpose-s_8xb256-420e_aic-coco-256x192.py
│ │ │ ├── group_fisher_finetune_rtmpose-s_8xb256-420e_coco-256x192.py
│ │ │ ├── group_fisher_prune_rtmpose-s_8xb256-420e_aic-coco-256x192.py
│ │ │ └── group_fisher_prune_rtmpose-s_8xb256-420e_coco-256x192.py
│ │ │ └── wholebody_2d_keypoint
│ │ │ ├── rtmpose-l_8xb32-270e_coco-wholebody-384x288.py
│ │ │ ├── rtmpose-l_8xb64-270e_coco-wholebody-256x192.py
│ │ │ └── rtmpose-m_8xb64-270e_coco-wholebody-256x192.py
│ └── yolox-pose
│ │ ├── README.md
│ │ ├── configs
│ │ ├── _base_
│ │ │ ├── datasets
│ │ │ └── default_runtime.py
│ │ ├── yolox-pose_l_4xb64-300e_coco.py
│ │ ├── yolox-pose_m_4xb64-300e_coco.py
│ │ ├── yolox-pose_s_8xb32-300e_coco.py
│ │ └── yolox-pose_tiny_4xb64-300e_coco.py
│ │ ├── datasets
│ │ ├── __init__.py
│ │ ├── bbox_keypoint_structure.py
│ │ ├── coco_dataset.py
│ │ └── transforms.py
│ │ ├── demo
│ │ ├── models
│ │ ├── __init__.py
│ │ ├── assigner.py
│ │ ├── data_preprocessor.py
│ │ ├── oks_loss.py
│ │ ├── utils.py
│ │ └── yolox_pose_head.py
│ │ └── tools
│ ├── pytest.ini
│ ├── requirements.txt
│ ├── requirements
│ ├── albu.txt
│ ├── build.txt
│ ├── docs.txt
│ ├── mminstall.txt
│ ├── optional.txt
│ ├── poseval.txt
│ ├── readthedocs.txt
│ ├── runtime.txt
│ └── tests.txt
│ ├── resources
│ └── mmpose-logo.png
│ ├── setup.cfg
│ ├── setup.py
│ ├── tests
│ ├── test_apis
│ │ ├── test_inference.py
│ │ ├── test_inferencers
│ │ │ ├── test_mmpose_inferencer.py
│ │ │ └── test_pose2d_inferencer.py
│ │ └── test_webcam
│ │ │ ├── test_nodes
│ │ │ ├── test_big_eye_effect_node.py
│ │ │ ├── test_detector_node.py
│ │ │ ├── test_monitor_node.py
│ │ │ ├── test_notice_board_node.py
│ │ │ ├── test_object_assigner_node.py
│ │ │ ├── test_object_visualizer_node.py
│ │ │ ├── test_pose_estimator_node.py
│ │ │ ├── test_recorder_node.py
│ │ │ └── test_sunglasses_effect_node.py
│ │ │ ├── test_utils
│ │ │ ├── test_buffer.py
│ │ │ ├── test_event.py
│ │ │ ├── test_image_capture.py
│ │ │ ├── test_message.py
│ │ │ ├── test_misc.py
│ │ │ └── test_pose.py
│ │ │ └── test_webcam_executor.py
│ ├── test_codecs
│ │ ├── test_associative_embedding.py
│ │ ├── test_decoupled_heatmap.py
│ │ ├── test_integral_regression_label.py
│ │ ├── test_megvii_heatmap.py
│ │ ├── test_msra_heatmap.py
│ │ ├── test_regression_label.py
│ │ ├── test_simcc_label.py
│ │ ├── test_spr.py
│ │ └── test_udp_heatmap.py
│ ├── test_datasets
│ │ ├── test_datasets
│ │ │ ├── test_animal_datasets
│ │ │ │ ├── test_animalpose_dataset.py
│ │ │ │ ├── test_ap10k_dataset.py
│ │ │ │ ├── test_atrw_dataset.py
│ │ │ │ ├── test_fly_dataset.py
│ │ │ │ ├── test_horse10_dataset.py
│ │ │ │ ├── test_locust_dataset.py
│ │ │ │ ├── test_macaque_dataset.py
│ │ │ │ └── test_zebra_dataset.py
│ │ │ ├── test_body_datasets
│ │ │ │ ├── test_aic_dataset.py
│ │ │ │ ├── test_coco_dataset.py
│ │ │ │ ├── test_crowdpose_dataset.py
│ │ │ │ ├── test_jhmdb_dataset.py
│ │ │ │ ├── test_mhp_dataset.py
│ │ │ │ ├── test_mpii_dataset.py
│ │ │ │ ├── test_mpii_trb_dataset.py
│ │ │ │ ├── test_ochuman_dataset.py
│ │ │ │ ├── test_posetrack18_dataset.py
│ │ │ │ └── test_posetrack18_video_dataset.py
│ │ │ ├── test_dataset_wrappers
│ │ │ │ └── test_combined_dataset.py
│ │ │ ├── test_face_datasets
│ │ │ │ ├── test_aflw_dataset.py
│ │ │ │ ├── test_coco_wholebody_face_dataset.py
│ │ │ │ ├── test_cofw_dataset.py
│ │ │ │ ├── test_face_300w_dataset.py
│ │ │ │ ├── test_lapa_dataset.py
│ │ │ │ └── test_wflw_dataset.py
│ │ │ ├── test_fashion_datasets
│ │ │ │ └── test_deepfashion_dataset.py
│ │ │ ├── test_hand_datasets
│ │ │ │ ├── test_coco_wholebody_hand_dataset.py
│ │ │ │ ├── test_freihand_dataset.py
│ │ │ │ ├── test_onehand10k_dataset.py
│ │ │ │ ├── test_panoptic_hand2d_dataset.py
│ │ │ │ └── test_rhd2d_dataset.py
│ │ │ └── test_wholebody_datasets
│ │ │ │ ├── test_coco_wholebody_dataset.py
│ │ │ │ └── test_halpe_dataset.py
│ │ └── test_transforms
│ │ │ ├── test_bottomup_transforms.py
│ │ │ ├── test_common_transforms.py
│ │ │ ├── test_converting.py
│ │ │ ├── test_formatting.py
│ │ │ ├── test_loading.py
│ │ │ └── test_topdown_transforms.py
│ ├── test_engine
│ │ └── test_hooks
│ │ │ └── test_visualization_hook.py
│ ├── test_evaluation
│ │ ├── test_functional
│ │ │ ├── test_keypoint_eval.py
│ │ │ └── test_nms.py
│ │ └── test_metrics
│ │ │ ├── test_coco_metric.py
│ │ │ ├── test_coco_wholebody_metric.py
│ │ │ ├── test_keypoint_2d_metrics.py
│ │ │ ├── test_keypoint_partition_metric.py
│ │ │ └── test_posetrack18_metric.py
│ ├── test_models
│ │ ├── test_backbones
│ │ │ ├── test_alexnet.py
│ │ │ ├── test_backbones_utils.py
│ │ │ ├── test_cpm.py
│ │ │ ├── test_hourglass.py
│ │ │ ├── test_hrformer.py
│ │ │ ├── test_hrnet.py
│ │ │ ├── test_litehrnet.py
│ │ │ ├── test_mobilenet_v2.py
│ │ │ ├── test_mobilenet_v3.py
│ │ │ ├── test_mspn.py
│ │ │ ├── test_pvt.py
│ │ │ ├── test_regnet.py
│ │ │ ├── test_resnest.py
│ │ │ ├── test_resnet.py
│ │ │ ├── test_resnext.py
│ │ │ ├── test_rsn.py
│ │ │ ├── test_scnet.py
│ │ │ ├── test_seresnet.py
│ │ │ ├── test_seresnext.py
│ │ │ ├── test_shufflenet_v1.py
│ │ │ ├── test_shufflenet_v2.py
│ │ │ ├── test_swin.py
│ │ │ ├── test_tcn.py
│ │ │ ├── test_v2v_net.py
│ │ │ ├── test_vgg.py
│ │ │ ├── test_vipnas_mbv3.py
│ │ │ └── test_vipnas_resnet.py
│ │ ├── test_heads
│ │ │ ├── test_heatmap_heads
│ │ │ │ ├── test_ae_head.py
│ │ │ │ ├── test_cid_head.py
│ │ │ │ ├── test_cpm_head.py
│ │ │ │ ├── test_heatmap_head.py
│ │ │ │ ├── test_mspn_head.py
│ │ │ │ ├── test_rtmcc_head.py
│ │ │ │ ├── test_simcc_head.py
│ │ │ │ └── test_vipnas_head.py
│ │ │ ├── test_hybrid_heads
│ │ │ │ └── test_dekr_head.py
│ │ │ └── test_regression_heads
│ │ │ │ ├── test_dsnt_head.py
│ │ │ │ ├── test_integral_regression_head.py
│ │ │ │ ├── test_regression_head.py
│ │ │ │ └── test_rle_head.py
│ │ ├── test_losses
│ │ │ ├── test_ae_loss.py
│ │ │ ├── test_classification_losses.py
│ │ │ ├── test_heatmap_losses.py
│ │ │ └── test_regression_losses.py
│ │ ├── test_necks
│ │ │ └── test_fmap_proc_neck.py
│ │ ├── test_pose_estimators
│ │ │ ├── test_bottomup.py
│ │ │ └── test_topdown.py
│ │ └── test_utils
│ │ │ └── test_check_and_update_config.py
│ ├── test_structures
│ │ ├── test_multilevel_pixel_data.py
│ │ └── test_pose_data_sample.py
│ ├── test_utils
│ │ └── test_setup_env.py
│ └── test_visualization
│ │ └── test_pose_visualizer.py
│ └── tools
│ ├── analysis_tools
│ ├── analyze_logs.py
│ ├── get_flops.py
│ └── print_config.py
│ ├── dataset_converters
│ ├── h36m_to_coco.py
│ ├── lapa2coco.py
│ ├── mat2json.py
│ ├── parse_animalpose_dataset.py
│ ├── parse_cofw_dataset.py
│ ├── parse_deepposekit_dataset.py
│ ├── parse_macaquepose_dataset.py
│ ├── preprocess_h36m.py
│ └── preprocess_mpi_inf_3dhp.py
│ ├── dist_test.sh
│ ├── dist_train.sh
│ ├── misc
│ ├── browse_dataset.py
│ ├── keypoints2coco_without_mmdet.py
│ └── publish_model.py
│ ├── slurm_test.sh
│ ├── slurm_train.sh
│ ├── test.py
│ ├── torchserve
│ ├── mmpose2torchserve.py
│ ├── mmpose_handler.py
│ └── test_torchserver.py
│ └── train.py
├── requirements.txt
├── step1_test_mmpose.py
├── step2_prepare_coco_dataset.py
├── step3_train_and_evaluation.py
└── step4_test_and_visualize.py
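The tree above follows the MMPose 1.x layout: model, dataset, and schedule settings live in executable Python config files such as configs/CLdetection2023/srpose_s2.py, which mmengine parses into a nested dict. A minimal sketch of inspecting that config (assuming mmengine is installed as in install_env.sh; the printed keys depend on the file's actual contents):

from mmengine.config import Config

# Config.fromfile executes the Python config file and returns a dict-like object.
cfg = Config.fromfile('configs/CLdetection2023/srpose_s2.py')
print(list(cfg.keys()))  # typically includes model, train_dataloader, etc.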
/.gitignore:
--------------------------------------------------------------------------------
data
MMPose-checkpoints
work_dirs
*.log
__pycache__
*.__pycache__
*.pyc
external_repos
--------------------------------------------------------------------------------
/Pictures_for_Github_only/Online Result.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/Pictures_for_Github_only/Online Result.png
--------------------------------------------------------------------------------
/Pictures_for_Github_only/mainframework.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/Pictures_for_Github_only/mainframework.png
--------------------------------------------------------------------------------
/install_env.sh:
--------------------------------------------------------------------------------
conda create -n LMD python=3.10
conda activate LMD
pip install -r requirements.txt
pip install -U openmim
cd mmpose_package/mmpose
pip install -e .
mim install mmengine
mim install "mmcv>=2.0.0"
pip install --upgrade numpy
--------------------------------------------------------------------------------
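Note that "conda activate" only takes effect in a shell where conda's hook has been initialized, so these lines are usually run one at a time rather than as a batch script. After the editable install, a quick import check confirms the environment is wired up; a minimal sketch (version strings depend on the checkout):

# Run inside the LMD environment to confirm the editable mmpose install.
import mmpose
import mmengine
import mmcv

print(mmpose.__version__, mmengine.__version__, mmcv.__version__)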
/mmpose_package/mmpose/.owners.yml:
--------------------------------------------------------------------------------
assign:
  issues: enabled
  pull_requests: disabled
  strategy:
    # random
    daily-shift-based
  scedule:
    '*/1 * * * *'
  assignees:
    - Tau-J
    - LareinaM
    - Ben-Louis
    - LareinaM
    - Ben-Louis
    - Tau-J
    - Tau-J
--------------------------------------------------------------------------------
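The scedule value above is a standard five-field cron expression; '*/1 * * * *' fires once per minute. A minimal sketch of expanding it with the third-party croniter package (an assumption -- the assign bot that consumes this file is not part of the repo):

from datetime import datetime
from croniter import croniter

# Print the next three firing times of the cron expression from .owners.yml.
it = croniter('*/1 * * * *', datetime(2023, 1, 1))
for _ in range(3):
    print(it.get_next(datetime))  # one tick per minute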
/mmpose_package/mmpose/.readthedocs.yml:
--------------------------------------------------------------------------------
version: 2

formats:
  - epub

python:
  version: 3.7
  install:
    - requirements: requirements/docs.txt
    - requirements: requirements/readthedocs.txt
--------------------------------------------------------------------------------
/mmpose_package/mmpose/CITATION.cff:
--------------------------------------------------------------------------------
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
  - name: "MMPose Contributors"
title: "OpenMMLab Pose Estimation Toolbox and Benchmark"
date-released: 2020-08-31
url: "https://github.com/open-mmlab/mmpose"
license: Apache-2.0
--------------------------------------------------------------------------------
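CITATION.cff is plain YAML, so the citation metadata can be read with any YAML parser; a minimal sketch that renders a one-line citation (assuming PyYAML and the path below):

import yaml

# Build a short citation string from the repository's CITATION.cff.
with open('mmpose_package/mmpose/CITATION.cff') as f:
    cff = yaml.safe_load(f)

authors = ', '.join(a['name'] for a in cff['authors'])
print(f"{authors} ({cff['date-released']}). {cff['title']}. {cff['url']}")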
/mmpose_package/mmpose/MANIFEST.in:
--------------------------------------------------------------------------------
include requirements/*.txt
include mmpose/.mim/model-index.yml
recursive-include mmpose/.mim/configs *.py *.yml
recursive-include mmpose/.mim/tools *.py *.sh
recursive-include mmpose/.mim/demo *.py
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/animal_2d_keypoint/README.md:
--------------------------------------------------------------------------------
# 2D Animal Keypoint Detection

2D animal keypoint detection (animal pose estimation) aims to detect the keypoints of different species, including rats, dogs, macaques, and cheetahs. It enables detailed behavioral analysis for neuroscience, medical, and ecological applications.

## Data preparation

Please follow [DATA Preparation](/docs/en/dataset_zoo/2d_animal_keypoint.md) to prepare data.

## Demo

Please follow [DEMO](/demo/docs/en/2d_animal_demo.md) to generate fancy demos.








--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/animal_2d_keypoint/rtmpose/README.md:
--------------------------------------------------------------------------------
1 | # RTMPose
2 |
3 | Recent studies on 2D pose estimation have achieved excellent performance on public benchmarks, yet its application in the industrial community still suffers from heavy model parameters and high latency.
4 | In order to bridge this gap, we empirically study five aspects that affect the performance of multi-person pose estimation algorithms: paradigm, backbone network, localization algorithm, training strategy, and deployment inference, and present a high-performance real-time multi-person pose estimation framework, **RTMPose**, based on MMPose.
5 | Our RTMPose-m achieves **75.8% AP** on COCO with **90+ FPS** on an Intel i7-11700 CPU and **430+ FPS** on an NVIDIA GTX 1660 Ti GPU, and RTMPose-l achieves **67.0% AP** on COCO-WholeBody with **130+ FPS**, outperforming existing open-source libraries.
6 | To further evaluate RTMPose's capability in critical real-time applications, we also report the performance after deployment on mobile devices.
7 |
8 | ## Results and Models
9 |
10 | ### AP-10K Dataset
11 |
12 | Results on AP-10K validation set
13 |
14 | | Model | Input Size | AP | Details and Download |
15 | | :-------: | :--------: | :---: | :------------------------------------------: |
16 | | RTMPose-m | 256x256 | 0.722 | [rtmpose_ap10k.md](./ap10k/rtmpose_ap10k.md) |
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/animal_2d_keypoint/rtmpose/ap10k/rtmpose_ap10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/animal_2d_keypoint/rtmpose/ap10k/rtmpose-m_8xb64-210e_ap10k-256x256.py
3 | In Collection: RTMPose
4 | Alias: animal
5 | Metadata:
6 | Architecture:
7 | - RTMPose
8 | Training Data: AP-10K
9 | Name: rtmpose-m_8xb64-210e_ap10k-256x256
10 | Results:
11 | - Dataset: AP-10K
12 | Metrics:
13 | AP: 0.722
14 | AP@0.5: 0.939
15 | AP@0.75: 0.788
16 | AP (L): 0.728
17 | AP (M): 0.569
18 | Task: Animal 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-ap10k_pt-aic-coco_210e-256x256-7a041aa1_20230206.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/animal_2d_keypoint/topdown_heatmap/animalpose/hrnet_animalpose.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/animal_2d_keypoint/topdown_heatmap/animalpose/td-hm_hrnet-w32_8xb64-210e_animalpose-256x256.py
3 | In Collection: HRNet
4 | Metadata:
5 | Architecture: &id001
6 | - HRNet
7 | Training Data: Animal-Pose
8 | Name: td-hm_hrnet-w32_8xb64-210e_animalpose-256x256
9 | Results:
10 | - Dataset: Animal-Pose
11 | Metrics:
12 | AP: 0.740
13 | AP@0.5: 0.959
14 | AP@0.75: 0.833
15 | AR: 0.780
16 | AR@0.5: 0.965
17 | Task: Animal 2D Keypoint
18 | Weights: https://download.openmmlab.com/mmpose/animal/hrnet/hrnet_w32_animalpose_256x256-1aa7f075_20210426.pth
19 | - Config: configs/animal_2d_keypoint/topdown_heatmap/animalpose/td-hm_hrnet-w48_8xb64-210e_animalpose-256x256.py
20 | In Collection: HRNet
21 | Metadata:
22 | Architecture: *id001
23 | Training Data: Animal-Pose
24 | Name: td-hm_hrnet-w48_8xb64-210e_animalpose-256x256
25 | Results:
26 | - Dataset: Animal-Pose
27 | Metrics:
28 | AP: 0.738
29 | AP@0.5: 0.958
30 | AP@0.75: 0.831
31 | AR: 0.778
32 | AR@0.5: 0.962
33 | Task: Animal 2D Keypoint
34 | Weights: https://download.openmmlab.com/mmpose/animal/hrnet/hrnet_w48_animalpose_256x256-34644726_20210426.pth
35 |
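Note the `&id001` anchor and `*id001` alias used throughout these metafiles: the second model entry reuses the first entry's `Architecture` list instead of repeating it. A minimal sketch of how a YAML loader resolves this (assuming PyYAML is installed):

```python
import yaml  # PyYAML

doc = """
Models:
- Architecture: &id001
  - HRNet
- Architecture: *id001
"""

models = yaml.safe_load(doc)["Models"]
# The *id001 alias resolves to the list anchored by &id001,
# so both entries share the same Architecture value.
assert models[0]["Architecture"] == models[1]["Architecture"] == ["HRNet"]
```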
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/animal_2d_keypoint/topdown_heatmap/ap10k/cspnext_udp_ap10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/animal_2d_keypoint/topdown_heatmap/ap10k/cspnext-m_udp_8xb64-210e_ap10k-256x256.py
3 | In Collection: UDP
4 | Metadata:
5 | Architecture: &id001
6 | - UDP
7 | - HRNet
8 | Training Data: AP-10K
9 | Name: cspnext-m_udp_8xb64-210e_ap10k-256x256
10 | Results:
11 | - Dataset: AP-10K
12 | Metrics:
13 | AP: 0.703
14 | AP@0.5: 0.944
15 | AP@0.75: 0.776
16 | AP (L): 0.71
17 | AP (M): 0.513
18 | Task: Animal 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-ap10k_pt-in1k_210e-256x256-1f2d947a_20230123.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/animal_2d_keypoint/topdown_heatmap/ap10k/hrnet_ap10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/animal_2d_keypoint/topdown_heatmap/ap10k/td-hm_hrnet-w32_8xb64-210e_ap10k-256x256.py
3 | In Collection: HRNet
4 | Metadata:
5 | Architecture: &id001
6 | - HRNet
7 | Training Data: AP-10K
8 | Name: td-hm_hrnet-w32_8xb64-210e_ap10k-256x256
9 | Results:
10 | - Dataset: AP-10K
11 | Metrics:
12 | AP: 0.722
13 | AP@0.5: 0.935
14 | AP@0.75: 0.789
15 | AP (L): 0.729
16 | AP (M): 0.557
17 | Task: Animal 2D Keypoint
18 | Weights: https://download.openmmlab.com/mmpose/animal/hrnet/hrnet_w32_ap10k_256x256-18aac840_20211029.pth
19 | - Config: configs/animal_2d_keypoint/topdown_heatmap/ap10k/td-hm_hrnet-w48_8xb64-210e_ap10k-256x256.py
20 | In Collection: HRNet
21 | Metadata:
22 | Architecture: *id001
23 | Training Data: AP-10K
24 | Name: td-hm_hrnet-w48_8xb64-210e_ap10k-256x256
25 | Results:
26 | - Dataset: AP-10K
27 | Metrics:
28 | AP: 0.728
29 | AP@0.5: 0.936
30 | AP@0.75: 0.802
31 | AP (L): 0.735
32 | AP (M): 0.577
33 | Task: Animal 2D Keypoint
34 | Weights: https://download.openmmlab.com/mmpose/animal/hrnet/hrnet_w48_ap10k_256x256-d95ab412_20211029.pth
35 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/animal_2d_keypoint/topdown_heatmap/ap10k/resnet_ap10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/animal_2d_keypoint/topdown_heatmap/ap10k/td-hm_res50_8xb64-210e_ap10k-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture: &id001
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: AP-10K
9 | Name: td-hm_res50_8xb64-210e_ap10k-256x256
10 | Results:
11 | - Dataset: AP-10K
12 | Metrics:
13 | AP: 0.680
14 | AP@0.5: 0.926
15 | AP@0.75: 0.738
16 | AP (L): 0.687
17 | AP (M): 0.552
18 | Task: Animal 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/animal/resnet/res50_ap10k_256x256-35760eb8_20211029.pth
20 | - Config: configs/animal_2d_keypoint/topdown_heatmap/ap10k/td-hm_res101_8xb64-210e_ap10k-256x256.py
21 | In Collection: SimpleBaseline2D
22 | Metadata:
23 | Architecture: *id001
24 | Training Data: AP-10K
25 | Name: td-hm_res101_8xb64-210e_ap10k-256x256
26 | Results:
27 | - Dataset: AP-10K
28 | Metrics:
29 | AP: 0.681
30 | AP@0.5: 0.921
31 | AP@0.75: 0.751
32 | AP (L): 0.690
33 | AP (M): 0.545
34 | Task: Animal 2D Keypoint
35 | Weights: https://download.openmmlab.com/mmpose/animal/resnet/res101_ap10k_256x256-9edfafb9_20211029.pth
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/README.md:
--------------------------------------------------------------------------------
1 | # Human Body 2D Pose Estimation
2 |
3 | Multi-person human pose estimation is defined as the task of detecting the poses (or keypoints) of all people from an input image.
4 |
5 | Existing approaches can be categorized into top-down and bottom-up methods.
6 |
7 | Top-down methods (e.g. DeepPose) divide the task into two stages: human detection and pose estimation. They perform human detection first, followed by single-person pose estimation given human bounding boxes.
8 |
9 | Bottom-up approaches (e.g. Associative Embedding) first detect all the keypoints and then group/associate them into person instances.
10 |
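To make the two paradigms concrete, a schematic top-down pipeline might look like the following (hypothetical helper names, not the actual MMPose API; a bottom-up pipeline would instead run the pose model once on the full image and group keypoints afterwards):

```python
def top_down_pipeline(image, detector, pose_model):
    """Top-down: detect people first, then estimate one pose per box."""
    boxes = detector(image)              # stage 1: human detection
    poses = []
    for box in boxes:
        crop = image.crop(box)           # crop a single-person region
        poses.append(pose_model(crop))   # stage 2: single-person pose
    return poses
```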
11 | ## Data preparation
12 |
13 | Please follow [DATA Preparation](/docs/en/dataset_zoo/2d_body_keypoint.md) to prepare data.
14 |
15 | ## Demo
16 |
17 | Please follow [Demo](/demo/docs/en/2d_human_pose_demo.md#2d-human-pose-demo) to run demos.
18 |
19 |
20 | 
21 |
22 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/associative_embedding/README.md:
--------------------------------------------------------------------------------
1 | # Associative embedding: End-to-end learning for joint detection and grouping (AE)
2 |
3 | Associative Embedding is one of the most popular 2D bottom-up pose estimation approaches; it first detects all the keypoints and then groups/associates them into person instances.
4 |
5 | In order to group all the predicted keypoints into individuals, a tag is also predicted for each detected keypoint. Tags of the same person are similar, while tags of different people differ, so the keypoints can be grouped according to their tags, as sketched below.
6 |
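A minimal sketch of the tag-based grouping described above (illustrative only; the actual AE grouping is a more careful greedy matching over joint types and candidate scores):

```python
import numpy as np

def group_by_tags(keypoints, tags, threshold=1.0):
    """Assign each keypoint to the person whose mean tag is closest;
    open a new person when no existing tag group is close enough."""
    people = []  # each person: {"tags": [...], "kpts": [...]}
    for kpt, tag in zip(keypoints, tags):
        best, best_dist = None, threshold
        for person in people:
            dist = abs(tag - np.mean(person["tags"]))
            if dist < best_dist:
                best, best_dist = person, dist
        if best is None:
            best = {"tags": [], "kpts": []}
            people.append(best)
        best["tags"].append(tag)
        best["kpts"].append(kpt)
    return people
```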
7 |
8 | 
9 |
10 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/dekr/README.md:
--------------------------------------------------------------------------------
1 | # Bottom-up Human Pose Estimation via Disentangled Keypoint Regression (DEKR)
2 |
3 |
4 |
5 |
6 | DEKR (CVPR'2021)
7 |
8 | ```bibtex
9 | @inproceedings{geng2021bottom,
10 | title={Bottom-up human pose estimation via disentangled keypoint regression},
11 | author={Geng, Zigang and Sun, Ke and Xiao, Bin and Zhang, Zhaoxiang and Wang, Jingdong},
12 | booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
13 | pages={14676--14686},
14 | year={2021}
15 | }
16 | ```
17 |
18 |
19 |
20 | DEKR is a popular 2D bottom-up pose estimation approach that simultaneously detects all the instances and regresses the offsets from the instance centers to joints.
21 |
22 | In order to predict the offsets more accurately, the offsets of different joints are regressed by separate branches with deformable convolutional layers, so that convolution kernels with different shapes are adopted to extract features for the corresponding joint.
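A toy decoding sketch of this center-plus-offset design (illustrative; the real logic lives in the DEKR head and handles multiple instances and rescoring):

```python
import numpy as np

def decode_one_instance(center_heatmap, offset_maps):
    """center_heatmap: (H, W) array of center scores.
    offset_maps: (K, 2, H, W) array of per-joint (dx, dy) offsets.
    Returns the (K, 2) joint coordinates for the strongest center."""
    y, x = np.unravel_index(np.argmax(center_heatmap), center_heatmap.shape)
    offsets = offset_maps[:, :, y, x]      # (K, 2) offsets at the center
    return np.array([x, y]) + offsets      # joints = center + offsets
```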
23 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/dekr/crowdpose/hrnet_crowdpose.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-300e_crowdpose-512x512.py
3 | In Collection: DEKR
4 | Metadata:
5 | Architecture: &id001
6 | - DEKR
7 | - HRNet
8 | Training Data: CrowdPose
9 | Name: dekr_hrnet-w32_8xb10-300e_crowdpose-512x512
10 | Results:
11 | - Dataset: CrowdPose
12 | Metrics:
13 | AP: 0.663
14 | AP@0.5: 0.857
15 | AP@0.75: 0.714
16 | AP (E): 0.74
17 | AP (M): 0.671
18 | AP (L): 0.576
19 | Task: Body 2D Keypoint
20 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w32_8xb10-140e_crowdpose-512x512_147bae97-20221228.pth
21 | - Config: configs/body_2d_keypoint/dekr/crowdpose/dekr_hrnet-w48_8xb5-300e_crowdpose-640x640.py
22 | In Collection: DEKR
23 | Metadata:
24 | Architecture: *id001
25 | Training Data: CrowdPose
26 | Name: dekr_hrnet-w48_8xb5-300e_crowdpose-640x640
27 | Results:
28 | - Dataset: CrowdPose
29 | Metrics:
30 | AP: 0.679
31 | AP@0.5: 0.869
32 | AP@0.75: 0.731
33 | AP (E): 0.753
34 | AP (M): 0.688
35 | AP (L): 0.593
36 | Task: Body 2D Keypoint
37 | Weights: https://download.openmmlab.com/mmpose/bottom_up/dekr/hrnet_w48_crowdpose_640x640-ef6b6040_20220930.pth
38 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/integral_regression/README.md:
--------------------------------------------------------------------------------
1 | # Top-down integral-regression-based pose estimation
2 |
3 | Top-down methods divide the task into two stages: object detection, followed by single-object pose estimation given object bounding boxes. At the second stage, integral-regression-based methods use a simple integral operation that relates and unifies the heatmap and joint regression differentiably, thus obtaining the keypoint coordinates from the features extracted within the bounding box area, following the paradigm introduced in [Integral Human Pose Regression](https://arxiv.org/abs/1711.08229).
4 |
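The integral operation is a soft-argmax: normalize the heatmap into a probability map and take the expectation of the pixel coordinates, which is differentiable end to end. A minimal NumPy sketch:

```python
import numpy as np

def soft_argmax(heatmap):
    """Differentiable keypoint decoding from a (H, W) heatmap:
    softmax over all pixels, then the expected (x, y) coordinate."""
    h, w = heatmap.shape
    prob = np.exp(heatmap - heatmap.max())
    prob /= prob.sum()                            # softmax over the map
    ys, xs = np.mgrid[0:h, 0:w]
    return (prob * xs).sum(), (prob * ys).sum()   # E[x], E[y]
```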
5 | ## Results and Models
6 |
7 | ### COCO Dataset
8 |
9 | Results on COCO val2017 with a detector having a human AP of 56.4 on the COCO val2017 dataset
10 |
11 | | Model | Input Size | AP | AR | Details and Download |
12 | | :------------------: | :--------: | :---: | :---: | :---------------------------------------------------: |
13 | | ResNet-50+Debias-IPR | 256x256 | 0.675 | 0.765 | [resnet_debias_coco.md](./coco/resnet_debias_coco.md) |
14 | | ResNet-50+DSNT | 256x256 | 0.674 | 0.764 | [resnet_dsnt_coco.md](./coco/resnet_dsnt_coco.md) |
15 | | ResNet-50+IPR | 256x256 | 0.633 | 0.730 | [resnet_ipr_coco.md](./coco/resnet_ipr_coco.md) |
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/integral_regression/coco/resnet_debias_coco.yml:
--------------------------------------------------------------------------------
1 | Collections:
2 | - Name: DebiasIPR
3 | Paper:
4 | Title: Removing the Bias of Integral Pose Regression
5 | URL: https://openaccess.thecvf.com/content/ICCV2021/papers/Gu_Removing_the_Bias_of_Integral_Pose_Regression_ICCV_2021_paper.pdf
6 | README: https://github.com/open-mmlab/mmpose/blob/main/docs/src/papers/algorithms/debias_ipr.md
7 | Models:
8 | - Config: configs/body_2d_keypoint/integral_regression/coco/ipr_res50_debias-8xb64-210e_coco-256x256.py
9 | In Collection: DebiasIPR
10 | Metadata:
11 | Architecture: &id001
12 | - Debias
13 | - ResNet
14 | Training Data: COCO
15 | Name: ipr_res50_debias-8xb64-210e_coco-256x256
16 | Results:
17 | - Dataset: COCO
18 | Metrics:
19 | AP: 0.675
20 | AP@0.5: 0.872
21 | AP@0.75: 0.74
22 | AR: 0.765
23 | AR@0.5: 0.928
24 | Task: Body 2D Keypoint
25 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/integral_regression/coco/ipr_res50_debias-8xb64-210e_coco-256x256-055a7699_20220913.pth
26 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/integral_regression/coco/resnet_dsnt_coco.yml:
--------------------------------------------------------------------------------
1 | Collections:
2 | - Name: DSNT
3 | Paper:
4 | Title: Numerical Coordinate Regression with Convolutional Neural Networks
5 | URL: https://arxiv.org/abs/1801.07372v2
6 | README: https://github.com/open-mmlab/mmpose/blob/main/docs/src/papers/algorithms/dsnt.md
7 | Models:
8 | - Config: configs/body_2d_keypoint/integral_regression/coco/ipr_res50_dsnt-8xb64-210e_coco-256x256.py
9 | In Collection: DSNT
10 | Metadata:
11 | Architecture: &id001
12 | - DSNT
13 | - ResNet
14 | Training Data: COCO
15 | Name: ipr_res50_dsnt-8xb64-210e_coco-256x256
16 | Results:
17 | - Dataset: COCO
18 | Metrics:
19 | AP: 0.674
20 | AP@0.5: 0.87
21 | AP@0.75: 0.744
22 | AR: 0.764
23 | AR@0.5: 0.928
24 | Task: Body 2D Keypoint
25 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/integral_regression/coco/ipr_res50_dsnt-8xb64-210e_coco-256x256-441eedc0_20220913.pth
26 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/integral_regression/coco/resnet_ipr_coco.yml:
--------------------------------------------------------------------------------
1 | Collections:
2 | - Name: IPR
3 | Paper:
4 | Title: Integral human pose regression
5 | URL: https://arxiv.org/abs/1711.08229
6 | README: https://github.com/open-mmlab/mmpose/blob/main/docs/src/papers/algorithms/ipr.md
7 | Models:
8 | - Config: configs/body_2d_keypoint/integral_regression/coco/ipr_res50_8xb64-210e_coco-256x256.py
9 | In Collection: IPR
10 | Metadata:
11 | Architecture: &id001
12 | - IPR
13 | - ResNet
14 | Training Data: COCO
15 | Name: ipr_res50_8xb64-210e_coco-256x256
16 | Results:
17 | - Dataset: COCO
18 | Metrics:
19 | AP: 0.633
20 | AP@0.5: 0.86
21 | AP@0.75: 0.703
22 | AR: 0.73
23 | AR@0.5: 0.919
24 | Task: Body 2D Keypoint
25 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/integral_regression/coco/ipr_res50_8xb64-210e_coco-256x256-a3898a33_20220913.pth
26 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/rtmpose/crowdpose/rtmpose_crowdpose.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/rtmpose/crowdpose/rtmpose-m_8xb64-210e_crowdpose-256x192.py
3 | In Collection: RTMPose
4 | Metadata:
5 | Architecture:
6 | - RTMPose
7 | Training Data: CrowdPose
8 | Name: rtmpose-m_8xb64-210e_crowdpose-256x192
9 | Results:
10 | - Dataset: CrowdPose
11 | Metrics:
12 | AP: 0.706
13 | AP@0.5: 0.841
14 | AP@0.75: 0.765
15 | AP (E): 0.799
16 | AP (M): 0.719
17 | AP (L): 0.582
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-crowdpose_pt-aic-coco_210e-256x192-e6192cac_20230224.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/rtmpose/mpii/rtmpose_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/rtmpose/mpii/rtmpose-m_8xb64-210e_mpii-256x256.py
3 | In Collection: RTMPose
4 | Metadata:
5 | Architecture:
6 | - RTMPose
7 | Training Data: MPII
8 | Name: rtmpose-m_8xb64-210e_mpii-256x256
9 | Results:
10 | - Dataset: MPII
11 | Metrics:
12 | Mean: 0.907
13 | Mean@0.1: 0.348
14 | Task: Body 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-mpii_pt-aic-coco_210e-256x256-ec4dbec8_20230206.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/simcc/coco/mobilenetv2_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/simcc/coco/simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192.py
3 | In Collection: SimCC
4 | Metadata:
5 | Architecture: &id001
6 | - SimCC
7 | - MobilenetV2
8 | Training Data: COCO
9 | Name: simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.62
14 | AP@0.5: 0.855
15 | AP@0.75: 0.697
16 | AR: 0.678
17 | AR@0.5: 0.902
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/simcc/coco/simcc_mobilenetv2_wo-deconv-8xb64-210e_coco-256x192-4b0703bb_20221010.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/simcc/coco/vipnas_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/simcc/coco/simcc_vipnas-mbv3_8xb64-210e_coco-256x192.py
3 | In Collection: SimCC
4 | Metadata:
5 | Architecture: &id001
6 | - SimCC
7 | - ViPNAS
8 | Training Data: COCO
9 | Name: simcc_vipnas-mbv3_8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.695
14 | AP@0.5: 0.883
15 | AP@0.75: 0.772
16 | AR: 0.755
17 | AR@0.5: 0.927
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/simcc/coco/simcc_vipnas-mbv3_8xb64-210e_coco-256x192-719f3489_20220922.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/aic/hrnet_aic.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/aic/td-hm_hrnet-w32_8xb64-210e_aic-256x192.py
3 | In Collection: HRNet
4 | Metadata:
5 | Architecture:
6 | - HRNet
7 | Training Data: AI Challenger
8 | Name: td-hm_hrnet-w32_8xb64-210e_aic-256x192
9 | Results:
10 | - Dataset: AI Challenger
11 | Metrics:
12 | AP: 0.323
13 | AP@0.5: 0.761
14 | AP@0.75: 0.218
15 | AR: 0.366
16 | AR@0.5: 0.789
17 | Task: Body 2D Keypoint
18 | Weights: https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_aic_256x192-30a4e465_20200826.pth
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/aic/resnet_aic.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/aic/td-hm_res101_8xb64-210e_aic-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: AI Challenger
9 | Name: td-hm_res101_8xb64-210e_aic-256x192
10 | Results:
11 | - Dataset: AI Challenger
12 | Metrics:
13 | AP: 0.294
14 | AP@0.5: 0.736
15 | AP@0.75: 0.172
16 | AR: 0.337
17 | AR@0.5: 0.762
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/top_down/resnet/res101_aic_256x192-79b35445_20200826.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/alexnet_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_alexnet_8xb64-210e_coco-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - AlexNet
8 | Training Data: COCO
9 | Name: td-hm_alexnet_8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.448
14 | AP@0.5: 0.767
15 | AP@0.75: 0.461
16 | AR: 0.521
17 | AR@0.5: 0.829
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/top_down/alexnet/alexnet_coco_256x192-a7b1fd15_20200727.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/mobilenetv2_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_mobilenetv2_8xb64-210e_coco-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture: &id001
6 | - SimpleBaseline2D
7 | - MobilenetV2
8 | Training Data: COCO
9 | Name: td-hm_mobilenetv2_8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.648
14 | AP@0.5: 0.874
15 | AP@0.75: 0.725
16 | AR: 0.709
17 | AR@0.5: 0.918
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_mobilenetv2_8xb64-210e_coco-256x192-55a04c35_20221016.pth
20 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_mobilenetv2_8xb64-210e_coco-384x288.py
21 | In Collection: SimpleBaseline2D
22 | Metadata:
23 | Architecture: *id001
24 | Training Data: COCO
25 | Name: td-hm_mobilenetv2_8xb64-210e_coco-384x288
26 | Results:
27 | - Dataset: COCO
28 | Metrics:
29 | AP: 0.677
30 | AP@0.5: 0.882
31 | AP@0.75: 0.746
32 | AR: 0.734
33 | AR@0.5: 0.920
34 | Task: Body 2D Keypoint
35 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_mobilenetv2_8xb64-210e_coco-384x288-d3ab1457_20221013.pth
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/pvt_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_pvt-s_8xb64-210e_coco-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture: &id001
6 | - SimpleBaseline2D
7 | - PVT
8 | Training Data: COCO
9 | Name: td-hm_pvt-s_8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.714
14 | AP@0.5: 0.896
15 | AP@0.75: 0.794
16 | AR: 0.773
17 | AR@0.5: 0.936
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/top_down/pvt/pvt_small_coco_256x192-4324a49d_20220501.pth
20 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_pvtv2-b2_8xb64-210e_coco-256x192.py
21 | In Collection: SimpleBaseline2D
22 | Metadata:
23 | Architecture: *id001
24 | Training Data: COCO
25 | Name: td-hm_pvtv2-b2_8xb64-210e_coco-256x192
26 | Results:
27 | - Dataset: COCO
28 | Metrics:
29 | AP: 0.737
30 | AP@0.5: 0.905
31 | AP@0.75: 0.812
32 | AR: 0.791
33 | AR@0.5: 0.942
34 | Task: Body 2D Keypoint
35 | Weights: https://download.openmmlab.com/mmpose/top_down/pvt/pvtv2_b2_coco_256x192-b4212737_20220501.pth
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/shufflenetv1_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv1_8xb64-210e_coco-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture: &id001
6 | - SimpleBaseline2D
7 | - ShufflenetV1
8 | Training Data: COCO
9 | Name: td-hm_shufflenetv1_8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.587
14 | AP@0.5: 0.849
15 | AP@0.75: 0.654
16 | AR: 0.654
17 | AR@0.5: 0.896
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv1_8xb64-210e_coco-256x192-7a7ea4f4_20221013.pth
20 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv1_8xb64-210e_coco-384x288.py
21 | In Collection: SimpleBaseline2D
22 | Metadata:
23 | Architecture: *id001
24 | Training Data: COCO
25 | Name: td-hm_shufflenetv1_8xb64-210e_coco-384x288
26 | Results:
27 | - Dataset: COCO
28 | Metrics:
29 | AP: 0.626
30 | AP@0.5: 0.862
31 | AP@0.75: 0.696
32 | AR: 0.687
33 | AR@0.5: 0.903
34 | Task: Body 2D Keypoint
35 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv1_8xb64-210e_coco-384x288-8342f8ba_20221013.pth
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/shufflenetv2_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv2_8xb64-210e_coco-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture: &id001
6 | - SimpleBaseline2D
7 | - ShufflenetV2
8 | Training Data: COCO
9 | Name: td-hm_shufflenetv2_8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.602
14 | AP@0.5: 0.857
15 | AP@0.75: 0.672
16 | AR: 0.668
17 | AR@0.5: 0.902
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv2_8xb64-210e_coco-256x192-51fb931e_20221014.pth
20 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv2_8xb64-210e_coco-384x288.py
21 | In Collection: SimpleBaseline2D
22 | Metadata:
23 | Architecture: *id001
24 | Training Data: COCO
25 | Name: td-hm_shufflenetv2_8xb64-210e_coco-384x288
26 | Results:
27 | - Dataset: COCO
28 | Metrics:
29 | AP: 0.638
30 | AP@0.5: 0.866
31 | AP@0.75: 0.707
32 | AR: 0.699
33 | AR@0.5: 0.91
34 | Task: Body 2D Keypoint
35 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_heatmap/coco/td-hm_shufflenetv2_8xb64-210e_coco-384x288-d30ab55c_20221014.pth
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_hrnet-w32_fp16-8xb64-210e_coco-256x192.py:
--------------------------------------------------------------------------------
1 | _base_ = ['./td-hm_hrnet-w32_8xb64-210e_coco-256x192.py']
2 |
3 | # fp16 settings
4 | optim_wrapper = dict(
5 | type='AmpOptimWrapper',
6 | loss_scale='dynamic',
7 | )
8 |
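`AmpOptimWrapper` with `loss_scale='dynamic'` turns on automatic mixed precision with dynamic loss scaling. For reference, the equivalent raw-PyTorch training step looks roughly like this (a sketch of the underlying mechanism, not the MMEngine internals):

```python
import torch

scaler = torch.cuda.amp.GradScaler()  # dynamic loss scaling

def train_step(model, optimizer, criterion, inputs, targets):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():    # forward pass in reduced precision
        loss = criterion(model(inputs), targets)
    scaler.scale(loss).backward()      # scale the loss to avoid fp16 underflow
    scaler.step(optimizer)             # unscales gradients, then steps
    scaler.update()                    # grow/shrink the scale dynamically
    return loss.item()
```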
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_res50_fp16-8xb64-210e_coco-256x192.py:
--------------------------------------------------------------------------------
1 | _base_ = ['./td-hm_res50_8xb64-210e_coco-256x192.py']
2 |
3 | # fp16 settings
4 | optim_wrapper = dict(
5 | type='AmpOptimWrapper',
6 | loss_scale='dynamic',
7 | )
8 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/coco/vgg_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/coco/td-hm_vgg16-bn_8xb64-210e_coco-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - VGG
8 | Training Data: COCO
9 | Name: td-hm_vgg16-bn_8xb64-210e_coco-256x192
10 | Results:
11 | - Dataset: COCO
12 | Metrics:
13 | AP: 0.699
14 | AP@0.5: 0.89
15 | AP@0.75: 0.769
16 | AR: 0.754
17 | AR@0.5: 0.927
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/top_down/vgg/vgg16_bn_coco_256x192-7e7c58d6_20210517.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/crowdpose/cspnext_udp_crowdpose.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/crowdpose/cspnext-m_udp_8xb64-210e_crowpose-256x192.py
3 | In Collection: UDP
4 | Metadata:
5 | Architecture:
6 | - UDP
7 | - CSPNeXt
8 | Training Data: CrowdPose
9 | Name: cspnext-m_udp_8xb64-210e_crowpose-256x192
10 | Results:
11 | - Dataset: CrowdPose
12 | Metrics:
13 | AP: 0.662
14 | AP (E): 0.759
15 | AP (H): 0.539
16 | AP (M): 0.675
17 | AP@0.5: 0.821
18 | AP@0.75: 0.723
19 | Task: Body 2D Keypoint
20 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-crowdpose_pt-in1k_210e-256x192-f591079f_20230123.pth
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/crowdpose/hrnet_crowdpose.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/crowdpose/td-hm_hrnet-w32_8xb64-210e_crowdpose-256x192.py
3 | In Collection: HRNet
4 | Metadata:
5 | Architecture:
6 | - HRNet
7 | Training Data: CrowdPose
8 | Name: td-hm_hrnet-w32_8xb64-210e_crowdpose-256x192
9 | Results:
10 | - Dataset: CrowdPose
11 | Metrics:
12 | AP: 0.675
13 | AP (E): 0.77
14 | AP (H): 0.553
15 | AP (M): 0.687
16 | AP@0.5: 0.825
17 | AP@0.75: 0.729
18 | Task: Body 2D Keypoint
19 | Weights: https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_crowdpose_256x192-960be101_20201227.pth
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/cpm_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_cpm_8xb64-210e_mpii-368x368.py
3 | In Collection: CPM
4 | Metadata:
5 | Architecture:
6 | - CPM
7 | Training Data: MPII
8 | Name: td-hm_cpm_8xb64-210e_mpii-368x368
9 | Results:
10 | - Dataset: MPII
11 | Metrics:
12 | Mean: 0.876
13 | Mean@0.1: 0.285
14 | Task: Body 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/top_down/cpm/cpm_mpii_368x368-116e62b8_20200822.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/cspnext_udp_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/cspnext-m_udp_8xb64-210e_mpii-256x256.py
3 | In Collection: UDP
4 | Metadata:
5 | Architecture:
6 | - UDP
7 | - CSPNeXt
8 | Training Data: MPII
9 | Name: cspnext-m_udp_8xb64-210e_mpii-256x256
10 | Results:
11 | - Dataset: MPII
12 | Metrics:
13 | Mean: 0.902
14 | Mean@0.1: 0.303
15 | Task: Body 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-mpii_pt-in1k_210e-256x256-68d0402f_20230208.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/hourglass_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_hourglass52_8xb64-210e_mpii-256x256.py
3 | In Collection: Hourglass
4 | Metadata:
5 | Architecture: &id001
6 | - Hourglass
7 | Training Data: MPII
8 | Name: td-hm_hourglass52_8xb64-210e_mpii-256x256
9 | Results:
10 | - Dataset: MPII
11 | Metrics:
12 | Mean: 0.889
13 | Mean@0.1: 0.317
14 | Task: Body 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/top_down/hourglass/hourglass52_mpii_256x256-ae358435_20200812.pth
16 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_hourglass52_8xb32-210e_mpii-384x384.py
17 | In Collection: Hourglass
18 | Metadata:
19 | Architecture: *id001
20 | Training Data: MPII
21 | Name: td-hm_hourglass52_8xb32-210e_mpii-384x384
22 | Results:
23 | - Dataset: MPII
24 | Metrics:
25 | Mean: 0.894
26 | Mean@0.1: 0.367
27 | Task: Body 2D Keypoint
28 | Weights: https://download.openmmlab.com/mmpose/top_down/hourglass/hourglass52_mpii_384x384-04090bc3_20200812.pth
29 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/hrnet_dark_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_hrnet-w32_dark-8xb64-210e_mpii-256x256.py
3 | In Collection: DarkPose
4 | Metadata:
5 | Architecture: &id001
6 | - HRNet
7 | - DarkPose
8 | Training Data: MPII
9 | Name: td-hm_hrnet-w32_dark-8xb64-210e_mpii-256x256
10 | Results:
11 | - Dataset: MPII
12 | Metrics:
13 | Mean: 0.904
14 | Mean@0.1: 0.354
15 | Task: Body 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_mpii_256x256_dark-f1601c5b_20200927.pth
17 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_hrnet-w48_dark-8xb64-210e_mpii-256x256.py
18 | In Collection: DarkPose
19 | Metadata:
20 | Architecture: *id001
21 | Training Data: MPII
22 | Name: td-hm_hrnet-w48_dark-8xb64-210e_mpii-256x256
23 | Results:
24 | - Dataset: MPII
25 | Metrics:
26 | Mean: 0.905
27 | Mean@0.1: 0.36
28 | Task: Body 2D Keypoint
29 | Weights: https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w48_mpii_256x256_dark-0decd39f_20200927.pth
30 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/hrnet_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_hrnet-w32_8xb64-210e_mpii-256x256.py
3 | In Collection: HRNet
4 | Metadata:
5 | Architecture: &id001
6 | - HRNet
7 | Training Data: MPII
8 | Name: td-hm_hrnet-w32_8xb64-210e_mpii-256x256
9 | Results:
10 | - Dataset: MPII
11 | Metrics:
12 | Mean: 0.9
13 | Mean@0.1: 0.334
14 | Task: Body 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w32_mpii_256x256-6c4f923f_20200812.pth
16 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_hrnet-w48_8xb64-210e_mpii-256x256.py
17 | In Collection: HRNet
18 | Metadata:
19 | Architecture: *id001
20 | Training Data: MPII
21 | Name: td-hm_hrnet-w48_8xb64-210e_mpii-256x256
22 | Results:
23 | - Dataset: MPII
24 | Metrics:
25 | Mean: 0.901
26 | Mean@0.1: 0.337
27 | Task: Body 2D Keypoint
28 | Weights: https://download.openmmlab.com/mmpose/top_down/hrnet/hrnet_w48_mpii_256x256-92cab7bd_20200812.pth
29 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/litehrnet_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_litehrnet-18_8xb64-210e_mpii-256x256.py
3 | In Collection: LiteHRNet
4 | Metadata:
5 | Architecture: &id001
6 | - LiteHRNet
7 | Training Data: MPII
8 | Name: td-hm_litehrnet-18_8xb64-210e_mpii-256x256
9 | Results:
10 | - Dataset: MPII
11 | Metrics:
12 | Mean: 0.859
13 | Mean@0.1: 0.26
14 | Task: Body 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/top_down/litehrnet/litehrnet18_mpii_256x256-cabd7984_20210623.pth
16 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_litehrnet-30_8xb64-210e_mpii-256x256.py
17 | In Collection: LiteHRNet
18 | Metadata:
19 | Architecture: *id001
20 | Training Data: MPII
21 | Name: td-hm_litehrnet-30_8xb64-210e_mpii-256x256
22 | Results:
23 | - Dataset: MPII
24 | Metrics:
25 | Mean: 0.869
26 | Mean@0.1: 0.271
27 | Task: Body 2D Keypoint
28 | Weights: https://download.openmmlab.com/mmpose/top_down/litehrnet/litehrnet30_mpii_256x256-faae8bd8_20210622.pth
29 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/mobilenetv2_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_mobilenetv2_8xb64-210e_mpii-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - MobilenetV2
8 | Training Data: MPII
9 | Name: td-hm_mobilenetv2_8xb64-210e_mpii-256x256
10 | Results:
11 | - Dataset: MPII
12 | Metrics:
13 | Mean: 0.854
14 | Mean@0.1: 0.234
15 | Task: Body 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/top_down/mobilenetv2/mobilenetv2_mpii_256x256-e068afa7_20200812.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/resnext_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_resnext152_8xb32-210e_mpii-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ResNext
8 | Training Data: MPII
9 | Name: td-hm_resnext152_8xb32-210e_mpii-256x256
10 | Results:
11 | - Dataset: MPII
12 | Metrics:
13 | Mean: 0.887
14 | Mean@0.1: 0.294
15 | Task: Body 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/top_down/resnext/resnext152_mpii_256x256-df302719_20200927.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/scnet_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_scnet50_8xb64-210e_mpii-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture: &id001
6 | - SimpleBaseline2D
7 | - SCNet
8 | Training Data: MPII
9 | Name: td-hm_scnet50_8xb64-210e_mpii-256x256
10 | Results:
11 | - Dataset: MPII
12 | Metrics:
13 | Mean: 0.888
14 | Mean@0.1: 0.29
15 | Task: Body 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/top_down/scnet/scnet50_mpii_256x256-a54b6af5_20200812.pth
17 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_scnet101_8xb64-210e_mpii-256x256.py
18 | In Collection: SimpleBaseline2D
19 | Metadata:
20 | Architecture: *id001
21 | Training Data: MPII
22 | Name: td-hm_scnet101_8xb64-210e_mpii-256x256
23 | Results:
24 | - Dataset: MPII
25 | Metrics:
26 | Mean: 0.887
27 | Mean@0.1: 0.293
28 | Task: Body 2D Keypoint
29 | Weights: https://download.openmmlab.com/mmpose/top_down/scnet/scnet101_mpii_256x256-b4c2d184_20200812.pth
30 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/shufflenetv1_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_shufflenetv1_8xb64-210e_mpii-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ShufflenetV1
8 | Training Data: MPII
9 | Name: td-hm_shufflenetv1_8xb64-210e_mpii-256x256
10 | Results:
11 | - Dataset: MPII
12 | Metrics:
13 | Mean: 0.824
14 | Mean@0.1: 0.195
15 | Task: Body 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/top_down/shufflenetv1/shufflenetv1_mpii_256x256-dcc1c896_20200925.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/mpii/shufflenetv2_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/mpii/td-hm_shufflenetv2_8xb64-210e_mpii-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ShufflenetV2
8 | Training Data: MPII
9 | Name: td-hm_shufflenetv2_8xb64-210e_mpii-256x256
10 | Results:
11 | - Dataset: MPII
12 | Metrics:
13 | Mean: 0.828
14 | Mean@0.1: 0.205
15 | Task: Body 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/top_down/shufflenetv2/shufflenetv2_mpii_256x256-4fb9df2d_20200925.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_heatmap/posetrack18/resnet_posetrack18.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_heatmap/posetrack18/td-hm_res50_8xb64-20e_posetrack18-256x192.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture: &id001
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: PoseTrack18
9 | Name: td-hm_res50_8xb64-20e_posetrack18-256x192
10 | Results:
11 | - Dataset: PoseTrack18
12 | Metrics:
13 | Ankl: 74.2
14 | Elb: 82.5
15 | Head: 86.5
16 | Hip: 80.1
17 | Knee: 78.8
18 | Shou: 87.7
19 | Total: 81.2
20 | Wri: 75.8
21 | Task: Body 2D Keypoint
22 | Weights: https://download.openmmlab.com/mmpose/top_down/resnet/res50_posetrack18_256x192-a62807c7_20201028.pth
23 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_regression/coco/mobilenetv2_rle_coco.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_regression/coco/td-reg_mobilenetv2_rle-pretrained-8xb64-210e_coco-256x192.py
3 | In Collection: RLE
4 | Metadata:
5 | Architecture: &id001
6 | - DeepPose
7 | - RLE
8 | - MobileNet
9 | Training Data: COCO
10 | Name: td-reg_mobilenetv2_rle-pretrained-8xb64-210e_coco-256x192
11 | Results:
12 | - Dataset: COCO
13 | Metrics:
14 | AP: 0.593
15 | AP@0.5: 0.836
16 | AP@0.75: 0.66
17 | AR: 0.644
18 | AR@0.5: 0.877
19 | Task: Body 2D Keypoint
20 | Weights: https://download.openmmlab.com/mmpose/v1/body_2d_keypoint/topdown_regression/coco/td-reg_mobilenetv2_rle-pretrained-8xb64-210e_coco-256x192-39b73bd5_20220922.pth
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_2d_keypoint/topdown_regression/mpii/resnet_rle_mpii.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/body_2d_keypoint/topdown_regression/mpii/td-reg_res50_rle-8xb64-210e_mpii-256x256.py
3 | In Collection: RLE
4 | Metadata:
5 | Architecture:
6 | - DeepPose
7 | - RLE
8 | - ResNet
9 | Training Data: MPII
10 | Name: td-reg_res50_rle-8xb64-210e_mpii-256x256
11 | Results:
12 | - Dataset: MPII
13 | Metrics:
14 | Mean: 0.861
15 | Mean@0.1: 0.277
16 | Task: Body 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/top_down/deeppose/deeppose_res50_mpii_256x256_rle-5f92a619_20220504.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/body_3d_keypoint/README.md:
--------------------------------------------------------------------------------
1 | # Human Body 3D Pose Estimation
2 |
3 | 3D human body pose estimation aims at predicting the X, Y, Z coordinates of human body joints. Based on the number of cameras used to capture the images or videos, existing works can be further divided into multi-view methods and single-view (monocular) methods.
4 |
5 | ## Data preparation
6 |
7 | Please follow [DATA Preparation](/docs/en/dataset_zoo/3d_body_keypoint.md) to prepare data.
8 |
9 | ## Demo
10 |
11 | Please follow [Demo](/demo/docs/en/3d_human_pose_demo.md) to run demos.
12 |
13 | 
14 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/README.md:
--------------------------------------------------------------------------------
1 | # 2D Face Landmark Detection
2 |
3 | 2D face landmark detection (also referred to as face alignment) is defined as the task of detecting the face keypoints from an input image.
4 |
5 | Normally, the input images are cropped face images, where the face is located at the center;
6 | or the rough location (or the bounding box) of the face is provided.
7 |
8 | ## Data preparation
9 |
10 | Please follow [DATA Preparation](/docs/en/dataset_zoo/2d_face_keypoint.md) to prepare data.
11 |
12 | ## Demo
13 |
14 | Please follow [Demo](/demo/docs/en/2d_face_demo.md) to run demos.
15 |
16 | 
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose_coco_wholebody_face.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/rtmpose/coco_wholebody_face/rtmpose-m_8xb32-60e_coco-wholebody-face-256x256.py
3 | In Collection: RTMPose
4 | Metadata:
5 | Architecture:
6 | - RTMPose
7 | Training Data: COCO-WholeBody-Face
8 | Name: rtmpose-m_8xb32-60e_coco-wholebody-face-256x256
9 | Results:
10 | - Dataset: COCO-WholeBody-Face
11 | Metrics:
12 | NME: 0.0466
13 | Task: Face 2D Keypoint
14 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody-face_pt-aic-coco_60e-256x256-62026ef2_20230228.pth
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/rtmpose/lapa/rtmpose_lapa.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/rtmpose/lapa/rtmpose-m_8xb64-120e_lapa-256x256.py
3 | In Collection: RTMPose
4 | Alias: face
5 | Metadata:
6 | Architecture:
7 | - RTMPose
8 | Training Data: LaPa
9 | Name: rtmpose-m_8xb64-120e_lapa-256x256
10 | Results:
11 | - Dataset: LaPa
12 | Metrics:
13 | NME: 1.29
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-lapa_pt-aic-coco_120e-256x256-762b1ae2_20230422.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/rtmpose/wflw/rtmpose_wflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/rtmpose/wflw/rtmpose-m_8xb64-60e_wflw-256x256.py
3 | In Collection: RTMPose
4 | Alias: face
5 | Metadata:
6 | Architecture:
7 | - RTMPose
8 | Training Data: WFLW
9 | Name: rtmpose-m_8xb64-60e_wflw-256x256
10 | Results:
11 | - Dataset: WFLW
12 | Metrics:
13 | NME: 4.01
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-wflw_pt-aic-coco_60e-256x256-dc1dcdcf_20230228.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/300w/hrnetv2_300w.yml:
--------------------------------------------------------------------------------
1 | Collections:
2 | - Name: HRNetv2
3 | Paper:
4 | Title: Deep High-Resolution Representation Learning for Visual Recognition
5 | URL: https://ieeexplore.ieee.org/abstract/document/9052469/
6 | README: https://github.com/open-mmlab/mmpose/blob/main/docs/src/papers/backbones/hrnetv2.md
7 | Models:
8 | - Config: configs/face_2d_keypoint/topdown_heatmap/300w/td-hm_hrnetv2-w18_8xb64-60e_300w-256x256.py
9 | In Collection: HRNetv2
10 | Metadata:
11 | Architecture:
12 | - HRNetv2
13 | Training Data: 300W
14 | Name: td-hm_hrnetv2-w18_8xb64-60e_300w-256x256
15 | Results:
16 | - Dataset: 300W
17 | Metrics:
18 | NME challenge: 5.64
19 | NME common: 2.92
20 | NME full: 3.45
21 | NME test: 4.1
22 | Task: Face 2D Keypoint
23 | Weights: https://download.openmmlab.com/mmpose/face/hrnetv2/hrnetv2_w18_300w_256x256-eea53406_20211019.pth
24 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/aflw/hrnetv2_aflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/aflw/td-hm_hrnetv2-w18_8xb64-60e_aflw-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | Training Data: AFLW
8 | Name: td-hm_hrnetv2-w18_8xb64-60e_aflw-256x256
9 | Results:
10 | - Dataset: AFLW
11 | Metrics:
12 | NME frontal: 1.27
13 | NME full: 1.41
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/face/hrnetv2/hrnetv2_w18_aflw_256x256-f2bbc62b_20210125.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/aflw/hrnetv2_dark_aflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/aflw/td-hm_hrnetv2-w18_dark-8xb64-60e_aflw-256x256.py
3 | In Collection: DarkPose
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - DarkPose
8 | Training Data: AFLW
9 | Name: td-hm_hrnetv2-w18_dark-8xb64-60e_aflw-256x256
10 | Results:
11 | - Dataset: AFLW
12 | Metrics:
13 | NME frontal: 1.19
14 | NME full: 1.34
15 | Task: Face 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/face/darkpose/hrnetv2_w18_aflw_256x256_dark-219606c0_20210125.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/hourglass_coco_wholebody_face.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/td-hm_hourglass52_8xb32-60e_coco-wholebody-face-256x256.py
3 | In Collection: Hourglass
4 | Metadata:
5 | Architecture:
6 | - Hourglass
7 | Training Data: COCO-WholeBody-Face
8 | Name: td-hm_hourglass52_8xb32-60e_coco-wholebody-face-256x256
9 | Results:
10 | - Dataset: COCO-WholeBody-Face
11 | Metrics:
12 | NME: 0.0587
13 | Task: Face 2D Keypoint
14 | Weights: https://download.openmmlab.com/mmpose/face/hourglass/hourglass52_coco_wholebody_face_256x256-6994cf2e_20210909.pth
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/hrnetv2_coco_wholebody_face.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/td-hm_hrnetv2-w18_8xb32-60e_coco-wholebody-face-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | Training Data: COCO-WholeBody-Face
8 | Name: td-hm_hrnetv2-w18_8xb32-60e_coco-wholebody-face-256x256
9 | Results:
10 | - Dataset: COCO-WholeBody-Face
11 | Metrics:
12 | NME: 0.0569
13 | Task: Face 2D Keypoint
14 | Weights: https://download.openmmlab.com/mmpose/face/hrnetv2/hrnetv2_w18_coco_wholebody_face_256x256-c1ca469b_20210909.pth
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/hrnetv2_dark_coco_wholebody_face.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/td-hm_hrnetv2-w18_dark-8xb32-60e_coco-wholebody-face-256x256.py
3 | In Collection: DarkPose
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - DarkPose
8 | Training Data: COCO-WholeBody-Face
9 | Name: td-hm_hrnetv2-w18_dark-8xb32-60e_coco-wholebody-face-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Face
12 | Metrics:
13 | NME: 0.0513
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/face/darkpose/hrnetv2_w18_coco_wholebody_face_256x256_dark-3d9a334e_20210909.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/mobilenetv2_coco_wholebody_face.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/td-hm_mobilenetv2_8xb32-60e_coco-wholebody-face-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - MobilenetV2
8 | Training Data: COCO-WholeBody-Face
9 | Name: td-hm_mobilenetv2_8xb32-60e_coco-wholebody-face-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Face
12 | Metrics:
13 | NME: 0.0611
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/face/mobilenetv2/mobilenetv2_coco_wholebody_face_256x256-4a3f096e_20210909.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/resnet_coco_wholebody_face.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/td-hm_res50_8xb32-60e_coco-wholebody-face-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: COCO-WholeBody-Face
9 | Name: td-hm_res50_8xb32-60e_coco-wholebody-face-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Face
12 | Metrics:
13 | NME: 0.0582
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/face/resnet/res50_coco_wholebody_face_256x256-5128edf5_20210909.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/scnet_coco_wholebody_face.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/coco_wholebody_face/td-hm_scnet50_8xb32-60e_coco-wholebody-face-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - SCNet
8 | Training Data: COCO-WholeBody-Face
9 | Name: td-hm_scnet50_8xb32-60e_coco-wholebody-face-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Face
12 | Metrics:
13 | NME: 0.0567
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/face/scnet/scnet50_coco_wholebody_face_256x256-a0183f5f_20210909.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/cofw/hrnetv2_cofw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/cofw/td-hm_hrnetv2-w18_8xb64-60e_cofw-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | Training Data: COFW
8 | Name: td-hm_hrnetv2-w18_8xb64-60e_cofw-256x256
9 | Results:
10 | - Dataset: COFW
11 | Metrics:
12 | NME: 3.48
13 | Task: Face 2D Keypoint
14 | Weights: https://download.openmmlab.com/mmpose/face/hrnetv2/hrnetv2_w18_cofw_256x256-49243ab8_20211019.pth
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/wflw/hrnetv2_awing_wflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/wflw/td-hm_hrnetv2-w18_awing-8xb64-60e_wflw-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - AdaptiveWingloss
8 | Training Data: WFLW
9 | Name: td-hm_hrnetv2-w18_awing-8xb64-60e_wflw-256x256
10 | Results:
11 | - Dataset: WFLW
12 | Metrics:
13 | NME blur: 4.59
14 | NME expression: 4.28
15 | NME illumination: 3.97
16 | NME makeup: 3.87
17 | NME occlusion: 4.78
18 | NME pose: 6.94
19 | NME test: 4.02
20 | Task: Face 2D Keypoint
21 | Weights: https://download.openmmlab.com/mmpose/face/hrnetv2/hrnetv2_w18_wflw_256x256_awing-5af5055c_20211212.pth
22 |
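The per-subset numbers above are NME, the normalized mean error: the mean Euclidean distance between predicted and ground-truth landmarks, divided by a per-image normalization term (inter-ocular distance for WFLW). A hedged numpy sketch under those assumptions; the eye-corner indices 60 and 72 are the commonly used outer corners of the 98-point WFLW layout, not something this file specifies:

```python
import numpy as np

def nme(pred, gt, norm):
    """Normalized Mean Error: mean landmark distance / normalization term.

    pred, gt: (N, K, 2) predicted / ground-truth landmarks.
    norm:     (N,) per-sample normalization, e.g. inter-ocular distance.
    """
    dists = np.linalg.norm(pred - gt, axis=-1)   # (N, K) per-landmark errors
    return float(np.mean(dists / norm[:, None]))

# Toy usage with synthetic data (real WFLW evaluation uses 98 landmarks).
rng = np.random.default_rng(0)
gt = rng.random((4, 98, 2)) * 256
pred = gt + rng.normal(scale=2.0, size=gt.shape)
iod = np.linalg.norm(gt[:, 60] - gt[:, 72], axis=-1)  # assumed eye-corner indices
print(nme(pred, gt, iod))
```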
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/wflw/hrnetv2_dark_wflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/wflw/td-hm_hrnetv2-w18_dark-8xb64-60e_wflw-256x256.py
3 | In Collection: DarkPose
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - DarkPose
8 | Training Data: WFLW
9 | Name: td-hm_hrnetv2-w18_dark-8xb64-60e_wflw-256x256
10 | Results:
11 | - Dataset: WFLW
12 | Metrics:
13 | NME blur: 4.56
14 | NME expression: 4.29
15 | NME illumination: 3.96
16 | NME makeup: 3.89
17 | NME occlusion: 4.78
18 | NME pose: 6.98
19 | NME test: 3.98
20 | Task: Face 2D Keypoint
21 | Weights: https://download.openmmlab.com/mmpose/face/darkpose/hrnetv2_w18_wflw_256x256_dark-3f8e0c2c_20210125.pth
22 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_heatmap/wflw/hrnetv2_wflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_heatmap/wflw/td-hm_hrnetv2-w18_8xb64-60e_wflw-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | Training Data: WFLW
8 | Name: td-hm_hrnetv2-w18_8xb64-60e_wflw-256x256
9 | Results:
10 | - Dataset: WFLW
11 | Metrics:
12 | NME blur: 4.58
13 | NME expression: 4.33
14 | NME illumination: 3.99
15 | NME makeup: 3.94
16 | NME occlusion: 4.83
17 | NME pose: 6.97
18 | NME test: 4.06
19 | Task: Face 2D Keypoint
20 | Weights: https://download.openmmlab.com/mmpose/face/hrnetv2/hrnetv2_w18_wflw_256x256-2bf032a6_20210125.pth
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_regression/wflw/resnet_softwingloss_wflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_regression/wflw/td-reg_res50_softwingloss_8xb64-210e_wflw-256x256.py
3 | In Collection: ResNet
4 | Metadata:
5 | Architecture:
6 | - DeepPose
7 | - ResNet
8 | - SoftWingloss
9 | Training Data: WFLW
10 | Name: td-reg_res50_softwingloss_8xb64-210e_wflw-256x256
11 | Results:
12 | - Dataset: WFLW
13 | Metrics:
14 | NME: 4.44
15 | Task: Face 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/face/deeppose/deeppose_res50_wflw_256x256_softwingloss-4d34f22a_20211212.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_regression/wflw/resnet_wflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_regression/wflw/td-reg_res50_8xb64-210e_wflw-256x256.py
3 | In Collection: ResNet
4 | Metadata:
5 | Architecture:
6 | - DeepPose
7 | - ResNet
8 | Training Data: WFLW
9 | Name: td-reg_res50_8xb64-210e_wflw-256x256
10 | Results:
11 | - Dataset: WFLW
12 | Metrics:
13 | NME: 4.88
14 | Task: Face 2D Keypoint
15 | Weights: https://download.openmmlab.com/mmpose/face/deeppose/deeppose_res50_wflw_256x256-92d0ba7f_20210303.pth
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/face_2d_keypoint/topdown_regression/wflw/resnet_wingloss_wflw.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/face_2d_keypoint/topdown_regression/wflw/td-reg_res50_wingloss_8xb64-210e_wflw-256x256.py
3 | In Collection: ResNet
4 | Metadata:
5 | Architecture:
6 | - DeepPose
7 | - ResNet
8 | - WingLoss
9 | Training Data: WFLW
10 | Name: td-reg_res50_wingloss_8xb64-210e_wflw-256x256
11 | Results:
12 | - Dataset: WFLW
13 | Metrics:
14 | NME: 4.67
15 | Task: Face 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/face/deeppose/deeppose_res50_wflw_256x256_wingloss-f82a5e53_20210303.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/fashion_2d_keypoint/README.md:
--------------------------------------------------------------------------------
1 | # 2D Fashion Landmark Detection
2 |
3 | 2D fashion landmark detection (also referred to as fashion alignment) aims to detect the keypoints located at the functional regions of clothes, for example, the neckline and the cuff.
4 |
5 | ## Data preparation
6 |
7 | Please follow [DATA Preparation](/docs/en/dataset_zoo/2d_fashion_landmark.md) to prepare data.
8 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/README.md:
--------------------------------------------------------------------------------
1 | # 2D Hand Pose Estimation
2 |
3 | 2D hand pose estimation is defined as the task of detecting the poses (or keypoints) of the hand from an input image.
4 |
5 | Normally, the input images are cropped hand images with the hand located at the center,
6 | or the rough location (i.e. the bounding box) of the hand is provided.
7 |
8 | ## Data preparation
9 |
10 | Please follow [DATA Preparation](/docs/en/dataset_zoo/2d_hand_keypoint.md) to prepare data.
11 |
12 | ## Demo
13 |
14 | Please follow [Demo](/demo/docs/en/2d_hand_demo.md) to run demos.
15 |
16 | 
17 |
18 | 
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/rtmpose/coco_wholebody_hand/rtmpose_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/rtmpose/coco_wholebody_hand/rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: RTMPose
4 | Alias: hand
5 | Metadata:
6 | Architecture:
7 | - RTMPose
8 | Training Data: COCO-WholeBody-Hand
9 | Name: rtmpose-m_8xb32-210e_coco-wholebody-hand-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Hand
12 | Metrics:
13 | AUC: 0.815
14 | EPE: 4.51
15 | PCK@0.2: 0.837
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-coco-wholebody-hand_pt-aic-coco_210e-256x256-99477206_20230228.pth
18 |
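The hand entries report three metrics: PCK@0.2 (fraction of keypoints within 0.2 of a normalization size, typically the bounding box), AUC (area under the PCK-vs-threshold curve), and EPE (mean end-point error in pixels). A hedged, illustrative sketch of these definitions; MMPose's own evaluators are the authoritative implementations, and the 20-pixel AUC range here is an assumption:

```python
import numpy as np

def hand_metrics(pred, gt, bbox_size, thr=0.2, auc_max=20.0):
    """Illustrative PCK@thr, AUC and EPE for 2D hand keypoints.

    pred, gt:  (N, K, 2) keypoints in pixels.
    bbox_size: (N,) normalization (e.g. hand bounding-box size) for PCK.
    """
    d = np.linalg.norm(pred - gt, axis=-1)               # (N, K) pixel errors
    epe = float(d.mean())                                # end-point error
    pck = float((d / bbox_size[:, None] <= thr).mean())  # PCK@thr
    # AUC: trapezoidal area under the PCK-vs-threshold curve on [0, auc_max] px.
    ts = np.linspace(0.0, auc_max, 21)
    curve = np.array([(d <= t).mean() for t in ts])
    auc = float(((curve[1:] + curve[:-1]) / 2 * np.diff(ts)).sum() / auc_max)
    return pck, auc, epe

rng = np.random.default_rng(0)
gt = rng.random((8, 21, 2)) * 100
pred = gt + rng.normal(scale=2.0, size=gt.shape)
print(hand_metrics(pred, gt, np.full(8, 100.0)))
```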
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/rtmpose/hand5/rtmpose_hand5.yml:
--------------------------------------------------------------------------------
1 | Collections:
2 | - Name: RTMPose
3 | Paper:
4 | Title: "RTMPose: Real-Time Multi-Person Pose Estimation based on MMPose"
5 | URL: https://arxiv.org/abs/2303.07399
6 | README: https://github.com/open-mmlab/mmpose/blob/main/projects/rtmpose/README.md
7 | Models:
8 | - Config: configs/hand_2d_keypoint/rtmpose/hand5/rtmpose-m_8xb256-210e_hand5-256x256.py
9 | In Collection: RTMPose
10 | Metadata:
11 | Architecture: &id001
12 | - RTMPose
13 | Training Data: &id002
14 | - COCO-WholeBody-Hand
15 | - OneHand10K
16 | - FreiHand2d
17 | - RHD2d
18 | - Halpe
19 | Name: rtmpose-m_8xb256-210e_hand5-256x256
20 | Results:
21 | - Dataset: Hand5
22 | Metrics:
23 | PCK@0.2: 0.964
24 | AUC: 0.839
25 | EPE: 5.06
26 | Task: Hand 2D Keypoint
27 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-hand5_pt-aic-coco_210e-256x256-74fb594_20230320.pth
28 |
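The `&id001` / `&id002` tokens above are YAML anchors: subsequent entries in the full file can reference the same `Architecture` and `Training Data` lists with `*id001` / `*id002` aliases instead of repeating them. A small PyYAML round-trip showing the mechanism (the document string below is illustrative, not a real model-index file):

```python
import yaml  # PyYAML

doc = """
Models:
- Name: model-a
  Architecture: &arch [RTMPose]
- Name: model-b
  Architecture: *arch   # alias: reuses the anchored list above
"""
models = yaml.safe_load(doc)['Models']
print(models[1]['Architecture'])  # ['RTMPose'], resolved from the anchor
```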
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/hourglass_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/td-hm_hourglass52_8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: Hourglass
4 | Metadata:
5 | Architecture:
6 | - Hourglass
7 | Training Data: COCO-WholeBody-Hand
8 | Name: td-hm_hourglass52_8xb32-210e_coco-wholebody-hand-256x256
9 | Results:
10 | - Dataset: COCO-WholeBody-Hand
11 | Metrics:
12 | AUC: 0.835
13 | EPE: 4.54
14 | PCK@0.2: 0.804
15 | Task: Hand 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/hand/hourglass/hourglass52_coco_wholebody_hand_256x256-7b05c6db_20210909.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/hrnetv2_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/td-hm_hrnetv2-w18_8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | Training Data: COCO-WholeBody-Hand
8 | Name: td-hm_hrnetv2-w18_8xb32-210e_coco-wholebody-hand-256x256
9 | Results:
10 | - Dataset: COCO-WholeBody-Hand
11 | Metrics:
12 | AUC: 0.84
13 | EPE: 4.39
14 | PCK@0.2: 0.813
15 | Task: Hand 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/hand/hrnetv2/hrnetv2_w18_coco_wholebody_hand_256x256-1c028db7_20210908.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/hrnetv2_dark_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/td-hm_hrnetv2-w18_dark-8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: DarkPose
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - DarkPose
8 | Training Data: COCO-WholeBody-Hand
9 | Name: td-hm_hrnetv2-w18_dark-8xb32-210e_coco-wholebody-hand-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Hand
12 | Metrics:
13 | AUC: 0.84
14 | EPE: 4.37
15 | PCK@0.2: 0.814
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/dark/hrnetv2_w18_coco_wholebody_hand_256x256_dark-a9228c9c_20210908.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/litehrnet_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/td-hm_litehrnet-w18_8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: LiteHRNet
4 | Metadata:
5 | Architecture:
6 | - LiteHRNet
7 | Training Data: COCO-WholeBody-Hand
8 | Name: td-hm_litehrnet-w18_8xb32-210e_coco-wholebody-hand-256x256
9 | Results:
10 | - Dataset: COCO-WholeBody-Hand
11 | Metrics:
12 | AUC: 0.83
13 | EPE: 4.77
14 | PCK@0.2: 0.795
15 | Task: Hand 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/hand/litehrnet/litehrnet_w18_coco_wholebody_hand_256x256-d6945e6a_20210908.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/mobilenetv2_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/td-hm_mobilenetv2_8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - MobilenetV2
8 | Training Data: COCO-WholeBody-Hand
9 | Name: td-hm_mobilenetv2_8xb32-210e_coco-wholebody-hand-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Hand
12 | Metrics:
13 | AUC: 0.829
14 | EPE: 4.77
15 | PCK@0.2: 0.795
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/mobilenetv2/mobilenetv2_coco_wholebody_hand_256x256-06b8c877_20210909.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/resnet_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/td-hm_res50_8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: COCO-WholeBody-Hand
9 | Name: td-hm_res50_8xb32-210e_coco-wholebody-hand-256x256
10 | Results:
11 | - Dataset: COCO-WholeBody-Hand
12 | Metrics:
13 | AUC: 0.833
14 | EPE: 4.64
15 | PCK@0.2: 0.8
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/resnet/res50_coco_wholebody_hand_256x256-8dbc750c_20210908.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/scnet_coco_wholebody_hand.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/coco_wholebody_hand/td-hm_scnet50_8xb32-210e_coco-wholebody-hand-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SCNet
7 | Training Data: COCO-WholeBody-Hand
8 | Name: td-hm_scnet50_8xb32-210e_coco-wholebody-hand-256x256
9 | Results:
10 | - Dataset: COCO-WholeBody-Hand
11 | Metrics:
12 | AUC: 0.834
13 | EPE: 4.55
14 | PCK@0.2: 0.803
15 | Task: Hand 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/hand/scnet/scnet50_coco_wholebody_hand_256x256-e73414c7_20210909.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/freihand2d/resnet_freihand2d.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/freihand2d/td-hm_res50_8xb64-100e_freihand2d-224x224.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: FreiHand
9 | Name: td-hm_res50_8xb64-100e_freihand2d-224x224
10 | Results:
11 | - Dataset: FreiHand
12 | Metrics:
13 | AUC: 0.868
14 | EPE: 3.27
15 | PCK@0.2: 0.999
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/resnet/res50_freihand_224x224-ff0799bc_20200914.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/onehand10k/hrnetv2_dark_onehand10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/onehand10k/td-hm_hrnetv2-w18_dark-8xb64-210e_onehand10k-256x256.py
3 | In Collection: DarkPose
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - DarkPose
8 | Training Data: OneHand10K
9 | Name: td-hm_hrnetv2-w18_dark-8xb64-210e_onehand10k-256x256
10 | Results:
11 | - Dataset: OneHand10K
12 | Metrics:
13 | AUC: 0.572
14 | EPE: 23.96
15 | PCK@0.2: 0.99
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/dark/hrnetv2_w18_onehand10k_256x256_dark-a2f80c64_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/onehand10k/hrnetv2_onehand10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/onehand10k/td-hm_hrnetv2-w18_8xb64-210e_onehand10k-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | Training Data: OneHand10K
8 | Name: td-hm_hrnetv2-w18_8xb64-210e_onehand10k-256x256
9 | Results:
10 | - Dataset: OneHand10K
11 | Metrics:
12 | AUC: 0.567
13 | EPE: 24.26
14 | PCK@0.2: 0.99
15 | Task: Hand 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/hand/hrnetv2/hrnetv2_w18_onehand10k_256x256-30bc9c6b_20210330.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/onehand10k/hrnetv2_udp_onehand10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/onehand10k/td-hm_hrnetv2-w18_udp-8xb64-210e_onehand10k-256x256.py
3 | In Collection: UDP
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - UDP
8 | Training Data: OneHand10K
9 | Name: td-hm_hrnetv2-w18_udp-8xb64-210e_onehand10k-256x256
10 | Results:
11 | - Dataset: OneHand10K
12 | Metrics:
13 | AUC: 0.571
14 | EPE: 23.88
15 | PCK@0.2: 0.99
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/udp/hrnetv2_w18_onehand10k_256x256_udp-0d1b515d_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/onehand10k/mobilenetv2_onehand10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/onehand10k/td-hm_mobilenetv2_8xb64-210e_onehand10k-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - MobilenetV2
8 | Training Data: OneHand10K
9 | Name: td-hm_mobilenetv2_8xb64-210e_onehand10k-256x256
10 | Results:
11 | - Dataset: OneHand10K
12 | Metrics:
13 | AUC: 0.537
14 | EPE: 28.56
15 | PCK@0.2: 0.986
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/mobilenetv2/mobilenetv2_onehand10k_256x256-f3a3d90e_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/onehand10k/resnet_onehand10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/onehand10k/td-hm_res50_8xb32-210e_onehand10k-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: OneHand10K
9 | Name: td-hm_res50_8xb32-210e_onehand10k-256x256
10 | Results:
11 | - Dataset: OneHand10K
12 | Metrics:
13 | AUC: 0.555
14 | EPE: 25.16
15 | PCK@0.2: 0.989
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/resnet/res50_onehand10k_256x256-739c8639_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/rhd2d/hrnetv2_dark_rhd2d.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/rhd2d/td-hm_hrnetv2-w18_dark-8xb64-210e_rhd2d-256x256.py
3 | In Collection: DarkPose
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - DarkPose
8 | Training Data: RHD
9 | Name: td-hm_hrnetv2-w18_dark-8xb64-210e_rhd2d-256x256
10 | Results:
11 | - Dataset: RHD
12 | Metrics:
13 | AUC: 0.903
14 | EPE: 2.18
15 | PCK@0.2: 0.992
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/dark/hrnetv2_w18_rhd2d_256x256_dark-4df3a347_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/rhd2d/hrnetv2_rhd2d.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/rhd2d/td-hm_hrnetv2-w18_8xb64-210e_rhd2d-256x256.py
3 | In Collection: HRNetv2
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | Training Data: RHD
8 | Name: td-hm_hrnetv2-w18_8xb64-210e_rhd2d-256x256
9 | Results:
10 | - Dataset: RHD
11 | Metrics:
12 | AUC: 0.902
13 | EPE: 2.21
14 | PCK@0.2: 0.992
15 | Task: Hand 2D Keypoint
16 | Weights: https://download.openmmlab.com/mmpose/hand/hrnetv2/hrnetv2_w18_rhd2d_256x256-95b20dd8_20210330.pth
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/rhd2d/hrnetv2_udp_rhd2d.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/rhd2d/td-hm_hrnetv2-w18_udp-8xb64-210e_rhd2d-256x256.py
3 | In Collection: UDP
4 | Metadata:
5 | Architecture:
6 | - HRNetv2
7 | - UDP
8 | Training Data: RHD
9 | Name: td-hm_hrnetv2-w18_udp-8xb64-210e_rhd2d-256x256
10 | Results:
11 | - Dataset: RHD
12 | Metrics:
13 | AUC: 0.902
14 | EPE: 2.19
15 | PCK@0.2: 0.992
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/udp/hrnetv2_w18_rhd2d_256x256_udp-63ba6007_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/rhd2d/mobilenetv2_rhd2d.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/rhd2d/td-hm_mobilenetv2_8xb64-210e_rhd2d-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - MobilenetV2
8 | Training Data: RHD
9 | Name: td-hm_mobilenetv2_8xb64-210e_rhd2d-256x256
10 | Results:
11 | - Dataset: RHD
12 | Metrics:
13 | AUC: 0.883
14 | EPE: 2.79
15 | PCK@0.2: 0.985
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/mobilenetv2/mobilenetv2_rhd2d_256x256-85fa02db_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_heatmap/rhd2d/resnet_rhd2d.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_heatmap/rhd2d/td-hm_res50_8xb64-210e_rhd2d-256x256.py
3 | In Collection: SimpleBaseline2D
4 | Metadata:
5 | Architecture:
6 | - SimpleBaseline2D
7 | - ResNet
8 | Training Data: RHD
9 | Name: td-hm_res50_8xb64-210e_rhd2d-256x256
10 | Results:
11 | - Dataset: RHD
12 | Metrics:
13 | AUC: 0.898
14 | EPE: 2.32
15 | PCK@0.2: 0.991
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/resnet/res50_rhd2d_256x256-5dc7e4cc_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_regression/onehand10k/resnet_onehand10k.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_regression/onehand10k/td-reg_res50_8xb64-210e_onehand10k-256x256.py
3 | In Collection: DeepPose
4 | Metadata:
5 | Architecture:
6 | - DeepPose
7 | - ResNet
8 | Training Data: OneHand10K
9 | Name: td-reg_res50_8xb64-210e_onehand10k-256x256
10 | Results:
11 | - Dataset: OneHand10K
12 | Metrics:
13 | AUC: 0.485
14 | EPE: 34.21
15 | PCK@0.2: 0.99
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/deeppose/deeppose_res50_onehand10k_256x256-cbddf43a_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_2d_keypoint/topdown_regression/rhd2d/resnet_rhd2d.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/hand_2d_keypoint/topdown_regression/rhd2d/td-reg_res50_8xb64-210e_rhd2d-256x256.py
3 | In Collection: DeepPose
4 | Metadata:
5 | Architecture:
6 | - DeepPose
7 | - ResNet
8 | Training Data: RHD
9 | Name: td-reg_res50_8xb64-210e_rhd2d-256x256
10 | Results:
11 | - Dataset: RHD
12 | Metrics:
13 | AUC: 0.865
14 | EPE: 3.32
15 | PCK@0.2: 0.988
16 | Task: Hand 2D Keypoint
17 | Weights: https://download.openmmlab.com/mmpose/hand/deeppose/deeppose_res50_rhd2d_256x256-37f1c4d3_20210330.pth
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_3d_keypoint/README.md:
--------------------------------------------------------------------------------
1 | # 3D Hand Pose Estimation
2 |
3 | 3D hand pose estimation is the task of estimating the 3D poses (or keypoints) of the hand from an input image.
4 |
5 | ## Data preparation
6 |
7 | Please follow [DATA Preparation](/docs/en/dataset_zoo/3d_hand_keypoint.md) to prepare data.
8 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/hand_gesture/README.md:
--------------------------------------------------------------------------------
1 | # Gesture Recognition
2 |
3 | Gesture recognition aims to recognize hand gestures in videos, such as a thumbs-up.
4 |
5 | ## Data preparation
6 |
7 | Please follow [DATA Preparation](/docs/en/dataset_zoo/2d_hand_gesture.md) to prepare data.
8 |
9 | ## Demo
10 |
11 | Please follow [Demo](/demo/docs/en/gesture_recognition_demo.md) to run the demo.
12 |
13 |
14 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/wholebody_2d_keypoint/README.md:
--------------------------------------------------------------------------------
1 | # 2D Human Whole-Body Pose Estimation
2 |
3 | 2D human whole-body pose estimation aims to localize dense landmarks on the entire human body, including the face, hands, body, and feet.
4 |
5 | Existing approaches can be categorized into top-down and bottom-up approaches.
6 |
7 | Top-down methods divide the task into two stages: human detection and whole-body pose estimation. They perform human detection first, followed by single-person whole-body pose estimation given human bounding boxes.
8 |
9 | Bottom-up approaches (e.g. Associative Embedding, AE) first detect all the whole-body keypoints and then group/associate them into person instances.
10 |
11 | ## Data preparation
12 |
13 | Please follow [DATA Preparation](/docs/en/dataset_zoo/2d_wholebody_keypoint.md) to prepare data.
14 |
15 | ## Demo
16 |
17 | Please follow [Demo](/demo/docs/en/2d_wholebody_pose_demo.md) to run demos.
18 |
19 | 
20 |
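A hedged sketch of the two-stage top-down pipeline described above, assuming the mmpose>=1.0 inference API. `detect_humans` stands in for any person detector returning xyxy boxes (e.g. an MMDetection model), and `pose_config.py` / `pose_checkpoint.pth` / `person.jpg` are placeholder paths:

```python
import numpy as np
from mmpose.apis import init_model, inference_topdown

def detect_humans(image_path):
    # Placeholder for stage 1: a real pipeline would run a person detector here.
    return np.array([[50, 40, 250, 400]], dtype=np.float32)  # one xyxy box

pose_model = init_model('pose_config.py', 'pose_checkpoint.pth', device='cpu')
bboxes = detect_humans('person.jpg')                    # stage 1: human detection
samples = inference_topdown(pose_model, 'person.jpg',   # stage 2: pose per box
                            bboxes=bboxes, bbox_format='xyxy')
print(samples[0].pred_instances.keypoints.shape)        # (1, K, 2) per person
```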
--------------------------------------------------------------------------------
/mmpose_package/mmpose/configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext_udp_coco-wholebody.yml:
--------------------------------------------------------------------------------
1 | Models:
2 | - Config: configs/wholebody_2d_keypoint/topdown_heatmap/coco-wholebody/cspnext-m_udp_8xb64-210e_coco-wholebody-256x192.py
3 | In Collection: UDP
4 | Metadata:
5 | Architecture: &id001
6 | - UDP
7 | - CSPNeXt
8 | Training Data: COCO-WholeBody
9 | Name: cspnext-m_udp_8xb64-210e_coco-wholebody-256x192
10 | Results:
11 | - Dataset: COCO-WholeBody
12 | Metrics:
13 | Body AP: 0.687
14 | Body AR: 0.735
15 | Face AP: 0.697
16 | Face AR: 0.755
17 | Foot AP: 0.680
18 | Foot AR: 0.763
19 | Hand AP: 0.46
20 | Hand AR: 0.567
21 | Whole AP: 0.567
22 | Whole AR: 0.641
23 | Task: Wholebody 2D Keypoint
24 | Weights: https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/cspnext-m_udp-coco-wholebody_pt-in1k_210e-256x192-320fa258_20230123.pth
25 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/demo/resources/demo.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/mmpose_package/mmpose/demo/resources/demo.mp4
--------------------------------------------------------------------------------
/mmpose_package/mmpose/demo/resources/demo_coco.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/mmpose_package/mmpose/demo/resources/demo_coco.gif
--------------------------------------------------------------------------------
/mmpose_package/mmpose/demo/resources/sunglasses.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/mmpose_package/mmpose/demo/resources/sunglasses.jpg
--------------------------------------------------------------------------------
/mmpose_package/mmpose/demo/webcam_cfg/test_camera.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | executor_cfg = dict(
3 | name='Test Webcam',
4 | camera_id=0,
5 | camera_max_fps=30,
6 | nodes=[
7 | dict(
8 | type='MonitorNode',
9 | name='monitor',
10 | enable_key='m',
11 | enable=False,
12 | input_buffer='_frame_',
13 | output_buffer='display'),
14 | # 'RecorderNode':
15 | # This node saves the output video into a file.
16 | dict(
17 | type='RecorderNode',
18 | name='recorder',
19 | out_video_file='webcam_api_output.mp4',
20 | input_buffer='display',
21 | output_buffer='_display_')
22 | ])
23 |
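For context, a config like this is consumed by MMPose's webcam API: the `executor_cfg` dict is unpacked into a `WebcamExecutor`, which wires the nodes into a pipeline and runs the capture loop. A hedged sketch, assuming the MMPose 1.x `mmpose.apis.webcam.WebcamExecutor` entry point:

```python
# Hedged sketch: launch the webcam pipeline defined by the config above.
from mmengine import Config
from mmpose.apis.webcam import WebcamExecutor

cfg = Config.fromfile('demo/webcam_cfg/test_camera.py')
executor = WebcamExecutor(**cfg.executor_cfg)
executor.run()  # opens camera 0; 'm' toggles the monitor node per enable_key
```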
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docker/Dockerfile:
--------------------------------------------------------------------------------
1 | ARG PYTORCH="1.8.1"
2 | ARG CUDA="10.2"
3 | ARG CUDNN="7"
4 |
5 | FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel
6 |
7 | ENV TORCH_CUDA_ARCH_LIST="6.0 6.1 7.0+PTX"
8 | ENV TORCH_NVCC_FLAGS="-Xfatbin -compress-all"
9 | ENV CMAKE_PREFIX_PATH="$(dirname $(which conda))/../"
10 |
11 | # To fix GPG key error when running apt-get update
12 | RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu1804/x86_64/3bf863cc.pub
13 | RUN apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu1804/x86_64/7fa2af80.pub
14 |
15 | RUN apt-get update && apt-get install -y git ninja-build libglib2.0-0 libsm6 libxrender-dev libxext6 libgl1-mesa-glx\
16 | && apt-get clean \
17 | && rm -rf /var/lib/apt/lists/*
18 |
19 | # Install xtcocotools
20 | RUN pip install cython
21 | RUN pip install xtcocotools
22 |
23 | # Install MMEngine and MMCV
24 | RUN pip install openmim
25 | RUN mim install mmengine "mmcv>=2.0.0"
26 |
27 | # Install MMPose
28 | RUN conda clean --all
29 | RUN git clone https://github.com/open-mmlab/mmpose.git /mmpose
30 | WORKDIR /mmpose
31 | RUN git checkout main
32 | ENV FORCE_CUDA="1"
33 | RUN pip install -r requirements/build.txt
34 | RUN pip install --no-cache-dir -e .
35 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docker/serve/config.properties:
--------------------------------------------------------------------------------
1 | inference_address=http://0.0.0.0:8080
2 | management_address=http://0.0.0.0:8081
3 | metrics_address=http://0.0.0.0:8082
4 | model_store=/home/model-server/model-store
5 | load_models=all
6 |
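This properties file points TorchServe's inference, management, and metrics APIs at ports 8080-8082 and loads every model archive found in the model store. A hedged client sketch against the standard TorchServe REST endpoint; `my_pose_model` is a placeholder for whatever `.mar` archive actually sits in the model store, and `person.jpg` is a placeholder input:

```python
import requests

with open('person.jpg', 'rb') as f:
    resp = requests.post(
        'http://localhost:8080/predictions/my_pose_model',  # inference_address
        data=f.read())
print(resp.json())  # keypoint predictions serialized by the model handler
```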
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docker/serve/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | if [[ "$1" = "serve" ]]; then
5 | shift 1
6 | torchserve --start --ts-config /home/model-server/config.properties
7 | else
8 | eval "$@"
9 | fi
10 |
11 | # prevent docker exit
12 | tail -f /dev/null
13 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/_static/css/readthedocs.css:
--------------------------------------------------------------------------------
1 | .header-logo {
2 | background-image: url("../images/mmpose-logo.png");
3 | background-size: 120px 50px;
4 | height: 50px;
5 | width: 120px;
6 | }
7 |
8 | table.autosummary td {
9 | width: 35%
10 | }
11 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/_static/images/mmpose-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/mmpose_package/mmpose/docs/en/_static/images/mmpose-logo.png
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/advanced_guides/customize_logging.md:
--------------------------------------------------------------------------------
1 | # Customize Logging
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/advanced_guides/customize_optimizer.md:
--------------------------------------------------------------------------------
1 | # Customize Optimizer and Scheduler
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/advanced_guides/customize_transforms.md:
--------------------------------------------------------------------------------
1 | # Customize Data Transformation and Augmentation
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/advanced_guides/dataflow.md:
--------------------------------------------------------------------------------
1 | # Dataflow in MMPose
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/advanced_guides/how_to_deploy.md:
--------------------------------------------------------------------------------
1 | # How to Deploy MMPose Models
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/advanced_guides/implement_new_models.md:
--------------------------------------------------------------------------------
1 | # Implement New Models
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/notes/ecosystem.md:
--------------------------------------------------------------------------------
1 | # Ecosystem
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/notes/pytorch_2.md:
--------------------------------------------------------------------------------
1 | # PyTorch 2.0 Compatibility and Benchmarks
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/en/switch_language.md:
--------------------------------------------------------------------------------
1 | ## English
2 |
3 | ## 简体中文
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/algorithms/ipr.md:
--------------------------------------------------------------------------------
1 | # Integral Human Pose Regression
2 |
3 |
4 |
5 |
6 | IPR (ECCV'2018)
7 |
8 | ```bibtex
9 | @inproceedings{sun2018integral,
10 | title={Integral human pose regression},
11 | author={Sun, Xiao and Xiao, Bin and Wei, Fangyin and Liang, Shuang and Wei, Yichen},
12 | booktitle={Proceedings of the European conference on computer vision (ECCV)},
13 | pages={529--545},
14 | year={2018}
15 | }
16 | ```
17 |
18 |
19 |
20 | ## Abstract
21 |
22 |
23 |
24 | State-of-the-art human pose estimation methods are based on heat map representation. In spite of the good performance, the representation has a few issues in nature, such as not differentiable and quantization error. This work shows that a simple integral operation relates and unifies the heat map representation and joint regression, thus avoiding the above issues. It is differentiable, efficient, and compatible with any heat map based methods. Its effectiveness is convincingly validated via comprehensive ablation experiments under various settings, specifically on 3D pose estimation, for the first time.
25 |
26 |
27 |
28 |
29 | 
30 |
31 |
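The integral operation the abstract describes is commonly implemented as a soft-argmax: normalize the heatmap into a probability map, then take the expectation of the pixel coordinates, which is differentiable and free of quantization error. A small numpy sketch of that standard formulation (the temperature `beta` is an illustrative choice, not from the paper text above):

```python
import numpy as np

def soft_argmax(heatmap, beta=25.0):
    """Integral (soft-argmax) decoding of one heatmap: softmax weights,
    then the expected (x, y) coordinate."""
    h, w = heatmap.shape
    p = np.exp(beta * (heatmap - heatmap.max()))  # stabilized softmax
    p /= p.sum()
    ys, xs = np.mgrid[0:h, 0:w]
    return float((p * xs).sum()), float((p * ys).sum())

hm = np.zeros((64, 64))
hm[20, 37] = 1.0           # peak at (x=37, y=20)
print(soft_argmax(hm))     # approximately (37.0, 20.0)
```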
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/algorithms/simplebaseline2d.md:
--------------------------------------------------------------------------------
1 | # Simple baselines for human pose estimation and tracking
2 |
3 |
4 |
5 |
6 | SimpleBaseline2D (ECCV'2018)
7 |
8 | ```bibtex
9 | @inproceedings{xiao2018simple,
10 | title={Simple baselines for human pose estimation and tracking},
11 | author={Xiao, Bin and Wu, Haiping and Wei, Yichen},
12 | booktitle={Proceedings of the European conference on computer vision (ECCV)},
13 | pages={466--481},
14 | year={2018}
15 | }
16 | ```
17 |
18 |
19 |
20 | ## Abstract
21 |
22 |
23 |
24 | There has been significant progress on pose estimation and increasing interests on pose tracking in recent years. At the same time, the overall algorithm and system complexity increases as well, making the algorithm analysis and comparison more difficult. This work provides simple and effective baseline methods. They are helpful for inspiring and evaluating new ideas for the field. State-of-the-art results are achieved on challenging benchmarks.
25 |
26 |
27 |
28 |
29 | 
30 |
31 |
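The "simple baseline" the abstract refers to attaches a small deconvolutional head to a backbone: a few transposed convolutions upsample the features, then a 1x1 convolution predicts one heatmap per keypoint. A hedged PyTorch sketch of that head under the commonly cited configuration (three 4x4 stride-2 deconvs with 256 channels); exact layer settings are an assumption here, not taken from this file:

```python
import torch
import torch.nn as nn

class DeconvHead(nn.Module):
    """Sketch of a SimpleBaseline2D-style head: three 4x4 stride-2 transposed
    convs upsample backbone features 8x, then a 1x1 conv emits K heatmaps."""
    def __init__(self, in_ch=2048, mid_ch=256, num_keypoints=17):
        super().__init__()
        layers = []
        for _ in range(3):
            layers += [
                nn.ConvTranspose2d(in_ch, mid_ch, 4, stride=2, padding=1),
                nn.BatchNorm2d(mid_ch),
                nn.ReLU(inplace=True),
            ]
            in_ch = mid_ch
        self.deconv = nn.Sequential(*layers)
        self.final = nn.Conv2d(mid_ch, num_keypoints, kernel_size=1)

    def forward(self, x):
        return self.final(self.deconv(x))

feats = torch.randn(1, 2048, 8, 6)  # e.g. ResNet-50 output for a 256x192 input
print(DeconvHead()(feats).shape)    # torch.Size([1, 17, 64, 48])
```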
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/300w.md:
--------------------------------------------------------------------------------
1 | # 300 faces in-the-wild challenge: Database and results
2 |
3 |
4 |
5 |
6 | 300W (IMAVIS'2016)
7 |
8 | ```bibtex
9 | @article{sagonas2016300,
10 | title={300 faces in-the-wild challenge: Database and results},
11 | author={Sagonas, Christos and Antonakos, Epameinondas and Tzimiropoulos, Georgios and Zafeiriou, Stefanos and Pantic, Maja},
12 | journal={Image and vision computing},
13 | volume={47},
14 | pages={3--18},
15 | year={2016},
16 | publisher={Elsevier}
17 | }
18 | ```
19 |
20 |
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/aflw.md:
--------------------------------------------------------------------------------
1 | # Annotated facial landmarks in the wild: A large-scale, real-world database for facial landmark localization
2 |
3 |
4 |
5 |
6 | AFLW (ICCVW'2011)
7 |
8 | ```bibtex
9 | @inproceedings{koestinger2011annotated,
10 | title={Annotated facial landmarks in the wild: A large-scale, real-world database for facial landmark localization},
11 | author={Koestinger, Martin and Wohlhart, Paul and Roth, Peter M and Bischof, Horst},
12 | booktitle={2011 IEEE international conference on computer vision workshops (ICCV workshops)},
13 | pages={2144--2151},
14 | year={2011},
15 | organization={IEEE}
16 | }
17 | ```
18 |
19 |
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/aic.md:
--------------------------------------------------------------------------------
1 | # Ai challenger: A large-scale dataset for going deeper in image understanding
2 |
3 |
4 |
5 |
6 | AI Challenger (ArXiv'2017)
7 |
8 | ```bibtex
9 | @article{wu2017ai,
10 | title={Ai challenger: A large-scale dataset for going deeper in image understanding},
11 | author={Wu, Jiahong and Zheng, He and Zhao, Bo and Li, Yixin and Yan, Baoming and Liang, Rui and Wang, Wenjia and Zhou, Shipei and Lin, Guosen and Fu, Yanwei and others},
12 | journal={arXiv preprint arXiv:1711.06475},
13 | year={2017}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/animalpose.md:
--------------------------------------------------------------------------------
1 | # Cross-Domain Adaptation for Animal Pose Estimation
2 |
3 |
4 |
5 |
6 | Animal-Pose (ICCV'2019)
7 |
8 | ```bibtex
9 | @InProceedings{Cao_2019_ICCV,
10 | author = {Cao, Jinkun and Tang, Hongyang and Fang, Hao-Shu and Shen, Xiaoyong and Lu, Cewu and Tai, Yu-Wing},
11 | title = {Cross-Domain Adaptation for Animal Pose Estimation},
12 | booktitle = {The IEEE International Conference on Computer Vision (ICCV)},
13 | month = {October},
14 | year = {2019}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/ap10k.md:
--------------------------------------------------------------------------------
1 | # AP-10K: A Benchmark for Animal Pose Estimation in the Wild
2 |
3 |
4 |
5 |
6 | AP-10K (NeurIPS'2021)
7 |
8 | ```bibtex
9 | @misc{yu2021ap10k,
10 | title={AP-10K: A Benchmark for Animal Pose Estimation in the Wild},
11 | author={Hang Yu and Yufei Xu and Jing Zhang and Wei Zhao and Ziyu Guan and Dacheng Tao},
12 | year={2021},
13 | eprint={2108.12617},
14 | archivePrefix={arXiv},
15 | primaryClass={cs.CV}
16 | }
17 | ```
18 |
19 |
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/atrw.md:
--------------------------------------------------------------------------------
1 | # ATRW: A Benchmark for Amur Tiger Re-identification in the Wild
2 |
3 |
4 |
5 |
6 | ATRW (ACM MM'2020)
7 |
8 | ```bibtex
9 | @inproceedings{li2020atrw,
10 | title={ATRW: A Benchmark for Amur Tiger Re-identification in the Wild},
11 | author={Li, Shuyuan and Li, Jianguo and Tang, Hanlin and Qian, Rui and Lin, Weiyao},
12 | booktitle={Proceedings of the 28th ACM International Conference on Multimedia},
13 | pages={2590--2598},
14 | year={2020}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/campus_and_shelf.md:
--------------------------------------------------------------------------------
1 | # 3D Pictorial Structures for Multiple Human Pose Estimation
2 |
3 |
4 |
5 |
6 | Campus and Shelf (CVPR'2014)
7 |
8 | ```bibtex
9 | @inproceedings {belagian14multi,
10 | title = {{3D} Pictorial Structures for Multiple Human Pose Estimation},
11 | author = {Belagiannis, Vasileios and Amin, Sikandar and Andriluka, Mykhaylo and Schiele, Bernt and Navab,
12 | Nassir and Ilic, Slobodan},
13 | booktitle = {IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR)},
14 | year = {2014},
15 | month = {June},
16 | organization={IEEE}
17 | }
18 | ```
19 |
20 |
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/coco.md:
--------------------------------------------------------------------------------
1 | # Microsoft coco: Common objects in context
2 |
3 |
4 |
5 |
6 | COCO (ECCV'2014)
7 |
8 | ```bibtex
9 | @inproceedings{lin2014microsoft,
10 | title={Microsoft coco: Common objects in context},
11 | author={Lin, Tsung-Yi and Maire, Michael and Belongie, Serge and Hays, James and Perona, Pietro and Ramanan, Deva and Doll{\'a}r, Piotr and Zitnick, C Lawrence},
12 | booktitle={European conference on computer vision},
13 | pages={740--755},
14 | year={2014},
15 | organization={Springer}
16 | }
17 | ```
18 |
19 |
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/coco_wholebody.md:
--------------------------------------------------------------------------------
1 | # Whole-Body Human Pose Estimation in the Wild
2 |
3 |
4 |
5 |
6 | COCO-WholeBody (ECCV'2020)
7 |
8 | ```bibtex
9 | @inproceedings{jin2020whole,
10 | title={Whole-Body Human Pose Estimation in the Wild},
11 | author={Jin, Sheng and Xu, Lumin and Xu, Jin and Wang, Can and Liu, Wentao and Qian, Chen and Ouyang, Wanli and Luo, Ping},
12 | booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
13 | year={2020}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/coco_wholebody_face.md:
--------------------------------------------------------------------------------
1 | # Whole-Body Human Pose Estimation in the Wild
2 |
3 |
4 |
5 |
6 | COCO-WholeBody-Face (ECCV'2020)
7 |
8 | ```bibtex
9 | @inproceedings{jin2020whole,
10 | title={Whole-Body Human Pose Estimation in the Wild},
11 | author={Jin, Sheng and Xu, Lumin and Xu, Jin and Wang, Can and Liu, Wentao and Qian, Chen and Ouyang, Wanli and Luo, Ping},
12 | booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
13 | year={2020}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/coco_wholebody_hand.md:
--------------------------------------------------------------------------------
1 | # Whole-Body Human Pose Estimation in the Wild
2 |
3 |
4 |
5 |
6 | COCO-WholeBody-Hand (ECCV'2020)
7 |
8 | ```bibtex
9 | @inproceedings{jin2020whole,
10 | title={Whole-Body Human Pose Estimation in the Wild},
11 | author={Jin, Sheng and Xu, Lumin and Xu, Jin and Wang, Can and Liu, Wentao and Qian, Chen and Ouyang, Wanli and Luo, Ping},
12 | booktitle={Proceedings of the European Conference on Computer Vision (ECCV)},
13 | year={2020}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/cofw.md:
--------------------------------------------------------------------------------
1 | # Robust face landmark estimation under occlusion
2 |
3 |
4 |
5 |
6 | COFW (ICCV'2013)
7 |
8 | ```bibtex
9 | @inproceedings{burgos2013robust,
10 | title={Robust face landmark estimation under occlusion},
11 | author={Burgos-Artizzu, Xavier P and Perona, Pietro and Doll{\'a}r, Piotr},
12 | booktitle={Proceedings of the IEEE international conference on computer vision},
13 | pages={1513--1520},
14 | year={2013}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/crowdpose.md:
--------------------------------------------------------------------------------
1 | # CrowdPose: Efficient Crowded Scenes Pose Estimation and A New Benchmark
2 |
3 |
4 |
5 |
6 | CrowdPose (CVPR'2019)
7 |
8 | ```bibtex
9 | @article{li2018crowdpose,
10 | title={CrowdPose: Efficient Crowded Scenes Pose Estimation and A New Benchmark},
11 | author={Li, Jiefeng and Wang, Can and Zhu, Hao and Mao, Yihuan and Fang, Hao-Shu and Lu, Cewu},
12 | journal={arXiv preprint arXiv:1812.00324},
13 | year={2018}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/deepfashion.md:
--------------------------------------------------------------------------------
1 | # DeepFashion: Powering Robust Clothes Recognition and Retrieval with Rich Annotations
2 |
3 |
4 |
5 |
6 | DeepFashion (CVPR'2016)
7 |
8 | ```bibtex
9 | @inproceedings{liuLQWTcvpr16DeepFashion,
10 | author = {Liu, Ziwei and Luo, Ping and Qiu, Shi and Wang, Xiaogang and Tang, Xiaoou},
11 | title = {DeepFashion: Powering Robust Clothes Recognition and Retrieval with Rich Annotations},
12 | booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
13 | month = {June},
14 | year = {2016}
15 | }
16 | ```
17 |
18 |
19 |
20 |
21 |
22 |
23 | DeepFashion (ECCV'2016)
24 |
25 | ```bibtex
26 | @inproceedings{liuYLWTeccv16FashionLandmark,
27 | author = {Liu, Ziwei and Yan, Sijie and Luo, Ping and Wang, Xiaogang and Tang, Xiaoou},
28 | title = {Fashion Landmark Detection in the Wild},
29 | booktitle = {European Conference on Computer Vision (ECCV)},
30 | month = {October},
31 | year = {2016}
32 | }
33 | ```
34 |
35 |
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/fly.md:
--------------------------------------------------------------------------------
1 | # Fast animal pose estimation using deep neural networks
2 |
3 |
4 |
5 |
6 | Vinegar Fly (Nature Methods'2019)
7 |
8 | ```bibtex
9 | @article{pereira2019fast,
10 | title={Fast animal pose estimation using deep neural networks},
11 | author={Pereira, Talmo D and Aldarondo, Diego E and Willmore, Lindsay and Kislin, Mikhail and Wang, Samuel S-H and Murthy, Mala and Shaevitz, Joshua W},
12 | journal={Nature methods},
13 | volume={16},
14 | number={1},
15 | pages={117--125},
16 | year={2019},
17 | publisher={Nature Publishing Group}
18 | }
19 | ```
20 |
21 |
22 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/freihand.md:
--------------------------------------------------------------------------------
1 | # Freihand: A dataset for markerless capture of hand pose and shape from single rgb images
2 |
3 |
4 |
5 |
6 | FreiHand (ICCV'2019)
7 |
8 | ```bibtex
9 | @inproceedings{zimmermann2019freihand,
10 | title={Freihand: A dataset for markerless capture of hand pose and shape from single rgb images},
11 | author={Zimmermann, Christian and Ceylan, Duygu and Yang, Jimei and Russell, Bryan and Argus, Max and Brox, Thomas},
12 | booktitle={Proceedings of the IEEE International Conference on Computer Vision},
13 | pages={813--822},
14 | year={2019}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/h36m.md:
--------------------------------------------------------------------------------
1 | # Human3.6M: Large Scale Datasets and Predictive Methods for 3D Human Sensing in Natural Environments
2 |
3 |
4 |
5 |
6 | Human3.6M (TPAMI'2014)
7 |
8 | ```bibtex
9 | @article{h36m_pami,
10 | author = {Ionescu, Catalin and Papava, Dragos and Olaru, Vlad and Sminchisescu, Cristian},
11 | title = {Human3.6M: Large Scale Datasets and Predictive Methods for 3D Human Sensing in Natural Environments},
12 | journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence},
13 | publisher = {IEEE Computer Society},
14 | volume = {36},
15 | number = {7},
16 | pages = {1325-1339},
17 | month = {jul},
18 | year = {2014}
19 | }
20 | ```
21 |
22 |
23 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/halpe.md:
--------------------------------------------------------------------------------
1 | # PaStaNet: Toward Human Activity Knowledge Engine
2 |
3 |
4 |
5 |
6 | Halpe (CVPR'2020)
7 |
8 | ```bibtex
9 | @inproceedings{li2020pastanet,
10 | title={PaStaNet: Toward Human Activity Knowledge Engine},
11 | author={Li, Yong-Lu and Xu, Liang and Liu, Xinpeng and Huang, Xijie and Xu, Yue and Wang, Shiyi and Fang, Hao-Shu and Ma, Ze and Chen, Mingyang and Lu, Cewu},
12 | booktitle={CVPR},
13 | year={2020}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/horse10.md:
--------------------------------------------------------------------------------
1 | # Pretraining boosts out-of-domain robustness for pose estimation
2 |
3 |
4 |
5 |
6 | Horse-10 (WACV'2021)
7 |
8 | ```bibtex
9 | @inproceedings{mathis2021pretraining,
10 | title={Pretraining boosts out-of-domain robustness for pose estimation},
11 | author={Mathis, Alexander and Biasi, Thomas and Schneider, Steffen and Yuksekgonul, Mert and Rogers, Byron and Bethge, Matthias and Mathis, Mackenzie W},
12 | booktitle={Proceedings of the IEEE/CVF Winter Conference on Applications of Computer Vision},
13 | pages={1859--1868},
14 | year={2021}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/interhand.md:
--------------------------------------------------------------------------------
1 | # InterHand2.6M: A dataset and baseline for 3D interacting hand pose estimation from a single RGB image
2 |
3 |
4 |
5 |
6 | InterHand2.6M (ECCV'2020)
7 |
8 | ```bibtex
9 | @article{moon2020interhand2,
10 | title={InterHand2.6M: A dataset and baseline for 3D interacting hand pose estimation from a single RGB image},
11 | author={Moon, Gyeongsik and Yu, Shoou-I and Wen, He and Shiratori, Takaaki and Lee, Kyoung Mu},
12 | journal={arXiv preprint arXiv:2008.09309},
13 | year={2020},
14 | publisher={Springer}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/jhmdb.md:
--------------------------------------------------------------------------------
1 | # Towards understanding action recognition
2 |
3 |
4 |
5 |
6 | JHMDB (ICCV'2013)
7 |
8 | ```bibtex
9 | @inproceedings{Jhuang:ICCV:2013,
10 | title = {Towards understanding action recognition},
11 | author = {H. Jhuang and J. Gall and S. Zuffi and C. Schmid and M. J. Black},
12 | booktitle = {International Conf. on Computer Vision (ICCV)},
13 | month = Dec,
14 | pages = {3192-3199},
15 | year = {2013}
16 | }
17 | ```
18 |
19 |
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/locust.md:
--------------------------------------------------------------------------------
1 | # DeepPoseKit, a software toolkit for fast and robust animal pose estimation using deep learning
2 |
3 |
4 |
5 |
6 | Desert Locust (Elife'2019)
7 |
8 | ```bibtex
9 | @article{graving2019deepposekit,
10 | title={DeepPoseKit, a software toolkit for fast and robust animal pose estimation using deep learning},
11 | author={Graving, Jacob M and Chae, Daniel and Naik, Hemal and Li, Liang and Koger, Benjamin and Costelloe, Blair R and Couzin, Iain D},
12 | journal={Elife},
13 | volume={8},
14 | pages={e47994},
15 | year={2019},
16 | publisher={eLife Sciences Publications Limited}
17 | }
18 | ```
19 |
20 |
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/macaque.md:
--------------------------------------------------------------------------------
1 | # MacaquePose: A novel ‘in the wild’ macaque monkey pose dataset for markerless motion capture
2 |
3 |
4 |
5 |
6 | MacaquePose (bioRxiv'2020)
7 |
8 | ```bibtex
9 | @article{labuguen2020macaquepose,
10 | title={MacaquePose: A novel ‘in the wild’ macaque monkey pose dataset for markerless motion capture},
11 | author={Labuguen, Rollyn and Matsumoto, Jumpei and Negrete, Salvador and Nishimaru, Hiroshi and Nishijo, Hisao and Takada, Masahiko and Go, Yasuhiro and Inoue, Ken-ichi and Shibata, Tomohiro},
12 | journal={bioRxiv},
13 | year={2020},
14 | publisher={Cold Spring Harbor Laboratory}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/mhp.md:
--------------------------------------------------------------------------------
1 | # Understanding humans in crowded scenes: Deep nested adversarial learning and a new benchmark for multi-human parsing
2 |
3 |
4 |
5 |
6 | MHP (ACM MM'2018)
7 |
8 | ```bibtex
9 | @inproceedings{zhao2018understanding,
10 | title={Understanding humans in crowded scenes: Deep nested adversarial learning and a new benchmark for multi-human parsing},
11 | author={Zhao, Jian and Li, Jianshu and Cheng, Yu and Sim, Terence and Yan, Shuicheng and Feng, Jiashi},
12 | booktitle={Proceedings of the 26th ACM international conference on Multimedia},
13 | pages={792--800},
14 | year={2018}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/mpi_inf_3dhp.md:
--------------------------------------------------------------------------------
1 | # Monocular 3D Human Pose Estimation In The Wild Using Improved CNN Supervision
2 |
3 |
4 |
5 |
6 | MPI-INF-3DHP (3DV'2017)
7 |
8 | ```bibtex
9 | @inproceedings{mono-3dhp2017,
10 | author = {Mehta, Dushyant and Rhodin, Helge and Casas, Dan and Fua, Pascal and Sotnychenko, Oleksandr and Xu, Weipeng and Theobalt, Christian},
11 | title = {Monocular 3D Human Pose Estimation In The Wild Using Improved CNN Supervision},
12 | booktitle = {3D Vision (3DV), 2017 Fifth International Conference on},
13 | url = {http://gvv.mpi-inf.mpg.de/3dhp_dataset},
14 | year = {2017},
15 | organization={IEEE},
16 | doi={10.1109/3dv.2017.00064},
17 | }
18 | ```
19 |
20 |
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/mpii.md:
--------------------------------------------------------------------------------
1 | # 2D Human Pose Estimation: New Benchmark and State of the Art Analysis
2 |
3 |
4 |
5 |
6 | MPII (CVPR'2014)
7 |
8 | ```bibtex
9 | @inproceedings{andriluka14cvpr,
10 | author = {Mykhaylo Andriluka and Leonid Pishchulin and Peter Gehler and Bernt Schiele},
11 | title = {2D Human Pose Estimation: New Benchmark and State of the Art Analysis},
12 | booktitle = {IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
13 | year = {2014},
14 | month = {June}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/mpii_trb.md:
--------------------------------------------------------------------------------
1 | # TRB: A Novel Triplet Representation for Understanding 2D Human Body
2 |
3 |
4 |
5 |
6 | MPII-TRB (ICCV'2019)
7 |
8 | ```bibtex
9 | @inproceedings{duan2019trb,
10 | title={TRB: A Novel Triplet Representation for Understanding 2D Human Body},
11 | author={Duan, Haodong and Lin, Kwan-Yee and Jin, Sheng and Liu, Wentao and Qian, Chen and Ouyang, Wanli},
12 | booktitle={Proceedings of the IEEE International Conference on Computer Vision},
13 | pages={9479--9488},
14 | year={2019}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/ochuman.md:
--------------------------------------------------------------------------------
1 | # Pose2seg: Detection free human instance segmentation
2 |
3 |
4 |
5 |
6 | OCHuman (CVPR'2019)
7 |
8 | ```bibtex
9 | @inproceedings{zhang2019pose2seg,
10 | title={Pose2seg: Detection free human instance segmentation},
11 | author={Zhang, Song-Hai and Li, Ruilong and Dong, Xin and Rosin, Paul and Cai, Zixi and Han, Xi and Yang, Dingcheng and Huang, Haozhi and Hu, Shi-Min},
12 | booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition},
13 | pages={889--898},
14 | year={2019}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/onehand10k.md:
--------------------------------------------------------------------------------
1 | # Mask-pose cascaded cnn for 2d hand pose estimation from single color image
2 |
3 |
4 |
5 |
6 | OneHand10K (TCSVT'2019)
7 |
8 | ```bibtex
9 | @article{wang2018mask,
10 | title={Mask-pose cascaded cnn for 2d hand pose estimation from single color image},
11 | author={Wang, Yangang and Peng, Cong and Liu, Yebin},
12 | journal={IEEE Transactions on Circuits and Systems for Video Technology},
13 | volume={29},
14 | number={11},
15 | pages={3258--3268},
16 | year={2018},
17 | publisher={IEEE}
18 | }
19 | ```
20 |
21 |
22 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/panoptic.md:
--------------------------------------------------------------------------------
1 | # Hand keypoint detection in single images using multiview bootstrapping
2 |
3 |
4 |
5 |
6 | CMU Panoptic HandDB (CVPR'2017)
7 |
8 | ```bibtex
9 | @inproceedings{simon2017hand,
10 | title={Hand keypoint detection in single images using multiview bootstrapping},
11 | author={Simon, Tomas and Joo, Hanbyul and Matthews, Iain and Sheikh, Yaser},
12 | booktitle={Proceedings of the IEEE conference on Computer Vision and Pattern Recognition},
13 | pages={1145--1153},
14 | year={2017}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/panoptic_body3d.md:
--------------------------------------------------------------------------------
1 | # Panoptic Studio: A Massively Multiview System for Social Motion Capture
2 |
3 |
4 |
5 |
6 | CMU Panoptic (ICCV'2015)
7 |
8 | ```bibtex
9 | @inproceedings{joo_iccv_2015,
10 | author = {Hanbyul Joo and Hao Liu and Lei Tan and Lin Gui and Bart Nabbe and Iain Matthews and Takeo Kanade and Shohei Nobuhara and Yaser Sheikh},
11 | title = {Panoptic Studio: A Massively Multiview System for Social Motion Capture},
12 | booktitle = {ICCV},
13 | year = {2015}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/posetrack18.md:
--------------------------------------------------------------------------------
1 | # Posetrack: A benchmark for human pose estimation and tracking
2 |
3 |
4 |
5 |
6 | PoseTrack18 (CVPR'2018)
7 |
8 | ```bibtex
9 | @inproceedings{andriluka2018posetrack,
10 | title={Posetrack: A benchmark for human pose estimation and tracking},
11 | author={Andriluka, Mykhaylo and Iqbal, Umar and Insafutdinov, Eldar and Pishchulin, Leonid and Milan, Anton and Gall, Juergen and Schiele, Bernt},
12 | booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition},
13 | pages={5167--5176},
14 | year={2018}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/rhd.md:
--------------------------------------------------------------------------------
1 | # Learning to Estimate 3D Hand Pose from Single RGB Images
2 |
3 |
4 |
5 |
6 | RHD (ICCV'2017)
7 |
8 | ```bibtex
9 | @TechReport{zb2017hand,
10 | author={Christian Zimmermann and Thomas Brox},
11 | title={Learning to Estimate 3D Hand Pose from Single RGB Images},
12 | institution={arXiv:1705.01389},
13 | year={2017},
14 | note="https://arxiv.org/abs/1705.01389",
15 | url="https://lmb.informatik.uni-freiburg.de/projects/hand3d/"
16 | }
17 | ```
18 |
19 |
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/wflw.md:
--------------------------------------------------------------------------------
1 | # Look at boundary: A boundary-aware face alignment algorithm
2 |
3 |
4 |
5 |
6 | WFLW (CVPR'2018)
7 |
8 | ```bibtex
9 | @inproceedings{wu2018look,
10 | title={Look at boundary: A boundary-aware face alignment algorithm},
11 | author={Wu, Wayne and Qian, Chen and Yang, Shuo and Wang, Quan and Cai, Yici and Zhou, Qiang},
12 | booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition},
13 | pages={2129--2138},
14 | year={2018}
15 | }
16 | ```
17 |
18 |
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/datasets/zebra.md:
--------------------------------------------------------------------------------
1 | # DeepPoseKit, a software toolkit for fast and robust animal pose estimation using deep learning
2 |
3 |
4 |
5 |
6 | Grévy’s Zebra (Elife'2019)
7 |
8 | ```bibtex
9 | @article{graving2019deepposekit,
10 | title={DeepPoseKit, a software toolkit for fast and robust animal pose estimation using deep learning},
11 | author={Graving, Jacob M and Chae, Daniel and Naik, Hemal and Li, Liang and Koger, Benjamin and Costelloe, Blair R and Couzin, Iain D},
12 | journal={Elife},
13 | volume={8},
14 | pages={e47994},
15 | year={2019},
16 | publisher={eLife Sciences Publications Limited}
17 | }
18 | ```
19 |
20 |
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/techniques/albumentations.md:
--------------------------------------------------------------------------------
1 | # Albumentations: fast and flexible image augmentations
2 |
3 |
4 |
5 |
6 | Albumentations (Information'2020)
7 |
8 | ```bibtex
9 | @article{buslaev2020albumentations,
10 | title={Albumentations: fast and flexible image augmentations},
11 | author={Buslaev, Alexander and Iglovikov, Vladimir I and Khvedchenya, Eugene and Parinov, Alex and Druzhinin, Mikhail and Kalinin, Alexandr A},
12 | journal={Information},
13 | volume={11},
14 | number={2},
15 | pages={125},
16 | year={2020},
17 | publisher={Multidisciplinary Digital Publishing Institute}
18 | }
19 | ```
20 |
21 |
22 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/src/papers/techniques/fp16.md:
--------------------------------------------------------------------------------
1 | # Mixed Precision Training
2 |
3 |
4 |
5 |
6 | FP16 (ArXiv'2017)
7 |
8 | ```bibtex
9 | @article{micikevicius2017mixed,
10 | title={Mixed precision training},
11 | author={Micikevicius, Paulius and Narang, Sharan and Alben, Jonah and Diamos, Gregory and Elsen, Erich and Garcia, David and Ginsburg, Boris and Houston, Michael and Kuchaiev, Oleksii and Venkatesh, Ganesh and others},
12 | journal={arXiv preprint arXiv:1710.03740},
13 | year={2017}
14 | }
15 | ```
16 |
17 |
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/_static/css/readthedocs.css:
--------------------------------------------------------------------------------
1 | .header-logo {
2 | background-image: url("../images/mmpose-logo.png");
3 | background-size: 120px 50px;
4 | height: 50px;
5 | width: 120px;
6 | }
7 |
8 | table.autosummary td {
9 | width: 35%
10 | }
11 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/_static/images/mmpose-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/mmpose_package/mmpose/docs/zh_cn/_static/images/mmpose-logo.png
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/advanced_guides/customize_datasets.md:
--------------------------------------------------------------------------------
1 | # Customize Datasets
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/advanced_guides/customize_logging.md:
--------------------------------------------------------------------------------
1 | # Customize Logging
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/advanced_guides/customize_optimizer.md:
--------------------------------------------------------------------------------
1 | # Customize Optimizer and Scheduler
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/advanced_guides/customize_transforms.md:
--------------------------------------------------------------------------------
1 | # Customize Data Transformation and Augmentation
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/advanced_guides/dataflow.md:
--------------------------------------------------------------------------------
1 | # Dataflow in MMPose
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/advanced_guides/how_to_deploy.md:
--------------------------------------------------------------------------------
1 | # How to Deploy MMPose Models
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/advanced_guides/implement_new_models.md:
--------------------------------------------------------------------------------
1 | # Implement New Models
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/dataset_zoo/2d_fashion_landmark.md:
--------------------------------------------------------------------------------
1 | # 2D Fashion Landmark Datasets
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/notes/ecosystem.md:
--------------------------------------------------------------------------------
1 | # Ecosystem
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/notes/pytorch_2.md:
--------------------------------------------------------------------------------
1 | # PyTorch 2.0 Compatibility and Benchmarks
2 |
3 | Coming soon.
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/switch_language.md:
--------------------------------------------------------------------------------
1 | ## Simplified Chinese
2 |
3 | ## English
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/user_guides/train_and_test.md:
--------------------------------------------------------------------------------
1 | # Training and Testing
2 |
3 | The Chinese version of this page is under construction; for now, please refer to the [English documentation](../../en/user_guides/train_and_test.md).
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/user_guides/useful_tools.md:
--------------------------------------------------------------------------------
1 | # Useful Tools
2 |
3 | The Chinese version of this page is under construction; for now, please refer to the [English documentation](../../en/user_guides/useful_tools.md).
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/docs/zh_cn/user_guides/visualization.md:
--------------------------------------------------------------------------------
1 | # Visualization
2 |
3 | The Chinese version of this page is under construction; for now, please refer to the [English documentation](../../en/user_guides/visualization.md).
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import mmcv
3 | import mmengine
4 | from mmengine.utils import digit_version
5 |
6 | from .version import __version__, short_version
7 |
8 | mmcv_minimum_version = '2.0.0rc4'
9 | mmcv_maximum_version = '2.1.0'
10 | mmcv_version = digit_version(mmcv.__version__)
11 |
12 | mmengine_minimum_version = '0.6.0'
13 | mmengine_maximum_version = '1.0.0'
14 | mmengine_version = digit_version(mmengine.__version__)
15 |
16 | assert (mmcv_version >= digit_version(mmcv_minimum_version)
17 | and mmcv_version <= digit_version(mmcv_maximum_version)), \
18 | f'MMCV=={mmcv.__version__} is used but incompatible. ' \
19 | f'Please install mmcv>={mmcv_minimum_version}, <={mmcv_maximum_version}.'
20 |
21 | assert (mmengine_version >= digit_version(mmengine_minimum_version)
22 | and mmengine_version <= digit_version(mmengine_maximum_version)), \
23 | f'MMEngine=={mmengine.__version__} is used but incompatible. ' \
24 | f'Please install mmengine>={mmengine_minimum_version}, ' \
25 | f'<={mmengine_maximum_version}.'
26 |
27 | __all__ = ['__version__', 'short_version']
28 |
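A quick sketch of why the guard above works: `digit_version` converts a version string into a comparable tuple, with pre-releases sorting below the corresponding final release, so the range check reduces to plain tuple comparison. A minimal sketch, assuming `mmengine` is installed:

```python
from mmengine.utils import digit_version

# Pre-release versions compare below the corresponding final release,
# so '2.0.0rc4' satisfies a '>=2.0.0rc4, <=2.1.0' requirement.
assert digit_version('2.0.0rc4') < digit_version('2.0.0')
assert digit_version('0.6.0') <= digit_version('0.7.2') <= digit_version('1.0.0')
```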
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .inference import inference_bottomup, inference_topdown, init_model
3 | from .inferencers import MMPoseInferencer, Pose2DInferencer
4 |
5 | __all__ = [
6 | 'init_model', 'inference_topdown', 'inference_bottomup',
7 | 'Pose2DInferencer', 'MMPoseInferencer'
8 | ]
9 |
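For orientation, a minimal usage sketch of the top-down API exported above. The config path is this repo's CLdetection2023 config; the checkpoint path is a placeholder for illustration only:

```python
from mmpose.apis import inference_topdown, init_model

# Build a model from a config and (placeholder) checkpoint, then run
# top-down inference on a whole image (no detector boxes supplied).
model = init_model(
    'configs/CLdetection2023/srpose_s2.py',  # model config in this repo
    'work_dirs/best_checkpoint.pth',         # placeholder checkpoint path
    device='cpu')
results = inference_topdown(model, 'demo.jpg')  # list of PoseDataSample
print(results[0].pred_instances.keypoints)      # (num_instances, K, 2) array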
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/inferencers/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .mmpose_inferencer import MMPoseInferencer
3 | from .pose2d_inferencer import Pose2DInferencer
4 | from .utils import get_model_aliases
5 |
6 | __all__ = ['Pose2DInferencer', 'MMPoseInferencer', 'get_model_aliases']
7 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/inferencers/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .default_det_models import default_det_models
3 | from .get_model_alias import get_model_aliases
4 |
5 | __all__ = ['default_det_models', 'get_model_aliases']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/inferencers/utils/default_det_models.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import os.path as osp
3 |
4 | from mmengine.config.utils import MODULE2PACKAGE
5 | from mmengine.utils import get_installed_path
6 |
7 | mmpose_path = get_installed_path(MODULE2PACKAGE['mmpose'])
8 |
9 | default_det_models = dict(
10 | human=dict(model='rtmdet-m', weights=None, cat_ids=(0, )),
11 | face=dict(
12 | model=osp.join(mmpose_path, '.mim',
13 | 'demo/mmdetection_cfg/yolox-s_8xb8-300e_coco-face.py'),
14 | weights='https://download.openmmlab.com/mmpose/mmdet_pretrained/'
15 | 'yolo-x_8xb8-300e_coco-face_13274d7c.pth',
16 | cat_ids=(0, )),
17 | hand=dict(
18 | model=osp.join(
19 | mmpose_path, '.mim', 'demo/mmdetection_cfg/'
20 | 'ssdlite_mobilenetv2_scratch_600e_onehand.py'),
21 | weights='https://download.openmmlab.com/mmpose/mmdet_pretrained/'
22 | 'ssdlite_mobilenetv2_scratch_600e_onehand-4f9f8686_20220523.pth',
23 | cat_ids=(0, )),
24 | animal=dict(
25 | model='rtmdet-m',
26 | weights=None,
27 | cat_ids=(15, 16, 17, 18, 19, 20, 21, 22, 23)),
28 | )
29 |
30 | default_det_models['body'] = default_det_models['human']
31 | default_det_models['wholebody'] = default_det_models['human']
32 |
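A short sketch of how this table is consumed: an inferencer looks up the detector spec for its task, where `model` is a model alias or config path, `weights` an optional checkpoint URL, and `cat_ids` the detector class ids to keep (0 is 'person' in COCO):

```python
from mmpose.apis.inferencers.utils import default_det_models

# 'body' and 'wholebody' are aliases of the 'human' entry, so all three
# resolve to the same rtmdet-m person detector keeping COCO class 0.
det_info = default_det_models['wholebody']
print(det_info['model'], det_info['cat_ids'])  # rtmdet-m (0,)
assert default_det_models['body'] is default_det_models['human']
```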
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .webcam_executor import WebcamExecutor
3 |
4 | __all__ = ['WebcamExecutor']
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/nodes/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .base_visualizer_node import BaseVisualizerNode
3 | from .helper_nodes import MonitorNode, ObjectAssignerNode, RecorderNode
4 | from .model_nodes import DetectorNode, TopdownPoseEstimatorNode
5 | from .node import Node
6 | from .registry import NODES
7 | from .visualizer_nodes import (BigeyeEffectNode, NoticeBoardNode,
8 | ObjectVisualizerNode, SunglassesEffectNode)
9 |
10 | __all__ = [
11 | 'BaseVisualizerNode', 'NODES', 'MonitorNode', 'ObjectAssignerNode',
12 | 'RecorderNode', 'DetectorNode', 'TopdownPoseEstimatorNode', 'Node',
13 | 'BigeyeEffectNode', 'NoticeBoardNode', 'ObjectVisualizerNode',
14 | 'SunglassesEffectNode'
15 | ]
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/nodes/helper_nodes/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .monitor_node import MonitorNode
3 | from .object_assigner_node import ObjectAssignerNode
4 | from .recorder_node import RecorderNode
5 |
6 | __all__ = ['MonitorNode', 'ObjectAssignerNode', 'RecorderNode']
7 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/nodes/model_nodes/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .detector_node import DetectorNode
3 | from .pose_estimator_node import TopdownPoseEstimatorNode
4 |
5 | __all__ = ['DetectorNode', 'TopdownPoseEstimatorNode']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/nodes/registry.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from mmengine.registry import Registry
3 |
4 | NODES = Registry('node')
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/nodes/visualizer_nodes/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .bigeye_effect_node import BigeyeEffectNode
3 | from .notice_board_node import NoticeBoardNode
4 | from .object_visualizer_node import ObjectVisualizerNode
5 | from .sunglasses_effect_node import SunglassesEffectNode
6 |
7 | __all__ = [
8 | 'ObjectVisualizerNode', 'NoticeBoardNode', 'SunglassesEffectNode',
9 | 'BigeyeEffectNode'
10 | ]
11 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .buffer import BufferManager
3 | from .event import EventManager
4 | from .image_capture import ImageCapture
5 | from .message import FrameMessage, Message, VideoEndingMessage
6 | from .misc import (copy_and_paste, expand_and_clamp, get_cached_file_path,
7 | get_config_path, is_image_file, limit_max_fps,
8 | load_image_from_disk_or_url, screen_matting)
9 | from .pose import (get_eye_keypoint_ids, get_face_keypoint_ids,
10 | get_hand_keypoint_ids, get_mouth_keypoint_ids,
11 | get_wrist_keypoint_ids)
12 |
13 | __all__ = [
14 | 'BufferManager', 'EventManager', 'FrameMessage', 'Message',
15 | 'limit_max_fps', 'VideoEndingMessage', 'load_image_from_disk_or_url',
16 | 'get_cached_file_path', 'screen_matting', 'get_config_path',
17 | 'expand_and_clamp', 'copy_and_paste', 'is_image_file', 'ImageCapture',
18 | 'get_eye_keypoint_ids', 'get_face_keypoint_ids', 'get_wrist_keypoint_ids',
19 | 'get_mouth_keypoint_ids', 'get_hand_keypoint_ids'
20 | ]
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/apis/webcam/utils/image_capture.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from typing import Union
3 |
4 | import cv2
5 | import numpy as np
6 |
7 | from .misc import load_image_from_disk_or_url
8 |
9 |
10 | class ImageCapture:
11 | """A mock-up of cv2.VideoCapture that always return a const image.
12 |
13 | Args:
14 | image (str | ndarray): The image path or image data
15 | """
16 |
17 | def __init__(self, image: Union[str, np.ndarray]):
18 | if isinstance(image, str):
19 | self.image = load_image_from_disk_or_url(image)
20 | else:
21 | self.image = image
22 |
23 | def isOpened(self):
24 | return (self.image is not None)
25 |
26 | def read(self):
27 | return True, self.image.copy()
28 |
29 | def release(self):
30 | pass
31 |
32 | def get(self, propId):
33 | if propId == cv2.CAP_PROP_FRAME_WIDTH:
34 | return self.image.shape[1]
35 | elif propId == cv2.CAP_PROP_FRAME_HEIGHT:
36 | return self.image.shape[0]
37 | elif propId == cv2.CAP_PROP_FPS:
38 | return np.nan
39 | else:
40 | raise NotImplementedError()
41 |
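A minimal usage sketch of the mock above: because it mirrors the `cv2.VideoCapture` surface, webcam-oriented code can be exercised on a single still image (the black dummy frame here is just for the demo):

```python
import cv2
import numpy as np

from mmpose.apis.webcam.utils import ImageCapture

frame = np.zeros((480, 640, 3), dtype=np.uint8)  # dummy 640x480 image
cap = ImageCapture(frame)

assert cap.isOpened()
ok, img = cap.read()                      # always True; returns a copy
assert cap.get(cv2.CAP_PROP_FRAME_WIDTH) == 640
assert cap.get(cv2.CAP_PROP_FRAME_HEIGHT) == 480
cap.release()                             # no-op, kept for API parity
```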
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/codecs/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .associative_embedding import AssociativeEmbedding
3 | from .decoupled_heatmap import DecoupledHeatmap
4 | from .integral_regression_label import IntegralRegressionLabel
5 | from .megvii_heatmap import MegviiHeatmap
6 | from .msra_heatmap import MSRAHeatmap
7 | from .regression_label import RegressionLabel
8 | from .simcc_label import SimCCLabel
9 | from .spr import SPR
10 | from .udp_heatmap import UDPHeatmap
11 |
12 | __all__ = [
13 | 'MSRAHeatmap', 'MegviiHeatmap', 'UDPHeatmap', 'RegressionLabel',
14 | 'SimCCLabel', 'IntegralRegressionLabel', 'AssociativeEmbedding', 'SPR',
15 | 'DecoupledHeatmap'
16 | ]
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/codecs/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .gaussian_heatmap import (generate_gaussian_heatmaps,
3 | generate_udp_gaussian_heatmaps,
4 | generate_unbiased_gaussian_heatmaps)
5 | from .instance_property import (get_diagonal_lengths, get_instance_bbox,
6 | get_instance_root)
7 | from .offset_heatmap import (generate_displacement_heatmap,
8 | generate_offset_heatmap)
9 | from .post_processing import (batch_heatmap_nms, gaussian_blur,
10 | gaussian_blur1d, get_heatmap_maximum,
11 | get_simcc_maximum, get_simcc_normalized)
12 | from .refinement import (refine_keypoints, refine_keypoints_dark,
13 | refine_keypoints_dark_udp, refine_simcc_dark)
14 |
15 | __all__ = [
16 | 'generate_gaussian_heatmaps', 'generate_udp_gaussian_heatmaps',
17 | 'generate_unbiased_gaussian_heatmaps', 'gaussian_blur',
18 | 'get_heatmap_maximum', 'get_simcc_maximum', 'generate_offset_heatmap',
19 | 'batch_heatmap_nms', 'refine_keypoints', 'refine_keypoints_dark',
20 | 'refine_keypoints_dark_udp', 'generate_displacement_heatmap',
21 | 'refine_simcc_dark', 'gaussian_blur1d', 'get_diagonal_lengths',
22 | 'get_instance_root', 'get_instance_bbox', 'get_simcc_normalized'
23 | ]
24 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .builder import build_dataset
3 | from .dataset_wrappers import CombinedDataset
4 | from .datasets import * # noqa
5 | from .samplers import MultiSourceSampler
6 | from .transforms import * # noqa
7 |
8 | __all__ = ['build_dataset', 'CombinedDataset', 'MultiSourceSampler']
9 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .animal import * # noqa: F401, F403
3 | from .base import * # noqa: F401, F403
4 | from .body import * # noqa: F401, F403
5 | from .face import * # noqa: F401, F403
6 | from .fashion import * # noqa: F401, F403
7 | from .hand import * # noqa: F401, F403
8 | from .head import * # noqa: F401, F403
9 | from .wholebody import * # noqa: F401, F403
10 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/animal/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .animalpose_dataset import AnimalPoseDataset
3 | from .ap10k_dataset import AP10KDataset
4 | from .atrw_dataset import ATRWDataset
5 | from .fly_dataset import FlyDataset
6 | from .horse10_dataset import Horse10Dataset
7 | from .locust_dataset import LocustDataset
8 | from .macaque_dataset import MacaqueDataset
9 | from .zebra_dataset import ZebraDataset
10 |
11 | __all__ = [
12 | 'AnimalPoseDataset', 'AP10KDataset', 'Horse10Dataset', 'MacaqueDataset',
13 | 'FlyDataset', 'LocustDataset', 'ZebraDataset', 'ATRWDataset'
14 | ]
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/base/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .base_coco_style_dataset import BaseCocoStyleDataset
3 |
4 | __all__ = ['BaseCocoStyleDataset']
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/body/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .aic_dataset import AicDataset
3 | from .coco_dataset import CocoDataset
4 | from .crowdpose_dataset import CrowdPoseDataset
5 | from .jhmdb_dataset import JhmdbDataset
6 | from .mhp_dataset import MhpDataset
7 | from .mpii_dataset import MpiiDataset
8 | from .mpii_trb_dataset import MpiiTrbDataset
9 | from .ochuman_dataset import OCHumanDataset
10 | from .posetrack18_dataset import PoseTrack18Dataset
11 | from .posetrack18_video_dataset import PoseTrack18VideoDataset
12 |
13 | __all__ = [
14 | 'CocoDataset', 'MpiiDataset', 'MpiiTrbDataset', 'AicDataset',
15 | 'CrowdPoseDataset', 'OCHumanDataset', 'MhpDataset', 'PoseTrack18Dataset',
16 | 'JhmdbDataset', 'PoseTrack18VideoDataset'
17 | ]
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/face/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .aflw_dataset import AFLWDataset
3 | from .coco_wholebody_face_dataset import CocoWholeBodyFaceDataset
4 | from .cofw_dataset import COFWDataset
5 | from .face_300w_dataset import Face300WDataset
6 | from .lapa_dataset import LapaDataset
7 | from .wflw_dataset import WFLWDataset
8 |
9 | __all__ = [
10 | 'Face300WDataset', 'WFLWDataset', 'AFLWDataset', 'COFWDataset',
11 | 'CocoWholeBodyFaceDataset', 'LapaDataset'
12 | ]
13 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/fashion/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .deepfashion2_dataset import DeepFashion2Dataset
3 | from .deepfashion_dataset import DeepFashionDataset
4 |
5 | __all__ = ['DeepFashionDataset', 'DeepFashion2Dataset']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/fashion/deepfashion2_dataset.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from mmpose.registry import DATASETS
3 | from ..base import BaseCocoStyleDataset
4 |
5 |
6 | @DATASETS.register_module(name='DeepFashion2Dataset')
7 | class DeepFashion2Dataset(BaseCocoStyleDataset):
8 | """DeepFashion2 dataset for fashion landmark detection."""
9 |
10 | METAINFO: dict = dict(from_file='configs/_base_/datasets/deepfashion2.py')
11 |
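This file is the complete recipe for adding a COCO-style dataset: register the class with `DATASETS` and point `METAINFO` at a dataset metainfo config. A sketch of the same pattern for a new dataset; `MyLandmarkDataset` and its config path are hypothetical:

```python
from mmpose.datasets.datasets.base import BaseCocoStyleDataset
from mmpose.registry import DATASETS


@DATASETS.register_module()
class MyLandmarkDataset(BaseCocoStyleDataset):
    """Hypothetical landmark dataset reusing COCO-style annotation loading."""

    # Keypoint names, skeleton, flip pairs etc. live in the metainfo config.
    METAINFO: dict = dict(from_file='configs/_base_/datasets/my_landmarks.py')
```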
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/hand/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .coco_wholebody_hand_dataset import CocoWholeBodyHandDataset
3 | from .freihand_dataset import FreiHandDataset
4 | from .onehand10k_dataset import OneHand10KDataset
5 | from .panoptic_hand2d_dataset import PanopticHand2DDataset
6 | from .rhd2d_dataset import Rhd2DDataset
7 |
8 | __all__ = [
9 | 'OneHand10KDataset', 'FreiHandDataset', 'PanopticHand2DDataset',
10 | 'Rhd2DDataset', 'CocoWholeBodyHandDataset'
11 | ]
12 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/head/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .cephalometric_dataset import CephalometricDataset
3 | from .cephalometric_dataset_ISBI2015 import CephalometricDataset_ISBI2015
4 |
5 | __all__ = ['CephalometricDataset', 'CephalometricDataset_ISBI2015']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/datasets/wholebody/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .coco_wholebody_dataset import CocoWholeBodyDataset
3 | from .halpe_dataset import HalpeDataset
4 |
5 | __all__ = ['CocoWholeBodyDataset', 'HalpeDataset']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/datasets/transforms/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .bottomup_transforms import (BottomupGetHeatmapMask, BottomupRandomAffine,
3 | BottomupResize)
4 | from .common_transforms import (Albumentation, GenerateTarget,
5 | GetBBoxCenterScale, PhotometricDistortion,
6 | RandomBBoxTransform, RandomFlip,
7 | RandomHalfBody)
8 | from .converting import KeypointConverter
9 | from .formatting import PackPoseInputs
10 | from .loading import LoadImage
11 | from .topdown_transforms import TopdownAffine
12 |
13 | __all__ = [
14 | 'GetBBoxCenterScale', 'RandomBBoxTransform', 'RandomFlip',
15 | 'RandomHalfBody', 'TopdownAffine', 'Albumentation',
16 | 'PhotometricDistortion', 'PackPoseInputs', 'LoadImage',
17 | 'BottomupGetHeatmapMask', 'BottomupRandomAffine', 'BottomupResize',
18 | 'GenerateTarget', 'KeypointConverter'
19 | ]
20 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/engine/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .hooks import * # noqa: F401, F403
3 | from .optim_wrappers import * # noqa: F401, F403
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/engine/hooks/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .ema_hook import ExpMomentumEMA
3 | from .visualization_hook import PoseVisualizationHook
4 |
5 | __all__ = ['PoseVisualizationHook', 'ExpMomentumEMA']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/engine/optim_wrappers/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .layer_decay_optim_wrapper import LayerDecayOptimWrapperConstructor
3 |
4 | __all__ = ['LayerDecayOptimWrapperConstructor']
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/evaluation/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .functional import * # noqa: F401,F403
3 | from .metrics import * # noqa: F401,F403
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/evaluation/functional/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .keypoint_eval import (keypoint_auc, keypoint_epe, keypoint_nme,
3 | keypoint_pck_accuracy,
4 | multilabel_classification_accuracy,
5 | pose_pck_accuracy, simcc_pck_accuracy)
6 | from .nms import nms, oks_nms, soft_oks_nms
7 |
8 | __all__ = [
9 | 'keypoint_pck_accuracy', 'keypoint_auc', 'keypoint_nme', 'keypoint_epe',
10 | 'pose_pck_accuracy', 'multilabel_classification_accuracy',
11 | 'simcc_pck_accuracy', 'nms', 'oks_nms', 'soft_oks_nms'
12 | ]
13 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/evaluation/metrics/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .cephalometric_metric import CephalometricMetric
3 | from .coco_metric import CocoMetric
4 | from .coco_wholebody_metric import CocoWholeBodyMetric
5 | from .keypoint_2d_metrics import (AUC, EPE, NME, JhmdbPCKAccuracy,
6 | MpiiPCKAccuracy, PCKAccuracy)
7 | from .keypoint_partition_metric import KeypointPartitionMetric
8 | from .posetrack18_metric import PoseTrack18Metric
9 |
10 | __all__ = [
11 | 'CocoMetric', 'PCKAccuracy', 'MpiiPCKAccuracy', 'JhmdbPCKAccuracy', 'AUC',
12 | 'EPE', 'NME', 'PoseTrack18Metric', 'CocoWholeBodyMetric',
13 | 'KeypointPartitionMetric', 'CephalometricMetric'
14 | ]
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .backbones import * # noqa
3 | from .builder import (BACKBONES, HEADS, LOSSES, NECKS, build_backbone,
4 | build_head, build_loss, build_neck, build_pose_estimator,
5 | build_posenet)
6 | from .data_preprocessors import * # noqa
7 | from .heads import * # noqa
8 | from .losses import * # noqa
9 | from .necks import * # noqa
10 | from .pose_estimators import * # noqa
11 |
12 | __all__ = [
13 | 'BACKBONES', 'HEADS', 'NECKS', 'LOSSES', 'build_backbone', 'build_head',
14 | 'build_loss', 'build_posenet', 'build_neck', 'build_pose_estimator'
15 | ]
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/backbones/base_backbone.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from abc import ABCMeta, abstractmethod
3 |
4 | from mmengine.model import BaseModule
5 |
6 |
7 | class BaseBackbone(BaseModule, metaclass=ABCMeta):
8 | """Base backbone.
9 |
10 | This class defines the basic functions of a backbone. Any backbone that
11 | inherits this class should at least define its own `forward` function.
12 | """
13 |
14 | @abstractmethod
15 | def forward(self, x):
16 | """Forward function.
17 |
18 | Args:
19 | x (Tensor | tuple[Tensor]): x could be a torch.Tensor or a tuple of
20 | torch.Tensor, containing input data for forward computation.
21 | """
22 |
23 | def train(self, mode=True):
24 | """Set module status before forward computation.
25 |
26 | Args:
27 | mode (bool): Whether it is train_mode or test_mode
28 | """
29 | super().train(mode)
30 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/backbones/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .channel_shuffle import channel_shuffle
3 | from .inverted_residual import InvertedResidual
4 | from .make_divisible import make_divisible
5 | from .se_layer import SELayer
6 | from .utils import get_state_dict, load_checkpoint
7 |
8 | __all__ = [
9 | 'channel_shuffle', 'make_divisible', 'InvertedResidual', 'SELayer',
10 | 'load_checkpoint', 'get_state_dict'
11 | ]
12 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/backbones/utils/channel_shuffle.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import torch
3 |
4 |
5 | def channel_shuffle(x, groups):
6 | """Channel Shuffle operation.
7 |
8 | This function enables cross-group information flow for multiple groups
9 | convolution layers.
10 |
11 | Args:
12 | x (Tensor): The input tensor.
13 | groups (int): The number of groups to divide the input tensor
14 | in the channel dimension.
15 |
16 | Returns:
17 | Tensor: The output tensor after channel shuffle operation.
18 | """
19 |
20 | batch_size, num_channels, height, width = x.size()
21 | assert (num_channels % groups == 0), ('num_channels should be '
22 | 'divisible by groups')
23 | channels_per_group = num_channels // groups
24 |
25 | x = x.view(batch_size, groups, channels_per_group, height, width)
26 | x = torch.transpose(x, 1, 2).contiguous()
27 | x = x.view(batch_size, groups * channels_per_group, height, width)
28 |
29 | return x
30 |
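A tiny demonstration of the interleaving, using the helper as exported from `mmpose.models.backbones.utils`: with 2 groups over 4 channels, the channel order [0, 1, 2, 3] becomes [0, 2, 1, 3]:

```python
import torch

from mmpose.models.backbones.utils import channel_shuffle

# Channels [0, 1, 2, 3] split into groups [0, 1] and [2, 3]; after the
# transpose the groups are interleaved as [0, 2, 1, 3].
x = torch.arange(4, dtype=torch.float32).view(1, 4, 1, 1)
y = channel_shuffle(x, groups=2)
print(y.flatten().tolist())  # [0.0, 2.0, 1.0, 3.0]
```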
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/backbones/utils/make_divisible.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | def make_divisible(value, divisor, min_value=None, min_ratio=0.9):
3 | """Make divisible function.
4 |
5 | This function rounds the channel number to the nearest value that is
6 | divisible by the divisor.
7 |
8 | Args:
9 | value (int): The original channel number.
10 | divisor (int): The divisor to fully divide the channel number.
11 | min_value (int, optional): The minimum value of the output channel.
12 | Default: None, which means the minimum value equals the divisor.
13 | min_ratio (float, optional): The minimum ratio of the rounded channel
14 | number to the original channel number. Default: 0.9.
15 | Returns:
16 | int: The modified output channel number
17 | """
18 |
19 | if min_value is None:
20 | min_value = divisor
21 | new_value = max(min_value, int(value + divisor / 2) // divisor * divisor)
22 | # Make sure rounding does not shrink the value below min_ratio * value.
23 | if new_value < min_ratio * value:
24 | new_value += divisor
25 | return new_value
26 |
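Two worked examples of the arithmetic above: a plain round-to-nearest, and a case where the `min_ratio` guard adds the divisor back:

```python
from mmpose.models.backbones.utils import make_divisible

# int(30 + 8 / 2) // 8 * 8 = 34 // 8 * 8 = 32: nearest multiple of 8.
assert make_divisible(30, 8) == 32
# 10 rounds to 8, but 8 < 0.9 * 10, so one divisor is added back.
assert make_divisible(10, 8) == 16
```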
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/builder.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import warnings
3 |
4 | from mmpose.registry import MODELS
5 |
6 | BACKBONES = MODELS
7 | NECKS = MODELS
8 | HEADS = MODELS
9 | LOSSES = MODELS
10 | POSE_ESTIMATORS = MODELS
11 |
12 |
13 | def build_backbone(cfg):
14 | """Build backbone."""
15 | return BACKBONES.build(cfg)
16 |
17 |
18 | def build_neck(cfg):
19 | """Build neck."""
20 | return NECKS.build(cfg)
21 |
22 |
23 | def build_head(cfg):
24 | """Build head."""
25 | return HEADS.build(cfg)
26 |
27 |
28 | def build_loss(cfg):
29 | """Build loss."""
30 | return LOSSES.build(cfg)
31 |
32 |
33 | def build_pose_estimator(cfg):
34 | """Build pose estimator."""
35 | return POSE_ESTIMATORS.build(cfg)
36 |
37 |
38 | def build_posenet(cfg):
39 | """Build posenet."""
40 | warnings.warn(
41 | '``build_posenet`` will be deprecated soon, '
42 | 'please use ``build_pose_estimator`` instead.', DeprecationWarning)
43 | return build_pose_estimator(cfg)
44 |
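All of these builders route to the shared `MODELS` registry: a config dict's `type` key selects the registered class and the remaining keys become constructor arguments. A minimal sketch, assuming mmpose is installed so the registries can be populated:

```python
from mmpose.models import build_loss
from mmpose.utils import register_all_modules

register_all_modules()  # import modules so they register themselves

# 'type' picks the registered class; other keys go to its constructor.
loss = build_loss(dict(type='KeypointMSELoss', use_target_weight=True))
print(type(loss).__name__)  # KeypointMSELoss
```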
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/data_preprocessors/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .data_preprocessor import PoseDataPreprocessor
3 |
4 | __all__ = ['PoseDataPreprocessor']
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/data_preprocessors/data_preprocessor.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from mmengine.model import ImgDataPreprocessor
3 |
4 | from mmpose.registry import MODELS
5 |
6 |
7 | @MODELS.register_module()
8 | class PoseDataPreprocessor(ImgDataPreprocessor):
9 | """Image pre-processor for pose estimation tasks."""
10 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/heads/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .base_head import BaseHead
3 | from .coord_cls_heads import RTMCCHead, SimCCHead
4 | from .heatmap_heads import (AssociativeEmbeddingHead, CIDHead, CPMHead,
5 | HeatmapHead, MSPNHead, ViPNASHead)
6 | from .hybrid_heads import DEKRHead
7 | from .regression_heads import (DSNTHead, IntegralRegressionHead,
8 | RegressionHead, RLEHead)
9 |
10 | __all__ = [
11 | 'BaseHead', 'HeatmapHead', 'CPMHead', 'MSPNHead', 'ViPNASHead',
12 | 'RegressionHead', 'IntegralRegressionHead', 'SimCCHead', 'RLEHead',
13 | 'DSNTHead', 'AssociativeEmbeddingHead', 'DEKRHead', 'CIDHead', 'RTMCCHead'
14 | ]
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/heads/coord_cls_heads/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .rtmcc_head import RTMCCHead
3 | from .simcc_head import SimCCHead
4 |
5 | __all__ = ['SimCCHead', 'RTMCCHead']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/heads/heatmap_heads/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .ae_head import AssociativeEmbeddingHead
3 | from .cid_head import CIDHead
4 | from .cpm_head import CPMHead
5 | from .heatmap_head import HeatmapHead, HeatmapHead_withSigmoid
6 | from .mspn_head import MSPNHead
7 | from .srpose_head import SRPoseHead
8 | from .vipnas_head import ViPNASHead
9 |
10 | __all__ = [
11 | 'HeatmapHead', 'CPMHead', 'MSPNHead', 'ViPNASHead',
12 | 'AssociativeEmbeddingHead', 'CIDHead', 'HeatmapHead_withSigmoid',
13 | 'SRPoseHead'
14 | ]
15 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/heads/hybrid_heads/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .dekr_head import DEKRHead
3 |
4 | __all__ = [
5 | 'DEKRHead',
6 | ]
7 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/heads/regression_heads/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .dsnt_head import DSNTHead
3 | from .integral_regression_head import IntegralRegressionHead
4 | from .regression_head import RegressionHead
5 | from .rle_head import RLEHead
6 |
7 | __all__ = [
8 | 'RegressionHead',
9 | 'IntegralRegressionHead',
10 | 'DSNTHead',
11 | 'RLEHead',
12 | ]
13 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/losses/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .ae_loss import AssociativeEmbeddingLoss
3 | from .classification_loss import BCELoss, JSDiscretLoss, KLDiscretLoss
4 | from .heatmap_loss import (AdaptiveWingLoss, FocalHeatmapLoss_5k,
5 | JointsMSELoss, KeypointL1Loss, KeypointMSELoss,
6 | KeypointOHKMMSELoss)
7 | from .loss_wrappers import (CombinedLoss, MultipleLossWrapper,
8 | MultipleLossWrapper_5k)
9 | from .regression_loss import (BoneLoss, L1Loss, MPJPELoss, MSELoss, RLELoss,
10 | SemiSupervisionLoss, SmoothL1Loss,
11 | SoftWeightSmoothL1Loss, SoftWingLoss, WingLoss)
12 |
13 | __all__ = [
14 | 'KeypointMSELoss', 'KeypointL1Loss', 'KeypointOHKMMSELoss', 'SmoothL1Loss',
15 | 'WingLoss', 'FocalHeatmapLoss_5k', 'MPJPELoss', 'MSELoss', 'L1Loss',
16 | 'BCELoss', 'BoneLoss', 'SemiSupervisionLoss', 'SoftWingLoss',
17 | 'AdaptiveWingLoss', 'RLELoss', 'JointsMSELoss', 'KLDiscretLoss',
18 | 'MultipleLossWrapper', 'JSDiscretLoss', 'CombinedLoss',
19 | 'AssociativeEmbeddingLoss', 'SoftWeightSmoothL1Loss',
20 | 'MultipleLossWrapper_5k'
21 | ]
22 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/necks/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .fmap_proc_neck import FeatureMapProcessor
3 | from .fpn import FPN
4 | from .gap_neck import GlobalAveragePooling
5 | from .posewarper_neck import PoseWarperNeck
6 |
7 | __all__ = [
8 | 'GlobalAveragePooling', 'PoseWarperNeck', 'FPN', 'FeatureMapProcessor'
9 | ]
10 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/necks/gap_neck.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import torch
3 | import torch.nn as nn
4 |
5 | from mmpose.registry import MODELS
6 |
7 |
8 | @MODELS.register_module()
9 | class GlobalAveragePooling(nn.Module):
10 | """Global Average Pooling neck.
11 |
12 | Note that we use `view` to flatten the extra spatial dimensions after pooling. We do not
13 | use `squeeze` as it will also remove the batch dimension when the tensor
14 | has a batch dimension of size 1, which can lead to unexpected errors.
15 | """
16 |
17 | def __init__(self):
18 | super().__init__()
19 | self.gap = nn.AdaptiveAvgPool2d((1, 1))
20 |
21 | def init_weights(self):
22 | pass
23 |
24 | def forward(self, inputs):
25 | """Forward function."""
26 |
27 | if isinstance(inputs, tuple):
28 | outs = tuple([self.gap(x) for x in inputs])
29 | outs = tuple(
30 | [out.view(x.size(0), -1) for out, x in zip(outs, inputs)])
31 | elif isinstance(inputs, list):
32 | outs = [self.gap(x) for x in inputs]
33 | outs = [out.view(x.size(0), -1) for out, x in zip(outs, inputs)]
34 | elif isinstance(inputs, torch.Tensor):
35 | outs = self.gap(inputs)
36 | outs = outs.view(inputs.size(0), -1)
37 | else:
38 | raise TypeError('neck inputs should be tuple, list or torch.Tensor')
39 | return outs
40 |
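The `view`-vs-`squeeze` point in the docstring is easy to demonstrate: at batch size 1, `squeeze` also drops the batch dimension, while `view` keeps it:

```python
import torch

pooled = torch.zeros(1, 256, 1, 1)  # AdaptiveAvgPool2d((1, 1)) output, batch=1
print(pooled.squeeze().shape)                 # torch.Size([256]) - batch lost
print(pooled.view(pooled.size(0), -1).shape)  # torch.Size([1, 256]) - batch kept
```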
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/pose_estimators/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .bottomup import BottomupPoseEstimator
3 | from .topdown import TopdownPoseEstimator
4 |
5 | __all__ = ['TopdownPoseEstimator', 'BottomupPoseEstimator']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/models/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .check_and_update_config import check_and_update_config
3 | from .ckpt_convert import pvt_convert
4 | from .rtmcc_block import RTMCCBlock, rope
5 | from .transformer import PatchEmbed, nchw_to_nlc, nlc_to_nchw
6 |
7 | __all__ = [
8 | 'PatchEmbed', 'nchw_to_nlc', 'nlc_to_nchw', 'pvt_convert', 'RTMCCBlock',
9 | 'rope', 'check_and_update_config'
10 | ]
11 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/structures/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .bbox import (bbox_cs2xywh, bbox_cs2xyxy, bbox_xywh2cs, bbox_xywh2xyxy,
3 | bbox_xyxy2cs, bbox_xyxy2xywh, flip_bbox,
4 | get_udp_warp_matrix, get_warp_matrix)
5 | from .keypoint import flip_keypoints
6 | from .multilevel_pixel_data import MultilevelPixelData
7 | from .pose_data_sample import PoseDataSample
8 | from .utils import merge_data_samples, revert_heatmap, split_instances
9 |
10 | __all__ = [
11 | 'PoseDataSample', 'MultilevelPixelData', 'bbox_cs2xywh', 'bbox_cs2xyxy',
12 | 'bbox_xywh2cs', 'bbox_xywh2xyxy', 'bbox_xyxy2cs', 'bbox_xyxy2xywh',
13 | 'flip_bbox', 'get_udp_warp_matrix', 'get_warp_matrix', 'flip_keypoints',
14 | 'merge_data_samples', 'revert_heatmap', 'split_instances'
15 | ]
16 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/structures/bbox/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .transforms import (bbox_cs2xywh, bbox_cs2xyxy, bbox_xywh2cs,
3 | bbox_xywh2xyxy, bbox_xyxy2cs, bbox_xyxy2xywh,
4 | flip_bbox, get_udp_warp_matrix, get_warp_matrix)
5 |
6 | __all__ = [
7 | 'bbox_cs2xywh', 'bbox_cs2xyxy', 'bbox_xywh2cs', 'bbox_xywh2xyxy',
8 | 'bbox_xyxy2cs', 'bbox_xyxy2xywh', 'flip_bbox', 'get_udp_warp_matrix',
9 | 'get_warp_matrix'
10 | ]
11 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/structures/keypoint/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 |
3 | from .transforms import flip_keypoints
4 |
5 | __all__ = ['flip_keypoints']
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/testing/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from ._utils import (get_coco_sample, get_config_file, get_packed_inputs,
3 | get_pose_estimator_cfg, get_repo_dir)
4 |
5 | __all__ = [
6 | 'get_packed_inputs', 'get_coco_sample', 'get_config_file',
7 | 'get_pose_estimator_cfg', 'get_repo_dir'
8 | ]
9 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .camera import SimpleCamera, SimpleCameraTorch
3 | from .collect_env import collect_env
4 | from .config_utils import adapt_mmdet_pipeline
5 | from .logger import get_root_logger
6 | from .setup_env import register_all_modules, setup_multi_processes
7 | from .timer import StopWatch
8 |
9 | __all__ = [
10 | 'get_root_logger', 'collect_env', 'StopWatch', 'setup_multi_processes',
11 | 'register_all_modules', 'SimpleCamera', 'SimpleCameraTorch',
12 | 'adapt_mmdet_pipeline'
13 | ]
14 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/utils/collect_env.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from mmengine.utils import get_git_hash
3 | from mmengine.utils.dl_utils import collect_env as collect_base_env
4 |
5 | import mmpose
6 |
7 |
8 | def collect_env():
9 | env_info = collect_base_env()
10 | env_info['MMPose'] = (mmpose.__version__ + '+' + get_git_hash(digits=7))
11 | return env_info
12 |
13 |
14 | if __name__ == '__main__':
15 | for name, val in collect_env().items():
16 | print(f'{name}: {val}')
17 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/utils/config_utils.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from mmpose.utils.typing import ConfigDict
3 |
4 |
5 | def adapt_mmdet_pipeline(cfg: ConfigDict) -> ConfigDict:
6 | """Converts pipeline types in MMDetection's test dataloader to use the
7 | 'mmdet' namespace.
8 |
9 | Args:
10 | cfg (ConfigDict): Configuration dictionary for MMDetection.
11 |
12 | Returns:
13 | ConfigDict: Configuration dictionary with updated pipeline types.
14 | """
15 | # use lazy import to avoid hard dependence on mmdet
16 | from mmdet.datasets import transforms
17 |
18 | if 'test_dataloader' not in cfg:
19 | return cfg
20 |
21 | pipeline = cfg.test_dataloader.dataset.pipeline
22 | for trans in pipeline:
23 | if trans['type'] in dir(transforms):
24 | trans['type'] = 'mmdet.' + trans['type']
25 |
26 | return cfg
27 |
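A small usage sketch, assuming mmdet is installed (the function lazy-imports it); the config path is illustrative:

from mmengine.config import Config
from mmpose.utils import adapt_mmdet_pipeline

det_cfg = Config.fromfile('path/to/mmdet_config.py')  # hypothetical config
det_cfg = adapt_mmdet_pipeline(det_cfg)
# e.g. dict(type='LoadImageFromFile') in the test pipeline becomes
# dict(type='mmdet.LoadImageFromFile'), so the correct registry scope is used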
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/utils/logger.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import logging
3 |
4 | from mmengine.logging import MMLogger
5 |
6 |
7 | def get_root_logger(log_file=None, log_level=logging.INFO):
8 | """Use `MMLogger` class in mmengine to get the root logger.
9 |
10 | The logger will be initialized if it has not been initialized. By default a
11 | StreamHandler will be added. If `log_file` is specified, a FileHandler will
12 | also be added. The name of the root logger is the top-level package name,
13 | e.g., "mmpose".
14 |
15 | Args:
16 | log_file (str | None): The log filename. If specified, a FileHandler
17 | will be added to the root logger.
18 | log_level (int): The root logger level. Note that only the process of
19 | rank 0 is affected, while other processes will set the level to
20 | "Error" and be silent most of the time.
21 |
22 | Returns:
23 | logging.Logger: The root logger.
24 | """
25 | return MMLogger('MMLogger', __name__.split('.')[0], log_file, log_level)
26 |
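For reference, a minimal call; the log file name here is illustrative:

import logging
from mmpose.utils import get_root_logger

logger = get_root_logger(log_file='run.log', log_level=logging.INFO)
logger.info('written to stdout and run.log')  # root logger name is "mmpose"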
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/utils/typing.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from typing import Dict, List, Optional, Tuple, Union
3 |
4 | from mmengine.config import ConfigDict
5 | from mmengine.structures import InstanceData, PixelData
6 | from torch import Tensor
7 |
8 | from mmpose.structures import PoseDataSample
9 |
10 | # Type hint of config data
11 | ConfigType = Union[ConfigDict, dict]
12 | OptConfigType = Optional[ConfigType]
13 | # Type hint of one or more config data
14 | MultiConfig = Union[ConfigType, List[ConfigType]]
15 | OptMultiConfig = Optional[MultiConfig]
16 | # Type hint of data samples
17 | SampleList = List[PoseDataSample]
18 | OptSampleList = Optional[SampleList]
19 | InstanceList = List[InstanceData]
20 | PixelDataList = List[PixelData]
21 | Predictions = Union[InstanceList, Tuple[InstanceList, PixelDataList]]
22 | # Type hint of model outputs
23 | ForwardResults = Union[Dict[str, Tensor], List[PoseDataSample], Tuple[Tensor],
24 | Tensor]
25 | # Type hint of features
26 | # - Tuple[Tensor]: multi-level features extracted by the network
27 | # - List[Tuple[Tensor]]: multiple feature pyramids for TTA
28 | # - List[List[Tuple[Tensor]]]: multi-scale feature pyramids
29 | Features = Union[Tuple[Tensor], List[Tuple[Tensor]], List[List[Tuple[Tensor]]]]
30 |
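A short illustration (not from the repository) of how these aliases are meant to appear in signatures:

from mmpose.utils.typing import ConfigType, OptSampleList


def demo_forward(cfg: ConfigType, data_samples: OptSampleList = None) -> None:
    """Hypothetical function annotated with the aliases above."""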
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/version.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Open-MMLab. All rights reserved.
2 |
3 | __version__ = '1.0.0'
4 | short_version = __version__
5 |
6 |
7 | def parse_version_info(version_str):
8 | """Parse a version string into a tuple.
9 |
10 | Args:
11 | version_str (str): The version string.
12 | Returns:
13 | tuple[int | str]: The version info, e.g., "1.3.0" is parsed into
14 | (1, 3, 0), and "2.0.0rc1" is parsed into (2, 0, 0, 'rc1').
15 | """
16 | version_info = []
17 | for x in version_str.split('.'):
18 | if x.isdigit():
19 | version_info.append(int(x))
20 | elif x.find('rc') != -1:
21 | patch_version = x.split('rc')
22 | version_info.append(int(patch_version[0]))
23 | version_info.append(f'rc{patch_version[1]}')
24 | elif x.find('b') != -1:
25 | patch_version = x.split('b')
26 | version_info.append(int(patch_version[0]))
27 | version_info.append(f'b{patch_version[1]}')
28 | return tuple(version_info)
29 |
30 |
31 | version_info = parse_version_info(__version__)
32 |
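The parsing behaves as the docstring describes, for example:

from mmpose.version import parse_version_info

assert parse_version_info('1.3.0') == (1, 3, 0)
assert parse_version_info('2.0.0rc1') == (2, 0, 0, 'rc1')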
--------------------------------------------------------------------------------
/mmpose_package/mmpose/mmpose/visualization/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from .local_visualizer import PoseLocalVisualizer
3 |
4 | __all__ = ['PoseLocalVisualizer']
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/example_project/configs/example-head-loss_hrnet-w32_8xb64-210e_coco-256x192.py:
--------------------------------------------------------------------------------
1 | # Directly inherit the entire recipe you want to use.
2 | _base_ = 'mmpose::body_2d_keypoint/topdown_heatmap/coco/' \
3 | 'td-hm_hrnet-w32_8xb64-210e_coco-256x192.py'
4 |
5 | # This line is to import your own modules.
6 | custom_imports = dict(imports='models')
7 |
8 | # Modify the model to use your own head and loss.
9 | _base_['model']['head'] = dict(
10 | type='ExampleHead',
11 | in_channels=32,
12 | out_channels=17,
13 | deconv_out_channels=None,
14 | loss=dict(type='ExampleLoss', use_target_weight=True),
15 | decoder=_base_['codec'])
16 |
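For orientation, a minimal sketch of how such a custom loss can be registered so the config can reference it by type name. This is a hypothetical stand-in, not the repository's actual `ExampleLoss` (which lives in `models/example_loss.py`):

import torch.nn as nn
import torch.nn.functional as F

from mmpose.registry import MODELS


@MODELS.register_module()
class ExampleLossSketch(nn.Module):
    """Hypothetical MSE-style loss over (B, K, H, W) heatmaps."""

    def __init__(self, use_target_weight=True):
        super().__init__()
        self.use_target_weight = use_target_weight

    def forward(self, output, target, target_weight=None):
        # per-keypoint loss, averaged over the spatial dims: shape (B, K)
        loss = F.mse_loss(output, target, reduction='none').mean(dim=(2, 3))
        if self.use_target_weight and target_weight is not None:
            loss = loss * target_weight.squeeze(-1)
        return loss.mean()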
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/example_project/models/__init__.py:
--------------------------------------------------------------------------------
1 | from .example_head import ExampleHead
2 | from .example_loss import ExampleLoss
3 |
4 | __all__ = ['ExampleHead', 'ExampleLoss']
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/mmpose4aigc/download_models.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | # Create models folder
5 | mkdir -p models
6 |
7 | # Go to models folder
8 | cd models
9 |
10 | # Download det model
11 | wget https://download.openmmlab.com/mmpose/v1/projects/rtmpose/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth
12 |
13 | # Download pose model
14 | wget https://download.openmmlab.com/mmpose/v1/projects/rtmposev1/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth
15 |
16 | # Go back to the mmpose4aigc folder
17 | cd ..
18 |
19 | # Success
20 | echo "Download completed."
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/mmpose4aigc/install_posetracker_linux.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | # Download pre-compiled files
5 | wget https://github.com/open-mmlab/mmdeploy/releases/download/v1.0.0/mmdeploy-1.0.0-linux-x86_64-cxx11abi.tar.gz
6 |
7 | # Unzip files
8 | tar -xzvf mmdeploy-1.0.0-linux-x86_64-cxx11abi.tar.gz
9 |
10 | # Go to the sdk folder
11 | cd mmdeploy-1.0.0-linux-x86_64-cxx11abi
12 |
13 | # Init environment
14 | source set_env.sh
15 |
16 | # If opencv 3+ is not installed on your system, execute the following command.
17 | # If it is installed, skip this command
18 | bash install_opencv.sh
19 |
20 | # Compile executable programs
21 | bash build_sdk.sh
22 |
23 | # Download models
24 | wget https://download.openmmlab.com/mmpose/v1/projects/rtmpose/rtmpose-cpu.zip
25 |
26 | # Unzip files
27 | unzip rtmpose-cpu.zip
28 |
29 | # Success
30 | echo "Installation completed."
31 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/mmpose4aigc/mmpose_openpose.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | INPUT_IMAGE=$1
5 |
6 | python openpose_visualization.py \
7 | ../rtmpose/rtmdet/person/rtmdet_nano_320-8xb32_coco-person.py \
8 | models/rtmdet_nano_8xb32-100e_coco-obj365-person-05d8511e.pth \
9 | ../rtmpose/rtmpose/body_2d_keypoint/rtmpose-m_8xb256-420e_coco-256x192.py \
10 | models/rtmpose-m_simcc-aic-coco_pt-aic-coco_420e-256x192-63eb25f7_20230126.pth \
11 | --input $INPUT_IMAGE \
12 |     --device cuda:0
13 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/mmpose4aigc/mmpose_style_skeleton.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | WORKSPACE=mmdeploy-1.0.0-linux-x86_64-cxx11abi
5 | export LD_LIBRARY_PATH=${WORKSPACE}/lib:${WORKSPACE}/thirdparty/onnxruntime/lib:$LD_LIBRARY_PATH
6 |
7 | INPUT_IMAGE=$1
8 |
9 | ${WORKSPACE}/bin/pose_tracker \
10 | ${WORKSPACE}/rtmpose-ort/rtmdet-nano \
11 | ${WORKSPACE}/rtmpose-ort/rtmpose-m \
12 | $INPUT_IMAGE \
13 | --background black \
14 | --skeleton ${WORKSPACE}/rtmpose-ort/t2i-adapter_skeleton.txt \
15 | --output ./skeleton_res.jpg \
16 | --pose_kpt_thr 0.4 \
17 | --show -1
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/README.md:
--------------------------------------------------------------------------------
1 | ## List of examples
2 |
3 | ### 1. RTMPose-Deploy (without MMDeploy)
4 |
5 | RTMPose-Deploy is a C++ code example for local RTMPose deployment that does not use MMDeploy.
6 |
7 | - [Original Repository](https://github.com/HW140701/RTMPose-Deploy)
8 |
9 | ### 2. RTMPose inference with ONNXRuntime (Python)
10 |
11 | This example shows how to run RTMPose inference with ONNXRuntime in Python.
12 |
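A bare-bones sketch of such a call, assuming an exported RTMPose ONNX model with a 256x192 input; the model file name is illustrative, and the full pre/post-processing (affine crop, normalization, SimCC decoding) lives in the example scripts:

import cv2
import numpy as np
import onnxruntime as ort

sess = ort.InferenceSession('rtmpose-m.onnx',  # hypothetical export
                            providers=['CPUExecutionProvider'])
img = cv2.imread('human-pose.jpeg')
inp = cv2.resize(img, (192, 256)).transpose(2, 0, 1)[None].astype(np.float32)
outputs = sess.run(None, {sess.get_inputs()[0].name: inp})
# RTMPose's SimCC head produces per-keypoint x and y classification vectors
simcc_x, simcc_y = outputs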
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/RTMPose-Deploy/README.md:
--------------------------------------------------------------------------------
1 | # RTMPose-Deploy
2 |
3 | [Chinese version](./README_CN.md)
4 |
5 | RTMPose-Deploy is a C++ code example for local RTMPose deployment that does **NOT** use MMDeploy.
6 |
7 | At present, RTMPose-Deploy supports deploying RTMDet-nano and RTMPose on Windows. This example only contains the source code. For a complete project example, please refer to [https://github.com/HW140701/RTMPose-Deploy](https://github.com/HW140701/RTMPose-Deploy), which provides a full VS2019 solution and release package.
8 |
9 | Deploying RTMDet-nano and RTMPose on Windows with the C++ TensorRT SDK may be added in the future.
10 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/RTMPose-Deploy/README_CN.md:
--------------------------------------------------------------------------------
1 | # RTMPose-Deploy
2 |
3 | RTMPose-Deploy is a C++ code example for local RTMPose deployment that does **not** use mmdeploy.
4 |
5 | At present, RTMPose-Deploy has deployed RTMDet-nano and RTMPose on Windows using the OnnxRuntime CPU backend. This example only contains the source code; for a complete project example, see [https://github.com/HW140701/RTMPose-Deploy](https://github.com/HW140701/RTMPose-Deploy), which provides a full VS2019 solution and release package.
6 |
7 | Deploying RTMDet-nano and RTMPose on Windows with the C++ TensorRT SDK may be added in the future.
8 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/RTMPose-Deploy/Windows/OnnxRumtime-CPU/src/RTMPoseOnnxRuntime/rtmdet_onnxruntime.h:
--------------------------------------------------------------------------------
1 | #ifndef _RTM_DET_ONNX_RUNTIME_H_
2 | #define _RTM_DET_ONNX_RUNTIME_H_
3 |
4 | #include <string>
5 |
6 | #include "opencv2/opencv.hpp"
7 |
8 | #include "onnxruntime_cxx_api.h"
9 | #include "cpu_provider_factory.h"
10 | #include "rtmpose_utils.h"
11 |
12 |
13 | class RTMDetOnnxruntime
14 | {
15 | public:
16 | RTMDetOnnxruntime() = delete;
17 | RTMDetOnnxruntime(const std::string& onnx_model_path);
18 | 	virtual ~RTMDetOnnxruntime();
19 |
20 | public:
21 | DetectBox Inference(const cv::Mat& input_mat);
22 |
23 | private:
24 | void PrintModelInfo(Ort::Session& session);
25 |
26 | private:
27 | Ort::Env m_env;
28 | Ort::Session m_session;
29 |
30 | };
31 |
32 | #endif // !_RTM_DET_ONNX_RUNTIME_H_
33 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/RTMPose-Deploy/Windows/OnnxRumtime-CPU/src/RTMPoseOnnxRuntime/rtmpose_onnxruntime.h:
--------------------------------------------------------------------------------
1 | #ifndef _RTM_POSE_ONNXRUNTIME_H_
2 | #define _RTM_POSE_ONNXRUNTIME_H_
3 |
4 | #include <string>
5 |
6 | #include "onnxruntime_cxx_api.h"
7 | #include "cpu_provider_factory.h"
8 | #include "opencv2/opencv.hpp"
9 |
10 | #include "rtmdet_onnxruntime.h"
11 | #include "rtmpose_utils.h"
12 |
13 | class RTMPoseOnnxruntime
14 | {
15 | public:
16 | RTMPoseOnnxruntime() = delete;
17 | RTMPoseOnnxruntime(const std::string& onnx_model_path);
18 | 	virtual ~RTMPoseOnnxruntime();
19 |
20 | public:
21 | 	std::vector<PosePoint> Inference(const cv::Mat& input_mat, const DetectBox& box);
22 |
23 | private:
24 | 	std::pair<cv::Mat, cv::Point2f> CropImageByDetectBox(const cv::Mat& input_image, const DetectBox& box);
25 |
26 | private:
27 | void PrintModelInfo(Ort::Session& session);
28 |
29 | private:
30 | Ort::Env m_env;
31 | Ort::Session m_session;
32 | };
33 |
34 | #endif // !_RTM_POSE_ONNXRUNTIME_H_
35 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/RTMPose-Deploy/Windows/OnnxRumtime-CPU/src/RTMPoseOnnxRuntime/rtmpose_tracker_onnxruntime.cpp:
--------------------------------------------------------------------------------
1 | #include "rtmpose_tracker_onnxruntime.h"
2 |
3 | RTMPoseTrackerOnnxruntime::RTMPoseTrackerOnnxruntime(const std::string& det_model_path, const std::string& pose_model_path, int detect_interval)
4 | :m_rtm_det_ptr(nullptr),
5 | m_rtm_pose_ptr(nullptr),
6 | m_frame_num(0),
7 | 	m_detect_interval(detect_interval)
8 | {
9 | 	m_rtm_det_ptr = std::make_unique<RTMDetOnnxruntime>(det_model_path);
10 | 	m_rtm_pose_ptr = std::make_unique<RTMPoseOnnxruntime>(pose_model_path);
11 | }
12 |
13 | RTMPoseTrackerOnnxruntime::~RTMPoseTrackerOnnxruntime()
14 | {
15 | }
16 |
17 | std::pair<DetectBox, std::vector<PosePoint>> RTMPoseTrackerOnnxruntime::Inference(const cv::Mat& input_mat)
18 | {
19 | 	std::pair<DetectBox, std::vector<PosePoint>> result;
20 |
21 | if (m_rtm_det_ptr == nullptr || m_rtm_pose_ptr == nullptr)
22 | return result;
23 |
24 | 	if (m_frame_num % m_detect_interval == 0)
25 | {
26 | m_detect_box = m_rtm_det_ptr->Inference(input_mat);
27 | }
28 |
29 | 	std::vector<PosePoint> pose_result = m_rtm_pose_ptr->Inference(input_mat, m_detect_box);
30 |
31 | m_frame_num += 1;
32 |
33 | return std::make_pair(m_detect_box, pose_result);
34 | }
35 |
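The tracker's control flow, restated as a short Python sketch for readability (the repository's implementation is the C++ above): the detector runs only every `detect_interval` frames, and the most recent box is reused in between.

class IntervalTracker:
    """Hypothetical mirror of RTMPoseTrackerOnnxruntime's frame loop."""

    def __init__(self, detector, pose_model, detect_interval=10):
        self.detector = detector          # callable: frame -> box
        self.pose_model = pose_model      # callable: (frame, box) -> keypoints
        self.detect_interval = detect_interval
        self.frame_num = 0
        self.detect_box = None

    def inference(self, frame):
        # refresh the person box only every `detect_interval` frames
        if self.frame_num % self.detect_interval == 0:
            self.detect_box = self.detector(frame)
        keypoints = self.pose_model(frame, self.detect_box)
        self.frame_num += 1
        return self.detect_box, keypoints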
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/RTMPose-Deploy/Windows/OnnxRumtime-CPU/src/RTMPoseOnnxRuntime/rtmpose_tracker_onnxruntime.h:
--------------------------------------------------------------------------------
1 | #ifndef _RTM_POSE_TRACKER_ONNXRUNTIME_H_
2 | #define _RTM_POSE_TRACKER_ONNXRUNTIME_H_
3 |
4 | #include "rtmdet_onnxruntime.h"
5 | #include "rtmpose_onnxruntime.h"
6 |
7 | #include <memory>
8 | #include <string>
9 |
10 | class RTMPoseTrackerOnnxruntime
11 | {
12 | public:
13 | RTMPoseTrackerOnnxruntime() = delete;
14 | RTMPoseTrackerOnnxruntime(
15 | const std::string& det_model_path,
16 | const std::string& pose_model_path,
17 | 		int detect_interval = 10
18 | );
19 | 	virtual ~RTMPoseTrackerOnnxruntime();
20 |
21 | public:
22 | 	std::pair<DetectBox, std::vector<PosePoint>> Inference(const cv::Mat& input_mat);
23 |
24 | private:
25 | 	std::unique_ptr<RTMDetOnnxruntime> m_rtm_det_ptr;
26 | 	std::unique_ptr<RTMPoseOnnxruntime> m_rtm_pose_ptr;
27 | 	unsigned int m_frame_num;
28 | 	DetectBox m_detect_box;
29 | 	int m_detect_interval;
30 | };
31 |
32 | #endif // !_RTM_POSE_TRACKER_ONNXRUNTIME_H_
33 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/onnxruntime/human-pose.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/mmpose_package/mmpose/projects/rtmpose/examples/onnxruntime/human-pose.jpeg
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/examples/onnxruntime/requirements.txt:
--------------------------------------------------------------------------------
1 | loguru==0.6.0
2 | numpy==1.21.6
3 | onnxruntime==1.14.1
4 | onnxruntime-gpu==1.8.1
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/rtmdet/person/rtmdet_m_640-8xb32_coco-person.py:
--------------------------------------------------------------------------------
1 | _base_ = 'mmdet::rtmdet/rtmdet_m_8xb32-300e_coco.py'
2 |
3 | checkpoint = 'https://download.openmmlab.com/mmdetection/v3.0/rtmdet/cspnext_rsb_pretrain/cspnext-m_8xb256-rsb-a1-600e_in1k-ecb3bbd9.pth' # noqa
4 |
5 | model = dict(
6 | backbone=dict(
7 | init_cfg=dict(
8 | type='Pretrained', prefix='backbone.', checkpoint=checkpoint)),
9 | bbox_head=dict(num_classes=1),
10 | test_cfg=dict(
11 | nms_pre=1000,
12 | min_bbox_size=0,
13 | score_thr=0.05,
14 | nms=dict(type='nms', iou_threshold=0.6),
15 | max_per_img=100))
16 |
17 | train_dataloader = dict(dataset=dict(metainfo=dict(classes=('person', ))))
18 |
19 | val_dataloader = dict(dataset=dict(metainfo=dict(classes=('person', ))))
20 | test_dataloader = val_dataloader
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/rtmpose/pruning/group_fisher_finetune_rtmpose-s_8xb256-420e_aic-coco-256x192.py:
--------------------------------------------------------------------------------
1 | #############################################################################
2 | """# You have to fill these args.
3 |
4 | _base_ (str): The path to your pruning config file.
5 | pruned_path (str): The path to the checkpoint of the pruned model.
6 | finetune_lr (float): The learning rate for fine-tuning. Usually we reuse
7 |     the learning rate of the pretraining schedule.
8 | """
9 |
10 | _base_ = './group_fisher_prune_rtmpose-s_8xb256-420e_aic-coco-256x192.py' # noqa
11 | pruned_path = 'https://download.openmmlab.com/mmrazor/v1/pruning/group_fisher/rtmpose-s/group_fisher_prune_rtmpose-s_8xb256-420e_aic-coco-256x192.pth' # noqa
12 | finetune_lr = 4e-3
13 | ##############################################################################
14 |
15 | algorithm = _base_.model
16 | algorithm.init_cfg = dict(type='Pretrained', checkpoint=pruned_path)
17 |
18 | model = dict(
19 | _delete_=True,
20 | _scope_='mmrazor',
21 | type='GroupFisherSubModel',
22 | algorithm=algorithm,
23 | )
24 |
25 | # restore lr
26 | optim_wrapper = dict(optimizer=dict(lr=finetune_lr))
27 |
28 | # remove pruning related hooks
29 | custom_hooks = _base_.custom_hooks[:-2]
30 |
31 | # delete ddp
32 | model_wrapper_cfg = None
33 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/rtmpose/rtmpose/pruning/group_fisher_finetune_rtmpose-s_8xb256-420e_coco-256x192.py:
--------------------------------------------------------------------------------
1 | #############################################################################
2 | """# You have to fill these args.
3 |
4 | _base_ (str): The path to your pruning config file.
5 | pruned_path (str): The path to the checkpoint of the pruned model.
6 | finetune_lr (float): The learning rate for fine-tuning. Usually we reuse
7 |     the learning rate of the pretraining schedule.
8 | """
9 |
10 | _base_ = './group_fisher_prune_rtmpose-s_8xb256-420e_coco-256x192.py'
11 | pruned_path = 'https://download.openmmlab.com/mmrazor/v1/pruning/group_fisher/rtmpose-s/group_fisher_prune_rtmpose-s_8xb256-420e_coco-256x192.pth' # noqa
12 | finetune_lr = 4e-3
13 | ##############################################################################
14 |
15 | algorithm = _base_.model
16 | algorithm.init_cfg = dict(type='Pretrained', checkpoint=pruned_path)
17 | # algorithm.update(dict(architecture=dict(test_cfg=dict(flip_test=False), ))) # disable flip test # noqa
18 |
19 | model = dict(
20 | _delete_=True,
21 | _scope_='mmrazor',
22 | type='GroupFisherSubModel',
23 | algorithm=algorithm,
24 | )
25 |
26 | # restore lr
27 | optim_wrapper = dict(optimizer=dict(lr=finetune_lr))
28 |
29 | # remove pruning related hooks
30 | custom_hooks = _base_.custom_hooks[:-2]
31 |
32 | # delete ddp
33 | model_wrapper_cfg = None
34 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/configs/_base_/datasets:
--------------------------------------------------------------------------------
1 | ../../../../configs/_base_/datasets
2 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/configs/_base_/default_runtime.py:
--------------------------------------------------------------------------------
1 | default_scope = 'mmyolo'
2 | custom_imports = dict(imports=['models', 'datasets'])
3 |
4 | # hooks
5 | default_hooks = dict(
6 | timer=dict(type='IterTimerHook'),
7 | logger=dict(type='LoggerHook', interval=50),
8 | param_scheduler=dict(type='ParamSchedulerHook'),
9 | checkpoint=dict(type='CheckpointHook', interval=10, max_keep_ckpts=3),
10 | sampler_seed=dict(type='DistSamplerSeedHook'),
11 | visualization=dict(type='mmpose.PoseVisualizationHook', enable=False),
12 | )
13 |
14 | # multi-processing backend
15 | env_cfg = dict(
16 | cudnn_benchmark=False,
17 | mp_cfg=dict(mp_start_method='fork', opencv_num_threads=0),
18 | dist_cfg=dict(backend='nccl'),
19 | )
20 |
21 | # visualizer
22 | vis_backends = [dict(type='LocalVisBackend')]
23 | visualizer = dict(
24 | type='mmpose.PoseLocalVisualizer',
25 | vis_backends=vis_backends,
26 | name='visualizer')
27 |
28 | # logger
29 | log_processor = dict(
30 | type='LogProcessor', window_size=50, by_epoch=True, num_digits=6)
31 | log_level = 'INFO'
32 | load_from = None
33 | resume = False
34 |
35 | # file I/O backend
36 | file_client_args = dict(backend='disk')
37 |
38 | # training/validation/testing progress
39 | train_cfg = dict()
40 | val_cfg = dict(type='ValLoop')
41 | test_cfg = dict(type='TestLoop')
42 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/configs/yolox-pose_l_4xb64-300e_coco.py:
--------------------------------------------------------------------------------
1 | _base_ = ['./yolox-pose_s_8xb32-300e_coco.py']
2 |
3 | # model settings
4 | model = dict(
5 | init_cfg=dict(checkpoint='https://download.openmmlab.com/mmyolo/v0/yolox/'
6 | 'yolox_l_fast_8xb8-300e_coco/yolox_l_fast_8xb8-300e_'
7 | 'coco_20230213_160715-c731eb1c.pth'),
8 | backbone=dict(
9 | deepen_factor=1.0,
10 | widen_factor=1.0,
11 | ),
12 | neck=dict(
13 | deepen_factor=1.0,
14 | widen_factor=1.0,
15 | ),
16 | bbox_head=dict(head_module=dict(widen_factor=1.0)))
17 |
18 | train_dataloader = dict(batch_size=64)
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/configs/yolox-pose_m_4xb64-300e_coco.py:
--------------------------------------------------------------------------------
1 | _base_ = ['./yolox-pose_s_8xb32-300e_coco.py']
2 |
3 | # model settings
4 | model = dict(
5 | init_cfg=dict(checkpoint='https://download.openmmlab.com/mmyolo/v0/yolox/'
6 | 'yolox_m_fast_8xb32-300e-rtmdet-hyp_coco/yolox_m_fast_8xb32'
7 | '-300e-rtmdet-hyp_coco_20230210_144328-e657e182.pth'),
8 | backbone=dict(
9 | deepen_factor=0.67,
10 | widen_factor=0.75,
11 | ),
12 | neck=dict(
13 | deepen_factor=0.67,
14 | widen_factor=0.75,
15 | ),
16 | bbox_head=dict(head_module=dict(widen_factor=0.75)))
17 |
18 | train_dataloader = dict(batch_size=64)
19 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | from .bbox_keypoint_structure import * # noqa
2 | from .coco_dataset import * # noqa
3 | from .transforms import * # noqa
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/datasets/coco_dataset.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from typing import Any
3 |
4 | from mmengine.dataset import force_full_init
5 | from mmyolo.registry import DATASETS
6 |
7 | from mmpose.datasets import CocoDataset as MMPoseCocoDataset
8 |
9 |
10 | @DATASETS.register_module()
11 | class CocoDataset(MMPoseCocoDataset):
12 |
13 | @force_full_init
14 | def prepare_data(self, idx) -> Any:
15 | data_info = self.get_data_info(idx)
16 |         data_info['dataset'] = self  # make the dataset accessible to pipeline transforms
17 | return self.pipeline(data_info)
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/demo:
--------------------------------------------------------------------------------
1 | ../../demo
2 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/models/__init__.py:
--------------------------------------------------------------------------------
1 | from .assigner import * # noqa
2 | from .data_preprocessor import * # noqa
3 | from .oks_loss import * # noqa
4 | from .utils import * # noqa
5 | from .yolox_pose_head import * # noqa
6 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/models/data_preprocessor.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from typing import List, Tuple
3 |
4 | from mmdet.models import BatchSyncRandomResize
5 | from mmyolo.registry import MODELS
6 | from torch import Tensor
7 |
8 | from mmpose.structures import PoseDataSample
9 |
10 |
11 | @MODELS.register_module()
12 | class PoseBatchSyncRandomResize(BatchSyncRandomResize):
13 | """Batch random resize which synchronizes the random size across ranks.
14 |
15 | This transform is similar to `mmdet.BatchSyncRandomResize`, but it also
16 |     rescales the keypoint coordinates simultaneously.
17 | """
18 |
19 | def forward(self, inputs: Tensor, data_samples: List[PoseDataSample]
20 | ) -> Tuple[Tensor, List[PoseDataSample]]:
21 |
22 | inputs = inputs.float()
23 | h, w = inputs.shape[-2:]
24 | if self._input_size is None:
25 | self._input_size = (h, w)
26 | scale_y = self._input_size[0] / h
27 | scale_x = self._input_size[1] / w
28 | if scale_x != 1 or scale_y != 1:
29 | for data_sample in data_samples:
30 | data_sample.gt_instances.keypoints[..., 0] *= scale_x
31 | data_sample.gt_instances.keypoints[..., 1] *= scale_y
32 |
33 | return super().forward(inputs, data_samples)
34 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/projects/yolox-pose/tools:
--------------------------------------------------------------------------------
1 | ../../tools
2 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = --xdoctest --xdoctest-style=auto
3 | norecursedirs = .git ignore build __pycache__ data docker docs .eggs .mim tests/legacy
4 |
5 | filterwarnings= default
6 | ignore:.*No cfgstr given in Cacher constructor or call.*:Warning
7 | ignore:.*Define the __nice__ method for.*:Warning
8 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements.txt:
--------------------------------------------------------------------------------
1 | -r requirements/build.txt
2 | -r requirements/runtime.txt
3 | -r requirements/tests.txt
4 | -r requirements/optional.txt
5 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/albu.txt:
--------------------------------------------------------------------------------
1 | albumentations>=0.3.2 --no-binary qudida,albumentations
2 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/build.txt:
--------------------------------------------------------------------------------
1 | # These must be installed before building mmpose
2 | numpy
3 | torch>=1.6
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/docs.txt:
--------------------------------------------------------------------------------
1 | docutils==0.16.0
2 | markdown
3 | myst-parser
4 | -e git+https://github.com/gaotongxiao/pytorch_sphinx_theme.git#egg=pytorch_sphinx_theme
5 | sphinx==4.5.0
6 | sphinx_copybutton
7 | sphinx_markdown_tables
8 | urllib3<2.0.0
9 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/mminstall.txt:
--------------------------------------------------------------------------------
1 | mmcv>=2.0.0,<2.1.0
2 | mmdet>=3.0.0,<3.1.0
3 | mmengine>=0.4.0,<1.0.0
4 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/optional.txt:
--------------------------------------------------------------------------------
1 | requests
2 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/poseval.txt:
--------------------------------------------------------------------------------
1 | poseval@git+https://github.com/svenkreiss/poseval.git
2 | shapely==1.8.4
3 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/readthedocs.txt:
--------------------------------------------------------------------------------
1 | mmcv>=2.0.0rc4
2 | mmengine>=0.6.0,<1.0.0
3 | munkres
4 | regex
5 | scipy
6 | titlecase
7 | torch>1.6
8 | torchvision
9 | xtcocotools>=1.13
10 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/runtime.txt:
--------------------------------------------------------------------------------
1 | chumpy
2 | json_tricks
3 | matplotlib
4 | munkres
5 | numpy
6 | opencv-python
7 | pillow
8 | scipy
9 | torchvision
10 | xtcocotools>=1.12
11 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/requirements/tests.txt:
--------------------------------------------------------------------------------
1 | coverage
2 | flake8
3 | interrogate
4 | isort==4.3.21
5 | parameterized
6 | pytest
7 | pytest-runner
8 | xdoctest>=0.10.0
9 | yapf
10 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/resources/mmpose-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/5k5000/CLdetection2023/d1a01536ad892134c4dd728c87dc9ac1d87b8e11/mmpose_package/mmpose/resources/mmpose-logo.png
--------------------------------------------------------------------------------
/mmpose_package/mmpose/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_wheel]
2 | universal=1
3 |
4 | [aliases]
5 | test=pytest
6 |
7 | [yapf]
8 | based_on_style = pep8
9 | blank_line_before_nested_class_or_def = true
10 | split_before_expression_after_opening_paren = true
11 | split_penalty_import_names=0
12 | SPLIT_PENALTY_AFTER_OPENING_BRACKET=800
13 |
14 | [isort]
15 | line_length = 79
16 | multi_line_output = 0
17 | extra_standard_library = pkg_resources,setuptools
18 | known_first_party = mmpose
19 | known_third_party = PIL,cv2,h5py,json_tricks,matplotlib,mmcv,munkres,numpy,pytest,pytorch_sphinx_theme,requests,scipy,seaborn,spacepy,titlecase,torch,torchvision,webcam_apis,xmltodict,xtcocotools
20 | no_lines_before = STDLIB,LOCALFOLDER
21 | default_section = THIRDPARTY
22 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tests/test_apis/test_webcam/test_utils/test_event.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import unittest
3 | from threading import Event
4 |
5 | from mmpose.apis.webcam.utils.event import EventManager
6 |
7 |
8 | class TestEventManager(unittest.TestCase):
9 |
10 | def test_event_manager(self):
11 | event_manager = EventManager()
12 |
13 | # test register_event
14 | event_manager.register_event('example_event')
15 | self.assertIn('example_event', event_manager._events)
16 | self.assertIsInstance(event_manager._events['example_event'], Event)
17 | self.assertFalse(event_manager.is_set('example_event'))
18 |
19 | # test event operations
20 | event_manager.set('q', is_keyboard=True)
21 | self.assertIn('_keyboard_q', event_manager._events)
22 | self.assertTrue(event_manager.is_set('q', is_keyboard=True))
23 |
24 | flag = event_manager.wait('q', is_keyboard=True)
25 | self.assertTrue(flag)
26 |
27 | event_manager.wait_and_handle('q', is_keyboard=True)
28 | event_manager.clear('q', is_keyboard=True)
29 | self.assertFalse(event_manager._events['_keyboard_q']._flag)
30 |
31 |
32 | if __name__ == '__main__':
33 | unittest.main()
34 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tests/test_apis/test_webcam/test_webcam_executor.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import unittest
3 |
4 | from mmengine import Config
5 |
6 | from mmpose.apis.webcam import WebcamExecutor
7 |
8 |
9 | class TestWebcamExecutor(unittest.TestCase):
10 |
11 | def setUp(self) -> None:
12 | config = Config.fromfile('demo/webcam_cfg/test_camera.py').executor_cfg
13 | config.camera_id = 'tests/data/posetrack18/videos/' \
14 | '000001_mpiinew_test/000001_mpiinew_test.mp4'
15 | self.executor = WebcamExecutor(**config)
16 |
17 | def test_init(self):
18 |
19 | self.assertEqual(len(self.executor.node_list), 2)
20 | self.assertEqual(self.executor.node_list[0].name, 'monitor')
21 | self.assertEqual(self.executor.node_list[1].name, 'recorder')
22 |
23 |
24 | if __name__ == '__main__':
25 | unittest.main()
26 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tests/test_datasets/test_transforms/test_loading.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from unittest import TestCase
3 |
4 | import numpy as np
5 | from mmcv import imread
6 |
7 | from mmpose.datasets.transforms.loading import LoadImage
8 |
9 |
10 | class TestLoadImage(TestCase):
11 |
12 | def test_load_image(self):
13 |
14 | transform = LoadImage()
15 | results = dict(img_path='tests/data/coco/000000000785.jpg')
16 |
17 | results = transform(results)
18 |
19 | self.assertIsInstance(results['img'], np.ndarray)
20 |
21 | def test_with_input_image(self):
22 | transform = LoadImage(to_float32=True)
23 |
24 | img_path = 'tests/data/coco/000000000785.jpg'
25 | results = dict(
26 | img_path=img_path, img=imread(img_path).astype(np.uint8))
27 |
28 | results = transform(results)
29 |
30 | self.assertIsInstance(results['img'], np.ndarray)
31 |         self.assertEqual(results['img'].dtype, np.float32)
32 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tests/test_models/test_backbones/test_alexnet.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from unittest import TestCase
3 |
4 | import torch
5 |
6 | from mmpose.models.backbones import AlexNet
7 |
8 |
9 | class TestAlexNet(TestCase):
10 |
11 | def test_alexnet_backbone(self):
12 | """Test alexnet backbone."""
13 | model = AlexNet(-1)
14 | model.train()
15 |
16 | imgs = torch.randn(1, 3, 256, 192)
17 | feat = model(imgs)
18 | self.assertIsInstance(feat, tuple)
19 | self.assertEqual(feat[-1].shape, (1, 256, 7, 5))
20 |
21 | model = AlexNet(1)
22 | model.train()
23 |
24 | imgs = torch.randn(1, 3, 224, 224)
25 | feat = model(imgs)
26 | self.assertIsInstance(feat, tuple)
27 | self.assertEqual(feat[-1].shape, (1, 1))
28 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tests/test_models/test_backbones/test_mspn.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from unittest import TestCase
3 |
4 | import torch
5 |
6 | from mmpose.models.backbones import MSPN
7 |
8 |
9 | class TestMSPN(TestCase):
10 |
11 | def test_mspn_backbone(self):
12 | with self.assertRaises(AssertionError):
13 |             # MSPN's num_stages should be larger than 0
14 | MSPN(num_stages=0)
15 | with self.assertRaises(AssertionError):
16 |             # MSPN's num_units should be larger than 1
17 | MSPN(num_units=1)
18 | with self.assertRaises(AssertionError):
19 | # len(num_blocks) should equal num_units
20 | MSPN(num_units=2, num_blocks=[2, 2, 2])
21 |
22 | # Test MSPN's outputs
23 | model = MSPN(num_stages=2, num_units=2, num_blocks=[2, 2])
24 | model.init_weights()
25 | model.train()
26 |
27 | imgs = torch.randn(1, 3, 511, 511)
28 | feat = model(imgs)
29 | self.assertEqual(len(feat), 2)
30 | self.assertEqual(len(feat[0]), 2)
31 | self.assertEqual(len(feat[1]), 2)
32 | self.assertEqual(feat[0][0].shape, torch.Size([1, 256, 64, 64]))
33 | self.assertEqual(feat[0][1].shape, torch.Size([1, 256, 128, 128]))
34 | self.assertEqual(feat[1][0].shape, torch.Size([1, 256, 64, 64]))
35 | self.assertEqual(feat[1][1].shape, torch.Size([1, 256, 128, 128]))
36 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tests/test_models/test_backbones/test_v2v_net.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from unittest import TestCase
3 |
4 | import torch
5 |
6 | from mmpose.models.backbones import V2VNet
7 |
8 |
9 | class TestV2Vnet(TestCase):
10 |
11 | def test_v2v_net(self):
12 | """Test V2VNet."""
13 | model = V2VNet(input_channels=17, output_channels=15)
14 | input = torch.randn(2, 17, 32, 32, 32)
15 | output = model(input)
16 | self.assertIsInstance(output, tuple)
17 | self.assertEqual(output[-1].shape, (2, 15, 32, 32, 32))
18 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tests/test_models/test_losses/test_classification_losses.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | from unittest import TestCase
3 |
4 | import torch
5 |
6 | from mmpose.models.losses.classification_loss import InfoNCELoss
7 |
8 |
9 | class TestInfoNCELoss(TestCase):
10 |
11 | def test_loss(self):
12 |
13 | # test loss w/o target_weight
14 | loss = InfoNCELoss(temperature=0.05)
15 |
16 | fake_pred = torch.arange(5 * 2).reshape(5, 2).float()
17 | self.assertTrue(
18 | torch.allclose(loss(fake_pred), torch.tensor(5.4026), atol=1e-4))
19 |
20 | # check if the value of temperature is positive
21 | with self.assertRaises(AssertionError):
22 | loss = InfoNCELoss(temperature=0.)
23 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tools/analysis_tools/print_config.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) OpenMMLab. All rights reserved.
2 | import argparse
3 |
4 | from mmengine import Config, DictAction
5 |
6 |
7 | def parse_args():
8 | parser = argparse.ArgumentParser(description='Print the whole config')
9 | parser.add_argument('config', help='config file path')
10 | parser.add_argument(
11 | '--options', nargs='+', action=DictAction, help='arguments in dict')
12 | args = parser.parse_args()
13 |
14 | return args
15 |
16 |
17 | def main():
18 | args = parse_args()
19 |
20 | cfg = Config.fromfile(args.config)
21 | if args.options is not None:
22 | cfg.merge_from_dict(args.options)
23 | print(f'Config:\n{cfg.pretty_text}')
24 |
25 |
26 | if __name__ == '__main__':
27 | main()
28 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tools/dist_test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | CONFIG=$1
5 | CHECKPOINT=$2
6 | GPUS=$3
7 | NNODES=${NNODES:-1}
8 | NODE_RANK=${NODE_RANK:-0}
9 | PORT=${PORT:-29500}
10 | MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}
11 |
12 | PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
13 | python -m torch.distributed.launch \
14 | --nnodes=$NNODES \
15 | --node_rank=$NODE_RANK \
16 | --master_addr=$MASTER_ADDR \
17 | --nproc_per_node=$GPUS \
18 | --master_port=$PORT \
19 | $(dirname "$0")/test.py \
20 | $CONFIG \
21 | $CHECKPOINT \
22 | --launcher pytorch \
23 | ${@:4}
24 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tools/dist_train.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | CONFIG=$1
5 | GPUS=$2
6 | NNODES=${NNODES:-1}
7 | NODE_RANK=${NODE_RANK:-0}
8 | PORT=${PORT:-29500}
9 | MASTER_ADDR=${MASTER_ADDR:-"127.0.0.1"}
10 |
11 | PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
12 | python -m torch.distributed.launch \
13 | --nnodes=$NNODES \
14 | --node_rank=$NODE_RANK \
15 | --master_addr=$MASTER_ADDR \
16 | --nproc_per_node=$GPUS \
17 | --master_port=$PORT \
18 | step3_train_and_evaluation.py \
19 | $CONFIG \
20 | --launcher pytorch ${@:3}
21 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tools/slurm_test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | set -x
5 |
6 | PARTITION=$1
7 | JOB_NAME=$2
8 | CONFIG=$3
9 | CHECKPOINT=$4
10 | GPUS=${GPUS:-8}
11 | GPUS_PER_NODE=${GPUS_PER_NODE:-8}
12 | CPUS_PER_TASK=${CPUS_PER_TASK:-5}
13 | PY_ARGS=${@:5}
14 | SRUN_ARGS=${SRUN_ARGS:-""}
15 |
16 | PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
17 | srun -p ${PARTITION} \
18 | --job-name=${JOB_NAME} \
19 | --gres=gpu:${GPUS_PER_NODE} \
20 | --ntasks=${GPUS} \
21 | --ntasks-per-node=${GPUS_PER_NODE} \
22 | --cpus-per-task=${CPUS_PER_TASK} \
23 | --kill-on-bad-exit=1 \
24 | ${SRUN_ARGS} \
25 | python -u tools/test.py ${CONFIG} ${CHECKPOINT} --launcher="slurm" ${PY_ARGS}
26 |
--------------------------------------------------------------------------------
/mmpose_package/mmpose/tools/slurm_train.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) OpenMMLab. All rights reserved.
3 |
4 | set -x
5 |
6 | PARTITION=$1
7 | JOB_NAME=$2
8 | CONFIG=$3
9 | WORK_DIR=$4
10 | GPUS=${GPUS:-8}
11 | GPUS_PER_NODE=${GPUS_PER_NODE:-8}
12 | CPUS_PER_TASK=${CPUS_PER_TASK:-5}
13 | SRUN_ARGS=${SRUN_ARGS:-""}
14 | PY_ARGS=${@:5}
15 |
16 | PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \
17 | srun -p ${PARTITION} \
18 | --job-name=${JOB_NAME} \
19 | --gres=gpu:${GPUS_PER_NODE} \
20 | --ntasks=${GPUS} \
21 | --ntasks-per-node=${GPUS_PER_NODE} \
22 | --cpus-per-task=${CPUS_PER_TASK} \
23 | --kill-on-bad-exit=1 \
24 | ${SRUN_ARGS} \
25 | python -u tools/train.py ${CONFIG} --work-dir=${WORK_DIR} --launcher="slurm" ${PY_ARGS}
26 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # ===================================================================
2 | # Do not change these packages !!!
3 | # Since you are using a Docker image that already has the PyTorch library installed, there is no need to reinstall Torch.
4 | # ===================================================================
5 | arrow==1.2.3
6 | binaryornot==0.4.4
7 | build==0.10.0
8 | certifi==2022.12.7
9 | chardet==5.1.0
10 | charset-normalizer==3.1.0
11 | click==8.1.3
12 | cookiecutter==2.1.1
13 | idna==3.4
14 | imageio[tifffile]==2.27.0
15 | jinja2==3.1.2
16 | jinja2-time==0.2.0
17 | joblib==1.2.0
18 | markupsafe==2.1.2
19 | numpy==1.21.6
20 | packaging==23.1
21 | pandas==1.3.5
22 | pillow==9.5.0
23 | pip-tools==6.13.0
24 | pyproject-hooks==1.0.0
25 | python-dateutil==2.8.2
26 | python-slugify==8.0.1
27 | pytz==2023.3
28 | pyyaml==6.0
29 | requests==2.28.2
30 | scikit-learn==1.0.2
31 | scipy==1.7.3
32 | simpleitk==2.2.1
33 | six==1.16.0
34 | text-unidecode==1.3
35 | threadpoolctl==3.1.0
36 | tifffile==2021.11.2
37 | tomli==2.0.1
38 | tzdata==2023.3
39 | urllib3==1.26.15
40 | wheel==0.40.0
41 | scikit-image==0.19.3
42 | evalutils==0.3.1
43 |
44 | # ===================================================================
45 | # If you have other additional dependencies, please list them below.
46 | # ===================================================================
47 |
--------------------------------------------------------------------------------
/step1_test_mmpose.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 |
4 | import mmpose
5 | print(mmpose.__version__)
--------------------------------------------------------------------------------