├── .gitignore
├── AI Challenger
│   ├── 043758c591b58f39a01648c49b5154ad1e01d400.jpg
│   ├── 8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg
│   ├── aic.json
│   ├── aic_coco_format.json
│   ├── aic_coco_vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg
│   ├── aic_coco_vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg
│   ├── aic_vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg
│   └── aic_vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg
├── COCO
│   ├── 000000007511.jpg
│   ├── 000000017905.jpg
│   ├── 000000285138.jpg
│   ├── coco.json
│   ├── coco_vised_000000007511.jpg
│   ├── coco_vised_000000017905.jpg
│   ├── coco_vised_000000285138.jpg
│   ├── coco_vised_whiteBG_000000007511.jpg
│   ├── coco_vised_whiteBG_000000017905.jpg
│   └── coco_vised_whiteBG_000000285138.jpg
├── MPII
│   ├── 000003072.jpg
│   ├── mpii.json
│   ├── mpii_vised_000003072.jpg
│   └── mpii_vised_whiteBG_000003072.jpg
├── README.md
├── aic2coco.py
└── vis.py

/.gitignore:
--------------------------------------------------------------------------------
1 | # IntelliJ project files
2 | *.idea
3 | *.iml
4 | out
5 | gen
6 | 
7 | ### Vim template
8 | [._]*.s[a-w][a-z]
9 | [._]s[a-w][a-z]
10 | *.un~
11 | Session.vim
12 | .netrwhist
13 | *~
14 | 
15 | ### IPythonNotebook template
16 | # Temporary data
17 | .ipynb_checkpoints/
18 | 
19 | ### Python template
20 | # Byte-compiled / optimized / DLL files
21 | __pycache__/
22 | *.py[cod]
23 | *$py.class
24 | 
25 | # C extensions
26 | *.so
27 | 
28 | # Distribution / packaging
29 | .Python
30 | env/
31 | build/
32 | develop-eggs/
33 | dist/
34 | downloads/
35 | eggs/
36 | .eggs/
37 | #lib/
38 | #lib64/
39 | parts/
40 | sdist/
41 | var/
42 | *.egg-info/
43 | .installed.cfg
44 | *.egg
45 | 
46 | # PyInstaller
47 | # Usually these files are written by a python script from a template
48 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
49 | *.manifest 50 | *.spec 51 | 52 | # Installer logs 53 | pip-log.txt 54 | pip-delete-this-directory.txt 55 | 56 | # Unit test / coverage reports 57 | htmlcov/ 58 | .tox/ 59 | .coverage 60 | .coverage.* 61 | .cache 62 | nosetests.xml 63 | coverage.xml 64 | *,cover 65 | 66 | # Translations 67 | *.mo 68 | *.pot 69 | 70 | # Django stuff: 71 | *.log 72 | 73 | # Sphinx documentation 74 | docs/_build/ 75 | 76 | # PyBuilder 77 | target/ 78 | 79 | *.ipynb 80 | *.params 81 | *.vscode/ -------------------------------------------------------------------------------- /AI Challenger/043758c591b58f39a01648c49b5154ad1e01d400.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/AI Challenger/043758c591b58f39a01648c49b5154ad1e01d400.jpg -------------------------------------------------------------------------------- /AI Challenger/8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/AI Challenger/8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg -------------------------------------------------------------------------------- /AI Challenger/aic.json: -------------------------------------------------------------------------------- 1 | { 2 | "images": [ 3 | { 4 | "url": "http://news.sogou.com/", 5 | "file_name": "043758c591b58f39a01648c49b5154ad1e01d400.jpg", 6 | "height": 874, 7 | "width": 658, 8 | "id": 1 9 | }, 10 | { 11 | "url": "http://news.sogou.com/", 12 | "file_name": "8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg", 13 | "height": 689, 14 | "width": 1000, 15 | "id": 3 16 | } 17 | ], 18 | "annotations": [ 19 | { 20 | "bbox": [ 21 | 298, 22 | 36, 23 | 329, 24 | 647 25 | ], 26 | "keypoints": [ 27 | 400, 28 | 183, 29 | 2, 30 | 380, 31 | 319, 32 | 2, 33 | 358, 34 | 416, 35 | 2, 36 | 559, 37 | 217, 38 | 2, 39 | 557, 40 | 357, 41 | 2, 42 | 590, 43 | 294, 44 | 2, 45 | 428, 46 | 444, 47 | 2, 48 | 394, 49 | 664, 50 | 2, 51 | 0, 52 | 0, 53 | 0, 54 | 511, 55 | 445, 56 | 2, 57 | 504, 58 | 662, 59 | 2, 60 | 0, 61 | 0, 62 | 0, 63 | 466, 64 | 51, 65 | 2, 66 | 485, 67 | 179, 68 | 2 69 | ], 70 | "num_keypoints": 12, 71 | "image_id": 3, 72 | "scores": [], 73 | "category_id": 1, 74 | "id": 4, 75 | "iscrowd": 0, 76 | "area": 212863 77 | }, 78 | { 79 | "bbox": [ 80 | 57, 81 | 125, 82 | 582, 83 | 743 84 | ], 85 | "keypoints": [ 86 | 235, 87 | 455, 88 | 1, 89 | 183, 90 | 678, 91 | 1, 92 | 204, 93 | 470, 94 | 2, 95 | 514, 96 | 387, 97 | 2, 98 | 433, 99 | 660, 100 | 2, 101 | 219, 102 | 715, 103 | 2, 104 | 389, 105 | 826, 106 | 1, 107 | 0, 108 | 0, 109 | 0, 110 | 0, 111 | 0, 112 | 0, 113 | 555, 114 | 813, 115 | 2, 116 | 0, 117 | 0, 118 | 0, 119 | 0, 120 | 0, 121 | 0, 122 | 191, 123 | 183, 124 | 2, 125 | 324, 126 | 395, 127 | 2 128 | ], 129 | "num_keypoints": 10, 130 | "image_id": 1, 131 | "scores": [], 132 | "category_id": 1, 133 | "id": 2, 134 | "iscrowd": 0, 135 | "area": 432426 136 | } 137 | ] 138 | } -------------------------------------------------------------------------------- /AI Challenger/aic_coco_format.json: -------------------------------------------------------------------------------- 1 | { 2 | "images": [ 3 | { 4 | "url": "http://news.sogou.com/", 5 | "file_name": "043758c591b58f39a01648c49b5154ad1e01d400.jpg", 6 | "height": 874, 7 | "width": 658, 8 | "id": 
600001 9 | }, 10 | { 11 | "url": "http://news.sogou.com/", 12 | "file_name": "8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg", 13 | "height": 689, 14 | "width": 1000, 15 | "id": 600003 16 | } 17 | ], 18 | "annotations": [ 19 | { 20 | "bbox": [ 21 | 57, 22 | 125, 23 | 582, 24 | 743 25 | ], 26 | "keypoints": [ 27 | 0, 28 | 0, 29 | 0, 30 | 0, 31 | 0, 32 | 0, 33 | 0, 34 | 0, 35 | 0, 36 | 0, 37 | 0, 38 | 0, 39 | 0, 40 | 0, 41 | 0, 42 | 514, 43 | 387, 44 | 2, 45 | 235, 46 | 455, 47 | 1, 48 | 433, 49 | 660, 50 | 2, 51 | 183, 52 | 678, 53 | 1, 54 | 219, 55 | 715, 56 | 2, 57 | 204, 58 | 470, 59 | 2, 60 | 555, 61 | 813, 62 | 2, 63 | 389, 64 | 826, 65 | 1, 66 | 0, 67 | 0, 68 | 0, 69 | 0, 70 | 0, 71 | 0, 72 | 0, 73 | 0, 74 | 0, 75 | 0, 76 | 0, 77 | 0 78 | ], 79 | "num_keypoints": 10, 80 | "image_id": 600001, 81 | "scores": [], 82 | "category_id": 1, 83 | "id": 900100582002, 84 | "iscrowd": 0, 85 | "area": 432426 86 | }, 87 | { 88 | "bbox": [ 89 | 298, 90 | 36, 91 | 329, 92 | 647 93 | ], 94 | "keypoints": [ 95 | 0, 96 | 0, 97 | 0, 98 | 0, 99 | 0, 100 | 0, 101 | 0, 102 | 0, 103 | 0, 104 | 0, 105 | 0, 106 | 0, 107 | 0, 108 | 0, 109 | 0, 110 | 559, 111 | 217, 112 | 2, 113 | 400, 114 | 183, 115 | 2, 116 | 557, 117 | 357, 118 | 2, 119 | 380, 120 | 319, 121 | 2, 122 | 590, 123 | 294, 124 | 2, 125 | 358, 126 | 416, 127 | 2, 128 | 511, 129 | 445, 130 | 2, 131 | 428, 132 | 444, 133 | 2, 134 | 504, 135 | 662, 136 | 2, 137 | 394, 138 | 664, 139 | 2, 140 | 0, 141 | 0, 142 | 0, 143 | 0, 144 | 0, 145 | 0 146 | ], 147 | "num_keypoints": 12, 148 | "image_id": 600003, 149 | "scores": [], 150 | "category_id": 1, 151 | "id": 900100582004, 152 | "iscrowd": 0, 153 | "area": 212863 154 | } 155 | ] 156 | } -------------------------------------------------------------------------------- /AI Challenger/aic_coco_vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/AI Challenger/aic_coco_vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg -------------------------------------------------------------------------------- /AI Challenger/aic_coco_vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/AI Challenger/aic_coco_vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg -------------------------------------------------------------------------------- /AI Challenger/aic_vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/AI Challenger/aic_vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg -------------------------------------------------------------------------------- /AI Challenger/aic_vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/AI Challenger/aic_vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg -------------------------------------------------------------------------------- /COCO/000000007511.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/000000007511.jpg -------------------------------------------------------------------------------- /COCO/000000017905.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/000000017905.jpg -------------------------------------------------------------------------------- /COCO/000000285138.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/000000285138.jpg -------------------------------------------------------------------------------- /COCO/coco.json: -------------------------------------------------------------------------------- 1 | { 2 | "images": [ 3 | { 4 | "license": 3, 5 | "file_name": "000000017905.jpg", 6 | "coco_url": "http://images.cocodataset.org/val2017/000000017905.jpg", 7 | "height": 640, 8 | "width": 480, 9 | "date_captured": "2013-11-16 18:01:33", 10 | "flickr_url": "http://farm1.staticflickr.com/44/173771776_53b9c22bb6_z.jpg", 11 | "id": 17905 12 | }, 13 | { 14 | "license": 5, 15 | "file_name": "000000285138.jpg", 16 | "coco_url": "http://images.cocodataset.org/train2017/000000285138.jpg", 17 | "height": 640, 18 | "width": 435, 19 | "date_captured": "2013-11-20 01:25:49", 20 | "flickr_url": "http://farm8.staticflickr.com/7002/6405527157_738ce0b0f1_z.jpg", 21 | "id": 285138 22 | } 23 | ], 24 | "annotations": [ 25 | { 26 | "segmentation": [ 27 | [ 28 | 190.53, 29 | 455.67, 30 | 185.46, 31 | 432.52, 32 | 186.91, 33 | 433.96, 34 | 186.18, 35 | 427.45, 36 | 186.18, 37 | 419.49, 38 | 186.18, 39 | 415.87, 40 | 189.08, 41 | 421.66, 42 | 189.08, 43 | 425.28, 44 | 189.08, 45 | 429.62, 46 | 186.91, 47 | 432.52, 48 | 189.08, 49 | 433.96, 50 | 193.42, 51 | 430.35, 52 | 197.76, 53 | 423.11, 54 | 195.59, 55 | 414.43, 56 | 192.7, 57 | 410.81, 58 | 191.25, 59 | 408.64, 60 | 189.8, 61 | 407.19, 62 | 189.08, 63 | 402.13, 64 | 189.08, 65 | 395.61, 66 | 188.35, 67 | 386.21, 68 | 188.35, 69 | 383.31, 70 | 186.91, 71 | 363.78, 72 | 184.74, 73 | 339.9, 74 | 192.7, 75 | 334.11, 76 | 184.01, 77 | 297.21, 78 | 178.22, 79 | 291.42, 80 | 153.62, 81 | 281.29, 82 | 145.66, 83 | 279.84, 84 | 144.22, 85 | 267.54, 86 | 150.01, 87 | 258.13, 88 | 150.01, 89 | 250.18, 90 | 150.01, 91 | 242.22, 92 | 147.83, 93 | 234.98, 94 | 144.22, 95 | 229.91, 96 | 140.6, 97 | 229.19, 98 | 134.81, 99 | 229.19, 100 | 126.85, 101 | 229.19, 102 | 122.51, 103 | 231.36, 104 | 120.34, 105 | 234.26, 106 | 116, 107 | 240.05, 108 | 115.27, 109 | 248, 110 | 113.83, 111 | 254.52, 112 | 115.27, 113 | 261.03, 114 | 117.44, 115 | 265.37, 116 | 120.34, 117 | 268.99, 118 | 121.06, 119 | 282.01, 120 | 94.29, 121 | 295.04, 122 | 89.95, 123 | 303.72, 124 | 85.61, 125 | 322.53, 126 | 81.27, 127 | 337.73, 128 | 90.67, 129 | 342.79, 130 | 88.5, 131 | 363.78, 132 | 88.5, 133 | 382.59, 134 | 88.5, 135 | 405.74, 136 | 89.95, 137 | 420.22, 138 | 88.5, 139 | 431.79, 140 | 92.84, 141 | 443.37, 142 | 102.97, 143 | 447.71, 144 | 101.53, 145 | 441.2, 146 | 105.14, 147 | 437.58, 148 | 105.87, 149 | 430.35, 150 | 102.25, 151 | 417.32, 152 | 99.36, 153 | 413.7, 154 | 105.87, 155 | 374.63, 156 | 110.21, 157 | 382.59, 158 | 
108.76, 159 | 393.44, 160 | 107.31, 161 | 405.02, 162 | 107.31, 163 | 417.32, 164 | 108.04, 165 | 433.96, 166 | 109.49, 167 | 449.16, 168 | 113.1, 169 | 463.63, 170 | 111.66, 171 | 478.1, 172 | 113.83, 173 | 481, 174 | 114.55, 175 | 491.85, 176 | 114.55, 177 | 520.07, 178 | 114.55, 179 | 533.09, 180 | 119.62, 181 | 563.48, 182 | 118.89, 183 | 570.72, 184 | 115.27, 185 | 577.23, 186 | 108.04, 187 | 586.64, 188 | 111.66, 189 | 593.87, 190 | 126.13, 191 | 591.7, 192 | 133.36, 193 | 585.91, 194 | 136.26, 195 | 576.51, 196 | 134.81, 197 | 571.44, 198 | 132.64, 199 | 556.25, 200 | 136.26, 201 | 531.65, 202 | 136.26, 203 | 512.11, 204 | 134.81, 205 | 490.4, 206 | 141.32, 207 | 481, 208 | 139.88, 209 | 468.69, 210 | 145.66, 211 | 433.24, 212 | 152.18, 213 | 474.48, 214 | 155.79, 215 | 473.04, 216 | 157.24, 217 | 488.23, 218 | 160.86, 219 | 496.19, 220 | 163.75, 221 | 512.83, 222 | 166.65, 223 | 534.54, 224 | 172.44, 225 | 552.63, 226 | 172.44, 227 | 564.93, 228 | 169.54, 229 | 576.51, 230 | 189.08, 231 | 591.7, 232 | 200.66, 233 | 588.81, 234 | 199.21, 235 | 583.74, 236 | 191.25, 237 | 574.34, 238 | 187.63, 239 | 571.44, 240 | 186.18, 241 | 534.54, 242 | 184.74, 243 | 507.04, 244 | 181.12, 245 | 488.95, 246 | 178.95, 247 | 475.21, 248 | 178.95, 249 | 475.21, 250 | 177.5, 251 | 473.04, 252 | 191.97, 253 | 467.25, 254 | 191.97, 255 | 467.25, 256 | 189.8, 257 | 453.5 258 | ] 259 | ], 260 | "num_keypoints": 17, 261 | "area": 23736.03525, 262 | "iscrowd": 0, 263 | "keypoints": [ 264 | 132, 265 | 261, 266 | 2, 267 | 139, 268 | 254, 269 | 2, 270 | 126, 271 | 254, 272 | 2, 273 | 147, 274 | 257, 275 | 2, 276 | 117, 277 | 258, 278 | 2, 279 | 171, 280 | 299, 281 | 2, 282 | 104, 283 | 303, 284 | 2, 285 | 177, 286 | 351, 287 | 2, 288 | 98, 289 | 360, 290 | 2, 291 | 182, 292 | 398, 293 | 2, 294 | 94, 295 | 414, 296 | 2, 297 | 164, 298 | 406, 299 | 2, 300 | 121, 301 | 407, 302 | 2, 303 | 170, 304 | 489, 305 | 2, 306 | 124, 307 | 488, 308 | 2, 309 | 182, 310 | 570, 311 | 2, 312 | 124, 313 | 569, 314 | 2 315 | ], 316 | "image_id": 17905, 317 | "bbox": [ 318 | 81.27, 319 | 229.19, 320 | 119.39, 321 | 364.68 322 | ], 323 | "category_id": 1, 324 | "id": 2157397 325 | }, 326 | { 327 | "segmentation": [ 328 | [ 329 | 204.22, 330 | 243.06, 331 | 230.11, 332 | 240.18, 333 | 220.04, 334 | 211.42, 335 | 207.1, 336 | 197.03, 337 | 195.6, 338 | 184.09, 339 | 191.28, 340 | 159.64, 341 | 194.16, 342 | 136.63, 343 | 204.22, 344 | 102.11, 345 | 217.17, 346 | 80.54, 347 | 235.87, 348 | 70.47, 349 | 253.12, 350 | 61.84, 351 | 284.76, 352 | 60.4, 353 | 303.46, 354 | 63.28, 355 | 343.73, 356 | 77.66, 357 | 365.3, 358 | 102.11, 359 | 372.49, 360 | 139.51, 361 | 372.49, 362 | 161.08, 363 | 373.93, 364 | 175.46, 365 | 368.18, 366 | 202.79, 367 | 353.8, 368 | 214.29, 369 | 346.61, 370 | 225.8, 371 | 327.91, 372 | 241.62, 373 | 325.03, 374 | 243.06, 375 | 335.1, 376 | 250.25, 377 | 349.48, 378 | 264.63, 379 | 378.25, 380 | 273.26, 381 | 392.63, 382 | 296.27, 383 | 409.89, 384 | 313.53, 385 | 425.71, 386 | 327.91, 387 | 430.02, 388 | 350.92, 389 | 435, 390 | 437.21, 391 | 434.34, 392 | 491.87, 393 | 421.39, 394 | 536.45, 395 | 372.49, 396 | 570.97, 397 | 352.36, 398 | 579.6, 399 | 345.17, 400 | 622.74, 401 | 332.22, 402 | 638.56, 403 | 245.93, 404 | 640, 405 | 179.78, 406 | 640, 407 | 162.52, 408 | 635.69, 409 | 149.57, 410 | 640, 411 | 136.63, 412 | 640, 413 | 135.19, 414 | 599.73, 415 | 171.15, 416 | 608.36, 417 | 214.29, 418 | 593.98, 419 | 237.3, 420 | 562.34, 421 | 241.62, 422 | 526.38, 423 | 241.62, 424 | 454.47, 425 | 217.17, 426 | 
360.99, 427 | 205.66, 428 | 264.63 429 | ], 430 | [ 431 | 38.83, 432 | 258.88, 433 | 38.83, 434 | 291.96, 435 | 20.13, 436 | 350.92, 437 | 28.76, 438 | 422.83, 439 | 33.08, 440 | 438.65, 441 | 56.09, 442 | 438.65, 443 | 71.91, 444 | 435.78, 445 | 70.47, 446 | 417.08, 447 | 70.47, 448 | 356.67, 449 | 58.97, 450 | 266.07, 451 | 51.78, 452 | 218.61, 453 | 35.96, 454 | 245.93 455 | ] 456 | ], 457 | "num_keypoints": 13, 458 | "area": 108199.9553, 459 | "iscrowd": 0, 460 | "keypoints": [ 461 | 268, 462 | 204, 463 | 2, 464 | 308, 465 | 177, 466 | 2, 467 | 250, 468 | 171, 469 | 2, 470 | 341, 471 | 186, 472 | 2, 473 | 220, 474 | 174, 475 | 2, 476 | 401, 477 | 337, 478 | 2, 479 | 162, 480 | 276, 481 | 1, 482 | 399, 483 | 502, 484 | 2, 485 | 48, 486 | 417, 487 | 2, 488 | 273, 489 | 584, 490 | 2, 491 | 56, 492 | 292, 493 | 2, 494 | 284, 495 | 629, 496 | 2, 497 | 148, 498 | 599, 499 | 1, 500 | 0, 501 | 0, 502 | 0, 503 | 0, 504 | 0, 505 | 0, 506 | 0, 507 | 0, 508 | 0, 509 | 0, 510 | 0, 511 | 0 512 | ], 513 | "image_id": 285138, 514 | "bbox": [ 515 | 20.13, 516 | 60.4, 517 | 414.87, 518 | 579.6 519 | ], 520 | "category_id": 1, 521 | "id": 448422 522 | } 523 | ] 524 | } -------------------------------------------------------------------------------- /COCO/coco_vised_000000007511.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/coco_vised_000000007511.jpg -------------------------------------------------------------------------------- /COCO/coco_vised_000000017905.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/coco_vised_000000017905.jpg -------------------------------------------------------------------------------- /COCO/coco_vised_000000285138.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/coco_vised_000000285138.jpg -------------------------------------------------------------------------------- /COCO/coco_vised_whiteBG_000000007511.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/coco_vised_whiteBG_000000007511.jpg -------------------------------------------------------------------------------- /COCO/coco_vised_whiteBG_000000017905.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/coco_vised_whiteBG_000000017905.jpg -------------------------------------------------------------------------------- /COCO/coco_vised_whiteBG_000000285138.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/COCO/coco_vised_whiteBG_000000285138.jpg -------------------------------------------------------------------------------- /MPII/000003072.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/MPII/000003072.jpg
--------------------------------------------------------------------------------
/MPII/mpii.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "joints_vis": [
4 | 1,
5 | 1,
6 | 1,
7 | 1,
8 | 1,
9 | 1,
10 | 1,
11 | 1,
12 | 1,
13 | 1,
14 | 1,
15 | 1,
16 | 1,
17 | 1,
18 | 1,
19 | 1
20 | ],
21 | "joints": [
22 | [
23 | 738.0,
24 | 538.0
25 | ],
26 | [
27 | 734.0,
28 | 440.0
29 | ],
30 | [
31 | 717.0,
32 | 355.0
33 | ],
34 | [
35 | 770.0,
36 | 355.0
37 | ],
38 | [
39 | 766.0,
40 | 443.0
41 | ],
42 | [
43 | 768.0,
44 | 523.0
45 | ],
46 | [
47 | 744.0,
48 | 355.0
49 | ],
50 | [
51 | 737.0,
52 | 216.0
53 | ],
54 | [
55 | 739.0207,
56 | 197.2623
57 | ],
58 | [
59 | 745.9793,
60 | 132.7377
61 | ],
62 | [
63 | 639.0,
64 | 302.0
65 | ],
66 | [
67 | 684.0,
68 | 276.0
69 | ],
70 | [
71 | 692.0,
72 | 217.0
73 | ],
74 | [
75 | 782.0,
76 | 215.0
77 | ],
78 | [
79 | 805.0,
80 | 280.0
81 | ],
82 | [
83 | 850.0,
84 | 308.0
85 | ]
86 | ],
87 | "image": "000003072.jpg",
88 | "scale": 1.946961,
89 | "center": [
90 | 754.0,
91 | 335.0
92 | ]
93 | },
94 | {
95 | "joints_vis": [
96 | 1,
97 | 1,
98 | 1,
99 | 1,
100 | 1,
101 | 1,
102 | 1,
103 | 1,
104 | 1,
105 | 1,
106 | 1,
107 | 1,
108 | 1,
109 | 1,
110 | 1,
111 | 1
112 | ],
113 | "joints": [
114 | [
115 | 410.0,
116 | 565.0
117 | ],
118 | [
119 | 418.0,
120 | 424.0
121 | ],
122 | [
123 | 420.0,
124 | 293.0
125 | ],
126 | [
127 | 456.0,
128 | 296.0
129 | ],
130 | [
131 | 460.0,
132 | 423.0
133 | ],
134 | [
135 | 442.0,
136 | 538.0
137 | ],
138 | [
139 | 438.0,
140 | 295.0
141 | ],
142 | [
143 | 434.0,
144 | 188.0
145 | ],
146 | [
147 | 444.4566,
148 | 161.4563
149 | ],
150 | [
151 | 475.5434,
152 | 82.5437
153 | ],
154 | [
155 | 437.0,
156 | 261.0
157 | ],
158 | [
159 | 367.0,
160 | 261.0
161 | ],
162 | [
163 | 389.0,
164 | 184.0
165 | ],
166 | [
167 | 479.0,
168 | 192.0
169 | ],
170 | [
171 | 474.0,
172 | 250.0
173 | ],
174 | [
175 | 487.0,
176 | 277.0
177 | ]
178 | ],
179 | "image": "000003072.jpg",
180 | "scale": 2.544453,
181 | "center": [
182 | 446.0,
183 | 328.0
184 | ]
185 | }
186 | ]
--------------------------------------------------------------------------------
/MPII/mpii_vised_000003072.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/MPII/mpii_vised_000003072.jpg
--------------------------------------------------------------------------------
/MPII/mpii_vised_whiteBG_000003072.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Indigo6/Human-Pose-Estimation-datasets-annot-format/ade8d4cf0d3b6db15d1f9b1392643bee19e54158/MPII/mpii_vised_whiteBG_000003072.jpg
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Human Pose Estimation datasets annot format
2 | 
3 | **Please refer to this article for a further guide/cookbook on these datasets and for downloading AIC (AI Challenger):**
4 | 
5 | [Introductory Guide to 2D Multi-Person Pose Estimation (0): Dataset Usage Guide (including how to download and use AI Challenger/AIC)](https://zhuanlan.zhihu.com/p/485949832)
6 | 
7 | ## 1. MPII
8 | + Keypoints:
9 | >[[738.0, 538.0], [734.0, 440.0], [717.0, 355.0], [770.0, 355.0],
10 | > [766.0, 443.0], [768.0, 523.0], [744.0, 355.0], [737.0, 216.0],
11 | > [739.0207, 197.2623], [745.9793, 132.7377], [639.0, 302.0],
12 | > [684.0, 276.0], [692.0, 217.0], [782.0, 215.0], [805.0, 280.0],
13 | > [850.0, 308.0]]
14 | 
15 | + Output image:
16 | 
17 | ![vised_000003072.jpg](MPII/mpii_vised_000003072.jpg)
18 | 
19 | 
20 | ## 2. COCO
21 | + Data format
22 | > "joint_vis": { 0: "not labeled", 1: "labeled but not visible (occluded)", 2: "labeled and visible" }
23 | 
24 | > "keypoints": {
25 | 0: "nose",
26 | 1: "left_eye",
27 | 2: "right_eye",
28 | 3: "left_ear",
29 | 4: "right_ear",
30 | 5: "left_shoulder",
31 | 6: "right_shoulder",
32 | 7: "left_elbow",
33 | 8: "right_elbow",
34 | 9: "left_wrist",
35 | 10: "right_wrist",
36 | 11: "left_hip",
37 | 12: "right_hip",
38 | 13: "left_knee",
39 | 14: "right_knee",
40 | 15: "left_ankle",
41 | 16: "right_ankle"
42 | },
43 | 
44 | > "skeleton": [[16, 14], [14, 12], [15, 13], [13, 11], [12, 11], [6, 12], [5, 11], [6, 5], [6, 8],
45 | [8, 10], [5, 7], [7, 9], [3, 1], [1, 0], [0, 2], [2, 4]]
46 | 
47 | ### Example 1: verifying keypoint positions
48 | + Keypoints
49 | > [132,261,2,139,254,2,126,254,2,147,257,2,117,258,2,171,299,2,104,303,2,177,351,2,98,360,2,182,398,2,94,414,2,164,406,2,121,407,2,170,489,2,124,488,2,182,570,2,124,569,2]
50 | 
51 | + Output image
52 | 
53 | ![vised_000000017905.jpg](COCO/coco_vised_000000017905.jpg)
54 | 
55 | 
56 | ### Example 2: verifying the visibility annotation format
57 | + Keypoints
58 | > [268, 204, 2, 308, 177, 2, 250, 171, 2, 341, 186, 2, 220, 174, 2, 401, 337, 2,
59 | 162, 276, 1, 399, 502, 2, 48, 417, 2, 273, 584, 2, 56, 292, 2, 284, 629, 2,
60 | 148, 599, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
61 | 
62 | + Output image
63 | 
64 | ![vised_000000285138.jpg](COCO/coco_vised_000000285138.jpg)
65 | 
66 | 
67 | 
68 | ## 3. AI Challenger
69 | + Data format
70 | > "joint_vis": { 1: "visible", 2: "occluded (not visible)", 3: "not labeled" }
71 | 
72 | > "keypoints": { 0: "right shoulder", 1: "right elbow", 2: "right wrist", 3: "left shoulder", 4: "left elbow", 5: "left wrist", 6: "right hip", 7: "right knee", 8: "right ankle", 9: "left hip", 10: "left knee", 11: "left ankle", 12: "head top", 13: "upper neck" }
73 | 
74 | > "skeleton": [
75 | [12,13],[13,0],[0,1],[1,2],[13,3],[3,4],[4,5],
76 | [13,6],[6,7],[7,8],[13,9],[9,10],[10,11]]
77 | 
78 | ### Example 1: verifying keypoint positions
79 | + Keypoints
80 | > [400, 183, 1, 380, 319, 1, 358, 416, 1, 559, 217, 1, 557, 357, 1, 590, 294, 1,
81 | 428, 444, 1, 394, 664, 1, 0, 0, 3, 511, 445, 1, 504, 662, 1, 0, 0, 3, 466, 51, 1,
82 | 485, 179, 1]
83 | 
84 | + Output image
85 | 
86 | ![vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg](AI%20Challenger/aic_vised_8776db91659bf1b9abada9bbc9d9f15d0b085642.jpg)
87 | 
88 | 
89 | ### Example 2: verifying the visibility annotation format
90 | + Keypoints
91 | > [235, 455, 2, 183, 678, 2, 204, 470, 1, 514, 387, 1, 433, 660, 1,
92 | 219, 715, 1, 389, 826, 2, 0, 0, 3, 0, 0, 3, 555, 813, 1, 0, 0, 3,
93 | 0, 0, 3, 191, 183, 1, 324, 395, 1]
94 | 
95 | + Output image
96 | 
97 | ![vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg](AI%20Challenger/aic_vised_043758c591b58f39a01648c49b5154ad1e01d400.jpg)
98 | 
--------------------------------------------------------------------------------
/aic2coco.py:
--------------------------------------------------------------------------------
1 | import json
2 | import copy
3 | 
4 | aic_train_path = "aic/annotations/aic_train.json"
5 | aic_val_path = "aic/annotations/aic_val.json"
6 | coco_train_path = "coco/annotations/person_keypoints_train2017.json"
7 | coco_val_path = "coco/annotations/person_keypoints_val2017.json"
8 | 
9 | with open(aic_train_path, 'r') as f:
10 |     aic_train = json.load(f)
11 | 
12 | with open(aic_val_path, 'r') as f: 13 | aic_val = json.load(f) 14 | 15 | with open(coco_train_path, 'r') as f: 16 | coco_train = json.load(f) 17 | 18 | with open(coco_val_path, 'r') as f: 19 | coco_val = json.load(f) 20 | 21 | extra2coco = [6, 8, 10, 5, 7, 9, 12, 14, 16, 11, 13, 15, -1, -1] 22 | coco2extra = [-1, -1, -1, -1, -1, 3, 0, 4, 1, 5, 2, 9, 6, 10, 7, 11, 8] 23 | 24 | 25 | aic_train['categories'] = coco_train['categories'] 26 | for i in range(len(aic_train['images'])): 27 | aic_train['images'][i]['id'] += 600000 28 | for i in range(len(aic_train['annotations'])): 29 | aic_keypoints = copy.deepcopy(aic_train['annotations'][i]['keypoints']) 30 | aic_train['annotations'][i]['keypoints'] = [0] * 51 31 | for j in range(5, 17): 32 | aic_idx = coco2extra[j] 33 | aic_train['annotations'][i]['keypoints'][3*j] = aic_keypoints[3*aic_idx] 34 | aic_train['annotations'][i]['keypoints'][3*j+1] = aic_keypoints[3*aic_idx+1] 35 | aic_train['annotations'][i]['keypoints'][3*j+2] = aic_keypoints[3*aic_idx+2] 36 | aic_train['annotations'][i]['image_id'] += 600000 37 | aic_train['annotations'][i]['id'] += 900100582000 38 | 39 | with open('aic/annotations/aic_train_coco_format.json', 'w+') as f: 40 | json.dump(aic_train, f) 41 | 42 | for i in range(len(aic_val['images'])): 43 | aic_val['images'][i]['id'] += 600000 + 210000 44 | aic_val['categories'] = coco_train['categories'] 45 | for i in range(len(aic_val['annotations'])): 46 | aic_keypoints = copy.deepcopy(aic_val['annotations'][i]['keypoints']) 47 | aic_val['annotations'][i]['keypoints'] = [0] * 51 48 | for j in range(5, 17): 49 | aic_idx = coco2extra[j] 50 | aic_val['annotations'][i]['keypoints'][3*j] = aic_keypoints[3*aic_idx] 51 | aic_val['annotations'][i]['keypoints'][3*j+1] = aic_keypoints[3*aic_idx+1] 52 | aic_val['annotations'][i]['keypoints'][3*j+2] = aic_keypoints[3*aic_idx+2] 53 | aic_val['annotations'][i]['image_id'] += 600000 + 210000 54 | aic_val['annotations'][i]['id'] += 900100582000 + 210000 55 | 56 | with open('aic/annotations/aic_val_coco_format.json', 'w+') as f: 57 | json.dump(aic_val, f) 58 | 59 | coco_train['images'].extend(aic_train['images']) 60 | coco_train['annotations'].extend(aic_train['annotations']) 61 | print("+ aic train: {} samples.".format(len(coco_train['annotations']))) 62 | with open('coco/annotations/coco_train_aic_train.json', 'w+') as f: 63 | json.dump(coco_train, f) 64 | 65 | coco_train['images'].extend(aic_val['images']) 66 | coco_train['annotations'].extend(aic_val['annotations']) 67 | print("+ aic train and val: {} samples.".format(len(coco_train['annotations']))) 68 | with open('coco/annotations/coco_train_aic_trainval.json', 'w+') as f: 69 | json.dump(coco_train, f) 70 | 71 | print('test') -------------------------------------------------------------------------------- /vis.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import cv2 3 | import json 4 | import os 5 | 6 | import numpy as np 7 | 8 | dataset_indexes = { 9 | 'mpii': { 10 | 0: 'right_ankle', 11 | 1: 'right_knee', 12 | 2: 'right_hip', 13 | 3: 'left_hip', 14 | 4: 'left_knee', 15 | 5: 'left_ankle', 16 | 6: 'pelvis', 17 | 7: 'thorax', 18 | 8: 'upper_neck', 19 | 9: 'head_top', 20 | 10: 'right_wrist', 21 | 11: 'right_elbow', 22 | 12: 'right_shoulder', 23 | 13: 'left_shoulder', 24 | 14: 'left_elbow', 25 | 15: 'left_wrist' 26 | }, 27 | 'coco': { 28 | 0: 'nose', 29 | 1: 'left_eye', 30 | 2: 'right_eye', 31 | 3: 'left_ear', 32 | 4: 'right_ear', 33 | 5: 'left_shoulder', 34 | 6: 
'right_shoulder', 35 | 7: 'left_elbow', 36 | 8: 'right_elbow', 37 | 9: 'left_wrist', 38 | 10: 'right_wrist', 39 | 11: 'left_hip', 40 | 12: 'right_hip', 41 | 13: 'left_knee', 42 | 14: 'right_knee', 43 | 15: 'left_ankle', 44 | 16: 'right_ankle' 45 | }, 46 | 'aic': { 47 | 0: "right_shoulder", 48 | 1: "right_elbow", 49 | 2: "right_wrist", 50 | 3: "left_shoulder", 51 | 4: "left_elbow", 52 | 5: "left_wrist", 53 | 6: "right_hip", 54 | 7: "right_knee", 55 | 8: "right_ankle", 56 | 9: "left_hip", 57 | 10: "left_knee", 58 | 11: "left_ankle", 59 | 12: "head_top", 60 | 13: "neck" 61 | } 62 | } 63 | 64 | dataset_skeletons = { 65 | 'mpii': [[10, 11], [11, 12], [15, 14], [14, 13], [0, 1], [1, 2], [5, 4], [4, 3], 66 | [2, 6], [3, 6], [12, 7], [13, 7], [6, 7], [7, 8], [8, 9]], 67 | 'coco': [[16, 14], [14, 12], [15, 13], [13, 11], [12, 11], [6, 12], [5, 11], [6, 5], [6, 8], 68 | [8, 10], [5, 7], [7, 9], [3, 1], [1, 0], [0, 2], [2, 4]] 69 | } 70 | 71 | 72 | def get_roi(my_annot, my_dataset): 73 | if my_dataset == "mpii": 74 | center = np.array(my_annot["center"]) 75 | my_scale = np.array([my_annot["scale"] * 200] * 2) 76 | my_roi = np.append((center - my_scale / 2), my_scale).astype(np.int32) 77 | else: 78 | my_roi = np.array(my_annot["bbox"], dtype=np.int32) 79 | 80 | return my_roi 81 | 82 | 83 | def get_suitable_wh(my_roi): 84 | screen = np.array([1600, 900]) 85 | my_scale = min(screen / roi[2:]) 86 | return int(my_roi[2] * my_scale), int(my_roi[3] * my_scale), my_scale 87 | 88 | 89 | if __name__ == '__main__': 90 | parser = argparse.ArgumentParser(description='Visualize Dataset Annotation Format') 91 | # general 92 | parser.add_argument('--name', 93 | help='dataset name', 94 | default="mpii", 95 | choices=["mpii", "coco", "aic", "aic_coco"]) 96 | 97 | args = parser.parse_args() 98 | dataset = args.name 99 | 100 | dataset_roots = { 101 | "mpii": "MPII/", 102 | "coco": "COCO/", 103 | "aic": "AI Challenger/", 104 | "aic_coco": "AI Challenger/" 105 | } 106 | 107 | dataset_annots = { 108 | "mpii": "mpii.json", 109 | "coco": "coco.json", 110 | "aic": "aic.json", 111 | "aic_coco": "aic_coco_format.json" 112 | } 113 | 114 | root = dataset_roots[dataset] 115 | annot_file = os.path.join(root, dataset_annots[dataset]) 116 | write_dst = os.path.join(root, r"{}_vised_".format(dataset)) 117 | write_dst_whiteBG = os.path.join(root, r"{}_vised_whiteBG_".format(dataset)) 118 | indexes = dataset_indexes[dataset] 119 | skeletons = dataset_skeletons[dataset] 120 | 121 | with open(annot_file) as f: 122 | annots = json.load(f) 123 | 124 | if dataset != "mpii": 125 | id2name = {} 126 | for image_info in annots['images']: 127 | id2name[image_info['id']] = image_info['file_name'] 128 | annotations = annots['annotations'] 129 | else: 130 | annotations = annots 131 | 132 | for annot in annotations: 133 | roi = get_roi(annot, dataset) 134 | w, h, scale = get_suitable_wh(roi) 135 | 136 | if dataset == "mpii": 137 | img_name = annot["image"] 138 | label = annot['joints'] 139 | label = [[(item[0] - roi[0]) * scale, (item[1] - roi[1]) * scale] 140 | for item in label] 141 | else: 142 | img_name = id2name[annot['image_id']] 143 | label = annot['keypoints'] 144 | label = [[(label[i] - roi[0]) * scale, (label[i + 1] - roi[1]) * scale] 145 | for i in range(0, len(label), 3)] 146 | 147 | label = np.array(label, dtype=np.int32) 148 | # print(label) 149 | 150 | img = cv2.imread(os.path.join(root, img_name)) 151 | img = img[roi[1]:roi[1] + roi[3], roi[0]:roi[0] + roi[2], :] 152 | img = cv2.resize(img, (w, h)) 153 | 154 | for i, key in 
enumerate(label): 155 | cv2.circle(img, (key[0], key[1]), 5, (0, 0, 255), -1, lineType=cv2.LINE_AA) 156 | cv2.putText(img, str(i), (key[0], key[1]), cv2.FONT_HERSHEY_SIMPLEX, 1.5, (0, 255, 0), 2, 157 | lineType=cv2.LINE_AA) 158 | 159 | cv2.imwrite(write_dst + img_name, img) 160 | cv2.imshow('src', img) 161 | cv2.waitKey(0) 162 | 163 | whiteBG_img = np.zeros([h+300, w+300, 3], dtype=np.uint8) + 255 164 | for i, key in enumerate(label): 165 | cv2.circle(whiteBG_img, (key[0]+50, key[1]+50), 5, (0, 0, 0), -1, lineType=cv2.LINE_AA) 166 | for p, q in skeletons: 167 | cv2.line(whiteBG_img, label[p]+50, label[q]+50, (0, 0, 0), 2, lineType=cv2.LINE_AA) 168 | 169 | cv2.imwrite(write_dst_whiteBG + img_name, whiteBG_img) 170 | cv2.imshow('src', whiteBG_img) 171 | cv2.waitKey(0) 172 | --------------------------------------------------------------------------------
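
A minimal, illustrative sanity check (not part of the repository) for the AIC-to-COCO keypoint remapping that aic2coco.py performs: the sketch below reuses the `coco2extra` index table from aic2coco.py, reads the sample file `AI Challenger/aic.json`, and prints each named COCO joint so the result can be compared against `AI Challenger/aic_coco_format.json`. The joint-name list follows the COCO order given in vis.py; the file path and variable names are assumptions for this example only.

```python
# Sketch: remap the sample AIC annotations into COCO keypoint order using the
# same index table as aic2coco.py, then print each named COCO joint.
import json

# COCO joint j (5..16) is filled from AIC joint coco2extra[j]; COCO joints 0..4
# (nose, eyes, ears) have no AIC counterpart and stay (0, 0, 0).
coco2extra = [-1, -1, -1, -1, -1, 3, 0, 4, 1, 5, 2, 9, 6, 10, 7, 11, 8]
coco_names = ["nose", "left_eye", "right_eye", "left_ear", "right_ear",
              "left_shoulder", "right_shoulder", "left_elbow", "right_elbow",
              "left_wrist", "right_wrist", "left_hip", "right_hip",
              "left_knee", "right_knee", "left_ankle", "right_ankle"]

with open("AI Challenger/aic.json") as f:
    aic = json.load(f)

for ann in aic["annotations"]:
    kps = ann["keypoints"]       # flat AIC list: [x, y, v] * 14 joints
    coco_kps = [0] * 51          # flat COCO list: [x, y, v] * 17 joints
    for j in range(5, 17):
        a = coco2extra[j]
        # The visibility flag is copied through unchanged, as in aic2coco.py.
        coco_kps[3 * j:3 * j + 3] = kps[3 * a:3 * a + 3]
    print("annotation id", ann["id"])
    for j, name in enumerate(coco_names):
        print("  {:>14s}: {}".format(name, coco_kps[3 * j:3 * j + 3]))
```

The full aic2coco.py script applies the same per-joint remapping and, in addition, offsets image ids by 600000 (plus 210000 for the val split) and annotation ids by 900100582000 before merging the converted records into the COCO training annotations.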