├── .gitignore
├── README.md
├── data
│   ├── alpha_test.csv
│   ├── beta_test.csv
│   ├── data.txt
│   ├── data_bin.txt
│   ├── label_test.csv
│   └── result.txt
├── glad.py
├── glad_bin.py
├── glad_naive.py
└── gladtest.py

/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | 
5 | # C extensions
6 | *.so
7 | 
8 | # Distribution / packaging
9 | .Python
10 | env/
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | lib/
17 | lib64/
18 | parts/
19 | sdist/
20 | var/
21 | *.egg-info/
22 | .installed.cfg
23 | *.egg
24 | 
25 | # PyInstaller
26 | # Usually these files are written by a python script from a template
27 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
28 | *.manifest
29 | *.spec
30 | 
31 | # Installer logs
32 | pip-log.txt
33 | pip-delete-this-directory.txt
34 | 
35 | # Unit test / coverage reports
36 | htmlcov/
37 | .tox/
38 | .coverage
39 | .cache
40 | nosetests.xml
41 | coverage.xml
42 | 
43 | # Translations
44 | *.mo
45 | *.pot
46 | 
47 | # Django stuff:
48 | *.log
49 | 
50 | # Sphinx documentation
51 | docs/_build/
52 | 
53 | # PyBuilder
54 | target/
55 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # python-glad #
2 | A Python Implementation of GLAD.
3 | 
4 | ## GLAD ##
5 | An algorithm for aggregating labels given by labelers more accurately than majority vote.
6 | 
7 | * Whitehill, Jacob, et al. "Whose vote should count more: Optimal integration of labels from labelers of unknown expertise." Advances in Neural Information Processing Systems. 2009.
8 | 
9 | ## Dependencies ##
10 | * numpy
11 | * scipy
12 | 
13 | ## Usage ##
14 | ```bash
15 | $ python glad.py data/data.txt
16 | ```
17 | 
18 | ## Input Format ##
19 | ```
20 | First line:
21 | <number of labels> <number of labelers> <number of tasks> <prior probability of z=1>
22 | Following lines:
23 | <task id> <labeler id> <label>
24 | ```
25 | * The task IDs must be consecutive integers starting from 0.
26 | * The labeler IDs must be consecutive integers starting from 0.
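For example, assuming the header layout above, a minimal hypothetical input with 4 labels given by 2 labelers on 2 tasks, and a class prior of 0.5, would look like this (the values are illustrative and not taken from the bundled `data/data.txt`):

```
4 2 2 0.5
0 0 1
0 1 1
1 0 0
1 1 1
```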
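The scripts implement the EM procedure of Whitehill et al.: each labeler j gets an ability parameter alpha_j, each task i an inverse-difficulty parameter beta_i > 0, and a label is correct with probability sigmoid(alpha_j * beta_i). The sketch below is our own minimal illustration of that procedure, not the repository's actual `glad.py`; names such as `e_step`, `_neg_q`, and `glad` are ours, and the real implementation optimizes with analytic gradients rather than the generic numeric L-BFGS-B call used here.

```python
import numpy as np
from scipy.optimize import minimize


def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))


def e_step(triples, alpha, beta, prior_z1):
    """Posterior p(z_i = 1 | labels) for every task i."""
    log_p1 = np.full(len(beta), np.log(prior_z1))
    log_p0 = np.full(len(beta), np.log(1.0 - prior_z1))
    for i, j, l in triples:
        # GLAD model: a label is correct with probability sigmoid(alpha_j * beta_i)
        p = np.clip(sigmoid(alpha[j] * beta[i]), 1e-12, 1.0 - 1e-12)
        p_match = p if l == 1 else 1.0 - p   # p(l_ij | z_i = 1)
        log_p1[i] += np.log(p_match)
        log_p0[i] += np.log(1.0 - p_match)   # p(l_ij | z_i = 0)
    return sigmoid(log_p1 - log_p0)          # equals p1 / (p0 + p1)


def _neg_q(params, triples, q, n_labelers):
    """Negative expected complete-data log-likelihood of the labels."""
    alpha = params[:n_labelers]
    beta = np.exp(params[n_labelers:])       # optimize log(beta) so beta stays > 0
    total = 0.0
    for i, j, l in triples:
        p = np.clip(sigmoid(alpha[j] * beta[i]), 1e-12, 1.0 - 1e-12)
        p_match = p if l == 1 else 1.0 - p   # p(l_ij | z_i = 1)
        total += q[i] * np.log(p_match) + (1.0 - q[i]) * np.log(1.0 - p_match)
    return -total


def glad(triples, n_labelers, n_tasks, prior_z1=0.5, n_iter=50):
    """Alternate E- and M-steps for a fixed number of iterations."""
    alpha = np.ones(n_labelers)
    beta = np.ones(n_tasks)
    for _ in range(n_iter):
        q = e_step(triples, alpha, beta, prior_z1)               # E-step
        x0 = np.concatenate([alpha, np.log(beta)])
        res = minimize(_neg_q, x0, args=(triples, q, n_labelers),
                       method='L-BFGS-B')                        # M-step
        alpha = res.x[:n_labelers]
        beta = np.exp(res.x[n_labelers:])
    return e_step(triples, alpha, beta, prior_z1), alpha, beta
```

For the toy input above, `glad([(0, 0, 1), (0, 1, 1), (1, 0, 0), (1, 1, 1)], n_labelers=2, n_tasks=2)` returns the per-task posterior p(z=1) together with the fitted alpha and beta, which correspond to the values written out in `data/label_test.csv`, `data/alpha_test.csv`, and `data/beta_test.csv` for the bundled dataset.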
27 | -------------------------------------------------------------------------------- /data/alpha_test.csv: -------------------------------------------------------------------------------- 1 | # id,alpha 2 | 1,0.31859 3 | 2,0.29873 4 | 3,0.36499 5 | 4,0.34467 6 | 5,0.35643 7 | 6,0.33652 8 | 7,0.39326 9 | 8,0.31781 10 | 9,0.30328 11 | 10,0.31059 12 | 11,0.36889 13 | 12,0.34452 14 | 13,0.36672 15 | 14,0.34592 16 | 15,0.32217 17 | 16,0.37900 18 | 17,1.97929 19 | 18,2.58659 20 | 19,2.44488 21 | 20,2.24527 22 | -------------------------------------------------------------------------------- /data/beta_test.csv: -------------------------------------------------------------------------------- 1 | # id,beta 2 | 1,4.01674 3 | 2,4.05188 4 | 3,2.45550 5 | 4,4.11996 6 | 5,4.86722 7 | 6,5.00670 8 | 7,3.85358 9 | 8,2.94166 10 | 9,5.04767 11 | 10,2.42973 12 | 11,2.55290 13 | 12,3.96966 14 | 13,1.66055 15 | 14,4.15522 16 | 15,6.62655 17 | 16,3.78605 18 | 17,4.99878 19 | 18,7.16955 20 | 19,1.26168 21 | 20,2.30131 22 | 21,4.02499 23 | 22,3.98396 24 | 23,2.46176 25 | 24,3.21810 26 | 25,5.32733 27 | 26,6.62655 28 | 27,3.19921 29 | 28,4.01623 30 | 29,2.01811 31 | 30,2.45358 32 | 31,4.09836 33 | 32,5.09548 34 | 33,5.08872 35 | 34,6.53871 36 | 35,3.04712 37 | 36,1.14489 38 | 37,3.00457 39 | 38,5.18160 40 | 39,2.44238 41 | 40,5.27700 42 | 41,4.23841 43 | 42,1.27284 44 | 43,2.33170 45 | 44,1.97391 46 | 45,5.12014 47 | 46,4.95289 48 | 47,10.98613 49 | 48,2.56442 50 | 49,2.52520 51 | 50,5.19768 52 | 51,5.01404 53 | 52,6.84783 54 | 53,4.04747 55 | 54,6.69019 56 | 55,3.17565 57 | 56,1.93762 58 | 57,6.70406 59 | 58,2.82254 60 | 59,6.84783 61 | 60,4.88193 62 | 61,3.80699 63 | 62,4.17971 64 | 63,5.14218 65 | 64,2.05937 66 | 65,3.33089 67 | 66,5.39390 68 | 67,3.90229 69 | 68,4.03140 70 | 69,4.91355 71 | 70,5.19944 72 | 71,4.99069 73 | 72,5.00726 74 | 73,2.51546 75 | 74,4.82863 76 | 75,5.01404 77 | 76,6.70406 78 | 77,5.21425 79 | 78,2.03273 80 | 79,3.85358 81 | 80,5.10244 82 | 81,3.18396 83 | 82,4.02764 84 | 83,1.79493 85 | 84,4.21642 86 | 85,1.99109 87 | 86,2.54461 88 | 87,5.08929 89 | 88,5.03624 90 | 89,3.15809 91 | 90,3.19091 92 | 91,5.14218 93 | 92,5.12702 94 | 93,4.00151 95 | 94,2.47663 96 | 95,3.16114 97 | 96,7.16955 98 | 97,3.04216 99 | 98,7.03236 100 | 99,3.91820 101 | 100,4.25016 102 | 101,3.08798 103 | 102,4.87539 104 | 103,4.14087 105 | 104,7.16955 106 | 105,0.94267 107 | 106,7.13625 108 | 107,5.46120 109 | 108,3.80708 110 | 109,3.92485 111 | 110,3.30610 112 | 111,1.70193 113 | 112,3.99377 114 | 113,2.44918 115 | 114,4.08705 116 | 115,5.23259 117 | 116,3.09707 118 | 117,5.06507 119 | 118,2.92468 120 | 119,3.19498 121 | 120,5.03867 122 | 121,3.80319 123 | 122,1.97687 124 | 123,3.18912 125 | 124,3.07506 126 | 125,5.10244 127 | 126,2.47292 128 | 127,3.70349 129 | 128,3.89371 130 | 129,2.15552 131 | 130,5.06091 132 | 131,2.36591 133 | 132,3.88677 134 | 133,1.50832 135 | 134,2.96055 136 | 135,3.21009 137 | 136,3.92975 138 | 137,2.00619 139 | 138,3.29415 140 | 139,6.71518 141 | 140,1.14142 142 | 141,4.08747 143 | 142,3.22834 144 | 143,4.06745 145 | 144,7.03236 146 | 145,4.15862 147 | 146,3.32511 148 | 147,3.12868 149 | 148,3.13865 150 | 149,1.78641 151 | 150,3.07469 152 | 151,5.20341 153 | 152,1.83972 154 | 153,3.86473 155 | 154,3.95271 156 | 155,3.76362 157 | 156,5.37831 158 | 157,3.85135 159 | 158,4.96126 160 | 159,3.11621 161 | 160,2.45980 162 | 161,2.48938 163 | 162,1.60934 164 | 163,3.16801 165 | 164,5.26308 166 | 165,5.01404 167 | 166,10.98613 168 | 167,5.11685 169 | 168,4.86722 170 | 169,5.26308 171 | 
170,7.03236 172 | 171,4.11951 173 | 172,2.96184 174 | 173,6.77054 175 | 174,1.73493 176 | 175,7.13625 177 | 176,0.79132 178 | 177,3.25058 179 | 178,5.46120 180 | 179,5.27700 181 | 180,5.13692 182 | 181,3.13353 183 | 182,2.92637 184 | 183,4.04144 185 | 184,2.53929 186 | 185,3.05002 187 | 186,3.93196 188 | 187,4.17207 189 | 188,3.06156 190 | 189,3.89252 191 | 190,3.05001 192 | 191,3.12821 193 | 192,3.11295 194 | 193,3.02434 195 | 194,3.75802 196 | 195,5.08929 197 | 196,3.15808 198 | 197,4.90468 199 | 198,5.09278 200 | 199,2.96781 201 | 200,3.18775 202 | 201,3.20105 203 | 202,3.84137 204 | 203,2.36414 205 | 204,1.03481 206 | 205,10.98613 207 | 206,5.40902 208 | 207,3.25058 209 | 208,5.04767 210 | 209,3.26276 211 | 210,3.05554 212 | 211,4.96126 213 | 212,4.06457 214 | 213,4.07633 215 | 214,3.23295 216 | 215,3.12090 217 | 216,3.95385 218 | 217,6.83960 219 | 218,3.81398 220 | 219,3.08804 221 | 220,4.10739 222 | 221,3.19724 223 | 222,2.54436 224 | 223,3.93472 225 | 224,3.27096 226 | 225,2.36614 227 | 226,3.13759 228 | 227,5.04767 229 | 228,1.20435 230 | 229,7.00134 231 | 230,6.62655 232 | 231,5.11685 233 | 232,3.18515 234 | 233,4.11764 235 | 234,2.47325 236 | 235,3.09243 237 | 236,1.71311 238 | 237,3.90725 239 | 238,5.15480 240 | 239,4.03010 241 | 240,2.10796 242 | 241,5.15147 243 | 242,5.21833 244 | 243,5.16301 245 | 244,5.09424 246 | 245,5.35866 247 | 246,2.43676 248 | 247,2.16821 249 | 248,4.13330 250 | 249,6.90278 251 | 250,3.96173 252 | 251,1.52821 253 | 252,5.15147 254 | 253,3.94357 255 | 254,3.83050 256 | 255,1.24323 257 | 256,3.00260 258 | 257,7.02654 259 | 258,4.29864 260 | 259,3.02226 261 | 260,4.90468 262 | 261,3.13851 263 | 262,6.83960 264 | 263,5.14218 265 | 264,3.05461 266 | 265,5.39747 267 | 266,2.65636 268 | 267,3.85839 269 | 268,1.45570 270 | 269,2.54749 271 | 270,4.90468 272 | 271,4.05041 273 | 272,3.96304 274 | 273,3.24388 275 | 274,4.19979 276 | 275,3.87048 277 | 276,3.26055 278 | 277,2.36812 279 | 278,2.90879 280 | 279,3.95045 281 | 280,6.83960 282 | 281,2.61255 283 | 282,3.86960 284 | 283,5.37416 285 | 284,3.07352 286 | 285,3.94857 287 | 286,5.29674 288 | 287,3.99331 289 | 288,1.70930 290 | 289,5.40902 291 | 290,5.14218 292 | 291,2.47111 293 | 292,5.15147 294 | 293,3.20170 295 | 294,2.50524 296 | 295,10.98613 297 | 296,3.10312 298 | 297,4.16960 299 | 298,3.12433 300 | 299,4.09827 301 | 300,1.99222 302 | 301,3.01983 303 | 302,3.12697 304 | 303,3.12397 305 | 304,7.16955 306 | 305,3.24458 307 | 306,4.02730 308 | 307,5.29674 309 | 308,3.11128 310 | 309,2.95182 311 | 310,5.21833 312 | 311,3.15501 313 | 312,5.00726 314 | 313,3.91498 315 | 314,3.93608 316 | 315,4.06097 317 | 316,3.38718 318 | 317,2.58872 319 | 318,2.37972 320 | 319,3.94452 321 | 320,4.12998 322 | 321,3.17880 323 | 322,4.03138 324 | 323,2.53947 325 | 324,3.88637 326 | 325,2.45857 327 | 326,0.96306 328 | 327,5.11315 329 | 328,3.22412 330 | 329,4.86052 331 | 330,3.20232 332 | 331,2.07283 333 | 332,4.92015 334 | 333,1.10830 335 | 334,4.99341 336 | 335,2.47861 337 | 336,3.90725 338 | 337,4.91355 339 | 338,7.00134 340 | 339,5.28221 341 | 340,6.77054 342 | 341,1.74175 343 | 342,2.53505 344 | 343,2.59493 345 | 344,6.90278 346 | 345,3.03296 347 | 346,2.42333 348 | 347,3.80171 349 | 348,5.23259 350 | 349,1.42207 351 | 350,10.98613 352 | 351,3.06875 353 | 352,5.12208 354 | 353,3.96542 355 | 354,1.39616 356 | 355,5.14923 357 | 356,3.85472 358 | 357,2.39561 359 | 358,4.04164 360 | 359,3.02889 361 | 360,5.26308 362 | 361,3.03197 363 | 362,7.08350 364 | 363,3.00457 365 | 364,3.13599 366 | 365,5.23259 367 | 366,3.17902 368 | 
367,6.83960 369 | 368,3.08899 370 | 369,3.18602 371 | 370,2.08293 372 | 371,2.56184 373 | 372,3.97441 374 | 373,4.21341 375 | 374,4.17207 376 | 375,10.98613 377 | 376,1.96560 378 | 377,0.95679 379 | 378,4.06539 380 | 379,2.47635 381 | 380,3.87094 382 | 381,2.10796 383 | 382,2.37265 384 | 383,5.04710 385 | 384,3.13625 386 | 385,1.81941 387 | 386,3.85472 388 | 387,0.87905 389 | 388,4.12705 390 | 389,2.46266 391 | 390,10.98613 392 | 391,4.19076 393 | 392,5.19772 394 | 393,2.41523 395 | 394,3.04397 396 | 395,3.19681 397 | 396,5.42896 398 | 397,3.73014 399 | 398,5.03867 400 | 399,3.95182 401 | 400,2.35057 402 | 401,1.99743 403 | 402,1.62847 404 | 403,3.93962 405 | 404,2.41709 406 | 405,1.30237 407 | 406,4.07022 408 | 407,2.51451 409 | 408,5.46120 410 | 409,2.47106 411 | 410,5.12145 412 | 411,5.11685 413 | 412,3.19861 414 | 413,1.52444 415 | 414,1.10300 416 | 415,4.08705 417 | 416,3.93331 418 | 417,5.23322 419 | 418,3.95931 420 | 419,3.01651 421 | 420,1.67430 422 | 421,3.22340 423 | 422,3.11832 424 | 423,4.86722 425 | 424,3.82959 426 | 425,2.51099 427 | 426,3.97198 428 | 427,3.03541 429 | 428,5.20341 430 | 429,10.98613 431 | 430,6.84880 432 | 431,1.97837 433 | 432,5.37416 434 | 433,7.08350 435 | 434,4.03645 436 | 435,2.46637 437 | 436,3.24856 438 | 437,3.86996 439 | 438,4.96126 440 | 439,2.39607 441 | 440,3.80171 442 | 441,3.11751 443 | 442,5.04552 444 | 443,6.69019 445 | 444,4.01860 446 | 445,4.14711 447 | 446,3.76362 448 | 447,5.23322 449 | 448,7.13625 450 | 449,2.58925 451 | 450,5.12644 452 | 451,6.62655 453 | 452,3.98151 454 | 453,3.04934 455 | 454,7.08350 456 | 455,4.20229 457 | 456,10.98613 458 | 457,5.12014 459 | 458,3.36742 460 | 459,4.26188 461 | 460,1.54395 462 | 461,3.87094 463 | 462,3.90851 464 | 463,4.11951 465 | 464,3.82910 466 | 465,4.07588 467 | 466,3.16975 468 | 467,7.16955 469 | 468,3.97585 470 | 469,1.95622 471 | 470,5.29674 472 | 471,5.42896 473 | 472,2.41685 474 | 473,2.47186 475 | 474,4.19166 476 | 475,3.01071 477 | 476,4.90523 478 | 477,2.56238 479 | 478,3.24182 480 | 479,3.23260 481 | 480,5.18160 482 | 481,5.14923 483 | 482,4.00983 484 | 483,6.69019 485 | 484,3.95931 486 | 485,3.04116 487 | 486,5.10441 488 | 487,3.14232 489 | 488,3.83676 490 | 489,6.83960 491 | 490,4.14465 492 | 491,2.95382 493 | 492,4.08462 494 | 493,4.15142 495 | 494,4.03645 496 | 495,5.22746 497 | 496,10.98613 498 | 497,7.16955 499 | 498,2.93387 500 | 499,1.78346 501 | 500,6.83960 502 | 501,1.96971 503 | 502,3.96035 504 | 503,0.89604 505 | 504,6.70406 506 | 505,2.13101 507 | 506,2.53464 508 | 507,3.05554 509 | 508,4.09778 510 | 509,7.00134 511 | 510,4.04195 512 | 511,3.09527 513 | 512,5.31197 514 | 513,2.45273 515 | 514,3.89117 516 | 515,3.07772 517 | 516,7.08350 518 | 517,5.03624 519 | 518,4.15862 520 | 519,5.10986 521 | 520,5.40902 522 | 521,6.62655 523 | 522,4.04144 524 | 523,4.99091 525 | 524,3.84267 526 | 525,4.07633 527 | 526,6.53871 528 | 527,2.45601 529 | 528,2.52616 530 | 529,1.69862 531 | 530,2.46923 532 | 531,5.42896 533 | 532,2.38392 534 | 533,4.19076 535 | 534,6.83960 536 | 535,1.70345 537 | 536,5.42896 538 | 537,5.18418 539 | 538,2.44647 540 | 539,3.01151 541 | 540,4.03106 542 | 541,2.59342 543 | 542,5.29674 544 | 543,5.42896 545 | 544,3.21761 546 | 545,5.06507 547 | 546,4.13330 548 | 547,5.10441 549 | 548,4.93574 550 | 549,3.08168 551 | 550,10.98613 552 | 551,4.95289 553 | 552,3.78605 554 | 553,6.77054 555 | 554,3.10723 556 | 555,4.03645 557 | 556,5.00188 558 | 557,4.92845 559 | 558,2.95620 560 | 559,1.66345 561 | 560,2.45239 562 | 561,2.49482 563 | 562,6.83960 564 | 563,5.21833 565 | 
564,5.32733 566 | 565,3.23220 567 | 566,3.26785 568 | 567,3.11809 569 | 568,1.68396 570 | 569,2.54887 571 | 570,5.03012 572 | 571,3.94897 573 | 572,3.05295 574 | 573,3.15024 575 | 574,2.37273 576 | 575,5.00726 577 | 576,3.05826 578 | 577,3.22162 579 | 578,3.95182 580 | 579,2.47716 581 | 580,3.07114 582 | 581,2.56038 583 | 582,4.10496 584 | 583,3.19218 585 | 584,5.00188 586 | 585,1.79253 587 | 586,10.98613 588 | 587,3.03794 589 | 588,3.13865 590 | 589,3.14844 591 | 590,5.29327 592 | 591,5.46120 593 | 592,3.99331 594 | 593,1.82157 595 | 594,2.58391 596 | 595,4.06239 597 | 596,5.04710 598 | 597,5.19768 599 | 598,3.95473 600 | 599,5.04222 601 | 600,6.69019 602 | 601,3.17944 603 | 602,2.44553 604 | 603,3.93962 605 | 604,5.17190 606 | 605,3.13759 607 | 606,3.16457 608 | 607,1.14241 609 | 608,5.03624 610 | 609,5.19772 611 | 610,2.47160 612 | 611,2.48665 613 | 612,4.01525 614 | 613,2.41319 615 | 614,5.09548 616 | 615,5.26308 617 | 616,3.92308 618 | 617,3.73600 619 | 618,5.31680 620 | 619,3.85839 621 | 620,4.04747 622 | 621,6.62655 623 | 622,2.61655 624 | 623,1.51201 625 | 624,2.01257 626 | 625,3.98422 627 | 626,2.04427 628 | 627,2.00585 629 | 628,2.54653 630 | 629,5.00670 631 | 630,10.98613 632 | 631,2.58767 633 | 632,4.06280 634 | 633,7.13625 635 | 634,4.99341 636 | 635,1.50911 637 | 636,10.98613 638 | 637,3.92485 639 | 638,3.19544 640 | 639,3.77977 641 | 640,7.02654 642 | 641,5.28221 643 | 642,0.83168 644 | 643,6.83960 645 | 644,7.03236 646 | 645,5.01348 647 | 646,4.07435 648 | 647,3.13580 649 | 648,3.94000 650 | 649,5.12014 651 | 650,2.46775 652 | 651,3.12021 653 | 652,1.94981 654 | 653,5.08315 655 | 654,7.08350 656 | 655,6.70406 657 | 656,2.44907 658 | 657,2.43730 659 | 658,2.40389 660 | 659,5.15480 661 | 660,5.23311 662 | 661,3.85605 663 | 662,6.77054 664 | 663,4.22469 665 | 664,1.21859 666 | 665,3.84044 667 | 666,3.20910 668 | 667,5.42896 669 | 668,5.11315 670 | 669,5.06507 671 | 670,5.26369 672 | 671,4.03231 673 | 672,4.13339 674 | 673,4.16960 675 | 674,5.37771 676 | 675,2.35367 677 | 676,2.40218 678 | 677,2.58901 679 | 678,3.10055 680 | 679,4.20229 681 | 680,6.90278 682 | 681,3.99289 683 | 682,4.87539 684 | 683,2.02674 685 | 684,2.96599 686 | 685,6.71518 687 | 686,4.10871 688 | 687,2.99845 689 | 688,2.00972 690 | 689,6.83960 691 | 690,5.37416 692 | 691,2.37215 693 | 692,3.07932 694 | 693,3.12396 695 | 694,6.90278 696 | 695,4.88193 697 | 696,4.07727 698 | 697,5.07945 699 | 698,6.90278 700 | 699,4.12998 701 | 700,1.93929 702 | 701,1.99500 703 | 702,3.03172 704 | 703,1.56167 705 | 704,0.80026 706 | 705,1.21931 707 | 706,0.68404 708 | 707,0.96247 709 | 708,0.87564 710 | 709,1.19237 711 | 710,1.20919 712 | 711,1.62677 713 | 712,1.98994 714 | 713,1.14533 715 | 714,1.51859 716 | 715,1.72258 717 | 716,1.48284 718 | 717,1.69999 719 | 718,1.19228 720 | 719,2.42468 721 | 720,1.19436 722 | 721,1.33264 723 | 722,1.21720 724 | 723,1.05653 725 | 724,1.17767 726 | 725,1.62296 727 | 726,1.48403 728 | 727,1.78526 729 | 728,3.09977 730 | 729,0.99353 731 | 730,0.65319 732 | 731,1.32509 733 | 732,1.47119 734 | 733,1.10601 735 | 734,1.21088 736 | 735,1.30060 737 | 736,0.90579 738 | 737,1.08190 739 | 738,1.47772 740 | 739,1.48270 741 | 740,2.40856 742 | 741,1.45473 743 | 742,1.32780 744 | 743,1.74716 745 | 744,1.36482 746 | 745,3.06156 747 | 746,0.98128 748 | 747,1.70826 749 | 748,0.55787 750 | 749,2.02589 751 | 750,1.33438 752 | 751,1.17941 753 | 752,1.34493 754 | 753,1.67955 755 | 754,0.80892 756 | 755,1.57248 757 | 756,2.49823 758 | 757,2.00184 759 | 758,2.09239 760 | 759,0.67839 761 | 760,0.63511 762 | 
761,1.08564 763 | 762,3.21828 764 | 763,1.68019 765 | 764,1.32229 766 | 765,0.96013 767 | 766,0.92211 768 | 767,0.61070 769 | 768,1.20405 770 | 769,0.60819 771 | 770,2.02733 772 | 771,1.87238 773 | 772,1.67992 774 | 773,0.80614 775 | 774,1.18751 776 | 775,2.40792 777 | 776,1.79978 778 | 777,2.50636 779 | 778,1.07113 780 | 779,1.74863 781 | 780,3.10921 782 | 781,1.50415 783 | 782,0.90770 784 | 783,2.52740 785 | 784,0.93225 786 | 785,2.49442 787 | 786,3.09818 788 | 787,0.99944 789 | 788,1.73647 790 | 789,1.42743 791 | 790,1.22504 792 | 791,2.08444 793 | 792,1.33027 794 | 793,2.07348 795 | 794,1.52161 796 | 795,1.49660 797 | 796,0.86678 798 | 797,0.90800 799 | 798,2.54835 800 | 799,1.19202 801 | 800,1.76771 802 | 801,1.69971 803 | 802,0.82978 804 | 803,0.88571 805 | 804,2.03823 806 | 805,1.49777 807 | 806,2.06377 808 | 807,1.07063 809 | 808,1.49189 810 | 809,1.79322 811 | 810,1.17693 812 | 811,1.06469 813 | 812,2.04324 814 | 813,1.21910 815 | 814,1.26502 816 | 815,0.80968 817 | 816,3.92089 818 | 817,1.31455 819 | 818,0.97073 820 | 819,1.33023 821 | 820,1.32044 822 | 821,1.20389 823 | 822,2.03284 824 | 823,1.18475 825 | 824,1.78937 826 | 825,0.79982 827 | 826,1.19195 828 | 827,1.18415 829 | 828,1.49981 830 | 829,0.88855 831 | 830,1.75931 832 | 831,1.10280 833 | 832,1.72458 834 | 833,0.84696 835 | 834,1.97767 836 | 835,1.17798 837 | 836,1.78649 838 | 837,1.79431 839 | 838,2.07782 840 | 839,1.49482 841 | 840,1.45634 842 | 841,0.83891 843 | 842,1.53588 844 | 843,1.72345 845 | 844,0.79901 846 | 845,0.99396 847 | 846,1.27911 848 | 847,2.47474 849 | 848,1.77346 850 | 849,3.14307 851 | 850,1.56026 852 | 851,1.67878 853 | 852,1.36537 854 | 853,1.32509 855 | 854,1.16448 856 | 855,0.91480 857 | 856,1.69661 858 | 857,0.81080 859 | 858,1.51515 860 | 859,3.10621 861 | 860,1.66290 862 | 861,0.83956 863 | 862,3.00651 864 | 863,1.31366 865 | 864,1.32477 866 | 865,1.22527 867 | 866,1.73393 868 | 867,1.71570 869 | 868,1.51389 870 | 869,1.71069 871 | 870,0.70411 872 | 871,1.92846 873 | 872,2.44961 874 | 873,2.33925 875 | 874,1.23638 876 | 875,0.76054 877 | 876,1.29899 878 | 877,1.54944 879 | 878,0.99778 880 | 879,1.34353 881 | 880,0.66344 882 | 881,1.01481 883 | 882,1.46805 884 | 883,0.87895 885 | 884,2.03841 886 | 885,1.49701 887 | 886,1.97430 888 | 887,1.94054 889 | 888,2.55965 890 | 889,2.39194 891 | 890,0.78832 892 | 891,1.19986 893 | 892,0.73344 894 | 893,2.04445 895 | 894,1.75503 896 | 895,1.57367 897 | 896,1.32593 898 | 897,1.16568 899 | 898,1.20761 900 | 899,2.57554 901 | 900,1.71867 902 | 901,1.53384 903 | 902,1.45970 904 | 903,1.21751 905 | 904,1.31901 906 | 905,0.93733 907 | 906,1.80420 908 | 907,1.49089 909 | 908,1.28624 910 | 909,1.35351 911 | 910,1.68466 912 | 911,1.90011 913 | 912,1.32328 914 | 913,0.86258 915 | 914,1.48562 916 | 915,1.05947 917 | 916,1.49419 918 | 917,0.79839 919 | 918,1.47209 920 | 919,1.31168 921 | 920,1.70459 922 | 921,1.48619 923 | 922,4.04803 924 | 923,0.74220 925 | 924,1.19926 926 | 925,0.93369 927 | 926,0.85376 928 | 927,1.19886 929 | 928,0.98192 930 | 929,0.84404 931 | 930,1.19973 932 | 931,1.11653 933 | 932,0.56199 934 | 933,1.36201 935 | 934,1.70499 936 | 935,1.64884 937 | 936,0.65094 938 | 937,1.72521 939 | 938,0.64211 940 | 939,0.88178 941 | 940,1.73083 942 | 941,0.74695 943 | 942,1.71120 944 | 943,0.61170 945 | 944,2.45981 946 | 945,2.01639 947 | 946,2.41925 948 | 947,1.33350 949 | 948,1.69448 950 | 949,1.51834 951 | 950,1.33637 952 | 951,1.54793 953 | 952,2.56365 954 | 953,1.10896 955 | 954,1.23923 956 | 955,2.06848 957 | 956,1.49042 958 | 957,0.55248 959 | 
958,1.34979 960 | 959,0.98271 961 | 960,1.50140 962 | 961,1.92965 963 | 962,1.08607 964 | 963,0.85366 965 | 964,1.34381 966 | 965,1.47626 967 | 966,0.86019 968 | 967,1.17424 969 | 968,1.52605 970 | 969,0.94988 971 | 970,1.55136 972 | 971,1.09228 973 | 972,0.68026 974 | 973,1.68449 975 | 974,1.51256 976 | 975,1.20036 977 | 976,4.04931 978 | 977,2.00238 979 | 978,2.01545 980 | 979,1.91368 981 | 980,2.44004 982 | 981,1.45095 983 | 982,1.30971 984 | 983,1.00284 985 | 984,0.80799 986 | 985,2.46852 987 | 986,1.33159 988 | 987,1.31020 989 | 988,1.17408 990 | 989,0.55317 991 | 990,0.94104 992 | 991,1.19847 993 | 992,0.77204 994 | 993,1.44760 995 | 994,1.31434 996 | 995,1.68891 997 | 996,1.67896 998 | 997,1.49702 999 | 998,1.52569 1000 | 999,1.34929 1001 | 1000,1.18873 1002 | -------------------------------------------------------------------------------- /data/label_test.csv: -------------------------------------------------------------------------------- 1 | # id,z0,z1 2 | 1,0.00000,1.00000 3 | 2,0.00000,1.00000 4 | 3,1.00000,0.00000 5 | 4,1.00000,0.00000 6 | 5,1.00000,0.00000 7 | 6,0.00000,1.00000 8 | 7,1.00000,0.00000 9 | 8,1.00000,0.00000 10 | 9,0.00000,1.00000 11 | 10,0.00000,1.00000 12 | 11,1.00000,0.00000 13 | 12,1.00000,0.00000 14 | 13,0.00000,1.00000 15 | 14,0.00000,1.00000 16 | 15,0.00000,1.00000 17 | 16,0.00000,1.00000 18 | 17,1.00000,0.00000 19 | 18,1.00000,0.00000 20 | 19,0.99997,0.00003 21 | 20,1.00000,0.00000 22 | 21,1.00000,0.00000 23 | 22,0.00000,1.00000 24 | 23,1.00000,0.00000 25 | 24,0.00000,1.00000 26 | 25,1.00000,0.00000 27 | 26,1.00000,0.00000 28 | 27,1.00000,0.00000 29 | 28,0.00000,1.00000 30 | 29,0.00000,1.00000 31 | 30,1.00000,0.00000 32 | 31,0.00000,1.00000 33 | 32,1.00000,0.00000 34 | 33,1.00000,0.00000 35 | 34,1.00000,0.00000 36 | 35,0.00000,1.00000 37 | 36,0.00017,0.99983 38 | 37,1.00000,0.00000 39 | 38,1.00000,0.00000 40 | 39,1.00000,0.00000 41 | 40,1.00000,0.00000 42 | 41,0.00000,1.00000 43 | 42,0.99997,0.00003 44 | 43,1.00000,0.00000 45 | 44,0.00000,1.00000 46 | 45,0.00000,1.00000 47 | 46,1.00000,0.00000 48 | 47,0.00000,1.00000 49 | 48,0.00000,1.00000 50 | 49,0.00000,1.00000 51 | 50,0.00000,1.00000 52 | 51,0.00000,1.00000 53 | 52,0.00000,1.00000 54 | 53,1.00000,0.00000 55 | 54,1.00000,0.00000 56 | 55,0.00000,1.00000 57 | 56,1.00000,0.00000 58 | 57,1.00000,0.00000 59 | 58,1.00000,0.00000 60 | 59,0.00000,1.00000 61 | 60,1.00000,0.00000 62 | 61,0.00000,1.00000 63 | 62,0.00000,1.00000 64 | 63,0.00000,1.00000 65 | 64,1.00000,0.00000 66 | 65,0.00000,1.00000 67 | 66,1.00000,0.00000 68 | 67,0.00000,1.00000 69 | 68,0.00000,1.00000 70 | 69,1.00000,0.00000 71 | 70,1.00000,0.00000 72 | 71,0.00000,1.00000 73 | 72,1.00000,0.00000 74 | 73,0.00000,1.00000 75 | 74,1.00000,0.00000 76 | 75,1.00000,0.00000 77 | 76,1.00000,0.00000 78 | 77,0.00000,1.00000 79 | 78,0.00000,1.00000 80 | 79,1.00000,0.00000 81 | 80,1.00000,0.00000 82 | 81,1.00000,0.00000 83 | 82,1.00000,0.00000 84 | 83,1.00000,0.00000 85 | 84,1.00000,0.00000 86 | 85,1.00000,0.00000 87 | 86,0.00000,1.00000 88 | 87,0.00000,1.00000 89 | 88,0.00000,1.00000 90 | 89,1.00000,0.00000 91 | 90,1.00000,0.00000 92 | 91,1.00000,0.00000 93 | 92,0.00000,1.00000 94 | 93,1.00000,0.00000 95 | 94,0.00000,1.00000 96 | 95,0.00000,1.00000 97 | 96,1.00000,0.00000 98 | 97,0.00000,1.00000 99 | 98,0.00000,1.00000 100 | 99,0.00000,1.00000 101 | 100,0.00000,1.00000 102 | 101,1.00000,0.00000 103 | 102,1.00000,0.00000 104 | 103,0.00000,1.00000 105 | 104,1.00000,0.00000 106 | 105,0.00329,0.99671 107 | 106,1.00000,0.00000 108 | 107,1.00000,0.00000 109 | 
108,1.00000,0.00000 110 | 109,1.00000,0.00000 111 | 110,1.00000,0.00000 112 | 111,1.00000,0.00000 113 | 112,0.00000,1.00000 114 | 113,0.00000,1.00000 115 | 114,1.00000,0.00000 116 | 115,0.00000,1.00000 117 | 116,0.00000,1.00000 118 | 117,0.00000,1.00000 119 | 118,0.00000,1.00000 120 | 119,0.00000,1.00000 121 | 120,0.00000,1.00000 122 | 121,1.00000,0.00000 123 | 122,1.00000,0.00000 124 | 123,0.00000,1.00000 125 | 124,0.00000,1.00000 126 | 125,0.00000,1.00000 127 | 126,1.00000,0.00000 128 | 127,1.00000,0.00000 129 | 128,0.00000,1.00000 130 | 129,1.00000,0.00000 131 | 130,0.00000,1.00000 132 | 131,0.00000,1.00000 133 | 132,0.00000,1.00000 134 | 133,0.00000,1.00000 135 | 134,1.00000,0.00000 136 | 135,1.00000,0.00000 137 | 136,1.00000,0.00000 138 | 137,1.00000,0.00000 139 | 138,1.00000,0.00000 140 | 139,0.00000,1.00000 141 | 140,0.99982,0.00018 142 | 141,0.00000,1.00000 143 | 142,0.00000,1.00000 144 | 143,1.00000,0.00000 145 | 144,0.00000,1.00000 146 | 145,0.00000,1.00000 147 | 146,0.00000,1.00000 148 | 147,1.00000,0.00000 149 | 148,0.00000,1.00000 150 | 149,0.00000,1.00000 151 | 150,0.00000,1.00000 152 | 151,1.00000,0.00000 153 | 152,0.00000,1.00000 154 | 153,1.00000,0.00000 155 | 154,1.00000,0.00000 156 | 155,1.00000,0.00000 157 | 156,0.00000,1.00000 158 | 157,0.00000,1.00000 159 | 158,0.00000,1.00000 160 | 159,0.00000,1.00000 161 | 160,0.00000,1.00000 162 | 161,1.00000,0.00000 163 | 162,0.00000,1.00000 164 | 163,0.00000,1.00000 165 | 164,1.00000,0.00000 166 | 165,0.00000,1.00000 167 | 166,0.00000,1.00000 168 | 167,1.00000,0.00000 169 | 168,0.00000,1.00000 170 | 169,0.00000,1.00000 171 | 170,0.00000,1.00000 172 | 171,0.00000,1.00000 173 | 172,0.00000,1.00000 174 | 173,0.00000,1.00000 175 | 174,1.00000,0.00000 176 | 175,0.00000,1.00000 177 | 176,0.02618,0.97382 178 | 177,0.00000,1.00000 179 | 178,1.00000,0.00000 180 | 179,1.00000,0.00000 181 | 180,1.00000,0.00000 182 | 181,0.00000,1.00000 183 | 182,1.00000,0.00000 184 | 183,1.00000,0.00000 185 | 184,0.00000,1.00000 186 | 185,1.00000,0.00000 187 | 186,0.00000,1.00000 188 | 187,1.00000,0.00000 189 | 188,1.00000,0.00000 190 | 189,1.00000,0.00000 191 | 190,1.00000,0.00000 192 | 191,0.00000,1.00000 193 | 192,1.00000,0.00000 194 | 193,1.00000,0.00000 195 | 194,0.00000,1.00000 196 | 195,1.00000,0.00000 197 | 196,1.00000,0.00000 198 | 197,1.00000,0.00000 199 | 198,1.00000,0.00000 200 | 199,1.00000,0.00000 201 | 200,1.00000,0.00000 202 | 201,0.00000,1.00000 203 | 202,1.00000,0.00000 204 | 203,1.00000,0.00000 205 | 204,0.00086,0.99914 206 | 205,0.00000,1.00000 207 | 206,1.00000,0.00000 208 | 207,1.00000,0.00000 209 | 208,0.00000,1.00000 210 | 209,1.00000,0.00000 211 | 210,1.00000,0.00000 212 | 211,1.00000,0.00000 213 | 212,1.00000,0.00000 214 | 213,1.00000,0.00000 215 | 214,1.00000,0.00000 216 | 215,0.00000,1.00000 217 | 216,1.00000,0.00000 218 | 217,1.00000,0.00000 219 | 218,0.00000,1.00000 220 | 219,0.00000,1.00000 221 | 220,1.00000,0.00000 222 | 221,0.00000,1.00000 223 | 222,1.00000,0.00000 224 | 223,1.00000,0.00000 225 | 224,0.00000,1.00000 226 | 225,1.00000,0.00000 227 | 226,1.00000,0.00000 228 | 227,1.00000,0.00000 229 | 228,0.99993,0.00007 230 | 229,0.00000,1.00000 231 | 230,0.00000,1.00000 232 | 231,1.00000,0.00000 233 | 232,0.00000,1.00000 234 | 233,0.00000,1.00000 235 | 234,0.00000,1.00000 236 | 235,1.00000,0.00000 237 | 236,1.00000,0.00000 238 | 237,0.00000,1.00000 239 | 238,0.00000,1.00000 240 | 239,1.00000,0.00000 241 | 240,0.00000,1.00000 242 | 241,0.00000,1.00000 243 | 242,1.00000,0.00000 244 | 243,0.00000,1.00000 245 | 
244,1.00000,0.00000 246 | 245,1.00000,0.00000 247 | 246,0.00000,1.00000 248 | 247,0.00000,1.00000 249 | 248,1.00000,0.00000 250 | 249,1.00000,0.00000 251 | 250,0.00000,1.00000 252 | 251,0.00000,1.00000 253 | 252,1.00000,0.00000 254 | 253,1.00000,0.00000 255 | 254,0.00000,1.00000 256 | 255,0.99996,0.00004 257 | 256,1.00000,0.00000 258 | 257,1.00000,0.00000 259 | 258,0.00000,1.00000 260 | 259,0.00000,1.00000 261 | 260,1.00000,0.00000 262 | 261,1.00000,0.00000 263 | 262,0.00000,1.00000 264 | 263,0.00000,1.00000 265 | 264,0.00000,1.00000 266 | 265,0.00000,1.00000 267 | 266,1.00000,0.00000 268 | 267,1.00000,0.00000 269 | 268,0.00000,1.00000 270 | 269,0.00000,1.00000 271 | 270,1.00000,0.00000 272 | 271,0.00000,1.00000 273 | 272,1.00000,0.00000 274 | 273,1.00000,0.00000 275 | 274,0.00000,1.00000 276 | 275,0.00000,1.00000 277 | 276,0.00000,1.00000 278 | 277,0.00000,1.00000 279 | 278,0.00000,1.00000 280 | 279,1.00000,0.00000 281 | 280,0.00000,1.00000 282 | 281,1.00000,0.00000 283 | 282,0.00000,1.00000 284 | 283,0.00000,1.00000 285 | 284,1.00000,0.00000 286 | 285,0.00000,1.00000 287 | 286,1.00000,0.00000 288 | 287,0.00000,1.00000 289 | 288,1.00000,0.00000 290 | 289,0.00000,1.00000 291 | 290,0.00000,1.00000 292 | 291,0.00000,1.00000 293 | 292,0.00000,1.00000 294 | 293,1.00000,0.00000 295 | 294,1.00000,0.00000 296 | 295,1.00000,0.00000 297 | 296,1.00000,0.00000 298 | 297,1.00000,0.00000 299 | 298,1.00000,0.00000 300 | 299,1.00000,0.00000 301 | 300,0.00000,1.00000 302 | 301,1.00000,0.00000 303 | 302,0.00000,1.00000 304 | 303,0.00000,1.00000 305 | 304,1.00000,0.00000 306 | 305,0.00000,1.00000 307 | 306,1.00000,0.00000 308 | 307,1.00000,0.00000 309 | 308,1.00000,0.00000 310 | 309,1.00000,0.00000 311 | 310,0.00000,1.00000 312 | 311,1.00000,0.00000 313 | 312,0.00000,1.00000 314 | 313,0.00000,1.00000 315 | 314,0.00000,1.00000 316 | 315,1.00000,0.00000 317 | 316,1.00000,0.00000 318 | 317,1.00000,0.00000 319 | 318,1.00000,0.00000 320 | 319,0.00000,1.00000 321 | 320,0.00000,1.00000 322 | 321,1.00000,0.00000 323 | 322,0.00000,1.00000 324 | 323,1.00000,0.00000 325 | 324,1.00000,0.00000 326 | 325,0.00000,1.00000 327 | 326,0.00245,0.99755 328 | 327,0.00000,1.00000 329 | 328,0.00000,1.00000 330 | 329,0.00000,1.00000 331 | 330,0.00000,1.00000 332 | 331,0.00000,1.00000 333 | 332,0.00000,1.00000 334 | 333,0.00029,0.99971 335 | 334,0.00000,1.00000 336 | 335,0.00000,1.00000 337 | 336,1.00000,0.00000 338 | 337,1.00000,0.00000 339 | 338,1.00000,0.00000 340 | 339,1.00000,0.00000 341 | 340,1.00000,0.00000 342 | 341,1.00000,0.00000 343 | 342,0.00000,1.00000 344 | 343,1.00000,0.00000 345 | 344,0.00000,1.00000 346 | 345,0.00000,1.00000 347 | 346,1.00000,0.00000 348 | 347,1.00000,0.00000 349 | 348,0.00000,1.00000 350 | 349,1.00000,0.00000 351 | 350,1.00000,0.00000 352 | 351,0.00000,1.00000 353 | 352,1.00000,0.00000 354 | 353,0.00000,1.00000 355 | 354,1.00000,0.00000 356 | 355,1.00000,0.00000 357 | 356,1.00000,0.00000 358 | 357,0.00000,1.00000 359 | 358,0.00000,1.00000 360 | 359,1.00000,0.00000 361 | 360,1.00000,0.00000 362 | 361,1.00000,0.00000 363 | 362,0.00000,1.00000 364 | 363,1.00000,0.00000 365 | 364,0.00000,1.00000 366 | 365,1.00000,0.00000 367 | 366,1.00000,0.00000 368 | 367,0.00000,1.00000 369 | 368,1.00000,0.00000 370 | 369,0.00000,1.00000 371 | 370,0.00000,1.00000 372 | 371,1.00000,0.00000 373 | 372,1.00000,0.00000 374 | 373,0.00000,1.00000 375 | 374,1.00000,0.00000 376 | 375,1.00000,0.00000 377 | 376,1.00000,0.00000 378 | 377,0.99730,0.00270 379 | 378,0.00000,1.00000 380 | 379,1.00000,0.00000 381 | 
380,0.00000,1.00000 382 | 381,0.00000,1.00000 383 | 382,0.00000,1.00000 384 | 383,0.00000,1.00000 385 | 384,0.00000,1.00000 386 | 385,1.00000,0.00000 387 | 386,1.00000,0.00000 388 | 387,0.99186,0.00814 389 | 388,0.00000,1.00000 390 | 389,0.00000,1.00000 391 | 390,1.00000,0.00000 392 | 391,1.00000,0.00000 393 | 392,0.00000,1.00000 394 | 393,1.00000,0.00000 395 | 394,0.00000,1.00000 396 | 395,0.00000,1.00000 397 | 396,0.00000,1.00000 398 | 397,1.00000,0.00000 399 | 398,1.00000,0.00000 400 | 399,1.00000,0.00000 401 | 400,1.00000,0.00000 402 | 401,0.00000,1.00000 403 | 402,1.00000,0.00000 404 | 403,0.00000,1.00000 405 | 404,1.00000,0.00000 406 | 405,0.00002,0.99998 407 | 406,0.00000,1.00000 408 | 407,0.00000,1.00000 409 | 408,1.00000,0.00000 410 | 409,1.00000,0.00000 411 | 410,0.00000,1.00000 412 | 411,0.00000,1.00000 413 | 412,0.00000,1.00000 414 | 413,0.00000,1.00000 415 | 414,0.99969,0.00031 416 | 415,0.00000,1.00000 417 | 416,0.00000,1.00000 418 | 417,0.00000,1.00000 419 | 418,0.00000,1.00000 420 | 419,1.00000,0.00000 421 | 420,0.00000,1.00000 422 | 421,0.00000,1.00000 423 | 422,0.00000,1.00000 424 | 423,1.00000,0.00000 425 | 424,1.00000,0.00000 426 | 425,1.00000,0.00000 427 | 426,1.00000,0.00000 428 | 427,0.00000,1.00000 429 | 428,0.00000,1.00000 430 | 429,1.00000,0.00000 431 | 430,1.00000,0.00000 432 | 431,0.00000,1.00000 433 | 432,1.00000,0.00000 434 | 433,0.00000,1.00000 435 | 434,0.00000,1.00000 436 | 435,0.00000,1.00000 437 | 436,1.00000,0.00000 438 | 437,1.00000,0.00000 439 | 438,1.00000,0.00000 440 | 439,1.00000,0.00000 441 | 440,0.00000,1.00000 442 | 441,0.00000,1.00000 443 | 442,1.00000,0.00000 444 | 443,0.00000,1.00000 445 | 444,0.00000,1.00000 446 | 445,1.00000,0.00000 447 | 446,1.00000,0.00000 448 | 447,0.00000,1.00000 449 | 448,1.00000,0.00000 450 | 449,1.00000,0.00000 451 | 450,0.00000,1.00000 452 | 451,1.00000,0.00000 453 | 452,1.00000,0.00000 454 | 453,0.00000,1.00000 455 | 454,0.00000,1.00000 456 | 455,0.00000,1.00000 457 | 456,1.00000,0.00000 458 | 457,1.00000,0.00000 459 | 458,0.00000,1.00000 460 | 459,1.00000,0.00000 461 | 460,1.00000,0.00000 462 | 461,0.00000,1.00000 463 | 462,0.00000,1.00000 464 | 463,1.00000,0.00000 465 | 464,0.00000,1.00000 466 | 465,0.00000,1.00000 467 | 466,0.00000,1.00000 468 | 467,1.00000,0.00000 469 | 468,0.00000,1.00000 470 | 469,0.00000,1.00000 471 | 470,1.00000,0.00000 472 | 471,0.00000,1.00000 473 | 472,0.00000,1.00000 474 | 473,1.00000,0.00000 475 | 474,0.00000,1.00000 476 | 475,1.00000,0.00000 477 | 476,0.00000,1.00000 478 | 477,1.00000,0.00000 479 | 478,0.00000,1.00000 480 | 479,0.00000,1.00000 481 | 480,0.00000,1.00000 482 | 481,1.00000,0.00000 483 | 482,1.00000,0.00000 484 | 483,0.00000,1.00000 485 | 484,0.00000,1.00000 486 | 485,1.00000,0.00000 487 | 486,0.00000,1.00000 488 | 487,1.00000,0.00000 489 | 488,1.00000,0.00000 490 | 489,1.00000,0.00000 491 | 490,1.00000,0.00000 492 | 491,0.00000,1.00000 493 | 492,0.00000,1.00000 494 | 493,0.00000,1.00000 495 | 494,1.00000,0.00000 496 | 495,1.00000,0.00000 497 | 496,1.00000,0.00000 498 | 497,0.00000,1.00000 499 | 498,0.00000,1.00000 500 | 499,0.00000,1.00000 501 | 500,1.00000,0.00000 502 | 501,0.00000,1.00000 503 | 502,1.00000,0.00000 504 | 503,0.00640,0.99360 505 | 504,0.00000,1.00000 506 | 505,1.00000,0.00000 507 | 506,0.00000,1.00000 508 | 507,1.00000,0.00000 509 | 508,0.00000,1.00000 510 | 509,1.00000,0.00000 511 | 510,1.00000,0.00000 512 | 511,0.00000,1.00000 513 | 512,1.00000,0.00000 514 | 513,1.00000,0.00000 515 | 514,0.00000,1.00000 516 | 515,0.00000,1.00000 517 | 
516,0.00000,1.00000 518 | 517,1.00000,0.00000 519 | 518,1.00000,0.00000 520 | 519,0.00000,1.00000 521 | 520,0.00000,1.00000 522 | 521,1.00000,0.00000 523 | 522,0.00000,1.00000 524 | 523,0.00000,1.00000 525 | 524,0.00000,1.00000 526 | 525,1.00000,0.00000 527 | 526,0.00000,1.00000 528 | 527,0.00000,1.00000 529 | 528,0.00000,1.00000 530 | 529,1.00000,0.00000 531 | 530,1.00000,0.00000 532 | 531,0.00000,1.00000 533 | 532,0.00000,1.00000 534 | 533,0.00000,1.00000 535 | 534,1.00000,0.00000 536 | 535,0.00000,1.00000 537 | 536,1.00000,0.00000 538 | 537,0.00000,1.00000 539 | 538,1.00000,0.00000 540 | 539,0.00000,1.00000 541 | 540,0.00000,1.00000 542 | 541,0.00000,1.00000 543 | 542,1.00000,0.00000 544 | 543,1.00000,0.00000 545 | 544,1.00000,0.00000 546 | 545,1.00000,0.00000 547 | 546,1.00000,0.00000 548 | 547,1.00000,0.00000 549 | 548,0.00000,1.00000 550 | 549,1.00000,0.00000 551 | 550,1.00000,0.00000 552 | 551,0.00000,1.00000 553 | 552,1.00000,0.00000 554 | 553,0.00000,1.00000 555 | 554,0.00000,1.00000 556 | 555,0.00000,1.00000 557 | 556,0.00000,1.00000 558 | 557,0.00000,1.00000 559 | 558,0.00000,1.00000 560 | 559,0.00000,1.00000 561 | 560,0.00000,1.00000 562 | 561,0.00000,1.00000 563 | 562,1.00000,0.00000 564 | 563,1.00000,0.00000 565 | 564,1.00000,0.00000 566 | 565,1.00000,0.00000 567 | 566,0.00000,1.00000 568 | 567,1.00000,0.00000 569 | 568,0.00000,1.00000 570 | 569,0.00000,1.00000 571 | 570,0.00000,1.00000 572 | 571,0.00000,1.00000 573 | 572,0.00000,1.00000 574 | 573,1.00000,0.00000 575 | 574,0.00000,1.00000 576 | 575,0.00000,1.00000 577 | 576,0.00000,1.00000 578 | 577,1.00000,0.00000 579 | 578,0.00000,1.00000 580 | 579,0.00000,1.00000 581 | 580,1.00000,0.00000 582 | 581,0.00000,1.00000 583 | 582,1.00000,0.00000 584 | 583,1.00000,0.00000 585 | 584,0.00000,1.00000 586 | 585,1.00000,0.00000 587 | 586,0.00000,1.00000 588 | 587,0.00000,1.00000 589 | 588,0.00000,1.00000 590 | 589,1.00000,0.00000 591 | 590,1.00000,0.00000 592 | 591,1.00000,0.00000 593 | 592,1.00000,0.00000 594 | 593,0.00000,1.00000 595 | 594,1.00000,0.00000 596 | 595,1.00000,0.00000 597 | 596,0.00000,1.00000 598 | 597,0.00000,1.00000 599 | 598,0.00000,1.00000 600 | 599,1.00000,0.00000 601 | 600,0.00000,1.00000 602 | 601,1.00000,0.00000 603 | 602,1.00000,0.00000 604 | 603,0.00000,1.00000 605 | 604,0.00000,1.00000 606 | 605,0.00000,1.00000 607 | 606,1.00000,0.00000 608 | 607,0.99983,0.00017 609 | 608,0.00000,1.00000 610 | 609,0.00000,1.00000 611 | 610,1.00000,0.00000 612 | 611,0.00000,1.00000 613 | 612,0.00000,1.00000 614 | 613,0.00000,1.00000 615 | 614,0.00000,1.00000 616 | 615,0.00000,1.00000 617 | 616,1.00000,0.00000 618 | 617,1.00000,0.00000 619 | 618,0.00000,1.00000 620 | 619,0.00000,1.00000 621 | 620,1.00000,0.00000 622 | 621,0.00000,1.00000 623 | 622,1.00000,0.00000 624 | 623,1.00000,0.00000 625 | 624,1.00000,0.00000 626 | 625,0.00000,1.00000 627 | 626,0.00000,1.00000 628 | 627,1.00000,0.00000 629 | 628,0.00000,1.00000 630 | 629,0.00000,1.00000 631 | 630,1.00000,0.00000 632 | 631,1.00000,0.00000 633 | 632,1.00000,0.00000 634 | 633,1.00000,0.00000 635 | 634,0.00000,1.00000 636 | 635,0.00000,1.00000 637 | 636,1.00000,0.00000 638 | 637,1.00000,0.00000 639 | 638,0.00000,1.00000 640 | 639,1.00000,0.00000 641 | 640,1.00000,0.00000 642 | 641,1.00000,0.00000 643 | 642,0.98458,0.01542 644 | 643,1.00000,0.00000 645 | 644,0.00000,1.00000 646 | 645,1.00000,0.00000 647 | 646,1.00000,0.00000 648 | 647,0.00000,1.00000 649 | 648,0.00000,1.00000 650 | 649,0.00000,1.00000 651 | 650,0.00000,1.00000 652 | 651,0.00000,1.00000 653 | 
652,1.00000,0.00000 654 | 653,0.00000,1.00000 655 | 654,1.00000,0.00000 656 | 655,1.00000,0.00000 657 | 656,1.00000,0.00000 658 | 657,1.00000,0.00000 659 | 658,1.00000,0.00000 660 | 659,1.00000,0.00000 661 | 660,0.00000,1.00000 662 | 661,0.00000,1.00000 663 | 662,1.00000,0.00000 664 | 663,0.00000,1.00000 665 | 664,0.00006,0.99994 666 | 665,0.00000,1.00000 667 | 666,1.00000,0.00000 668 | 667,0.00000,1.00000 669 | 668,0.00000,1.00000 670 | 669,1.00000,0.00000 671 | 670,0.00000,1.00000 672 | 671,0.00000,1.00000 673 | 672,1.00000,0.00000 674 | 673,1.00000,0.00000 675 | 674,1.00000,0.00000 676 | 675,0.00000,1.00000 677 | 676,1.00000,0.00000 678 | 677,1.00000,0.00000 679 | 678,1.00000,0.00000 680 | 679,0.00000,1.00000 681 | 680,1.00000,0.00000 682 | 681,1.00000,0.00000 683 | 682,1.00000,0.00000 684 | 683,1.00000,0.00000 685 | 684,1.00000,0.00000 686 | 685,1.00000,0.00000 687 | 686,0.00000,1.00000 688 | 687,0.00000,1.00000 689 | 688,1.00000,0.00000 690 | 689,1.00000,0.00000 691 | 690,0.00000,1.00000 692 | 691,1.00000,0.00000 693 | 692,1.00000,0.00000 694 | 693,1.00000,0.00000 695 | 694,0.00000,1.00000 696 | 695,1.00000,0.00000 697 | 696,1.00000,0.00000 698 | 697,1.00000,0.00000 699 | 698,0.00000,1.00000 700 | 699,1.00000,0.00000 701 | 700,1.00000,0.00000 702 | 701,0.00000,1.00000 703 | 702,0.00000,1.00000 704 | 703,1.00000,0.00000 705 | 704,0.97684,0.02316 706 | 705,0.00006,0.99994 707 | 706,0.09233,0.90767 708 | 707,0.99752,0.00248 709 | 708,0.00849,0.99151 710 | 709,0.00008,0.99992 711 | 710,0.00006,0.99994 712 | 711,0.00000,1.00000 713 | 712,0.00000,1.00000 714 | 713,0.99983,0.00017 715 | 714,1.00000,0.00000 716 | 715,1.00000,0.00000 717 | 716,1.00000,0.00000 718 | 717,1.00000,0.00000 719 | 718,0.99992,0.00008 720 | 719,0.00000,1.00000 721 | 720,0.99992,0.00008 722 | 721,0.99999,0.00001 723 | 722,0.00006,0.99994 724 | 723,0.00062,0.99938 725 | 724,0.00010,0.99990 726 | 725,1.00000,0.00000 727 | 726,1.00000,0.00000 728 | 727,0.00000,1.00000 729 | 728,1.00000,0.00000 730 | 729,0.00157,0.99843 731 | 730,0.87245,0.12755 732 | 731,0.99999,0.00001 733 | 732,1.00000,0.00000 734 | 733,0.00030,0.99970 735 | 734,0.00006,0.99994 736 | 735,0.99998,0.00002 737 | 736,0.99445,0.00555 738 | 737,0.99957,0.00043 739 | 738,1.00000,0.00000 740 | 739,1.00000,0.00000 741 | 740,1.00000,0.00000 742 | 741,0.00000,1.00000 743 | 742,0.00001,0.99999 744 | 743,1.00000,0.00000 745 | 744,0.00001,0.99999 746 | 745,1.00000,0.00000 747 | 746,0.00188,0.99812 748 | 747,1.00000,0.00000 749 | 748,0.59529,0.40471 750 | 749,1.00000,0.00000 751 | 750,0.00001,0.99999 752 | 751,0.99990,0.00010 753 | 752,0.99999,0.00001 754 | 753,0.00000,1.00000 755 | 754,0.02075,0.97925 756 | 755,1.00000,0.00000 757 | 756,0.00000,1.00000 758 | 757,0.00000,1.00000 759 | 758,1.00000,0.00000 760 | 759,0.09720,0.90280 761 | 760,0.15454,0.84546 762 | 761,0.00040,0.99960 763 | 762,0.00000,1.00000 764 | 763,1.00000,0.00000 765 | 764,0.00001,0.99999 766 | 765,0.00256,0.99744 767 | 766,0.00442,0.99558 768 | 767,0.79900,0.20100 769 | 768,0.00007,0.99993 770 | 769,0.20660,0.79340 771 | 770,0.00000,1.00000 772 | 771,1.00000,0.00000 773 | 772,1.00000,0.00000 774 | 773,0.02166,0.97834 775 | 774,0.00009,0.99991 776 | 775,1.00000,0.00000 777 | 776,0.00000,1.00000 778 | 777,0.00000,1.00000 779 | 778,0.00050,0.99950 780 | 779,0.00000,1.00000 781 | 780,0.00000,1.00000 782 | 781,0.00000,1.00000 783 | 782,0.99457,0.00543 784 | 783,0.00000,1.00000 785 | 784,0.00382,0.99618 786 | 785,1.00000,0.00000 787 | 786,0.00000,1.00000 788 | 787,0.00144,0.99856 789 | 
788,1.00000,0.00000 790 | 789,1.00000,0.00000 791 | 790,0.99995,0.00005 792 | 791,1.00000,0.00000 793 | 792,0.99999,0.00001 794 | 793,0.00000,1.00000 795 | 794,1.00000,0.00000 796 | 795,0.00000,1.00000 797 | 796,0.99041,0.00959 798 | 797,0.99462,0.00538 799 | 798,1.00000,0.00000 800 | 799,0.00008,0.99992 801 | 800,1.00000,0.00000 802 | 801,1.00000,0.00000 803 | 802,0.01579,0.98421 804 | 803,0.99262,0.00738 805 | 804,1.00000,0.00000 806 | 805,1.00000,0.00000 807 | 806,0.00000,1.00000 808 | 807,0.99949,0.00051 809 | 808,1.00000,0.00000 810 | 809,1.00000,0.00000 811 | 810,0.00010,0.99990 812 | 811,0.00055,0.99945 813 | 812,1.00000,0.00000 814 | 813,0.99994,0.00006 815 | 814,0.99997,0.00003 816 | 815,0.02069,0.97931 817 | 816,1.00000,0.00000 818 | 817,0.99999,0.00001 819 | 818,0.00219,0.99781 820 | 819,0.00001,0.99999 821 | 820,0.00001,0.99999 822 | 821,0.00007,0.99993 823 | 822,0.00000,1.00000 824 | 823,0.99991,0.00009 825 | 824,1.00000,0.00000 826 | 825,0.97650,0.02350 827 | 826,0.99992,0.00008 828 | 827,0.99991,0.00009 829 | 828,1.00000,0.00000 830 | 829,0.99290,0.00710 831 | 830,1.00000,0.00000 832 | 831,0.00031,0.99969 833 | 832,0.00000,1.00000 834 | 833,0.01257,0.98743 835 | 834,0.00000,1.00000 836 | 835,0.99990,0.00010 837 | 836,0.00000,1.00000 838 | 837,0.00000,1.00000 839 | 838,0.00000,1.00000 840 | 839,0.00000,1.00000 841 | 840,1.00000,0.00000 842 | 841,0.01390,0.98610 843 | 842,0.00000,1.00000 844 | 843,1.00000,0.00000 845 | 844,0.97647,0.02353 846 | 845,0.99844,0.00156 847 | 846,0.99998,0.00002 848 | 847,0.00000,1.00000 849 | 848,1.00000,0.00000 850 | 849,1.00000,0.00000 851 | 850,0.00000,1.00000 852 | 851,0.00000,1.00000 853 | 852,0.00001,0.99999 854 | 853,0.00001,0.99999 855 | 854,0.00013,0.99987 856 | 855,0.00491,0.99509 857 | 856,1.00000,0.00000 858 | 857,0.97960,0.02040 859 | 858,0.00000,1.00000 860 | 859,0.00000,1.00000 861 | 860,0.00000,1.00000 862 | 861,0.01388,0.98612 863 | 862,1.00000,0.00000 864 | 863,0.00001,0.99999 865 | 864,0.00001,0.99999 866 | 865,0.99995,0.00005 867 | 866,1.00000,0.00000 868 | 867,0.00000,1.00000 869 | 868,0.00000,1.00000 870 | 869,0.00000,1.00000 871 | 870,0.07329,0.92671 872 | 871,1.00000,0.00000 873 | 872,0.00000,1.00000 874 | 873,1.00000,0.00000 875 | 874,0.00004,0.99996 876 | 875,0.03805,0.96195 877 | 876,0.00002,0.99998 878 | 877,1.00000,0.00000 879 | 878,0.00148,0.99852 880 | 879,0.00001,0.99999 881 | 880,0.88580,0.11420 882 | 881,0.99885,0.00115 883 | 882,1.00000,0.00000 884 | 883,0.00815,0.99185 885 | 884,1.00000,0.00000 886 | 885,1.00000,0.00000 887 | 886,1.00000,0.00000 888 | 887,0.00000,1.00000 889 | 888,0.00000,1.00000 890 | 889,0.00000,1.00000 891 | 890,0.97306,0.02694 892 | 891,0.99993,0.00007 893 | 892,0.94767,0.05233 894 | 893,1.00000,0.00000 895 | 894,0.00000,1.00000 896 | 895,0.00000,1.00000 897 | 896,0.00001,0.99999 898 | 897,0.00012,0.99988 899 | 898,0.99993,0.00007 900 | 899,0.00000,1.00000 901 | 900,0.00000,1.00000 902 | 901,1.00000,0.00000 903 | 902,1.00000,0.00000 904 | 903,0.00006,0.99994 905 | 904,0.00001,0.99999 906 | 905,0.99646,0.00354 907 | 906,0.00000,1.00000 908 | 907,1.00000,0.00000 909 | 908,0.00002,0.99998 910 | 909,0.99999,0.00001 911 | 910,0.00000,1.00000 912 | 911,0.00000,1.00000 913 | 912,0.00001,0.99999 914 | 913,0.98985,0.01015 915 | 914,0.00000,1.00000 916 | 915,0.99940,0.00060 917 | 916,0.00000,1.00000 918 | 917,0.02393,0.97607 919 | 918,0.00000,1.00000 920 | 919,0.00001,0.99999 921 | 920,1.00000,0.00000 922 | 921,1.00000,0.00000 923 | 922,0.00000,1.00000 924 | 923,0.95278,0.04722 925 | 
924,0.99992,0.00008 926 | 925,0.00374,0.99626 927 | 926,0.01147,0.98853 928 | 927,0.00008,0.99992 929 | 928,0.99814,0.00186 930 | 929,0.98693,0.01307 931 | 930,0.99993,0.00007 932 | 931,0.00026,0.99974 933 | 932,0.37126,0.62874 934 | 933,0.00001,0.99999 935 | 934,0.00000,1.00000 936 | 935,0.00000,1.00000 937 | 936,0.13064,0.86936 938 | 937,1.00000,0.00000 939 | 938,0.85656,0.14344 940 | 939,0.00779,0.99221 941 | 940,1.00000,0.00000 942 | 941,0.04471,0.95529 943 | 942,0.00000,1.00000 944 | 943,0.19882,0.80118 945 | 944,1.00000,0.00000 946 | 945,1.00000,0.00000 947 | 946,0.00000,1.00000 948 | 947,0.00001,0.99999 949 | 948,1.00000,0.00000 950 | 949,1.00000,0.00000 951 | 950,0.00001,0.99999 952 | 951,0.00000,1.00000 953 | 952,0.00000,1.00000 954 | 953,0.99971,0.00029 955 | 954,0.99996,0.00004 956 | 955,1.00000,0.00000 957 | 956,0.00000,1.00000 958 | 957,0.50680,0.49320 959 | 958,0.99999,0.00001 960 | 959,0.99816,0.00184 961 | 960,1.00000,0.00000 962 | 961,1.00000,0.00000 963 | 962,0.99960,0.00040 964 | 963,0.01148,0.98852 965 | 964,0.99999,0.00001 966 | 965,0.00000,1.00000 967 | 966,0.98951,0.01049 968 | 967,0.99989,0.00011 969 | 968,0.00000,1.00000 970 | 969,0.99703,0.00297 971 | 970,1.00000,0.00000 972 | 971,0.99963,0.00037 973 | 972,0.90477,0.09523 974 | 973,1.00000,0.00000 975 | 974,1.00000,0.00000 976 | 975,0.99993,0.00007 977 | 976,0.00000,1.00000 978 | 977,1.00000,0.00000 979 | 978,1.00000,0.00000 980 | 979,0.00000,1.00000 981 | 980,1.00000,0.00000 982 | 981,0.00000,1.00000 983 | 982,0.99999,0.00001 984 | 983,0.00137,0.99863 985 | 984,0.97900,0.02100 986 | 985,0.00000,1.00000 987 | 986,0.00001,0.99999 988 | 987,0.99999,0.00001 989 | 988,0.99989,0.00011 990 | 989,0.46613,0.53387 991 | 990,0.99663,0.00337 992 | 991,0.99992,0.00008 993 | 992,0.96694,0.03306 994 | 993,1.00000,0.00000 995 | 994,0.99999,0.00001 996 | 995,0.00000,1.00000 997 | 996,1.00000,0.00000 998 | 997,1.00000,0.00000 999 | 998,0.00000,1.00000 1000 | 999,0.00001,0.99999 1001 | 1000,0.99991,0.00009 1002 | -------------------------------------------------------------------------------- /data/result.txt: -------------------------------------------------------------------------------- 1 | 1.000000 2 | 1.000000 3 | 0.000000 4 | 0.000000 5 | 0.000000 6 | 1.000000 7 | 0.000000 8 | 0.000000 9 | 1.000000 10 | 1.000000 11 | 0.000000 12 | 0.000000 13 | 1.000000 14 | 1.000000 15 | 1.000000 16 | 1.000000 17 | 0.000000 18 | 0.000000 19 | 0.000004 20 | 0.000000 21 | 0.000000 22 | 1.000000 23 | 0.000000 24 | 1.000000 25 | 0.000000 26 | 0.000000 27 | 0.000000 28 | 1.000000 29 | 1.000000 30 | 0.000000 31 | 1.000000 32 | 0.000000 33 | 0.000000 34 | 0.000000 35 | 1.000000 36 | 0.999960 37 | 0.000000 38 | 0.000000 39 | 0.000000 40 | 0.000000 41 | 1.000000 42 | 0.000003 43 | 0.000000 44 | 1.000000 45 | 1.000000 46 | 0.000000 47 | 1.000000 48 | 1.000000 49 | 1.000000 50 | 1.000000 51 | 1.000000 52 | 1.000000 53 | 0.000000 54 | 0.000000 55 | 1.000000 56 | 0.000000 57 | 0.000000 58 | 0.000000 59 | 1.000000 60 | 0.000000 61 | 1.000000 62 | 1.000000 63 | 1.000000 64 | 0.000000 65 | 1.000000 66 | 0.000000 67 | 1.000000 68 | 1.000000 69 | 0.000000 70 | 0.000000 71 | 1.000000 72 | 0.000000 73 | 1.000000 74 | 0.000000 75 | 0.000000 76 | 0.000000 77 | 1.000000 78 | 1.000000 79 | 0.000000 80 | 0.000000 81 | 0.000000 82 | 0.000000 83 | 0.000000 84 | 0.000000 85 | 0.000000 86 | 1.000000 87 | 1.000000 88 | 1.000000 89 | 0.000000 90 | 0.000000 91 | 0.000000 92 | 1.000000 93 | 0.000000 94 | 1.000000 95 | 1.000000 96 | 0.000000 97 | 1.000000 98 | 1.000000 99 | 
1.000000 100 | 1.000000 101 | 0.000000 102 | 0.000000 103 | 1.000000 104 | 0.000000 105 | 0.997792 106 | 0.000000 107 | 0.000000 108 | 0.000000 109 | 0.000000 110 | 0.000000 111 | 0.000000 112 | 1.000000 113 | 1.000000 114 | 0.000000 115 | 1.000000 116 | 1.000000 117 | 1.000000 118 | 1.000000 119 | 1.000000 120 | 1.000000 121 | 0.000000 122 | 0.000000 123 | 1.000000 124 | 1.000000 125 | 1.000000 126 | 0.000000 127 | 0.000000 128 | 1.000000 129 | 0.000000 130 | 1.000000 131 | 1.000000 132 | 1.000000 133 | 0.999999 134 | 0.000000 135 | 0.000000 136 | 0.000000 137 | 0.000000 138 | 0.000000 139 | 1.000000 140 | 0.000022 141 | 1.000000 142 | 1.000000 143 | 0.000000 144 | 1.000000 145 | 1.000000 146 | 1.000000 147 | 0.000000 148 | 1.000000 149 | 1.000000 150 | 1.000000 151 | 0.000000 152 | 1.000000 153 | 0.000000 154 | 0.000000 155 | 0.000000 156 | 1.000000 157 | 1.000000 158 | 1.000000 159 | 1.000000 160 | 1.000000 161 | 0.000000 162 | 1.000000 163 | 1.000000 164 | 0.000000 165 | 1.000000 166 | 1.000000 167 | 0.000000 168 | 1.000000 169 | 1.000000 170 | 1.000000 171 | 1.000000 172 | 1.000000 173 | 1.000000 174 | 0.000000 175 | 1.000000 176 | 0.983120 177 | 1.000000 178 | 0.000000 179 | 0.000000 180 | 0.000000 181 | 1.000000 182 | 0.000000 183 | 0.000000 184 | 1.000000 185 | 0.000000 186 | 1.000000 187 | 0.000000 188 | 0.000000 189 | 0.000000 190 | 0.000000 191 | 1.000000 192 | 0.000000 193 | 0.000000 194 | 1.000000 195 | 0.000000 196 | 0.000000 197 | 0.000000 198 | 0.000000 199 | 0.000000 200 | 0.000000 201 | 1.000000 202 | 0.000000 203 | 0.000000 204 | 0.999801 205 | 1.000000 206 | 0.000000 207 | 0.000000 208 | 1.000000 209 | 0.000000 210 | 0.000000 211 | 0.000000 212 | 0.000000 213 | 0.000000 214 | 0.000000 215 | 1.000000 216 | 0.000000 217 | 0.000000 218 | 1.000000 219 | 1.000000 220 | 0.000000 221 | 1.000000 222 | 0.000000 223 | 0.000000 224 | 1.000000 225 | 0.000000 226 | 0.000000 227 | 0.000000 228 | 0.000020 229 | 1.000000 230 | 1.000000 231 | 0.000000 232 | 1.000000 233 | 1.000000 234 | 1.000000 235 | 0.000000 236 | 0.000000 237 | 1.000000 238 | 1.000000 239 | 0.000000 240 | 1.000000 241 | 1.000000 242 | 0.000000 243 | 1.000000 244 | 0.000000 245 | 0.000000 246 | 1.000000 247 | 1.000000 248 | 0.000000 249 | 0.000000 250 | 1.000000 251 | 0.999999 252 | 0.000000 253 | 0.000000 254 | 1.000000 255 | 0.000010 256 | 0.000000 257 | 0.000000 258 | 1.000000 259 | 1.000000 260 | 0.000000 261 | 0.000000 262 | 1.000000 263 | 1.000000 264 | 1.000000 265 | 1.000000 266 | 0.000000 267 | 0.000000 268 | 0.999997 269 | 1.000000 270 | 0.000000 271 | 1.000000 272 | 0.000000 273 | 0.000000 274 | 1.000000 275 | 1.000000 276 | 1.000000 277 | 1.000000 278 | 1.000000 279 | 0.000000 280 | 1.000000 281 | 0.000000 282 | 1.000000 283 | 1.000000 284 | 0.000000 285 | 1.000000 286 | 0.000000 287 | 1.000000 288 | 0.000000 289 | 1.000000 290 | 1.000000 291 | 1.000000 292 | 1.000000 293 | 0.000000 294 | 0.000000 295 | 0.000000 296 | 0.000000 297 | 0.000000 298 | 0.000000 299 | 0.000000 300 | 1.000000 301 | 0.000000 302 | 1.000000 303 | 1.000000 304 | 0.000000 305 | 1.000000 306 | 0.000000 307 | 0.000000 308 | 0.000000 309 | 0.000000 310 | 1.000000 311 | 0.000000 312 | 1.000000 313 | 1.000000 314 | 1.000000 315 | 0.000000 316 | 0.000000 317 | 0.000000 318 | 0.000000 319 | 1.000000 320 | 1.000000 321 | 0.000000 322 | 1.000000 323 | 0.000000 324 | 0.000000 325 | 1.000000 326 | 0.998781 327 | 1.000000 328 | 1.000000 329 | 1.000000 330 | 1.000000 331 | 1.000000 332 | 1.000000 333 | 0.999891 334 | 1.000000 335 | 1.000000 336 | 
0.000000 337 | 0.000000 338 | 0.000000 339 | 0.000000 340 | 0.000000 341 | 0.000000 342 | 1.000000 343 | 0.000000 344 | 1.000000 345 | 1.000000 346 | 0.000000 347 | 0.000000 348 | 1.000000 349 | 0.000005 350 | 0.000000 351 | 1.000000 352 | 0.000000 353 | 1.000000 354 | 0.000001 355 | 0.000000 356 | 0.000000 357 | 1.000000 358 | 1.000000 359 | 0.000000 360 | 0.000000 361 | 0.000000 362 | 1.000000 363 | 0.000000 364 | 1.000000 365 | 0.000000 366 | 0.000000 367 | 1.000000 368 | 0.000000 369 | 1.000000 370 | 1.000000 371 | 0.000000 372 | 0.000000 373 | 1.000000 374 | 0.000000 375 | 0.000000 376 | 0.000000 377 | 0.000901 378 | 1.000000 379 | 0.000000 380 | 1.000000 381 | 1.000000 382 | 1.000000 383 | 1.000000 384 | 1.000000 385 | 0.000000 386 | 0.000000 387 | 0.003834 388 | 1.000000 389 | 1.000000 390 | 0.000000 391 | 0.000000 392 | 1.000000 393 | 0.000000 394 | 1.000000 395 | 1.000000 396 | 1.000000 397 | 0.000000 398 | 0.000000 399 | 0.000000 400 | 0.000000 401 | 1.000000 402 | 0.000000 403 | 1.000000 404 | 0.000000 405 | 0.999999 406 | 1.000000 407 | 1.000000 408 | 0.000000 409 | 0.000000 410 | 1.000000 411 | 1.000000 412 | 1.000000 413 | 0.999999 414 | 0.000122 415 | 1.000000 416 | 1.000000 417 | 1.000000 418 | 1.000000 419 | 0.000000 420 | 1.000000 421 | 1.000000 422 | 1.000000 423 | 0.000000 424 | 0.000000 425 | 0.000000 426 | 0.000000 427 | 1.000000 428 | 1.000000 429 | 0.000000 430 | 0.000000 431 | 1.000000 432 | 0.000000 433 | 1.000000 434 | 1.000000 435 | 1.000000 436 | 0.000000 437 | 0.000000 438 | 0.000000 439 | 0.000000 440 | 1.000000 441 | 1.000000 442 | 0.000000 443 | 1.000000 444 | 1.000000 445 | 0.000000 446 | 0.000000 447 | 1.000000 448 | 0.000000 449 | 0.000000 450 | 1.000000 451 | 0.000000 452 | 0.000000 453 | 1.000000 454 | 1.000000 455 | 1.000000 456 | 0.000000 457 | 0.000000 458 | 1.000000 459 | 0.000000 460 | 0.000000 461 | 1.000000 462 | 1.000000 463 | 0.000000 464 | 1.000000 465 | 1.000000 466 | 1.000000 467 | 0.000000 468 | 1.000000 469 | 1.000000 470 | 0.000000 471 | 1.000000 472 | 1.000000 473 | 0.000000 474 | 1.000000 475 | 0.000000 476 | 1.000000 477 | 0.000000 478 | 1.000000 479 | 1.000000 480 | 1.000000 481 | 0.000000 482 | 0.000000 483 | 1.000000 484 | 1.000000 485 | 0.000000 486 | 1.000000 487 | 0.000000 488 | 0.000000 489 | 0.000000 490 | 0.000000 491 | 1.000000 492 | 1.000000 493 | 1.000000 494 | 0.000000 495 | 0.000000 496 | 0.000000 497 | 1.000000 498 | 1.000000 499 | 1.000000 500 | 0.000000 501 | 1.000000 502 | 0.000000 503 | 0.996759 504 | 1.000000 505 | 0.000000 506 | 1.000000 507 | 0.000000 508 | 1.000000 509 | 0.000000 510 | 0.000000 511 | 1.000000 512 | 0.000000 513 | 0.000000 514 | 1.000000 515 | 1.000000 516 | 1.000000 517 | 0.000000 518 | 0.000000 519 | 1.000000 520 | 1.000000 521 | 0.000000 522 | 1.000000 523 | 1.000000 524 | 1.000000 525 | 0.000000 526 | 1.000000 527 | 1.000000 528 | 1.000000 529 | 0.000000 530 | 0.000000 531 | 1.000000 532 | 1.000000 533 | 1.000000 534 | 0.000000 535 | 1.000000 536 | 0.000000 537 | 1.000000 538 | 0.000000 539 | 1.000000 540 | 1.000000 541 | 1.000000 542 | 0.000000 543 | 0.000000 544 | 0.000000 545 | 0.000000 546 | 0.000000 547 | 0.000000 548 | 1.000000 549 | 0.000000 550 | 0.000000 551 | 1.000000 552 | 0.000000 553 | 1.000000 554 | 1.000000 555 | 1.000000 556 | 1.000000 557 | 1.000000 558 | 1.000000 559 | 1.000000 560 | 1.000000 561 | 1.000000 562 | 0.000000 563 | 0.000000 564 | 0.000000 565 | 0.000000 566 | 1.000000 567 | 0.000000 568 | 1.000000 569 | 1.000000 570 | 1.000000 571 | 1.000000 572 | 1.000000 573 | 
0.000000 574 | 1.000000 575 | 1.000000 576 | 1.000000 577 | 0.000000 578 | 1.000000 579 | 1.000000 580 | 0.000000 581 | 1.000000 582 | 0.000000 583 | 0.000000 584 | 1.000000 585 | 0.000000 586 | 1.000000 587 | 1.000000 588 | 1.000000 589 | 0.000000 590 | 0.000000 591 | 0.000000 592 | 0.000000 593 | 1.000000 594 | 0.000000 595 | 0.000000 596 | 1.000000 597 | 1.000000 598 | 1.000000 599 | 0.000000 600 | 1.000000 601 | 0.000000 602 | 0.000000 603 | 1.000000 604 | 1.000000 605 | 1.000000 606 | 0.000000 607 | 0.000043 608 | 1.000000 609 | 1.000000 610 | 0.000000 611 | 1.000000 612 | 1.000000 613 | 1.000000 614 | 1.000000 615 | 1.000000 616 | 0.000000 617 | 0.000000 618 | 1.000000 619 | 1.000000 620 | 0.000000 621 | 1.000000 622 | 0.000000 623 | 0.000001 624 | 0.000000 625 | 1.000000 626 | 1.000000 627 | 0.000000 628 | 1.000000 629 | 1.000000 630 | 0.000000 631 | 0.000000 632 | 0.000000 633 | 0.000000 634 | 1.000000 635 | 0.999998 636 | 0.000000 637 | 0.000000 638 | 1.000000 639 | 0.000000 640 | 0.000000 641 | 0.000000 642 | 0.009652 643 | 0.000000 644 | 1.000000 645 | 0.000000 646 | 0.000000 647 | 1.000000 648 | 1.000000 649 | 1.000000 650 | 1.000000 651 | 1.000000 652 | 0.000000 653 | 1.000000 654 | 0.000000 655 | 0.000000 656 | 0.000000 657 | 0.000000 658 | 0.000000 659 | 0.000000 660 | 1.000000 661 | 1.000000 662 | 0.000000 663 | 1.000000 664 | 0.999986 665 | 1.000000 666 | 0.000000 667 | 1.000000 668 | 1.000000 669 | 0.000000 670 | 1.000000 671 | 1.000000 672 | 0.000000 673 | 0.000000 674 | 0.000000 675 | 1.000000 676 | 0.000000 677 | 0.000000 678 | 0.000000 679 | 1.000000 680 | 0.000000 681 | 0.000000 682 | 0.000000 683 | 0.000000 684 | 0.000000 685 | 0.000000 686 | 1.000000 687 | 1.000000 688 | 0.000000 689 | 0.000000 690 | 1.000000 691 | 0.000000 692 | 0.000000 693 | 0.000000 694 | 1.000000 695 | 0.000000 696 | 0.000000 697 | 0.000000 698 | 1.000000 699 | 0.000000 700 | 0.000000 701 | 1.000000 702 | 1.000000 703 | 0.000001 704 | 0.026272 705 | 0.999764 706 | 0.915462 707 | 0.000901 708 | 0.994456 709 | 0.999693 710 | 0.999746 711 | 1.000000 712 | 1.000000 713 | 0.000037 714 | 0.000001 715 | 0.000000 716 | 0.000002 717 | 0.000000 718 | 0.000321 719 | 1.000000 720 | 0.000303 721 | 0.000022 722 | 0.999985 723 | 0.997339 724 | 0.999637 725 | 0.000000 726 | 0.000002 727 | 1.000000 728 | 0.000000 729 | 0.994883 730 | 0.132945 731 | 0.000023 732 | 0.000003 733 | 0.998318 734 | 0.999753 735 | 0.000031 736 | 0.013599 737 | 0.002110 738 | 0.000002 739 | 0.000002 740 | 0.000000 741 | 0.999997 742 | 0.999979 743 | 0.000000 744 | 0.999986 745 | 0.000000 746 | 0.994124 747 | 0.000000 748 | 0.376595 749 | 0.000000 750 | 0.999980 751 | 0.000377 752 | 0.000018 753 | 1.000000 754 | 0.983291 755 | 0.000001 756 | 1.000000 757 | 1.000000 758 | 0.000000 759 | 0.893838 760 | 0.827071 761 | 0.997948 762 | 1.000000 763 | 0.000000 764 | 0.999976 765 | 0.998687 766 | 0.996907 767 | 0.218025 768 | 0.999721 769 | 0.776053 770 | 1.000000 771 | 0.000000 772 | 0.000000 773 | 0.987385 774 | 0.999668 775 | 0.000000 776 | 1.000000 777 | 1.000000 778 | 0.997662 779 | 1.000000 780 | 1.000000 781 | 0.999998 782 | 0.002651 783 | 1.000000 784 | 0.997354 785 | 0.000000 786 | 1.000000 787 | 0.995148 788 | 0.000000 789 | 0.000005 790 | 0.000014 791 | 0.000000 792 | 0.000023 793 | 1.000000 794 | 0.000001 795 | 0.999998 796 | 0.008307 797 | 0.013297 798 | 0.000000 799 | 0.999667 800 | 0.000000 801 | 0.000000 802 | 0.987713 803 | 0.006311 804 | 0.000000 805 | 0.000002 806 | 1.000000 807 | 0.002341 808 | 0.000002 809 | 0.000000 810 | 
0.999633 811 | 0.997529 812 | 0.000000 813 | 0.000230 814 | 0.000056 815 | 0.987770 816 | 0.000000 817 | 0.000027 818 | 0.998952 819 | 0.999979 820 | 0.999974 821 | 0.999728 822 | 1.000000 823 | 0.000347 824 | 0.000000 825 | 0.014137 826 | 0.000330 827 | 0.000357 828 | 0.000002 829 | 0.004535 830 | 0.000000 831 | 0.998281 832 | 1.000000 833 | 0.992348 834 | 1.000000 835 | 0.000377 836 | 1.000000 837 | 1.000000 838 | 1.000000 839 | 0.999998 840 | 0.000003 841 | 0.970637 842 | 0.999999 843 | 0.000000 844 | 0.025815 845 | 0.005085 846 | 0.000043 847 | 1.000000 848 | 0.000000 849 | 0.000000 850 | 0.999999 851 | 1.000000 852 | 0.999987 853 | 0.999977 854 | 0.999545 855 | 0.997657 856 | 0.000000 857 | 0.012502 858 | 0.999999 859 | 1.000000 860 | 1.000000 861 | 0.991272 862 | 0.000000 863 | 0.999972 864 | 0.999975 865 | 0.000208 866 | 0.000000 867 | 1.000000 868 | 0.999998 869 | 1.000000 870 | 0.929425 871 | 0.000000 872 | 1.000000 873 | 0.000000 874 | 0.999807 875 | 0.970410 876 | 0.999967 877 | 0.000001 878 | 0.995037 879 | 0.999982 880 | 0.132109 881 | 0.004155 882 | 0.000003 883 | 0.996226 884 | 0.000000 885 | 0.000002 886 | 0.000000 887 | 1.000000 888 | 1.000000 889 | 1.000000 890 | 0.030120 891 | 0.000281 892 | 0.063372 893 | 0.000000 894 | 1.000000 895 | 0.999999 896 | 0.999977 897 | 0.999560 898 | 0.000257 899 | 1.000000 900 | 1.000000 901 | 0.000001 902 | 0.000003 903 | 0.999766 904 | 0.999976 905 | 0.009620 906 | 1.000000 907 | 0.000002 908 | 0.999960 909 | 0.000015 910 | 1.000000 911 | 1.000000 912 | 0.999977 913 | 0.008673 914 | 0.999998 915 | 0.000120 916 | 0.999998 917 | 0.985349 918 | 0.999998 919 | 0.999972 920 | 0.000000 921 | 0.000002 922 | 1.000000 923 | 0.057065 924 | 0.000299 925 | 0.997296 926 | 0.993078 927 | 0.999697 928 | 0.005801 929 | 0.008182 930 | 0.000294 931 | 0.999906 932 | 0.568264 933 | 0.999985 934 | 1.000000 935 | 1.000000 936 | 0.863502 937 | 0.000000 938 | 0.160300 939 | 0.993520 940 | 0.000000 941 | 0.954431 942 | 1.000000 943 | 0.783160 944 | 0.000000 945 | 0.000000 946 | 1.000000 947 | 0.999978 948 | 0.000000 949 | 0.000002 950 | 0.999979 951 | 0.999999 952 | 1.000000 953 | 0.000113 954 | 0.000179 955 | 0.000000 956 | 0.999998 957 | 0.511418 958 | 0.000017 959 | 0.005788 960 | 0.000002 961 | 0.000000 962 | 0.002025 963 | 0.993046 964 | 0.000017 965 | 0.999997 966 | 0.007106 967 | 0.000403 968 | 0.999999 969 | 0.002016 970 | 0.000001 971 | 0.000146 972 | 0.105640 973 | 0.000000 974 | 0.000001 975 | 0.000294 976 | 1.000000 977 | 0.000000 978 | 0.000000 979 | 1.000000 980 | 0.000000 981 | 0.999997 982 | 0.000029 983 | 0.995308 984 | 0.016273 985 | 1.000000 986 | 0.999979 987 | 0.000029 988 | 0.000398 989 | 0.541687 990 | 0.001724 991 | 0.000288 992 | 0.024867 993 | 0.000004 994 | 0.000028 995 | 1.000000 996 | 0.000000 997 | 0.000002 998 | 0.999999 999 | 0.999984 1000 | 0.000331 1001 | -------------------------------------------------------------------------------- /glad.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import argparse 5 | import logging 6 | import math 7 | import numpy as np 8 | import scipy as sp 9 | import scipy.stats 10 | import scipy.optimize 11 | import unittest 12 | import warnings 13 | 14 | THRESHOLD = 1e-5 15 | 16 | verbose = False 17 | debug = False 18 | logger = None 19 | 20 | 21 | # warnings.filterwarnings('error') 22 | 23 | 24 | class Dataset(object): 25 | def __init__(self, labels=None, 26 | numLabels=-1, numLabelers=-1, numTasks=-1, 
numClasses=-1, 27 | priorAlpha=None, priorBeta=None, priorZ=None, 28 | alpha=None, beta=None, probZ=None): 29 | self.labels = labels 30 | self.numLabels = numLabels 31 | self.numLabelers = numLabelers 32 | self.numTasks = numTasks 33 | self.numClasses = numClasses 34 | self.priorAlpha = priorAlpha 35 | self.priorBeta = priorBeta 36 | self.priorZ = priorZ 37 | self.alpha = alpha 38 | self.beta = beta 39 | self.probZ = probZ 40 | 41 | 42 | def init_logger(): 43 | global logger 44 | logger = logging.getLogger('GLAD') 45 | logger.setLevel(logging.DEBUG) 46 | log_fmt = '%(asctime)s/%(name)s[%(levelname)s]: %(message)s' 47 | logging.basicConfig(format=log_fmt) 48 | 49 | 50 | def sigmoid(x): 51 | return 1.0 / (1.0 + np.exp(-x)) 52 | 53 | 54 | def logsigmoid(x): 55 | return - np.log(1 + np.exp(-x)) 56 | 57 | 58 | def load_data(filename): 59 | data = Dataset() 60 | with open(filename) as f: 61 | # Read parameters 62 | header = f.readline().split() 63 | data.numLabels = int(header[0]) 64 | data.numLabelers = int(header[1]) 65 | data.numTasks = int(header[2]) 66 | data.numClasses = int(header[3]) 67 | data.priorZ = np.array([float(x) for x in header[4:]]) 68 | assert len(data.priorZ) == data.numClasses, 'Incorrect input header' 69 | assert np.isclose(data.priorZ.sum(), 1.0), 'Incorrect priorZ given' # tolerate floating-point rounding in the prior sum 70 | if verbose: 71 | logger.info('Reading {} labels of {} labelers over {} tasks for prior P(Z) = {}'.format(data.numLabels, 72 | data.numLabelers, 73 | data.numTasks, 74 | data.priorZ)) 75 | # Read Labels 76 | data.labels = np.zeros((data.numTasks, data.numLabelers)) 77 | for line in f: 78 | task, labeler, label = map(int, line.split()) 79 | if debug: 80 | logger.info("Read: task({})={} by labeler {}".format(task, label, labeler)) 81 | data.labels[task][labeler] = label + 1 82 | # Initialize Probs 83 | data.priorAlpha = np.ones(data.numLabelers) 84 | data.priorBeta = np.ones(data.numTasks) 85 | data.probZ = np.empty((data.numTasks, data.numClasses)) 86 | # data.priorZ = (np.zeros((data.numClasses, data.numTasks)).T + data.priorZ).T 87 | data.beta = np.empty(data.numTasks) 88 | data.alpha = np.empty(data.numLabelers) 89 | 90 | return data 91 | 92 | 93 | def EM(data): 94 | u"""Infer true labels, tasks' difficulty and workers' ability 95 | """ 96 | # Initialize parameters to starting values 97 | data.alpha = data.priorAlpha.copy() 98 | data.beta = data.priorBeta.copy() 99 | data.probZ[:] = data.priorZ[:] 100 | 101 | EStep(data) 102 | lastQ = computeQ(data) 103 | MStep(data) 104 | Q = computeQ(data) 105 | counter = 1 106 | while abs((Q - lastQ) / lastQ) > THRESHOLD: 107 | if verbose: logger.info('EM: iter={}'.format(counter)) 108 | lastQ = Q 109 | EStep(data) 110 | MStep(data) 111 | Q = computeQ(data) 112 | counter += 1 113 | 114 | 115 | def EStep(data): 116 | u"""Evaluate the posterior probability of true labels given observed labels and parameters 117 | """ 118 | 119 | def calcLogProbL(item, *args): 120 | j = int(item[0]) # task ID 121 | 122 | # List[boolean]: denotes if the worker i picked the focused class for the task j 123 | ## formally, delta[i, j] = True if l_ij == z_j for i = 0, ..., m-1 (m=# of workers) 124 | delta = args[0][j] 125 | noResp = args[1][j] 126 | oneMinusDelta = (~delta) & (~noResp) 127 | 128 | # List[float]: alpha_i * exp(beta_j) for i = 0, ..., m-1 129 | exponents = item[1:] 130 | 131 | # Log likelihood for the observations s.t. l_ij == z_j 132 | correct = logsigmoid(exponents[delta]).sum() 133 | # Log likelihood for the observations s.t.
l_ij != z_j 134 | wrong = (logsigmoid(-exponents[oneMinusDelta]) - np.log(float(data.numClasses - 1))).sum() 135 | 136 | # Return log likelihood 137 | return correct + wrong 138 | 139 | if verbose: logger.info('EStep') 140 | data.probZ = np.tile(np.log(data.priorZ), data.numTasks).reshape(data.numTasks, data.numClasses) 141 | 142 | ab = np.dot(np.array([np.exp(data.beta)]).T, np.array([data.alpha])) 143 | ab = np.c_[np.arange(data.numTasks), ab] 144 | 145 | for k in range(data.numClasses): 146 | data.probZ[:, k] += np.apply_along_axis(calcLogProbL, 1, ab, # accumulate the log-likelihood onto the log prior set above 147 | (data.labels == k + 1), 148 | (data.labels == 0)) 149 | 150 | # Exponentiate and renormalize 151 | data.probZ = np.exp(data.probZ) 152 | s = data.probZ.sum(axis=1) 153 | data.probZ = (data.probZ.T / s).T 154 | assert not np.any(np.isnan(data.probZ)), 'Invalid Value [EStep]' 155 | assert not np.any(np.isinf(data.probZ)), 'Invalid Value [EStep]' 156 | 157 | return data 158 | 159 | 160 | def packX(data): 161 | return np.r_[data.alpha.copy(), data.beta.copy()] 162 | 163 | 164 | def unpackX(x, data): 165 | data.alpha = x[:data.numLabelers].copy() 166 | data.beta = x[data.numLabelers:].copy() 167 | 168 | 169 | def getBoundsX(data, alpha=(-100, 100), beta=(-100, 100)): 170 | alpha_bounds = np.array([[alpha[0], alpha[1]] for i in range(data.numLabelers)]) # one (lo, hi) pair per labeler 171 | beta_bounds = np.array([[beta[0], beta[1]] for i in range(data.numTasks)]) # one (lo, hi) pair per task 172 | return np.r_[alpha_bounds, beta_bounds] 173 | 174 | 175 | def f(x, *args): 176 | u"""Return the value of the objective function 177 | """ 178 | data = args[0] 179 | d = Dataset(labels=data.labels, numLabels=data.numLabels, numLabelers=data.numLabelers, 180 | numTasks=data.numTasks, numClasses=data.numClasses, 181 | priorAlpha=data.priorAlpha, priorBeta=data.priorBeta, 182 | priorZ=data.priorZ, probZ=data.probZ) 183 | unpackX(x, d) 184 | return - computeQ(d) 185 | 186 | 187 | def df(x, *args): 188 | u"""Return gradient vector 189 | """ 190 | data = args[0] 191 | d = Dataset(labels=data.labels, numLabels=data.numLabels, numLabelers=data.numLabelers, 192 | numTasks=data.numTasks, numClasses=data.numClasses, 193 | priorAlpha=data.priorAlpha, priorBeta=data.priorBeta, 194 | priorZ=data.priorZ, probZ=data.probZ) 195 | unpackX(x, d) 196 | dQdAlpha, dQdBeta = gradientQ(d) 197 | # Flip the sign since we want to minimize 198 | assert not np.any(np.isinf(dQdAlpha)), 'Invalid Gradient Value [Alpha]' 199 | assert not np.any(np.isinf(dQdBeta)), 'Invalid Gradient Value [Beta]' 200 | assert not np.any(np.isnan(dQdAlpha)), 'Invalid Gradient Value [Alpha]' 201 | assert not np.any(np.isnan(dQdBeta)), 'Invalid Gradient Value [Beta]' 202 | return np.r_[-dQdAlpha, -dQdBeta] 203 | 204 | 205 | def MStep(data): 206 | if verbose: logger.info('MStep') 207 | initial_params = packX(data) 208 | params = sp.optimize.minimize(fun=f, x0=initial_params, args=(data,), method='CG', 209 | jac=df, tol=0.01, 210 | options={'maxiter': 25, 'disp': verbose}) 211 | if debug: 212 | logger.debug(params) 213 | unpackX(params.x, data) 214 | 215 | 216 | def computeQ(data): 217 | u"""Calculate the expectation of the joint likelihood 218 | """ 219 | Q = 0 220 | # Start with the expectation of the sum of priors over all tasks 221 | Q += (data.probZ * np.log(data.priorZ)).sum() 222 | 223 | # the expectation of the sum of posteriors over all tasks 224 | ab = np.dot(np.array([np.exp(data.beta)]).T, np.array([data.alpha])) 225 | 226 | # logSigma = - np.log(1 + np.exp(-ab)) 227 | logSigma = logsigmoid(ab) # logP 228 | idxna = np.isnan(logSigma) 229 | if
np.any(idxna): 230 | logger.warning('an invalid value was assigned to np.log [computeQ]') 231 | logSigma[idxna] = ab[idxna] # For large negative x, -log(1 + exp(-x)) = x 232 | 233 | # logOneMinusSigma = - np.log(1 + np.exp(ab)) 234 | logOneMinusSigma = logsigmoid(-ab) - np.log(float(data.numClasses - 1)) # log((1-P)/(K-1)) 235 | idxna = np.isnan(logOneMinusSigma) 236 | if np.any(idxna): 237 | logger.warning('an invalid value was assigned to np.log [computeQ]') 238 | logOneMinusSigma[idxna] = -ab[idxna] # For large positive x, -log(1 + exp(x)) = x 239 | 240 | for k in range(data.numClasses): 241 | delta = (data.labels == k + 1) 242 | Q += (data.probZ[:, k] * logSigma.T).T[delta].sum() 243 | oneMinusDelta = (data.labels != k + 1) & (data.labels != 0) # label == 0 -> no response 244 | Q += (data.probZ[:, k] * logOneMinusSigma.T).T[oneMinusDelta].sum() 245 | 246 | # Add Gaussian (standard normal) prior for alpha 247 | Q += np.log(sp.stats.norm.pdf(data.alpha - data.priorAlpha)).sum() 248 | 249 | # Add Gaussian (standard normal) prior for beta 250 | Q += np.log(sp.stats.norm.pdf(data.beta - data.priorBeta)).sum() 251 | 252 | if debug: 253 | logger.debug('a[0]={} a[1]={} a[2]={} b[0]={}'.format(data.alpha[0], data.alpha[1], 254 | data.alpha[2], data.beta[0])) 255 | logger.debug('Q={}'.format(Q)) 256 | if np.isnan(Q): 257 | return -np.inf 258 | return Q 259 | 260 | 261 | def gradientQ(data): 262 | def dAlpha(item, *args): 263 | i = int(item[0]) # worker ID 264 | sigma_ab = item[1:] # List[float], dim=(n,): sigmoid(alpha_i * beta_j) for j = 0, ..., n-1 265 | 266 | # List[boolean], dim=(n,): denotes if the worker i picked the focused class for 267 | # task j (j=0, ..., n-1) 268 | delta = args[0][:, i] 269 | noResp = args[1][:, i] 270 | oneMinusDelta = (~delta) & (~noResp) 271 | 272 | # List[float], dim=(n,): Prob of the true label of the task j being the focused class (p^k) 273 | probZ = args[2] 274 | 275 | correct = probZ[delta] * np.exp(data.beta[delta]) * (1 - sigma_ab[delta]) 276 | wrong = probZ[oneMinusDelta] * np.exp(data.beta[oneMinusDelta]) * (-sigma_ab[oneMinusDelta]) 277 | # Note: The derivative in Whitehill et al.'s appendix has the term ln(K-1), which is incorrect. 278 | 279 | return correct.sum() + wrong.sum() 280 | 281 | def dBeta(item, *args): 282 | j = int(item[0]) # task ID 283 | sigma_ab = item[1:] # List[float], dim=(m,): sigmoid(alpha_i * beta_j) for i = 0, ..., m-1 284 | 285 | # List[boolean], dim=(m,): denotes if the worker i picked the focused class for 286 | # task j (i=0, ..., m-1) 287 | delta = args[0][j] 288 | noResp = args[1][j] 289 | oneMinusDelta = (~delta) & (~noResp) 290 | 291 | # float: Prob of the true label of the task j being the focused class (p^k) 292 | probZ = args[2][j] 293 | 294 | correct = probZ * data.alpha[delta] * (1 - sigma_ab[delta]) 295 | wrong = probZ * data.alpha[oneMinusDelta] * (-sigma_ab[oneMinusDelta]) 296 | 297 | return correct.sum() + wrong.sum() 298 | 299 | # prior prob. 
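(each standard-normal prior contributes -(x - prior) to its gradient component; the per-class likelihood terms are accumulated in the loop below)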
300 | dQdAlpha = - (data.alpha - data.priorAlpha) 301 | dQdBeta = - (data.beta - data.priorBeta) 302 | 303 | ab = np.dot(np.array([np.exp(data.beta)]).T, np.array([data.alpha])) 304 | 305 | sigma = sigmoid(ab) 306 | sigma[np.isnan(sigma)] = 0 # :TODO check if this is correct 307 | 308 | labelersIdx = np.arange(data.numLabelers).reshape((1, data.numLabelers)) 309 | sigma = np.r_[labelersIdx, sigma] 310 | sigma = np.c_[np.arange(-1, data.numTasks), sigma] 311 | # sigma: List[List[float]]: dim=(n+1, m+1) where n = # of tasks and m = # of workers 312 | # sigma[0] = List[float]: worker IDs (-1, 0, ..., m-1) where the first -1 is a pad 313 | # sigma[:, 0] = List[float]: task IDs (-1, 0, ..., n-1) where the first -1 is a pad 314 | 315 | for k in range(data.numClasses): 316 | dQdAlpha += np.apply_along_axis(dAlpha, 0, sigma[:, 1:], 317 | (data.labels == k + 1), 318 | (data.labels == 0), 319 | data.probZ[:, k]) 320 | 321 | dQdBeta += np.apply_along_axis(dBeta, 1, sigma[1:], 322 | (data.labels == k + 1), 323 | (data.labels == 0), 324 | data.probZ[:, k]) * np.exp(data.beta) 325 | 326 | if debug: 327 | logger.debug('dQdAlpha[0]={} dQdAlpha[1]={} dQdAlpha[2]={} dQdBeta[0]={}'.format(dQdAlpha[0], dQdAlpha[1], 328 | dQdAlpha[2], dQdBeta[0])) 329 | return dQdAlpha, dQdBeta 330 | 331 | 332 | def output(data): 333 | alpha = np.c_[np.arange(data.numLabelers), data.alpha] 334 | np.savetxt('data/alpha.csv', alpha, fmt=['%d', '%.5f'], delimiter=',', header='id,alpha') 335 | beta = np.c_[np.arange(data.numTasks), np.exp(data.beta)] 336 | np.savetxt('data/beta.csv', beta, fmt=['%d', '%.5f'], delimiter=',', header='id,beta') 337 | probZ = np.c_[np.arange(data.numTasks), data.probZ] 338 | np.savetxt(fname='data/probZ.csv', 339 | X=probZ, 340 | fmt=['%d'] + (['%.5f'] * data.numClasses), 341 | delimiter=',', 342 | header='id,' + ','.join(['z' + str(k) for k in range(data.numClasses)])) 343 | label = np.c_[np.arange(data.numTasks), np.argmax(data.probZ, axis=1)] 344 | np.savetxt('data/label_glad.csv', label, fmt=['%d', '%d'], delimiter=',', header='id,label') 345 | 346 | 347 | def outputResults(data): 348 | for i in range(data.numLabelers): 349 | print('Alpha[{idx}] = {val:.5f}'.format(idx=i, val=data.alpha[i])) 350 | 351 | for j in range(data.numTasks): 352 | print('Beta[{idx}] = {val:.5f}'.format(idx=j, val=np.exp(data.beta[j]))) 353 | 354 | for j in range(data.numTasks): 355 | print('P(Z({idx})=1) = {val:.5f}'.format(idx=j, val=data.probZ[j, 1])) # column k of probZ is P(Z=k) 356 | 357 | 358 | def main(args=None): 359 | global debug, verbose 360 | debug = args.debug 361 | if debug: 362 | verbose = True 363 | else: 364 | verbose = args.verbose 365 | 366 | data = load_data(args.filename) 367 | EM(data) 368 | 369 | output(data) 370 | # outputResults(data) 371 | return 0 372 | 373 | 374 | if __name__ == '__main__': 375 | init_logger() 376 | parser = argparse.ArgumentParser() 377 | parser.add_argument('filename') 378 | parser.add_argument('-v', '--verbose', action='store_true', default=False) 379 | parser.add_argument('-d', '--debug', action='store_true', default=False) 380 | args = parser.parse_args() 381 | 382 | code = main(args) 383 | exit(code) 384 | -------------------------------------------------------------------------------- /glad_bin.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import argparse 5 | import logging 6 | import math 7 | import numpy as np 8 | import scipy as sp 9 | import scipy.stats 10 | import scipy.optimize 11 | import
unittest 12 | import warnings 13 | 14 | 15 | THRESHOLD = 1e-5 16 | 17 | verbose = False 18 | debug = False 19 | logger = None 20 | # warnings.filterwarnings('error') 21 | 22 | 23 | class Dataset(object): 24 | def __init__(self, labels=None, numLabels=-1, numLabelers=-1, numTasks=-1, 25 | priorAlpha=None, priorBeta=None, priorZ1=None, 26 | alpha=None, beta=None, probZ1=None, probZ0=None): 27 | self.labels = labels 28 | self.numLabels = numLabels 29 | self.numLabelers = numLabelers 30 | self.numTasks = numTasks 31 | self.priorAlpha = priorAlpha 32 | self.priorBeta = priorBeta 33 | self.priorZ1 = priorZ1 34 | self.alpha = alpha 35 | self.beta = beta 36 | self.probZ1 = probZ1 37 | self.probZ0 = probZ0 38 | 39 | def init_logger(): 40 | global logger 41 | logger = logging.getLogger('GLAD') 42 | logger.setLevel(logging.DEBUG) 43 | log_fmt = '%(asctime)s/%(name)s[%(levelname)s]: %(message)s' 44 | logging.basicConfig(format=log_fmt) 45 | 46 | def sigmoid(x): 47 | return 1.0 / (1.0 + np.exp(-x)) 48 | 49 | def logsigmoid(x): 50 | return - np.log(1 + np.exp(-x)) 51 | 52 | def load_data(filename): 53 | data = Dataset() 54 | with open(filename) as f: 55 | # Read parameters 56 | header = f.readline().split() 57 | data.numLabels = int(header[0]) 58 | data.numLabelers = int(header[1]) 59 | data.numTasks = int(header[2]) 60 | data.priorZ1 = float(header[3]) 61 | if verbose: 62 | logger.info('Reading {} labels of {} labelers over {} tasks for prior P(Z=1) = {}'.format(data.numLabels, data.numLabelers, data.numTasks, data.priorZ1)) 63 | # Read Labels 64 | data.labels = np.zeros((data.numTasks, data.numLabelers)) 65 | for line in f: 66 | task, labeler, label = map(int, line.split()) 67 | if verbose: 68 | logger.info("Read: task({})={} by labeler {}".format(task, label, labeler)) 69 | data.labels[task][labeler] = label + 1 70 | # Initialize Probs 71 | data.priorAlpha = np.ones(data.numLabelers) 72 | data.priorBeta = np.ones(data.numTasks) 73 | data.probZ1 = np.empty(data.numTasks) 74 | data.probZ0 = np.empty(data.numTasks) 75 | data.priorZ1 = np.zeros(data.numTasks) + data.priorZ1 76 | data.beta = np.empty(data.numTasks) 77 | data.alpha = np.empty(data.numLabelers) 78 | 79 | return data 80 | 81 | def EM(data): 82 | u"""Infer true labels, tasks' difficulty and workers' ability 83 | """ 84 | # Initialize parameters to starting values 85 | data.alpha = data.priorAlpha.copy() 86 | data.beta = data.priorBeta.copy() 87 | 88 | EStep(data) 89 | lastQ = computeQ(data) 90 | dQdAlpha, dQdBeta = gradientQ(data) 91 | MStep(data) 92 | Q = computeQ(data) 93 | counter = 1 94 | while abs((Q - lastQ)/lastQ) > THRESHOLD: 95 | if verbose: logger.info('EM: iter={}'.format(counter)) 96 | lastQ = Q 97 | EStep(data) 98 | MStep(data) 99 | Q = computeQ(data) 100 | counter += 1 101 | 102 | def EStep(data): 103 | u"""Evaluate the posterior probability of true labels given observed labels and parameters 104 | """ 105 | def calcLogProbL(item, *args): 106 | i = item[0] 107 | idx = args[0][int(i)] 108 | row = item[1:] 109 | return logsigmoid(row[idx]).sum() + logsigmoid(-row[np.invert(idx)]).sum() 110 | 111 | if verbose: logger.info('EStep') 112 | data.probZ1 = np.log(data.priorZ1) 113 | data.probZ0 = np.log(1 - data.priorZ1) 114 | 115 | ab = np.dot(np.array([np.exp(data.beta)]).T, np.array([data.alpha])) 116 | ab[data.labels == 0] = 0 # drop ab with no response 117 | ab = np.c_[np.arange(data.numTasks), ab] 118 | 119 | # TODO: Z1 -> Z2 120 | data.probZ1 += np.apply_along_axis(calcLogProbL, 1, ab, (data.labels == 2)) # accumulate the log-likelihood onto the log prior set above 121 | data.probZ0 +=
np.apply_along_axis(calcLogProbL, 1, ab, (data.labels == 1)) 122 | 123 | # Exponentiate and renormalize 124 | data.probZ1 = np.exp(data.probZ1) 125 | data.probZ0 = np.exp(data.probZ0) 126 | data.probZ1 = data.probZ1 / (data.probZ1 + data.probZ0) 127 | data.probZ0 = 1 - data.probZ1 128 | # TODO: nan -> abort 129 | 130 | return data 131 | 132 | def packX(data): 133 | return np.r_[data.alpha.copy(), data.beta.copy()] 134 | 135 | def unpackX(x, data): 136 | data.alpha = x[:data.numLabelers].copy() 137 | data.beta = x[data.numLabelers:].copy() 138 | 139 | def getBoundsX(data, alpha=(-100, 100), beta=(-100, 100)): 140 | alpha_bounds = np.array([[alpha[0], alpha[1]] for i in range(data.numLabelers)]) # one (lo, hi) pair per labeler 141 | beta_bounds = np.array([[beta[0], beta[1]] for i in range(data.numTasks)]) # one (lo, hi) pair per task 142 | return np.r_[alpha_bounds, beta_bounds] 143 | 144 | def f(x, *args): 145 | u"""Return the value of the objective function 146 | """ 147 | data = args[0] 148 | d = Dataset(labels=data.labels, numLabels=data.numLabels, numLabelers=data.numLabelers, 149 | numTasks=data.numTasks, 150 | priorAlpha=data.priorAlpha, priorBeta=data.priorBeta, priorZ1=data.priorZ1, 151 | probZ1=data.probZ1, probZ0=data.probZ0) 152 | unpackX(x, d) 153 | return - computeQ(d) 154 | 155 | def df(x, *args): 156 | u"""Return gradient vector 157 | """ 158 | data = args[0] 159 | d = Dataset(labels=data.labels, numLabels=data.numLabels, numLabelers=data.numLabelers, 160 | numTasks=data.numTasks, 161 | priorAlpha=data.priorAlpha, priorBeta=data.priorBeta, priorZ1=data.priorZ1, 162 | probZ1=data.probZ1, probZ0=data.probZ0) 163 | unpackX(x, d) 164 | dQdAlpha, dQdBeta = gradientQ(d) 165 | # Flip the sign since we want to minimize 166 | return np.r_[-dQdAlpha, -dQdBeta] 167 | 168 | def MStep(data): 169 | if verbose: logger.info('MStep') 170 | initial_params = packX(data) 171 | params = sp.optimize.minimize(fun=f, x0=initial_params, args=(data,), method='CG', 172 | jac=df, tol=0.01, 173 | options={'maxiter': 25, 'disp': verbose}) 174 | if debug: logger.debug(params) 175 | unpackX(params.x, data) 176 | 177 | def computeQ(data): 178 | u"""Calculate the expectation of the joint likelihood 179 | """ 180 | Q = 0 181 | # Start with the expectation of the sum of priors over all tasks 182 | Q += (data.probZ1 * np.log(data.priorZ1)).sum() 183 | Q += (data.probZ0 * np.log(1 - data.priorZ1)).sum() 184 | 185 | # the expectation of the sum of posteriors over all tasks 186 | ab = np.dot(np.array([np.exp(data.beta)]).T, np.array([data.alpha])) 187 | 188 | logSigma = - np.log(1 + np.exp(-ab)) 189 | idxna = np.isnan(logSigma) 190 | if np.any(idxna): logger.warning('an invalid value was assigned to np.log [computeQ]') 191 | logSigma[idxna] = ab[idxna] # For large negative x, -log(1 + exp(-x)) = x 192 | 193 | logOneMinusSigma = - np.log(1 + np.exp(ab)) 194 | idxna = np.isnan(logOneMinusSigma) 195 | if np.any(idxna): logger.warning('an invalid value was assigned to np.log [computeQ]') 196 | logOneMinusSigma[idxna] = -ab[idxna] # For large positive x, -log(1 + exp(x)) = x 197 | 198 | # class = 1 199 | idx = (data.labels == 2) 200 | Q += (data.probZ1 * logSigma.T).T[idx].sum() 201 | Q += (data.probZ1 * logOneMinusSigma.T).T[data.labels == 1].sum() # exclude no-response entries (label == 0) 202 | # class = 0 203 | idx = (data.labels == 1) 204 | Q += (data.probZ0 * logSigma.T).T[idx].sum() 205 | Q += (data.probZ0 * logOneMinusSigma.T).T[data.labels == 2].sum() # exclude no-response entries (label == 0) 206 | 207 | # Add Gaussian (standard normal) prior for alpha 208 | Q += np.log(sp.stats.norm.pdf(data.alpha - data.priorAlpha)).sum() 209 | 210 | # Add
Gaussian (standard normal) prior for beta 211 | Q += np.log(sp.stats.norm.pdf(data.beta - data.priorBeta)).sum() 212 | 213 | if debug: 214 | logger.debug('a[0]={} a[1]={} a[2]={} b[0]={}'.format(data.alpha[0], data.alpha[1], 215 | data.alpha[2], data.beta[0])) 216 | logger.debug('Q={}'.format(Q)) 217 | if np.isnan(Q): 218 | return -np.inf 219 | return Q 220 | 221 | 222 | def gradientQ(data): 223 | def dAlpha(item, *args): 224 | idx = args[0][:, int(item[0])] 225 | probZ = args[1] 226 | row = item[1:] 227 | correct = ((1 - row) * probZ)[idx] 228 | wrong = -(row * probZ)[np.invert(idx)] 229 | return correct.sum() + wrong.sum() 230 | 231 | def dBeta(item, *args): 232 | idx = args[0][int(item[0])] 233 | alpha = args[1] 234 | row = item[1:] 235 | correct = ((1 - row) * alpha)[idx] 236 | wrong = -(row * alpha)[np.invert(idx)] 237 | return correct.sum() + wrong.sum() 238 | 239 | dQdAlpha = - (data.alpha - data.priorAlpha) 240 | dQdBeta = - (data.beta - data.priorBeta) 241 | 242 | ab = np.dot(np.array([np.exp(data.beta)]).T, np.array([data.alpha])) 243 | 244 | sigma = sigmoid(ab) 245 | sigma[data.labels == 0] = 0 # drop ab with no response 246 | sigma[np.isnan(sigma)] = 0 # :TODO check if this is correct 247 | 248 | labelersIdx = np.arange(data.numLabelers).reshape((1,data.numLabelers)) 249 | sigma = np.r_[labelersIdx, sigma] 250 | sigma = np.c_[np.arange(-1, data.numTasks), sigma] 251 | 252 | 253 | # print(data.probZ1 * np.exp(data.beta)) 254 | dQdAlpha += np.apply_along_axis(dAlpha, 0, sigma[:, 1:], 255 | (data.labels == 2), data.probZ1 * np.exp(data.beta)) 256 | dQdAlpha += np.apply_along_axis(dAlpha, 0, sigma[:, 1:], 257 | (data.labels == 1), data.probZ0 * np.exp(data.beta)) 258 | 259 | dQdBeta += np.apply_along_axis(dBeta, 1, sigma[1:], 260 | (data.labels == 2), data.alpha) * data.probZ1 * np.exp(data.beta) 261 | dQdBeta += np.apply_along_axis(dBeta, 1, sigma[1:], 262 | (data.labels == 1), data.alpha) * data.probZ0 * np.exp(data.beta) 263 | 264 | if debug: 265 | logger.debug('dQdAlpha[0]={} dQdAlpha[1]={} dQdAlpha[2]={} dQdBeta[0]={}'.format(dQdAlpha[0], dQdAlpha[1], 266 | dQdAlpha[2], dQdBeta[0])) 267 | return dQdAlpha, dQdBeta 268 | 269 | 270 | def output(data): 271 | alpha = np.c_[np.arange(1, data.numLabelers+1), data.alpha] 272 | np.savetxt('alpha.csv', alpha, fmt=['%d', '%.5f'], delimiter=',', header='id,alpha') 273 | beta = np.c_[np.arange(1, data.numTasks+1), np.exp(data.beta)] 274 | np.savetxt('beta.csv', beta, fmt=['%d', '%.5f'], delimiter=',', header='id,beta') 275 | label = np.c_[np.arange(1, data.numTasks+1), data.probZ0, data.probZ1] 276 | np.savetxt('label.csv', label, fmt=['%d', '%.5f', '%.5f'], delimiter=',', header='id,z0,z1') 277 | 278 | 279 | def outputResults(data): 280 | for i in range(data.numLabelers): 281 | print('Alpha[{idx}] = {val:.5f}'.format(idx=i, val=data.alpha[i])) 282 | 283 | for j in range(data.numTasks): 284 | print('Beta[{idx}] = {val:.5f}'.format(idx=j, val=np.exp(data.beta[j]))) 285 | 286 | for j in range(data.numTasks): 287 | print('P(Z({idx})=1) = {val:.5f}'.format(idx=j, val=data.probZ1[j])) 288 | 289 | 290 | def main(args=None): 291 | global debug, verbose 292 | debug = args.debug 293 | if debug == True: 294 | verbose = True 295 | else: 296 | verbose = args.verbose 297 | 298 | data = load_data(args.filename) 299 | EM(data) 300 | 301 | output(data) 302 | # outputResults(data) 303 | return 304 | 305 | 306 | if __name__ == '__main__': 307 | init_logger() 308 | parser = argparse.ArgumentParser() 309 | parser.add_argument('filename') 310 | 
parser.add_argument('-v', '--verbose', action='store_true', default=False) 311 | parser.add_argument('-d', '--debug', action='store_true', default=False) 312 | args = parser.parse_args() 313 | 314 | code = main(args) 315 | exit(code) 316 | -------------------------------------------------------------------------------- /glad_naive.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import argparse 5 | import logging 6 | import math 7 | import numpy as np 8 | import scipy as sp 9 | import scipy.stats 10 | import scipy.optimize 11 | import unittest 12 | import warnings 13 | 14 | 15 | THRESHOLD = 1e-5 16 | 17 | verbose = False 18 | debug = False 19 | logger = None 20 | warnings.filterwarnings('error') 21 | 22 | class Label: 23 | def __init__(self, taskIdx=-1, labelerId=-1, label=-1): 24 | self.taskIdx = taskIdx 25 | self.labelerId = labelerId 26 | self.label = label 27 | 28 | class Dataset: 29 | def __init__(self, labels=[], numLabels=-1, numLabelers=-1, numTasks=-1, 30 | priorAlpha=None, priorBeta=None, priorZ1=None, 31 | alpha=None, beta=None, probZ1=None, probZ0=None): 32 | self.labels = labels 33 | self.numLabels = numLabels 34 | self.numLabelers = numLabelers 35 | self.numTasks = numTasks 36 | self.priorAlpha = priorAlpha 37 | self.priorBeta = priorBeta 38 | self.priorZ1 = priorZ1 39 | self.alpha = alpha 40 | self.beta = beta 41 | self.probZ1 = probZ1 42 | self.probZ0 = probZ0 43 | 44 | def init_logger(): 45 | global logger 46 | logger = logging.getLogger('GLAD') 47 | logger.setLevel(logging.DEBUG) 48 | log_fmt = '%(asctime)s/%(name)s[%(levelname)s]: %(message)s' 49 | logging.basicConfig(format=log_fmt) 50 | 51 | def sigmoid(x): 52 | return 1.0 / (1.0 + np.exp(-x)) 53 | 54 | def load_data(filename): 55 | data = Dataset() 56 | with open(filename) as f: 57 | # Read parameters 58 | header = f.readline().split() 59 | data.numLabels = int(header[0]) 60 | data.numLabelers = int(header[1]) 61 | data.numTasks = int(header[2]) 62 | data.priorZ1 = float(header[3]) 63 | if verbose: 64 | logger.info('Reading {} labels of {} labelers over {} tasks for prior P(Z=1) = {}'.format(data.numLabels, data.numLabelers, data.numTasks, data.priorZ1)) 65 | # Read Labels 66 | for line in f: 67 | task, labeler, label = map(int, line.split()) 68 | if verbose: 69 | logger.info("Read: task({})={} by labeler {}".format(task, label, labeler)) 70 | item = Label(taskIdx=task, labelerId=labeler, label=label) 71 | data.labels.append(item) 72 | # Initialize Probs 73 | data.priorAlpha = np.ones(data.numLabelers) 74 | data.priorBeta = np.ones(data.numTasks) 75 | data.probZ1 = np.empty(data.numTasks) 76 | data.probZ0 = np.empty(data.numTasks) 77 | data.priorZ1 = np.zeros(data.numTasks) + data.priorZ1 78 | data.beta = np.empty(data.numTasks) 79 | data.alpha = np.empty(data.numLabelers) 80 | 81 | return data 82 | 83 | def EM(data): 84 | u"""Infer true labels, tasks' difficulty and workers' ability 85 | """ 86 | # Initialize parameters to starting values 87 | data.alpha = data.priorAlpha.copy() 88 | data.beta = data.priorBeta.copy() 89 | 90 | EStep(data) 91 | lastQ = computeQ(data) 92 | MStep(data) 93 | Q = computeQ(data) 94 | counter = 1 95 | while abs((Q - lastQ)/lastQ) > THRESHOLD: 96 | if verbose: logger.info('EM: iter={}'.format(counter)) 97 | lastQ = Q 98 | EStep(data) 99 | MStep(data) 100 | Q = computeQ(data) 101 | counter += 1 102 | 103 | def EStep(data): 104 | u"""Evaluate the posterior probability of true labels given observed 
labels and parameters 105 | """ 106 | if verbose: logger.info('EStep') 107 | data.probZ1 = np.log(data.priorZ1) 108 | data.probZ0 = np.log(1 - data.priorZ1) 109 | 110 | for item in data.labels: 111 | i = item.labelerId 112 | j = item.taskIdx 113 | lij = item.label 114 | data.probZ1[j] += logProbL(lij, 1, data.alpha[i], data.beta[j]) 115 | data.probZ0[j] += logProbL(lij, 0, data.alpha[i], data.beta[j]) 116 | 117 | # Exponentiate and renormalize 118 | data.probZ1 = np.exp(data.probZ1) 119 | data.probZ0 = np.exp(data.probZ0) 120 | data.probZ1 = data.probZ1 / (data.probZ1 + data.probZ0) 121 | data.probZ0 = 1 - data.probZ1 122 | # TODO: nan -> abort 123 | 124 | return data 125 | 126 | def packX(data): 127 | return np.r_[data.alpha.copy(), data.beta.copy()] 128 | 129 | def unpackX(x, data): 130 | data.alpha = x[:data.numLabelers] 131 | data.beta = x[data.numLabelers:] 132 | 133 | def getBoundsX(data, alpha=(-100, 100), beta=(-100, 100)): 134 | alpha_bounds = np.array([[alpha[0], alpha[1]] for i in range(data.numLabelers)]) # one (lo, hi) pair per labeler 135 | beta_bounds = np.array([[beta[0], beta[1]] for i in range(data.numTasks)]) # one (lo, hi) pair per task 136 | return np.r_[alpha_bounds, beta_bounds] 137 | 138 | def f(x, *args): 139 | u"""Return the value of the objective function 140 | """ 141 | data = args[0] 142 | d = Dataset(labels=data.labels, numLabels=data.numLabels, numLabelers=data.numLabelers, 143 | numTasks=data.numTasks, 144 | priorAlpha=data.priorAlpha, priorBeta=data.priorBeta, priorZ1=data.priorZ1, 145 | probZ1=data.probZ1, probZ0=data.probZ0) 146 | unpackX(x, d) 147 | return - computeQ(d) 148 | 149 | def df(x, *args): 150 | u"""Return gradient vector 151 | """ 152 | data = args[0] 153 | d = Dataset(labels=data.labels, numLabels=data.numLabels, numLabelers=data.numLabelers, 154 | numTasks=data.numTasks, 155 | priorAlpha=data.priorAlpha, priorBeta=data.priorBeta, priorZ1=data.priorZ1, 156 | probZ1=data.probZ1, probZ0=data.probZ0) 157 | unpackX(x, d) 158 | dQdAlpha, dQdBeta = gradientQ(d) 159 | # Flip the sign since we want to minimize 160 | return np.r_[-dQdAlpha, -dQdBeta] 161 | 162 | def MStep(data): 163 | if verbose: logger.info('MStep') 164 | initial_params = packX(data) 165 | params = sp.optimize.minimize(fun=f, x0=initial_params, args=(data,), method='CG', 166 | jac=df, tol=0.01, 167 | options={'maxiter': 25, 'disp': verbose}) 168 | if debug: logger.debug(params) 169 | unpackX(params.x, data) 170 | 171 | def computeQ(data): 172 | u"""Calculate the expectation of the joint likelihood 173 | """ 174 | Q = 0 175 | # Start with the expectation of the sum of priors over all tasks 176 | Q += (data.probZ1 * np.log(data.priorZ1)).sum() 177 | Q += (data.probZ0 * np.log(1 - data.priorZ1)).sum() 178 | 179 | # the expectation of the sum of posteriors over all tasks 180 | for item in data.labels: 181 | i = item.labelerId 182 | j = item.taskIdx 183 | alpha = data.alpha[i] 184 | beta = data.beta[j] 185 | lij = item.label 186 | try: 187 | logSigma = - np.log(1 + np.exp(- np.exp(beta) * alpha)) 188 | except Warning: 189 | # For large negative x, -log(1 + exp(-x)) = x 190 | logSigma = np.exp(beta) * alpha 191 | try: 192 | logOneMinusSigma = - np.log(1 + np.exp(np.exp(beta) * alpha)) 193 | except Warning: 194 | # For large positive x, -log(1 + exp(x)) = x 195 | logOneMinusSigma = - np.exp(beta) * alpha 196 | 197 | Q += data.probZ1[j] * (lij * logSigma + (1 - lij) * logOneMinusSigma) + data.probZ0[j] * ((1 - lij) * logSigma + lij * logOneMinusSigma) 198 | 199 | # Add Gaussian (standard normal) prior for alpha 200 | try: 201 | Q +=
np.log(sp.stats.norm.pdf(data.alpha - data.priorAlpha)).sum() 202 | except Warning: 203 | logger.warning('an invalid value was assigned to np.log [computeQ]') 204 | Q = np.nan 205 | 206 | # Add Gaussian (standard normal) prior for beta 207 | try: 208 | Q += np.log(sp.stats.norm.pdf(data.beta - data.priorBeta)).sum() 209 | except Warning: 210 | logger.warning('an invalid value was assigned to np.log [computeQ]') 211 | Q = np.nan 212 | 213 | if debug: 214 | logger.debug('a[0]={} a[1]={} a[2]={} b[0]={}'.format(data.alpha[0], data.alpha[1], 215 | data.alpha[2], data.beta[0])) 216 | logger.debug('Q={}'.format(Q)) 217 | return Q 218 | 219 | 220 | def gradientQ(data): 221 | dQdAlpha = - (data.alpha - data.priorAlpha) 222 | dQdBeta = - (data.beta - data.priorBeta) 223 | 224 | for item in data.labels: 225 | i = item.labelerId 226 | j = item.taskIdx 227 | alpha = data.alpha[i] 228 | beta = data.beta[j] 229 | lij = item.label 230 | try: 231 | sigma = sigmoid(np.exp(beta) * alpha) 232 | except Warning: 233 | if alpha < 0: 234 | sigma = 0 235 | else: 236 | raise 237 | 238 | dQdAlpha[i] += (data.probZ1[j] * (lij - sigma) + data.probZ0[j] * (1 - lij - sigma)) * np.exp(beta) 239 | dQdBeta[j] += (data.probZ1[j] * (lij - sigma) + data.probZ0[j] * (1 - lij - sigma)) * alpha * np.exp(beta) 240 | if debug: 241 | logger.debug('dQdAlpha[0]={} dQdAlpha[1]={} dQdAlpha[2]={} dQdBeta[0]={}'.format(dQdAlpha[0], dQdAlpha[1], 242 | dQdAlpha[2], dQdBeta[0])) 243 | return dQdAlpha, dQdBeta 244 | 245 | 246 | def logProbL(l, z, alphaI, betaJ): 247 | u"""Return log posterior probability of the label given true label, difficulity and ability 248 | """ 249 | if (z == l): 250 | p = - np.log(1 + np.exp(- np.exp(betaJ) * alphaI)) 251 | else: 252 | p = - np.log(1 + np.exp(np.exp(betaJ) * alphaI)) 253 | return p 254 | 255 | 256 | def output(data): 257 | alpha = np.c_[np.arange(1, data.numLabelers+1), data.alpha] 258 | np.savetxt('alpha.csv', alpha, fmt=['%d', '%.5f'], delimiter=',', header='id,alpha') 259 | beta = np.c_[np.arange(1, data.numTasks+1), data.beta] 260 | np.savetxt('beta.csv', beta, fmt=['%d', '%.5f'], delimiter=',', header='id,beta') 261 | label = np.c_[np.arange(1, data.numTasks+1), data.probZ0, data.probZ1] 262 | np.savetxt('label.csv', label, fmt=['%d', '%.5f', '%.5f'], delimiter=',', header='id,z0,z1') 263 | 264 | def outputResults(data): 265 | for i in range(data.numLabelers): 266 | print('Alpha[{idx}] = {val:.5f}'.format(idx=i, val=data.alpha[i])) 267 | 268 | for j in range(data.numTasks): 269 | print('Beta[{idx}] = {val:.5f}'.format(idx=j, val=np.exp(data.beta[j]))) 270 | 271 | for j in range(data.numTasks): 272 | print('P(Z({idx})=1) = {val:.5f}'.format(idx=j, val=data.probZ1[j])) 273 | 274 | 275 | def main(args=None): 276 | global debug, verbose 277 | debug = args.debug 278 | if debug == True: 279 | verbose = True 280 | else: 281 | verbose = args.verbose 282 | 283 | data = load_data(args.filename) 284 | EM(data) 285 | 286 | output(data) 287 | outputResults(data) 288 | return 0 289 | 290 | 291 | if __name__ == '__main__': 292 | init_logger() 293 | parser = argparse.ArgumentParser() 294 | parser.add_argument('filename') 295 | parser.add_argument('-v', '--verbose', action='store_true', default=False) 296 | parser.add_argument('-d', '--debug', action='store_true', default=False) 297 | args = parser.parse_args() 298 | 299 | code = main(args) 300 | exit(code) 301 | -------------------------------------------------------------------------------- /gladtest.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import argparse 5 | import logging 6 | import numpy as np 7 | import unittest 8 | import glad 9 | 10 | verbose = False 11 | debug = True 12 | logger = None 13 | 14 | 15 | class TestGLAD(unittest.TestCase): 16 | 17 | def setUp(self): 18 | self.data = glad.load_data('./data/data.txt') 19 | 20 | def test_EM(self): 21 | self.data.alpha = self.data.priorAlpha.copy() 22 | self.data.beta = self.data.priorBeta.copy() 23 | glad.EStep(self.data) 24 | np.testing.assert_allclose(self.data.probZ[:5, 1], 25 | [1.000000,1.000000,0.000000,0.000000,0.000000], 26 | atol=1e-3) 27 | np.testing.assert_allclose(self.data.probZ[:5, 0], 28 | [0.000000,0.000000,1.000000,1.000000,1.000000], 29 | atol=1e-3) 30 | Q = glad.computeQ(self.data) 31 | np.testing.assert_allclose(Q, -15490.194235, atol=1e-3) 32 | dQdAlpha, dQdBeta = glad.gradientQ(self.data) 33 | np.testing.assert_allclose(dQdAlpha[:5], 34 | [-623.990996,-621.437809,-526.274895,-556.247219,-553.387000], 35 | atol=1e-3) 36 | np.testing.assert_allclose(dQdBeta[:5], 37 | [-4.789440,-4.789440,-10.226004,-4.789440,-2.071159], 38 | atol=1e-3) 39 | 40 | def init_logger(): 41 | global logger 42 | logger = logging.getLogger('GLADTest') 43 | logger.setLevel(logging.DEBUG) 44 | log_fmt = '%(asctime)s/%(name)s[%(levelname)s]: %(message)s' 45 | logging.basicConfig(format=log_fmt) 46 | 47 | glad.init_logger() 48 | 49 | 50 | if __name__ == '__main__': 51 | init_logger() 52 | parser = argparse.ArgumentParser() 53 | # parser.add_argument('filename') 54 | parser.add_argument('-v', '--verbose', action='store_true', default=False) 55 | parser.add_argument('-d', '--debug', action='store_true', default=False) 56 | args = parser.parse_args() 57 | 58 | logger.info('Start Unit Test') 59 | # run the suite with an explicit runner so the exit code reflects the result 60 | suite = unittest.TestLoader().loadTestsFromTestCase(TestGLAD) 61 | result = unittest.TextTestRunner(verbosity=2).run(suite) 62 | exit(0 if result.wasSuccessful() else 1) 63 | --------------------------------------------------------------------------------
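For reference, a minimal driver sketch that uses glad.py as a library rather than a script. It is not a file in this repository; it only calls functions that glad.py defines above (init_logger, load_data, EM, output):

```python
# sketch: run GLAD programmatically instead of "python glad.py data/data.txt"
import glad

glad.init_logger()
data = glad.load_data('data/data.txt')  # header line, then one "<task> <labeler> <label>" triple per line
glad.EM(data)    # alternate EStep/MStep until the relative change in Q drops below THRESHOLD
glad.output(data)  # writes data/alpha.csv, data/beta.csv, data/probZ.csv, data/label_glad.csv
```

After EM, the inferred quantities live on the returned Dataset: data.alpha holds the labeler abilities, np.exp(data.beta) the per-task Beta values that output() and outputResults() report (larger means easier), and data.probZ the posterior class probabilities per task.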