├── Embedding
│   └── download_link.txt
├── README.md
├── Structured
│   └── Amazon-Google
│       ├── tableA.csv
│       ├── tableB.csv
│       ├── test.csv
│       ├── train.csv
│       └── valid.csv
├── data
│   ├── Amazon-Google.info
│   └── ind.Amazon-Google.adj
├── graph_att.py
├── highway.py
├── model.py
├── train.py
└── utils.py

/Embedding/download_link.txt:
--------------------------------------------------------------------------------
1 | Download glove.6B.200d.txt from https://nlp.stanford.edu/projects/glove/
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # GraphER
2 | The code for our AAAI'20 paper "GraphER: Token-Centric Entity Resolution with Graph Convolutional Neural Networks"
3 | 
4 | # Usage:
5 | 
6 | * First, download the embedding file "glove.6B.200d.txt" from https://nlp.stanford.edu/projects/glove/ and put it into the Embedding folder
7 | 
8 | * Second, initialize the ER-graph, e.g., for the Amazon-Google dataset:
9 |   python graph_att.py Amazon-Google
10 | 
11 | * Finally, run the training:
12 |   python train.py
13 | 
--------------------------------------------------------------------------------
/Structured/Amazon-Google/test.csv:
--------------------------------------------------------------------------------
1 | ltable_id,rtable_id,label
2 | 393,831,0 3 | 559,324,0 4 | 558,3023,0 5 | 762,1618,0 6 | 1262,2860,0 7 | 971,2804,0 8 | 654,1183,0 9 | 423,2847,0 10 | 858,1805,0 11 | 722,2331,0 12 | 1198,3033,0 13 | 522,1512,0 14 | 561,1136,0 15 | 606,2867,0 16 | 1170,861,0 17 | 614,945,0 18 | 1241,3145,0 19 | 1111,2755,0 20 | 270,2177,0 21 | 225,2837,0 22 | 597,242,0 23 | 661,946,0 24 | 705,2054,0 25 | 1191,567,1 26 | 606,1485,0 27 | 1030,2320,1 28 | 1105,783,0 29 | 1225,2383,0 30 | 536,750,1 31 | 375,532,0 32 | 1022,2865,0 33 | 781,3199,0 34 | 93,3160,0 35 | 578,2195,0 36 | 276,3022,1 37 | 646,836,0 38 | 511,2821,0 39 | 490,1514,0 40 | 587,1001,0 41 | 1109,2852,0 42 | 799,1592,0 43 | 1269,1433,1 44 | 1258,2490,0 45 | 553,897,0 46 | 539,1311,0 47 | 897,3017,0 48 | 22,1435,1 49 | 997,1599,0 50 | 348,239,0 51 | 112,3119,0 52 | 604,2850,0 53 | 1124,234,0 54 | 1148,1183,0 55 | 267,263,0 56 | 310,562,0 57 | 779,792,0 58 | 1186,1017,0 59 | 232,1437,0 60 | 1185,2510,0 61 | 632,3126,0 62 | 497,945,0 63 | 856,5,0 64 | 250,367,0 65 | 131,330,0 66 | 427,2326,0 67 | 546,750,0 68 | 1262,192,1 69 | 1308,713,1 70 | 270,1447,0 71 | 510,2056,1 72 | 251,36,0 73 | 26,897,0 74 | 508,3060,0 75 | 197,1892,0 76 | 615,369,0 77 | 105,2626,0 78 | 233,1917,0 79 | 571,1075,0 80 | 508,533,0 81 | 1078,2081,0 82 | 751,2189,0 83 | 515,2313,0 84 | 874,1319,0 85 | 648,1773,0 86 | 1029,2081,0 87 | 831,2970,0 88 | 606,2411,0 89 | 578,2332,0 90 | 546,986,0 91 | 736,3032,0 92 | 890,2143,0 93 | 776,907,0 94 | 1322,911,0 95 | 558,2143,0 96 | 620,2807,0 97 | 1322,1146,0 98 | 773,1014,0 99 | 930,254,0 100 | 604,3199,0 101 | 1362,744,0 102 | 157,25,0 103 | 845,180,1 104 | 333,2915,0 105 | 829,3151,0 106 | 641,1177,0 107 | 477,2466,1 108 | 517,1667,0 109 | 443,1643,0 110 | 146,2853,0 111 | 276,3048,0 112 | 808,2125,0 113 | 1119,1351,0 114 | 511,998,0 115 | 605,2837,0 116 | 896,159,1 117 | 591,515,0 118 | 559,2860,0 119 | 602,897,0 120 | 753,2847,0 121 | 517,361,0 122 | 753,915,0 123 | 1141,2811,1 124 | 404,2890,1 125 | 1018,3156,0 126 | 587,3190,0 127 | 231,1925,0 128 | 594,1037,0 129 | 586,1485,0 130 | 557,338,0 131 | 556,1475,0 132 | 1024,2468,0 133 | 25,2099,0 134 | 897,452,1 135 | 971,647,0 136 | 32,2008,1 137 | 139,426,0 138 |
1099,767,1 139 | 823,2474,0 140 | 1004,2232,0 141 | 209,1937,0 142 | 1038,2638,0 143 | 848,843,0 144 | 235,1970,0 145 | 547,2863,0 146 | 320,3087,0 147 | 369,3062,0 148 | 1246,857,0 149 | 971,196,0 150 | 265,939,0 151 | 271,2877,0 152 | 571,111,0 153 | 157,1735,0 154 | 523,3085,0 155 | 557,190,0 156 | 897,921,0 157 | 465,151,0 158 | 208,1781,0 159 | 510,3062,0 160 | 1001,2544,0 161 | 132,2272,0 162 | 233,1153,0 163 | 944,2561,0 164 | 167,2850,0 165 | 559,326,1 166 | 731,3021,0 167 | 318,3039,0 168 | 606,2847,0 169 | 591,2158,0 170 | 1090,973,1 171 | 599,2996,0 172 | 1174,1213,0 173 | 1013,1566,0 174 | 1078,2669,0 175 | 559,2839,0 176 | 615,977,0 177 | 1068,2544,0 178 | 431,1773,0 179 | 588,1842,0 180 | 520,3092,0 181 | 993,3063,0 182 | 501,808,1 183 | 810,709,0 184 | 270,3060,0 185 | 198,64,0 186 | 580,2530,0 187 | 145,1137,0 188 | 1002,2478,0 189 | 1287,792,0 190 | 892,2839,0 191 | 420,2845,0 192 | 625,294,1 193 | 1052,2946,0 194 | 554,2062,0 195 | 548,523,0 196 | 86,1584,0 197 | 1266,1060,1 198 | 547,2816,0 199 | 1078,2723,0 200 | 511,915,0 201 | 653,2030,0 202 | 799,1735,0 203 | 1284,3056,0 204 | 1036,1106,0 205 | 1148,1188,0 206 | 991,2900,0 207 | 287,1485,0 208 | 1340,648,1 209 | 608,939,0 210 | 512,111,0 211 | 588,151,0 212 | 1127,2238,0 213 | 146,2518,0 214 | 518,1805,0 215 | 362,2078,0 216 | 650,1776,0 217 | 1093,948,0 218 | 19,1237,1 219 | 518,3187,0 220 | 662,3109,0 221 | 554,3016,0 222 | 614,1170,0 223 | 484,750,0 224 | 618,1485,0 225 | 498,2153,0 226 | 1083,1949,0 227 | 490,2441,0 228 | 1078,2102,0 229 | 998,1449,0 230 | 538,2200,1 231 | 866,312,0 232 | 1275,2273,0 233 | 624,2224,0 234 | 802,2327,0 235 | 441,1086,0 236 | 1078,313,0 237 | 585,2517,1 238 | 1341,647,1 239 | 574,1132,0 240 | 360,3030,0 241 | 181,1602,0 242 | 517,986,1 243 | 517,355,0 244 | 139,939,0 245 | 736,902,0 246 | 839,3094,0 247 | 827,1060,0 248 | 553,242,0 249 | 109,2501,0 250 | 1176,2329,0 251 | 589,3055,0 252 | 1038,2102,0 253 | 477,195,0 254 | 453,2076,0 255 | 570,2266,0 256 | 753,1132,0 257 | 244,137,0 258 | 520,3134,0 259 | 851,1080,0 260 | 546,190,0 261 | 168,2164,1 262 | 1118,1678,0 263 | 1051,1442,0 264 | 431,111,0 265 | 830,237,0 266 | 653,741,0 267 | 1273,326,0 268 | 1166,2205,0 269 | 892,324,0 270 | 1206,537,1 271 | 52,2008,0 272 | 497,1095,0 273 | 616,239,0 274 | 413,2873,1 275 | 587,2848,0 276 | 1004,852,0 277 | 333,257,0 278 | 606,2165,1 279 | 44,966,0 280 | 40,456,0 281 | 750,330,0 282 | 1258,1032,0 283 | 338,3059,0 284 | 539,1124,0 285 | 223,1583,0 286 | 371,2847,0 287 | 511,1902,0 288 | 1290,3056,0 289 | 396,3133,0 290 | 616,2252,0 291 | 897,521,0 292 | 1020,3132,0 293 | 1089,1372,1 294 | 935,3186,0 295 | 428,2956,0 296 | 1275,2937,1 297 | 81,1592,1 298 | 527,190,0 299 | 333,2154,0 300 | 514,2600,0 301 | 1111,2621,0 302 | 583,1160,0 303 | 1176,2150,0 304 | 181,1584,0 305 | 294,1431,0 306 | 625,944,0 307 | 990,2390,0 308 | 333,448,0 309 | 696,3106,0 310 | 518,1207,0 311 | 15,291,0 312 | 1050,243,0 313 | 490,2853,0 314 | 535,2818,0 315 | 1246,1280,0 316 | 592,426,0 317 | 93,1805,0 318 | 105,2804,0 319 | 852,2383,0 320 | 1122,2860,0 321 | 883,1017,0 322 | 1268,1059,1 323 | 592,2377,0 324 | 195,405,0 325 | 784,251,0 326 | 1308,2146,0 327 | 763,412,0 328 | 541,2867,1 329 | 608,1146,0 330 | 888,375,0 331 | 645,3160,0 332 | 267,444,0 333 | 753,273,0 334 | 536,2839,0 335 | 346,2865,1 336 | 350,151,0 337 | 1001,2083,0 338 | 353,1166,0 339 | 1132,2993,0 340 | 556,2227,0 341 | 808,2100,0 342 | 572,3032,0 343 | 923,955,0 344 | 557,2671,0 345 | 1275,2399,0 346 | 1183,892,1 347 | 654,2154,0 348 | 
143,1830,1 349 | 1050,2774,0 350 | 615,1213,0 351 | 532,1367,0 352 | 216,192,0 353 | 135,36,0 354 | 797,2597,1 355 | 823,1686,0 356 | 1265,1282,0 357 | 664,211,1 358 | 1317,491,0 359 | 528,1367,0 360 | 1322,99,0 361 | 1282,953,0 362 | 781,1687,0 363 | 1148,2363,0 364 | 1258,2273,0 365 | 1190,1026,1 366 | 951,330,0 367 | 145,2441,0 368 | 1004,2410,0 369 | 876,953,0 370 | 27,1701,0 371 | 287,3028,0 372 | 167,540,0 373 | 604,2821,0 374 | 599,2851,0 375 | 518,1075,0 376 | 1182,798,0 377 | 175,2998,0 378 | 78,878,0 379 | 890,2062,0 380 | 859,1440,0 381 | 839,145,0 382 | 497,1075,0 383 | 781,3029,0 384 | 88,1213,0 385 | 216,923,0 386 | 1190,1040,0 387 | 950,807,0 388 | 1299,3108,0 389 | 392,1878,0 390 | 734,598,0 391 | 796,2323,0 392 | 557,70,0 393 | 1004,2271,0 394 | 221,95,0 395 | 753,1643,0 396 | 1106,3106,0 397 | 274,2313,0 398 | 665,3154,0 399 | 761,1767,0 400 | 350,2393,0 401 | 854,191,1 402 | 715,321,0 403 | 897,2308,0 404 | 44,3016,0 405 | 496,375,0 406 | 910,70,0 407 | 346,1583,0 408 | 481,1153,0 409 | 589,3215,0 410 | 443,959,0 411 | 490,1767,0 412 | 608,911,0 413 | 270,3164,0 414 | 556,179,0 415 | 1268,2772,0 416 | 512,1169,0 417 | 622,288,1 418 | 518,1182,0 419 | 695,162,0 420 | 484,2395,0 421 | 62,2127,1 422 | 492,2172,0 423 | 537,895,1 424 | 604,2411,0 425 | 772,902,0 426 | 1024,2338,0 427 | 769,3011,0 428 | 748,965,0 429 | 516,485,0 430 | 991,423,0 431 | 1118,2393,0 432 | 565,2193,0 433 | 881,2039,0 434 | 1272,1028,0 435 | 1362,2840,0 436 | 1225,1447,0 437 | 374,3141,0 438 | 796,5,0 439 | 837,444,0 440 | 577,3055,0 441 | 632,36,0 442 | 1305,1007,0 443 | 565,893,0 444 | 547,1007,0 445 | 518,1007,0 446 | 781,2850,0 447 | 1287,1026,0 448 | 1334,3161,0 449 | 1262,531,0 450 | 248,125,0 451 | 604,1497,0 452 | 635,2019,0 453 | 743,3003,0 454 | 82,632,0 455 | 876,566,0 456 | 526,3148,0 457 | 1322,984,0 458 | 565,921,0 459 | 394,2757,0 460 | 589,3063,0 461 | 371,945,0 462 | 910,1213,0 463 | 38,2058,1 464 | 1190,107,0 465 | 316,2624,0 466 | 562,893,0 467 | 763,386,1 468 | 1019,3060,0 469 | 604,2877,0 470 | 1053,2280,0 471 | 890,423,0 472 | 251,73,0 473 | 510,2089,0 474 | 135,1497,0 475 | 503,1770,0 476 | 736,955,0 477 | 5,364,0 478 | 543,2860,0 479 | 1257,2920,0 480 | 1282,172,0 481 | 839,1135,0 482 | 602,426,0 483 | 696,3110,0 484 | 1064,185,0 485 | 897,190,0 486 | 717,500,0 487 | 504,371,0 488 | 935,541,0 489 | 1340,2333,0 490 | 834,2871,0 491 | 492,2441,0 492 | 606,3209,0 493 | 531,111,0 494 | 321,2759,0 495 | 593,151,1 496 | 452,2377,0 497 | 124,2089,0 498 | 511,137,0 499 | 1281,3157,0 500 | 462,937,0 501 | 407,953,0 502 | 574,2518,0 503 | 249,1798,0 504 | 967,989,0 505 | 1269,1062,0 506 | 851,2926,0 507 | 578,3175,0 508 | 1201,3106,0 509 | 1269,2765,0 510 | 584,1643,0 511 | 301,219,0 512 | 270,1148,0 513 | 1078,1033,0 514 | 632,1169,0 515 | 960,2122,0 516 | 504,2292,0 517 | 478,136,0 518 | 1287,1052,0 519 | 1259,3154,0 520 | 255,1161,0 521 | 736,1213,0 522 | 1360,610,1 523 | 3,348,0 524 | 272,2337,1 525 | 1225,3003,0 526 | 162,1493,1 527 | 1272,2399,0 528 | 722,3212,0 529 | 1115,1062,0 530 | 1331,638,1 531 | 522,1010,0 532 | 106,1698,0 533 | 586,3080,0 534 | 1115,1955,1 535 | 1185,562,1 536 | 897,911,0 537 | 577,3134,0 538 | 284,330,0 539 | 348,2206,0 540 | 798,3108,0 541 | 571,2821,0 542 | 82,2884,0 543 | 553,257,0 544 | 865,2251,0 545 | 594,2591,0 546 | 531,3109,0 547 | 992,533,0 548 | 654,2468,0 549 | 431,1163,0 550 | 1127,2156,0 551 | 4,1905,1 552 | 412,182,1 553 | 737,1180,0 554 | 736,2212,0 555 | 1076,456,0 556 | 558,3048,0 557 | 1242,788,0 558 | 267,145,0 559 | 592,580,0 560 | 
753,2833,0 561 | 1246,1341,0 562 | 638,2621,0 563 | 1300,737,1 564 | 572,1564,1 565 | 781,3190,0 566 | 179,2334,0 567 | 1341,3021,0 568 | 267,1595,0 569 | 839,444,0 570 | 903,2527,1 571 | 67,1495,0 572 | 875,2612,0 573 | 431,77,0 574 | 512,2803,0 575 | 779,1427,0 576 | 738,2544,0 577 | 1284,2720,0 578 | 389,621,0 579 | 222,3111,0 580 | 837,897,0 581 | 1287,2510,0 582 | 1284,462,0 583 | 953,850,1 584 | 244,1163,0 585 | 606,2474,0 586 | 923,1180,0 587 | 820,703,0 588 | 1334,430,0 589 | 1053,163,0 590 | 601,2105,0 591 | 781,1816,0 592 | 1138,547,0 593 | 642,190,0 594 | 425,3036,0 595 | 606,2518,0 596 | 609,2452,0 597 | 27,3070,0 598 | 465,977,0 599 | 49,2072,1 600 | 1321,260,0 601 | 797,457,0 602 | 871,316,0 603 | 92,987,0 604 | 989,822,1 605 | 1213,2837,1 606 | 851,1040,0 607 | 883,190,0 608 | 346,1907,0 609 | 540,5,1 610 | 462,345,0 611 | 516,3017,0 612 | 465,368,0 613 | 361,1853,0 614 | 267,2807,0 615 | 350,1763,0 616 | 1046,2348,1 617 | 860,515,0 618 | 376,1115,0 619 | 722,2198,0 620 | 333,2860,0 621 | 452,309,0 622 | 882,2139,0 623 | 453,2045,0 624 | 544,1182,0 625 | 689,1127,0 626 | 1190,1485,0 627 | 858,1007,0 628 | 26,485,0 629 | 371,2818,0 630 | 556,3014,0 631 | 40,2093,0 632 | 312,2801,0 633 | 607,2544,0 634 | 592,485,0 635 | 1199,954,0 636 | 646,3022,0 637 | 1196,791,0 638 | 1196,3025,0 639 | 1148,312,0 640 | 496,1715,0 641 | 265,465,0 642 | 669,2984,0 643 | 914,2049,0 644 | 1055,758,1 645 | 541,1010,0 646 | 711,3163,0 647 | 803,1146,0 648 | 910,2444,0 649 | 357,2108,0 650 | 1145,3199,0 651 | 88,3161,0 652 | 554,832,0 653 | 490,945,0 654 | 1341,2062,0 655 | 853,371,0 656 | 251,2818,0 657 | 1330,2606,0 658 | 589,3200,0 659 | 1205,536,1 660 | 225,998,0 661 | 1038,2550,0 662 | 1295,702,1 663 | 653,2765,0 664 | 1251,314,0 665 | 808,2960,0 666 | 432,165,0 667 | 820,2032,0 668 | 544,2853,0 669 | 510,2064,0 670 | 539,1367,0 671 | 76,362,0 672 | 194,67,0 673 | 284,1447,0 674 | 1196,2133,0 675 | 1024,2273,0 676 | 587,1512,0 677 | 1016,548,0 678 | 853,986,0 679 | 798,671,1 680 | 727,3222,0 681 | 631,2318,1 682 | 1257,313,0 683 | 1186,324,0 684 | 781,2474,0 685 | 950,3032,0 686 | 823,1095,0 687 | 1334,1146,0 688 | 1105,462,0 689 | 859,2613,0 690 | 604,1120,0 691 | 1275,2400,0 692 | 267,3213,0 693 | 1181,2860,0 694 | 355,1484,0 695 | 1056,1157,0 696 | 605,2411,0 697 | 697,2780,1 698 | 1008,1907,0 699 | 753,1014,0 700 | 208,1770,0 701 | 87,1120,0 702 | 518,1120,0 703 | 914,2585,0 704 | 606,2137,0 705 | 550,3094,0 706 | 939,2132,0 707 | 350,946,0 708 | 879,1060,0 709 | 176,423,0 710 | 109,1655,0 711 | 497,1512,0 712 | 1349,946,0 713 | 599,1475,0 714 | 1186,1612,0 715 | 646,3023,0 716 | 557,452,0 717 | 1290,204,0 718 | 361,452,0 719 | 923,371,0 720 | 611,1036,0 721 | 901,150,1 722 | 593,367,0 723 | 602,430,0 724 | 784,1020,0 725 | 1278,447,0 726 | 817,2839,0 727 | 47,2862,0 728 | 583,2194,1 729 | 431,1828,0 730 | 1305,1485,0 731 | 512,16,0 732 | 866,192,0 733 | 506,1010,0 734 | 523,1497,0 735 | 587,2821,0 736 | 860,222,0 737 | 349,435,0 738 | 267,1180,0 739 | 654,3160,0 740 | 608,257,0 741 | 49,548,0 742 | 572,355,0 743 | 1092,978,1 744 | 1185,1062,0 745 | 594,100,0 746 | 975,461,0 747 | 539,922,0 748 | 1330,371,0 749 | 1050,772,0 750 | 995,2135,0 751 | 826,2175,0 752 | 593,1564,0 753 | 1231,3094,0 754 | 1159,651,0 755 | 29,251,0 756 | 883,4,0 757 | 394,3221,0 758 | 944,2565,0 759 | 592,1923,0 760 | 554,2996,0 761 | 910,946,0 762 | 1114,1362,0 763 | 1319,477,0 764 | 715,399,1 765 | 779,1039,0 766 | 821,1519,0 767 | 839,331,0 768 | 1259,1157,0 769 | 1176,2820,0 770 | 214,2357,0 771 | 1329,1666,0 
772 | 306,2976,0 773 | 214,1533,0 774 | 859,2660,0 775 | 255,1772,0 776 | 578,1686,0 777 | 1190,2008,0 778 | 823,2850,0 779 | 15,2453,0 780 | 181,878,0 781 | 329,3041,0 782 | 432,2398,0 783 | 44,2092,0 784 | 176,1655,0 785 | 588,430,0 786 | 875,2438,0 787 | 979,1344,0 788 | 361,430,0 789 | 571,2600,0 790 | 325,2619,0 791 | 1190,2752,0 792 | 309,403,0 793 | 465,1612,0 794 | 1246,1199,0 795 | 15,791,0 796 | 615,1612,0 797 | 273,8,1 798 | 657,705,0 799 | 396,1367,0 800 | 1176,2686,0 801 | 363,2854,1 802 | 890,2996,0 803 | 1262,368,0 804 | 584,1828,0 805 | 265,371,0 806 | 950,363,0 807 | 220,15,0 808 | 781,3183,0 809 | 577,2146,1 810 | 839,986,0 811 | 228,1633,0 812 | 646,1666,0 813 | 322,190,0 814 | 925,572,0 815 | 294,3128,0 816 | 993,3033,0 817 | 355,3095,0 818 | 847,1573,0 819 | 751,2728,0 820 | 752,629,0 821 | 531,1633,0 822 | 944,2589,0 823 | 1081,905,0 824 | 705,1083,0 825 | 587,1095,0 826 | 111,473,0 827 | 532,1214,0 828 | 531,77,0 829 | 981,2062,0 830 | 1293,3060,0 831 | 1038,219,0 832 | 588,254,0 833 | 27,2199,0 834 | 736,257,0 835 | 139,960,0 836 | 636,2547,0 837 | 816,777,0 838 | 79,3095,0 839 | 984,2234,1 840 | 1255,1086,0 841 | 737,2649,0 842 | 1235,1900,0 843 | 517,2649,1 844 | 999,3207,0 845 | 157,1151,0 846 | 1002,1440,0 847 | 532,1350,0 848 | 1174,369,0 849 | 1262,448,0 850 | 541,2441,0 851 | 490,1007,0 852 | 606,945,0 853 | 590,457,0 854 | 883,448,0 855 | 288,3006,0 856 | 1275,2435,0 857 | 517,331,0 858 | 372,740,0 859 | 773,1512,0 860 | 1287,2744,0 861 | 1244,1879,0 862 | 620,448,0 863 | 571,2877,0 864 | 858,2816,0 865 | 547,1095,0 866 | 991,1655,0 867 | 443,73,0 868 | 618,1970,0 869 | 614,273,0 870 | 225,3175,0 871 | 1262,358,0 872 | 287,448,0 873 | 665,2917,0 874 | 231,1712,0 875 | 146,1162,0 876 | 273,1214,0 877 | 1114,2425,0 878 | 389,3023,0 879 | 512,2791,0 880 | 219,728,0 881 | 211,1636,0 882 | 270,1007,0 883 | 700,412,1 884 | 599,3009,0 885 | 584,1169,0 886 | 897,939,0 887 | 654,2998,0 888 | 851,2402,0 889 | 615,99,0 890 | 531,2513,1 891 | 654,2112,0 892 | 1322,324,0 893 | 1086,1109,0 894 | 1273,2471,1 895 | 753,2961,0 896 | 328,3043,0 897 | 297,2967,0 898 | 992,2247,1 899 | 823,904,0 900 | 1105,1433,0 901 | 837,893,0 902 | 346,1164,0 903 | 35,2544,0 904 | 443,2959,0 905 | 389,1666,0 906 | 516,1678,0 907 | 470,1980,0 908 | 866,314,0 909 | 381,2640,1 910 | 711,279,0 911 | 26,242,0 912 | 1289,3157,0 913 | 282,2986,0 914 | 616,1514,1 915 | 412,917,0 916 | 574,2816,0 917 | 547,915,0 918 | 606,1010,0 919 | 1185,3202,0 920 | 1011,1090,0 921 | 1019,1485,0 922 | 371,2837,0 923 | 287,330,0 924 | 1179,342,0 925 | 858,3187,0 926 | 565,372,0 927 | 368,2864,0 928 | 17,1984,1 929 | 574,3215,0 930 | 532,2302,0 931 | 1118,338,0 932 | 910,75,0 933 | 883,372,0 934 | 823,2206,0 935 | 1258,2402,1 936 | 580,922,0 937 | 514,2816,0 938 | 1272,783,0 939 | 773,2316,0 940 | 937,1551,0 941 | 1281,3162,0 942 | 999,1103,0 943 | 1225,1373,0 944 | 1293,2998,0 945 | 858,2853,0 946 | 606,998,0 947 | 527,216,0 948 | 606,1159,0 949 | 625,127,0 950 | 514,1497,0 951 | 965,2819,0 952 | 528,3087,0 953 | 772,923,0 954 | 1185,535,0 955 | 646,832,0 956 | 1221,2323,0 957 | 1170,1167,0 958 | 1308,2367,0 959 | 1105,2720,0 960 | 572,954,0 961 | 851,2273,0 962 | 67,1117,0 963 | 641,333,1 964 | 726,2788,1 965 | 157,37,0 966 | 799,2612,0 967 | 1186,946,0 968 | 254,1777,0 969 | 1322,893,0 970 | 382,216,0 971 | 46,2622,0 972 | 216,1564,0 973 | 1272,1039,0 974 | 431,1715,0 975 | 1096,1351,1 976 | 282,2987,0 977 | 211,3091,0 978 | 1179,2850,0 979 | 948,2077,0 980 | 1279,523,0 981 | 753,3199,0 982 | 287,2141,0 983 | 
1177,1585,0 984 | 474,998,0 985 | 858,1207,0 986 | 444,2048,0 987 | 943,880,1 988 | 823,1162,0 989 | 1330,508,0 990 | 1326,678,1 991 | 551,2216,0 992 | 1074,2095,1 993 | 454,2945,0 994 | 517,566,0 995 | 504,531,0 996 | 1078,2402,0 997 | 670,558,0 998 | 547,2877,1 999 | 279,1437,0 1000 | 323,3119,0 1001 | 1179,1816,0 1002 | 24,1583,0 1003 | 371,2206,0 1004 | 132,582,0 1005 | 823,1514,0 1006 | 93,1643,0 1007 | 559,176,0 1008 | 515,2227,0 1009 | 274,647,0 1010 | 736,2193,0 1011 | 1272,1048,1 1012 | 604,2848,0 1013 | 1310,2771,0 1014 | 617,2999,0 1015 | 1127,2392,0 1016 | 41,851,0 1017 | 1287,2065,0 1018 | 248,36,0 1019 | 1329,2143,1 1020 | 1099,2922,0 1021 | 1038,2310,0 1022 | 514,3027,0 1023 | 402,2945,0 1024 | 369,3007,0 1025 | 157,49,0 1026 | 587,2833,0 1027 | 602,71,0 1028 | 1257,2030,0 1029 | 1287,1039,0 1030 | 1001,2550,0 1031 | 1287,1733,0 1032 | 622,878,0 1033 | 516,960,0 1034 | 271,945,0 1035 | 6,327,0 1036 | 371,2848,0 1037 | 101,1670,1 1038 | 135,1922,0 1039 | 1326,2105,0 1040 | 1075,1384,0 1041 | 316,305,0 1042 | 35,2050,0 1043 | 541,1007,0 1044 | 517,372,0 1045 | 859,802,0 1046 | 441,1059,0 1047 | 317,1928,0 1048 | 285,2946,0 1049 | 751,3167,0 1050 | 515,307,0 1051 | 561,3034,0 1052 | 350,430,0 1053 | 574,1636,0 1054 | 7,1617,0 1055 | 1139,2502,0 1056 | 1177,2833,0 1057 | 522,1120,0 1058 | 146,2316,0 1059 | 574,1010,0 1060 | 856,3111,0 1061 | 883,3188,0 1062 | 766,389,1 1063 | 146,998,0 1064 | 1246,1321,0 1065 | 26,921,0 1066 | 543,1151,0 1067 | 364,2503,0 1068 | 5,202,0 1069 | 361,558,0 1070 | 804,1355,0 1071 | 1229,902,0 1072 | 1148,2839,0 1073 | 512,286,0 1074 | 1324,1115,0 1075 | 124,1442,0 1076 | 1004,2270,0 1077 | 1341,3016,0 1078 | 1258,1083,0 1079 | 789,3108,0 1080 | 518,945,0 1081 | 1258,1040,0 1082 | 26,4,0 1083 | 348,332,0 1084 | 734,2060,0 1085 | 455,769,0 1086 | 355,2004,0 1087 | 1014,2987,0 1088 | 1321,1475,0 1089 | 620,190,1 1090 | 565,1017,0 1091 | 737,639,0 1092 | 827,2756,0 1093 | 1204,3127,0 1094 | 527,182,0 1095 | 639,322,1 1096 | 774,2393,0 1097 | 1176,2395,0 1098 | 883,531,0 1099 | 599,3016,0 1100 | 606,1095,0 1101 | 2,1881,1 1102 | 858,3027,0 1103 | 1252,145,0 1104 | 803,354,0 1105 | 109,3048,0 1106 | 1293,1485,0 1107 | 662,3160,0 1108 | 781,1686,0 1109 | 804,2207,0 1110 | 455,3036,0 1111 | 271,1007,0 1112 | 321,2174,0 1113 | 46,2078,1 1114 | 903,2027,0 1115 | 484,3224,0 1116 | 1190,2399,0 1117 | 1055,772,0 1118 | 562,430,0 1119 | 820,691,1 1120 | 653,2402,0 1121 | 950,294,0 1122 | 44,2996,0 1123 | 1048,1140,0 1124 | 581,1690,0 1125 | 946,1157,0 1126 | 763,1647,0 1127 | 753,3126,0 1128 | 1115,2273,0 1129 | 678,260,1 1130 | 1177,3199,0 1131 | 556,2501,0 1132 | 453,2987,0 1133 | 348,1512,0 1134 | 1177,1182,0 1135 | 858,2833,0 1136 | 736,324,0 1137 | 594,3070,0 1138 | 641,147,0 1139 | 1278,653,0 1140 | 128,2253,0 1141 | 632,1026,0 1142 | 171,1182,0 1143 | 518,1105,0 1144 | 579,2318,0 1145 | 516,558,0 1146 | 570,784,0 1147 | 892,960,0 1148 | 1174,923,1 1149 | 1044,2566,0 1150 | 823,1170,0 1151 | 556,3009,0 1152 | 604,1163,0 1153 | 578,3091,0 1154 | 284,2363,0 1155 | 1257,1039,0 1156 | 23,2039,0 1157 | 109,2227,1 1158 | 1290,2490,0 1159 | 312,663,0 1160 | 604,808,0 1161 | 382,2292,0 1162 | 605,2853,0 1163 | 502,1651,0 1164 | 502,1785,0 1165 | 974,2307,0 1166 | 1180,2132,1 1167 | 965,2314,1 1168 | 837,1180,0 1169 | 876,151,0 1170 | 784,2577,1 1171 | 1269,2085,0 1172 | 828,2819,0 1173 | 632,2792,0 1174 | 549,1738,0 1175 | 269,1973,0 1176 | 1009,817,1 1177 | 1080,2026,0 1178 | 45,163,0 1179 | 944,2550,0 1180 | 789,2562,1 1181 | 602,1150,0 1182 | 554,647,0 1183 | 
1053,759,1 1184 | 1124,878,0 1185 | 1105,2926,0 1186 | 1150,925,1 1187 | 258,2669,0 1188 | 858,3190,0 1189 | 565,531,0 1190 | 565,369,0 1191 | 823,998,0 1192 | 574,3199,0 1193 | 1068,1157,0 1194 | 310,2161,0 1195 | 632,2688,0 1196 | 875,1031,0 1197 | 1242,1139,0 1198 | 133,1875,0 1199 | 1116,3067,0 1200 | 616,286,0 1201 | 731,196,0 1202 | 1264,1433,0 1203 | 1243,517,1 1204 | 1341,307,0 1205 | 652,3216,0 1206 | 337,3061,0 1207 | 1177,1497,0 1208 | 207,1525,1 1209 | 1348,600,1 1210 | 418,2884,0 1211 | 1071,1197,0 1212 | 645,286,0 1213 | 760,577,0 1214 | 264,1180,0 1215 | 662,1720,0 1216 | 1264,792,0 1217 | 1321,307,0 1218 | 606,2998,0 1219 | 225,1518,0 1220 | 619,290,1 1221 | 881,2283,0 1222 | 512,915,0 1223 | 368,603,0 1224 | 389,423,0 1225 | 615,954,0 1226 | 859,2398,0 1227 | 930,2860,0 1228 | 523,1137,0 1229 | 157,3164,0 1230 | 541,2877,0 1231 | 79,2489,0 1232 | 710,2565,0 1233 | 837,3166,0 1234 | 78,1584,0 1235 | 520,1402,0 1236 | 891,2860,0 1237 | 518,2195,0 1238 | 176,1475,1 1239 | 222,2685,0 1240 | 433,2242,1 1241 | 539,2843,0 1242 | 1353,3021,0 1243 | 609,3134,0 1244 | 522,2867,0 1245 | 272,1186,0 1246 | 230,2334,0 1247 | 608,521,0 1248 | 944,1167,0 1249 | 597,521,1 1250 | 1270,466,1 1251 | 654,533,0 1252 | 875,178,0 1253 | 876,324,0 1254 | 1322,921,0 1255 | 511,2863,0 1256 | 731,647,0 1257 | 779,535,0 1258 | 604,2137,0 1259 | 1182,2608,0 1260 | 264,331,0 1261 | 250,1923,0 1262 | 562,954,1 1263 | 1162,2238,0 1264 | 465,99,0 1265 | 431,3109,1 1266 | 1229,1082,1 1267 | 348,375,0 1268 | 941,2264,0 1269 | 632,1497,0 1270 | 532,1311,0 1271 | 1053,2045,0 1272 | 645,2853,0 1273 | 1170,2331,0 1274 | 900,333,0 1275 | 1008,1190,0 1276 | 605,2818,0 1277 | 26,2391,0 1278 | 145,375,0 1279 | 1259,2544,0 1280 | 1174,176,0 1281 | 734,2045,0 1282 | 198,125,0 1283 | 821,185,1 1284 | 851,1955,0 1285 | 1036,3036,0 1286 | 500,1157,0 1287 | 934,663,0 1288 | 566,2799,0 1289 | 892,426,0 1290 | 999,1870,0 1291 | 592,2154,0 1292 | 181,534,0 1293 | 114,3080,0 1294 | 1198,1108,0 1295 | 858,2137,0 1296 | 592,372,0 1297 | 1175,265,0 1298 | 1353,260,0 1299 | 1072,561,0 1300 | 578,2837,0 1301 | 6,192,0 1302 | 910,1865,0 1303 | 654,2766,0 1304 | 1174,521,0 1305 | 517,465,0 1306 | 1038,2046,0 1307 | 837,368,0 1308 | 602,254,0 1309 | 1300,1140,0 1310 | 222,1006,0 1311 | 1292,36,0 1312 | 799,1961,0 1313 | 516,70,0 1314 | 100,1542,0 1315 | 548,3034,0 1316 | 176,2062,0 1317 | 1300,840,0 1318 | 648,1534,0 1319 | 303,3,0 1320 | 1078,2432,0 1321 | 271,2837,0 1322 | 650,2450,0 1323 | 401,2948,0 1324 | 1138,934,1 1325 | 554,2143,0 1326 | 93,375,0 1327 | 350,3161,0 1328 | 1257,1083,0 1329 | 516,75,0 1330 | 981,621,0 1331 | 760,1464,0 1332 | 1105,3056,0 1333 | 88,521,0 1334 | 584,1497,0 1335 | 999,2002,0 1336 | 961,1140,0 1337 | 797,2799,0 1338 | 228,1011,0 1339 | 1272,1083,0 1340 | 511,1159,0 1341 | 251,1893,0 1342 | 1262,352,0 1343 | 55,2105,1 1344 | 580,3133,0 1345 | 628,625,0 1346 | 515,196,0 1347 | 389,836,0 1348 | 516,984,0 1349 | 605,2867,0 1350 | 736,4,1 1351 | 522,945,0 1352 | 787,2567,1 1353 | 656,229,1 1354 | 1322,190,0 1355 | 281,2990,0 1356 | 515,260,0 1357 | 1145,2863,0 1358 | 359,1561,0 1359 | 875,165,0 1360 | 1026,2327,1 1361 | 930,521,0 1362 | 1259,456,1 1363 | 452,324,0 1364 | 851,2400,0 1365 | 930,531,0 1366 | 465,917,0 1367 | 858,3171,0 1368 | 1303,3110,0 1369 | 349,2076,0 1370 | 657,288,0 1371 | 856,2323,0 1372 | 132,344,0 1373 | 559,331,0 1374 | 866,700,0 1375 | 71,1559,0 1376 | 444,2757,0 1377 | 1105,1427,0 1378 | 993,3080,0 1379 | 267,750,0 1380 | 371,2863,0 1381 | 325,3108,0 1382 | 736,893,0 1383 | 
661,521,0 1384 | 465,1146,0 1385 | 1357,413,0 1386 | 799,804,1 1387 | 1116,966,0 1388 | 1272,1062,0 1389 | 1257,792,0 1390 | 216,430,0 1391 | 556,3022,0 1392 | 558,2851,1 1393 | 584,1137,0 1394 | 510,1837,0 1395 | 779,2587,1 1396 | 1005,891,0 1397 | 724,2788,0 1398 | 137,1717,0 1399 | 484,190,0 1400 | 999,2263,1 1401 | 316,1762,0 1402 | 326,3113,0 1403 | 1145,3091,0 1404 | 26,954,0 1405 | 615,331,0 1406 | 523,1514,0 1407 | 121,1768,1 1408 | 188,1513,1 1409 | 302,2983,0 1410 | 1145,1686,0 1411 | 357,1437,0 1412 | 263,52,0 1413 | 837,145,0 1414 | 518,2818,0 1415 | 599,2313,0 1416 | 1303,3106,0 1417 | 1284,1080,1 1418 | 546,145,0 1419 | 520,1099,0 1420 | 142,92,0 1421 | 1349,354,0 1422 | 722,1117,0 1423 | 319,3110,0 1424 | 512,3027,0 1425 | 1119,2135,0 1426 | 930,2839,0 1427 | 253,469,0 1428 | 615,372,0 1429 | 264,371,0 1430 | 176,1666,0 1431 | 348,1497,0 1432 | 345,1520,1 1433 | 126,2914,0 1434 | 172,1962,0 1435 | 859,2478,0 1436 | 910,955,0 1437 | 638,327,1 1438 | 97,1637,1 1439 | 322,939,0 1440 | 3,1385,0 1441 | 276,2161,0 1442 | 467,2084,0 1443 | 1334,566,0 1444 | 825,2620,1 1445 | 553,371,0 1446 | 1290,2765,0 1447 | 1011,948,0 1448 | 541,1686,0 1449 | 829,1072,0 1450 | 211,726,0 1451 | 837,426,0 1452 | 608,1017,0 1453 | 874,788,0 1454 | 1146,2452,0 1455 | 705,2920,0 1456 | 951,2915,0 1457 | 672,2846,1 1458 | 1337,2117,0 1459 | 1269,2065,0 1460 | 44,845,0 1461 | 793,1690,0 1462 | 753,2112,0 1463 | 325,3106,0 1464 | 620,521,0 1465 | 543,331,0 1466 | 371,915,0 1467 | 145,2600,0 1468 | 547,1105,0 1469 | 747,248,0 1470 | 258,1767,0 1471 | 448,2147,1 1472 | 1066,2023,0 1473 | 582,2799,0 1474 | 265,984,0 1475 | 991,832,1 1476 | 608,192,0 1477 | 852,1446,0 1478 | 168,2820,0 1479 | 1018,398,0 1480 | 605,1011,0 1481 | 409,2902,0 1482 | 169,1501,1 1483 | 773,2137,0 1484 | 484,3019,0 1485 | 88,893,0 1486 | 287,2591,0 1487 | 858,2497,0 1488 | 578,2165,0 1489 | 1145,2847,0 1490 | 134,1882,0 1491 | 971,260,0 1492 | 709,2768,0 1493 | 837,954,0 1494 | 1315,671,0 1495 | 322,893,0 1496 | 1275,783,0 1497 | 1080,2057,0 1498 | 539,3062,0 1499 | 60,2106,1 1500 | 202,2219,0 1501 | 571,915,0 1502 | 517,750,0 1503 | 285,3030,1 1504 | 614,998,0 1505 | 515,621,0 1506 | 154,1896,0 1507 | 578,2867,0 1508 | 93,2850,0 1509 | 588,3161,0 1510 | 550,2419,0 1511 | 157,1865,0 1512 | 546,1900,0 1513 | 50,2962,0 1514 | 1130,521,0 1515 | 960,2325,0 1516 | 432,208,0 1517 | 331,3106,0 1518 | 774,1675,0 1519 | 1349,977,0 1520 | 1050,2986,0 1521 | 412,2193,0 1522 | 779,2139,0 1523 | 847,1818,0 1524 | 1174,984,0 1525 | 620,2791,0 1526 | 591,2176,1 1527 | 1254,527,1 1528 | 1048,840,0 1529 | 208,1870,0 1530 | 548,3080,0 1531 | 1115,1039,0 1532 | 614,1636,0 1533 | 389,3021,0 1534 | 225,2137,0 1535 | 527,3019,0 1536 | 597,151,0 1537 | 43,3110,0 1538 | 759,983,0 1539 | 736,338,0 1540 | 644,2863,0 1541 | 1154,2867,0 1542 | 1316,936,0 1543 | 1170,2250,0 1544 | 493,45,0 1545 | 939,2921,0 1546 | 446,1216,0 1547 | 1008,2824,0 1548 | 1050,759,0 1549 | 1354,593,1 1550 | 360,2946,0 1551 | 1296,692,1 1552 | 1173,412,0 1553 | 1215,1170,0 1554 | 139,923,0 1555 | 946,2584,0 1556 | 753,2441,0 1557 | 1320,685,1 1558 | 588,939,0 1559 | 267,639,0 1560 | 511,955,0 1561 | 1050,3132,0 1562 | 425,1593,0 1563 | 536,3166,0 1564 | 465,1213,0 1565 | 610,277,0 1566 | 827,2321,0 1567 | 830,2494,0 1568 | 348,3027,0 1569 | 355,1171,0 1570 | 879,2812,1 1571 | 764,387,1 1572 | 819,2055,0 1573 | 615,151,0 1574 | 198,1836,0 1575 | 157,71,0 1576 | 485,2434,1 1577 | 737,371,0 1578 | 880,297,0 1579 | 1075,2188,0 1580 | 865,2496,1 1581 | 654,3003,0 1582 | 873,1070,0 1583 
| 577,1136,0 1584 | 1108,1450,0 1585 | 581,3039,0 1586 | 348,2137,0 1587 | 736,954,0 1588 | 1175,1599,0 1589 | 518,1585,0 1590 | 819,2153,0 1591 | 8,1936,1 1592 | 653,2573,0 1593 | 357,2041,0 1594 | 553,369,0 1595 | 897,950,0 1596 | 631,2529,0 1597 | 465,954,0 1598 | 359,1240,0 1599 | 1190,2009,0 1600 | 310,2008,0 1601 | 496,924,0 1602 | 523,273,0 1603 | 120,1754,1 1604 | 225,2411,0 1605 | 233,608,0 1606 | 1106,2452,0 1607 | 109,2062,0 1608 | 1292,1828,0 1609 | 1055,2045,0 1610 | 620,182,0 1611 | 443,3126,0 1612 | 741,485,0 1613 | 1195,1342,0 1614 | 1251,558,0 1615 | 1306,1584,0 1616 | 1114,3035,0 1617 | 1019,312,0 1618 | 578,2257,0 1619 | 77,1606,1 1620 | 594,2853,0 1621 | 1133,2171,1 1622 | 316,1171,0 1623 | 63,2129,1 1624 | 603,2020,0 1625 | 47,2133,0 1626 | 930,558,0 1627 | 823,2364,0 1628 | 808,462,0 1629 | 1264,204,0 1630 | 512,1026,0 1631 | 910,3017,0 1632 | 799,732,0 1633 | 888,273,0 1634 | 1269,1083,0 1635 | 389,2143,0 1636 | 624,2269,0 1637 | 225,1170,0 1638 | 92,1517,0 1639 | 1324,674,1 1640 | 525,2494,0 1641 | 87,2821,0 1642 | 1176,2687,0 1643 | 892,355,0 1644 | 950,994,0 1645 | 547,2464,0 1646 | 823,2877,0 1647 | 233,744,0 1648 | 361,354,0 1649 | 176,3021,0 1650 | 321,3120,1 1651 | 1201,3119,0 1652 | 866,2177,0 1653 | 431,1137,0 1654 | 382,2251,0 1655 | 1330,3213,0 1656 | 732,1293,0 1657 | 547,1010,0 1658 | 573,1646,1 1659 | 1215,1973,0 1660 | 389,307,0 1661 | 1262,984,0 1662 | 157,3077,0 1663 | 15,1183,0 1664 | 263,2162,0 1665 | 768,382,1 1666 | 396,3034,0 1667 | 1014,3132,0 1668 | 946,2039,0 1669 | 511,3204,0 1670 | 1176,1364,0 1671 | 139,368,0 1672 | 594,606,1 1673 | 1110,2018,1 1674 | 531,1720,0 1675 | 706,198,0 1676 | 1330,1696,0 1677 | 496,2441,0 1678 | 1264,1427,0 1679 | 804,2132,0 1680 | 981,3021,0 1681 | 452,2524,1 1682 | 648,1190,0 1683 | 867,782,0 1684 | 1170,1117,0 1685 | 786,370,0 1686 | 324,2064,0 1687 | 844,969,1 1688 | 225,2165,0 1689 | 1083,983,1 1690 | 274,1666,0 1691 | 897,2393,0 1692 | 1002,802,1 1693 | 270,2141,0 1694 | 618,1183,0 1695 | 13,2425,0 1696 | 778,3039,0 1697 | 1294,2159,0 1698 | 29,3207,0 1699 | 593,946,0 1700 | 646,3021,0 1701 | 1037,768,1 1702 | 1212,2060,0 1703 | 1054,762,0 1704 | 574,3190,0 1705 | 803,3188,0 1706 | 852,2998,0 1707 | 282,2835,0 1708 | 540,2521,0 1709 | 1332,288,0 1710 | 1118,426,0 1711 | 819,558,0 1712 | 597,1678,0 1713 | 1241,3055,0 1714 | 284,312,0 1715 | 606,1886,0 1716 | 642,428,0 1717 | 480,1519,0 1718 | 646,2143,0 1719 | 1102,967,1 1720 | 572,372,0 1721 | 541,3190,0 1722 | 204,53,0 1723 | 625,2755,0 1724 | 498,2027,0 1725 | 541,3187,0 1726 | 578,1687,0 1727 | 133,46,0 1728 | 892,2807,0 1729 | 490,327,0 1730 | 933,3119,0 1731 | 543,1667,0 1732 | 186,1518,1 1733 | 599,3021,0 1734 | 601,2048,1 1735 | 592,176,0 1736 | 671,2993,0 1737 | 557,448,0 1738 | 624,195,0 1739 | 556,3048,0 1740 | 1196,2508,0 1741 | 258,2803,0 1742 | 1212,2133,0 1743 | 883,216,0 1744 | 668,220,1 1745 | 642,911,0 1746 | 910,1017,0 1747 | 578,3027,0 1748 | 571,2474,0 1749 | 1258,1044,1 1750 | 339,3066,0 1751 | 1061,1559,0 1752 | 592,955,0 1753 | 517,2839,0 1754 | 1158,985,0 1755 | 1064,691,0 1756 | 138,1991,0 1757 | 484,921,0 1758 | 285,450,0 1759 | 597,426,0 1760 | 599,423,0 1761 | 110,2592,0 1762 | 662,77,0 1763 | 907,1118,0 1764 | 425,2560,0 1765 | 553,999,0 1766 | 578,3187,0 1767 | 146,3183,0 1768 | 522,3199,0 1769 | 1130,2393,0 1770 | 135,100,0 1771 | 522,2837,0 1772 | 1195,1033,0 1773 | 699,2054,0 1774 | 565,452,0 1775 | 1099,2425,1 1776 | 350,99,0 1777 | 423,2816,0 1778 | 970,2714,0 1779 | 518,2600,0 1780 | 271,2497,0 1781 | 804,2274,0 1782 | 
511,3220,0 1783 | 916,3106,0 1784 | 640,323,1 1785 | 584,3160,0 1786 | 1262,3161,0 1787 | 493,2391,0 1788 | 12,1159,0 1789 | 895,2560,0 1790 | 129,3159,0 1791 | 333,168,0 1792 | 26,367,0 1793 | 396,1311,0 1794 | 548,3063,0 1795 | 26,3017,0 1796 | 225,1001,0 1797 | 1284,2960,0 1798 | 1072,2046,1 1799 | 564,1097,0 1800 | 1202,2892,0 1801 | 606,2257,0 1802 | 0,1878,1 1803 | 662,2959,0 1804 | 1022,306,0 1805 | 275,3108,0 1806 | 1304,1116,0 1807 | 70,1485,0 1808 | 873,3063,0 1809 | 398,2938,0 1810 | 44,832,0 1811 | 45,2849,0 1812 | 1227,1088,1 1813 | 275,2995,0 1814 | 329,2894,0 1815 | 877,195,1 1816 | 821,2096,0 1817 | 165,1728,0 1818 | 109,3016,0 1819 | 541,3194,0 1820 | 27,3160,0 1821 | 1177,1014,0 1822 | 135,1129,0 1823 | 1262,324,0 1824 | 1111,2680,0 1825 | 274,832,0 1826 | 840,2292,0 1827 | 654,2961,0 1828 | 687,97,0 1829 | 602,1146,0 1830 | 642,216,0 1831 | 1190,2447,0 1832 | 1196,2076,0 1833 | 547,3171,0 1834 | 252,1786,0 1835 | 1322,368,0 1836 | 515,423,0 1837 | 492,2853,0 1838 | 389,3022,0 1839 | 539,1214,0 1840 | 1264,3089,0 1841 | 917,3132,0 1842 | 1223,2852,0 1843 | 395,2906,1 1844 | 586,3134,0 1845 | 334,3031,1 1846 | 114,3033,0 1847 | 157,1958,0 1848 | 989,2097,0 1849 | 271,1207,0 1850 | 184,1771,0 1851 | 946,3140,0 1852 | 852,3003,0 1853 | 279,2108,1 1854 | 784,412,0 1855 | 578,1636,0 1856 | 606,2853,0 1857 | 1005,292,0 1858 | 907,3110,0 1859 | 680,266,1 1860 | 1037,3036,0 1861 | 852,2591,0 1862 | 973,461,0 1863 | 736,372,0 1864 | 350,923,0 1865 | 443,3117,0 1866 | 548,2516,0 1867 | 571,430,0 1868 | 271,1497,0 1869 | 662,111,0 1870 | 1246,1268,0 1871 | 514,2818,0 1872 | 187,1516,1 1873 | 1056,2584,0 1874 | 820,185,0 1875 | 1198,1367,0 1876 | 1127,2109,0 1877 | 1053,2835,0 1878 | 271,831,0 1879 | 751,2707,0 1880 | 625,1409,0 1881 | 176,2996,0 1882 | 1176,540,0 1883 | 229,2141,0 1884 | 557,939,0 1885 | 1284,2009,0 1886 | 1229,145,0 1887 | 146,915,0 1888 | 511,2877,0 1889 | 857,2167,0 1890 | 1337,1844,0 1891 | 546,2649,0 1892 | 528,1265,0 1893 | 820,2011,0 1894 | 875,1592,0 1895 | 834,3012,0 1896 | 1105,107,0 1897 | 1072,402,0 1898 | 229,312,0 1899 | 866,203,1 1900 | 176,845,0 1901 | 1251,1738,0 1902 | 1272,1342,1 1903 | 681,1600,0 1904 | 840,2175,0 1905 | 1177,2411,0 1906 | 917,772,0 1907 | 208,1940,0 1908 | 912,763,0 1909 | 145,3126,0 1910 | 543,145,0 1911 | 105,3022,0 1912 | 74,1599,1 1913 | 812,2797,1 1914 | 490,100,0 1915 | 114,3034,0 1916 | 129,2958,0 1917 | 465,1763,0 1918 | 1179,891,0 1919 | 1214,549,1 1920 | 827,2335,0 1921 | 594,1447,0 1922 | 514,3085,0 1923 | 997,2255,1 1924 | 1022,1484,0 1925 | 606,2818,0 1926 | 544,1585,0 1927 | 632,332,0 1928 | 202,59,0 1929 | 1300,9,0 1930 | 1014,773,1 1931 | 486,2446,1 1932 | 536,3094,0 1933 | 971,845,0 1934 | 1110,2023,0 1935 | 1305,1852,0 1936 | 341,556,0 1937 | 570,2853,0 1938 | 1039,3069,0 1939 | 548,1068,0 1940 | 1264,2510,0 1941 | 971,2313,0 1942 | 216,372,0 1943 | 1185,2926,0 1944 | 1145,2137,0 1945 | 824,1322,0 1946 | 496,1828,0 1947 | 572,368,0 1948 | 382,1089,0 1949 | 112,3106,0 1950 | 876,951,0 1951 | 1127,2331,0 1952 | 753,125,0 1953 | 443,1715,0 1954 | 541,1120,0 1955 | 131,3107,1 1956 | 577,2837,0 1957 | 1057,919,0 1958 | 1078,2936,0 1959 | 146,3029,0 1960 | 27,1188,0 1961 | 425,1118,0 1962 | 858,3204,0 1963 | 1017,2062,0 1964 | 532,0,0 1965 | 567,3143,0 1966 | 176,621,0 1967 | 1069,1787,0 1968 | 620,352,0 1969 | 805,1637,0 1970 | 605,3175,0 1971 | 516,1146,0 1972 | 523,111,0 1973 | 520,2301,0 1974 | 408,2813,0 1975 | 23,2108,0 1976 | 364,1900,0 1977 | 606,2848,0 1978 | 1303,743,1 1979 | 514,3199,0 1980 | 1305,1105,0 
1981 | 863,891,0 1982 | 1176,2595,0 1983 | 645,1514,0 1984 | 90,344,0 1985 | 599,832,0 1986 | 589,1138,0 1987 | 492,1961,0 1988 | 874,787,0 1989 | 184,1553,0 1990 | 863,2392,0 1991 | 588,521,0 1992 | 1255,1078,1 1993 | 821,1561,1 1994 | 1308,2773,0 1995 | 993,1214,0 1996 | 702,410,1 1997 | 608,216,0 1998 | 593,893,0 1999 | 944,2139,0 2000 | 520,922,0 2001 | 465,485,0 2002 | 1290,1057,0 2003 | 803,2393,0 2004 | 1353,1475,0 2005 | 82,878,0 2006 | 578,1652,0 2007 | 1111,2811,0 2008 | 947,2070,0 2009 | 12,2847,0 2010 | 858,998,0 2011 | 975,1517,0 2012 | 645,924,0 2013 | 208,1865,0 2014 | 892,428,0 2015 | 374,2815,1 2016 | 511,1973,0 2017 | 3,1879,1 2018 | 1021,2839,0 2019 | 522,2821,0 2020 | 570,795,1 2021 | 1105,3026,0 2022 | 632,1011,0 2023 | 515,2804,1 2024 | 1004,1424,0 2025 | 586,1823,0 2026 | 225,2847,0 2027 | 844,2463,1 2028 | 951,2998,0 2029 | 250,2524,0 2030 | 586,3133,0 2031 | 1038,2083,0 2032 | 760,397,1 2033 | 951,2839,0 2034 | 1186,70,0 2035 | 67,3151,0 2036 | 48,2078,0 2037 | 851,3056,0 2038 | 593,897,0 2039 | 823,2518,0 2040 | 1170,1072,0 2041 | 148,2334,0 2042 | 993,1311,0 2043 | 652,318,1 2044 | 511,1748,0 2045 | 348,1007,0 2046 | 295,2969,0 2047 | 827,2183,0 2048 | 592,355,0 2049 | 225,3171,0 2050 | 118,2199,0 2051 | 1269,1026,0 2052 | 312,2771,0 2053 | 250,1148,0 2054 | 518,1137,0 2055 | 1174,372,0 2056 | 923,893,0 2057 | 23,312,0 2058 | 3,1068,0 2059 | 593,372,0 2060 | 800,3053,0 2061 | 837,1595,0 2062 | 319,3106,0 2063 | 1242,787,0 2064 | 892,2308,0 2065 | 597,1146,0 2066 | 27,2591,0 2067 | 1110,2036,0 2068 | 557,2473,0 2069 | 142,1769,0 2070 | 87,3194,0 2071 | 593,558,0 2072 | 678,845,0 2073 | 1075,797,0 2074 | 736,2393,0 2075 | 1166,2167,1 2076 | 320,3054,0 2077 | 309,3050,0 2078 | 516,331,0 2079 | 514,1132,0 2080 | 361,70,0 2081 | 432,554,0 2082 | 522,1207,0 2083 | 910,902,0 2084 | 630,274,1 2085 | 602,1842,0 2086 | 553,946,0 2087 | 5,345,0 2088 | 1190,2426,1 2089 | 989,2151,0 2090 | 516,902,0 2091 | 577,3143,0 2092 | 1038,741,0 2093 | 1068,2093,0 2094 | 93,2195,0 2095 | 492,3160,0 2096 | 1101,2370,0 2097 | 1264,3003,0 2098 | 837,531,0 2099 | 267,190,0 2100 | 265,902,0 2101 | 948,2038,0 2102 | 518,998,0 2103 | 1349,1017,0 2104 | 937,1757,0 2105 | 364,145,0 2106 | 614,3175,0 2107 | 157,145,0 2108 | 614,2364,0 2109 | 991,2062,0 2110 | 614,3190,0 2111 | 412,1017,0 2112 | 1078,2139,0 2113 | 516,3032,0 2114 | 61,3046,0 2115 | 243,141,1 2116 | 695,2768,0 2117 | 145,1643,0 2118 | 225,1162,0 2119 | 1341,2851,0 2120 | 1307,716,1 2121 | 999,2265,0 2122 | 671,3036,0 2123 | 604,2165,0 2124 | 1115,1033,0 2125 | 258,1864,0 2126 | 1053,243,0 2127 | 1014,2290,0 2128 | 827,2249,0 2129 | 1066,2108,0 2130 | 1170,2392,0 2131 | 670,3183,0 2132 | 710,2139,0 2133 | 609,3034,0 2134 | 573,752,0 2135 | 1286,2336,1 2136 | 798,3110,0 2137 | 851,783,0 2138 | 738,2018,0 2139 | 820,2355,0 2140 | 193,1524,1 2141 | 541,1687,0 2142 | 1290,562,0 2143 | 382,939,0 2144 | 512,2271,0 2145 | 1198,1350,0 2146 | 1360,1908,0 2147 | 958,986,0 2148 | 532,922,1 2149 | 1024,1032,0 2150 | 515,2996,0 2151 | 786,2568,1 2152 | 562,192,0 2153 | 883,3161,0 2154 | 513,2527,0 2155 | 333,1675,0 2156 | 161,1491,1 2157 | 310,3103,1 2158 | 580,1099,0 2159 | 27,1183,0 2160 | 452,338,0 2161 | 713,978,0 2162 | 827,2758,1 2163 | 518,915,1 2164 | 586,922,0 2165 | 753,2877,0 2166 | 586,3034,0 2167 | 276,2143,0 2168 | 875,2615,0 2169 | 749,345,1 2170 | 407,1647,0 2171 | 606,1011,0 2172 | 219,1769,0 2173 | 364,1696,0 2174 | 333,891,0 2175 | 1198,1214,0 2176 | 1305,137,0 2177 | 762,396,1 2178 | 1322,452,0 2179 | 982,2762,1 2180 | 
578,831,0 2181 | 333,1183,0 2182 | 3,199,0 2183 | 105,3021,0 2184 | 642,448,0 2185 | 47,2987,0 2186 | 517,984,0 2187 | 251,2959,0 2188 | 674,256,1 2189 | 1262,367,0 2190 | 804,2122,0 2191 | 1177,2518,0 2192 | 361,977,0 2193 | 1174,354,0 2194 | 496,111,0 2195 | 197,1999,0 2196 | 25,1445,1 2197 | 1090,892,0 2198 | 515,3009,0 2199 | 837,508,0 2200 | 142,1817,0 2201 | 1246,1223,0 2202 | 930,960,0 2203 | 462,202,0 2204 | 1020,2589,0 2205 | 837,371,0 2206 | 556,906,0 2207 | 359,2894,0 2208 | 518,2877,0 2209 | 137,1890,0 2210 | 196,76,0 2211 | 578,3220,0 2212 | 967,277,0 2213 | 44,2851,0 2214 | 322,452,0 2215 | 322,521,0 2216 | 350,242,0 2217 | 355,1210,0 2218 | 444,1432,0 2219 | 858,2877,0 2220 | 207,2287,0 2221 | 145,1132,0 2222 | 497,2877,0 2223 | 592,950,0 2224 | 496,2818,0 2225 | 808,1052,0 2226 | 886,2309,1 2227 | 574,1975,0 2228 | 1328,1114,0 2229 | 762,1447,0 2230 | 468,3005,0 2231 | 858,3215,0 2232 | 1149,920,1 2233 | 584,959,1 2234 | 883,2671,0 2235 | 465,242,0 2236 | 265,917,0 2237 | 112,3110,0 2238 | 583,2844,0 2239 | 393,1852,0 2240 | 695,1548,0 2241 | 330,1201,0 2242 | 781,2206,0 2243 | 1169,1168,0 2244 | 781,3204,0 2245 | 644,3107,0 2246 | 285,2856,0 2247 | 632,1514,0 2248 | 91,3145,0 2249 | 363,563,0 2250 | 1262,893,0 2251 | 27,2853,0 2252 | 44,1666,0 2253 | 18,2873,0 2254 | 93,1497,0 2255 | 1084,2108,0 2256 | 892,372,0 2257 | 829,1495,0 2258 | 527,960,0 2259 | 971,1475,0 2260 | 544,111,0 2261 | 26,902,0 2262 | 1322,3161,0 2263 | 126,2958,0 2264 | 1170,2802,0 2265 | 278,3108,0 2266 | 270,192,0 2267 | 225,3204,0 2268 | 587,1514,0 2269 | 1051,3062,0 2270 | 606,1014,0 2271 | 837,946,0 2272 | 593,242,0 2273 | 1142,932,0 2274 | 923,986,0 2275 | 578,1075,0 2276 | 578,1585,0 2277 | 79,1190,0 2278 | 324,2982,0 2279 | 558,621,0 2280 | 1141,995,0 2281 | 492,2150,0 2282 | 788,2347,0 2283 | 883,1612,0 2284 | 497,1170,0 2285 | 204,1961,0 2286 | 396,3063,0 2287 | 1272,2490,0 2288 | 587,1170,0 2289 | 336,3037,0 2290 | 646,2501,0 2291 | 642,1927,0 2292 | 342,570,0 2293 | 1241,3092,0 2294 | 518,3194,0 2295 | -------------------------------------------------------------------------------- /Structured/Amazon-Google/valid.csv: -------------------------------------------------------------------------------- 1 | ltable_id,rtable_id,label 2 | 897,75,0 3 | 827,2333,0 4 | 600,2176,0 5 | 1284,2765,0 6 | 396,3087,0 7 | 248,3126,0 8 | 105,2313,0 9 | 618,352,0 10 | 609,3063,0 11 | 773,1159,0 12 | 597,1213,0 13 | 135,1791,0 14 | 496,1169,0 15 | 586,1447,0 16 | 333,68,0 17 | 607,2584,0 18 | 371,2165,0 19 | 685,246,1 20 | 884,118,1 21 | 79,2865,0 22 | 1349,242,0 23 | 517,324,0 24 | 512,137,0 25 | 514,1137,0 26 | 645,3093,0 27 | 26,430,0 28 | 543,257,0 29 | 523,2607,0 30 | 1114,3082,0 31 | 1066,204,0 32 | 1091,2779,1 33 | 737,145,0 34 | 975,2714,0 35 | 346,1534,0 36 | 283,3110,0 37 | 549,1150,0 38 | 1038,1115,0 39 | 1258,2720,0 40 | 551,108,0 41 | 804,1086,0 42 | 1176,784,0 43 | 1246,1264,0 44 | 230,1709,0 45 | 118,2793,0 46 | 504,955,0 47 | 1218,3006,0 48 | 793,1106,0 49 | 124,2770,0 50 | 331,2082,0 51 | 737,2839,0 52 | 605,2833,0 53 | 1266,1448,0 54 | 876,371,0 55 | 520,3133,0 56 | 654,2524,0 57 | 88,369,0 58 | 1190,2839,0 59 | 1115,535,0 60 | 642,324,0 61 | 103,1659,1 62 | 580,1823,0 63 | 548,2367,0 64 | 436,2782,0 65 | 763,618,0 66 | 61,2111,1 67 | 1215,2206,0 68 | 412,372,0 69 | 511,111,0 70 | 531,1569,0 71 | 128,843,0 72 | 1282,324,0 73 | 54,1600,0 74 | 602,1853,0 75 | 549,3003,0 76 | 512,1129,0 77 | 705,1955,0 78 | 918,561,0 79 | 858,1129,0 80 | 1105,2960,0 81 | 516,324,0 82 | 138,3148,0 83 | 859,183,0 84 
| 858,1636,0 85 | 938,829,0 86 | 1176,2174,0 87 | 1053,2345,0 88 | 15,2141,0 89 | 67,2674,0 90 | 465,430,1 91 | 622,1584,0 92 | 917,773,0 93 | 853,508,0 94 | 1147,928,1 95 | 690,333,0 96 | 930,358,0 97 | 602,566,0 98 | 6,1923,1 99 | 1252,986,0 100 | 1217,553,1 101 | 1342,646,1 102 | 512,2961,0 103 | 346,793,0 104 | 244,1001,0 105 | 23,3003,0 106 | 518,1170,0 107 | 1262,4,0 108 | 562,955,0 109 | 614,2848,0 110 | 827,2505,0 111 | 90,2792,0 112 | 760,2824,0 113 | 1074,659,0 114 | 823,1472,0 115 | 1078,2550,0 116 | 600,926,0 117 | 1196,1438,0 118 | 1137,2163,0 119 | 925,3016,0 120 | 21,3000,0 121 | 960,575,0 122 | 542,222,1 123 | 970,1517,0 124 | 875,2590,0 125 | 881,1821,0 126 | 661,939,0 127 | 584,239,0 128 | 204,35,0 129 | 124,1837,1 130 | 305,3106,0 131 | 501,33,0 132 | 146,3123,0 133 | 654,352,0 134 | 406,2894,0 135 | 1272,1057,0 136 | 991,196,0 137 | 432,801,0 138 | 1341,3014,0 139 | 1186,242,0 140 | 490,831,0 141 | 423,2867,0 142 | 558,307,0 143 | 890,2313,0 144 | 816,2803,0 145 | 614,1512,0 146 | 937,2216,0 147 | 133,80,0 148 | 290,2997,0 149 | 356,2842,1 150 | 602,151,0 151 | 1272,1433,0 152 | 1035,401,0 153 | 1342,1058,0 154 | 608,176,0 155 | 431,1132,0 156 | 248,1899,0 157 | 541,1652,0 158 | 348,2847,0 159 | 362,3106,0 160 | 1225,1090,0 161 | 678,307,0 162 | 827,2243,0 163 | 611,1823,0 164 | 578,915,0 165 | 632,2959,0 166 | 1130,1612,0 167 | 1130,216,0 168 | 235,291,0 169 | 852,2688,0 170 | 431,125,0 171 | 496,36,0 172 | 657,234,1 173 | 947,2939,0 174 | 560,1708,0 175 | 6,25,0 176 | 1275,1083,0 177 | 244,777,0 178 | 229,1525,0 179 | 64,2099,0 180 | 371,3190,0 181 | 539,3087,0 182 | 587,3027,0 183 | 516,1017,0 184 | 738,2050,0 185 | 1148,2141,0 186 | 1080,2096,1 187 | 1229,263,0 188 | 1182,801,1 189 | 836,2700,1 190 | 827,1355,0 191 | 1025,790,1 192 | 274,3023,0 193 | 582,357,0 194 | 349,2862,1 195 | 1130,893,0 196 | 605,1816,0 197 | 348,3126,0 198 | 1186,960,0 199 | 1244,1068,1 200 | 753,2766,0 201 | 251,111,0 202 | 1353,3014,0 203 | 320,3038,0 204 | 261,67,0 205 | 1151,1000,0 206 | 1190,2402,0 207 | 635,1127,0 208 | 270,2315,0 209 | 211,2512,0 210 | 113,1687,1 211 | 135,2513,0 212 | 541,1075,0 213 | 465,3188,0 214 | 907,3106,0 215 | 361,324,0 216 | 615,558,0 217 | 832,1548,0 218 | 693,2217,0 219 | 930,182,0 220 | 620,369,0 221 | 382,1418,0 222 | 683,2076,0 223 | 588,1017,0 224 | 211,685,0 225 | 587,998,0 226 | 609,3055,0 227 | 90,1640,1 228 | 1145,2518,0 229 | 1262,986,0 230 | 1241,551,0 231 | 602,331,0 232 | 364,324,0 233 | 1321,3009,0 234 | 515,3023,0 235 | 448,1692,0 236 | 1246,1261,0 237 | 632,2791,0 238 | 1262,176,0 239 | 1072,2561,0 240 | 910,372,0 241 | 502,1757,0 242 | 389,2227,0 243 | 599,2062,0 244 | 1273,172,0 245 | 204,1793,0 246 | 558,2501,0 247 | 90,2112,0 248 | 851,1039,0 249 | 918,3212,0 250 | 1252,263,0 251 | 618,203,0 252 | 452,2154,0 253 | 497,2821,0 254 | 1001,2040,0 255 | 284,2789,0 256 | 861,2282,0 257 | 981,307,0 258 | 807,3105,1 259 | 606,362,0 260 | 722,1085,0 261 | 553,954,0 262 | 755,376,0 263 | 363,1009,0 264 | 393,1129,0 265 | 208,110,0 266 | 554,906,0 267 | 520,1214,0 268 | 109,260,0 269 | 699,1121,0 270 | 415,3072,0 271 | 1001,2587,0 272 | 852,1701,0 273 | 370,2826,1 274 | 504,368,0 275 | 773,3027,0 276 | 859,801,0 277 | 779,2044,0 278 | 547,831,0 279 | 348,3093,0 280 | 447,2157,1 281 | 114,922,0 282 | 907,1593,1 283 | 380,1210,0 284 | 26,2807,0 285 | 1139,2205,0 286 | 520,1124,0 287 | 1076,2544,0 288 | 1118,182,0 289 | 15,312,0 290 | 1160,304,0 291 | 250,2154,1 292 | 722,1072,0 293 | 113,3109,0 294 | 528,3133,0 295 | 303,2981,0 296 | 484,508,0 297 | 
861,2054,1 298 | 515,2008,0 299 | 216,531,0 300 | 1326,3221,0 301 | 1334,917,0 302 | 543,2606,0 303 | 546,1180,0 304 | 432,1054,0 305 | 899,2478,0 306 | 1174,192,0 307 | 479,2418,0 308 | 453,2072,0 309 | 1044,1118,0 310 | 991,1666,0 311 | 518,3199,0 312 | 608,75,0 313 | 592,531,0 314 | 412,1865,0 315 | 687,2369,0 316 | 550,3126,0 317 | 910,2393,0 318 | 267,1900,0 319 | 774,362,0 320 | 541,831,0 321 | 1132,1293,0 322 | 532,2843,0 323 | 1246,1312,0 324 | 548,2329,0 325 | 1269,1733,0 326 | 687,2801,0 327 | 597,531,0 328 | 961,840,0 329 | 347,2757,0 330 | 67,3155,0 331 | 93,1011,0 332 | 991,3009,0 333 | 853,1900,0 334 | 788,3036,0 335 | 1164,651,0 336 | 501,1170,0 337 | 1176,2187,0 338 | 557,182,0 339 | 273,3080,0 340 | 1022,2624,0 341 | 531,3117,0 342 | 736,1612,0 343 | 1025,292,0 344 | 1139,1347,0 345 | 581,3036,0 346 | 208,721,0 347 | 94,3110,0 348 | 481,2840,0 349 | 265,3161,0 350 | 1148,891,0 351 | 330,3045,0 352 | 1014,772,0 353 | 490,1478,0 354 | 781,1170,0 355 | 722,850,0 356 | 1279,1031,0 357 | 1176,2173,0 358 | 1272,1026,0 359 | 798,2588,1 360 | 506,904,0 361 | 616,332,0 362 | 1185,2061,0 363 | 574,915,0 364 | 829,645,0 365 | 1004,2266,0 366 | 955,929,0 367 | 145,1961,0 368 | 1249,1506,1 369 | 520,1311,0 370 | 1169,1441,0 371 | 382,182,0 372 | 523,2441,0 373 | 574,1165,0 374 | 890,2161,0 375 | 512,2853,1 376 | 174,2430,0 377 | 523,1182,0 378 | 635,3102,0 379 | 553,368,0 380 | 584,2607,0 381 | 257,1939,0 382 | 827,2296,0 383 | 1334,558,0 384 | 678,3014,0 385 | 642,521,0 386 | 897,70,0 387 | 235,732,0 388 | 646,3048,0 389 | 382,921,0 390 | 12,3027,0 391 | 967,2547,0 392 | 225,3029,0 393 | 1055,163,0 394 | 1010,3195,0 395 | 1198,1307,0 396 | 549,362,0 397 | 615,242,0 398 | 257,1769,0 399 | 1272,204,0 400 | 500,2890,0 401 | 369,2034,0 402 | 508,2998,0 403 | 973,734,0 404 | 1069,2556,0 405 | 1292,111,0 406 | 606,3199,0 407 | 608,3188,0 408 | 1174,242,0 409 | 1264,2447,0 410 | 819,2131,0 411 | 578,1760,0 412 | 652,679,0 413 | 382,521,0 414 | 837,2607,0 415 | 632,3094,0 416 | 937,140,0 417 | 823,2600,1 418 | 1223,2811,0 419 | 1153,916,1 420 | 1362,906,0 421 | 883,70,0 422 | 587,2803,0 423 | 1259,1821,0 424 | 906,2559,1 425 | 208,1569,0 426 | 497,3175,0 427 | 248,3117,0 428 | 555,546,1 429 | 349,2119,0 430 | 662,375,0 431 | 1044,1593,0 432 | 632,2920,0 433 | 557,960,0 434 | 632,2833,0 435 | 995,1351,0 436 | 565,254,0 437 | 952,2123,0 438 | 222,94,0 439 | 1032,2325,1 440 | 250,3060,0 441 | 582,316,0 442 | 514,3126,0 443 | 196,3114,0 444 | 781,2863,0 445 | 481,494,0 446 | 572,371,0 447 | 559,580,0 448 | 895,2540,1 449 | 518,2411,0 450 | 198,3072,0 451 | 753,16,0 452 | 348,2419,0 453 | 607,1441,1 454 | 314,330,0 455 | 557,4,0 456 | 1160,1599,0 457 | 605,1162,0 458 | 548,3145,0 459 | 804,2756,0 460 | 562,984,0 461 | 614,47,0 462 | 118,1183,0 463 | 1134,936,1 464 | 1115,2752,0 465 | 49,2076,0 466 | 511,1001,0 467 | 1310,2525,0 468 | 236,424,0 469 | 374,3211,0 470 | 591,228,0 471 | 287,3003,0 472 | 606,1182,0 473 | 707,1084,0 474 | 939,1155,0 475 | 543,430,0 476 | 516,1564,0 477 | 625,327,0 478 | 1105,562,0 479 | 807,1159,0 480 | 382,1368,0 481 | 1290,2752,0 482 | 88,1842,0 483 | 554,1153,0 484 | 1315,1690,0 485 | 659,238,1 486 | 218,1545,0 487 | 1332,705,0 488 | 382,531,0 489 | 522,3183,0 490 | 605,945,0 491 | 260,1848,0 492 | 1323,672,1 493 | 910,354,1 494 | 547,2837,0 495 | 1321,3023,0 496 | 547,3194,0 497 | 216,151,0 498 | 773,1880,0 499 | 1246,1273,0 500 | 389,647,0 501 | 528,3080,0 502 | 270,3003,0 503 | 1212,2862,0 504 | 821,2355,0 505 | 620,2199,0 506 | 694,421,1 507 | 554,2313,0 508 | 
823,3183,0 509 | 349,2072,0 510 | 1279,3157,0 511 | 271,2195,0 512 | 1028,2325,0 513 | 752,346,1 514 | 350,331,0 515 | 1054,2356,1 516 | 1004,2745,0 517 | 1284,2061,0 518 | 941,767,0 519 | 250,314,0 520 | 1321,2804,0 521 | 546,902,0 522 | 1258,2400,0 523 | 580,3080,0 524 | 553,1792,0 525 | 498,1093,0 526 | 923,367,0 527 | 892,172,0 528 | 524,2621,0 529 | 1225,1078,0 530 | 544,2818,0 531 | 1115,3056,0 532 | 606,3029,0 533 | 1035,2343,1 534 | 208,37,0 535 | 791,3036,0 536 | 1324,1212,0 537 | 1238,1089,1 538 | 534,2070,1 539 | 1349,182,0 540 | 982,2269,1 541 | 93,1169,0 542 | 1262,1180,0 543 | 914,2993,0 544 | 412,70,0 545 | 1290,1033,1 546 | 3,1328,0 547 | 393,1864,0 548 | 1290,2426,0 549 | 781,945,0 550 | 1038,2067,0 551 | 504,946,0 552 | 930,897,1 553 | 661,3188,0 554 | 93,808,0 555 | 544,1132,0 556 | 578,1095,0 557 | 858,1473,0 558 | 971,2996,0 559 | 543,426,0 560 | 574,1095,0 561 | 587,111,0 562 | 838,1648,0 563 | 1186,182,0 564 | 34,2041,1 565 | 18,1216,1 566 | 1338,3030,0 567 | 999,2813,0 568 | 428,2145,0 569 | 1224,504,1 570 | 1244,787,0 571 | 132,364,0 572 | 892,932,0 573 | 1051,2034,0 574 | 142,728,0 575 | 543,893,0 576 | 611,3087,0 577 | 604,36,0 578 | 559,3077,0 579 | 1317,499,0 580 | 1145,1007,0 581 | 274,3009,0 582 | 616,3027,0 583 | 497,1129,0 584 | 210,1090,0 585 | 689,2019,0 586 | 50,2994,0 587 | 235,3003,0 588 | 991,2851,0 589 | 504,897,0 590 | 49,3189,0 591 | 5,937,0 592 | 981,2313,0 593 | 1174,531,0 594 | 899,1440,0 595 | 1264,1083,0 596 | 6,2524,0 597 | 562,946,0 598 | 350,977,0 599 | 623,297,1 600 | 443,348,0 601 | 1115,782,0 602 | 585,2384,0 603 | 377,2824,0 604 | 211,1075,0 605 | 1272,2927,1 606 | 27,891,0 607 | 614,2441,0 608 | 1157,913,1 609 | 620,426,0 610 | 1019,2383,1 611 | 1330,145,0 612 | 584,3093,0 613 | 981,3048,0 614 | 504,2603,1 615 | 211,2821,0 616 | 490,2847,0 617 | 1175,442,0 618 | 883,939,0 619 | 632,2468,0 620 | 412,2112,0 621 | 791,2571,1 622 | 776,337,0 623 | 1177,2847,0 624 | 244,1767,0 625 | 634,2608,0 626 | 1205,337,0 627 | 556,2008,0 628 | 229,1485,0 629 | 269,2518,0 630 | 462,364,0 631 | 1034,1566,0 632 | 925,2143,0 633 | 105,2851,0 634 | 557,1612,0 635 | 229,3214,0 636 | 974,840,1 637 | 320,2773,0 638 | 522,3126,0 639 | 736,3188,0 640 | 1264,192,0 641 | 625,1418,0 642 | 1158,1396,0 643 | 832,162,0 644 | 352,2871,1 645 | 6,68,0 646 | 29,936,0 647 | 897,1146,0 648 | 732,935,0 649 | 548,1036,0 650 | 685,993,0 651 | 195,1481,0 652 | 1113,2222,0 653 | 1021,3003,0 654 | 1133,2811,0 655 | 1177,1120,0 656 | 105,307,0 657 | 1176,2271,0 658 | 93,2513,0 659 | 527,750,0 660 | 349,3189,0 661 | 586,857,0 662 | 309,2019,0 663 | 15,2839,0 664 | 1334,182,0 665 | 895,3039,0 666 | 951,3160,0 667 | 1176,2618,0 668 | 30,176,0 669 | 892,352,0 670 | 196,198,0 671 | 1057,1614,0 672 | 44,2062,1 673 | 1170,292,0 674 | 557,1865,0 675 | 606,3187,0 676 | 511,2853,0 677 | 518,1163,0 678 | 271,1075,0 679 | 593,939,0 680 | 497,1162,0 681 | 499,1580,1 682 | 689,1020,0 683 | 974,737,0 684 | 614,2600,0 685 | 364,372,0 686 | 13,2289,0 687 | 597,946,0 688 | 493,402,0 689 | 276,307,0 690 | 1177,1159,0 691 | 1238,1060,0 692 | 210,2333,0 693 | 608,254,0 694 | 1190,891,0 695 | 67,1167,0 696 | 241,459,0 697 | 851,2720,0 698 | 736,946,0 699 | 542,2249,1 700 | 800,2077,0 701 | 565,182,0 702 | 937,134,0 703 | 270,2998,0 704 | 50,2090,1 705 | 722,2182,0 706 | 1164,3070,0 707 | 79,1439,0 708 | 578,2847,0 709 | 73,1565,1 710 | 827,1418,0 711 | 498,989,0 712 | 322,2199,1 713 | 309,1127,0 714 | 546,372,0 715 | 276,1666,0 716 | 137,1701,0 717 | 1118,452,0 718 | 892,216,0 719 | 355,1762,0 720 | 
829,2828,0 721 | 27,2793,0 722 | 645,3029,0 723 | 823,1001,0 724 | 1021,1183,0 725 | 1119,2460,0 726 | 551,1785,0 727 | 555,3106,0 728 | 738,2108,0 729 | 7,1903,0 730 | 1284,782,0 731 | 1262,1564,0 732 | 457,3151,0 733 | 531,1751,0 734 | 40,433,0 735 | 27,2766,0 736 | 890,2804,0 737 | 228,1545,0 738 | 804,2205,0 739 | 1287,2400,0 740 | 287,3160,0 741 | 1341,3009,0 742 | 355,1534,0 743 | 951,1497,0 744 | 971,307,0 745 | 466,2269,0 746 | 808,782,0 747 | 49,2133,0 748 | 365,3119,0 749 | 26,986,0 750 | 1272,2273,0 751 | 736,70,0 752 | 1355,2119,0 753 | 129,1820,0 754 | 458,3026,0 755 | 497,2816,0 756 | 837,372,0 757 | 1270,413,0 758 | 608,566,0 759 | 496,2837,0 760 | 910,1853,0 761 | 1258,1026,0 762 | 132,937,0 763 | 274,3022,0 764 | 950,327,0 765 | 988,2239,1 766 | 249,3072,0 767 | 211,29,0 768 | 735,3106,0 769 | 93,3199,0 770 | 536,2391,0 771 | 30,151,0 772 | 779,741,0 773 | 887,3039,0 774 | 400,3110,0 775 | 389,3009,1 776 | 348,998,0 777 | 1053,650,0 778 | 522,3171,0 779 | 543,368,0 780 | 524,2839,0 781 | 777,3108,0 782 | 657,1602,0 783 | 717,451,1 784 | 1003,806,1 785 | 839,1667,0 786 | 1124,2183,1 787 | 888,924,0 788 | 781,3175,0 789 | 332,3082,0 790 | 937,108,0 791 | 388,664,0 792 | 837,3161,0 793 | 114,1214,0 794 | 340,3064,0 795 | 40,3154,0 796 | 929,171,1 797 | 738,3152,0 798 | 952,651,0 799 | 550,1643,0 800 | 976,1680,0 801 | 763,223,0 802 | 158,2606,0 803 | 578,2411,0 804 | 1275,2752,0 805 | 208,88,0 806 | 102,3152,0 807 | 1258,313,0 808 | 7,1810,0 809 | 407,2973,0 810 | 1257,1062,0 811 | 1019,2998,0 812 | 631,2114,0 813 | 1176,2379,0 814 | 654,2915,0 815 | 536,1900,0 816 | 616,924,0 817 | 951,342,0 818 | 832,2777,0 819 | 1080,691,0 820 | 565,3161,0 821 | 517,444,0 822 | 1272,2765,0 823 | 562,367,0 824 | 350,917,1 825 | 605,2518,0 826 | 355,2956,0 827 | 736,448,0 828 | 405,2892,0 829 | 248,2959,0 830 | 736,939,0 831 | 436,979,0 832 | 371,3029,0 833 | 396,3033,0 834 | 944,2102,0 835 | 490,1159,0 836 | 784,953,0 837 | 1130,939,1 838 | 45,2986,0 839 | 584,3094,0 840 | 1148,330,0 841 | 1186,71,0 842 | 761,2681,1 843 | 561,2773,0 844 | 1293,352,0 845 | 883,521,0 846 | 781,1010,0 847 | 204,1935,0 848 | 722,208,0 849 | 1241,3034,0 850 | 878,194,1 851 | 431,2513,0 852 | 803,950,0 853 | 1004,2412,0 854 | 266,2517,0 855 | 6,3003,0 856 | 6,362,0 857 | 346,2866,0 858 | 1024,2435,1 859 | 3,1300,0 860 | 835,2689,1 861 | 539,0,1 862 | 229,3060,0 863 | 146,904,0 864 | 1262,430,0 865 | 531,1899,0 866 | 645,1643,0 867 | 982,969,0 868 | 748,1248,0 869 | 146,2847,0 870 | 1145,2332,0 871 | 452,2177,0 872 | 15,1142,0 873 | 93,903,0 874 | 874,1034,0 875 | 587,3171,0 876 | 1293,1447,0 877 | 146,1159,0 878 | 510,2770,0 879 | 1105,2510,1 880 | 557,372,0 881 | 1301,725,0 882 | 274,260,0 883 | 106,1979,0 884 | 214,105,0 885 | 47,2076,1 886 | 171,1686,0 887 | 271,915,0 888 | 142,1939,0 889 | 753,2195,0 890 | 208,1722,0 891 | 76,1897,0 892 | 597,2393,0 893 | 93,945,0 894 | 620,3019,1 895 | 588,369,0 896 | 497,998,0 897 | 602,893,0 898 | 1321,2851,0 899 | 1334,1865,0 900 | 273,1350,0 901 | 892,566,0 902 | 799,726,0 903 | 264,750,0 904 | 1321,3014,0 905 | 1099,2265,0 906 | 1259,2018,0 907 | 461,2361,0 908 | 1305,2833,0 909 | 32,2161,0 910 | 3,1272,0 911 | 276,832,0 912 | 869,1525,0 913 | 225,915,0 914 | 996,835,1 915 | 1145,2474,0 916 | 654,3060,0 917 | 377,793,0 918 | 620,2839,0 919 | 620,2308,1 920 | 258,1001,0 921 | 1290,1433,0 922 | 176,2851,0 923 | 1251,566,0 924 | 208,1146,0 925 | 1257,535,0 926 | 589,3152,0 927 | 512,924,0 928 | 157,3213,0 929 | 78,1538,0 930 | 1264,2927,0 931 | 93,1585,0 932 | 605,2863,0 
933 | 951,448,0 934 | 12,1014,0 935 | 1076,2584,0 936 | 1269,783,1 937 | 250,257,0 938 | 1068,3180,0 939 | 1022,1762,0 940 | 551,1708,0 941 | 222,1195,0 942 | 346,1484,0 943 | 731,260,0 944 | 436,2783,1 945 | 562,580,0 946 | 1322,182,0 947 | 621,3095,0 948 | 146,3175,0 949 | 1276,318,0 950 | 685,1177,0 951 | 608,2112,0 952 | 788,2396,0 953 | 957,1034,0 954 | 1183,333,0 955 | 43,3108,0 956 | 531,1893,0 957 | 1114,767,0 958 | 541,1095,0 959 | 808,2510,0 960 | 111,2143,0 961 | 90,2688,0 962 | 1020,163,0 963 | 119,1101,0 964 | 412,939,0 965 | 382,2167,0 966 | 515,2501,0 967 | 6,448,0 968 | 587,137,0 969 | 1251,168,0 970 | 145,1514,0 971 | 632,3220,0 972 | 892,2393,0 973 | 382,2198,0 974 | 993,2472,1 975 | 536,2860,0 976 | 412,99,0 977 | 795,2334,0 978 | 605,2850,0 979 | 1020,759,0 980 | 23,3060,0 981 | 848,2670,1 982 | 1078,2040,0 983 | 1349,531,0 984 | 731,2804,0 985 | 784,3207,0 986 | 1056,2055,0 987 | 242,720,0 988 | 516,521,0 989 | 518,2332,0 990 | 1238,1085,0 991 | 1130,1763,0 992 | 1050,760,1 993 | 3,1139,0 994 | 798,3106,0 995 | 97,2127,0 996 | 827,2921,0 997 | 695,2778,1 998 | 1050,2589,0 999 | 438,2215,1 1000 | 113,2513,0 1001 | 654,192,0 1002 | 137,1613,0 1003 | 542,2911,0 1004 | 497,2480,0 1005 | 1349,1842,0 1006 | 512,192,0 1007 | 1022,1171,0 1008 | 1118,1186,0 1009 | 1196,2397,0 1010 | 26,368,0 1011 | 66,1574,1 1012 | 522,2364,0 1013 | 531,1014,0 1014 | 518,2850,0 1015 | 1043,3065,0 1016 | 1190,1447,0 1017 | 197,1978,0 1018 | 376,1181,0 1019 | 792,492,1 1020 | 315,2879,0 1021 | 1350,2912,0 1022 | 753,375,0 1023 | 79,3144,0 1024 | 620,951,0 1025 | 1322,70,0 1026 | 523,2818,0 1027 | 803,921,0 1028 | 523,332,0 1029 | 250,2199,0 1030 | 174,1484,1 1031 | 624,2201,0 1032 | 531,1961,0 1033 | 865,2163,0 1034 | 970,59,0 1035 | 1147,1600,0 1036 | 971,832,0 1037 | 107,1900,0 1038 | 543,1180,0 1039 | 276,423,0 1040 | 520,1367,0 1041 | 145,332,0 1042 | 274,2996,0 1043 | 79,305,0 1044 | 1085,920,0 1045 | 592,428,0 1046 | 265,2649,0 1047 | 389,2062,0 1048 | 24,1439,1 1049 | 1114,2289,0 1050 | 543,902,0 1051 | 606,1497,0 1052 | 1332,878,0 1053 | 532,1121,0 1054 | 348,2877,0 1055 | 40,2018,0 1056 | 933,3106,0 1057 | 371,2518,1 1058 | 1043,709,0 1059 | 561,3063,0 1060 | 553,939,0 1061 | 934,2771,0 1062 | 587,3204,0 1063 | 1334,324,0 1064 | 696,418,1 1065 | 611,3063,0 1066 | 701,3154,0 1067 | 520,1036,0 1068 | 701,370,0 1069 | 570,839,0 1070 | 507,2992,0 1071 | 512,332,0 1072 | 1279,3145,0 1073 | 1259,1441,0 1074 | 389,2851,0 1075 | 587,3220,0 1076 | 287,2853,0 1077 | 1264,1040,1 1078 | 567,2318,0 1079 | 1282,309,0 1080 | 874,1385,0 1081 | 973,2219,0 1082 | 30,735,0 1083 | 597,1865,0 1084 | 1005,2363,1 1085 | 565,1678,0 1086 | 1145,1014,0 1087 | 1101,2238,0 1088 | 1360,1810,0 1089 | 490,367,0 1090 | 890,3022,0 1091 | 433,1112,0 1092 | 543,1135,0 1093 | 1323,2157,0 1094 | 859,2516,0 1095 | 852,2793,0 1096 | 753,1512,0 1097 | 1328,640,1 1098 | 736,192,0 1099 | 492,29,0 1100 | 593,984,0 1101 | 490,1129,0 1102 | 531,1132,0 1103 | 522,2847,1 1104 | 1264,2926,0 1105 | 516,151,0 1106 | 302,3000,0 1107 | 139,1017,0 1108 | 510,2388,0 1109 | 93,1514,0 1110 | 543,192,0 1111 | 82,1607,0 1112 | 587,1105,0 1113 | 605,2316,0 1114 | 300,2985,0 1115 | 1290,2927,0 1116 | 670,224,1 1117 | 1306,1538,0 1118 | 930,338,0 1119 | 67,1074,0 1120 | 121,1981,0 1121 | 1191,147,0 1122 | 1115,2426,0 1123 | 851,2468,0 1124 | 1322,369,0 1125 | 425,2540,0 1126 | 140,582,0 1127 | 554,423,0 1128 | 383,3119,0 1129 | 506,381,1 1130 | 1266,2418,0 1131 | 892,2391,0 1132 | 544,3093,0 1133 | 30,371,0 1134 | 727,3148,0 1135 | 374,574,0 1136 
| 372,424,0 1137 | 341,547,0 1138 | 355,2624,0 1139 | 490,3175,0 1140 | 396,922,0 1141 | 1074,3207,0 1142 | 722,2789,0 1143 | 547,2480,0 1144 | 218,1137,0 1145 | 892,190,0 1146 | 355,1439,0 1147 | 1001,2139,0 1148 | 279,2067,0 1149 | 422,2374,1 1150 | 615,176,0 1151 | 572,257,0 1152 | 615,3161,0 1153 | 264,3166,0 1154 | 519,3171,0 1155 | 1078,2030,0 1156 | 1177,2877,0 1157 | 873,3080,0 1158 | 539,1402,0 1159 | 514,239,0 1160 | 265,1213,0 1161 | 1287,2008,0 1162 | 909,169,1 1163 | 233,1178,0 1164 | 1111,327,0 1165 | 1263,1022,0 1166 | 620,531,0 1167 | 316,1534,0 1168 | 1290,2400,0 1169 | 597,1763,0 1170 | 779,2432,0 1171 | 1091,779,1 1172 | 250,1675,0 1173 | 1334,946,0 1174 | 910,371,0 1175 | 1177,904,0 1176 | 779,2490,0 1177 | 480,2056,0 1178 | 1176,898,1 1179 | 753,2792,0 1180 | 512,2816,0 1181 | 604,2867,0 1182 | 823,1207,0 1183 | 1026,2247,0 1184 | 490,1852,0 1185 | 1289,2507,0 1186 | 614,2867,0 1187 | 388,2145,0 1188 | 814,2070,0 1189 | 1157,905,0 1190 | 803,338,0 1191 | 1051,2770,0 1192 | 79,1484,0 1193 | 250,309,0 1194 | 350,368,0 1195 | 341,167,0 1196 | 865,2197,0 1197 | 885,119,1 1198 | 1238,1448,1 1199 | 598,955,1 1200 | 587,2600,0 1201 | 665,1022,0 1202 | 515,3022,0 1203 | 549,533,0 1204 | 1038,2587,0 1205 | 807,2408,0 1206 | 1244,1139,0 1207 | 1269,2447,0 1208 | 689,2002,0 1209 | 638,2324,0 1210 | 597,190,0 1211 | 320,3055,1 1212 | 804,2183,0 1213 | 645,1137,0 1214 | 799,2614,0 1215 | 347,3221,0 1216 | 26,192,0 1217 | 428,2801,0 1218 | 79,2866,0 1219 | 638,2811,0 1220 | 1062,2126,0 1221 | 622,1607,0 1222 | 910,151,0 1223 | 396,3092,0 1224 | 550,959,0 1225 | 1290,2920,0 1226 | 560,1817,0 1227 | 208,1777,0 1228 | 105,3048,0 1229 | 541,3171,0 1230 | 606,100,0 1231 | 1242,1066,1 1232 | 250,3164,0 1233 | 146,3190,0 1234 | 1305,100,0 1235 | 1241,3133,0 1236 | 1014,2060,0 1237 | 1310,2801,0 1238 | 1083,1178,0 1239 | 1093,294,0 1240 | 661,558,0 1241 | 1276,491,1 1242 | 145,1011,0 1243 | 388,3144,0 1244 | 276,2313,0 1245 | 490,1886,0 1246 | 823,2803,0 1247 | 1329,3014,0 1248 | 26,426,0 1249 | 816,1207,0 1250 | 396,264,0 1251 | 737,263,0 1252 | 124,3007,0 1253 | 1207,905,0 1254 | 1196,2743,0 1255 | 26,566,0 1256 | 605,2821,0 1257 | 3,857,0 1258 | 1118,354,0 1259 | 707,2783,0 1260 | 1287,2752,0 1261 | 991,845,0 1262 | 228,2513,0 1263 | 1293,2141,0 1264 | 775,2811,0 1265 | 45,435,0 1266 | 516,3188,0 1267 | 518,904,0 1268 | 606,3171,0 1269 | 82,1584,1 1270 | 265,176,0 1271 | 235,1485,0 1272 | 520,1350,0 1273 | 522,2480,0 1274 | 738,2841,0 1275 | 578,2497,0 1276 | 962,11,0 1277 | 731,2851,0 1278 | 412,354,0 1279 | 547,1170,0 1280 | 975,2219,1 1281 | 858,1075,0 1282 | 642,331,0 1283 | 1170,2889,0 1284 | 827,2473,0 1285 | 222,2686,0 1286 | 858,2821,0 1287 | 52,3108,0 1288 | 1182,805,0 1289 | 713,398,1 1290 | 722,2250,0 1291 | 1016,780,1 1292 | 225,2518,0 1293 | 518,2165,0 1294 | 808,2161,0 1295 | 8,2136,0 1296 | 557,521,0 1297 | 587,1207,0 1298 | 641,993,0 1299 | 211,3190,0 1300 | 918,2379,0 1301 | 556,260,0 1302 | 1031,2857,1 1303 | 689,403,0 1304 | 590,996,0 1305 | 492,1933,0 1306 | 371,2850,0 1307 | 145,2818,0 1308 | 1096,2135,1 1309 | 1177,2850,1 1310 | 662,1875,0 1311 | 1329,2851,0 1312 | 1145,3171,0 1313 | 858,3194,0 1314 | 261,1494,1 1315 | 490,1864,0 1316 | 614,1162,0 1317 | 1103,2352,0 1318 | 369,2388,0 1319 | 823,1075,0 1320 | 145,1843,0 1321 | 594,1161,0 1322 | 980,849,1 1323 | 930,986,0 1324 | 753,1182,0 1325 | 635,3207,0 1326 | 764,1047,0 1327 | 1353,3009,0 1328 | 615,71,0 1329 | 620,172,0 1330 | 592,192,0 1331 | 473,2485,1 1332 | 570,2410,0 1333 | 753,286,0 1334 | 586,1124,0 1335 
| 705,2273,0 1336 | 1169,1153,0 1337 | 536,1135,0 1338 | 30,2860,0 1339 | 322,369,0 1340 | 751,2734,0 1341 | 1115,2009,0 1342 | 271,3199,0 1343 | 723,1497,0 1344 | 1284,1062,1 1345 | 484,372,0 1346 | 735,2026,0 1347 | 44,621,0 1348 | 1316,1020,0 1349 | 645,332,1 1350 | 736,3161,0 1351 | 608,960,0 1352 | 803,1865,0 1353 | 852,100,0 1354 | 580,2843,0 1355 | 541,3220,0 1356 | 1222,560,1 1357 | 559,986,0 1358 | 875,2373,0 1359 | 431,1751,0 1360 | 1282,355,1 1361 | 361,531,0 1362 | 823,1473,0 1363 | 202,2214,0 1364 | 806,2581,1 1365 | 492,2314,0 1366 | 1075,2516,0 1367 | 384,2950,0 1368 | 469,3190,0 1369 | 859,2124,0 1370 | 465,372,0 1371 | 1138,1114,0 1372 | 316,3095,1 1373 | 517,3094,0 1374 | 264,1135,0 1375 | 562,151,0 1376 | 866,1007,0 1377 | 309,3157,0 1378 | 923,430,0 1379 | 264,444,0 1380 | 385,2949,0 1381 | 604,2518,0 1382 | 883,1166,0 1383 | 139,75,0 1384 | 208,1901,0 1385 | 106,1980,1 1386 | 1103,802,0 1387 | 513,277,0 1388 | 804,2802,0 1389 | 441,1078,0 1390 | 662,2853,0 1391 | 516,426,0 1392 | 70,1560,0 1393 | 711,3207,0 1394 | 812,2627,1 1395 | 312,214,0 1396 | 361,368,0 1397 | 727,1113,0 1398 | 251,1643,0 1399 | 287,2998,0 1400 | 678,3009,0 1401 | 524,2811,0 1402 | 27,100,0 1403 | 1157,339,0 1404 | 617,663,0 1405 | 1272,1080,0 1406 | 683,2278,0 1407 | 553,955,0 1408 | 594,381,0 1409 | 1099,1362,1 1410 | 605,2474,0 1411 | 1257,2009,0 1412 | 1257,204,0 1413 | 33,2029,1 1414 | 52,3106,0 1415 | 883,182,0 1416 | 599,621,0 1417 | 602,1612,0 1418 | 142,1962,0 1419 | 904,152,1 1420 | 881,2018,0 1421 | 229,2839,0 1422 | 274,3048,1 1423 | 852,1525,0 1424 | 605,2877,0 1425 | 512,1585,0 1426 | 1289,940,0 1427 | 502,134,0 1428 | 1258,2426,0 1429 | 1001,2081,0 1430 | 487,2287,0 1431 | 592,3032,0 1432 | 661,984,0 1433 | 253,3159,0 1434 | 1190,1083,1 1435 | 957,787,0 1436 | 1287,2399,0 1437 | 737,2606,0 1438 | 565,75,0 1439 | 779,2009,0 1440 | 897,254,0 1441 | 638,1409,0 1442 | 1160,442,1 1443 | 323,3108,0 1444 | 1044,3039,0 1445 | 527,2395,0 1446 | 303,2892,0 1447 | 499,2446,0 1448 | 578,3194,0 1449 | 1290,535,0 1450 | 605,2847,0 1451 | 1078,1342,0 1452 | 614,3171,0 1453 | 274,307,0 1454 | 176,260,0 1455 | 604,904,0 1456 | 518,2821,0 1457 | 1185,1427,0 1458 | 628,677,0 1459 | 615,566,0 1460 | 553,1213,0 1461 | 542,228,0 1462 | 216,897,0 1463 | 521,1773,0 1464 | 881,2067,0 1465 | 533,485,0 1466 | 635,251,0 1467 | 1148,2791,0 1468 | 574,1162,0 1469 | 1190,2400,0 1470 | 522,1643,0 1471 | 780,3036,0 1472 | 233,1168,0 1473 | 616,111,0 1474 | 584,1132,0 1475 | 519,998,0 1476 | 273,1307,0 1477 | 1225,1779,0 1478 | 562,426,0 1479 | 246,3109,0 1480 | 821,2057,0 1481 | 412,921,0 1482 | 402,3047,0 1483 | 1043,755,1 1484 | 578,3190,0 1485 | 9,3115,0 1486 | 1196,462,0 1487 | 604,2959,0 1488 | 238,1939,0 1489 | 547,2867,0 1490 | 781,2518,0 1491 | 536,444,0 1492 | 814,2507,1 1493 | 44,836,0 1494 | 900,2166,0 1495 | 465,893,0 1496 | 840,1085,0 1497 | 1074,3050,0 1498 | 917,775,0 1499 | 218,2853,0 1500 | 121,1154,0 1501 | 1196,576,1 1502 | 528,2976,0 1503 | 1114,2336,0 1504 | 1054,2557,0 1505 | 195,3114,0 1506 | 1108,1416,0 1507 | 93,1010,0 1508 | 512,2818,0 1509 | 557,2471,0 1510 | 593,3161,0 1511 | 1124,288,0 1512 | 492,3109,0 1513 | 571,2847,0 1514 | 27,1525,0 1515 | 324,1449,1 1516 | 13,1362,0 1517 | 1002,2352,0 1518 | 907,3108,0 1519 | 577,3038,0 1520 | 985,819,1 1521 | 946,456,0 1522 | 1049,2024,0 1523 | 1334,254,0 1524 | 229,1183,0 1525 | 616,2441,0 1526 | 971,2851,0 1527 | 146,1105,0 1528 | 512,2766,0 1529 | 1169,906,1 1530 | 1245,518,1 1531 | 581,1118,0 1532 | 13,1547,0 1533 | 185,1550,1 1534 | 
176,307,0 1535 | 1170,850,0 1536 | 562,953,0 1537 | 520,1265,0 1538 | 579,2114,0 1539 | 369,3047,0 1540 | 1114,2265,0 1541 | 496,273,0 1542 | 146,1132,0 1543 | 789,3119,0 1544 | 1272,2426,0 1545 | 677,1684,1 1546 | 492,1011,0 1547 | 539,1265,0 1548 | 507,279,0 1549 | 129,2914,0 1550 | 355,1190,0 1551 | 26,911,0 1552 | 524,948,0 1553 | 923,192,0 1554 | 265,324,0 1555 | 522,3175,0 1556 | 511,1805,0 1557 | 749,344,0 1558 | 1284,2432,0 1559 | 1262,954,0 1560 | 547,1880,0 1561 | 287,312,0 1562 | 638,1418,0 1563 | 143,1815,0 1564 | 526,3105,0 1565 | 685,2166,0 1566 | 1208,540,1 1567 | 597,75,0 1568 | 547,2195,0 1569 | 722,2802,1 1570 | 570,794,0 1571 | 282,3189,0 1572 | 816,2654,1 1573 | 773,2474,0 1574 | 26,2791,0 1575 | 586,2853,0 1576 | 947,889,1 1577 | 800,3196,0 1578 | 804,2325,0 1579 | 1105,1052,0 1580 | 571,3091,0 1581 | 139,369,0 1582 | 588,331,0 1583 | 883,2393,1 1584 | 971,3009,0 1585 | 544,332,0 1586 | 276,3021,0 1587 | 1318,3005,0 1588 | 605,1512,0 1589 | 934,2525,0 1590 | 1251,977,0 1591 | 941,2265,1 1592 | 645,3126,0 1593 | 548,2296,0 1594 | 1324,219,0 1595 | 250,2112,0 1596 | 646,196,0 1597 | 645,1843,0 1598 | 564,2532,0 1599 | 606,1075,0 1600 | 1114,2813,0 1601 | 1020,772,1 1602 | 500,300,1 1603 | 274,2008,0 1604 | 208,1913,0 1605 | 781,1182,0 1606 | 1330,1900,0 1607 | 543,984,0 1608 | 1101,208,0 1609 | 666,218,1 1610 | 1174,257,0 1611 | 1273,932,0 1612 | 204,1796,0 1613 | 32,3044,0 1614 | 762,3101,0 1615 | 587,2863,0 1616 | 978,842,1 1617 | 1174,954,0 1618 | 1355,2908,1 1619 | 1264,2432,0 1620 | 208,1904,0 1621 | 618,3214,0 1622 | 1057,3185,0 1623 | 728,438,1 1624 | 1110,1441,0 1625 | 270,2377,0 1626 | 508,362,0 1627 | 820,2057,0 1628 | 531,1803,0 1629 | 604,2833,0 1630 | 1340,2325,0 1631 | 695,157,0 1632 | 93,3187,0 1633 | 544,3094,0 1634 | 548,3033,0 1635 | 638,944,0 1636 | 910,1146,0 1637 | 587,2497,0 1638 | 361,242,0 1639 | 558,2062,0 1640 | 271,3171,0 1641 | 853,1696,0 1642 | 114,7,1 1643 | 832,2778,0 1644 | 992,891,0 1645 | 1185,462,0 1646 | 1087,988,1 1647 | 632,800,0 1648 | 1002,2933,0 1649 | 321,1933,0 1650 | 547,904,0 1651 | 248,77,0 1652 | 705,1026,0 1653 | 1024,2438,1 1654 | 516,1213,0 1655 | 986,2370,0 1656 | 79,1164,0 1657 | 304,2980,1 1658 | 951,1816,0 1659 | 208,34,0 1660 | 1008,1583,0 1661 | 93,2818,0 1662 | 779,1052,0 1663 | 508,352,0 1664 | 840,1510,1 1665 | 763,1020,0 1666 | 1145,2867,0 1667 | 950,426,0 1668 | 851,1433,0 1669 | 137,56,0 1670 | 76,731,0 1671 | 204,1863,0 1672 | 632,2524,0 1673 | 145,1169,0 1674 | 264,2606,1 1675 | 648,1210,0 1676 | 837,984,0 1677 | 753,3027,0 1678 | 804,1404,0 1679 | 968,2310,1 1680 | 536,3213,0 1681 | 719,446,1 1682 | 953,2941,0 1683 | 272,1575,0 1684 | 860,2176,0 1685 | 204,1848,0 1686 | 522,3190,0 1687 | 993,3178,0 1688 | 1322,242,0 1689 | 871,996,1 1690 | 597,917,0 1691 | 1074,2019,0 1692 | 1042,802,0 1693 | 622,234,0 1694 | 593,955,0 1695 | 523,3160,0 1696 | 105,1655,1 1697 | 614,1095,0 1698 | 248,1011,0 1699 | 761,2112,0 1700 | 615,257,0 1701 | 653,535,0 1702 | 615,1763,0 1703 | 93,1014,0 1704 | 29,2095,0 1705 | 173,1967,0 1706 | 558,423,0 1707 | 515,3021,1 1708 | 474,1636,0 1709 | 518,2833,0 1710 | 349,548,0 1711 | 781,2867,0 1712 | 1121,943,1 1713 | 896,813,0 1714 | 1171,980,0 1715 | 84,1625,1 1716 | 40,1441,0 1717 | 502,2216,0 1718 | 1185,2044,0 1719 | 1038,2984,0 1720 | 518,1686,0 1721 | 1174,558,0 1722 | 826,1085,0 1723 | 608,3032,0 1724 | 941,1870,0 1725 | 109,2313,0 1726 | 208,3138,0 1727 | 1362,608,1 1728 | 26,923,0 1729 | 522,904,0 1730 | 287,1188,0 1731 | 602,1763,0 1732 | 609,892,0 1733 | 1329,2008,0 1734 | 
930,932,1 1735 | 975,59,0 1736 | 547,1275,0 1737 | 40,906,0 1738 | 910,338,0 1739 | 357,2841,1 1740 | 208,87,0 1741 | 500,2050,0 1742 | 126,3159,0 1743 | 527,3213,0 1744 | 1189,571,1 1745 | 1072,2348,0 1746 | 470,1979,0 1747 | 1149,912,0 1748 | 1314,971,1 1749 | 1258,1035,0 1750 | 1157,2365,0 1751 | 497,1207,0 1752 | 588,893,0 1753 | 516,2393,0 1754 | 1325,3146,0 1755 | 328,430,0 1756 | 851,1427,0 1757 | 1130,960,0 1758 | 139,911,0 1759 | 1164,1103,0 1760 | 6,2177,0 1761 | 208,749,0 1762 | 1176,2766,0 1763 | 582,996,0 1764 | 853,2391,0 1765 | 1183,252,0 1766 | 520,2530,0 1767 | 1005,3070,0 1768 | 538,929,0 1769 | 522,915,0 1770 | 858,3175,0 1771 | 503,1586,1 1772 | 837,1667,0 1773 | 148,1939,0 1774 | 714,2784,1 1775 | 971,2501,0 1776 | 1252,371,0 1777 | 610,2547,0 1778 | 605,2364,0 1779 | 191,1533,0 1780 | 644,2816,0 1781 | 873,3033,0 1782 | 1097,1547,0 1783 | 1169,744,0 1784 | 602,946,1 1785 | 892,521,0 1786 | 781,915,0 1787 | 127,1825,1 1788 | 443,1514,0 1789 | 554,2008,0 1790 | 541,2165,0 1791 | 341,416,0 1792 | 145,273,0 1793 | 1033,762,1 1794 | 622,1602,0 1795 | 881,2036,0 1796 | 809,3036,0 1797 | 1258,1080,0 1798 | 511,2833,0 1799 | 564,2114,1 1800 | 736,368,0 1801 | 305,2977,0 1802 | 1021,1188,0 1803 | 587,2959,0 1804 | 897,531,0 1805 | 271,3215,0 1806 | 271,1643,0 1807 | 958,902,0 1808 | 535,286,0 1809 | 1185,1955,0 1810 | 1289,1193,0 1811 | 574,2821,0 1812 | 26,216,0 1813 | 92,2214,0 1814 | 443,2818,0 1815 | 707,2785,1 1816 | 1020,2060,0 1817 | 504,257,0 1818 | 26,352,0 1819 | 1262,371,0 1820 | 985,3106,0 1821 | 536,986,0 1822 | 758,391,1 1823 | 552,3186,0 1824 | 335,3040,1 1825 | 47,3189,0 1826 | 1349,70,0 1827 | 293,2971,0 1828 | 991,1475,0 1829 | 605,3220,0 1830 | 79,1583,1 1831 | 1021,291,0 1832 | 709,1240,0 1833 | 1008,1164,0 1834 | 504,151,0 1835 | 163,3096,1 1836 | 1128,938,1 1837 | 819,1093,0 1838 | 208,43,0 1839 | 1066,2018,0 1840 | 146,3171,0 1841 | 1229,986,0 1842 | 539,1099,0 1843 | 287,2792,0 1844 | 739,424,1 1845 | 753,1169,0 1846 | 289,1497,0 1847 | 1078,535,0 1848 | 320,1036,0 1849 | 508,1188,0 1850 | 1234,1344,1 1851 | 614,2195,0 1852 | 945,2822,1 1853 | 541,1129,0 1854 | 1057,2467,0 1855 | 1190,1033,0 1856 | 1014,163,0 1857 | 912,2561,1 1858 | 597,99,0 1859 | 707,2217,0 1860 | 597,216,0 1861 | 553,176,0 1862 | 122,1724,1 1863 | 44,2501,0 1864 | 546,639,0 1865 | 1074,3102,0 1866 | 588,977,0 1867 | 471,508,0 1868 | 819,904,0 1869 | 1196,1436,0 1870 | 683,2835,0 1871 | 1269,2400,1 1872 | 932,240,0 1873 | 514,1861,0 1874 | 837,485,0 1875 | 1118,1612,0 1876 | 868,201,1 1877 | 605,1105,0 1878 | 93,1120,0 1879 | 574,904,0 1880 | 826,861,0 1881 | 1001,2561,0 1882 | 452,172,0 1883 | 145,3093,0 1884 | 1272,313,0 1885 | 1281,3025,0 1886 | 975,987,1 1887 | 1349,3161,0 1888 | 333,533,0 1889 | 1025,357,0 1890 | 531,808,0 1891 | 515,2851,0 1892 | 213,42,0 1893 | 604,998,0 1894 | 1024,2752,0 1895 | 320,523,0 1896 | 26,531,0 1897 | 1080,2836,0 1898 | 890,2851,0 1899 | 1276,3216,0 1900 | 40,2283,0 1901 | 541,998,0 1902 | 753,945,0 1903 | 572,430,0 1904 | 577,246,0 1905 | 216,176,0 1906 | 930,324,0 1907 | 1122,304,0 1908 | 476,3034,0 1909 | 279,1157,0 1910 | 678,2501,0 1911 | 379,2644,1 1912 | 23,1437,1 1913 | 1066,1157,0 1914 | 593,444,0 1915 | 1289,2459,0 1916 | 925,2147,0 1917 | 518,2257,0 1918 | 653,2008,0 1919 | 1215,3225,0 1920 | 490,891,0 1921 | 973,1517,1 1922 | 531,53,0 1923 | 271,111,0 1924 | 950,2755,0 1925 | 72,3108,0 1926 | 605,3027,0 1927 | 271,1182,0 1928 | 87,3171,0 1929 | 910,257,0 1930 | 488,319,0 1931 | 991,621,0 1932 | 105,621,0 1933 | 823,2497,0 1934 | 
960,2296,1 1935 | 539,2874,1 1936 | 1322,1678,0 1937 | 456,2024,0 1938 | 250,2177,0 1939 | 108,1695,1 1940 | 1269,2082,0 1941 | 591,2236,0 1942 | 923,368,0 1943 | 736,1853,0 1944 | 26,939,0 1945 | 824,1407,0 1946 | 1333,346,0 1947 | 1066,2093,0 1948 | 395,3106,0 1949 | 395,3119,0 1950 | 225,2474,0 1951 | 52,3103,0 1952 | 642,960,0 1953 | 274,3016,0 1954 | 1276,3134,0 1955 | 490,1473,0 1956 | 225,1163,0 1957 | 298,430,0 1958 | 271,3204,0 1959 | 1362,1168,0 1960 | 554,3022,0 1961 | 271,3091,0 1962 | 116,1784,1 1963 | 827,2274,0 1964 | 1005,208,0 1965 | 146,1808,0 1966 | 755,189,0 1967 | 547,1014,0 1968 | 1171,538,0 1969 | 30,355,0 1970 | 602,917,0 1971 | 35,2039,1 1972 | 497,3091,0 1973 | 544,1643,0 1974 | 270,367,0 1975 | 582,457,1 1976 | 914,1787,0 1977 | 1246,1361,0 1978 | 518,2518,0 1979 | 512,959,0 1980 | 346,2824,0 1981 | 356,495,0 1982 | 109,3014,0 1983 | 781,2877,0 1984 | 561,1138,0 1985 | 474,2816,0 1986 | 1238,2452,0 1987 | 235,2998,0 1988 | 26,960,0 1989 | 1058,2053,1 1990 | 604,1585,0 1991 | 620,800,0 1992 | 587,3215,0 1993 | 341,3051,0 1994 | 12,1010,0 1995 | 227,87,0 1996 | 287,2793,0 1997 | 779,1033,0 1998 | 618,3060,0 1999 | 139,354,0 2000 | 13,2813,0 2001 | 135,1007,0 2002 | 722,1167,0 2003 | 699,1600,0 2004 | 350,950,0 2005 | 605,904,0 2006 | 616,1105,0 2007 | 93,1007,0 2008 | 165,1982,0 2009 | 287,891,0 2010 | 251,1747,0 2011 | 1055,2589,0 2012 | 346,1190,0 2013 | 559,953,0 2014 | 779,1062,0 2015 | 951,1183,0 2016 | 995,2460,0 2017 | 225,1182,0 2018 | 1172,1711,0 2019 | 876,331,0 2020 | 23,2591,0 2021 | 348,3187,0 2022 | 827,1089,0 2023 | 816,2681,0 2024 | 866,1485,0 2025 | 470,2469,0 2026 | 876,984,0 2027 | 881,433,0 2028 | 1332,632,1 2029 | 361,369,0 2030 | 1353,2143,0 2031 | 1244,284,0 2032 | 267,2649,0 2033 | 1160,2118,0 2034 | 653,2926,0 2035 | 955,366,0 2036 | 839,190,0 2037 | 574,1860,0 2038 | 21,359,0 2039 | 412,324,0 2040 | 497,1010,0 2041 | 1196,2060,0 2042 | 1321,2996,0 2043 | 1287,782,0 2044 | 391,3106,0 2045 | 118,49,0 2046 | 271,2863,0 2047 | 1258,1028,0 2048 | 604,945,0 2049 | 578,1170,0 2050 | 105,260,0 2051 | 511,2847,0 2052 | 255,135,0 2053 | 558,2008,0 2054 | 923,1564,0 2055 | 544,273,0 2056 | 956,948,0 2057 | 1287,3056,0 2058 | 1038,2081,0 2059 | 592,25,0 2060 | 606,533,0 2061 | 364,2391,1 2062 | 1021,2808,0 2063 | 270,1150,0 2064 | 592,368,0 2065 | 1093,363,0 2066 | 733,2024,0 2067 | 350,893,0 2068 | 223,1171,0 2069 | 139,1853,0 2070 | 923,3161,0 2071 | 501,1973,0 2072 | 514,111,0 2073 | 605,1652,0 2074 | 1049,3143,0 2075 | 1148,2998,0 2076 | 1275,2438,0 2077 | 562,176,0 2078 | 231,1875,0 2079 | 627,2056,0 2080 | 109,2996,0 2081 | 781,1636,0 2082 | 186,2316,0 2083 | 271,1120,0 2084 | 1185,2100,0 2085 | 853,3213,0 2086 | 308,537,0 2087 | 364,190,0 2088 | 356,2810,0 2089 | 1241,3080,0 2090 | 1103,2478,0 2091 | 750,2821,0 2092 | 730,436,1 2093 | 88,1763,0 2094 | 839,2606,0 2095 | 993,1265,0 2096 | 270,1734,0 2097 | 139,984,0 2098 | 354,3106,0 2099 | 1105,782,1 2100 | 786,2070,0 2101 | 423,2821,0 2102 | 87,1609,1 2103 | 1132,432,0 2104 | 527,508,0 2105 | 1264,2399,1 2106 | 482,2922,0 2107 | 1321,845,0 2108 | 62,1506,0 2109 | 710,402,1 2110 | 957,1066,0 2111 | 851,1083,0 2112 | 589,3216,0 2113 | 1351,596,1 2114 | 484,182,0 2115 | 135,367,0 2116 | 971,621,0 2117 | 1305,1864,0 2118 | 538,228,0 2119 | 1148,3172,0 2120 | 1244,788,0 2121 | 465,1678,0 2122 | 1281,1030,0 2123 | 508,3003,0 2124 | 382,3017,0 2125 | 1242,1300,0 2126 | 86,1538,0 2127 | 930,921,0 2128 | 132,1889,0 2129 | 56,1445,0 2130 | 444,2056,0 2131 | 938,875,1 2132 | 1225,3089,0 2133 | 271,998,0 
2134 | 618,1447,0 2135 | 354,3119,0 2136 | 1262,216,0 2137 | 832,2786,0 2138 | 1308,2188,0 2139 | 382,75,0 2140 | 1186,977,0 2141 | 993,1307,0 2142 | 565,324,1 2143 | 1257,2573,0 2144 | 1136,1600,0 2145 | 502,140,0 2146 | 605,3199,0 2147 | 458,558,0 2148 | 465,923,0 2149 | 606,2816,0 2150 | 547,1162,0 2151 | 1005,2192,0 2152 | 1112,965,0 2153 | 1287,2421,0 2154 | 646,3016,0 2155 | 866,1188,0 2156 | 124,3062,0 2157 | 1269,1080,0 2158 | 371,2480,0 2159 | 803,71,0 2160 | 350,1150,0 2161 | 265,263,0 2162 | 883,452,0 2163 | 1305,777,0 2164 | 951,3003,0 2165 | 21,1434,1 2166 | 912,2857,0 2167 | 690,252,1 2168 | 680,1112,0 2169 | 213,81,0 2170 | 350,182,0 2171 | 778,2542,0 2172 | 607,277,0 2173 | 874,857,0 2174 | 1190,1062,0 2175 | 604,831,0 2176 | 314,3070,0 2177 | 532,3063,0 2178 | 859,803,1 2179 | 654,1447,0 2180 | 662,1803,0 2181 | 703,2787,1 2182 | 371,2867,0 2183 | 220,90,0 2184 | 1016,2133,0 2185 | 283,3119,0 2186 | 1038,1181,0 2187 | 26,394,0 2188 | 265,946,0 2189 | 445,2161,1 2190 | 719,3039,0 2191 | 945,3110,0 2192 | 871,357,0 2193 | 1146,892,0 2194 | 522,2518,0 2195 | 382,372,0 2196 | 329,3,0 2197 | 244,2423,0 2198 | 1031,2561,0 2199 | 1094,976,1 2200 | 716,1644,0 2201 | 225,1636,0 2202 | 322,911,0 2203 | 858,2803,0 2204 | 927,178,1 2205 | 1209,542,1 2206 | 1299,2944,0 2207 | 553,372,0 2208 | 208,41,0 2209 | 610,2027,1 2210 | 27,2998,0 2211 | 993,3002,0 2212 | 1287,2435,0 2213 | 724,272,0 2214 | 36,2037,1 2215 | 506,1120,0 2216 | 266,2389,0 2217 | 608,531,1 2218 | 210,2122,0 2219 | 105,2501,0 2220 | 289,2688,0 2221 | 1144,180,0 2222 | 588,1853,0 2223 | 29,1127,0 2224 | 522,1636,0 2225 | 46,2074,0 2226 | 559,355,0 2227 | 699,415,1 2228 | 309,964,0 2229 | 113,1095,0 2230 | 592,309,0 2231 | 139,921,0 2232 | 1350,2678,0 2233 | 317,2947,0 2234 | 518,2441,0 2235 | 1036,2564,0 2236 | 675,1178,0 2237 | 128,2255,0 2238 | 497,3194,0 2239 | 216,465,0 2240 | 539,3033,0 2241 | 827,2194,0 2242 | 1264,1955,0 2243 | 874,1300,0 2244 | 249,2902,0 2245 | 823,3027,0 2246 | 731,3048,0 2247 | 194,1494,0 2248 | 910,368,0 2249 | 157,1696,0 2250 | 930,984,0 2251 | 981,647,0 2252 | 431,100,0 2253 | 553,3161,0 2254 | 587,1902,0 2255 | 689,251,1 2256 | 1022,2866,0 2257 | 1127,2370,0 2258 | 1020,760,0 2259 | 602,192,0 2260 | 517,2860,0 2261 | 499,1104,0 2262 | 940,876,1 2263 | 1108,2509,0 2264 | 372,2237,0 2265 | 642,2860,0 2266 | 109,196,0 2267 | 333,2377,0 2268 | 815,803,0 2269 | 382,2391,0 2270 | 12,3117,0 2271 | 761,124,0 2272 | 1053,758,0 2273 | 558,1655,0 2274 | 642,176,0 2275 | 597,960,0 2276 | 350,71,0 2277 | 847,2673,0 2278 | 562,371,0 2279 | 465,897,0 2280 | 611,3055,0 2281 | 890,307,0 2282 | 658,230,1 2283 | 821,2032,0 2284 | 827,1078,0 2285 | 736,960,0 2286 | 606,1512,0 2287 | 1362,1153,0 2288 | 689,3139,0 2289 | 642,2393,0 2290 | 1016,3189,0 2291 | 661,1213,0 2292 | 654,448,0 2293 | 498,2547,1 2294 | 587,915,0 2295 | -------------------------------------------------------------------------------- /data/Amazon-Google.info: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/windofshadow/GraphER/16ad1630fb7c768357a248069b23580a1d4cc237/data/Amazon-Google.info -------------------------------------------------------------------------------- /data/ind.Amazon-Google.adj: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/windofshadow/GraphER/16ad1630fb7c768357a248069b23580a1d4cc237/data/ind.Amazon-Google.adj 
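Note: data/Amazon-Google.info and data/ind.Amazon-Google.adj are binary artifacts, so only their download links are listed above; both can also be (re)generated by graph_att.py below, which writes them with torch.save and pickle.dump. The following is a minimal sketch (not part of the original repo) for inspecting them after graph_att.py has been run for Amazon-Google; the paths and printed fields simply mirror the save() routine in graph_att.py.

# sketch: load the generated ER-graph artifacts (assumes the default Amazon-Google paths)
import pickle as pkl
import torch

info = torch.load('data/Amazon-Google.info')            # dict written by save() in graph_att.py
# (newer PyTorch versions may require torch.load(..., weights_only=False))
print(info['tableA_len'], info['tableB_len'], info['vocab_size'])
print(len(info['data']['train']), len(info['data']['test']))

with open('data/ind.Amazon-Google.adj', 'rb') as f:      # scipy.sparse CSR adjacency matrix
    adj = pkl.load(f)
print(adj.shape, adj.nnz)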
-------------------------------------------------------------------------------- /graph_att.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pickle as pkl 3 | import scipy.sparse as sp 4 | from math import log 5 | import math 6 | import sys 7 | import torch 8 | import csv 9 | from nltk.corpus import stopwords 10 | from nltk.tokenize import word_tokenize 11 | 12 | 13 | if len(sys.argv) != 2: 14 | sys.exit("Usage: python graph_att.py <dataset>") 15 | 16 | datasets = ['Amazon-Google', 'DBLP-GoogleScholar'] 17 | # build corpus 18 | dataset = sys.argv[1] 19 | schema = [0,0,1] # 0: compositional attribute 1: non-compositional or singular 20 | 21 | if dataset not in datasets: 22 | sys.exit("wrong dataset name") 23 | 24 | # initialize unknown words by uniform(-0.25,0.25) 25 | def _add_unknown_words_by_uniform(k): 26 | return np.random.uniform(-0.25, 0.25, k).round(6).tolist() 27 | 28 | def _load_embedding(vocab,path, embed_dim): 29 | glove = [] 30 | glove2id = {} 31 | print("**************** loading pre_trained word embeddings *******************") 32 | with open(path, 'r', encoding="utf-8") as f: 33 | lines = f.readlines() 34 | for line in lines: 35 | values = line.split(" ") 36 | word = values[0] 37 | glove2id[word] = len(glove2id) 38 | vector = [float(val) for val in values[1:]] 39 | glove.append(vector) 40 | embedding = [] 41 | for i in range(len(vocab)): 42 | word = list(vocab.keys())[list(vocab.values()).index(i)] 43 | if word in glove2id: 44 | embedding.append(glove[glove2id[word]]) 45 | else: 46 | embedding.append(_add_unknown_words_by_uniform(embed_dim)) 47 | pretrained_emb = torch.FloatTensor(embedding) 48 | return pretrained_emb 49 | 50 | def load_fasttext(vocab, fname, embed_dim): # unused fastText alternative (call is commented out below) 51 | fin = open(fname, 'r', encoding='utf-8', newline='\n', errors='ignore') 52 | n, d = map(int, fin.readline().split()) 53 | data = {} 54 | for line in fin: 55 | tokens = line.rstrip().split(' ') 56 | data[tokens[0]] = list(map(float, tokens[1:])) 57 | embedding = [] 58 | for i in range(len(vocab)): 59 | word = list(vocab.keys())[list(vocab.values()).index(i)] 60 | if word in data: 61 | embedding.append(data[word]) 62 | else: 63 | embedding.append(_add_unknown_words_by_uniform(embed_dim)) 64 | pretrained_emb = torch.FloatTensor(embedding) 65 | return pretrained_emb 66 | 67 | def _assign_embedding(vocab_size,embed_dim): 68 | embedding = [] 69 | for i in range(vocab_size): 70 | embedding.append(_add_unknown_words_by_uniform(embed_dim)) 71 | pretrained_emb = torch.FloatTensor(embedding) 72 | return pretrained_emb 73 | 74 | 75 | def readData(): 76 | file_a = "Structured/" + dataset + "/tableA.csv" 77 | file_b = "Structured/" + dataset + "/tableB.csv" 78 | file_train = "Structured/" + dataset + "/train.csv" 79 | file_dev = "Structured/" + dataset + "/valid.csv" 80 | file_test = "Structured/" + dataset + "/test.csv" 81 | 82 | def remove_stopwords(sent): 83 | stop_words = set(stopwords.words('english')) 84 | word_tokens = word_tokenize(sent) 85 | filtered_sentence = [w for w in word_tokens if not w in stop_words] 86 | return filtered_sentence 87 | 88 | def readTable(file): 89 | data = {} 90 | with open(file, encoding='utf-8') as f: 91 | reader = csv.reader(f) 92 | for r in reader: 93 | data[r[0]] = [remove_stopwords(sent) for sent in r[1:]] 94 | #for sent in r[1:]: 95 | # fsent = remove_stopwords(sent) 96 | # data[r[0]].append(fsent) 97 | del (data['id']) 98 | return data 99 | 100 | def readMapping(file): 101 | mapping = [] 102 | with open(file) as f: 103 | reader = csv.reader(f) 104
| for r in reader: 105 | mapping.append((r[0], r[1], r[2])) 106 | del (mapping[0]) 107 | return mapping 108 | 109 | def check(mapping, ta, tb): 110 | for i in range(len(mapping)): 111 | if mapping[i][0] not in ta or mapping[i][1] not in tb: 112 | del (mapping[i]) 113 | print("!!!") 114 | 115 | table_a = readTable(file_a) 116 | table_b = readTable(file_b) 117 | train = readMapping(file_train) 118 | dev = readMapping(file_dev) 119 | test = readMapping(file_test) 120 | check(train, table_a, table_b) 121 | check(dev, table_a, table_b) 122 | check(test, table_a, table_b) 123 | return table_a, table_b, train, dev, test 124 | 125 | ta,tb,train,dev,test = readData() 126 | 127 | def merge_table(ta, tb): 128 | all = ta.copy() 129 | offset = len(ta) 130 | for (id,value) in tb.items(): 131 | new_id = offset + int(id) 132 | all[str(new_id)] = value 133 | return offset, all 134 | 135 | offset, all_doc = merge_table(ta, tb) 136 | 137 | def convert_mapping(mapping, offset, cosine_loss = True): 138 | new_mapping = [] 139 | for (x,y,l) in mapping: 140 | new_y = int(y) + offset 141 | if cosine_loss == True: 142 | if l == '0': 143 | l = '-1' 144 | new_mapping.append((int(x), new_y, int(l))) 145 | return new_mapping 146 | 147 | train = convert_mapping(train, offset) 148 | dev = convert_mapping(dev, offset) 149 | test = convert_mapping(test, offset) 150 | 151 | doc_num = len(all_doc) 152 | 153 | 154 | def handle(doc_content_list, row, col, weight, previous_layer_length, att_type_mask): 155 | 156 | # build vocab 157 | word_freq = {} 158 | word_set = set() 159 | for (id, doc_words) in doc_content_list.items(): 160 | words = doc_words 161 | for word in words: 162 | word = str(word) 163 | word_set.add(word) 164 | if word in word_freq: 165 | word_freq[word] += 1 166 | else: 167 | word_freq[word] = 1 168 | temp = [] 169 | vocab = list(word_set) 170 | if str(temp) in vocab: 171 | vocab.remove(str(temp)) 172 | vocab_size = len(vocab) 173 | 174 | # build a word to doc list 175 | word_doc_list = {} 176 | 177 | for (id, content) in doc_content_list.items(): 178 | appeared = set() 179 | for word in content: 180 | word = str(word) 181 | if word in appeared: 182 | continue 183 | if word in word_doc_list: 184 | doc_list = word_doc_list[word] 185 | doc_list.append(id) 186 | word_doc_list[word] = doc_list 187 | else: 188 | word_doc_list[word] = [id] 189 | appeared.add(word) 190 | 191 | word_doc_freq = {} 192 | for word, doc_list in word_doc_list.items(): 193 | word_doc_freq[word] = len(doc_list) 194 | 195 | word_id_map = {} 196 | for i in range(vocab_size): 197 | word_id_map[vocab[i]] = i # !!! 
local id 198 | 199 | doc_num = len(doc_content_list) 200 | 201 | def word2word_pmi(doc_content_list, att_type_mask): 202 | # word co-occurence with context windows 203 | window_size = 20 204 | windows = [] 205 | 206 | for (id, doc_words) in doc_content_list.items(): 207 | if att_type_mask[int(id)] == 1: 208 | continue 209 | words = doc_words 210 | length = len(words) 211 | if length <= window_size: 212 | windows.append(words) 213 | else: 214 | # print(length, length - window_size + 1) 215 | for j in range(length - window_size + 1): 216 | window = words[j: j + window_size] 217 | windows.append(window) 218 | # print(window) 219 | 220 | word_window_freq = {} 221 | for window in windows: 222 | appeared = set() 223 | for i in range(len(window)): 224 | word = str(window[i]) 225 | if word in appeared: 226 | continue 227 | if word in word_window_freq: 228 | word_window_freq[word] += 1 229 | else: 230 | word_window_freq[word] = 1 231 | appeared.add(word) 232 | 233 | word_pair_count = {} 234 | for window in windows: 235 | for i in range(1, len(window)): 236 | for j in range(0, i): 237 | word_i = str(window[i]) 238 | if word_i in word_id_map: 239 | word_i_id = word_id_map[word_i] 240 | word_j = str(window[j]) 241 | if word_j in word_id_map: 242 | word_j_id = word_id_map[word_j] 243 | if word_i_id == word_j_id: 244 | continue 245 | word_pair_str = str(word_i_id) + ',' + str(word_j_id) 246 | if word_pair_str in word_pair_count: 247 | word_pair_count[word_pair_str] += 1 248 | else: 249 | word_pair_count[word_pair_str] = 1 250 | # two orders 251 | word_pair_str = str(word_j_id) + ',' + str(word_i_id) 252 | if word_pair_str in word_pair_count: 253 | word_pair_count[word_pair_str] += 1 254 | else: 255 | word_pair_count[word_pair_str] = 1 256 | # pmi as weights 257 | 258 | num_window = len(windows) 259 | 260 | for key in word_pair_count: 261 | temp = key.split(',') 262 | i = int(temp[0]) 263 | j = int(temp[1]) 264 | count = word_pair_count[key] 265 | word_freq_i = word_window_freq[vocab[i]] 266 | word_freq_j = word_window_freq[vocab[j]] 267 | pmi = log((1.0 * count / num_window) / 268 | (1.0 * word_freq_i * word_freq_j / (num_window * num_window))) 269 | if pmi <= 0: 270 | continue 271 | row.append(previous_layer_length + i) 272 | col.append(previous_layer_length + j) 273 | weight.append(pmi) 274 | #return row, col, weight 275 | 276 | # handle different type: description -> word2doc; values -> compare 277 | def word2word_range_window(doc_content_list, att_type_mask): 278 | # word co-occurence with context windows 279 | cut_off = 0.5 280 | prices = [] 281 | price_ids = [] 282 | 283 | for (id, doc_words) in doc_content_list.items(): 284 | words = doc_words 285 | if att_type_mask[int(id)] == 1: 286 | prices.append(float(words[0])) 287 | price_ids.append(word_id_map[words[0]]) 288 | 289 | for i in range(len(prices)): 290 | x = prices[i] 291 | for j in range(i+1, len(prices)): 292 | y = prices[j] 293 | diff_ration = pow(x-y, 2) / (x*y) #math.abs(x - y) / math.max(x, y) 294 | if diff_ration < cut_off: 295 | row.append(previous_layer_length + price_ids[i]) 296 | col.append(previous_layer_length + price_ids[j]) 297 | weight.append(1.0 - diff_ration) 298 | print(x ,y,diff_ration) 299 | 300 | 301 | 302 | word2word_pmi(doc_content_list, att_type_mask) 303 | word2word_range_window(doc_content_list, att_type_mask) 304 | 305 | # doc word frequency 306 | doc_word_freq = {} 307 | 308 | for (doc_id, doc_words) in doc_content_list.items(): 309 | words = doc_words 310 | for word in words: 311 | if len(word) == 0: 312 | 
continue 313 | word = str(word) 314 | word_id = word_id_map[word] 315 | doc_word_str = str(doc_id) + ',' + str(word_id) 316 | if doc_word_str in doc_word_freq: 317 | doc_word_freq[doc_word_str] += 1 318 | else: 319 | doc_word_freq[doc_word_str] = 1 320 | 321 | for (id, doc_words) in doc_content_list.items(): 322 | words = doc_words 323 | doc_word_set = set() 324 | for word in words: 325 | if len(word) == 0: 326 | continue 327 | word = str(word) 328 | if word in doc_word_set: 329 | continue 330 | j = word_id_map[word] 331 | key = id + ',' + str(j) 332 | freq = doc_word_freq[key] 333 | if int(id) < previous_layer_length: 334 | row.append(int(id)) 335 | else: 336 | row.append(int(id) + vocab_size) 337 | col.append(previous_layer_length + j) 338 | idf = log(1.0 * len(doc_content_list) / 339 | word_doc_freq[vocab[j]]) 340 | #weight.append(freq * idf) !!!!!!!!!!!!!!!!!! 341 | weight.append(idf) 342 | doc_word_set.add(word) 343 | return vocab, vocab_size, word_id_map 344 | 345 | 346 | row = [] 347 | col = [] 348 | weight = [] 349 | 350 | ### handle doc-att 351 | att_type_mask = {} 352 | previous_layer_length = len(all_doc) 353 | for i in range(len(all_doc)): 354 | att_type_mask[i] = 0 355 | att_vocab, vocab_size, att_id_map = handle(all_doc, row, col, weight, previous_layer_length, att_type_mask) 356 | doc_att_embed = _assign_embedding(previous_layer_length+vocab_size, 200) 357 | node_size = previous_layer_length + vocab_size 358 | 359 | ### handle att-word 360 | att_words = {} 361 | att_type_mask = {} 362 | for (id, atts) in all_doc.items(): 363 | for i in range(len(atts)): 364 | att = atts[i] 365 | if len(att) == 0: 366 | continue 367 | att_str = str(att) 368 | id = att_id_map[att_str] 369 | id = str(previous_layer_length + id) 370 | att_type_mask[int(id)] = schema[i] 371 | att_words[id] = att 372 | previous_layer_length = node_size 373 | vocab, vocab_size, word_id_map = handle(att_words, row, col, weight, previous_layer_length, att_type_mask) 374 | token_embed = _load_embedding(word_id_map,"Embedding/glove.6B.200d.txt", 200) 375 | #token_embed = load_fasttext(word_id_map,"Embedding/wiki-news-300d-1M.vec", 300) 376 | node_size = previous_layer_length + vocab_size 377 | 378 | embedding = torch.cat((doc_att_embed, token_embed), dim=0) 379 | 380 | adj = sp.csr_matrix( 381 | (weight, (row, col)), shape=(node_size, node_size)) 382 | 383 | ## doc att 384 | doc_att = {} 385 | for (id, atts) in all_doc.items(): 386 | for att in atts: 387 | if len(att) == 0: 388 | att_id = -1 389 | else: 390 | att = str(att) 391 | att_id = att_id_map[att] + len(all_doc) 392 | if id in doc_att: 393 | doc_att[id].append(att_id) 394 | else: 395 | doc_att[id] = [att_id] 396 | 397 | ## att_words 398 | aw = {} 399 | for (id, words) in att_words.items(): 400 | aw[id] = [] 401 | for word in words: 402 | aw[id].append(word_id_map[word]+previous_layer_length) 403 | 404 | #dump 405 | def save(): 406 | data = { 407 | 'tableA_len': len(ta), 408 | 'tableB_len': len(tb), 409 | 'doc_len': len(ta) + len(tb), 410 | #'vocab': vocab, 411 | 'vocab_size': vocab_size, 412 | 'data': { 413 | 'doc_content': all_doc, 414 | 'odc_att': doc_att, 415 | 'att_words': aw, 416 | 'train': train, 417 | 'dev': dev, 418 | 'test': test, 419 | 'embedding': embedding 420 | # 'label': [self.l.word2idx[l] for l in self.valid_labels] 421 | } 422 | } 423 | torch.save(data, 'data/' + dataset + '.info') 424 | save() 425 | 426 | f = open("data/ind.{}.adj".format(dataset), 'wb') 427 | pkl.dump(adj, f) 428 | f.close() 429 | 430 | 431 | 
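Note: in graph_att.py above, word-to-word edges are weighted by positive pointwise mutual information over sliding co-occurrence windows (word2word_pmi), while document/attribute-to-token edges are weighted by IDF. Below is a minimal, self-contained sketch (not part of the repo) of the PMI weighting only; the toy windows list is an illustrative assumption, and the formula mirrors pmi = log((count / num_window) / (freq_i * freq_j / num_window^2)) from the script.

from math import log
from itertools import combinations
from collections import Counter

# toy co-occurrence windows (illustrative only)
windows = [['canon', 'camera', 'battery'], ['canon', 'battery'], ['nikon', 'camera']]
num_window = len(windows)
word_window_freq = Counter(w for win in windows for w in set(win))    # number of windows containing each word
pair_count = Counter()
for win in windows:
    for a, b in combinations(set(win), 2):
        pair_count[(a, b)] += 1
        pair_count[(b, a)] += 1                                       # count both orders, as in graph_att.py

edges = {}
for (a, b), count in pair_count.items():
    # PMI estimated over windows: log( p(a,b) / (p(a) * p(b)) )
    pmi = log((count / num_window) / (word_window_freq[a] * word_window_freq[b] / (num_window ** 2)))
    if pmi > 0:                                                       # only positive PMI becomes an edge weight
        edges[(a, b)] = pmi
print(edges)   # e.g. the ('canon', 'battery') pair survives, while ('canon', 'camera') is dropped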
-------------------------------------------------------------------------------- /highway.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | 5 | class HighwayMLP(nn.Module): 6 | 7 | def __init__(self, 8 | input_size, 9 | gate_bias=-2, 10 | activation_function=nn.functional.relu, 11 | gate_activation=nn.functional.softmax): 12 | 13 | super(HighwayMLP, self).__init__() 14 | 15 | self.activation_function = activation_function 16 | self.gate_activation = gate_activation 17 | 18 | self.normal_layer = nn.Linear(input_size, input_size) 19 | 20 | self.gate_layer = nn.Linear(input_size, input_size) 21 | self.gate_layer.bias.data.fill_(gate_bias) 22 | 23 | def forward(self, x): 24 | 25 | normal_layer_result = self.activation_function(self.normal_layer(x)) 26 | gate_layer_result = self.gate_activation(self.gate_layer(x)) 27 | 28 | multiplyed_gate_and_normal = torch.mul(normal_layer_result, gate_layer_result) 29 | multiplyed_gate_and_input = torch.mul((1 - gate_layer_result), x) 30 | 31 | return torch.add(multiplyed_gate_and_normal, 32 | multiplyed_gate_and_input) 33 | 34 | 35 | class HighwayCNN(nn.Module): 36 | 37 | def __init__(self, 38 | input_size, 39 | gate_bias=-1, 40 | activation_function=nn.functional.relu, 41 | gate_activation=nn.functional.softmax): 42 | 43 | super(HighwayCNN, self).__init__() 44 | 45 | self.activation_function = activation_function 46 | self.gate_activation = gate_activation 47 | 48 | self.normal_layer = nn.Linear(input_size, input_size) 49 | 50 | self.gate_layer = nn.Linear(input_size, input_size) 51 | self.gate_layer.bias.data.fill_(gate_bias) 52 | 53 | def forward(self, x): 54 | 55 | normal_layer_result = self.activation_function(self.normal_layer(x)) 56 | gate_layer_result = self.gate_activation(self.gate_layer(x)) 57 | 58 | multiplyed_gate_and_normal = torch.mul(normal_layer_result, gate_layer_result) 59 | multiplyed_gate_and_input = torch.mul((1 - gate_layer_result), x) 60 | 61 | return torch.add(multiplyed_gate_and_normal, 62 | multiplyed_gate_and_input) 63 | -------------------------------------------------------------------------------- /model.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | from torch.nn import Parameter 5 | from highway import HighwayMLP 6 | import math 7 | 8 | class GCN_1layer(nn.Module): 9 | def __init__(self, input_dim, hidden_dim, output_dim, adj, featureless=True): 10 | super(GCN_1layer, self).__init__() 11 | self.weights1 = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 12 | #self.weights1 = nn.Linear(input_dim, hidden_dim) 13 | #self.weights2 = Parameter(torch.zeros([hidden_dim,output_dim], dtype=torch.float, requires_grad=True)) 14 | nn.init.xavier_uniform_(self.weights1, gain=1) 15 | #nn.init.xavier_uniform_(self.weights2, gain=1) 16 | self.A = adj 17 | self.featureless = featureless 18 | self.dropout = 0.5 19 | self.dropout = nn.Dropout(self.dropout) 20 | 21 | def forward(self, X): 22 | if self.featureless != True: 23 | X = self.dropout(X) 24 | pre_matrix = torch.mm(self.A, X) 25 | else: 26 | pre_matrix = self.A 27 | layer1 = torch.mm(pre_matrix,self.weights1) 28 | #layer1 = self.weights1(self.A) 29 | #layer2 = F.relu(torch.mm(layer1,self.weights2)) 30 | return layer1 31 | 32 | class GCN_2layer(nn.Module): 33 | def __init__(self, input_dim, hidden_dim, output_dim, adj, featureless=True): 34 | 
super(GCN_2layer, self).__init__() 35 | self.weights1 = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 36 | #self.weights2 = Parameter(torch.zeros([hidden_dim,output_dim], dtype=torch.float, requires_grad=True)) 37 | nn.init.xavier_uniform_(self.weights1, gain=1) 38 | #nn.init.xavier_uniform_(self.weights2, gain=1) 39 | self.A = adj 40 | self.featureless = featureless 41 | self.dropout = 0.5 42 | self.dropout = nn.Dropout(self.dropout) 43 | 44 | def forward(self, X): 45 | if self.featureless != True: 46 | X = self.dropout(X) 47 | pre_matrix = torch.mm(self.A, X) 48 | else: 49 | pre_matrix = self.A 50 | layer1 = torch.mm(pre_matrix,self.weights1) 51 | layer2 = torch.mm(pre_matrix, layer1) 52 | #layer2 = F.relu(torch.mm(temp, self.weights2)) 53 | return layer2 54 | 55 | 56 | class Logstic_Regression(nn.Module): 57 | def __init__(self, input_dim, hidden_dim, output_dim, adj): 58 | super(Logstic_Regression, self).__init__() 59 | self.gcn = GCN_1layer(input_dim, hidden_dim, output_dim, adj) 60 | self.linear = nn.Linear(2, 1) 61 | nn.init.xavier_uniform_(self.linear.weight, gain=1) 62 | self.input_dim = input_dim 63 | 64 | def forward(self, doc_att, batch_x, batch_y, use_cuda): 65 | encoding = self.gcn.forward(torch.eye(self.input_dim)) 66 | 67 | def tuple_representation(doc_att, encoding, batch, use_cuda): 68 | collection0 = [] 69 | collection1 = [] 70 | ## compositional ## 71 | for ins in batch: 72 | v = ins.item() 73 | atts = doc_att[str(v)] 74 | if atts[1] == -1: 75 | atts[1] = atts[0] 76 | att0_idx = [atts[0]] 77 | att1_idx = [atts[1]] 78 | idx0 = torch.LongTensor(att0_idx) 79 | idx1 = torch.LongTensor(att1_idx) 80 | if use_cuda == True: 81 | idx0 = idx0.cuda() 82 | idx1 = idx1.cuda() 83 | collection0.append(torch.index_select(encoding, 0, idx0)) 84 | collection1.append(torch.index_select(encoding, 0, idx1)) 85 | return torch.cat(collection0, dim=0), torch.cat(collection1, dim=0) 86 | 87 | x0, x1 = tuple_representation(doc_att, encoding, batch_x, use_cuda) 88 | y0, y1 = tuple_representation(doc_att, encoding, batch_y, use_cuda) 89 | f0 = F.cosine_similarity(x0, y0).view(-1,1) 90 | f1 = F.cosine_similarity(x1, y1).view(-1,1) 91 | feature = torch.cat((f0, f1), dim =1) 92 | out = self.linear(feature) 93 | return F.sigmoid(out) 94 | 95 | 96 | class BiLSTM(nn.Module): 97 | def __init__(self, vocab, input_dim, hidden_dim, use_cuda, batch_size): 98 | super(BiLSTM, self).__init__() 99 | self.lstm = nn.LSTM(input_size=input_dim,hidden_size=hidden_dim//2,batch_first= True,bidirectional=True) 100 | self.embedding = nn.Embedding(vocab, input_dim,padding_idx=0) 101 | self.hidden_dim = hidden_dim 102 | self.batch_size = batch_size 103 | self.use_cuda = use_cuda 104 | self.linear = nn.Linear(input_dim, 2) 105 | self.highway_layers = nn.ModuleList([HighwayMLP(input_dim, activation_function=F.relu) 106 | for _ in range(2)]) 107 | self.softmax = nn.LogSoftmax(dim=1) 108 | 109 | 110 | 111 | def forward(self, x, y): 112 | embeds_x = self.embedding(x) 113 | lstm_out, hc = self.lstm(embeds_x) 114 | hn = hc[0] 115 | ex = torch.cat((hn[0], hn[1]), dim=1) 116 | 117 | embeds_y = self.embedding(y) 118 | lstm_out, hc = self.lstm(embeds_y) 119 | hn = hc[0] 120 | ey = torch.cat((hn[0], hn[1]), dim=1) 121 | input = ex - ey 122 | #dense2 = self.linear2(F.relu(dense1)) 123 | #predict = F.softmax(dense2,dim=1) 124 | new_input = input 125 | for current_layer in self.highway_layers: 126 | new_input = current_layer(new_input) 127 | predict = self.softmax(self.linear(new_input)) 128 | 
return predict 129 | 130 | 131 | class GCN_hw(nn.Module): 132 | def __init__(self, input_dim, hidden_dim, output_dim, adj, featureless=True): 133 | super(GCN_hw, self).__init__() 134 | self.weights1 = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 135 | nn.init.xavier_uniform_(self.weights1, gain=1) 136 | self.A = adj 137 | self.featureless = featureless 138 | self.dropout = 0.5 139 | self.dropout = nn.Dropout(self.dropout) 140 | self.linear = nn.Linear(hidden_dim, 2) 141 | self.highway_layers = nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) 142 | for _ in range(2)]) 143 | self.softmax = nn.LogSoftmax(dim=1) 144 | 145 | def forward(self, X, batch_x, batch_y, doc_att, use_cuda): 146 | if self.featureless != True: 147 | X = self.dropout(X) 148 | pre_matrix = torch.mm(self.A, X) 149 | else: 150 | pre_matrix = self.A 151 | layer1 = F.relu(torch.mm(pre_matrix,self.weights1)) 152 | 153 | ex = self.tuple_representation(doc_att, layer1, batch_x, use_cuda) 154 | ey = self.tuple_representation(doc_att, layer1, batch_y, use_cuda) 155 | predict = [] 156 | for i in range(len(ex)): 157 | temp = ex[i] - ey[i] 158 | input_pre = torch.mul(temp, temp) 159 | input = torch.div(input_pre, math.sqrt(len(input_pre))).unsqueeze(dim=0) 160 | 161 | new_input = input 162 | for current_layer in self.highway_layers: 163 | new_input = current_layer(new_input) 164 | predict.append(self.softmax(self.linear(new_input))) 165 | return torch.cat(predict, dim=0) 166 | 167 | def tuple_representation(self, doc_att, encoding, batch, use_cuda): 168 | collection = [] 169 | ## compositional ## 170 | for i in range(len(batch)): 171 | v = batch[i].item() 172 | atts = doc_att[str(v)] 173 | att_idx = [atts[0]] ## test on the first attribute 174 | idx = torch.LongTensor(att_idx) 175 | if use_cuda: 176 | idx = idx.cuda() 177 | collection.append(torch.index_select(encoding, 0, idx)) 178 | return torch.cat(collection, dim=0) 179 | 180 | class GCN_alignment(nn.Module): 181 | def __init__(self, input_dim, hidden_dim, output_dim, adj, featureless=True): 182 | super(GCN_alignment, self).__init__() 183 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 184 | nn.init.xavier_uniform_(self.embedding, gain=1) 185 | self.A = adj 186 | self.featureless = featureless 187 | self.dropout = 0.5 188 | self.dropout = nn.Dropout(self.dropout) 189 | self.linear = nn.Linear(hidden_dim*6, 2) 190 | self.att_len = 3 191 | self.highway_layers = self.init_highway(hidden_dim, self.att_len) 192 | self.softmax = nn.LogSoftmax(dim=1) 193 | def init_highway(self, hidden_dim, size): 194 | layers = nn.ModuleList() 195 | for i in range(size): 196 | layers.append( 197 | nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) 198 | for _ in range(1)])) 199 | return layers 200 | 201 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 202 | def _get_words_encoding(batch): 203 | batch_collection = [] 204 | for i in range(len(batch)): 205 | doc_collection = [] 206 | v = batch[i].item() 207 | atts = doc_att[str(v)] 208 | att_len = len(atts) 209 | for j in range(att_len): 210 | att_idx = atts[j] 211 | if att_idx == -1: 212 | doc_collection.append(encoding[v].view(1,-1)) 213 | continue 214 | #att_idx = atts[0] ## test on the first attribute 215 | words_idx = att_words[str(att_idx)] 216 | idx = torch.LongTensor(words_idx) 217 | if use_cuda: 218 | idx = idx.cuda() 219 | doc_collection.append(torch.index_select(encoding, 0, 
idx)) 220 | batch_collection.append(doc_collection) 221 | return batch_collection, att_len 222 | 223 | Q, att_len = _get_words_encoding(batch_x) 224 | A, _ = _get_words_encoding(batch_y) 225 | 226 | def align_weights(x, y): 227 | score = torch.mm(x, y.t()) 228 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 229 | 230 | def weighted_encoding(y, w): 231 | return torch.mm(w, y) 232 | 233 | batch_size = len(batch_x) 234 | EQ = [] 235 | EA = [] 236 | for i in range(batch_size): 237 | temp_Q = [] 238 | temp_A = [] 239 | for j in range(att_len): 240 | wq, wa = align_weights(Q[i][j], A[i][j]) 241 | eq = weighted_encoding(A[i][j], wq) 242 | ea = weighted_encoding(Q[i][j], wa) 243 | temp_Q.append(eq) 244 | temp_A.append(ea) 245 | EQ.append(temp_Q) 246 | EA.append(temp_A) 247 | return Q, A, EQ, EA, att_len 248 | 249 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 250 | if self.featureless != True: 251 | X = self.dropout(X) 252 | pre_matrix = torch.mm(self.A, X) 253 | else: 254 | pre_matrix = self.A 255 | layer1 = torch.mm(pre_matrix,self.embedding) 256 | #layer2 = torch.mm(pre_matrix, layer1) 257 | 258 | Q, A, EQ, EA, att_len = self.soft_attention(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 259 | batch_size = len(batch_x) 260 | predict = [] 261 | 262 | for i in range(batch_size): 263 | q = Q[i] 264 | a = A[i] 265 | eq = EQ[i] 266 | ea = EA[i] 267 | collection = [] 268 | for j in range(att_len): 269 | q_sub = q[j] - eq[j] 270 | tq = torch.mul(q_sub, q_sub) 271 | a_sub = a[j] - ea[j] 272 | ta = torch.mul(a_sub, a_sub) 273 | 274 | new_input = tq 275 | for current_layer in self.highway_layers[j]: 276 | new_input = current_layer(new_input) 277 | qr = torch.sum(new_input, dim=0) 278 | qr = torch.div(qr, math.sqrt(len(Q[i]))).unsqueeze(dim=0) 279 | 280 | new_input = ta 281 | for current_layer in self.highway_layers[j]: 282 | new_input = current_layer(new_input) 283 | ar = torch.sum(new_input, dim=0) 284 | ar = torch.div(ar, math.sqrt(len(A[i]))).unsqueeze(dim=0) 285 | 286 | collection.append(qr) 287 | collection.append(ar) 288 | #collection.append(torch.abs(qr-ar)) 289 | re = torch.cat(collection, dim=1) 290 | score = self.linear(re) 291 | predict.append(self.softmax(score)) 292 | 293 | return torch.cat(predict, dim=0) 294 | 295 | class GCN_alignment_flat(nn.Module): 296 | def __init__(self, input_dim, hidden_dim, output_dim, adj, featureless=True): 297 | super(GCN_alignment_flat, self).__init__() 298 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 299 | nn.init.xavier_uniform_(self.embedding, gain=1) 300 | self.A = adj 301 | self.featureless = featureless 302 | self.dropout = 0.5 303 | self.dropout = nn.Dropout(self.dropout) 304 | self.linear = nn.Linear(hidden_dim*4, 2) 305 | self.att_len = 3 306 | self.highway_layers = nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) for _ in range(2)]) 307 | self.softmax = nn.LogSoftmax(dim=1) 308 | 309 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 310 | def _get_words_encoding(batch): 311 | batch_collection = [] 312 | for i in range(len(batch)): 313 | v = batch[i].item() 314 | atts = doc_att[str(v)] 315 | att_len = len(atts) 316 | words_idx = [] 317 | for j in range(att_len): ###only take the first two attibute 318 | att_idx = atts[j] 319 | if att_idx == -1: 320 | continue 321 | words_idx.extend(att_words[str(att_idx)]) 322 | idx = torch.LongTensor(words_idx) 323 | if use_cuda: 324 | idx = idx.cuda() 325 
| batch_collection.append(torch.index_select(encoding, 0, idx)) 326 | return batch_collection 327 | 328 | Q = _get_words_encoding(batch_x) 329 | A = _get_words_encoding(batch_y) 330 | 331 | def align_weights(x, y): 332 | score = torch.mm(x, y.t()) 333 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 334 | 335 | def weighted_encoding(y, w): 336 | return torch.mm(w, y) 337 | 338 | batch_size = len(batch_x) 339 | EQ = [] 340 | EA = [] 341 | for i in range(batch_size): 342 | wq, wa = align_weights(Q[i], A[i]) 343 | eq = weighted_encoding(A[i], wq) 344 | ea = weighted_encoding(Q[i], wa) 345 | EQ.append(eq) 346 | EA.append(ea) 347 | return Q, A, EQ, EA, 348 | 349 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 350 | if self.featureless != True: 351 | X = self.dropout(X) 352 | pre_matrix = torch.mm(self.A, X) 353 | else: 354 | pre_matrix = self.A 355 | layer1 = torch.mm(pre_matrix,self.embedding) 356 | #layer2 = torch.mm(pre_matrix, layer1) 357 | 358 | Q, A, EQ, EA = self.soft_attention(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 359 | batch_size = len(batch_x) 360 | predict = [] 361 | 362 | for i in range(batch_size): 363 | q_sub = Q[i] - EQ[i] 364 | 365 | tq = torch.mul(q_sub, q_sub) 366 | a_sub = A[i] - EA[i] 367 | ta = torch.mul(a_sub, a_sub) 368 | 369 | #mul_q = torch.mul(Q[i], EQ[i]) 370 | #mul_a = torch.mul(A[i], EA[i]) 371 | #qr = torch.sum(mul_q, dim=0) 372 | 373 | mul_q = torch.sum(torch.mul(Q[i], EQ[i]), dim=0) 374 | mul_a = torch.sum(torch.mul(A[i], EA[i]), dim=0) 375 | mul_q = torch.div(mul_q, math.sqrt(len(Q[i]))).unsqueeze(dim=0) 376 | mul_a = torch.div(mul_a, math.sqrt(len(Q[i]))).unsqueeze(dim=0) 377 | 378 | new_input = tq 379 | for current_layer in self.highway_layers: 380 | new_input = current_layer(new_input) 381 | qr = torch.sum(new_input, dim=0) 382 | qr = torch.div(qr, math.sqrt(len(Q[i]))).unsqueeze(dim=0) 383 | 384 | 385 | new_input = ta 386 | for current_layer in self.highway_layers: 387 | new_input = current_layer(new_input) 388 | ar = torch.sum(new_input, dim=0) 389 | ar = torch.div(ar, math.sqrt(len(A[i]))).unsqueeze(dim=0) 390 | 391 | 392 | re = torch.cat((qr, mul_q, ar, mul_a), dim=1) 393 | score = self.linear(re) 394 | predict.append(self.softmax(score)) 395 | 396 | return torch.cat(predict, dim=0) 397 | 398 | class GCN_alignment_cam(nn.Module): 399 | def __init__(self, input_dim, hidden_dim, output_dim, adj, featureless=True): 400 | super(GCN_alignment_cam, self).__init__() 401 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 402 | nn.init.xavier_uniform_(self.embedding, gain=1) 403 | self.A = adj 404 | self.featureless = featureless 405 | self.dropout = 0.5 406 | self.dropout = nn.Dropout(self.dropout) 407 | self.linear = nn.Linear(hidden_dim*4, 2) 408 | self.att_len = 3 409 | #self.highway_layers = nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) for _ in range(2)]) 410 | self.preprocessing = nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) for _ in range(1)]) 411 | self.transform = nn.Linear(hidden_dim, hidden_dim) 412 | self.softmax = nn.LogSoftmax(dim=1) 413 | 414 | 415 | def _align_weights(self, x, y): 416 | score = torch.mm(x, y.t()) 417 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 418 | 419 | def _weighted_encoding(self, y, w): 420 | return torch.mm(w, y) 421 | 422 | def _bypass(self, input, layers): 423 | new_input = input 424 | for current_layer in layers: 425 | new_input = 
current_layer(new_input) 426 | return new_input 427 | 428 | def _get_words_encoding(slef, batch, encoding, doc_att, att_words, use_cuda): 429 | batch_collection = [] 430 | for i in range(len(batch)): 431 | v = batch[i].item() 432 | atts = doc_att[str(v)] 433 | att_len = len(atts) 434 | words_idx = [] 435 | for j in range(att_len): 436 | att_idx = atts[j] 437 | if att_idx == -1: 438 | continue 439 | words_idx.extend(att_words[str(att_idx)]) 440 | idx = torch.LongTensor(words_idx) 441 | if use_cuda: 442 | idx = idx.cuda() 443 | batch_collection.append(torch.index_select(encoding, 0, idx)) 444 | return batch_collection 445 | 446 | 447 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 448 | if self.featureless != True: 449 | X = self.dropout(X) 450 | pre_matrix = torch.mm(self.A, X) 451 | else: 452 | pre_matrix = self.A 453 | layer1 = torch.mm(pre_matrix,self.embedding) 454 | #layer2 = torch.mm(pre_matrix, layer1) 455 | 456 | 457 | Q = self._get_words_encoding(batch_x, layer1, doc_att, att_words, use_cuda) 458 | A = self._get_words_encoding(batch_y, layer1, doc_att, att_words, use_cuda) 459 | 460 | batch_size = len(batch_x) 461 | EQ = [] 462 | EA = [] 463 | PA = [] 464 | PQ = [] 465 | for i in range(batch_size): 466 | 467 | # preprocessing 468 | #Q_p = self._bypass(Q[i], self.preprocessing) 469 | #A_p = self._bypass(A[i], self.preprocessing) 470 | #PA.append(A_p) 471 | #PQ.append(Q_p) 472 | 473 | PQ = Q 474 | PA = A 475 | Q_p = Q[i] 476 | A_p = A[i] 477 | 478 | # alignment 479 | wq, wa = self._align_weights(Q_p, A_p) 480 | eq = self._weighted_encoding(A_p, wq) 481 | ea = self._weighted_encoding(Q_p, wa) 482 | EQ.append(eq) 483 | EA.append(ea) 484 | 485 | 486 | batch_size = len(batch_x) 487 | #features = [] 488 | predict = [] 489 | 490 | for i in range(batch_size): 491 | 492 | # Comparison 493 | # sub 494 | q_sub = PQ[i] - EQ[i] 495 | tq = torch.mul(q_sub, q_sub) 496 | qr = torch.sum(tq, dim=0) 497 | qr = torch.div(qr, math.sqrt(len(A[i]))).unsqueeze(dim=0) 498 | a_sub = PA[i] - EA[i] 499 | ta = torch.mul(a_sub, a_sub) 500 | ar = torch.sum(ta, dim=0) 501 | ar = torch.div(ar, math.sqrt(len(A[i]))).unsqueeze(dim=0) 502 | # mul 503 | mul_q = torch.sum(torch.mul(PQ[i], EQ[i]), dim=0) 504 | mul_a = torch.sum(torch.mul(PA[i], EA[i]), dim=0) 505 | mul_q = torch.div(mul_q, math.sqrt(len(Q[i]))).unsqueeze(dim=0) 506 | mul_a = torch.div(mul_a, math.sqrt(len(Q[i]))).unsqueeze(dim=0) 507 | 508 | 509 | re = torch.cat((qr, ar, mul_q, mul_a), dim=1) 510 | score = self.linear(re) 511 | predict.append(self.softmax(score)) 512 | 513 | return torch.cat(predict, dim=0) 514 | 515 | 516 | class GCN_alignment_cnn(nn.Module): 517 | def __init__(self, input_dim, hidden_dim, kernel_num, adj, pre_trained_embedding, featureless=True): 518 | super(GCN_alignment_cnn, self).__init__() 519 | #self.embedding = pre_trained_embedding 520 | #self.embedding.requires_grad = True 521 | #self.embedding = Parameter(self.embedding) 522 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 523 | nn.init.xavier_uniform_(self.embedding, gain=1) 524 | self.A = adj 525 | self.featureless = featureless 526 | self.dropout = 0.5 527 | self.dropout = nn.Dropout(self.dropout) 528 | self.kernel_num = kernel_num 529 | self.filter_sizes = [1, 2, 3, 4] 530 | #self.linear = nn.Linear(hidden_dim*4, 300) 531 | self.dense = nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 2) 532 | ''' 533 | self.dense = torch.nn.Sequential( 534 | 
torch.nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 128), 535 | torch.nn.ReLU(), 536 | torch.nn.Linear(128, 2) 537 | ) 538 | ''' 539 | self.softmax = nn.LogSoftmax(dim=1) 540 | 541 | self.encoders = [] 542 | for i, filter_size in enumerate(self.filter_sizes): 543 | enc_attr_name = "encoder_%d" % i 544 | self.__setattr__(enc_attr_name, 545 | nn.Conv2d(in_channels=1, 546 | out_channels=self.kernel_num, 547 | kernel_size=(filter_size, hidden_dim*2+10), 548 | padding=5)) 549 | self.encoders.append(self.__getattr__(enc_attr_name)) 550 | 551 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 552 | def _get_words_encoding(batch): 553 | batch_collection = [] 554 | for i in range(len(batch)): 555 | v = batch[i].item() 556 | atts = doc_att[str(v)] 557 | att_len = len(atts) 558 | words_idx = [] 559 | for j in range(att_len): 560 | att_idx = atts[j] 561 | if att_idx == -1: 562 | continue 563 | words_idx.extend(att_words[str(att_idx)]) 564 | idx = torch.LongTensor(words_idx) 565 | if use_cuda: 566 | idx = idx.cuda() 567 | batch_collection.append(torch.index_select(encoding, 0, idx)) 568 | return batch_collection 569 | 570 | Q = _get_words_encoding(batch_x) 571 | A = _get_words_encoding(batch_y) 572 | 573 | def align_weights(x, y): 574 | score = torch.mm(x, y.t()) 575 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 576 | 577 | def weighted_encoding(y, w): 578 | return torch.mm(w, y) 579 | 580 | batch_size = len(batch_x) 581 | EQ = [] 582 | EA = [] 583 | for i in range(batch_size): 584 | wq, wa = align_weights(Q[i], A[i]) 585 | eq = weighted_encoding(A[i], wq) 586 | ea = weighted_encoding(Q[i], wa) 587 | EQ.append(eq) 588 | EA.append(ea) 589 | return Q, A, EQ, EA, 590 | 591 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 592 | if self.featureless != True: 593 | X = self.dropout(X) 594 | pre_matrix = torch.mm(self.A, X) 595 | else: 596 | pre_matrix = self.A 597 | layer1 = torch.mm(pre_matrix,self.embedding) 598 | #layer2 = torch.mm(pre_matrix, layer1) 599 | #layer3 = torch.mm(pre_matrix, layer2) 600 | #layer4 = torch.mm(pre_matrix, layer3) 601 | 602 | Q, A, EQ, EA = self.soft_attention(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 603 | batch_size = len(batch_x) 604 | predict = [] 605 | 606 | n_idx = 0 607 | c_idx = 1 608 | h_idx = 2 609 | w_idx = 3 610 | 611 | for i in range(batch_size): 612 | q_sub = Q[i] - EQ[i] 613 | 614 | tq = torch.mul(q_sub, q_sub) 615 | 616 | a_sub = A[i] - EA[i] 617 | ta = torch.mul(a_sub, a_sub) 618 | 619 | 620 | mul_q = torch.mul(Q[i], EQ[i]) 621 | mul_a = torch.mul(A[i], EA[i]) 622 | 623 | qr = tq 624 | 625 | #t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 626 | t = torch.cat((qr, mul_q), dim=1).unsqueeze(dim=0) 627 | t = t.unsqueeze(c_idx) 628 | enc_outs = [] 629 | for encoder in self.encoders: 630 | f_map = encoder(t) 631 | enc_ = F.relu(f_map) 632 | k_h = enc_.size()[h_idx] 633 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 634 | enc_ = enc_.squeeze(w_idx) 635 | enc_ = enc_.squeeze(h_idx) 636 | enc_outs.append(enc_) 637 | encoding = self.dropout(torch.cat(enc_outs, 1)) 638 | #encoding = torch.cat(enc_outs, 1) 639 | q_re = F.relu(encoding) 640 | 641 | ar = ta 642 | 643 | t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 644 | t = t.unsqueeze(c_idx) 645 | enc_outs = [] 646 | for encoder in self.encoders: 647 | f_map = encoder(t) 648 | enc_ = F.relu(f_map) 649 | k_h = enc_.size()[h_idx] 650 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 651 | enc_ = enc_.squeeze(w_idx) 652 | 
enc_ = enc_.squeeze(h_idx) 653 | enc_outs.append(enc_) 654 | encoding = self.dropout(torch.cat(enc_outs, 1)) 655 | #encoding = torch.cat(enc_outs, 1) 656 | a_re = F.relu(encoding) 657 | 658 | 659 | re = torch.cat((q_re, a_re), dim=1) 660 | score = self.dense(re) 661 | F.relu(score) 662 | predict.append(self.softmax(score)) 663 | 664 | #print(self.embedding) 665 | 666 | return torch.cat(predict, dim=0) 667 | 668 | 669 | class GCN_alignment_iia(nn.Module): 670 | def __init__(self, input_dim, hidden_dim, kernel_num, adj, featureless=True): 671 | super(GCN_alignment_iia, self).__init__() 672 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 673 | nn.init.xavier_uniform_(self.embedding, gain=1) 674 | self.A = adj 675 | self.featureless = featureless 676 | self.dropout = 0.5 677 | self.dropout = nn.Dropout(self.dropout) 678 | self.kernel_num = kernel_num 679 | self.filter_sizes = [1, 2] 680 | #self.linear = nn.Linear(hidden_dim*4, 300) 681 | self.dense = nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 2) 682 | ''' 683 | self.dense = torch.nn.Sequential( 684 | torch.nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 128), 685 | torch.nn.ReLU(), 686 | torch.nn.Linear(128, 2) 687 | ) 688 | ''' 689 | self.softmax = nn.LogSoftmax(dim=1) 690 | 691 | self.encoders = [] 692 | for i, filter_size in enumerate(self.filter_sizes): 693 | enc_attr_name = "encoder_%d" % i 694 | self.__setattr__(enc_attr_name, 695 | nn.Conv2d(in_channels=1, 696 | out_channels=self.kernel_num, 697 | kernel_size=(filter_size, hidden_dim*2+2), 698 | padding=1 699 | )) 700 | self.encoders.append(self.__getattr__(enc_attr_name)) 701 | 702 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 703 | def _get_words_encoding(batch): 704 | record_collection = [] 705 | att_collection = [] 706 | token_collection = [] 707 | for i in range(len(batch)): 708 | v = batch[i].item() 709 | atts = doc_att[str(v)] 710 | att_len = len(atts) 711 | words_idx = [] 712 | ### record ### 713 | idx = torch.LongTensor([v]) 714 | if use_cuda: 715 | idx = idx.cuda() 716 | record_collection.append(torch.index_select(encoding, 0, idx)) 717 | atts_idx = [] 718 | #words_idx.append(v) 719 | for j in range(att_len): 720 | att_idx = atts[j] 721 | if att_idx == -1: 722 | continue 723 | atts_idx.append(att_idx) 724 | ### token 725 | words_idx.append(att_idx) 726 | words_idx.extend(att_words[str(att_idx)]) 727 | 728 | ### att ### 729 | idx = torch.LongTensor(atts_idx) 730 | if use_cuda: 731 | idx = idx.cuda() 732 | att_collection.append(torch.index_select(encoding, 0, idx)) 733 | #token 734 | idx = torch.LongTensor(words_idx) 735 | if use_cuda: 736 | idx = idx.cuda() 737 | token_collection.append(torch.index_select(encoding, 0, idx)) 738 | return token_collection, att_collection, record_collection 739 | 740 | Q, aQ, rQ = _get_words_encoding(batch_x) 741 | A, aA, rA = _get_words_encoding(batch_y) 742 | 743 | def align_weights(x, y): 744 | score = torch.mm(x, y.t()) 745 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 746 | 747 | def weighted_encoding(y, w): 748 | return torch.mm(w, y) 749 | 750 | batch_size = len(batch_x) 751 | EQ = [] 752 | EaQ = [] 753 | ErQ = [] 754 | EA = [] 755 | EaA = [] 756 | ErA= [] 757 | 758 | for i in range(batch_size): 759 | wq, wa = align_weights(Q[i], A[i]) 760 | eq = weighted_encoding(A[i], wq) 761 | ea = weighted_encoding(Q[i], wa) 762 | EQ.append(eq) 763 | EA.append(ea) 764 | waq, waa = align_weights(aQ[i], aA[i]) 765 | eaq = 
weighted_encoding(aA[i], waq) 766 | eaa = weighted_encoding(aQ[i], waa) 767 | EaQ.append(eaq) 768 | EaA.append(eaa) 769 | ErA.append(rQ[i]) 770 | ErQ.append(rA[i]) 771 | return Q, A, EQ, EA, aQ, aA, EaQ, EaA, rQ, rA, ErQ, ErA 772 | 773 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 774 | if self.featureless != True: 775 | X = self.dropout(X) 776 | pre_matrix = torch.mm(self.A, X) 777 | else: 778 | pre_matrix = self.A 779 | layer1 = torch.mm(pre_matrix,self.embedding) 780 | #layer2 = torch.mm(pre_matrix, layer1) 781 | 782 | Q, A, EQ, EA, aQ, aA, EaQ, EaA, rQ, rA, ErQ, ErA = self.soft_attention(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 783 | batch_size = len(batch_x) 784 | predict = [] 785 | 786 | n_idx = 0 787 | c_idx = 1 788 | h_idx = 2 789 | w_idx = 3 790 | 791 | for i in range(batch_size): 792 | 793 | ## TOKEN layer 794 | q_sub = Q[i] - EQ[i] 795 | 796 | tq = torch.mul(q_sub, q_sub) 797 | 798 | a_sub = A[i] - EA[i] 799 | ta = torch.mul(a_sub, a_sub) 800 | 801 | 802 | mul_q = torch.mul(Q[i], EQ[i]) 803 | mul_a = torch.mul(A[i], EA[i]) 804 | 805 | qr = tq 806 | 807 | #t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 808 | t = torch.cat((qr, mul_q), dim=1).unsqueeze(dim=0) 809 | t = t.unsqueeze(c_idx) 810 | enc_outs = [] 811 | for encoder in self.encoders: 812 | f_map = encoder(t) 813 | enc_ = F.relu(f_map) 814 | k_h = enc_.size()[h_idx] 815 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 816 | enc_ = enc_.squeeze(w_idx) 817 | enc_ = enc_.squeeze(h_idx) 818 | enc_outs.append(enc_) 819 | encoding = self.dropout(torch.cat(enc_outs, 1)) 820 | #encoding = torch.cat(enc_outs, 1) 821 | q_re = F.relu(encoding) 822 | 823 | ar = ta 824 | 825 | t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 826 | t = t.unsqueeze(c_idx) 827 | enc_outs = [] 828 | for encoder in self.encoders: 829 | f_map = encoder(t) 830 | enc_ = F.relu(f_map) 831 | k_h = enc_.size()[h_idx] 832 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 833 | enc_ = enc_.squeeze(w_idx) 834 | enc_ = enc_.squeeze(h_idx) 835 | enc_outs.append(enc_) 836 | encoding = self.dropout(torch.cat(enc_outs, 1)) 837 | #encoding = torch.cat(enc_outs, 1) 838 | a_re = F.relu(encoding) 839 | token_re = torch.cat((q_re, a_re), dim=1) 840 | 841 | 842 | ## attribute layer 843 | q_sub = aQ[i] - EaQ[i] 844 | tq = torch.mul(q_sub, q_sub) 845 | 846 | a_sub = aA[i] - EaA[i] 847 | ta = torch.mul(a_sub, a_sub) 848 | 849 | mul_q = torch.mul(aQ[i], EaQ[i]) 850 | mul_a = torch.mul(aA[i], EaA[i]) 851 | 852 | qr = tq 853 | 854 | # t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 855 | t = torch.cat((qr, mul_q), dim=1).unsqueeze(dim=0) 856 | t = t.unsqueeze(c_idx) 857 | enc_outs = [] 858 | for encoder in self.encoders: 859 | f_map = encoder(t) 860 | enc_ = F.relu(f_map) 861 | k_h = enc_.size()[h_idx] 862 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 863 | enc_ = enc_.squeeze(w_idx) 864 | enc_ = enc_.squeeze(h_idx) 865 | enc_outs.append(enc_) 866 | encoding = self.dropout(torch.cat(enc_outs, 1)) 867 | # encoding = torch.cat(enc_outs, 1) 868 | q_re = F.relu(encoding) 869 | 870 | ar = ta 871 | 872 | t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 873 | t = t.unsqueeze(c_idx) 874 | enc_outs = [] 875 | for encoder in self.encoders: 876 | f_map = encoder(t) 877 | enc_ = F.relu(f_map) 878 | k_h = enc_.size()[h_idx] 879 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 880 | enc_ = enc_.squeeze(w_idx) 881 | enc_ = enc_.squeeze(h_idx) 882 | enc_outs.append(enc_) 883 | encoding = self.dropout(torch.cat(enc_outs, 1)) 884 | # encoding = 
torch.cat(enc_outs, 1) 885 | a_re = F.relu(encoding) 886 | att_re = torch.cat((q_re, a_re), dim=1) 887 | 888 | ### record layer 889 | q_sub = rQ[i] - ErQ[i] 890 | tq = torch.mul(q_sub, q_sub) 891 | 892 | a_sub = rA[i] - ErA[i] 893 | ta = torch.mul(a_sub, a_sub) 894 | 895 | mul_q = torch.mul(rQ[i], ErQ[i]) 896 | mul_a = torch.mul(rA[i], ErA[i]) 897 | 898 | qr = tq 899 | 900 | # t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 901 | t = torch.cat((qr, mul_q), dim=1).unsqueeze(dim=0) 902 | t = t.unsqueeze(c_idx) 903 | enc_outs = [] 904 | for encoder in self.encoders: 905 | f_map = encoder(t) 906 | enc_ = F.relu(f_map) 907 | k_h = enc_.size()[h_idx] 908 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 909 | enc_ = enc_.squeeze(w_idx) 910 | enc_ = enc_.squeeze(h_idx) 911 | enc_outs.append(enc_) 912 | encoding = self.dropout(torch.cat(enc_outs, 1)) 913 | # encoding = torch.cat(enc_outs, 1) 914 | q_re = F.relu(encoding) 915 | 916 | ar = ta 917 | 918 | t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 919 | t = t.unsqueeze(c_idx) 920 | enc_outs = [] 921 | for encoder in self.encoders: 922 | f_map = encoder(t) 923 | enc_ = F.relu(f_map) 924 | k_h = enc_.size()[h_idx] 925 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 926 | enc_ = enc_.squeeze(w_idx) 927 | enc_ = enc_.squeeze(h_idx) 928 | enc_outs.append(enc_) 929 | encoding = self.dropout(torch.cat(enc_outs, 1)) 930 | # encoding = torch.cat(enc_outs, 1) 931 | a_re = F.relu(encoding) 932 | record_re = torch.cat((q_re, a_re), dim=1) 933 | 934 | re = record_re 935 | score = self.dense(re) 936 | F.relu(score) 937 | predict.append(self.softmax(score)) 938 | 939 | return torch.cat(predict, dim=0) 940 | 941 | class GCN_alignment_gate_att(nn.Module): 942 | def __init__(self, input_dim, hidden_dim, kernel_num, adj, featureless=True): 943 | super(GCN_alignment_gate_att, self).__init__() 944 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 945 | nn.init.xavier_uniform_(self.embedding, gain=1) 946 | self.A = adj 947 | self.featureless = featureless 948 | self.dropout = 0.5 949 | self.dropout = nn.Dropout(self.dropout) 950 | self.kernel_num = kernel_num 951 | self.filter_sizes = [1, 2] 952 | #self.linear = nn.Linear(hidden_dim*4, 300) 953 | self.dense = nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 2) 954 | ''' 955 | self.dense = torch.nn.Sequential( 956 | torch.nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 128), 957 | torch.nn.ReLU(), 958 | torch.nn.Linear(128, 2) 959 | ) 960 | ''' 961 | self.gate = torch.nn.Sequential( 962 | torch.nn.Linear(hidden_dim, 1), 963 | torch.nn.Sigmoid() 964 | ) 965 | self.gate[0].bias.data.fill_(3) 966 | #self.att_len = 3 967 | #self.highway_layers = nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) for _ in range(2)]) 968 | self.softmax = nn.LogSoftmax(dim=1) 969 | 970 | 971 | self.encoders = [] 972 | for i, filter_size in enumerate(self.filter_sizes): 973 | enc_attr_name = "encoder_%d" % i 974 | self.__setattr__(enc_attr_name, 975 | nn.Conv2d(in_channels=1, 976 | out_channels=self.kernel_num, 977 | kernel_size=(filter_size, hidden_dim*2))) 978 | self.encoders.append(self.__getattr__(enc_attr_name)) 979 | 980 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 981 | def _get_words_encoding(batch): 982 | batch_collection = [] 983 | for i in range(len(batch)): 984 | v = batch[i].item() 985 | atts = doc_att[str(v)] 986 | att_len = len(atts) 987 | words_idx = [] 988 | for j in range(att_len): 989 | att_idx = 
atts[j] 990 | if att_idx == -1: 991 | continue 992 | words_idx.extend(att_words[str(att_idx)]) 993 | idx = torch.LongTensor(words_idx) 994 | if use_cuda: 995 | idx = idx.cuda() 996 | batch_collection.append(torch.index_select(encoding, 0, idx)) 997 | return batch_collection 998 | 999 | Q = _get_words_encoding(batch_x) 1000 | A = _get_words_encoding(batch_y) 1001 | 1002 | def align_weights(x, y): 1003 | score = torch.mm(x, y.t()) 1004 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 1005 | 1006 | def weighted_encoding(y, w): 1007 | return torch.mm(w, y) 1008 | 1009 | batch_size = len(batch_x) 1010 | EQ = [] 1011 | EA = [] 1012 | for i in range(batch_size): 1013 | wq, wa = align_weights(Q[i], A[i]) 1014 | eq = weighted_encoding(A[i], wq) 1015 | ea = weighted_encoding(Q[i], wa) 1016 | EQ.append(eq) 1017 | EA.append(ea) 1018 | return Q, A, EQ, EA, 1019 | 1020 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 1021 | if self.featureless != True: 1022 | X = self.dropout(X) 1023 | pre_matrix = torch.mm(self.A, X) 1024 | else: 1025 | pre_matrix = self.A 1026 | layer1 = torch.mm(pre_matrix,self.embedding) 1027 | #layer2 = torch.mm(pre_matrix, layer1) 1028 | 1029 | Q, A, EQ, EA = self.soft_attention(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 1030 | batch_size = len(batch_x) 1031 | predict = [] 1032 | 1033 | n_idx = 0 1034 | c_idx = 1 1035 | h_idx = 2 1036 | w_idx = 3 1037 | 1038 | for i in range(batch_size): 1039 | intra = torch.diagflat(self.gate(Q[i].detach())) 1040 | Q_s = torch.mm(1 - intra, Q[i]) + torch.mm(intra, EQ[i]) 1041 | q_sub = Q[i] - Q_s 1042 | 1043 | tq = torch.mul(q_sub, q_sub) 1044 | 1045 | intra = torch.diagflat(self.gate(A[i].detach())) 1046 | A_s = torch.mm(1 - intra, A[i]) + torch.mm(intra, EA[i]) 1047 | a_sub = A[i] - A_s 1048 | ta = torch.mul(a_sub, a_sub) 1049 | 1050 | 1051 | mul_q = torch.mul(Q[i], Q_s) 1052 | mul_a = torch.mul(A[i], A_s) 1053 | 1054 | qr = tq 1055 | 1056 | #t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 1057 | t = torch.cat((qr, mul_q), dim=1).unsqueeze(dim=0) 1058 | t = t.unsqueeze(c_idx) 1059 | enc_outs = [] 1060 | for encoder in self.encoders: 1061 | f_map = encoder(t) 1062 | enc_ = F.relu(f_map) 1063 | k_h = enc_.size()[h_idx] 1064 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1065 | enc_ = enc_.squeeze(w_idx) 1066 | enc_ = enc_.squeeze(h_idx) 1067 | enc_outs.append(enc_) 1068 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1069 | #encoding = torch.cat(enc_outs, 1) 1070 | q_re = F.relu(encoding) 1071 | 1072 | ar = ta 1073 | 1074 | t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 1075 | t = t.unsqueeze(c_idx) 1076 | enc_outs = [] 1077 | for encoder in self.encoders: 1078 | f_map = encoder(t) 1079 | enc_ = F.relu(f_map) 1080 | k_h = enc_.size()[h_idx] 1081 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1082 | enc_ = enc_.squeeze(w_idx) 1083 | enc_ = enc_.squeeze(h_idx) 1084 | enc_outs.append(enc_) 1085 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1086 | #encoding = torch.cat(enc_outs, 1) 1087 | a_re = F.relu(encoding) 1088 | 1089 | 1090 | re = torch.cat((q_re, a_re), dim=1) 1091 | score = self.dense(re) 1092 | F.relu(score) 1093 | predict.append(self.softmax(score)) 1094 | 1095 | return torch.cat(predict, dim=0) 1096 | 1097 | class GCN_alignment_gram(nn.Module): 1098 | def __init__(self, input_dim, hidden_dim, kernel_num, adj, featureless=True): 1099 | super(GCN_alignment_gram, self).__init__() 1100 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, 
requires_grad=True)) 1101 | nn.init.xavier_uniform_(self.embedding, gain=1) 1102 | self.A = adj 1103 | self.featureless = featureless 1104 | self.dropout = 0.5 1105 | self.dropout = nn.Dropout(self.dropout) 1106 | self.kernel_num = kernel_num 1107 | self.filter_sizes = [1, 2] 1108 | #self.linear = nn.Linear(hidden_dim*4, 300) 1109 | self.dense = nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 2) 1110 | #self.att_len = 3 1111 | #self.highway_layers = nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) for _ in range(2)]) 1112 | self.softmax = nn.LogSoftmax(dim=1) 1113 | 1114 | 1115 | self.encoders = [] 1116 | for i, filter_size in enumerate(self.filter_sizes): 1117 | enc_attr_name = "encoder_%d" % i 1118 | self.__setattr__(enc_attr_name, 1119 | nn.Conv2d(in_channels=1, 1120 | out_channels=self.kernel_num, 1121 | kernel_size=(filter_size, hidden_dim*2))) 1122 | self.encoders.append(self.__getattr__(enc_attr_name)) 1123 | 1124 | self.intra_att_encoders = [] 1125 | for i, filter_size in enumerate([1]): 1126 | enc_attr_name = "intra_att_encoder_%d" % i 1127 | self.__setattr__(enc_attr_name, 1128 | nn.Conv1d(in_channels=hidden_dim, 1129 | out_channels=hidden_dim, 1130 | kernel_size=filter_size, 1131 | padding=filter_size-1)) 1132 | self.intra_att_encoders.append(self.__getattr__(enc_attr_name)) 1133 | 1134 | def soft_attention(self, Q, A): 1135 | 1136 | def align_weights(x, y): 1137 | score = torch.mm(x, y.t()) 1138 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 1139 | 1140 | def weighted_encoding(y, w): 1141 | return torch.mm(w, y) 1142 | 1143 | batch_size = len(Q) 1144 | EQ = [] 1145 | EA = [] 1146 | for i in range(batch_size): 1147 | wq, wa = align_weights(Q[i], A[i]) 1148 | eq = weighted_encoding(A[i], wq) 1149 | ea = weighted_encoding(Q[i], wa) 1150 | EQ.append(eq) 1151 | EA.append(ea) 1152 | return EQ, EA 1153 | 1154 | def get_encoding(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 1155 | def _get_words_encoding(batch): 1156 | batch_collection = [] 1157 | for i in range(len(batch)): 1158 | v = batch[i].item() 1159 | atts = doc_att[str(v)] 1160 | att_len = len(atts) 1161 | words_idx = [] 1162 | for j in range(att_len): 1163 | att_idx = atts[j] 1164 | if att_idx == -1: 1165 | continue 1166 | words_idx.extend(att_words[str(att_idx)]) 1167 | idx = torch.LongTensor(words_idx) 1168 | if use_cuda: 1169 | idx = idx.cuda() 1170 | batch_collection.append(torch.index_select(encoding, 0, idx)) 1171 | return batch_collection 1172 | 1173 | Q = _get_words_encoding(batch_x) 1174 | A = _get_words_encoding(batch_y) 1175 | return Q, A 1176 | 1177 | def multi_gram(self, E): 1178 | #batch_size = len(E) 1179 | multi_gram_encoding = [] 1180 | for r in E: 1181 | sum_outs = [] 1182 | encoding = [] 1183 | filter_s = 0 1184 | r = r.t().unsqueeze(0) 1185 | for encoder in self.intra_att_encoders: 1186 | filter_s += 1 1187 | f_map = encoder(r) 1188 | enc_ = F.relu(f_map) 1189 | enc_ = F.avg_pool2d(enc_, stride=1, kernel_size=(1, filter_s)) 1190 | enc_ = enc_.squeeze(0).t() 1191 | encoding.append(enc_.unsqueeze(1)) 1192 | sum_outs.append(torch.sum(enc_, dim=1).unsqueeze(0)) 1193 | sum_outs = torch.cat(sum_outs, 0) 1194 | encoding = torch.cat(encoding, 1) 1195 | att = F.softmax(sum_outs, dim=0).t().unsqueeze(1) 1196 | final = torch.matmul(att, encoding).squeeze(1) 1197 | multi_gram_encoding.append(final) 1198 | return multi_gram_encoding 1199 | 1200 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 1201 | if self.featureless != True: 1202 | X = 
self.dropout(X) 1203 | pre_matrix = torch.mm(self.A, X) 1204 | else: 1205 | pre_matrix = self.A 1206 | layer1 = torch.mm(pre_matrix,self.embedding) 1207 | 1208 | ## n-gram ### 1209 | OQ, OA = self.get_encoding(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 1210 | Q = self.multi_gram(OQ) 1211 | A = self.multi_gram(OA) 1212 | 1213 | EQ, EA = self.soft_attention(Q, A) 1214 | batch_size = len(batch_x) 1215 | predict = [] 1216 | 1217 | n_idx = 0 1218 | c_idx = 1 1219 | h_idx = 2 1220 | w_idx = 3 1221 | 1222 | for i in range(batch_size): 1223 | q_sub = Q[i] - EQ[i] 1224 | 1225 | tq = torch.mul(q_sub, q_sub) 1226 | tq = torch.mul(q_sub, q_sub) 1227 | a_sub = A[i] - EA[i] 1228 | ta = torch.mul(a_sub, a_sub) 1229 | 1230 | 1231 | mul_q = torch.mul(Q[i], EQ[i]) 1232 | mul_a = torch.mul(A[i], EA[i]) 1233 | 1234 | qr = tq 1235 | 1236 | t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 1237 | t = t.unsqueeze(c_idx) 1238 | enc_outs = [] 1239 | for encoder in self.encoders: 1240 | f_map = encoder(t) 1241 | enc_ = F.relu(f_map) 1242 | k_h = enc_.size()[h_idx] 1243 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1244 | enc_ = enc_.squeeze(w_idx) 1245 | enc_ = enc_.squeeze(h_idx) 1246 | enc_outs.append(enc_) 1247 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1248 | #encoding = torch.cat(enc_outs, 1) 1249 | q_re = F.relu(encoding) 1250 | 1251 | ar = ta 1252 | 1253 | t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 1254 | t = t.unsqueeze(c_idx) 1255 | enc_outs = [] 1256 | for encoder in self.encoders: 1257 | f_map = encoder(t) 1258 | enc_ = F.relu(f_map) 1259 | k_h = enc_.size()[h_idx] 1260 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1261 | enc_ = enc_.squeeze(w_idx) 1262 | enc_ = enc_.squeeze(h_idx) 1263 | enc_outs.append(enc_) 1264 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1265 | #encoding = torch.cat(enc_outs, 1) 1266 | a_re = F.relu(encoding) 1267 | 1268 | 1269 | re = torch.cat((q_re, a_re), dim=1) 1270 | score = self.dense(re) 1271 | F.relu(score) 1272 | predict.append(self.softmax(score)) 1273 | 1274 | return torch.cat(predict, dim=0) 1275 | 1276 | 1277 | 1278 | class GCN_alignment_joint_att(nn.Module): 1279 | def __init__(self, input_dim, hidden_dim, kernel_num, adj, featureless=True): 1280 | super(GCN_alignment_joint_att, self).__init__() 1281 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 1282 | nn.init.xavier_uniform_(self.embedding, gain=1) 1283 | self.A = adj 1284 | self.featureless = featureless 1285 | self.dropout = 0.5 1286 | self.dropout = nn.Dropout(self.dropout) 1287 | self.kernel_num = kernel_num 1288 | self.filter_sizes = [1, 2] 1289 | self.r = 8 1290 | self.linear = nn.Linear(hidden_dim*16, 2) 1291 | self.dense = nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 2) 1292 | ''' 1293 | self.dense = torch.nn.Sequential( 1294 | torch.nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 128), 1295 | torch.nn.ReLU(), 1296 | torch.nn.Linear(128, 2) 1297 | ) 1298 | ''' 1299 | self.gate = torch.nn.Sequential( 1300 | torch.nn.Linear(hidden_dim, 1), 1301 | torch.nn.Sigmoid() 1302 | ) 1303 | self.att_gate = torch.nn.Sequential( 1304 | torch.nn.Linear(hidden_dim, 350, bias=False), 1305 | torch.nn.Tanh(), 1306 | torch.nn.Linear(350, 1, bias=False), 1307 | torch.nn.Sigmoid() 1308 | ) 1309 | self.att_gate_norm = torch.nn.Sequential( 1310 | torch.nn.Linear(hidden_dim, 350, bias=False), 1311 | torch.nn.Tanh(), 1312 | torch.nn.Linear(350, 8, bias=False), 1313 | torch.nn.Softmax(dim=0) 1314 | ) 1315 | 
self.gate[0].bias.data.fill_(3) 1316 | #self.att_len = 3 1317 | #self.highway_layers = nn.ModuleList([HighwayMLP(hidden_dim, activation_function=F.relu) for _ in range(2)]) 1318 | self.softmax = nn.LogSoftmax(dim=1) 1319 | 1320 | 1321 | self.encoders = [] 1322 | for i, filter_size in enumerate(self.filter_sizes): 1323 | enc_attr_name = "encoder_%d" % i 1324 | self.__setattr__(enc_attr_name, 1325 | nn.Conv2d(in_channels=1, 1326 | out_channels=self.kernel_num, 1327 | kernel_size=(filter_size, hidden_dim))) 1328 | self.encoders.append(self.__getattr__(enc_attr_name)) 1329 | 1330 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 1331 | def _get_words_encoding(batch): 1332 | batch_collection = [] 1333 | for i in range(len(batch)): 1334 | v = batch[i].item() 1335 | atts = doc_att[str(v)] 1336 | att_len = len(atts) 1337 | words_idx = [] 1338 | ### record ### 1339 | words_idx.append(v) 1340 | for j in range(att_len): 1341 | att_idx = atts[j] 1342 | if att_idx == -1: 1343 | continue 1344 | words_idx.append(att_idx) 1345 | words_idx.extend(att_words[str(att_idx)]) 1346 | idx = torch.LongTensor(words_idx) 1347 | if use_cuda: 1348 | idx = idx.cuda() 1349 | batch_collection.append(torch.index_select(encoding, 0, idx)) 1350 | return batch_collection 1351 | 1352 | Q = _get_words_encoding(batch_x) 1353 | A = _get_words_encoding(batch_y) 1354 | 1355 | def align_weights(x, y): 1356 | score = torch.mm(x, y.t()) 1357 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 1358 | 1359 | def weighted_encoding(y, w): 1360 | return torch.mm(w, y) 1361 | 1362 | batch_size = len(batch_x) 1363 | EQ = [] 1364 | EA = [] 1365 | for i in range(batch_size): 1366 | wq, wa = align_weights(Q[i], A[i]) 1367 | eq = weighted_encoding(A[i], wq) 1368 | ea = weighted_encoding(Q[i], wa) 1369 | EQ.append(eq) 1370 | EA.append(ea) 1371 | return Q, A, EQ, EA, 1372 | 1373 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 1374 | if self.featureless != True: 1375 | X = self.dropout(X) 1376 | pre_matrix = torch.mm(self.A, X) 1377 | else: 1378 | pre_matrix = self.A 1379 | layer1 = torch.mm(pre_matrix,self.embedding) 1380 | #layer2 = torch.mm(pre_matrix, layer1) 1381 | 1382 | Q, A, EQ, EA = self.soft_attention(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 1383 | batch_size = len(batch_x) 1384 | predict = [] 1385 | 1386 | n_idx = 0 1387 | c_idx = 1 1388 | h_idx = 2 1389 | w_idx = 3 1390 | 1391 | for i in range(batch_size): 1392 | 1393 | 1394 | cnn_flag = False 1395 | 1396 | if cnn_flag == True: 1397 | # intra = torch.diagflat(self.gate(torch.mul(Q[i], Q[i]))) 1398 | intra = torch.diagflat( 1399 | self.att_gate(Q[i].detach()) 1400 | ) 1401 | # print(intra) 1402 | Q_s = torch.mm(1 - intra, Q[i]) + torch.mm(intra, EQ[i]) 1403 | q_sub = Q[i] - EQ[i] 1404 | 1405 | tq = torch.mm(intra, torch.mul(q_sub, q_sub)) 1406 | 1407 | # intra = torch.diagflat(self.gate(A[i].detach())) 1408 | intra = torch.diagflat( 1409 | self.att_gate(A[i].detach()) 1410 | ) 1411 | A_s = torch.mm(1 - intra, A[i]) + torch.mm(intra, EA[i]) 1412 | a_sub = A[i] - EA[i] 1413 | ta = torch.mm(intra, torch.mul(a_sub, a_sub)) 1414 | 1415 | mul_q = torch.mul(Q[i], EQ[i]) 1416 | mul_a = torch.mul(A[i], EA[i]) 1417 | 1418 | qr = tq 1419 | # t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 1420 | t = qr.unsqueeze(dim=0) 1421 | t = t.unsqueeze(c_idx) 1422 | enc_outs = [] 1423 | for encoder in self.encoders: 1424 | f_map = encoder(t) 1425 | enc_ = F.relu(f_map) 1426 | k_h = enc_.size()[h_idx] 1427 | enc_ = 
F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1428 | enc_ = enc_.squeeze(w_idx) 1429 | enc_ = enc_.squeeze(h_idx) 1430 | enc_outs.append(enc_) 1431 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1432 | # encoding = torch.cat(enc_outs, 1) 1433 | q_re = F.relu(encoding) 1434 | 1435 | ar = ta 1436 | 1437 | # t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 1438 | t = ar.unsqueeze(dim=0) 1439 | t = t.unsqueeze(c_idx) 1440 | enc_outs = [] 1441 | for encoder in self.encoders: 1442 | f_map = encoder(t) 1443 | enc_ = F.relu(f_map) 1444 | k_h = enc_.size()[h_idx] 1445 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1446 | enc_ = enc_.squeeze(w_idx) 1447 | enc_ = enc_.squeeze(h_idx) 1448 | enc_outs.append(enc_) 1449 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1450 | # encoding = torch.cat(enc_outs, 1) 1451 | a_re = F.relu(encoding) 1452 | re = torch.cat((q_re, a_re), dim=1) 1453 | score = self.dense(re) 1454 | 1455 | else: 1456 | intra = self.att_gate_norm(Q[i].detach()) 1457 | #print(intra) 1458 | q_sub = Q[i] - EQ[i] 1459 | tq = torch.mm(intra.t(), torch.mul(q_sub, q_sub)) 1460 | intra = self.att_gate_norm(A[i].detach()) 1461 | a_sub = A[i] - EA[i] 1462 | ta = torch.mm(intra.t(), torch.mul(a_sub, a_sub)) 1463 | 1464 | q_re = tq 1465 | a_re = ta 1466 | re = torch.cat((q_re, a_re), dim=1) 1467 | score = self.linear(re.view(1,-1)) 1468 | 1469 | F.relu(score) 1470 | predict.append(self.softmax(score)) 1471 | 1472 | return torch.cat(predict, dim=0) 1473 | 1474 | 1475 | class alignment_cnn(nn.Module): 1476 | def __init__(self, input_dim, hidden_dim, kernel_num, adj, featureless=True): 1477 | super(alignment_cnn, self).__init__() 1478 | self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 1479 | nn.init.xavier_uniform_(self.embedding, gain=1) 1480 | self.dropout = 0.5 1481 | self.dropout = nn.Dropout(self.dropout) 1482 | self.kernel_num = kernel_num 1483 | self.filter_sizes = [1, 2] 1484 | #self.linear = nn.Linear(hidden_dim*4, 300) 1485 | self.dense = nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 2) 1486 | 1487 | self.softmax = nn.LogSoftmax(dim=1) 1488 | 1489 | self.encoders = [] 1490 | for i, filter_size in enumerate(self.filter_sizes): 1491 | enc_attr_name = "encoder_%d" % i 1492 | self.__setattr__(enc_attr_name, 1493 | nn.Conv2d(in_channels=1, 1494 | out_channels=self.kernel_num, 1495 | kernel_size=(filter_size, hidden_dim*2))) 1496 | self.encoders.append(self.__getattr__(enc_attr_name)) 1497 | 1498 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 1499 | def _get_words_encoding(batch): 1500 | batch_collection = [] 1501 | for i in range(len(batch)): 1502 | v = batch[i].item() 1503 | atts = doc_att[str(v)] 1504 | att_len = len(atts) 1505 | words_idx = [] 1506 | for j in range(att_len): 1507 | att_idx = atts[j] 1508 | if att_idx == -1: 1509 | continue 1510 | words_idx.extend(att_words[str(att_idx)]) 1511 | idx = torch.LongTensor(words_idx) 1512 | if use_cuda: 1513 | idx = idx.cuda() 1514 | batch_collection.append(torch.index_select(encoding, 0, idx)) 1515 | return batch_collection 1516 | 1517 | Q = _get_words_encoding(batch_x) 1518 | A = _get_words_encoding(batch_y) 1519 | 1520 | def align_weights(x, y): 1521 | score = torch.mm(x, y.t()) 1522 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 1523 | 1524 | def weighted_encoding(y, w): 1525 | return torch.mm(w, y) 1526 | 1527 | batch_size = len(batch_x) 1528 | EQ = [] 1529 | EA = [] 1530 | for i in range(batch_size): 1531 | wq, wa = 
align_weights(Q[i], A[i]) 1532 | eq = weighted_encoding(A[i], wq) 1533 | ea = weighted_encoding(Q[i], wa) 1534 | EQ.append(eq) 1535 | EA.append(ea) 1536 | return Q, A, EQ, EA, 1537 | 1538 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 1539 | 1540 | Q, A, EQ, EA = self.soft_attention(batch_x, batch_y, self.embedding, doc_att, att_words, use_cuda) 1541 | batch_size = len(batch_x) 1542 | predict = [] 1543 | 1544 | n_idx = 0 1545 | c_idx = 1 1546 | h_idx = 2 1547 | w_idx = 3 1548 | 1549 | for i in range(batch_size): 1550 | q_sub = Q[i] - EQ[i] 1551 | 1552 | tq = torch.mul(q_sub, q_sub) 1553 | 1554 | a_sub = A[i] - EA[i] 1555 | ta = torch.mul(a_sub, a_sub) 1556 | 1557 | 1558 | mul_q = torch.mul(Q[i], EQ[i]) 1559 | mul_a = torch.mul(A[i], EA[i]) 1560 | 1561 | qr = tq 1562 | 1563 | #t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 1564 | t = torch.cat((qr, mul_q), dim=1).unsqueeze(dim=0) 1565 | t = t.unsqueeze(c_idx) 1566 | enc_outs = [] 1567 | for encoder in self.encoders: 1568 | f_map = encoder(t) 1569 | enc_ = F.relu(f_map) 1570 | k_h = enc_.size()[h_idx] 1571 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1572 | enc_ = enc_.squeeze(w_idx) 1573 | enc_ = enc_.squeeze(h_idx) 1574 | enc_outs.append(enc_) 1575 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1576 | #encoding = torch.cat(enc_outs, 1) 1577 | q_re = F.relu(encoding) 1578 | 1579 | ar = ta 1580 | 1581 | t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 1582 | t = t.unsqueeze(c_idx) 1583 | enc_outs = [] 1584 | for encoder in self.encoders: 1585 | f_map = encoder(t) 1586 | enc_ = F.relu(f_map) 1587 | k_h = enc_.size()[h_idx] 1588 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1589 | enc_ = enc_.squeeze(w_idx) 1590 | enc_ = enc_.squeeze(h_idx) 1591 | enc_outs.append(enc_) 1592 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1593 | #encoding = torch.cat(enc_outs, 1) 1594 | a_re = F.relu(encoding) 1595 | 1596 | 1597 | re = torch.cat((q_re, a_re), dim=1) 1598 | score = self.dense(re) 1599 | F.relu(score) 1600 | predict.append(self.softmax(score)) 1601 | 1602 | return torch.cat(predict, dim=0) 1603 | 1604 | 1605 | class GCN_alignment_cnn_nce(nn.Module): 1606 | def __init__(self, input_dim, hidden_dim, kernel_num, adj, pre_trained_embedding, featureless=True): 1607 | super(GCN_alignment_cnn_nce, self).__init__() 1608 | self.embedding = pre_trained_embedding 1609 | self.embedding.requires_grad = True 1610 | self.embedding = Parameter(self.embedding) 1611 | #self.embedding = Parameter(torch.zeros([input_dim, hidden_dim], dtype=torch.float, requires_grad=True)) 1612 | #nn.init.xavier_uniform_(self.embedding, gain=1) 1613 | self.A = adj 1614 | self.featureless = featureless 1615 | self.dropout = 0.5 1616 | self.dropout = nn.Dropout(self.dropout) 1617 | self.kernel_num = kernel_num 1618 | self.filter_sizes = [1, 2] 1619 | #self.linear = nn.Linear(hidden_dim*4, 300) 1620 | self.dense = nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 2) 1621 | ''' 1622 | self.dense = torch.nn.Sequential( 1623 | torch.nn.Linear(self.kernel_num*len(self.filter_sizes)*2, 128), 1624 | torch.nn.ReLU(), 1625 | torch.nn.Linear(128, 2) 1626 | ) 1627 | ''' 1628 | self.softmax = nn.LogSoftmax(dim=1) 1629 | 1630 | self.encoders = [] 1631 | for i, filter_size in enumerate(self.filter_sizes): 1632 | enc_attr_name = "encoder_%d" % i 1633 | self.__setattr__(enc_attr_name, 1634 | nn.Conv2d(in_channels=1, 1635 | out_channels=self.kernel_num, 1636 | kernel_size=(filter_size, hidden_dim))) 1637 | 
self.encoders.append(self.__getattr__(enc_attr_name)) 1638 | 1639 | def soft_attention(self, batch_x, batch_y, encoding, doc_att, att_words, use_cuda): 1640 | def _get_words_encoding(batch): 1641 | batch_collection = [] 1642 | for i in range(len(batch)): 1643 | v = batch[i].item() 1644 | atts = doc_att[str(v)] 1645 | att_len = len(atts) 1646 | words_idx = [] 1647 | for j in range(att_len): 1648 | att_idx = atts[j] 1649 | if att_idx == -1: 1650 | continue 1651 | words_idx.extend(att_words[str(att_idx)]) 1652 | idx = torch.LongTensor(words_idx) 1653 | if use_cuda: 1654 | idx = idx.cuda() 1655 | batch_collection.append(torch.index_select(encoding, 0, idx)) 1656 | return batch_collection 1657 | 1658 | Q = _get_words_encoding(batch_x) 1659 | A = _get_words_encoding(batch_y) 1660 | 1661 | def align_weights(x, y): 1662 | score = torch.mm(x, y.t()) 1663 | return F.softmax(score, dim= 1), F.softmax(score.t(), dim= 1) 1664 | 1665 | def weighted_encoding(y, w): 1666 | return torch.mm(w, y) 1667 | 1668 | batch_size = len(batch_x) 1669 | EQ = [] 1670 | EA = [] 1671 | for i in range(batch_size): 1672 | wq, wa = align_weights(Q[i], A[i]) 1673 | eq = weighted_encoding(A[i], wq) 1674 | ea = weighted_encoding(Q[i], wa) 1675 | EQ.append(eq) 1676 | EA.append(ea) 1677 | return Q, A, EQ, EA, 1678 | 1679 | def forward(self, X, batch_x, batch_y, doc_att, att_words, use_cuda): 1680 | if self.featureless != True: 1681 | X = self.dropout(X) 1682 | pre_matrix = torch.mm(self.A, X) 1683 | else: 1684 | pre_matrix = self.A 1685 | layer1 = torch.mm(pre_matrix,self.embedding) 1686 | #layer2 = torch.mm(pre_matrix, layer1) 1687 | 1688 | Q, A, EQ, EA = self.soft_attention(batch_x, batch_y, layer1, doc_att, att_words, use_cuda) 1689 | batch_size = len(batch_x) 1690 | predict = [] 1691 | 1692 | n_idx = 0 1693 | c_idx = 1 1694 | h_idx = 2 1695 | w_idx = 3 1696 | 1697 | for i in range(batch_size): 1698 | q_sub = Q[i] - EQ[i] 1699 | 1700 | tq = torch.mul(q_sub, q_sub) 1701 | 1702 | a_sub = A[i] - EA[i] 1703 | ta = torch.mul(a_sub, a_sub) 1704 | 1705 | 1706 | mul_q = torch.mul(Q[i], EQ[i]) 1707 | mul_a = torch.mul(A[i], EA[i]) 1708 | 1709 | qr = tq 1710 | 1711 | #t = torch.cat((qr, mul_q),dim=1).unsqueeze(dim=0) 1712 | t= Q[i].unsqueeze(dim=0) 1713 | t = t.unsqueeze(c_idx) 1714 | enc_outs = [] 1715 | for encoder in self.encoders: 1716 | f_map = encoder(t) 1717 | enc_ = F.relu(f_map) 1718 | k_h = enc_.size()[h_idx] 1719 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1720 | enc_ = enc_.squeeze(w_idx) 1721 | enc_ = enc_.squeeze(h_idx) 1722 | enc_outs.append(enc_) 1723 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1724 | #encoding = torch.cat(enc_outs, 1) 1725 | q_re = F.relu(encoding) 1726 | 1727 | ar = ta 1728 | 1729 | #t = torch.cat((ar, mul_a), dim=1).unsqueeze(dim=0) 1730 | t = A[i].unsqueeze(dim=0) 1731 | t = t.unsqueeze(c_idx) 1732 | enc_outs = [] 1733 | for encoder in self.encoders: 1734 | f_map = encoder(t) 1735 | enc_ = F.relu(f_map) 1736 | k_h = enc_.size()[h_idx] 1737 | enc_ = F.max_pool2d(enc_, kernel_size=(k_h, 1)) 1738 | enc_ = enc_.squeeze(w_idx) 1739 | enc_ = enc_.squeeze(h_idx) 1740 | enc_outs.append(enc_) 1741 | encoding = self.dropout(torch.cat(enc_outs, 1)) 1742 | #encoding = torch.cat(enc_outs, 1) 1743 | a_re = F.relu(encoding) 1744 | 1745 | 1746 | re = torch.cat((q_re, a_re), dim=1) 1747 | score = self.dense(re) 1748 | F.relu(score) 1749 | predict.append(self.softmax(score)) 1750 | 1751 | #print(self.embedding) 1752 | 1753 | return torch.cat(predict, dim=0) 
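
Annotation (editor's note, not part of model.py): the GCN_alignment_* classes above all repeat the same two-step pattern, a decomposable soft alignment (dot-product attention between the two records' token encodings) followed by a CNN comparison over the concatenated [(x - e)^2 ; x * e] features that is max-pooled over the token axis. The snippet below is a minimal, self-contained sketch of that pattern for reference; `soft_align`, `CnnCompare`, and the toy dimensions are illustrative and do not appear in the repository.

import torch
import torch.nn as nn
import torch.nn.functional as F


def soft_align(q, a):
    # q: (m, d) and a: (n, d) token encodings of the left/right record.
    # eq[i] is a softmax-weighted mixture of a's rows aligned to q[i]; ea is symmetric.
    score = torch.mm(q, a.t())                     # (m, n) dot-product similarities
    eq = torch.mm(F.softmax(score, dim=1), a)      # (m, d)
    ea = torch.mm(F.softmax(score.t(), dim=1), q)  # (n, d)
    return eq, ea


class CnnCompare(nn.Module):
    # Convolutional comparison over [(x - e)^2 ; x * e], max-pooled over the
    # (variable-length) token axis so every record yields a fixed-size vector.
    # Assumes the sequence length is at least max(filter_sizes).
    def __init__(self, hidden_dim, kernel_num=8, filter_sizes=(1, 2)):
        super().__init__()
        self.convs = nn.ModuleList(
            [nn.Conv2d(1, kernel_num, kernel_size=(fs, hidden_dim * 2))
             for fs in filter_sizes])

    def forward(self, x, e):
        feats = torch.cat(((x - e) ** 2, x * e), dim=1)   # (m, 2d)
        t = feats.unsqueeze(0).unsqueeze(0)               # (1, 1, m, 2d)
        outs = []
        for conv in self.convs:
            h = F.relu(conv(t))                           # (1, k, m', 1)
            h = F.max_pool2d(h, kernel_size=(h.size(2), 1))
            outs.append(h.squeeze(3).squeeze(2))          # (1, k)
        return torch.cat(outs, dim=1)                     # (1, k * len(filter_sizes))


if __name__ == "__main__":
    d = 16
    q, a = torch.randn(5, d), torch.randn(7, d)           # 5 vs. 7 tokens
    eq, ea = soft_align(q, a)
    compare = CnnCompare(d)
    pair_repr = torch.cat((compare(q, eq), compare(a, ea)), dim=1)
    print(pair_repr.shape)                                # torch.Size([1, 32])

Max-pooling over the token axis is what lets record pairs with different numbers of tokens map to a fixed-size comparison vector before the final linear + LogSoftmax scoring used in the classes above.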
-------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from model import * 3 | import argparse 4 | from utils import * 5 | import torch.nn.functional as F 6 | 7 | def get_args(): 8 | parser = argparse.ArgumentParser(description='CNN text classification') 9 | parser.add_argument('--lr', type=float, default=0.001, 10 | help='initial learning rate [default: 0.001]') 11 | parser.add_argument('--epochs', type=int, default=100, 12 | help='number of epochs for train') 13 | parser.add_argument('--dropout', type=float, default=0.5, 14 | help='the probability for dropout (0 = no dropout) [default: 0.5]') 15 | parser.add_argument('--hidden-dim', type=int, default=200, 16 | help='number of embedding dimension [default: 100]') 17 | parser.add_argument('--output-dim', type=int, default=50, 18 | help='number of embedding dimension [default: 50]') 19 | parser.add_argument('--negative-num', type=int, default=10, 20 | help='number of negative example') 21 | parser.add_argument('--kernel-num', type=int, default=384, 22 | help='number of negative example') 23 | parser.add_argument('--loss', type=str, default='Cosine', 24 | help='loss function') 25 | parser.add_argument('--model', type=str, default='gcn-align-cnn', 26 | help='model') 27 | parser.add_argument('--layers', type=int, default=2, 28 | help='number of layers') 29 | parser.add_argument('--l2', type=float, default=0, 30 | help='weight decay') 31 | parser.add_argument('--infer_thres', type=float, default=0.5, 32 | help='inference threshold [default: 0.5]') 33 | parser.add_argument('--batch', type=int, default=32, 34 | help='batch size [default: 10]') 35 | parser.add_argument('--cuda-able', action='store_true', 36 | help='enables cuda') 37 | args = parser.parse_args() 38 | return args 39 | 40 | 41 | def validation(info, encoding, dev, thres, use_cuda = False): 42 | values = [0, 0, 0, 0] # tp tn fp fn 43 | thres = torch.tensor(thres) 44 | doc_att = info['data']['odc_att'] 45 | temp = dev 46 | for inst in temp: 47 | (x, y, l) = inst 48 | s = tuple_representation(doc_att, encoding,torch.LongTensor([x]), use_cuda) 49 | t = tuple_representation(doc_att, encoding,torch.LongTensor([y]), use_cuda) 50 | score = torch.nn.functional.cosine_similarity(s, t, dim=1) 51 | if score.data[0].cpu() > thres: 52 | if l == 1: 53 | values[0] += 1 54 | else: 55 | values[2] += 1 56 | elif l == 1: 57 | values[3] += 1 58 | else: 59 | values[1] += 1 60 | p, r, f1 = compute_f1(values[0], values[1], values[2], values[3]) 61 | return p, r, f1 62 | 63 | def validation4LR(info, lr, dev, use_cuda = False): 64 | values = [0, 0, 0, 0] # tp tn fp fn 65 | doc_att = info['data']['odc_att'] 66 | S, T, L = toTensor(dev) 67 | L = label_batch_for_lr(L) 68 | predict = lr.forward(doc_att, S, T, use_cuda) 69 | for i in range(len(predict)): 70 | if predict[i] > 0.5: 71 | if L[i] == 1: 72 | values[0] += 1 73 | else: 74 | values[2] += 1 75 | elif L[i] == 1: 76 | values[3] += 1 77 | else: 78 | values[1] += 1 79 | p, r, f1 = compute_f1(values[0], values[1], values[2], values[3]) 80 | return p, r, f1 81 | 82 | def validation4lstm(lstm, dev, use_cuda = False): 83 | values = [0, 0, 0, 0] # tp tn fp fn 84 | for (x, y, l) in dev: 85 | x = torch.LongTensor(x).unsqueeze(dim=0) 86 | y = torch.LongTensor(y).unsqueeze(dim=0) 87 | #ex = lstm.forward(x) 88 | #ey = lstm.forward(y) 89 | if use_cuda == True: 90 | x = x.cuda() 91 | y = y.cuda() 92 | #score = F.cosine_similarity(ex, 
ey, dim =1) 93 | score = lstm.forward(x,y) 94 | _, indices = torch.max(score, dim=1) 95 | if indices == 1: 96 | if l == '1': 97 | values[0] += 1 98 | else: 99 | values[2] += 1 100 | elif l == '1': 101 | values[3] += 1 102 | else: 103 | values[1] += 1 104 | p, r, f1 = compute_f1(values[0], values[1], values[2], values[3]) 105 | return p, r, f1 106 | 107 | def validation4test(info, encoding, dev, thres, use_cuda = False): 108 | values = [0, 0, 0, 0] # tp tn fp fn 109 | f = open('output.txt','w',encoding='utf-8') 110 | docs = info['data']['doc_content'] 111 | doc_att = info['data']['odc_att'] 112 | def list2str(lista): 113 | strr = '' 114 | for att in lista: 115 | for w in att: 116 | strr += w+' ' 117 | return strr 118 | 119 | thres = torch.tensor(thres) 120 | temp = dev 121 | for inst in temp: 122 | (x, y, l) = inst 123 | s = tuple_representation(doc_att, encoding, torch.LongTensor([x]), use_cuda) 124 | t = tuple_representation(doc_att, encoding, torch.LongTensor([y]), use_cuda) 125 | score = torch.nn.functional.cosine_similarity(s, t, dim=1) 126 | if score.data[0].cpu() > thres: 127 | if l == 1: 128 | values[0] += 1 129 | f.write(list2str(docs[str(x)])+'\n'+list2str(docs[str(y)])+'\n') 130 | f.write('1\t'+'1\t'+str(score.item())+'\n\n') 131 | else: 132 | values[2] += 1 133 | f.write(list2str(docs[str(x)]) + '\n' + list2str(docs[str(y)]) + '\n') 134 | f.write('0\t' + '1\t'+str(score.item())+'\n\n') 135 | elif l == 1: 136 | values[3] += 1 137 | f.write(list2str(docs[str(x)]) + '\n' + list2str(docs[str(y)]) + '\n') 138 | f.write('1\t' + '0\t'+str(score.item())+'\n\n') 139 | else: 140 | values[1] += 1 141 | p, r, f1 = compute_f1(values[0], values[1], values[2], values[3]) 142 | return p, r, f1 143 | 144 | def align_validation4test(info, model, dev, use_cuda = False): 145 | 146 | f = open('output.txt','w',encoding='utf-8') 147 | docs = info['data']['doc_content'] 148 | doc_att = info['data']['odc_att'] 149 | att_words = info['data']['att_words'] 150 | def list2str(lista): 151 | strr = '' 152 | for att in lista: 153 | for w in att: 154 | strr += w+' ' 155 | return strr 156 | 157 | values = [0, 0, 0, 0] # tp tn fp fn 158 | X, Y, L = train_batch4gcn_hw([], dev, True) 159 | predict = model.forward(torch.eye(10), X, Y, doc_att, att_words, False) 160 | _, indices = torch.max(predict, dim=1) 161 | for i in range(len(indices)): 162 | if indices[i] == 1: 163 | if L[i] == 1: 164 | values[0] += 1 165 | f.write(list2str(docs[str(X[i].item())]) + '\n' + list2str(docs[str(Y[i].item())]) + '\n') 166 | f.write('1\t' + '1\t' + '\n\n') 167 | else: 168 | values[2] += 1 169 | f.write(list2str(docs[str(X[i].item())]) + '\n' + list2str(docs[str(Y[i].item())]) + '\n') 170 | f.write('0\t' + '1\t' + '\n\n') 171 | elif L[i] == 1: 172 | values[3] += 1 173 | #values[3] += 1 174 | f.write(list2str(docs[str(X[i].item())]) + '\n' + list2str(docs[str(Y[i].item())]) + '\n') 175 | f.write('1\t' + '0\t' + '\n\n') 176 | else: 177 | values[1] += 1 178 | p, r, f1 = compute_f1(values[0], values[1], values[2], values[3]) 179 | return p, r, f1 180 | 181 | 182 | def tuple_representation(doc_att, encoding, batch, use_cuda): 183 | flag = 'att' 184 | if flag == 'att': 185 | collection = [] 186 | ## compositional ## 187 | for ins in batch: 188 | v = ins.item() 189 | atts = doc_att[str(v)] 190 | att_idx = [atts[0]] ## test on the first attribute 191 | idx = torch.LongTensor(att_idx) 192 | if use_cuda == True: 193 | idx = idx.cuda() 194 | collection.append(torch.index_select(encoding, 0, idx)) 195 | return torch.cat(collection, dim=0) 196 | 
else: 197 | if use_cuda == True: 198 | batch = batch.cuda() 199 | return torch.index_select(encoding, 0, batch) 200 | 201 | 202 | def validation4gcn_hw(gcn_hw, dev, doc_att, use_cuda): 203 | values = [0, 0, 0, 0] # tp tn fp fn 204 | X, Y, L = train_batch4gcn_hw(args, dev, True) 205 | predict = gcn_hw.forward(torch.eye(10), X, Y, doc_att, use_cuda) 206 | _, indices = torch.max(predict, dim=1) 207 | for i in range(len(indices)): 208 | if indices[i] == 1: 209 | if L[i]== 1: 210 | values[0] += 1 211 | else: 212 | values[2] += 1 213 | elif L[i] == 1: 214 | values[3] += 1 215 | else: 216 | values[1] += 1 217 | p, r, f1 = compute_f1(values[0], values[1], values[2], values[3]) 218 | return p, r, f1 219 | 220 | def validation4gcn_align(gcn_align, dev, doc_att, att_words, use_cuda): 221 | values = [0, 0, 0, 0] # tp tn fp fn 222 | X, Y, L = train_batch4gcn_hw(args, dev, True) 223 | predict = gcn_align.forward(torch.eye(10), X, Y, doc_att, att_words, use_cuda) 224 | _, indices = torch.max(predict, dim=1) 225 | for i in range(len(indices)): 226 | if indices[i] == 1: 227 | if L[i]== 1: 228 | values[0] += 1 229 | else: 230 | values[2] += 1 231 | elif L[i] == 1: 232 | values[3] += 1 233 | else: 234 | values[1] += 1 235 | p, r, f1 = compute_f1(values[0], values[1], values[2], values[3]) 236 | return p, r, f1 237 | 238 | 239 | 240 | def gcn_align_cnn_model(args): 241 | info = torch.load('data/' + 'Amazon-Google' + '.info') 242 | doc_att = info['data']['odc_att'] 243 | att_words = info['data']['att_words'] 244 | embedding = info['data']['embedding'] 245 | #embedding = [] 246 | 247 | use_cuda = torch.cuda.is_available() and args.cuda_able 248 | 249 | print('-' * 90) 250 | A = load_graph("Amazon-Google") 251 | 252 | input_dim = len(A) 253 | 254 | if use_cuda == True: 255 | A = A.cuda() 256 | 257 | train, dev, test, smap, tmap = load_tdt(info) 258 | 259 | best = 0.0 260 | 261 | criterion = nn.NLLLoss() 262 | gcn_align = GCN_alignment_cnn(input_dim, args.hidden_dim, args.kernel_num, A, embedding) 263 | if use_cuda == True: 264 | gcn_align = gcn_align.cuda() 265 | optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, gcn_align.parameters()), lr=args.lr, 266 | weight_decay=args.l2) 267 | 268 | for epoch in range(args.epochs): 269 | loader = train_batch4gcn_hw(args, train) 270 | total_loss = torch.zeros(1) 271 | for step, (batch_x, batch_y, batch_l) in enumerate(loader): 272 | if use_cuda == True: 273 | batch_x = batch_x.cuda() 274 | batch_y = batch_y.cuda() 275 | batch_l = batch_l.cuda() 276 | predict = gcn_align.forward(torch.eye(input_dim), batch_x, batch_y, doc_att, att_words, use_cuda) 277 | loss = criterion(predict, batch_l) 278 | total_loss += loss 279 | #print(loss) 280 | optimizer.zero_grad() 281 | loss.backward() 282 | optimizer.step() 283 | p, r, f1 = validation4gcn_align(gcn_align, test, doc_att, att_words, use_cuda) 284 | print(p,r,f1) 285 | if best < f1: 286 | best = f1 287 | #best_model = "model/model_" + str(epoch) + ".pkl" 288 | torch.save(gcn_align, "model/model_best.pkl") 289 | print("best", best) 290 | 291 | 292 | if __name__=="__main__": 293 | try: 294 | args = get_args() 295 | 296 | if args.model == 'gcn-align-cnn': 297 | gcn_align_cnn_model(args) 298 | 299 | 300 | except KeyboardInterrupt: 301 | print("error") 302 | -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | import scipy.sparse as sp 4 | import torch.utils.data 
as Data 5 | import pickle as pkl 6 | import random 7 | 8 | 9 | def sparse_to_tuple(sparse_mx): 10 | """Convert sparse matrix to tuple representation.""" 11 | def to_tuple(mx): 12 | if not sp.isspmatrix_coo(mx): 13 | mx = mx.tocoo() 14 | coords = np.vstack((mx.row, mx.col)).transpose() 15 | values = mx.data 16 | shape = mx.shape 17 | return coords, values, shape 18 | 19 | if isinstance(sparse_mx, list): 20 | for i in range(len(sparse_mx)): 21 | sparse_mx[i] = to_tuple(sparse_mx[i]) 22 | else: 23 | sparse_mx = to_tuple(sparse_mx) 24 | 25 | return sparse_mx 26 | 27 | def normalize_adj(adj): 28 | """Symmetrically normalize adjacency matrix.""" 29 | adj = adj + sp.eye(adj.shape[0]) 30 | adj = sp.coo_matrix(adj) 31 | rowsum = np.array(adj.sum(1)) 32 | d_inv_sqrt = np.power(rowsum, -0.5).flatten() 33 | d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0. 34 | d_mat_inv_sqrt = sp.diags(d_inv_sqrt) 35 | ''' 36 | adj += torch.eye(len(adj)) 37 | rowsum = torch.sum(adj, dim=1) 38 | rowsum = torch.rsqrt(rowsum) 39 | D = torch.diag(rowsum) 40 | temp = torch.mm(D, adj) 41 | return torch.mm(temp, D) 42 | ''' 43 | return adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt).tocoo() 44 | 45 | def preprocess_adj(adj): 46 | """Preprocessing of adjacency matrix for simple GCN model and conversion to tuple representation.""" 47 | adj_normalized = normalize_adj(adj + sp.eye(adj.shape[0])) 48 | #return sparse_to_tuple(adj_normalized) 49 | return adj_normalized 50 | 51 | 52 | def gen_rand_neg(k, isSource, r_id, size, smap, tmap): 53 | idx = [] 54 | for i in range(k): 55 | if isSource == True: 56 | rn = random.randint(0, size) 57 | while rn in smap[r_id]: 58 | rn = random.randint(0, size) 59 | idx.append(rn) 60 | else: 61 | rn = random.randint(0, size) 62 | while rn in tmap[r_id]: 63 | rn = random.randint(0, size) 64 | idx.append(rn) 65 | return idx 66 | 67 | def gen_rand_all_neg(k, smap, tmap, s_size, t_size): 68 | neg = [] 69 | s_idx = [] 70 | t_idx = [] 71 | for i in range(k): 72 | rn = random.randint(0, s_size) 73 | s_idx.append(rn) 74 | for i in range(k): 75 | rn = random.randint(0, t_size) 76 | if s_idx[i] in smap: 77 | while rn in smap[s_idx[i]]: 78 | rn = random.randint(0, t_size) 79 | t_idx.append(rn) 80 | for i in range(k): 81 | neg.append((s_idx[i],t_idx[i],-1)) 82 | return neg 83 | 84 | 85 | def check_conflict(anker, isSource, rn, smap, tmap): 86 | if isSource == True: 87 | if rn not in smap[anker]: 88 | return True 89 | return False 90 | else: 91 | if rn not in tmap[anker]: 92 | return True 93 | return False 94 | 95 | def label_batch_for_lr(batch): 96 | for i in range(len(batch)): 97 | if batch[i] == -1: 98 | batch[i] = 0 99 | return batch 100 | 101 | def hard_negative(anker, encoding, base, k, anIsSource, r_id): 102 | DAt = [torch.dist(anker, encoding[i], 2) for i in range(len(encoding))] 103 | DAt = torch.stack(DAt, 0) 104 | sorted, indices = torch.sort(DAt) 105 | topk = [] 106 | for i in range(len(encoding)): 107 | if sorted[i] > base: 108 | idx = [] 109 | j = i 110 | for tt in range(k): 111 | while check_conflict(r_id, anIsSource, indices[j]) == False: 112 | j += 1 113 | idx.append(j) 114 | topk = [encoding[j] for j in idx] 115 | break 116 | if len(topk) == 0: 117 | randneg = gen_rand_neg(k,anIsSource,r_id,len(encoding)) 118 | topk = [encoding[j] for j in randneg] 119 | return torch.stack(topk,0) 120 | 121 | def toTensor(train): 122 | S = [] 123 | T = [] 124 | L = [] 125 | #tperm = torch.randperm(len(train)) 126 | for i in range(len(train)): 127 | (s,t,l) = train[i] 128 | S.append(int(s)) 129 | T.append(int(t)) 
130 | L.append(int(l)) 131 | SI = torch.LongTensor(S) 132 | TI = torch.LongTensor(T) 133 | LI = torch.FloatTensor(L) 134 | return SI, TI, LI 135 | 136 | def load_label(): 137 | train = [] 138 | dev = [] 139 | test = [] 140 | smap = {} 141 | tmap = {} 142 | with open('data/AG_train','r') as f: 143 | lines = f.readlines() 144 | for line in lines: 145 | pair = line.strip().split('\t') 146 | tup = (pair[0],pair[1],1) 147 | train.append(tup) 148 | if pair[0] not in smap: 149 | smap[pair[0]] = set() 150 | smap[pair[0]].add(pair[1]) 151 | if pair[1] not in tmap: 152 | tmap[pair[1]] = set() 153 | tmap[pair[1]].add(pair[0]) 154 | 155 | with open('data/AG_dev','r') as f: 156 | lines = f.readlines() 157 | for line in lines: 158 | pair = line.strip().split('\t') 159 | tup = (pair[0], pair[1], 1) 160 | dev.append(tup) 161 | if pair[0] not in smap: 162 | smap[pair[0]] = set() 163 | smap[pair[0]].add(pair[1]) 164 | if pair[1] not in tmap: 165 | tmap[pair[1]] = set() 166 | tmap[pair[1]].add(pair[0]) 167 | with open('data/AG_test','r') as f: 168 | lines = f.readlines() 169 | for line in lines: 170 | pair = line.strip().split('\t') 171 | tup = (pair[0], pair[1], 1) 172 | test.append(tup) 173 | if pair[0] not in smap: 174 | smap[pair[0]] = set() 175 | smap[pair[0]].add(pair[1]) 176 | if pair[1] not in tmap: 177 | tmap[pair[1]] = set() 178 | tmap[pair[1]].add(pair[0]) 179 | return train, dev, test, smap, tmap 180 | 181 | def train_batch(args, info, train, smap, tmap, neg_sample = False, hard_example = False): 182 | if neg_sample == True: 183 | temp = [] 184 | temp.extend(train) 185 | if hard_example == False: 186 | neg = gen_rand_all_neg(len(train) * args.negative_num, smap, tmap, info['tableA_len'], info['tableB_len']) 187 | temp.extend(neg) 188 | else: 189 | temp = train 190 | S, T, L = toTensor(temp) 191 | data = Data.TensorDataset(S, T, L) 192 | loader = Data.DataLoader( 193 | dataset=data, 194 | batch_size=args.batch, 195 | shuffle=True, 196 | num_workers=2, 197 | ) 198 | return loader 199 | 200 | def pre_seprate_batch(args, info): 201 | tableA_len = info['tableA_len'] 202 | tableB_len = info['tableB_len'] 203 | nsample = [] 204 | for i in range(tableA_len): 205 | for j in range(tableA_len): 206 | if i == j: 207 | continue 208 | nsample.append((i, j, -1)) 209 | 210 | for i in range(tableA_len, tableB_len): 211 | for j in range(tableA_len, tableB_len): 212 | if i == j: 213 | continue 214 | nsample.append((i, j, -1)) 215 | L = torch.zeros(len(nsample)) 216 | L = torch.add(L, -1) 217 | S, T, L = toTensor(nsample) 218 | data = Data.TensorDataset(S, T, L) 219 | loader = Data.DataLoader( 220 | dataset=data, 221 | batch_size=args.batch, 222 | shuffle=True, 223 | num_workers=2, 224 | ) 225 | return loader 226 | 227 | def compute_f1(tp, tn, fp, fn): 228 | p = 0 229 | r = 0 230 | f1 = 0 231 | if tp + fp != 0: 232 | p = tp / (tp + fp) 233 | if tp + fn != 0: 234 | r = tp / (tp + fn) 235 | if p + r != 0: 236 | f1 = 2 * p * r / (p + r) 237 | print("tp,tn,fp,fn:", tp, tn, fp, fn) 238 | return p, r, f1 239 | 240 | def load_graph(dataset): 241 | names = ['adj'] 242 | objects = [] 243 | for i in range(len(names)): 244 | with open("data/ind.{}.{}".format(dataset, names[i]), 'rb') as f: 245 | objects.append(pkl.load(f)) 246 | adj = objects[0] 247 | adj = adj + adj.T.multiply(adj.T > adj) - adj.multiply(adj.T > adj) 248 | 249 | adj = preprocess_adj(adj) 250 | adj = torch.from_numpy(adj.todense()).float() 251 | return adj 252 | 253 | 254 | def load_tdt(info): 255 | train = info['data']['train'] 256 | dev = info['data']['dev'] 
257 | test = info['data']['test'] 258 | smap = {} 259 | tmap = {} 260 | def build_stmap(mapping, smap, tmap): 261 | for (x, y, l) in mapping: 262 | if l != 1: 263 | continue 264 | if x not in smap: 265 | smap[x] = set() 266 | smap[x].add(y) 267 | if y not in tmap: 268 | tmap[y] = set() 269 | tmap[y].add(x) 270 | build_stmap(train, smap, tmap) 271 | build_stmap(dev, smap, tmap) 272 | build_stmap(test, smap, tmap) 273 | return train, dev, test, smap, tmap 274 | 275 | def train_batch4lstm(args, train): 276 | S = [] 277 | T = [] 278 | L = [] 279 | for (x,y,l) in train: 280 | S.append(torch.LongTensor(x).view(1,-1)) 281 | T.append(torch.LongTensor(y).view(1,-1)) 282 | L.append(int(l)) 283 | S = torch.cat(S,dim=0) 284 | T = torch.cat(T, dim=0) 285 | L = torch.LongTensor(L) 286 | data = Data.TensorDataset(S, T, L) 287 | loader = Data.DataLoader( 288 | dataset=data, 289 | batch_size=args.batch, 290 | shuffle=True, 291 | num_workers=1, 292 | ) 293 | return loader 294 | 295 | def train_batch4gcn_hw(args, train, isTest = False): 296 | S = [] 297 | T = [] 298 | L = [] 299 | for (x,y,l) in train: 300 | S.append(x) 301 | T.append(y) 302 | if l == -1 or l == 0: 303 | L.append(0) 304 | else: 305 | L.append(1) 306 | S = torch.LongTensor(S) 307 | T = torch.LongTensor(T) 308 | L = torch.LongTensor(L) 309 | if isTest == True: 310 | return S, T, L 311 | data = Data.TensorDataset(S, T, L) 312 | loader = Data.DataLoader( 313 | dataset=data, 314 | batch_size=args.batch, 315 | shuffle=True, 316 | num_workers=1, 317 | ) 318 | return loader --------------------------------------------------------------------------------
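
Annotation (editor's note, not part of utils.py): load_graph, preprocess_adj, and normalize_adj above build the symmetrically normalized adjacency D^{-1/2}(A + I)D^{-1/2} that every model stores as self.A, so with featureless=True the first propagation step is simply torch.mm(A, W). Two details worth noting: because load_graph symmetrizes the matrix, the adj.dot(d).transpose().dot(d) form in normalize_adj is equivalent to D^{-1/2} A D^{-1/2}; and preprocess_adj passes adj + sp.eye(...) into normalize_adj, which adds the identity again, so self-loops effectively carry weight 2 before normalization. The sketch below is a minimal worked example that adds the identity once; the toy graph and function name are illustrative only.

import numpy as np
import scipy.sparse as sp
import torch

# Toy 3-node path graph: edges (0,1) and (1,2); already symmetric, like load_graph's output.
adj = sp.coo_matrix(np.array([[0., 1., 0.],
                              [1., 0., 1.],
                              [0., 1., 0.]]))

def sym_normalize(adj):
    # D^{-1/2} (A + I) D^{-1/2}: add self-loops once, then rescale rows/columns by degree.
    adj = sp.coo_matrix(adj + sp.eye(adj.shape[0]))
    rowsum = np.array(adj.sum(1)).flatten()
    d_inv_sqrt = np.power(rowsum, -0.5)
    d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.0        # guard against isolated (degree-0) nodes
    d = sp.diags(d_inv_sqrt)
    return d.dot(adj).dot(d).tocoo()

A_hat = torch.from_numpy(sym_normalize(adj).toarray()).float()
print(A_hat)

# With featureless=True, the first "GCN layer" in the models above is just A_hat @ W:
W = torch.randn(3, 4)
layer1 = torch.mm(A_hat, W)
print(layer1.shape)   # torch.Size([3, 4])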