├── .gitignore ├── LICENSE ├── README.md ├── data └── link_prediction │ ├── CA-GrQc_test.txt │ ├── CA-GrQc_test_neg.txt │ └── CA-GrQc_train.txt ├── pre_train └── link_prediction │ └── CA-GrQc_pre_train.emb └── src ├── GraphGAN ├── __init__.py ├── config.py ├── discriminator.py ├── generator.py └── graph_gan.py ├── evaluation ├── __init__.py └── link_prediction.py └── utils.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.swp 3 | ======= 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *,cover 49 | .hypothesis/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # IPython Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # dotenv 82 | .env 83 | 84 | # virtualenv 85 | venv/ 86 | ENV/ 87 | 88 | # Spyder project settings 89 | .spyderproject 90 | 91 | # Rope project settings 92 | .ropeproject 93 | 94 | 95 | .idea/* -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Tongyang Liu 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## GraphGAN 2 | 3 | - This repository is the implementation of [GraphGAN](https://www.aaai.org/ocs/index.php/AAAI/AAAI18/paper/view/16611) ([arXiv](https://arxiv.org/abs/1711.08267)): 4 | > GraphGAN: Graph Representation Learning With Generative Adversarial Nets 5 | Hongwei Wang, Jia Wang, Jialin Wang, Miao Zhao, Weinan Zhang, Fuzheng Zhang, Xing Xie, Minyi Guo 6 | 32nd AAAI Conference on Artificial Intelligence, 2018 7 | 8 | ![](https://github.com/hwwang55/GraphGAN/blob/master/framework.jpg) 9 | 10 | GraphGAN unifies two schools of graph representation learning methodologies: generative methods and discriminative methods, via adversarial training in a minimax game. 11 | The generator is guided by the signals from the discriminator and improves its generating performance, while the discriminator is pushed by the generator to better distinguish ground truth from generated samples. 
12 | 13 | 14 | 15 | ### Files in the folder 16 | - `data/`: training and test data 17 | - `pre_train/`: pre-trained node embeddings 18 | > Note: the dimension of pre-trained node embeddings should equal n_emb in src/GraphGAN/config.py 19 | - `results/`: evaluation results and the learned embeddings of the generator and the discriminator 20 | - `src/`: source codes 21 | 22 | 23 | ### Requirements 24 | The code has been tested running under Python 3.6.5, with the following packages installed (along with their dependencies): 25 | 26 | - pytorch == 1.3.0 27 | - tqdm == 4.23.4 (for displaying the progress bar) 28 | - numpy == 1.14.3 29 | - sklearn == 0.19.1 30 | 31 | 32 | ### Input format 33 | The input data should be an undirected graph in which node IDs start from *0* to *N-1* (*N* is the number of nodes in the graph). Each line contains two node IDs indicating an edge in the graph. 34 | 35 | ##### txt file sample 36 | 37 | ```0 1``` 38 | ```3 2``` 39 | ```...``` 40 | 41 | 42 | ### Basic usage 43 | ```mkdir cache``` 44 | ```cd src/GraphGAN``` 45 | ```python graph_gan.py``` 46 | 47 | 48 | -------------------------------------------------------------------------------- /data/link_prediction/CA-GrQc_test.txt: -------------------------------------------------------------------------------- 1 | 2138 1632 2 | 3467 3771 3 | 1008 2034 4 | 4656 3417 5 | 3752 3949 6 | 4202 5085 7 | 830 2710 8 | 4818 1319 9 | 2880 4969 10 | 1961 746 11 | 2516 3319 12 | 422 3405 13 | 1037 4454 14 | 4530 3619 15 | 239 1690 16 | 3213 1004 17 | 2930 2986 18 | 3485 5009 19 | 2005 3303 20 | 841 4067 21 | 4290 4386 22 | 3793 3467 23 | 4144 4473 24 | 4038 2841 25 | 2098 643 26 | 1114 4521 27 | 4239 5187 28 | 2358 2483 29 | 2925 1227 30 | 2253 1502 31 | 2223 4557 32 | 1637 1280 33 | 4724 1538 34 | 227 1340 35 | 467 3232 36 | 1535 4494 37 | 3135 1984 38 | 2542 154 39 | 3614 3537 40 | 3784 4470 41 | 691 930 42 | 38 2677 43 | 3350 1333 44 | 997 4405 45 | 3174 3231 46 | 2723 498 47 | 3123 4855 48 | 2326 1710 
49 | 196 1864 50 | 1610 537 51 | 2069 1527 52 | 142 1905 53 | 4029 2559 54 | 512 2174 55 | 4950 5116 56 | 3611 2529 57 | 1343 264 58 | 2867 3341 59 | 3586 4258 60 | 2621 526 61 | 4844 1674 62 | 600 3502 63 | 251 213 64 | 830 1731 65 | 399 468 66 | 4760 981 67 | 401 2757 68 | 4906 5111 69 | 1204 4288 70 | 1364 4784 71 | 2194 1202 72 | 2146 106 73 | 856 1757 74 | 5 2476 75 | 3496 4566 76 | 5233 400 77 | 664 1412 78 | 4443 2647 79 | 3528 3697 80 | 2735 1296 81 | 324 3833 82 | 1476 4494 83 | 387 1646 84 | 2901 5100 85 | 1055 170 86 | 4055 3367 87 | 3027 4656 88 | 1227 3117 89 | 225 4177 90 | 4770 1370 91 | 4415 4806 92 | 600 2312 93 | 551 2467 94 | 2700 993 95 | 3830 493 96 | 5028 794 97 | 1953 4498 98 | 15 4401 99 | 4906 104 100 | 1352 3738 101 | 2471 498 102 | 4454 1700 103 | 1570 1760 104 | 3861 1787 105 | 2707 2920 106 | 3210 3274 107 | 1014 3872 108 | 3420 2095 109 | 4036 4182 110 | 2531 3274 111 | 3235 1037 112 | 351 4814 113 | 1482 1853 114 | 4782 2446 115 | 122 2319 116 | 1642 5191 117 | 3784 1503 118 | 841 116 119 | 2487 4770 120 | 3307 2183 121 | 2892 116 122 | 1246 230 123 | 1412 3184 124 | 4290 3378 125 | 476 1155 126 | 4587 4690 127 | 1862 95 128 | 1042 5022 129 | 2117 3352 130 | 3801 4397 131 | 4949 1037 132 | 3822 2653 133 | 4893 4454 134 | 813 1035 135 | 2362 4665 136 | 4396 1740 137 | 2837 44 138 | 1354 137 139 | 1830 1023 140 | 1486 4563 141 | 770 711 142 | 3784 1202 143 | 1699 1700 144 | 5217 1266 145 | 609 3218 146 | 3565 3372 147 | 1434 1407 148 | 4681 3341 149 | 4197 2559 150 | 4861 4884 151 | 4238 610 152 | 3847 1139 153 | 79 53 154 | 1538 3156 155 | 459 5227 156 | 250 3798 157 | 3813 3685 158 | 4933 445 159 | 2034 3448 160 | 2467 1781 161 | 69 3793 162 | 5073 3210 163 | 3631 4364 164 | 2644 3363 165 | 725 1253 166 | 2497 3784 167 | 4248 1560 168 | 1340 806 169 | 2083 2769 170 | 2465 3023 171 | 936 1547 172 | 3920 944 173 | 3800 1195 174 | 3719 972 175 | 4125 1715 176 | 4660 2428 177 | 2531 485 178 | 4151 2541 179 | 3300 4537 180 | 4889 2034 181 
| 864 1641 182 | 2761 3408 183 | 4777 387 184 | 3631 4081 185 | 3717 4019 186 | 3013 2266 187 | 4290 4850 188 | 4181 254 189 | 152 1560 190 | 4127 291 191 | 4850 4921 192 | 33 1037 193 | 4263 562 194 | 3569 3761 195 | 630 5073 196 | 4231 4295 197 | 4156 1690 198 | 2390 3104 199 | 3893 2936 200 | 4119 4685 201 | 2607 4614 202 | 1044 1671 203 | 3117 5230 204 | 2379 2670 205 | 507 5224 206 | 2166 2868 207 | 401 465 208 | 4497 3277 209 | 1762 488 210 | 1211 96 211 | 498 2581 212 | 1911 3809 213 | 1114 1718 214 | 1862 5022 215 | 265 3854 216 | 1680 4401 217 | 2657 2603 218 | 5236 1895 219 | 1235 2250 220 | 1326 1018 221 | 4909 2584 222 | 3776 1445 223 | 2681 2070 224 | 1456 449 225 | 2320 1297 226 | 3884 4268 227 | 1718 1202 228 | 1114 3822 229 | 3877 3456 230 | 3153 3738 231 | 2155 1043 232 | 2952 4612 233 | 2275 4978 234 | 3770 2772 235 | 2771 1306 236 | 3696 3860 237 | 4567 3532 238 | 513 154 239 | 4809 4082 240 | 150 3219 241 | 2618 4013 242 | 4702 3160 243 | 1699 4893 244 | 1352 654 245 | 1114 1503 246 | 4160 4168 247 | 1570 2596 248 | 5118 4712 249 | 1014 4396 250 | 1396 474 251 | 1646 1996 252 | 3485 3489 253 | 3216 2386 254 | 5006 4726 255 | 872 2188 256 | 2740 1282 257 | 4213 364 258 | 1757 4473 259 | 2845 2547 260 | 2933 1958 261 | 4636 1431 262 | 4849 526 263 | 2742 3448 264 | 931 2114 265 | 96 2244 266 | 3733 1282 267 | 3207 32 268 | 4800 991 269 | 600 970 270 | 3350 2698 271 | 914 4561 272 | 3191 4352 273 | 2892 3898 274 | 3341 4716 275 | 2238 2889 276 | 1019 4024 277 | 960 1761 278 | 3809 4825 279 | 1978 4435 280 | 4237 256 281 | 1961 16 282 | 1718 3112 283 | 4971 4917 284 | 926 1674 285 | 1685 3484 286 | 3476 5009 287 | 239 1830 288 | 144 2677 289 | 15 5189 290 | 1500 3144 291 | 3273 315 292 | 822 4907 293 | 33 4405 294 | 2380 3272 295 | 522 4392 296 | 3455 3845 297 | 1760 2170 298 | 1842 1481 299 | 2497 4113 300 | 2178 3054 301 | 4695 1897 302 | 150 2735 303 | 2422 1183 304 | 4071 3479 305 | 268 2174 306 | 372 1533 307 | 3867 1722 308 | 2582 3232 309 | 
4782 2128 310 | 1233 1126 311 | 1519 5142 312 | 4550 978 313 | 1114 4401 314 | 3578 936 315 | 697 2563 316 | 567 247 317 | 4520 875 318 | 3179 1943 319 | 1500 1282 320 | 4219 195 321 | 5026 3803 322 | 3669 1343 323 | 2757 3473 324 | 1888 1888 325 | 1340 701 326 | 1685 3353 327 | 1329 3733 328 | 1680 1446 329 | 4076 2964 330 | 4364 4655 331 | 4507 4399 332 | 2716 136 333 | 2106 5014 334 | 2457 1023 335 | 1762 5073 336 | 3884 3883 337 | 1298 2353 338 | 1023 3784 339 | 2918 3715 340 | 3733 3318 341 | 1407 4266 342 | 4741 4831 343 | 2161 4161 344 | 838 2581 345 | 460 1694 346 | 2271 1859 347 | 2708 2280 348 | 1905 4056 349 | 1202 1832 350 | 5096 2292 351 | 549 4463 352 | 4781 2871 353 | 4894 4013 354 | 636 5212 355 | 4688 110 356 | 2247 4813 357 | 4770 488 358 | 2091 394 359 | 4019 5237 360 | 4736 1280 361 | 3261 1032 362 | 4721 4188 363 | 4667 1155 364 | 1147 2770 365 | 337 340 366 | 2750 206 367 | 3365 1321 368 | 984 1761 369 | 2409 1132 370 | 4506 5126 371 | 2621 1369 372 | 1982 5184 373 | 2910 2867 374 | 4949 1035 375 | 3557 4824 376 | 4113 1718 377 | 4003 4057 378 | 4724 3156 379 | 205 141 380 | 4961 4140 381 | 4470 3448 382 | 1949 2581 383 | 90 2577 384 | 4348 377 385 | 4576 5006 386 | 2497 4368 387 | 2621 4849 388 | 2292 4760 389 | 4074 1928 390 | 304 4156 391 | 4484 5054 392 | 3216 1672 393 | 4145 1013 394 | 3703 3577 395 | 2928 4762 396 | 2453 376 397 | 1889 1926 398 | 2931 4086 399 | 1705 506 400 | 3622 2004 401 | 2853 3416 402 | 4944 2647 403 | 1911 4843 404 | 5187 2522 405 | 4538 4695 406 | 3281 654 407 | 2736 4502 408 | 3703 3458 409 | 257 508 410 | 1703 1435 411 | 2408 5200 412 | 813 3809 413 | 4238 334 414 | 779 2986 415 | 2483 1736 416 | 473 3941 417 | 1441 2003 418 | 1334 579 419 | 4405 1332 420 | 1037 2508 421 | 3467 16 422 | 3822 902 423 | 2362 5209 424 | 4671 5177 425 | 2467 1757 426 | 897 1132 427 | 2867 1963 428 | 33 1933 429 | 2676 4665 430 | 2326 227 431 | 3870 4869 432 | 3992 2293 433 | 3275 1756 434 | 3281 2584 435 | 5009 4800 436 | 2154 961 
437 | 2176 5110 438 | 2172 759 439 | 2853 1094 440 | 3250 1579 441 | 3693 1416 442 | 2468 1951 443 | 3484 4056 444 | 1374 2928 445 | 2266 2737 446 | 5230 717 447 | 856 1912 448 | 2291 1205 449 | 4924 2372 450 | 4392 4995 451 | 90 2192 452 | 2742 1690 453 | 2464 2370 454 | 2954 3799 455 | 1510 1935 456 | 4530 3025 457 | 2206 1548 458 | 670 4997 459 | 3112 1503 460 | 376 3432 461 | 75 4527 462 | 2481 238 463 | 5180 3178 464 | 4368 141 465 | 3697 380 466 | 4665 2616 467 | 1029 2061 468 | 2442 1500 469 | 1002 1370 470 | 1953 3954 471 | 2093 3533 472 | 3437 26 473 | 2910 3031 474 | 2848 1980 475 | 2457 4401 476 | 1155 182 477 | 2910 5113 478 | 189 3383 479 | 1762 1770 480 | 1467 3122 481 | 654 838 482 | 2379 2676 483 | 4576 3174 484 | 4550 3986 485 | 2926 3739 486 | 1737 3851 487 | 1672 2541 488 | 4435 2116 489 | 2138 2978 490 | 304 526 491 | 3098 2218 492 | 3341 4665 493 | 1572 5157 494 | 230 3708 495 | 4399 2160 496 | 1862 1718 497 | 4076 4880 498 | 1472 2904 499 | 1476 5144 500 | 2284 1224 501 | 4346 1855 502 | 2577 574 503 | 1473 4494 504 | 4038 2933 505 | 1909 1744 506 | 3898 1933 507 | 4808 4602 508 | 4481 622 509 | 4169 1368 510 | 1581 2869 511 | 1820 1461 512 | 2834 1675 513 | 3842 4270 514 | 2260 139 515 | 1264 2676 516 | 1396 2281 517 | 2532 3590 518 | 1862 2034 519 | 200 3805 520 | 3403 2207 521 | 4306 3857 522 | 967 568 523 | 2582 2878 524 | 521 2723 525 | 2613 3378 526 | 3935 4144 527 | 3752 4217 528 | 4849 2681 529 | 639 135 530 | 1103 2320 531 | 856 4307 532 | 1517 4128 533 | 1619 4015 534 | 2885 1385 535 | 4481 1299 536 | 1812 986 537 | 4048 3165 538 | 4416 4280 539 | 636 2986 540 | 2515 4631 541 | 636 3195 542 | 1352 3281 543 | 4765 582 544 | 4736 1558 545 | 857 3777 546 | 203 4965 547 | 4906 5094 548 | 1070 2584 549 | 1951 817 550 | 4962 3967 551 | 4961 1886 552 | 0 1416 553 | 4362 1164 554 | 4849 1680 555 | 3883 1659 556 | 4113 1961 557 | 2376 2581 558 | 4795 706 559 | 1008 2697 560 | 661 1461 561 | 1159 1559 562 | 3322 3795 563 | 3775 948 564 | 4974 
631 565 | 3274 4907 566 | 3020 3456 567 | 3023 2760 568 | 1889 847 569 | 4180 4041 570 | 3578 878 571 | 5180 2925 572 | 202 4701 573 | 3418 3941 574 | 15 2742 575 | 3884 1862 576 | 4443 5037 577 | 3376 943 578 | 1659 4561 579 | 1243 1700 580 | 4327 4532 581 | 1211 2244 582 | 698 1859 583 | 1549 1348 584 | 2199 664 585 | 955 4871 586 | 175 4564 587 | 476 3557 588 | 1830 1961 589 | 1659 1497 590 | 2442 2018 591 | 4458 4124 592 | 852 2775 593 | 2376 4596 594 | 2584 2723 595 | 2704 3396 596 | 2037 2581 597 | 3634 4849 598 | 2365 240 599 | 2532 5142 600 | 3915 4840 601 | 3084 2066 602 | 3697 4326 603 | 3240 4087 604 | 4569 5174 605 | 2497 146 606 | 676 3535 607 | 914 4341 608 | 4156 1503 609 | 2723 2581 610 | 4493 2308 611 | 2556 541 612 | 2531 3210 613 | 2235 4079 614 | 2742 16 615 | 4782 3231 616 | 3963 3207 617 | 4973 2188 618 | 1429 3803 619 | 1172 2724 620 | 2621 2194 621 | 324 2565 622 | 3229 5094 623 | 204 463 624 | 1231 4401 625 | 4749 53 626 | 565 4982 627 | 3235 511 628 | 982 3315 629 | 1134 1519 630 | 1221 2733 631 | 3920 982 632 | 5151 943 633 | 4122 293 634 | 2220 3782 635 | 3575 139 636 | 1512 4050 637 | 2634 3826 638 | 3109 1878 639 | 1352 586 640 | 949 4262 641 | 3743 3729 642 | 2457 4156 643 | 4732 52 644 | 3215 4999 645 | 1393 5017 646 | 1329 2166 647 | 2389 3121 648 | 1409 1323 649 | 527 3154 650 | 946 3280 651 | 3614 5011 652 | 4316 4255 653 | 86 3569 654 | 5166 2161 655 | 1549 2269 656 | 4843 1113 657 | 2924 2839 658 | 1476 5142 659 | 1822 2725 660 | 1298 1766 661 | 1181 1919 662 | 4949 3009 663 | 4724 4375 664 | 3083 264 665 | 1371 559 666 | 3618 3591 667 | 438 4795 668 | 3353 1342 669 | 2376 838 670 | 2760 2976 671 | 3160 1245 672 | 549 3931 673 | 3667 3265 674 | 3037 2816 675 | 3251 4631 676 | 273 463 677 | 4514 2260 678 | 1266 2750 679 | 3953 5238 680 | 3909 5225 681 | 1411 1192 682 | 2931 3191 683 | 2910 3178 684 | 5117 882 685 | 106 1867 686 | 600 4901 687 | 1773 3047 688 | 1961 602 689 | 2037 2562 690 | 438 691 691 | 1658 2576 692 | 1434 196 
693 | 1762 1870 694 | 491 4411 695 | 1369 1690 696 | 3112 4484 697 | 4544 5196 698 | 844 4787 699 | 1565 4097 700 | 508 1659 701 | 844 179 702 | 3037 2763 703 | 1370 3274 704 | 1114 2034 705 | 205 4093 706 | 2291 2897 707 | 257 2846 708 | 948 2588 709 | 4830 3699 710 | 619 2768 711 | 1410 3539 712 | 98 2723 713 | 4113 1832 714 | 582 3156 715 | 2932 3435 716 | 3098 666 717 | 3090 5004 718 | 3239 1759 719 | 3809 1761 720 | 3518 1091 721 | 2634 990 722 | 3614 2674 723 | 3580 4134 724 | 4587 5057 725 | 67 71 726 | 3822 1832 727 | 1576 824 728 | 5073 488 729 | 4439 4305 730 | 1789 3685 731 | 3634 526 732 | 3709 4651 733 | 4873 2537 734 | 1415 1887 735 | 2634 4317 736 | 2604 4314 737 | 4913 1632 738 | 1762 2771 739 | 1380 4368 740 | 257 2868 741 | 2457 15 742 | 4782 1887 743 | 1886 5037 744 | 3266 4426 745 | 3311 4026 746 | 2356 38 747 | 2853 1011 748 | 3716 3060 749 | 3298 4569 750 | 2864 506 751 | 485 3803 752 | 2867 2843 753 | 3833 2905 754 | 1565 2654 755 | 2476 3351 756 | 239 1369 757 | 2170 1174 758 | 4876 989 759 | 4647 778 760 | 3547 1097 761 | 1911 3739 762 | 3497 2130 763 | 4011 3871 764 | 781 100 765 | 2037 4765 766 | 2246 4671 767 | 5194 3054 768 | 3652 498 769 | 5189 526 770 | 3867 69 771 | 5053 4735 772 | 2764 2634 773 | 3418 2442 774 | 3799 3222 775 | 116 1243 776 | 2867 3225 777 | 864 1168 778 | 706 1296 779 | 3898 2439 780 | 4185 1780 781 | 521 3156 782 | 4550 1646 783 | 1332 1761 784 | 2119 2505 785 | 1710 2882 786 | 788 5100 787 | 4791 3467 788 | 830 2643 789 | 3818 5145 790 | 1882 202 791 | 3737 1615 792 | 142 430 793 | 4949 4137 794 | 2695 547 795 | 2577 1911 796 | 4418 1569 797 | 3856 1691 798 | 2638 2312 799 | 4113 902 800 | 787 2828 801 | 1203 1306 802 | 1541 654 803 | 4337 1390 804 | 1589 4795 805 | 3009 5116 806 | 2895 4326 807 | 1659 1920 808 | 3484 3883 809 | 401 4508 810 | 2928 2373 811 | 2621 2764 812 | 3733 3144 813 | 3109 3657 814 | 722 1373 815 | 3750 4036 816 | 3703 4168 817 | 173 2246 818 | 3109 2338 819 | 3809 5010 820 | 4843 5010 821 
| 576 5023 822 | 2904 4144 823 | 156 1910 824 | 4791 4368 825 | 4228 2025 826 | 1862 4403 827 | 15 481 828 | 1870 3210 829 | 4508 4071 830 | 884 3135 831 | 3224 40 832 | 3549 3729 833 | 574 2439 834 | 4466 2019 835 | 892 2566 836 | 3235 1243 837 | 2901 3374 838 | 552 3607 839 | 1588 3692 840 | 2127 4486 841 | 2641 4789 842 | 3634 2194 843 | 1103 4639 844 | 3021 3463 845 | 2670 1155 846 | 3715 588 847 | 1996 1404 848 | 4733 2749 849 | 619 5240 850 | 4525 1842 851 | 3752 294 852 | 510 208 853 | 4456 3579 854 | 3172 264 855 | 1482 2056 856 | 3588 1371 857 | 2413 3422 858 | 427 2347 859 | 1982 5183 860 | 4899 2738 861 | 2022 367 862 | 485 4996 863 | 263 3404 864 | 4300 1886 865 | 574 1332 866 | 3023 4043 867 | 2465 3703 868 | 2138 3269 869 | 4949 960 870 | 1819 3445 871 | 1961 510 872 | 4842 1801 873 | 3717 4312 874 | 2016 5026 875 | 2194 3467 876 | 3897 1222 877 | 5143 2499 878 | 3332 4952 879 | 562 750 880 | 3135 1678 881 | 1008 4889 882 | 5 4100 883 | 630 3274 884 | 415 1002 885 | 1100 1471 886 | 521 1070 887 | 3343 3710 888 | 2644 2960 889 | 227 2555 890 | 2688 3689 891 | 4616 3422 892 | 4177 1376 893 | 3719 2269 894 | 4272 1064 895 | 4622 4712 896 | 3657 2134 897 | 3720 3827 898 | 3484 926 899 | 3791 4991 900 | 1226 899 901 | 1820 661 902 | 2467 4144 903 | 1961 4904 904 | 4736 1589 905 | 1459 4143 906 | 193 1285 907 | 512 2070 908 | 1040 472 909 | 3109 5058 910 | 678 5137 911 | 302 5221 912 | 2700 3103 913 | 2638 5208 914 | 4566 2718 915 | 2047 4008 916 | 4611 1538 917 | 1953 2621 918 | 3334 3609 919 | 401 4296 920 | 3216 2541 921 | 4562 5107 922 | 2622 1443 923 | 31 585 924 | 5103 1628 925 | 1369 4470 926 | 1215 5012 927 | 2689 3601 928 | 1243 2439 929 | 3749 4932 930 | 1445 2373 931 | 3160 4990 932 | 4963 1659 933 | 830 2669 934 | 2892 5116 935 | 499 1411 936 | 1035 4825 937 | 1070 4478 938 | 4707 778 939 | 3770 1908 940 | 4323 1137 941 | 3109 4324 942 | 587 2037 943 | 1255 2972 944 | 1891 3528 945 | 3736 506 946 | 2596 2833 947 | 4416 509 948 | 3800 3901 949 | 
5152 3363 950 | 99 781 951 | 2997 2111 952 | 2843 2385 953 | 5142 1877 954 | 2487 2531 955 | 1094 1995 956 | 703 2849 957 | 1205 2056 958 | 385 144 959 | 4619 2325 960 | 4671 4779 961 | 574 4825 962 | 128 4729 963 | 4391 4389 964 | 2853 1415 965 | 5241 406 966 | 3013 4529 967 | 1821 380 968 | 1842 3341 969 | 2497 1160 970 | 4416 2071 971 | 827 985 972 | 1953 1908 973 | 2764 696 974 | 4113 15 975 | 5207 2133 976 | 16 1503 977 | 1554 1609 978 | 4894 2569 979 | 3893 3272 980 | 1110 4043 981 | 4981 5151 982 | 4291 3069 983 | 4315 1006 984 | 3450 402 985 | 3912 3952 986 | 328 2621 987 | 4386 4929 988 | 1167 1603 989 | 3776 88 990 | 1362 3180 991 | 1718 512 992 | 4471 2934 993 | 1996 2855 994 | 3049 286 995 | 4128 4124 996 | 425 4833 997 | 4793 1202 998 | 1928 449 999 | 4909 98 1000 | 1867 1884 1001 | 1001 4870 1002 | 5199 1754 1003 | 3809 1933 1004 | 3017 3281 1005 | 1352 3153 1006 | 1575 3207 1007 | 117 3643 1008 | 4688 671 1009 | 1944 3093 1010 | 3737 4514 1011 | 501 4321 1012 | 3019 5145 1013 | 1008 481 1014 | 2286 1441 1015 | 654 586 1016 | 4177 1433 1017 | 746 208 1018 | 4125 1666 1019 | 2925 4693 1020 | 790 1721 1021 | 4310 1183 1022 | 3219 2540 1023 | 1480 3132 1024 | 2151 1404 1025 | 2368 3152 1026 | 1758 2836 1027 | 4583 2582 1028 | 5198 3839 1029 | 3509 4274 1030 | 4506 121 1031 | 3314 4784 1032 | 189 3300 1033 | 460 544 1034 | 4667 1712 1035 | 4021 1494 1036 | 2005 5238 1037 | 1491 169 1038 | 193 2241 1039 | 2567 2562 1040 | 2317 4784 1041 | 2562 4478 1042 | 3809 4067 1043 | 4314 5143 1044 | 120 3016 1045 | 1699 116 1046 | 415 2531 1047 | 4996 4907 1048 | 5116 1113 1049 | 336 3350 1050 | 2194 5189 1051 | 1699 2926 1052 | 2904 4473 1053 | 3669 3172 1054 | 3784 1671 1055 | 1081 4368 1056 | 1699 2848 1057 | 5022 4912 1058 | 4040 980 1059 | 3652 587 1060 | 2471 98 1061 | 4893 960 1062 | 2200 616 1063 | 1231 1369 1064 | 74 4795 1065 | 134 134 1066 | 4473 1174 1067 | 1349 142 1068 | 4461 937 1069 | 210 3644 1070 | 361 4756 1071 | 5018 1270 1072 | 1068 4375 1073 | 
3455 3651 1074 | 3932 1828 1075 | 1243 1980 1076 | 1473 1877 1077 | 1456 1928 1078 | 1103 19 1079 | 1028 4875 1080 | 4724 582 1081 | 1407 3252 1082 | 3892 474 1083 | 3719 3103 1084 | 1231 3822 1085 | 517 5121 1086 | 2900 2948 1087 | 956 490 1088 | 2277 2802 1089 | 1068 498 1090 | 3467 4691 1091 | 2759 4173 1092 | 2497 2860 1093 | 401 4735 1094 | 2052 4682 1095 | 2374 2677 1096 | 1789 1750 1097 | 2250 3400 1098 | 4677 4068 1099 | 33 1332 1100 | 302 3745 1101 | 196 3142 1102 | 1116 217 1103 | 2194 2034 1104 | 4736 1364 1105 | 3995 2164 1106 | 2589 2604 1107 | 4654 144 1108 | 1858 4324 1109 | 4459 275 1110 | 1718 1023 1111 | 3950 3376 1112 | 1949 586 1113 | 960 1035 1114 | 4981 4583 1115 | 1001 4757 1116 | 2471 521 1117 | 4948 1259 1118 | 3229 4400 1119 | 3076 4368 1120 | 961 4441 1121 | 2986 1762 1122 | 5241 2025 1123 | 3867 5191 1124 | 1081 1674 1125 | 2034 4156 1126 | 2172 3102 1127 | 4053 4575 1128 | 5088 3558 1129 | 2072 2026 1130 | 5088 1986 1131 | 3969 3142 1132 | 2053 3551 1133 | 2446 875 1134 | 1257 2923 1135 | 2220 2920 1136 | 1893 4891 1137 | 4611 4909 1138 | 813 1113 1139 | 1008 3218 1140 | 3719 467 1141 | 4473 2361 1142 | 4213 3585 1143 | 44 4941 1144 | 2362 1757 1145 | 662 4330 1146 | 3009 3617 1147 | 5180 351 1148 | 5088 1573 1149 | 4067 2439 1150 | 1700 2439 1151 | 3920 3335 1152 | 4530 1789 1153 | 3247 3452 1154 | 108 3755 1155 | 1472 2361 1156 | 4128 4934 1157 | 3153 2037 1158 | 3021 4550 1159 | 653 817 1160 | 2952 3679 1161 | 3083 3172 1162 | 86 3295 1163 | 1167 4038 1164 | 2567 4375 1165 | 3741 3579 1166 | 4704 846 1167 | 2238 3531 1168 | 98 2562 1169 | 1758 2468 1170 | 4976 3150 1171 | 872 246 1172 | 3126 1138 1173 | 5173 4558 1174 | 1953 2536 1175 | 4704 4048 1176 | 4567 1649 1177 | 557 3090 1178 | 4038 3107 1179 | 4157 1587 1180 | 15 1961 1181 | 2468 1932 1182 | 1548 4932 1183 | 1178 3677 1184 | 2016 485 1185 | 5139 1529 1186 | 4703 1024 1187 | 3272 2193 1188 | 1083 4803 1189 | 1589 2540 1190 | 1549 2494 1191 | 1271 4475 1192 | 675 5072 1193 | 
3976 4462 1194 | 2986 3102 1195 | 323 66 1196 | 4850 5144 1197 | 1114 4793 1198 | 3634 239 1199 | 1960 2486 1200 | 2878 4998 1201 | 1364 3458 1202 | 3207 1657 1203 | 1739 2560 1204 | 501 4162 1205 | 2621 239 1206 | 4681 3554 1207 | 603 1397 1208 | 3515 921 1209 | 1718 4793 1210 | 2621 3467 1211 | 3129 2442 1212 | 1846 2495 1213 | 601 2566 1214 | 2764 3101 1215 | 4014 1086 1216 | 304 16 1217 | 4660 1892 1218 | 1718 4470 1219 | 1224 4965 1220 | 2634 0 1221 | 91 4795 1222 | 3799 3455 1223 | 4765 3156 1224 | 4906 4608 1225 | 2933 3317 1226 | 3380 4934 1227 | 3338 1497 1228 | 474 3599 1229 | 3996 5240 1230 | 4116 4493 1231 | 3485 4918 1232 | 432 4949 1233 | 2432 3684 1234 | 3029 1253 1235 | 3160 3343 1236 | 1870 3274 1237 | 2771 86 1238 | 5198 3873 1239 | 1810 2172 1240 | 2621 902 1241 | 4834 799 1242 | 351 2073 1243 | 4835 343 1244 | 69 1722 1245 | 736 1650 1246 | 1476 246 1247 | 1712 2084 1248 | 425 5175 1249 | 2531 488 1250 | 2727 2563 1251 | 324 263 1252 | 4290 1877 1253 | 304 3448 1254 | 1062 2140 1255 | 3733 2541 1256 | 250 2326 1257 | 4567 1849 1258 | 2078 1931 1259 | 1226 1501 1260 | 1340 739 1261 | 3031 4378 1262 | 3784 1409 1263 | 2497 3822 1264 | 1805 504 1265 | 2644 4777 1266 | 389 3873 1267 | 1614 5159 1268 | 1147 5031 1269 | 2365 3259 1270 | 1349 2042 1271 | 913 4718 1272 | 2789 1849 1273 | 2508 3809 1274 | 3891 2696 1275 | 4034 445 1276 | 3513 1171 1277 | 2371 202 1278 | 1579 1477 1279 | 2549 3317 1280 | 3240 1097 1281 | 4208 327 1282 | 977 2056 1283 | 1830 1446 1284 | 4583 1549 1285 | 2980 3086 1286 | 2848 4950 1287 | 5142 3378 1288 | 5204 697 1289 | 2544 2981 1290 | 2928 147 1291 | 2091 2875 1292 | 3892 745 1293 | 273 1486 1294 | 35 1622 1295 | 3733 1409 1296 | 4443 1886 1297 | 415 485 1298 | 3326 3822 1299 | 3439 540 1300 | 1948 2164 1301 | 2151 3826 1302 | 1963 4351 1303 | 1610 5014 1304 | 3418 1895 1305 | 1434 1420 1306 | 4963 1497 1307 | 2248 2640 1308 | 324 1274 1309 | 1870 2531 1310 | 3935 1760 1311 | 3565 300 1312 | 3528 2263 1313 | 3101 696 1314 
| 2269 2494 1315 | 3298 2875 1316 | 1183 1443 1317 | 1908 3027 1318 | 3341 3471 1319 | 799 3131 1320 | 766 1357 1321 | 2461 646 1322 | 2843 839 1323 | 2260 3575 1324 | 2379 4492 1325 | 4844 1052 1326 | 1114 89 1327 | 1114 1830 1328 | 4837 5064 1329 | 1103 2607 1330 | 4756 3475 1331 | 2508 2439 1332 | 1062 3657 1333 | 2764 1646 1334 | 3074 3551 1335 | 2487 1370 1336 | 4921 4929 1337 | 4583 1918 1338 | 5071 1779 1339 | 5023 2821 1340 | 512 1202 1341 | 1472 3876 1342 | 1070 4596 1343 | 1373 3504 1344 | 929 5086 1345 | 2621 1503 1346 | 3235 694 1347 | 1081 3867 1348 | 2853 1337 1349 | 3250 3481 1350 | 4639 901 1351 | 1134 1877 1352 | 1235 2509 1353 | 4497 1234 1354 | 1405 2090 1355 | 12 1642 1356 | 1039 5217 1357 | 997 33 1358 | 4310 2193 1359 | 3377 2312 1360 | 3858 1496 1361 | 3696 738 1362 | 1700 1761 1363 | 488 3209 1364 | 2621 3733 1365 | 3586 4330 1366 | 4990 4471 1367 | 2471 586 1368 | 3102 2166 1369 | 2925 1067 1370 | 2167 1320 1371 | 4819 5181 1372 | 2569 4706 1373 | 3634 5189 1374 | 5105 402 1375 | 4216 4303 1376 | 4654 2765 1377 | 1231 1503 1378 | 2569 4409 1379 | 2508 1332 1380 | 2901 788 1381 | 5009 1805 1382 | 2941 1483 1383 | 2742 1202 1384 | 1285 4666 1385 | 251 4376 1386 | 4616 3463 1387 | 853 2257 1388 | 4077 4777 1389 | 2046 2945 1390 | 4849 4484 1391 | 691 2540 1392 | 3719 2878 1393 | 1327 2391 1394 | 399 1190 1395 | 2973 4020 1396 | 2497 3658 1397 | 1842 1843 1398 | 3260 3379 1399 | 3366 494 1400 | 2475 2545 1401 | 1678 1676 1402 | 5080 3587 1403 | 2461 4180 1404 | 2497 1213 1405 | 2257 4056 1406 | 4357 4439 1407 | 1760 1174 1408 | 1659 3417 1409 | 4044 546 1410 | 485 1370 1411 | 5191 405 1412 | 1570 4144 1413 | 3037 2967 1414 | 2531 5075 1415 | 2557 1342 1416 | 1538 98 1417 | 652 5236 1418 | 2046 4737 1419 | 4443 4665 1420 | 3784 2572 1421 | 1035 5116 1422 | 4247 3506 1423 | 4625 3547 1424 | 4819 4690 1425 | 402 3232 1426 | 1204 3854 1427 | 3467 1503 1428 | 4843 1243 1429 | 3017 2567 1430 | 16 1446 1431 | 995 4685 1432 | 402 2494 1433 | 4677 4379 
1434 | 4616 4707 1435 | 1570 3641 1436 | 3038 3165 1437 | 2362 1752 1438 | 5017 2226 1439 | 1806 1194 1440 | 1169 285 1441 | 1862 3353 1442 | 3528 152 1443 | 1562 1877 1444 | 3578 3965 1445 | 2681 2599 1446 | 1614 1293 1447 | 5017 4812 1448 | 2362 5212 1449 | 2402 3465 1450 | -------------------------------------------------------------------------------- /data/link_prediction/CA-GrQc_test_neg.txt: -------------------------------------------------------------------------------- 1 | 2138 3367 2 | 3467 3767 3 | 1008 850 4 | 4656 768 5 | 3752 3593 6 | 4202 4926 7 | 830 1557 8 | 4818 2160 9 | 2880 2802 10 | 1961 4624 11 | 2516 2388 12 | 422 4495 13 | 1037 3688 14 | 4530 4531 15 | 239 1954 16 | 3213 1859 17 | 2930 3067 18 | 3485 4488 19 | 2005 3372 20 | 841 2930 21 | 4290 1146 22 | 3793 5039 23 | 4144 2794 24 | 4038 1185 25 | 2098 5148 26 | 1114 2600 27 | 4239 2162 28 | 2358 2736 29 | 2925 28 30 | 2253 4370 31 | 2223 2192 32 | 1637 892 33 | 4724 3767 34 | 227 2682 35 | 467 4342 36 | 1535 1560 37 | 3135 755 38 | 2542 636 39 | 3614 2257 40 | 3784 5238 41 | 691 4594 42 | 38 3002 43 | 3350 3975 44 | 997 2650 45 | 3174 1793 46 | 2723 77 47 | 3123 549 48 | 2326 3707 49 | 196 3775 50 | 1610 2893 51 | 2069 412 52 | 142 866 53 | 4029 4097 54 | 512 2277 55 | 4950 4171 56 | 3611 1321 57 | 1343 1895 58 | 2867 4283 59 | 3586 1684 60 | 2621 4366 61 | 4844 4304 62 | 600 3907 63 | 251 3927 64 | 830 218 65 | 399 166 66 | 4760 2788 67 | 401 2091 68 | 4906 2989 69 | 1204 1185 70 | 1364 957 71 | 2194 1328 72 | 2146 3015 73 | 856 1629 74 | 5 3273 75 | 3496 1690 76 | 5233 374 77 | 664 2829 78 | 4443 3123 79 | 3528 5163 80 | 2735 2290 81 | 324 5151 82 | 1476 4362 83 | 387 2687 84 | 2901 2231 85 | 1055 2354 86 | 4055 3484 87 | 3027 3688 88 | 1227 63 89 | 225 1113 90 | 4770 216 91 | 4415 1007 92 | 600 857 93 | 551 2510 94 | 2700 4894 95 | 3830 1282 96 | 5028 2308 97 | 1953 1013 98 | 15 2740 99 | 4906 89 100 | 1352 4520 101 | 2471 522 102 | 4454 1198 103 | 1570 3424 104 | 3861 3977 105 | 2707 
3575 106 | 3210 563 107 | 1014 3214 108 | 3420 1609 109 | 4036 1506 110 | 2531 3503 111 | 3235 4846 112 | 351 1946 113 | 1482 2349 114 | 4782 1600 115 | 122 2840 116 | 1642 473 117 | 3784 2667 118 | 841 2340 119 | 2487 862 120 | 3307 191 121 | 2892 38 122 | 1246 357 123 | 1412 846 124 | 4290 457 125 | 476 2720 126 | 4587 1379 127 | 1862 3979 128 | 1042 4618 129 | 2117 670 130 | 3801 2311 131 | 4949 2004 132 | 3822 4011 133 | 4893 2408 134 | 813 1530 135 | 2362 661 136 | 4396 4781 137 | 2837 4980 138 | 1354 4231 139 | 1830 5240 140 | 1486 2225 141 | 770 2861 142 | 3784 2771 143 | 1699 3920 144 | 5217 3557 145 | 609 2583 146 | 3565 4305 147 | 1434 4034 148 | 4681 1130 149 | 4197 4915 150 | 4861 270 151 | 4238 1840 152 | 3847 3726 153 | 79 2060 154 | 1538 337 155 | 459 4121 156 | 250 4427 157 | 3813 1793 158 | 4933 366 159 | 2034 3747 160 | 2467 953 161 | 69 5099 162 | 5073 3457 163 | 3631 2241 164 | 2644 3042 165 | 725 4290 166 | 2497 4320 167 | 4248 4187 168 | 1340 1601 169 | 2083 4927 170 | 2465 1203 171 | 936 3331 172 | 3920 1141 173 | 3800 4127 174 | 3719 1079 175 | 4125 720 176 | 4660 4108 177 | 2531 3987 178 | 4151 2137 179 | 3300 3706 180 | 4889 1310 181 | 864 1611 182 | 2761 2788 183 | 4777 3365 184 | 3631 3553 185 | 3717 4074 186 | 3013 2225 187 | 4290 1482 188 | 4181 784 189 | 152 3473 190 | 4127 3549 191 | 4850 1155 192 | 33 4479 193 | 4263 3766 194 | 3569 542 195 | 630 4816 196 | 4231 4136 197 | 4156 3636 198 | 2390 3410 199 | 3893 3358 200 | 4119 3859 201 | 2607 2567 202 | 1044 1583 203 | 3117 3770 204 | 2379 1805 205 | 507 4078 206 | 2166 491 207 | 401 778 208 | 4497 2677 209 | 1762 1306 210 | 1211 413 211 | 498 4431 212 | 1911 2785 213 | 1114 4590 214 | 1862 4666 215 | 265 1018 216 | 1680 1687 217 | 2657 2778 218 | 5236 3108 219 | 1235 575 220 | 1326 3970 221 | 4909 1112 222 | 3776 2032 223 | 2681 5050 224 | 1456 2328 225 | 2320 635 226 | 3884 724 227 | 1718 2760 228 | 1114 4808 229 | 3877 3446 230 | 3153 2661 231 | 2155 2116 232 | 2952 290 233 | 2275 
4424 234 | 3770 1677 235 | 2771 135 236 | 3696 2461 237 | 4567 2746 238 | 513 3102 239 | 4809 2730 240 | 150 4317 241 | 2618 2779 242 | 4702 1487 243 | 1699 3484 244 | 1352 4184 245 | 1114 33 246 | 4160 1945 247 | 1570 177 248 | 5118 4942 249 | 1014 0 250 | 1396 1076 251 | 1646 4582 252 | 3485 2914 253 | 3216 4180 254 | 5006 2439 255 | 872 772 256 | 2740 3257 257 | 4213 3239 258 | 1757 2996 259 | 2845 715 260 | 2933 4686 261 | 4636 574 262 | 4849 2862 263 | 2742 2985 264 | 931 2397 265 | 96 309 266 | 3733 692 267 | 3207 3418 268 | 4800 1644 269 | 600 1082 270 | 3350 4835 271 | 914 3683 272 | 3191 1720 273 | 2892 4284 274 | 3341 1123 275 | 2238 3361 276 | 1019 3385 277 | 960 2288 278 | 3809 2347 279 | 1978 3021 280 | 4237 2331 281 | 1961 2628 282 | 1718 3974 283 | 4971 2297 284 | 926 3195 285 | 1685 780 286 | 3476 5232 287 | 239 1646 288 | 144 2189 289 | 15 3538 290 | 1500 3622 291 | 3273 4591 292 | 822 2611 293 | 33 2098 294 | 2380 2714 295 | 522 4440 296 | 3455 2108 297 | 1760 4877 298 | 1842 1012 299 | 2497 2650 300 | 2178 219 301 | 4695 1083 302 | 150 3494 303 | 2422 262 304 | 4071 5142 305 | 268 1830 306 | 372 3738 307 | 3867 745 308 | 2582 3872 309 | 4782 3559 310 | 1233 4118 311 | 1519 3651 312 | 4550 748 313 | 1114 4017 314 | 3578 4825 315 | 697 1268 316 | 567 4985 317 | 4520 3430 318 | 3179 1266 319 | 1500 794 320 | 4219 3162 321 | 5026 1377 322 | 3669 1467 323 | 2757 3173 324 | 1888 3238 325 | 1340 2569 326 | 1685 2965 327 | 1329 1208 328 | 1680 2162 329 | 4076 66 330 | 4364 4935 331 | 4507 2738 332 | 2716 3339 333 | 2106 2363 334 | 2457 3922 335 | 1762 3111 336 | 3884 4221 337 | 1298 3562 338 | 1023 2228 339 | 2918 1508 340 | 3733 3941 341 | 1407 5022 342 | 4741 635 343 | 2161 1805 344 | 838 4824 345 | 460 588 346 | 2271 4950 347 | 2708 5175 348 | 1905 4871 349 | 1202 1237 350 | 5096 2905 351 | 549 5136 352 | 4781 3706 353 | 4894 282 354 | 636 1038 355 | 4688 3020 356 | 2247 4810 357 | 4770 4703 358 | 2091 1900 359 | 4019 3735 360 | 4736 859 361 | 3261 
4970 362 | 4721 118 363 | 4667 3935 364 | 1147 5226 365 | 337 3478 366 | 2750 699 367 | 3365 551 368 | 984 986 369 | 2409 1839 370 | 4506 2399 371 | 2621 1535 372 | 1982 4260 373 | 2910 3507 374 | 4949 1347 375 | 3557 4713 376 | 4113 4332 377 | 4003 588 378 | 4724 94 379 | 205 3351 380 | 4961 3082 381 | 4470 2063 382 | 1949 1817 383 | 90 2485 384 | 4348 3329 385 | 4576 1813 386 | 2497 2464 387 | 2621 2346 388 | 2292 1926 389 | 4074 3870 390 | 304 1214 391 | 4484 1839 392 | 3216 830 393 | 4145 3046 394 | 3703 3801 395 | 2928 2414 396 | 2453 1520 397 | 1889 4902 398 | 2931 2629 399 | 1705 4027 400 | 3622 2587 401 | 2853 678 402 | 4944 2108 403 | 1911 1850 404 | 5187 2583 405 | 4538 810 406 | 3281 1607 407 | 2736 1605 408 | 3703 1107 409 | 257 2050 410 | 1703 1654 411 | 2408 637 412 | 813 2796 413 | 4238 1795 414 | 779 3632 415 | 2483 2472 416 | 473 2763 417 | 1441 6 418 | 1334 2396 419 | 4405 1706 420 | 1037 147 421 | 3467 1393 422 | 3822 266 423 | 2362 4028 424 | 4671 3302 425 | 2467 2969 426 | 897 5083 427 | 2867 3995 428 | 33 2704 429 | 2676 3601 430 | 2326 2486 431 | 3870 364 432 | 3992 2816 433 | 3275 3285 434 | 3281 3393 435 | 5009 2309 436 | 2154 572 437 | 2176 4869 438 | 2172 17 439 | 2853 3765 440 | 3250 4360 441 | 3693 4490 442 | 2468 2814 443 | 3484 1195 444 | 1374 4719 445 | 2266 1149 446 | 5230 56 447 | 856 3141 448 | 2291 1693 449 | 4924 246 450 | 4392 4357 451 | 90 4194 452 | 2742 772 453 | 2464 310 454 | 2954 4469 455 | 1510 4661 456 | 4530 1026 457 | 2206 4120 458 | 670 107 459 | 3112 755 460 | 376 1165 461 | 75 553 462 | 2481 2009 463 | 5180 43 464 | 4368 3450 465 | 3697 4587 466 | 4665 3806 467 | 1029 4851 468 | 2442 357 469 | 1002 2868 470 | 1953 4765 471 | 2093 4315 472 | 3437 5103 473 | 2910 1440 474 | 2848 1751 475 | 2457 2283 476 | 1155 1674 477 | 2910 3167 478 | 189 3187 479 | 1762 4223 480 | 1467 4389 481 | 654 4885 482 | 2379 3098 483 | 4576 3525 484 | 4550 1069 485 | 2926 1440 486 | 1737 2207 487 | 1672 345 488 | 4435 2671 489 | 2138 700 
490 | 304 2217 491 | 3098 4923 492 | 3341 5212 493 | 1572 528 494 | 230 1635 495 | 4399 1676 496 | 1862 1456 497 | 4076 3903 498 | 1472 3260 499 | 1476 2134 500 | 2284 3020 501 | 4346 4984 502 | 2577 2856 503 | 1473 594 504 | 4038 1462 505 | 1909 4474 506 | 3898 1862 507 | 4808 812 508 | 4481 3767 509 | 4169 1641 510 | 1581 3390 511 | 1820 526 512 | 2834 4203 513 | 3842 3827 514 | 2260 4026 515 | 1264 4298 516 | 1396 1548 517 | 2532 11 518 | 1862 2523 519 | 200 2614 520 | 3403 3014 521 | 4306 2639 522 | 967 3742 523 | 2582 3478 524 | 521 850 525 | 2613 157 526 | 3935 4819 527 | 3752 1700 528 | 4849 5021 529 | 639 3197 530 | 1103 2826 531 | 856 972 532 | 1517 366 533 | 1619 2795 534 | 2885 1907 535 | 4481 1985 536 | 1812 240 537 | 4048 4788 538 | 4416 954 539 | 636 203 540 | 2515 256 541 | 636 2128 542 | 1352 4704 543 | 4765 1647 544 | 4736 3318 545 | 857 2836 546 | 203 3052 547 | 4906 2971 548 | 1070 2911 549 | 1951 4545 550 | 4962 2237 551 | 4961 1734 552 | 0 5180 553 | 4362 3161 554 | 4849 3922 555 | 3883 2885 556 | 4113 499 557 | 2376 4253 558 | 4795 839 559 | 1008 23 560 | 661 1362 561 | 1159 1303 562 | 3322 2050 563 | 3775 1643 564 | 4974 2012 565 | 3274 3176 566 | 3020 177 567 | 3023 3501 568 | 1889 2507 569 | 4180 3718 570 | 3578 4227 571 | 5180 1038 572 | 202 1427 573 | 3418 1396 574 | 15 742 575 | 3884 2941 576 | 4443 2389 577 | 3376 2466 578 | 1659 2565 579 | 1243 2653 580 | 4327 3647 581 | 1211 902 582 | 698 400 583 | 1549 2831 584 | 2199 5093 585 | 955 2421 586 | 175 4107 587 | 476 3503 588 | 1830 344 589 | 1659 3256 590 | 2442 4280 591 | 4458 743 592 | 852 2212 593 | 2376 4774 594 | 2584 1837 595 | 2704 2 596 | 2037 1398 597 | 3634 2745 598 | 2365 4675 599 | 2532 910 600 | 3915 4349 601 | 3084 500 602 | 3697 1095 603 | 3240 294 604 | 4569 2610 605 | 2497 85 606 | 676 2563 607 | 914 2770 608 | 4156 1780 609 | 2723 4773 610 | 4493 67 611 | 2556 3686 612 | 2531 1636 613 | 2235 407 614 | 2742 153 615 | 4782 2316 616 | 3963 4054 617 | 4973 4855 618 | 1429 
185 619 | 1172 840 620 | 2621 4529 621 | 324 323 622 | 3229 3614 623 | 204 2173 624 | 1231 3861 625 | 4749 838 626 | 565 4488 627 | 3235 2925 628 | 982 1551 629 | 1134 499 630 | 1221 342 631 | 3920 3712 632 | 5151 1090 633 | 4122 953 634 | 2220 4281 635 | 3575 3269 636 | 1512 5195 637 | 2634 366 638 | 3109 3379 639 | 1352 4613 640 | 949 2958 641 | 3743 2332 642 | 2457 216 643 | 4732 2984 644 | 3215 3217 645 | 1393 4131 646 | 1329 3549 647 | 2389 3479 648 | 1409 4722 649 | 527 5049 650 | 946 3500 651 | 3614 3164 652 | 4316 4391 653 | 86 2925 654 | 5166 1699 655 | 1549 3282 656 | 4843 3113 657 | 2924 5095 658 | 1476 5232 659 | 1822 3753 660 | 1298 2775 661 | 1181 2373 662 | 4949 1591 663 | 4724 4872 664 | 3083 4409 665 | 1371 281 666 | 3618 3753 667 | 438 1597 668 | 3353 2223 669 | 2376 4890 670 | 2760 1248 671 | 3160 5020 672 | 549 962 673 | 3667 3451 674 | 3037 2252 675 | 3251 1092 676 | 273 4651 677 | 4514 2507 678 | 1266 3813 679 | 3953 4626 680 | 3909 1272 681 | 1411 746 682 | 2931 602 683 | 2910 2078 684 | 5117 4656 685 | 106 4127 686 | 600 1091 687 | 1773 2123 688 | 1961 4252 689 | 2037 2396 690 | 438 2414 691 | 1658 2430 692 | 1434 2573 693 | 1762 4605 694 | 491 4579 695 | 1369 1540 696 | 3112 686 697 | 4544 1403 698 | 844 5141 699 | 1565 3181 700 | 508 2968 701 | 844 4089 702 | 3037 2969 703 | 1370 116 704 | 1114 3538 705 | 205 2601 706 | 2291 4308 707 | 257 1810 708 | 948 723 709 | 4830 4522 710 | 619 41 711 | 1410 1327 712 | 98 4235 713 | 4113 3536 714 | 582 517 715 | 2932 2848 716 | 3098 2738 717 | 3090 3668 718 | 3239 3944 719 | 3809 2717 720 | 3518 3848 721 | 2634 4895 722 | 3614 3636 723 | 3580 2394 724 | 4587 1523 725 | 67 3428 726 | 3822 961 727 | 1576 4899 728 | 5073 1874 729 | 4439 623 730 | 1789 1755 731 | 3634 1149 732 | 3709 2460 733 | 4873 2658 734 | 1415 884 735 | 2634 801 736 | 2604 390 737 | 4913 1704 738 | 1762 3166 739 | 1380 372 740 | 257 4953 741 | 2457 790 742 | 4782 42 743 | 1886 4665 744 | 3266 1041 745 | 3311 1171 746 | 2356 4931 747 
| 2853 943 748 | 3716 969 749 | 3298 4334 750 | 2864 4864 751 | 485 2827 752 | 2867 1881 753 | 3833 1008 754 | 1565 1313 755 | 2476 3254 756 | 239 3810 757 | 2170 2668 758 | 4876 2057 759 | 4647 818 760 | 3547 1566 761 | 1911 3363 762 | 3497 2053 763 | 4011 1408 764 | 781 4675 765 | 2037 2 766 | 2246 4640 767 | 5194 5049 768 | 3652 2079 769 | 5189 2153 770 | 3867 5064 771 | 5053 4782 772 | 2764 3854 773 | 3418 5010 774 | 3799 3766 775 | 116 3619 776 | 2867 56 777 | 864 1538 778 | 706 1683 779 | 3898 2763 780 | 4185 131 781 | 521 4875 782 | 4550 3708 783 | 1332 3439 784 | 2119 2354 785 | 1710 4778 786 | 788 228 787 | 4791 1682 788 | 830 5071 789 | 3818 3425 790 | 1882 2110 791 | 3737 497 792 | 142 4554 793 | 4949 4154 794 | 2695 4689 795 | 2577 160 796 | 4418 2649 797 | 3856 3470 798 | 2638 163 799 | 4113 2393 800 | 787 1452 801 | 1203 4410 802 | 1541 1400 803 | 4337 2233 804 | 1589 1664 805 | 3009 399 806 | 2895 1376 807 | 1659 962 808 | 3484 1584 809 | 401 190 810 | 2928 2958 811 | 2621 1904 812 | 3733 4383 813 | 3109 2653 814 | 722 2308 815 | 3750 3482 816 | 3703 240 817 | 173 1323 818 | 3109 1907 819 | 3809 929 820 | 4843 3113 821 | 576 2571 822 | 2904 3149 823 | 156 3682 824 | 4791 1950 825 | 4228 1619 826 | 1862 116 827 | 15 4073 828 | 1870 3472 829 | 4508 2445 830 | 884 3354 831 | 3224 1711 832 | 3549 1090 833 | 574 4788 834 | 4466 2872 835 | 892 4068 836 | 3235 3368 837 | 2901 2107 838 | 552 657 839 | 1588 4378 840 | 2127 2359 841 | 2641 3776 842 | 3634 3686 843 | 1103 5078 844 | 3021 2539 845 | 2670 462 846 | 3715 1772 847 | 1996 845 848 | 4733 1009 849 | 619 1414 850 | 4525 4467 851 | 3752 2438 852 | 510 1175 853 | 4456 5190 854 | 3172 3327 855 | 1482 474 856 | 3588 3341 857 | 2413 1051 858 | 427 3672 859 | 1982 3347 860 | 4899 285 861 | 2022 5061 862 | 485 4321 863 | 263 2594 864 | 4300 883 865 | 574 1892 866 | 3023 4472 867 | 2465 4734 868 | 2138 4575 869 | 4949 4988 870 | 1819 1264 871 | 1961 576 872 | 4842 2688 873 | 3717 4904 874 | 2016 627 875 | 2194 
4108 876 | 3897 4719 877 | 5143 2148 878 | 3332 689 879 | 562 2383 880 | 3135 1803 881 | 1008 2619 882 | 5 828 883 | 630 303 884 | 415 815 885 | 1100 4193 886 | 521 4841 887 | 3343 1704 888 | 2644 696 889 | 227 4884 890 | 2688 3964 891 | 4616 3286 892 | 4177 1314 893 | 3719 3965 894 | 4272 4482 895 | 4622 1949 896 | 3657 1532 897 | 3720 4044 898 | 3484 815 899 | 3791 1565 900 | 1226 714 901 | 1820 1615 902 | 2467 2462 903 | 1961 1736 904 | 4736 1852 905 | 1459 3605 906 | 193 3526 907 | 512 4143 908 | 1040 2000 909 | 3109 5117 910 | 678 1151 911 | 302 4134 912 | 2700 371 913 | 2638 4815 914 | 4566 3899 915 | 2047 1764 916 | 4611 4815 917 | 1953 689 918 | 3334 919 919 | 401 3249 920 | 3216 4688 921 | 4562 2513 922 | 2622 544 923 | 31 958 924 | 5103 415 925 | 1369 2028 926 | 1215 1090 927 | 2689 3547 928 | 1243 2152 929 | 3749 5198 930 | 1445 835 931 | 3160 1355 932 | 4963 3588 933 | 830 3469 934 | 2892 2199 935 | 499 4897 936 | 1035 556 937 | 1070 3762 938 | 4707 1676 939 | 3770 2048 940 | 4323 2104 941 | 3109 2349 942 | 587 1742 943 | 1255 3728 944 | 1891 1334 945 | 3736 4809 946 | 2596 4231 947 | 4416 1997 948 | 3800 1073 949 | 5152 2960 950 | 99 585 951 | 2997 789 952 | 2843 2802 953 | 5142 2697 954 | 2487 3068 955 | 1094 1647 956 | 703 3220 957 | 1205 4121 958 | 385 441 959 | 4619 671 960 | 4671 1507 961 | 574 1595 962 | 128 4654 963 | 4391 4861 964 | 2853 1869 965 | 5241 693 966 | 3013 3802 967 | 1821 3237 968 | 1842 5171 969 | 2497 1970 970 | 4416 1038 971 | 827 4765 972 | 1953 3860 973 | 2764 3361 974 | 4113 4837 975 | 5207 3246 976 | 16 1219 977 | 1554 83 978 | 4894 2223 979 | 3893 292 980 | 1110 4768 981 | 4981 2112 982 | 4291 4150 983 | 4315 1275 984 | 3450 761 985 | 3912 5119 986 | 328 4364 987 | 4386 2734 988 | 1167 1118 989 | 3776 4491 990 | 1362 3815 991 | 1718 1143 992 | 4471 903 993 | 1996 680 994 | 3049 3256 995 | 4128 3788 996 | 425 3477 997 | 4793 1688 998 | 1928 85 999 | 4909 5053 1000 | 1867 4906 1001 | 1001 1934 1002 | 5199 2361 1003 | 3809 1830 
1004 | 3017 4941 1005 | 1352 2778 1006 | 1575 977 1007 | 117 10 1008 | 4688 482 1009 | 1944 353 1010 | 3737 2947 1011 | 501 2154 1012 | 3019 3035 1013 | 1008 1299 1014 | 2286 1320 1015 | 654 823 1016 | 4177 3691 1017 | 746 1938 1018 | 4125 1139 1019 | 2925 712 1020 | 790 2910 1021 | 4310 4483 1022 | 3219 2906 1023 | 1480 4222 1024 | 2151 976 1025 | 2368 2662 1026 | 1758 3930 1027 | 4583 3616 1028 | 5198 70 1029 | 3509 496 1030 | 4506 1394 1031 | 3314 4467 1032 | 189 3133 1033 | 460 3446 1034 | 4667 1484 1035 | 4021 502 1036 | 2005 4533 1037 | 1491 4072 1038 | 193 1961 1039 | 2567 1348 1040 | 2317 1545 1041 | 2562 1519 1042 | 3809 1779 1043 | 4314 382 1044 | 120 2851 1045 | 1699 2073 1046 | 415 3150 1047 | 4996 1938 1048 | 5116 689 1049 | 336 630 1050 | 2194 2315 1051 | 1699 247 1052 | 2904 1104 1053 | 3669 3255 1054 | 3784 2393 1055 | 1081 1338 1056 | 1699 2743 1057 | 5022 4586 1058 | 4040 706 1059 | 3652 83 1060 | 2471 407 1061 | 4893 1557 1062 | 2200 3961 1063 | 1231 1057 1064 | 74 786 1065 | 134 2451 1066 | 4473 1984 1067 | 1349 4421 1068 | 4461 3884 1069 | 210 574 1070 | 361 1051 1071 | 5018 2216 1072 | 1068 4961 1073 | 3455 3941 1074 | 3932 998 1075 | 1243 1723 1076 | 1473 3546 1077 | 1456 140 1078 | 1103 4396 1079 | 1028 314 1080 | 4724 5100 1081 | 1407 3244 1082 | 3892 3898 1083 | 3719 1858 1084 | 1231 3182 1085 | 517 1842 1086 | 2900 752 1087 | 956 3204 1088 | 2277 2083 1089 | 1068 977 1090 | 3467 3167 1091 | 2759 3760 1092 | 2497 3333 1093 | 401 489 1094 | 2052 268 1095 | 2374 3531 1096 | 1789 1122 1097 | 2250 3769 1098 | 4677 2156 1099 | 33 1454 1100 | 302 4365 1101 | 196 2194 1102 | 1116 1768 1103 | 2194 1314 1104 | 4736 4622 1105 | 3995 1749 1106 | 2589 1757 1107 | 4654 328 1108 | 1858 2225 1109 | 4459 1643 1110 | 1718 875 1111 | 3950 4307 1112 | 1949 4305 1113 | 960 4540 1114 | 4981 3516 1115 | 1001 4854 1116 | 2471 1346 1117 | 4948 3510 1118 | 3229 594 1119 | 3076 2334 1120 | 961 89 1121 | 2986 3154 1122 | 5241 3964 1123 | 3867 2551 1124 | 1081 521 
1125 | 2034 3164 1126 | 2172 4630 1127 | 4053 804 1128 | 5088 2534 1129 | 2072 1872 1130 | 5088 4933 1131 | 3969 1636 1132 | 2053 2369 1133 | 2446 5012 1134 | 1257 1724 1135 | 2220 1410 1136 | 1893 3513 1137 | 4611 2400 1138 | 813 3900 1139 | 1008 4636 1140 | 3719 2750 1141 | 4473 4748 1142 | 4213 4818 1143 | 44 2402 1144 | 2362 2384 1145 | 662 1000 1146 | 3009 1273 1147 | 5180 5057 1148 | 5088 2377 1149 | 4067 1313 1150 | 1700 13 1151 | 3920 4675 1152 | 4530 3464 1153 | 3247 4107 1154 | 108 80 1155 | 1472 952 1156 | 4128 3543 1157 | 3153 2127 1158 | 3021 3907 1159 | 653 2917 1160 | 2952 2746 1161 | 3083 4689 1162 | 86 475 1163 | 1167 1083 1164 | 2567 2022 1165 | 3741 1525 1166 | 4704 1548 1167 | 2238 1633 1168 | 98 733 1169 | 1758 4973 1170 | 4976 3479 1171 | 872 4593 1172 | 3126 2346 1173 | 5173 382 1174 | 1953 880 1175 | 4704 2891 1176 | 4567 4780 1177 | 557 4913 1178 | 4038 4089 1179 | 4157 2778 1180 | 15 4308 1181 | 2468 2513 1182 | 1548 4828 1183 | 1178 3712 1184 | 2016 91 1185 | 5139 1228 1186 | 4703 2106 1187 | 3272 192 1188 | 1083 4960 1189 | 1589 1977 1190 | 1549 3618 1191 | 1271 2990 1192 | 675 4319 1193 | 3976 673 1194 | 2986 3541 1195 | 323 49 1196 | 4850 575 1197 | 1114 4524 1198 | 3634 4391 1199 | 1960 4327 1200 | 2878 4363 1201 | 1364 2062 1202 | 3207 4921 1203 | 1739 311 1204 | 501 4345 1205 | 2621 2004 1206 | 4681 2434 1207 | 603 3960 1208 | 3515 3598 1209 | 1718 687 1210 | 2621 2640 1211 | 3129 4435 1212 | 1846 2780 1213 | 601 2416 1214 | 2764 5188 1215 | 4014 4024 1216 | 304 2396 1217 | 4660 610 1218 | 1718 2418 1219 | 1224 4115 1220 | 2634 3891 1221 | 91 528 1222 | 3799 1335 1223 | 4765 1773 1224 | 4906 5174 1225 | 2933 5087 1226 | 3380 3495 1227 | 3338 2114 1228 | 474 193 1229 | 3996 222 1230 | 4116 1265 1231 | 3485 2459 1232 | 432 1405 1233 | 2432 1945 1234 | 3029 2507 1235 | 3160 1363 1236 | 1870 2316 1237 | 2771 3473 1238 | 5198 1628 1239 | 1810 4293 1240 | 2621 5087 1241 | 4834 9 1242 | 351 4892 1243 | 4835 2944 1244 | 69 2283 1245 | 736 
3683 1246 | 1476 2158 1247 | 1712 4961 1248 | 425 1695 1249 | 2531 1908 1250 | 2727 1610 1251 | 324 500 1252 | 4290 4468 1253 | 304 2163 1254 | 1062 2916 1255 | 3733 4590 1256 | 250 386 1257 | 4567 3119 1258 | 2078 3504 1259 | 1226 3157 1260 | 1340 4148 1261 | 3031 1964 1262 | 3784 196 1263 | 2497 2176 1264 | 1805 2296 1265 | 2644 3191 1266 | 389 858 1267 | 1614 3515 1268 | 1147 2444 1269 | 2365 345 1270 | 1349 4967 1271 | 913 821 1272 | 2789 1501 1273 | 2508 632 1274 | 3891 2004 1275 | 4034 4294 1276 | 3513 776 1277 | 2371 1864 1278 | 1579 3122 1279 | 2549 3331 1280 | 3240 5048 1281 | 4208 207 1282 | 977 4867 1283 | 1830 823 1284 | 4583 1457 1285 | 2980 5152 1286 | 2848 4604 1287 | 5142 3317 1288 | 5204 3950 1289 | 2544 1470 1290 | 2928 2588 1291 | 2091 4960 1292 | 3892 1527 1293 | 273 4090 1294 | 35 75 1295 | 3733 1083 1296 | 4443 3222 1297 | 415 2656 1298 | 3326 2557 1299 | 3439 2912 1300 | 1948 4947 1301 | 2151 3582 1302 | 1963 3353 1303 | 1610 3692 1304 | 3418 3463 1305 | 1434 3001 1306 | 4963 2212 1307 | 2248 5109 1308 | 324 4781 1309 | 1870 788 1310 | 3935 1336 1311 | 3565 1163 1312 | 3528 4759 1313 | 3101 5045 1314 | 2269 1614 1315 | 3298 2014 1316 | 1183 2530 1317 | 1908 477 1318 | 3341 2136 1319 | 799 1963 1320 | 766 3073 1321 | 2461 22 1322 | 2843 2561 1323 | 2260 4535 1324 | 2379 4272 1325 | 4844 2246 1326 | 1114 269 1327 | 1114 3284 1328 | 4837 4732 1329 | 1103 1081 1330 | 4756 1320 1331 | 2508 2647 1332 | 1062 2290 1333 | 2764 851 1334 | 3074 2299 1335 | 2487 2225 1336 | 4921 4438 1337 | 4583 4344 1338 | 5071 4995 1339 | 5023 667 1340 | 512 3708 1341 | 1472 990 1342 | 1070 1746 1343 | 1373 282 1344 | 929 4220 1345 | 2621 3227 1346 | 3235 3324 1347 | 1081 4366 1348 | 2853 258 1349 | 3250 440 1350 | 4639 2220 1351 | 1134 5176 1352 | 1235 2317 1353 | 4497 4728 1354 | 1405 2581 1355 | 12 1058 1356 | 1039 1370 1357 | 997 3012 1358 | 4310 765 1359 | 3377 272 1360 | 3858 3293 1361 | 3696 5120 1362 | 1700 3271 1363 | 488 4362 1364 | 2621 2327 1365 | 3586 2420 
1366 | 4990 5003 1367 | 2471 2837 1368 | 3102 1261 1369 | 2925 3652 1370 | 2167 4726 1371 | 4819 2751 1372 | 2569 4163 1373 | 3634 4800 1374 | 5105 968 1375 | 4216 677 1376 | 4654 2407 1377 | 1231 3733 1378 | 2569 3958 1379 | 2508 3201 1380 | 2901 3228 1381 | 5009 2507 1382 | 2941 488 1383 | 2742 3916 1384 | 1285 5044 1385 | 251 629 1386 | 4616 5045 1387 | 853 2151 1388 | 4077 4725 1389 | 2046 1262 1390 | 4849 1360 1391 | 691 233 1392 | 3719 4004 1393 | 1327 2141 1394 | 399 93 1395 | 2973 3763 1396 | 2497 4515 1397 | 1842 2544 1398 | 3260 3947 1399 | 3366 2073 1400 | 2475 140 1401 | 1678 5214 1402 | 5080 1869 1403 | 2461 3466 1404 | 2497 2532 1405 | 2257 4116 1406 | 4357 1899 1407 | 1760 5230 1408 | 1659 3212 1409 | 4044 2588 1410 | 485 3457 1411 | 5191 190 1412 | 1570 4200 1413 | 3037 1323 1414 | 2531 417 1415 | 2557 4511 1416 | 1538 1238 1417 | 652 4206 1418 | 2046 4943 1419 | 4443 3211 1420 | 3784 274 1421 | 1035 4736 1422 | 4247 2751 1423 | 4625 3813 1424 | 4819 4200 1425 | 402 4475 1426 | 1204 2602 1427 | 3467 617 1428 | 4843 2301 1429 | 3017 316 1430 | 16 3355 1431 | 995 2189 1432 | 402 2466 1433 | 4677 2830 1434 | 4616 718 1435 | 1570 946 1436 | 3038 2232 1437 | 2362 4107 1438 | 5017 2736 1439 | 1806 245 1440 | 1169 4622 1441 | 1862 3047 1442 | 3528 1091 1443 | 1562 5121 1444 | 3578 4579 1445 | 2681 5048 1446 | 1614 388 1447 | 5017 507 1448 | 2362 4364 1449 | 2402 178 1450 | -------------------------------------------------------------------------------- /src/GraphGAN/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liutongyang/GraphGAN-pytorch/3be0f757b431cb470b984de8d3e94105dcdd4afe/src/GraphGAN/__init__.py -------------------------------------------------------------------------------- /src/GraphGAN/config.py: -------------------------------------------------------------------------------- 1 | modes = ["gen", "dis"] 2 | 3 | # training settings 4 | batch_size_gen = 64 # batch 
"""Hyper-parameter and path settings for GraphGAN training."""

modes = ["gen", "dis"]

# ---- training settings -------------------------------------------------
batch_size_gen = 64          # minibatch size when training the generator
batch_size_dis = 64          # minibatch size when training the discriminator
lambda_gen = 1e-5            # L2 regularisation weight for the generator
lambda_dis = 1e-5            # L2 regularisation weight for the discriminator
n_sample_gen = 20            # number of samples drawn for the generator
lr_gen = 1e-3                # generator learning rate
lr_dis = 1e-3                # discriminator learning rate
n_epochs = 20                # number of outer (adversarial) loops
n_epochs_gen = 30            # inner loops per epoch for the generator
n_epochs_dis = 30            # inner loops per epoch for the discriminator
gen_interval = n_epochs_gen  # resample generator nodes every gen_interval iterations
dis_interval = n_epochs_dis  # resample discriminator nodes every dis_interval iterations
update_ratio = 1             # updating ratio when choosing the BFS-trees

# ---- model saving ------------------------------------------------------
load_model = False           # whether to initialise from an existing model
save_steps = 10

# ---- other hyper-parameters --------------------------------------------
n_emb = 50                   # embedding dimensionality
multi_processing = False     # whether to build BFS-trees with multiprocessing
window_size = 2

# ---- application and dataset -------------------------------------------
app = "link_prediction"
dataset = "CA-GrQc"

# ---- path settings (relative to src/GraphGAN/) -------------------------
train_filename = f"../../data/{app}/{dataset}_train.txt"
test_filename = f"../../data/{app}/{dataset}_test.txt"
test_neg_filename = f"../../data/{app}/{dataset}_test_neg.txt"
pretrain_emb_filename_d = f"../../pre_train/{app}/{dataset}_pre_train.emb"
pretrain_emb_filename_g = f"../../pre_train/{app}/{dataset}_pre_train.emb"
emb_filenames = [f"../../results/{app}/{dataset}_gen_.emb",
                 f"../../results/{app}/{dataset}_dis_.emb"]
result_filename = f"../../results/{app}/{dataset}.txt"
cache_filename = f"../../cache/{dataset}.pkl"
model_log = "../../log/"
PATH = './checkpoint.path'
class Discriminator(object):
    """Discriminator of GraphGAN.

    Holds an embedding matrix plus a per-node bias and scores
    (center node, neighbor node) pairs with an inner product.

    Args:
        n_node: number of nodes in the graph.
        node_emd_init: pre-trained embeddings of shape (n_node, n_emb)
            (presumably the numpy array loaded by utils.read_embeddings --
            confirm against graph_gan.GraphGAN.__init__).
    """

    def __init__(self, n_node, node_emd_init):
        self.n_node = n_node
        self.node_emd_init = node_emd_init

        # Bug fix: start from the pre-trained embeddings instead of
        # discarding them for a fresh random matrix (the caller reads them
        # from disk specifically to initialise this model).
        self.embedding_matrix = torch.as_tensor(self.node_emd_init, dtype=torch.float32)
        self.bias_vector = torch.zeros([self.n_node])

        # Placeholders for the current minibatch; real ids are filled in
        # during training.  Bug fix: these were plain ints (0), and a plain
        # int has no .long(), so construction crashed with AttributeError.
        self.node_id = torch.zeros(1, dtype=torch.long)
        self.node_neighbor_id = torch.zeros(1, dtype=torch.long)
        self.reward = 0

        self.node_embedding = torch.index_select(self.embedding_matrix, 0, self.node_id)
        self.node_neighbor_embedding = torch.index_select(self.embedding_matrix, 0, self.node_neighbor_id)
        self.bias = torch.index_select(self.bias_vector, 0, self.node_neighbor_id)
        # Bug fix: the score is the per-pair inner product plus bias.  The
        # old expression `a * b.sum(0) + bias` summed the neighbor
        # embeddings over the batch axis first (missing parentheses).
        self.score = (self.node_embedding * self.node_neighbor_embedding).sum(dim=1) + self.bias
class Generator(object):
    """Generator of GraphGAN.

    Holds an embedding matrix plus a per-node bias; exposes the full
    node-to-node relevance matrix (`all_score`) used when sampling walks,
    and a per-pair score/probability for the current minibatch.

    Args:
        n_node: number of nodes in the graph.
        node_emd_init: pre-trained embeddings of shape (n_node, n_emb)
            (presumably the numpy array loaded by utils.read_embeddings --
            confirm against graph_gan.GraphGAN.__init__).
    """

    def __init__(self, n_node, node_emd_init):
        self.n_node = n_node
        self.node_emd_init = node_emd_init

        # Bug fix: initialise from the pre-trained embeddings instead of
        # discarding them for a fresh random matrix.
        self.embedding_matrix = torch.as_tensor(self.node_emd_init, dtype=torch.float32)
        self.bias_vector = torch.zeros([self.n_node])

        # Placeholders for the current minibatch; real ids are filled in
        # during training.  Bug fix: index_select requires an index tensor,
        # the original plain int 0 raised a TypeError at construction.
        self.node_id = torch.zeros(1, dtype=torch.long)
        self.node_neighbor_id = torch.zeros(1, dtype=torch.long)
        self.reward = 0

        # Relevance of every node to every other node (n_node x n_node).
        self.all_score = torch.matmul(self.embedding_matrix, self.embedding_matrix.t()) + self.bias_vector
        self.node_embedding = torch.index_select(self.embedding_matrix, 0, self.node_id)
        self.node_neighbor_embedding = torch.index_select(self.embedding_matrix, 0, self.node_neighbor_id)

        self.bias = torch.index_select(self.bias_vector, 0, self.node_neighbor_id)
        # Bug fix: per-pair inner product; the old `a * b.sum(0) + bias`
        # applied .sum(0) to the neighbor embedding alone (missing parens).
        self.score = (self.node_embedding * self.node_neighbor_embedding).sum(dim=1) + self.bias
        # torch.clamp stands in for utils.clip_by_tensor -- same clipping
        # contract (values forced into [1e-5, 1]).
        self.prob = torch.clamp(torch.sigmoid(self.score), 1e-5, 1)
") 38 | pickle_file = open(config.cache_filename, 'rb') 39 | self.trees = pickle.load(pickle_file) 40 | pickle_file.close() 41 | else: 42 | print("constructiong BFS-trees") 43 | pickle_file = open(config.cache_filename, 'wb') 44 | if config.multi_processing: 45 | self.construct_trees_with_mp(self.root_nodes) 46 | else: 47 | self.trees = self.construct_trees(self.root_nodes) 48 | pickle.dump(self.trees, pickle_file) 49 | pickle_file.close() 50 | 51 | print("building GAN model...") 52 | 53 | self.discriminator = None 54 | self.generator = None 55 | self.build_generator() 56 | self.build_discriminator() 57 | 58 | 59 | def construct_trees(self, nodes): 60 | """ use BFS algorithm to construct the BFS-trees 61 | 62 | Args: 63 | nodes: Graph节点中的列表 64 | 65 | Return: 66 | trees: dict, root_node_id -> tree, where tree is a dict: node_id -> list:[father, child_0, child_1, ...] 67 | """ 68 | 69 | trees = {} 70 | for root in tqdm.tqdm(nodes): 71 | trees[root] = {} 72 | 73 | # 把每棵树的父节点设为自己 74 | trees[root][root] = [root] 75 | 76 | used_nodes = set() 77 | queue = collections.deque([root]) 78 | while len(queue) > 0: 79 | cur_node = queue.popleft() 80 | used_nodes.add(cur_node) 81 | for sub_node in self.graph[cur_node]: 82 | if sub_node not in used_nodes: 83 | trees[root][cur_node].append(sub_node) 84 | trees[root][sub_node] = [cur_node] 85 | queue.append(sub_node) 86 | used_nodes.add(sub_node) 87 | return trees 88 | 89 | def build_generator(self): 90 | self.generator = generator.Generator(n_node=self.n_node, node_emd_init = self.node_embed_init_g) 91 | 92 | def build_discriminator(self): 93 | self.discriminator = discriminator.Discriminator(n_node = self.n_node, node_emd_init=self.node_embed_init_d) 94 | 95 | def train(self): 96 | 97 | self.write_embeddings_to_file() 98 | self.evaluation(self) 99 | 100 | print("start training ... 
") 101 | for epoch in range(config.n_epochs): 102 | print("epoch %d" % epoch) 103 | 104 | # 训练判别器 105 | center_nodes = [] 106 | neighbor_nodes = [] 107 | labels = [] 108 | for d_epoch in range(config.n_epochs_dis): 109 | # 每次 dis_interval 迭代都为判别器生成新节点 110 | if d_epoch % config.dis_interval == 0: 111 | center_nodes, neighbor_nodes, labels = self.prepare_data_for_d() 112 | # 开始训练 113 | train_size = len(center_nodes) 114 | start_list = list(range(0, train_size, config.batch_size_dis)) 115 | np.random.shuffle(start_list) 116 | for start in start_list: 117 | end = start + config.batch_size_dis 118 | 119 | loss = torch.nn.MultiLabelSoftMarginLoss(self.discriminator.score, np.array(labels[start:end])).sum(0) 120 | 121 | # TODO:L2正则化 122 | # node_neighbor_embedding = self.discriminator 123 | # node_embedding = pass 124 | # bias = passe 125 | 126 | # loss = torch.nn.MultiLabelSoftMarginLoss(self.discriminator.score, np.array(labels[start:end])).sum(0) + \ 127 | # config.lambda_dis * ( 128 | # sum(node_neighbor_embedding ** 2) / 2 + 129 | # sum(node_embedding ** 2) / 2 + 130 | # sum(bias ** 2) / 2 131 | # ) 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | def prepare_data_for_d(self): 143 | """为判别器提供正采样和负采样,并记录日志""" 144 | center_nodes = [] 145 | neighbor_nodes = [] 146 | labels = [] 147 | 148 | for i in self.root_nodes: 149 | if np.random.rand() < config.update_ratio: 150 | pos = self.graph[i] 151 | neg, _ = self.sample(i, self.trees[i], len(pos), for_d = True) 152 | if len(pos) != 0 and neg is not None: 153 | # 正采样 154 | center_nodes.extend([i] * len(pos)) 155 | neighbor_nodes.extend(pos) 156 | labels.extend([1] * len(pos)) 157 | 158 | # 负采样 159 | center_nodes.extend([i] * len(pos)) 160 | neighbor_nodes.extend(neg) 161 | labels.extend([0]*len(neg)) 162 | return center_nodes, neighbor_nodes, labels 163 | 164 | 165 | def sample(self, root, tree, sample_num, for_d): 166 | """从 BFS-tree 中采样节点 167 | 168 | Args: 169 | root: int, 根节点 170 | tree: dict, BFS-tree 171 
| sample_num: 需要采样的数量 172 | for_d : bool, 样本是用在生成器还是判别器 173 | 174 | Return: 175 | samples: list,采样节点的索引 176 | paths: list, 从根节点到采样节点的路径 177 | """ 178 | 179 | all_score = self.generator.all_score 180 | samples = [] 181 | paths = [] 182 | n = 0 183 | 184 | while len(samples) < sample_num: 185 | current_node = root 186 | previous_node = -1 187 | paths.append([]) 188 | is_root = True 189 | paths[n].append(current_node) 190 | while True: 191 | node_neighbor = tree[current_node][1:] if is_root else tree[current_node] 192 | is_root = False 193 | if len(node_neighbor) == 0: # 当树只有一个节点(根)时 194 | return None, None 195 | if for_d: # 跳过单跳节点(正采样) 196 | if node_neighbor == [root]: 197 | # 在当前的版本 None 被返回 198 | return None, None 199 | if root in node_neighbor: 200 | node_neighbr.remove(root) 201 | relevance_probability = all_score[current_node, node_neighbor] 202 | relevance_probability = utils.softmax(relevance_probability) 203 | next_node = np.random.choice(node_neighbor, size=1, p=relevance_probability)[0] # 选择下一个节点 204 | paths[n].append(next_node) 205 | if next_node == previous_node: # 结束条件 206 | samples.append(current_node) 207 | break 208 | previous_node = current_node 209 | current_node = next_node 210 | n = n + 1 211 | return samples, paths 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | def write_embeddings_to_file(self): 223 | """把G和D的Embedding写入文件里""" 224 | modes = [self.generator, self.discriminator] 225 | 226 | for i in range(2): 227 | embedding_matrix = modes[i].embedding_matrix 228 | index = np.array(range(self.n_node)).reshape(-1,1) 229 | embeddings_matrix = np.hstack([index, embeddings_matrix]) 230 | embeddings_list = embedding_matrix.tolist() 231 | embedding_str = [str(int(emb[0])) + "\t" + "\t".join([str(x) for x in emb[1:]]) + "\n" 232 | for emb in embedding_list] 233 | with open(config.emb_filenames[i], "w+") as f: 234 | lines = [str(self.n_node) + "\t" + str(config.n_emb) + "\n"] + embedding_str 235 | f.writelines(lines) 236 | 237 | 
@staticmethod 238 | def evaluation(self): 239 | results = [] 240 | if config.app == "link_prediction": 241 | for i in range(2): 242 | lpe = lp.LinkPredictEval( 243 | config.emb_filenames[i], config.test_filename, config.test_neg_filename, self.n_node, config.n_emb) 244 | result = lpe.eval_link_prediction() 245 | results.append(config.modes[i] + ":" + str(result) + "\n") 246 | 247 | with open(config.result_filename, mode="a+") as f: 248 | f.writelines(results) 249 | -------------------------------------------------------------------------------- /src/evaluation/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/liutongyang/GraphGAN-pytorch/3be0f757b431cb470b984de8d3e94105dcdd4afe/src/evaluation/__init__.py -------------------------------------------------------------------------------- /src/evaluation/link_prediction.py: -------------------------------------------------------------------------------- 1 | """ 2 | The class is used to evaluate the application of link prediction 3 | """ 4 | 5 | import numpy as np 6 | from sklearn.metrics import accuracy_score 7 | from src import utils 8 | 9 | 10 | class LinkPredictEval(object): 11 | def __init__(self, embed_filename, test_filename, test_neg_filename, n_node, n_embed): 12 | self.embed_filename = embed_filename # each line: node_id, embeddings(dim: n_embed) 13 | self.test_filename = test_filename # each line: node_id1, node_id2 14 | self.test_neg_filename = test_neg_filename # each line: node_id1, node_id2 15 | self.n_node = n_node 16 | self.n_embed = n_embed 17 | self.emd = utils.read_embeddings(embed_filename, n_node=n_node, n_embed=n_embed) 18 | 19 | def eval_link_prediction(self): 20 | test_edges = utils.read_edges_from_file(self.test_filename) 21 | test_edges_neg = utils.read_edges_from_file(self.test_neg_filename) 22 | test_edges.extend(test_edges_neg) 23 | 24 | # may exists isolated point 25 | score_res = [] 26 | for i in 
range(len(test_edges)): 27 | score_res.append(np.dot(self.emd[test_edges[i][0]], self.emd[test_edges[i][1]])) 28 | test_label = np.array(score_res) 29 | median = np.median(test_label) 30 | index_pos = test_label >= median 31 | index_neg = test_label < median 32 | test_label[index_pos] = 1 33 | test_label[index_neg] = 0 34 | true_label = np.zeros(test_label.shape) 35 | true_label[0: len(true_label) // 2] = 1 36 | accuracy = accuracy_score(true_label, test_label) 37 | 38 | return accuracy 39 | -------------------------------------------------------------------------------- /src/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def str_list_to_float(str_list): 5 | return [float(item) for item in str_list] 6 | 7 | 8 | def str_list_to_int(str_list): 9 | return [int(item) for item in str_list] 10 | 11 | 12 | def read_edges(train_filename, test_filename): 13 | """read data from files 14 | 15 | Args: 16 | train_filename: training file name 17 | test_filename: test file name 18 | 19 | Returns: 20 | node_num: int, number of nodes in the graph 21 | graph: dict, node_id -> list of neighbors in the graph 22 | """ 23 | 24 | graph = {} 25 | nodes = set() 26 | train_edges = read_edges_from_file(train_filename) 27 | test_edges = read_edges_from_file(test_filename) if test_filename != "" else [] 28 | 29 | for edge in train_edges: 30 | nodes.add(edge[0]) 31 | nodes.add(edge[1]) 32 | if graph.get(edge[0]) is None: 33 | graph[edge[0]] = [] 34 | if graph.get(edge[1]) is None: 35 | graph[edge[1]] = [] 36 | graph[edge[0]].append(edge[1]) 37 | graph[edge[1]].append(edge[0]) 38 | 39 | for edge in test_edges: 40 | nodes.add(edge[0]) 41 | nodes.add(edge[1]) 42 | if graph.get(edge[0]) is None: 43 | graph[edge[0]] = [] 44 | if graph.get(edge[1]) is None: 45 | graph[edge[1]] = [] 46 | 47 | return len(nodes), graph 48 | 49 | 50 | def read_edges_from_file(filename): 51 | with open(filename, "r") as f: 52 | lines = 
f.readlines() 53 | edges = [str_list_to_int(line.split()) for line in lines] 54 | return edges 55 | 56 | 57 | def read_embeddings(filename, n_node, n_embed): 58 | """read pretrained node embeddings 59 | """ 60 | 61 | with open(filename, "r") as f: 62 | lines = f.readlines()[1:] # skip the first line 63 | embedding_matrix = np.random.rand(n_node, n_embed) 64 | for line in lines: 65 | emd = line.split() 66 | 67 | # 把预训练的词向量替换到对应的位置,没有的就使用随机生成的,这样可以简单解决未登录词的问题 68 | embedding_matrix[int(emd[0]), :] = str_list_to_float(emd[1:]) 69 | return embedding_matrix 70 | 71 | 72 | def reindex_node_id(edges): 73 | """reindex the original node ID to [0, node_num) 74 | 75 | Args: 76 | edges: list, element is also a list like [node_id_1, node_id_2] 77 | Returns: 78 | new_edges: list[[1,2],[2,3]] 79 | new_nodes: list [1,2,3] 80 | """ 81 | 82 | node_set = set() 83 | for edge in edges: 84 | node_set = node_set.union(set(edge)) 85 | 86 | node_set = list(node_set) 87 | new_nodes = set() 88 | new_edges = [] 89 | for edge in edges: 90 | new_edges.append([node_set.index(edge[0]), node_set.index(edge[1])]) 91 | new_nodes = new_nodes.add(node_set.index(edge[0])) 92 | new_nodes = new_nodes.add(node_set.index(edge[1])) 93 | 94 | new_nodes = list(new_nodes) 95 | return new_edges, new_nodes 96 | 97 | 98 | def generate_neg_links(train_filename, test_filename, test_neg_filename): 99 | """ 100 | generate neg links for link prediction evaluation 101 | Args: 102 | train_filename: the training edges 103 | test_filename: the test edges 104 | test_neg_filename: the negative edges for test 105 | """ 106 | 107 | train_edges = read_edges_from_file(train_filename) 108 | test_edges = read_edges_from_file(test_filename) 109 | neighbors = {} # dict, node_ID -> list_of_neighbors 110 | for edge in train_edges + test_edges: 111 | if neighbors.get(edge[0]) is None: 112 | neighbors[edge[0]] = [] 113 | if neighbors.get(edge[1]) is None: 114 | neighbors[edge[1]] = [] 115 | neighbors[edge[0]].append(edge[1]) 116 | 
neighbors[edge[1]].append(edge[0]) 117 | nodes = set([x for x in range(len(neighbors))]) 118 | 119 | # for each edge in the test set, sample a negative edge 120 | neg_edges = [] 121 | 122 | for i in range(len(test_edges)): 123 | edge = test_edges[i] 124 | start_node = edge[0] 125 | neg_nodes = list(nodes.difference(set(neighbors[edge[0]] + [edge[0]]))) 126 | neg_node = np.random.choice(neg_nodes, size=1)[0] 127 | neg_edges.append([start_node, neg_node]) 128 | neg_edges_str = [str(x[0]) + "\t" + str(x[1]) + "\n" for x in neg_edges] 129 | with open(test_neg_filename, "w+") as f: 130 | f.writelines(neg_edges_str) 131 | 132 | 133 | def softmax(x): 134 | e_x = np.exp(x - np.max(x)) # for computation stability 135 | return e_x / e_x.sum() 136 | 137 | def clip_by_tensor(t,t_min,t_max): 138 | """ 139 | clip_by_tensor 140 | :param t: tensor 141 | :param t_min: min 142 | :param t_max: max 143 | :return: cliped tensor 144 | """ 145 | t=t.float() 146 | # t_min=t_min.float() 147 | # t_max=t_max.float() 148 | 149 | result = (t >= t_min).float() * t + (t < t_min).float() * t_min 150 | result = (result <= t_max).float() * result + (result > t_max).float() * t_max 151 | return result --------------------------------------------------------------------------------