├── src
│   ├── run_cora_cgnn.py
│   ├── run_cora_wcgnn.py
│   ├── gnn.py
│   ├── wgnn.py
│   ├── trainer.py
│   ├── loader.py
│   └── train.py
├── README.md
├── LICENSE.txt
└── data
    └── cora
        ├── train.txt
        ├── dev.txt
        ├── test.txt
        └── label.txt
/src/run_cora_cgnn.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import copy
4 | import json
5 | import datetime
6 |
7 | opt = dict()
8 |
9 | opt['dataset'] = '../data/cora'
10 | opt['hidden_dim'] = 16
11 | opt['input_dropout'] = 0.5
12 | opt['dropout'] = 0
13 | opt['optimizer'] = 'rmsprop'
14 | opt['lr'] = 0.0047
15 | opt['decay'] = 5e-4
16 | opt['self_link_weight'] = 0.555
17 | opt['alpha'] = 0.918
18 | opt['epoch'] = 400
19 | opt['time'] = 12.1
20 |
21 | def generate_command(opt):
22 | cmd = 'python train.py'
23 |     for key, val in opt.items():
24 |         cmd += ' --' + key + ' ' + str(val)
25 | return cmd
26 |
27 | def run(opt):
28 | opt_ = copy.deepcopy(opt)
29 | os.system(generate_command(opt_))
30 |
31 | for k in range(1):
32 | seed = k + 1
33 |     opt['seed'] = seed
34 | run(opt)
35 |
--------------------------------------------------------------------------------
/src/run_cora_wcgnn.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import copy
4 | import json
5 | import datetime
6 |
7 | opt = dict()
8 |
9 | opt['dataset'] = '../data/cora'
10 | opt['hidden_dim'] = 16
11 | opt['input_dropout'] = 0.5
12 | opt['dropout'] = 0
13 | opt['optimizer'] = 'rmsprop'
14 | opt['lr'] = 0.00211
15 | opt['decay'] = 5e-4
16 | opt['self_link_weight'] = 0.947
17 | opt['alpha'] = 0.95
18 | opt['epoch'] = 400
19 | opt['time'] = 14.3
20 | opt['weight'] = True
21 |
22 | def generate_command(opt):
23 | cmd = 'python train.py'
24 |     for key, val in opt.items():
25 |         cmd += ' --' + key + ' ' + str(val)
26 | return cmd
27 |
28 | def run(opt):
29 | opt_ = copy.deepcopy(opt)
30 | os.system(generate_command(opt_))
31 |
32 | for k in range(1):
33 | seed = k + 1
34 |     opt['seed'] = seed
35 | run(opt)
36 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Continuous Graph Neural Networks
2 | Louis-Pascal A. C. Xhonneux\*, Meng Qu\*, and Jian Tang
3 | [https://arxiv.org/pdf/1912.00967.pdf](https://arxiv.org/pdf/1912.00967.pdf)
4 |
5 | ## Dependencies
6 |
7 | The code has been tested under Python 3.6.7 and requires the installation of the following packages and their dependencies:
8 |
9 | - `pytorch==1.2.0`
10 | - `numpy==1.17.2`
11 | - `torchdiffeq==0.0.1`
12 |
13 | To run the model without weights use `cd src; python run_cora_cgnn.py` and for the model with weights use `cd src; python run_cora_wcgnn.py`.
14 |
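For example, from the repository root:

```
cd src
python run_cora_cgnn.py    # CGNN without weights
python run_cora_wcgnn.py   # CGNN with weights
```
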
15 | ## References
16 | If you use this code in your research, please cite the following in your manuscript:
17 |
18 | ```
19 | @misc{xhonneux2019continuous,
20 | title={Continuous Graph Neural Networks},
21 | author={Louis-Pascal A. C. Xhonneux and Meng Qu and Jian Tang},
22 | year={2019},
23 | eprint={1912.00967},
24 | archivePrefix={arXiv},
25 | primaryClass={cs.LG}
26 | }
27 | ```
28 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Louis-Pascal Xhonneux, Meng Qu, Jian Tang
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/data/cora/train.txt:
--------------------------------------------------------------------------------
1 | 0
2 | 1
3 | 2
4 | 3
5 | 4
6 | 5
7 | 6
8 | 7
9 | 8
10 | 9
11 | 10
12 | 11
13 | 12
14 | 13
15 | 14
16 | 15
17 | 16
18 | 17
19 | 18
20 | 19
21 | 20
22 | 21
23 | 22
24 | 23
25 | 24
26 | 25
27 | 26
28 | 27
29 | 28
30 | 29
31 | 30
32 | 31
33 | 32
34 | 33
35 | 34
36 | 35
37 | 36
38 | 37
39 | 38
40 | 39
41 | 40
42 | 41
43 | 42
44 | 43
45 | 44
46 | 45
47 | 46
48 | 47
49 | 48
50 | 49
51 | 50
52 | 51
53 | 52
54 | 53
55 | 54
56 | 55
57 | 56
58 | 57
59 | 58
60 | 59
61 | 60
62 | 61
63 | 62
64 | 63
65 | 64
66 | 65
67 | 66
68 | 67
69 | 68
70 | 69
71 | 70
72 | 71
73 | 72
74 | 73
75 | 74
76 | 75
77 | 76
78 | 77
79 | 78
80 | 79
81 | 80
82 | 81
83 | 82
84 | 83
85 | 84
86 | 85
87 | 86
88 | 87
89 | 88
90 | 89
91 | 90
92 | 91
93 | 92
94 | 93
95 | 94
96 | 95
97 | 96
98 | 97
99 | 98
100 | 99
101 | 100
102 | 101
103 | 102
104 | 103
105 | 104
106 | 105
107 | 106
108 | 107
109 | 108
110 | 109
111 | 110
112 | 111
113 | 112
114 | 113
115 | 114
116 | 115
117 | 116
118 | 117
119 | 118
120 | 119
121 | 120
122 | 121
123 | 122
124 | 123
125 | 124
126 | 125
127 | 126
128 | 127
129 | 128
130 | 129
131 | 130
132 | 131
133 | 132
134 | 133
135 | 134
136 | 135
137 | 136
138 | 137
139 | 138
140 | 139
141 |
--------------------------------------------------------------------------------
/src/gnn.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import math
3 | import numpy as np
4 | import torch
5 | from torch import nn
6 | from torch.nn import init
7 | from torch.autograd import Variable
8 | import torch.nn.functional as F
9 |
10 | # Whether to use the adjoint method for backpropagation through the ODE solver.
11 | adjoint = False
12 | if adjoint:
13 | from torchdiffeq import odeint_adjoint as odeint
14 | else:
15 | from torchdiffeq import odeint
16 |
17 |
18 | # Define the ODE function.
19 | # Input:
20 | # --- t: A tensor with shape [], meaning the current time.
21 | # --- x: A tensor with shape [#batches, dims], meaning the value of x at t.
22 | # Output:
23 | # --- dx/dt: A tensor with shape [#batches, dims], meaning the derivative of x at t.
24 | class ODEFunc(nn.Module):
25 |
26 | # currently requires in_features = out_features
27 | def __init__(self, in_features, out_features, opt, adj, deg):
28 | super(ODEFunc, self).__init__()
29 | self.opt = opt
30 | self.adj = adj
31 | self.x0 = None
32 | self.nfe = 0
33 | self.in_features = in_features
34 | self.out_features = out_features
35 | self.alpha = opt['alpha']
36 | self.alpha_train = nn.Parameter(self.alpha*torch.ones(adj.shape[1]))
37 |
38 | self.w = nn.Parameter(torch.eye(opt['hidden_dim']))
39 | self.d = nn.Parameter(torch.zeros(opt['hidden_dim']) + 1)
40 |
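    # A short note on the dynamics integrated below, read off this forward pass
    # (the unweighted CGNN variant): with A the normalised sparse adjacency built in
    # loader.Graph.to_symmetric and x0 the encoded input set via ODEblock.set_x0,
    #
    #     dx/dt = sigmoid(alpha) / 2 * (A - I) x + x0,
    #
    # where alpha is a learnable per-node gate. self.w and self.d are defined but
    # not used in this unweighted variant.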
41 | def forward(self, t, x):
42 | self.nfe +=1
43 |
44 | alph = F.sigmoid(self.alpha_train).unsqueeze(dim=1)
45 | ax = torch.spmm(self.adj, x)
46 | f = alph * 0.5 * (ax-x) + self.x0
47 | return f
48 |
49 |
50 |
51 | class ODEblock(nn.Module):
52 | def __init__(self, odefunc, t=torch.tensor([0,1])):
53 | super(ODEblock, self).__init__()
54 | self.t = t
55 | self.odefunc = odefunc
56 | self.nfe = 0
57 |
58 | def set_x0(self, x0):
59 | self.odefunc.x0 = x0.clone().detach()
60 |
61 | def forward(self, x):
62 | self.nfe += 1
63 |
64 | t = self.t.type_as(x)
65 | z = odeint(self.odefunc, x, t)[1]
66 | return z
67 |
68 | def __repr__(self):
69 | return self.__class__.__name__ + '( Time Interval ' + str(self.t[0].item()) + ' -> ' + str(self.t[1].item()) \
70 | + ")"
71 |
72 |
73 | # Define the GNN model.
74 | class GNN(nn.Module):
75 | def __init__(self, opt, adj, deg, time):
76 | super(GNN, self).__init__()
77 | self.opt = opt
78 | self.adj = adj
79 | self.T = time
80 |
81 | self.m1 = nn.Linear(opt['num_feature'], opt['hidden_dim'])
82 |
83 | self.odeblock = ODEblock(ODEFunc(2*opt['hidden_dim'], 2*opt['hidden_dim'], opt, adj, deg), t=torch.tensor([0,self.T]))
84 |
85 | self.m2 = nn.Linear(opt['hidden_dim'], opt['num_class'])
86 |
87 | if opt['cuda']:
88 | self.cuda()
89 |
90 | def reset(self):
91 | self.m1.reset_parameters()
92 | self.m2.reset_parameters()
93 |
94 | def forward(self, x):
95 | # Encode each node based on its feature.
96 | x = F.dropout(x, self.opt['input_dropout'], training=self.training)
97 | x = self.m1(x)
98 |
99 | # Solve the initial value problem of the ODE.
100 |         c_aux = torch.zeros_like(x)  # auxiliary half of the state, created on the same device as x
101 | x = torch.cat([x,c_aux], dim=1)
102 | self.odeblock.set_x0(x)
103 |
104 | z = self.odeblock(x)
105 | z = torch.split(z, x.shape[1]//2, dim=1)[0]
106 |
107 | # Activation.
108 | z = F.relu(z)
109 |
110 | # Dropout.
111 | z = F.dropout(z, self.opt['dropout'], training=self.training)
112 |
113 | # Decode each node embedding to get node label.
114 | z = self.m2(z)
115 | return z
116 |
117 |
--------------------------------------------------------------------------------
/src/wgnn.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import math
3 | import numpy as np
4 | import torch
5 | from torch import nn
6 | from torch.nn import init
7 | from torch.autograd import Variable
8 | import torch.nn.functional as F
9 |
10 | # Whether to use the adjoint method for backpropagation through the ODE solver.
11 | adjoint = False
12 | if adjoint:
13 | from torchdiffeq import odeint_adjoint as odeint
14 | else:
15 | from torchdiffeq import odeint
16 |
17 |
18 | # Define the ODE function.
19 | # Input:
20 | # --- t: A tensor with shape [], meaning the current time.
21 | # --- x: A tensor with shape [#batches, dims], meaning the value of x at t.
22 | # Output:
23 | # --- dx/dt: A tensor with shape [#batches, dims], meaning the derivative of x at t.
24 | class ODEFuncW(nn.Module):
25 |
26 | # currently requires in_features = out_features
27 | def __init__(self, in_features, out_features, opt, adj, deg):
28 | super(ODEFuncW, self).__init__()
29 | self.opt = opt
30 | self.adj = adj
31 | self.x0 = None
32 | self.nfe = 0
33 | self.in_features = in_features
34 | self.out_features = out_features
35 | self.alpha = opt['alpha']
36 | self.alpha_train = nn.Parameter(self.alpha*torch.ones(adj.shape[1]))
37 |
38 | self.w = nn.Parameter(torch.eye(2*opt['hidden_dim']))
39 | self.d = nn.Parameter(torch.zeros(2*opt['hidden_dim']) + 1)
40 |
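    # A short note on the dynamics integrated below, read off this forward pass
    # (the weighted CGNN variant): with d clamped to [0, 1] and W = w diag(d) w^T,
    #
    #     dx/dt = sigmoid(alpha) / 2 * (A - I) x + x (W - I) + x0,
    #
    # i.e. the unweighted dynamics plus a learnable channel-mixing term. Keeping w
    # close to orthogonal (see Trainer.updatew) keeps the eigenvalues of W inside [0, 1].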
41 | def forward(self, t, x):
42 | self.nfe +=1
43 |
44 | alph = F.sigmoid(self.alpha_train).unsqueeze(dim=1)
45 | ax = torch.spmm(self.adj, x)
46 |
47 | d = torch.clamp(self.d, min=0, max=1)
48 | w = torch.mm(self.w * d, torch.t(self.w))
49 | xw = torch.spmm(x, w)
50 |
51 | f = alph * 0.5 * (ax - x) + xw - x + self.x0
52 |
53 | return f
54 |
55 |
56 |
57 | class ODEblockW(nn.Module):
58 | def __init__(self, odefunc, t=torch.tensor([0,1])):
59 | super(ODEblockW, self).__init__()
60 | self.t = t
61 | self.odefunc = odefunc
62 | self.nfe = 0
63 |
64 | def set_x0(self, x0):
65 | self.odefunc.x0 = x0.clone().detach()
66 |
67 | def forward(self, x):
68 | self.nfe += 1
69 |
70 | t = self.t.type_as(x)
71 | z = odeint(self.odefunc, x, t)[1]
72 | return z
73 |
74 | def __repr__(self):
75 | return self.__class__.__name__ + '( Time Interval ' + str(self.t[0].item()) + ' -> ' + str(self.t[1].item()) \
76 | + ")"
77 |
78 |
79 | # Define the GNN model.
80 | class WGNN(nn.Module):
81 | def __init__(self, opt, adj, deg, time):
82 | super(WGNN, self).__init__()
83 | self.opt = opt
84 | self.adj = adj
85 | self.T = time
86 |
87 | self.m1 = nn.Linear(opt['num_feature'], opt['hidden_dim'])
88 |
89 | self.odeblock = ODEblockW(ODEFuncW(2*opt['hidden_dim'], 2*opt['hidden_dim'], opt, adj, deg), t=torch.tensor([0,self.T]))
90 |
91 | self.m2 = nn.Linear(opt['hidden_dim'], opt['num_class'])
92 |
93 | if opt['cuda']:
94 | self.cuda()
95 |
96 | def reset(self):
97 | self.m1.reset_parameters()
98 | self.m2.reset_parameters()
99 |
100 | def forward(self, x):
101 | # Encode each node based on its feature.
102 | x = F.dropout(x, self.opt['input_dropout'], training=self.training)
103 | x = self.m1(x)
104 |
105 | # Solve the initial value problem of the ODE.
106 |         c_aux = torch.zeros_like(x)  # auxiliary half of the state, created on the same device as x
107 | x = torch.cat([x,c_aux], dim=1)
108 | self.odeblock.set_x0(x)
109 |
110 | z = self.odeblock(x)
111 | z = torch.split(z, x.shape[1]//2, dim=1)[0]
112 |
113 | # Activation.
114 | z = F.relu(z)
115 |
116 | # Dropout.
117 | z = F.dropout(z, self.opt['dropout'], training=self.training)
118 |
119 | # Decode each node embedding to get node label.
120 | z = self.m2(z)
121 | return z
122 |
123 |
--------------------------------------------------------------------------------
/data/cora/dev.txt:
--------------------------------------------------------------------------------
1 | 140
2 | 141
3 | 142
4 | 143
5 | 144
6 | 145
7 | 146
8 | 147
9 | 148
10 | 149
11 | 150
12 | 151
13 | 152
14 | 153
15 | 154
16 | 155
17 | 156
18 | 157
19 | 158
20 | 159
21 | 160
22 | 161
23 | 162
24 | 163
25 | 164
26 | 165
27 | 166
28 | 167
29 | 168
30 | 169
31 | 170
32 | 171
33 | 172
34 | 173
35 | 174
36 | 175
37 | 176
38 | 177
39 | 178
40 | 179
41 | 180
42 | 181
43 | 182
44 | 183
45 | 184
46 | 185
47 | 186
48 | 187
49 | 188
50 | 189
51 | 190
52 | 191
53 | 192
54 | 193
55 | 194
56 | 195
57 | 196
58 | 197
59 | 198
60 | 199
61 | 200
62 | 201
63 | 202
64 | 203
65 | 204
66 | 205
67 | 206
68 | 207
69 | 208
70 | 209
71 | 210
72 | 211
73 | 212
74 | 213
75 | 214
76 | 215
77 | 216
78 | 217
79 | 218
80 | 219
81 | 220
82 | 221
83 | 222
84 | 223
85 | 224
86 | 225
87 | 226
88 | 227
89 | 228
90 | 229
91 | 230
92 | 231
93 | 232
94 | 233
95 | 234
96 | 235
97 | 236
98 | 237
99 | 238
100 | 239
101 | 240
102 | 241
103 | 242
104 | 243
105 | 244
106 | 245
107 | 246
108 | 247
109 | 248
110 | 249
111 | 250
112 | 251
113 | 252
114 | 253
115 | 254
116 | 255
117 | 256
118 | 257
119 | 258
120 | 259
121 | 260
122 | 261
123 | 262
124 | 263
125 | 264
126 | 265
127 | 266
128 | 267
129 | 268
130 | 269
131 | 270
132 | 271
133 | 272
134 | 273
135 | 274
136 | 275
137 | 276
138 | 277
139 | 278
140 | 279
141 | 280
142 | 281
143 | 282
144 | 283
145 | 284
146 | 285
147 | 286
148 | 287
149 | 288
150 | 289
151 | 290
152 | 291
153 | 292
154 | 293
155 | 294
156 | 295
157 | 296
158 | 297
159 | 298
160 | 299
161 | 300
162 | 301
163 | 302
164 | 303
165 | 304
166 | 305
167 | 306
168 | 307
169 | 308
170 | 309
171 | 310
172 | 311
173 | 312
174 | 313
175 | 314
176 | 315
177 | 316
178 | 317
179 | 318
180 | 319
181 | 320
182 | 321
183 | 322
184 | 323
185 | 324
186 | 325
187 | 326
188 | 327
189 | 328
190 | 329
191 | 330
192 | 331
193 | 332
194 | 333
195 | 334
196 | 335
197 | 336
198 | 337
199 | 338
200 | 339
201 | 340
202 | 341
203 | 342
204 | 343
205 | 344
206 | 345
207 | 346
208 | 347
209 | 348
210 | 349
211 | 350
212 | 351
213 | 352
214 | 353
215 | 354
216 | 355
217 | 356
218 | 357
219 | 358
220 | 359
221 | 360
222 | 361
223 | 362
224 | 363
225 | 364
226 | 365
227 | 366
228 | 367
229 | 368
230 | 369
231 | 370
232 | 371
233 | 372
234 | 373
235 | 374
236 | 375
237 | 376
238 | 377
239 | 378
240 | 379
241 | 380
242 | 381
243 | 382
244 | 383
245 | 384
246 | 385
247 | 386
248 | 387
249 | 388
250 | 389
251 | 390
252 | 391
253 | 392
254 | 393
255 | 394
256 | 395
257 | 396
258 | 397
259 | 398
260 | 399
261 | 400
262 | 401
263 | 402
264 | 403
265 | 404
266 | 405
267 | 406
268 | 407
269 | 408
270 | 409
271 | 410
272 | 411
273 | 412
274 | 413
275 | 414
276 | 415
277 | 416
278 | 417
279 | 418
280 | 419
281 | 420
282 | 421
283 | 422
284 | 423
285 | 424
286 | 425
287 | 426
288 | 427
289 | 428
290 | 429
291 | 430
292 | 431
293 | 432
294 | 433
295 | 434
296 | 435
297 | 436
298 | 437
299 | 438
300 | 439
301 | 440
302 | 441
303 | 442
304 | 443
305 | 444
306 | 445
307 | 446
308 | 447
309 | 448
310 | 449
311 | 450
312 | 451
313 | 452
314 | 453
315 | 454
316 | 455
317 | 456
318 | 457
319 | 458
320 | 459
321 | 460
322 | 461
323 | 462
324 | 463
325 | 464
326 | 465
327 | 466
328 | 467
329 | 468
330 | 469
331 | 470
332 | 471
333 | 472
334 | 473
335 | 474
336 | 475
337 | 476
338 | 477
339 | 478
340 | 479
341 | 480
342 | 481
343 | 482
344 | 483
345 | 484
346 | 485
347 | 486
348 | 487
349 | 488
350 | 489
351 | 490
352 | 491
353 | 492
354 | 493
355 | 494
356 | 495
357 | 496
358 | 497
359 | 498
360 | 499
361 | 500
362 | 501
363 | 502
364 | 503
365 | 504
366 | 505
367 | 506
368 | 507
369 | 508
370 | 509
371 | 510
372 | 511
373 | 512
374 | 513
375 | 514
376 | 515
377 | 516
378 | 517
379 | 518
380 | 519
381 | 520
382 | 521
383 | 522
384 | 523
385 | 524
386 | 525
387 | 526
388 | 527
389 | 528
390 | 529
391 | 530
392 | 531
393 | 532
394 | 533
395 | 534
396 | 535
397 | 536
398 | 537
399 | 538
400 | 539
401 | 540
402 | 541
403 | 542
404 | 543
405 | 544
406 | 545
407 | 546
408 | 547
409 | 548
410 | 549
411 | 550
412 | 551
413 | 552
414 | 553
415 | 554
416 | 555
417 | 556
418 | 557
419 | 558
420 | 559
421 | 560
422 | 561
423 | 562
424 | 563
425 | 564
426 | 565
427 | 566
428 | 567
429 | 568
430 | 569
431 | 570
432 | 571
433 | 572
434 | 573
435 | 574
436 | 575
437 | 576
438 | 577
439 | 578
440 | 579
441 | 580
442 | 581
443 | 582
444 | 583
445 | 584
446 | 585
447 | 586
448 | 587
449 | 588
450 | 589
451 | 590
452 | 591
453 | 592
454 | 593
455 | 594
456 | 595
457 | 596
458 | 597
459 | 598
460 | 599
461 | 600
462 | 601
463 | 602
464 | 603
465 | 604
466 | 605
467 | 606
468 | 607
469 | 608
470 | 609
471 | 610
472 | 611
473 | 612
474 | 613
475 | 614
476 | 615
477 | 616
478 | 617
479 | 618
480 | 619
481 | 620
482 | 621
483 | 622
484 | 623
485 | 624
486 | 625
487 | 626
488 | 627
489 | 628
490 | 629
491 | 630
492 | 631
493 | 632
494 | 633
495 | 634
496 | 635
497 | 636
498 | 637
499 | 638
500 | 639
501 |
--------------------------------------------------------------------------------
/src/trainer.py:
--------------------------------------------------------------------------------
1 | import math
2 | import numpy as np
3 | import torch
4 | from torch import nn
5 | from torch.nn import init
6 | from torch.autograd import Variable
7 | import torch.nn.functional as F
8 | from torch.optim import Optimizer
9 |
10 | def get_optimizer(name, parameters, lr, weight_decay=0):
11 | if name == 'sgd':
12 | return torch.optim.SGD(parameters, lr=lr, weight_decay=weight_decay)
13 | elif name == 'rmsprop':
14 | return torch.optim.RMSprop(parameters, lr=lr, weight_decay=weight_decay)
15 | elif name == 'adagrad':
16 | return torch.optim.Adagrad(parameters, lr=lr, weight_decay=weight_decay)
17 | elif name == 'adam':
18 | return torch.optim.Adam(parameters, lr=lr, weight_decay=weight_decay)
19 | elif name == 'adamax':
20 | return torch.optim.Adamax(parameters, lr=lr, weight_decay=weight_decay)
21 | else:
22 | raise Exception("Unsupported optimizer: {}".format(name))
23 |
24 | # Running counter/average, used to track ODE function evaluations in the forward and backward passes.
25 | class Meter(object):
26 |
27 | def __init__(self):
28 | self.reset()
29 |
30 | def reset(self):
31 | self.val = None
32 | self.sum = 0
33 | self.cnt = 0
34 |
35 | def update(self, val):
36 | self.val = val
37 | self.sum += val
38 | self.cnt += 1
39 |
40 | def get_average(self):
41 | if self.cnt == 0:
42 | return 0
43 | return self.sum / self.cnt
44 |
45 | def get_value(self):
46 | return self.val
47 |
48 | class Trainer(object):
49 | def __init__(self, opt, model):
50 | self.opt = opt
51 | self.model = model
52 | self.fm = Meter()
53 | self.bm = Meter()
54 | self.criterion = nn.CrossEntropyLoss()
55 | self.parameters = [p for p in self.model.parameters() if p.requires_grad]
56 | if opt['cuda']:
57 | self.criterion.cuda()
58 | self.optimizer = get_optimizer(self.opt['optimizer'], self.parameters, self.opt['lr'], self.opt['decay'])
59 |
60 | def reset(self):
61 | self.model.reset()
62 | self.optimizer = get_optimizer(self.opt['optimizer'], self.parameters, self.opt['lr'], self.opt['decay'])
63 |
64 | # Train model with hard labels.
65 | def update(self, inputs, target, idx):
66 | if self.opt['cuda']:
67 | inputs = inputs.cuda()
68 | target = target.cuda()
69 | idx = idx.cuda()
70 |
71 | self.model.train()
72 | self.optimizer.zero_grad()
73 |
74 | logits = self.model(inputs)
75 | loss = self.criterion(logits[idx], target[idx])
76 |
77 | self.fm.update(self.model.odeblock.nfe)
78 | self.model.odeblock.nfe = 0
79 |
80 | loss.backward()
81 | self.optimizer.step()
82 |
83 | self.bm.update(self.model.odeblock.nfe)
84 | self.model.odeblock.nfe = 0
85 |
86 | return loss.item()
87 |
88 | def updatew(self, inputs, target, idx):
89 | if self.opt['cuda']:
90 | inputs = inputs.cuda()
91 | target = target.cuda()
92 | idx = idx.cuda()
93 |
94 | self.model.train()
95 | self.optimizer.zero_grad()
96 |
97 | logits = self.model(inputs)
98 | loss = self.criterion(logits[idx], target[idx])
99 |
100 | self.fm.update(self.model.odeblock.odefunc.nfe)
101 | self.model.odeblock.odefunc.nfe = 0
102 |
103 | loss.backward()
104 | self.optimizer.step()
105 |
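        # The two lines below are a retraction that nudges w back towards an orthogonal
        # matrix after the gradient step: if W^T W = I then (1 + beta) W - beta W W^T W = W,
        # and for small deviations the update pulls the singular values of W back towards 1.
        # This keeps the W = w diag(d) w^T parameterisation in wgnn.ODEFuncW well behaved.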
106 | W = self.model.odeblock.odefunc.w.data
107 | beta = 0.5
108 | W.copy_((1 + beta) * W - beta * W.mm(W.transpose(0, 1).mm(W)))
109 |
110 | self.bm.update(self.model.odeblock.odefunc.nfe)
111 | self.model.odeblock.odefunc.nfe = 0
112 |
113 | return loss.item()
114 |
115 | # Train model with soft labels, e.g., [0.1, 0.2, 0.7].
116 | def update_soft(self, inputs, target, idx):
117 | if self.opt['cuda']:
118 | inputs = inputs.cuda()
119 | target = target.cuda()
120 | idx = idx.cuda()
121 |
122 | self.model.train()
123 | self.optimizer.zero_grad()
124 |
125 | logits = self.model(inputs)
126 | logits = torch.log_softmax(logits, dim=-1)
127 | loss = -torch.mean(torch.sum(target[idx] * logits[idx], dim=-1))
128 |
129 |         self.fm.update(self.model.odeblock.odefunc.nfe)
130 |         self.model.odeblock.odefunc.nfe = 0
131 |
132 | loss.backward()
133 | self.optimizer.step()
134 |
135 |         self.bm.update(self.model.odeblock.odefunc.nfe)
136 |         self.model.odeblock.odefunc.nfe = 0
137 |
138 | return loss.item()
139 |
140 | # Evaluate model.
141 | def evaluate(self, inputs, target, idx):
142 | if self.opt['cuda']:
143 | inputs = inputs.cuda()
144 | target = target.cuda()
145 | idx = idx.cuda()
146 |
147 | self.model.eval()
148 |
149 | logits = self.model(inputs)
150 | loss = self.criterion(logits[idx], target[idx])
151 | preds = torch.max(logits[idx], dim=1)[1]
152 | correct = preds.eq(target[idx]).double()
153 | accuracy = correct.sum() / idx.size(0)
154 |
155 | return loss.item(), preds, accuracy.item()
156 |
157 | def predict(self, inputs, tau=1):
158 | if self.opt['cuda']:
159 | inputs = inputs.cuda()
160 |
161 | self.model.eval()
162 |
163 | logits = self.model(inputs) / tau
164 |
165 | logits = torch.softmax(logits, dim=-1).detach()
166 |
167 | return logits
168 |
169 | def save(self, filename):
170 | params = {
171 | 'model': self.model.state_dict(),
172 | 'optim': self.optimizer.state_dict()
173 | }
174 | try:
175 | torch.save(params, filename)
176 | except BaseException:
177 | print("[Warning: Saving failed... continuing anyway.]")
178 |
179 | def load(self, filename):
180 | try:
181 | checkpoint = torch.load(filename)
182 | except BaseException:
183 | print("Cannot load model from {}".format(filename))
184 | exit()
185 | self.model.load_state_dict(checkpoint['model'])
186 | self.optimizer.load_state_dict(checkpoint['optim'])
187 |
--------------------------------------------------------------------------------
/src/loader.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import math
4 | import numpy as np
5 | import torch
6 | from torch.autograd import Variable
7 |
8 | class Vocab(object):
9 |
10 | def __init__(self, file_name, cols, with_padding=False):
11 | self.itos = []
12 | self.stoi = {}
13 | self.vocab_size = 0
14 |
15 | if with_padding:
16 | string = ''
17 | self.stoi[string] = self.vocab_size
18 | self.itos.append(string)
19 | self.vocab_size += 1
20 |
21 | fi = open(file_name, 'r')
22 | for line in fi:
23 | items = line.strip().split('\t')
24 | for col in cols:
25 | item = items[col]
26 | strings = item.strip().split(' ')
27 | for string in strings:
28 | string = string.split(':')[0]
29 | if string not in self.stoi:
30 | self.stoi[string] = self.vocab_size
31 | self.itos.append(string)
32 | self.vocab_size += 1
33 | fi.close()
34 |
35 | def __len__(self):
36 | return self.vocab_size
37 |
38 | class EntityLabel(object):
39 |
40 | def __init__(self, file_name, entity, label):
41 | self.vocab_n, self.col_n = entity
42 | self.vocab_l, self.col_l = label
43 | self.itol = [-1 for k in range(self.vocab_n.vocab_size)]
44 |
45 | fi = open(file_name, 'r')
46 | for line in fi:
47 | items = line.strip().split('\t')
48 | sn, sl = items[self.col_n], items[self.col_l]
49 | n = self.vocab_n.stoi.get(sn, -1)
50 | l = self.vocab_l.stoi.get(sl, -1)
51 | if n == -1:
52 | continue
53 | self.itol[n] = l
54 | fi.close()
55 |
56 | class EntityFeature(object):
57 |
58 | def __init__(self, file_name, entity, feature):
59 | self.vocab_n, self.col_n = entity
60 | self.vocab_f, self.col_f = feature
61 | self.itof = [[] for k in range(len(self.vocab_n))]
62 | self.one_hot = []
63 |
64 | fi = open(file_name, 'r')
65 | for line in fi:
66 | items = line.strip().split('\t')
67 | sn, sf = items[self.col_n], items[self.col_f]
68 | n = self.vocab_n.stoi.get(sn, -1)
69 | if n == -1:
70 | continue
71 | for s in sf.strip().split(' '):
72 | f = self.vocab_f.stoi.get(s.split(':')[0], -1)
73 | w = float(s.split(':')[1])
74 | if f == -1:
75 | continue
76 | self.itof[n].append((f, w))
77 | fi.close()
78 |
79 | def to_one_hot(self, binary=False):
80 | self.one_hot = [[0 for j in range(len(self.vocab_f))] for i in range(len(self.vocab_n))]
81 | for k in range(len(self.vocab_n)):
82 | sm = 0
83 | for fid, wt in self.itof[k]:
84 | if binary:
85 | wt = 1.0
86 | sm += wt
87 | for fid, wt in self.itof[k]:
88 | if binary:
89 | wt = 1.0
90 |                 self.one_hot[k][fid] = wt / sm  # normalised feature weight (each node's row sums to 1)
91 |
92 | def to_index(self):
93 | max_length = max([len(fs) for fs in self.itof])
94 | self.index = [[int(0) for j in range(max_length)] for i in range(len(self.vocab_n))]
95 | for k in range(len(self.vocab_n)):
96 | for i, (fid, wt) in enumerate(self.itof[k]):
97 | self.index[k][i] = int(fid)
98 |
99 | class Graph(object):
100 | def __init__(self, file_name, entity, weight=None):
101 | self.vocab_n, self.col_u, self.col_v = entity
102 | self.col_w = weight
103 | self.edges = []
104 |
105 | self.node_size = -1
106 |
107 | self.eid2iid = None
108 | self.iid2eid = None
109 |
110 | self.adj_w = None
111 | self.adj_t = None
112 |
113 | with open(file_name, 'r') as fi:
114 |
115 | for line in fi:
116 | items = line.strip().split('\t')
117 |
118 | su, sv = items[self.col_u], items[self.col_v]
119 | sw = items[self.col_w] if self.col_w != None else None
120 |
121 | u, v = self.vocab_n.stoi.get(su, -1), self.vocab_n.stoi.get(sv, -1)
122 | w = float(sw) if sw != None else 1
123 |
124 | if u == -1 or v == -1 or w <= 0:
125 | continue
126 |
127 | self.edges += [(u, v, w)]
128 |
129 | def get_node_size(self):
130 | return self.node_size
131 |
132 | def get_edge_size(self):
133 | return len(self.edges)
134 |
135 | def to_symmetric(self, self_link_weight=1.0):
136 | vocab = set()
137 | for u, v, w in self.edges:
138 | vocab.add(u)
139 | vocab.add(v)
140 |
141 | pair2wt = dict()
142 | for u, v, w in self.edges:
143 | pair2wt[(u, v)] = w
144 |
145 | edges_ = list()
146 | for (u, v), w in pair2wt.items():
147 | if u == v:
148 | continue
149 | w_ = pair2wt.get((v, u), -1)
150 | if w > w_:
151 | edges_ += [(u, v, w), (v, u, w)]
152 | elif w == w_:
153 | edges_ += [(u, v, w)]
154 | if self_link_weight > 0:
155 | for k in vocab:
156 | edges_ += [(k, k, self_link_weight)]
157 |
158 | d = dict()
159 | for u, v, w in edges_:
160 | d[u] = d.get(u, 0.0) + w
161 |
162 | self.edges = [(u, v, w/math.sqrt(d[u]*d[v])) for u, v, w in edges_]
163 | return d
164 |
165 | def get_sparse_adjacency(self, cuda=True):
166 | shape = torch.Size([self.vocab_n.vocab_size, self.vocab_n.vocab_size])
167 |
168 | us, vs, ws = [], [], []
169 | for u, v, w in self.edges:
170 | us += [u]
171 | vs += [v]
172 | ws += [w]
173 | index = torch.LongTensor([us, vs])
174 | value = torch.Tensor(ws)
175 | if cuda:
176 | index = index.cuda()
177 | value = value.cuda()
178 | adj = torch.sparse.FloatTensor(index, value, shape)
179 | if cuda:
180 | adj = adj.cuda()
181 |
182 | return adj
183 |
184 |     def get_dense_adjacency(self, cuda=True):
185 |
186 | shape = torch.Size([self.vocab_n.vocab_size, self.vocab_n.vocab_size])
187 |
188 | adj = torch.zeros(shape, dtype=torch.float)
189 | for u, v, w in self.edges:
190 | adj[u,v] = w
191 |
192 | if cuda:
193 | adj = adj.cuda()
194 |
195 | return adj
196 |
--------------------------------------------------------------------------------
/src/train.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import copy
3 | import numpy as np
4 | import random
5 | import argparse
6 | import statistics
7 | import torch
8 |
9 | from trainer import Trainer
10 | from gnn import GNN
11 | from wgnn import WGNN
12 | import loader
13 |
14 | # from orion.client import report_results  # unused here; only needed when running under the Orion hyperparameter-search framework
15 |
16 |
17 |
18 | def main(opt):
19 |     device = torch.device('cuda' if opt['cuda'] and torch.cuda.is_available() else 'cpu')  # currently unused; tensors are moved with .cuda() below
20 |
21 | #--------------------------------------------------
22 | # Load data.
23 | #--------------------------------------------------
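    # Expected dataset layout under opt['dataset'], as parsed by loader.py
    # (net.txt and feature.txt are not part of the data/cora listing above and
    # have to be provided separately):
    #   net.txt      tab-separated edge list   "<u>\t<v>"
    #   label.txt    tab-separated             "<node>\t<label>"
    #   feature.txt  tab-separated             "<node>\t<id:weight> <id:weight> ..."
    #   train.txt / dev.txt / test.txt          one node id per line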
24 | net_file = opt['dataset'] + '/net.txt'
25 | label_file = opt['dataset'] + '/label.txt'
26 | feature_file = opt['dataset'] + '/feature.txt'
27 | train_file = opt['dataset'] + '/train.txt'
28 | dev_file = opt['dataset'] + '/dev.txt'
29 | test_file = opt['dataset'] + '/test.txt'
30 |
31 | vocab_node = loader.Vocab(net_file, [0, 1])
32 | vocab_label = loader.Vocab(label_file, [1])
33 | vocab_feature = loader.Vocab(feature_file, [1])
34 |
35 | opt['num_node'] = len(vocab_node)
36 | opt['num_feature'] = len(vocab_feature)
37 | opt['num_class'] = len(vocab_label)
38 |
39 | graph = loader.Graph(file_name=net_file, entity=[vocab_node, 0, 1])
40 | label = loader.EntityLabel(file_name=label_file, entity=[vocab_node, 0], label=[vocab_label, 1])
41 | feature = loader.EntityFeature(file_name=feature_file, entity=[vocab_node, 0], feature=[vocab_feature, 1])
42 | d = graph.to_symmetric(opt['self_link_weight'])
43 | feature.to_one_hot(binary=True)
44 | adj = graph.get_sparse_adjacency(opt['cuda'])
45 | deg = torch.zeros(adj.shape[0])
46 | for k,v in d.items():
47 | deg[k] = v
48 |
49 | with open(train_file, 'r') as fi:
50 | idx_train = [vocab_node.stoi[line.strip()] for line in fi]
51 | with open(dev_file, 'r') as fi:
52 | idx_dev = [vocab_node.stoi[line.strip()] for line in fi]
53 | with open(test_file, 'r') as fi:
54 | idx_test = [vocab_node.stoi[line.strip()] for line in fi]
55 |
56 | inputs = torch.Tensor(feature.one_hot)
57 | target = torch.LongTensor(label.itol)
58 | idx_train = torch.LongTensor(idx_train)
59 | idx_dev = torch.LongTensor(idx_dev)
60 | idx_test = torch.LongTensor(idx_test)
61 |
62 | if opt['cuda']:
63 | inputs = inputs.cuda()
64 | target = target.cuda()
65 | idx_train = idx_train.cuda()
66 | idx_dev = idx_dev.cuda()
67 | idx_test = idx_test.cuda()
68 |
69 | #--------------------------------------------------
70 | # Build model.
71 | #--------------------------------------------------
72 | if opt['weight']:
73 | gnn = WGNN(opt, adj, deg, opt['time'])
74 | else:
75 | gnn = GNN(opt, adj, deg, opt['time'])
76 | trainer = Trainer(opt, gnn)
77 | print(gnn)
78 | print(opt)
79 |
80 | #--------------------------------------------------
81 | # Train model.
82 | #--------------------------------------------------
83 | def train(epochs):
84 | best = 0.0
85 | results = []
86 | prev_dev_acc = 0
87 | cnt = 0
88 | lr = opt['lr']
89 | for epoch in range(0, epochs):
90 | # -----------------------
91 | # Train Model
92 | # -----------------------
93 | if opt['weight']:
94 | loss = trainer.updatew(inputs, target, idx_train)
95 | else:
96 | loss = trainer.update(inputs, target, idx_train)
97 | # -----------------------
98 | # Evaluate Model
99 | # -----------------------
100 | _, preds, accuracy_dev = trainer.evaluate(inputs, target, idx_dev)
101 | # -----------------------
102 | # Test Model
103 | # -----------------------
104 | _, preds, accuracy_test = trainer.evaluate(inputs, target, idx_test)
105 | print(
106 | 'Epoch: {} | Loss: {:.3f} | Dev acc: {:.3f} | Test acc: {:.3f} | Forward: {} {:.3f} | Backward: {} {:.3f}'.format(
107 | epoch,
108 | loss,
109 | accuracy_dev,
110 | accuracy_test,
111 | trainer.fm.get_value(),
112 | trainer.fm.get_average(),
113 | trainer.bm.get_value(),
114 | trainer.bm.get_average()))
115 | results += [(accuracy_dev, accuracy_test)]
116 | if accuracy_dev >= best:
117 | best = accuracy_dev
118 | state = dict([('model', copy.deepcopy(trainer.model.state_dict())),
119 | ('optim', copy.deepcopy(trainer.optimizer.state_dict()))])
120 | trainer.model.load_state_dict(state['model'])
121 | trainer.optimizer.load_state_dict(state['optim'])
122 | return results
123 |
124 | results = train(opt['epoch'])
125 |
126 |
127 | def get_accuracy(results):
128 | best_dev, acc_test = 0.0, 0.0
129 | for d, t in results:
130 | if d > best_dev:
131 | best_dev, acc_test = d, t
132 | return acc_test, best_dev
133 |
134 | acc_test = get_accuracy(results)
135 |
136 | print('{:.3f}'.format(acc_test[0]*100))
137 |
138 | return acc_test
139 |
140 | if __name__ == '__main__':
141 | parser = argparse.ArgumentParser()
142 | parser.add_argument('--dataset', type=str, default='data')
143 | parser.add_argument('--save', type=str, default='/')
144 | parser.add_argument('--hidden_dim', type=int, default=16, help='Hidden dimension.')
145 | parser.add_argument('--input_dropout', type=float, default=0.5, help='Input dropout rate.')
146 | parser.add_argument('--dropout', type=float, default=0.5, help='Dropout rate.')
147 | parser.add_argument('--optimizer', type=str, default='adam', help='Optimizer.')
148 | parser.add_argument('--lr', type=float, default=0.01, help='Learning rate.')
149 | parser.add_argument('--decay', type=float, default=5e-4, help='Weight decay for optimization')
150 | parser.add_argument('--self_link_weight', type=float, default=1.0, help='Weight of self-links.')
151 | parser.add_argument('--epoch', type=int, default=10, help='Number of training epochs per iteration.')
152 | parser.add_argument('--iter', type=int, default=10, help='Number of training iterations.')
153 | parser.add_argument('--use_gold', type=int, default=1,
154 | help='Whether using the ground-truth label of labeled objects, 1 for using, 0 for not using.')
155 | parser.add_argument('--tau', type=float, default=1.0, help='Annealing temperature in sampling.')
156 | parser.add_argument('--alpha', type=float, default=1.0, help='Factor in front matrix A.')
157 | parser.add_argument('--draw', type=str, default='max',
158 | help='Method for drawing object labels, max for max-pooling, smp for sampling.')
159 | parser.add_argument('--seed', type=int, default=1)
160 | parser.add_argument('--time', type=float, default=1.0, help='End time of ODE function.')
161 | parser.add_argument('--cuda', type=bool, default=torch.cuda.is_available())
162 | parser.add_argument('--cpu', action='store_true', help='Ignore CUDA.')
163 | parser.add_argument('--analysis', type=bool, default=False, help='Enables the production of various analysis '
164 | 'plots.')
165 | parser.add_argument('--weight', type=bool, default=False, help='Set to true to use CGNN with weight')
166 |
167 | args = parser.parse_args()
168 |
169 | if args.cpu:
170 | args.cuda = False
171 | elif args.cuda:
172 | args.cuda = True
173 |
174 | opt = vars(args)
175 |
176 | main(opt)
177 |
178 |
179 |
--------------------------------------------------------------------------------
/data/cora/test.txt:
--------------------------------------------------------------------------------
1 | 1708
2 | 1709
3 | 1710
4 | 1711
5 | 1712
6 | 1713
7 | 1714
8 | 1715
9 | 1716
10 | 1717
11 | 1718
12 | 1719
13 | 1720
14 | 1721
15 | 1722
16 | 1723
17 | 1724
18 | 1725
19 | 1726
20 | 1727
21 | 1728
22 | 1729
23 | 1730
24 | 1731
25 | 1732
26 | 1733
27 | 1734
28 | 1735
29 | 1736
30 | 1737
31 | 1738
32 | 1739
33 | 1740
34 | 1741
35 | 1742
36 | 1743
37 | 1744
38 | 1745
39 | 1746
40 | 1747
41 | 1748
42 | 1749
43 | 1750
44 | 1751
45 | 1752
46 | 1753
47 | 1754
48 | 1755
49 | 1756
50 | 1757
51 | 1758
52 | 1759
53 | 1760
54 | 1761
55 | 1762
56 | 1763
57 | 1764
58 | 1765
59 | 1766
60 | 1767
61 | 1768
62 | 1769
63 | 1770
64 | 1771
65 | 1772
66 | 1773
67 | 1774
68 | 1775
69 | 1776
70 | 1777
71 | 1778
72 | 1779
73 | 1780
74 | 1781
75 | 1782
76 | 1783
77 | 1784
78 | 1785
79 | 1786
80 | 1787
81 | 1788
82 | 1789
83 | 1790
84 | 1791
85 | 1792
86 | 1793
87 | 1794
88 | 1795
89 | 1796
90 | 1797
91 | 1798
92 | 1799
93 | 1800
94 | 1801
95 | 1802
96 | 1803
97 | 1804
98 | 1805
99 | 1806
100 | 1807
101 | 1808
102 | 1809
103 | 1810
104 | 1811
105 | 1812
106 | 1813
107 | 1814
108 | 1815
109 | 1816
110 | 1817
111 | 1818
112 | 1819
113 | 1820
114 | 1821
115 | 1822
116 | 1823
117 | 1824
118 | 1825
119 | 1826
120 | 1827
121 | 1828
122 | 1829
123 | 1830
124 | 1831
125 | 1832
126 | 1833
127 | 1834
128 | 1835
129 | 1836
130 | 1837
131 | 1838
132 | 1839
133 | 1840
134 | 1841
135 | 1842
136 | 1843
137 | 1844
138 | 1845
139 | 1846
140 | 1847
141 | 1848
142 | 1849
143 | 1850
144 | 1851
145 | 1852
146 | 1853
147 | 1854
148 | 1855
149 | 1856
150 | 1857
151 | 1858
152 | 1859
153 | 1860
154 | 1861
155 | 1862
156 | 1863
157 | 1864
158 | 1865
159 | 1866
160 | 1867
161 | 1868
162 | 1869
163 | 1870
164 | 1871
165 | 1872
166 | 1873
167 | 1874
168 | 1875
169 | 1876
170 | 1877
171 | 1878
172 | 1879
173 | 1880
174 | 1881
175 | 1882
176 | 1883
177 | 1884
178 | 1885
179 | 1886
180 | 1887
181 | 1888
182 | 1889
183 | 1890
184 | 1891
185 | 1892
186 | 1893
187 | 1894
188 | 1895
189 | 1896
190 | 1897
191 | 1898
192 | 1899
193 | 1900
194 | 1901
195 | 1902
196 | 1903
197 | 1904
198 | 1905
199 | 1906
200 | 1907
201 | 1908
202 | 1909
203 | 1910
204 | 1911
205 | 1912
206 | 1913
207 | 1914
208 | 1915
209 | 1916
210 | 1917
211 | 1918
212 | 1919
213 | 1920
214 | 1921
215 | 1922
216 | 1923
217 | 1924
218 | 1925
219 | 1926
220 | 1927
221 | 1928
222 | 1929
223 | 1930
224 | 1931
225 | 1932
226 | 1933
227 | 1934
228 | 1935
229 | 1936
230 | 1937
231 | 1938
232 | 1939
233 | 1940
234 | 1941
235 | 1942
236 | 1943
237 | 1944
238 | 1945
239 | 1946
240 | 1947
241 | 1948
242 | 1949
243 | 1950
244 | 1951
245 | 1952
246 | 1953
247 | 1954
248 | 1955
249 | 1956
250 | 1957
251 | 1958
252 | 1959
253 | 1960
254 | 1961
255 | 1962
256 | 1963
257 | 1964
258 | 1965
259 | 1966
260 | 1967
261 | 1968
262 | 1969
263 | 1970
264 | 1971
265 | 1972
266 | 1973
267 | 1974
268 | 1975
269 | 1976
270 | 1977
271 | 1978
272 | 1979
273 | 1980
274 | 1981
275 | 1982
276 | 1983
277 | 1984
278 | 1985
279 | 1986
280 | 1987
281 | 1988
282 | 1989
283 | 1990
284 | 1991
285 | 1992
286 | 1993
287 | 1994
288 | 1995
289 | 1996
290 | 1997
291 | 1998
292 | 1999
293 | 2000
294 | 2001
295 | 2002
296 | 2003
297 | 2004
298 | 2005
299 | 2006
300 | 2007
301 | 2008
302 | 2009
303 | 2010
304 | 2011
305 | 2012
306 | 2013
307 | 2014
308 | 2015
309 | 2016
310 | 2017
311 | 2018
312 | 2019
313 | 2020
314 | 2021
315 | 2022
316 | 2023
317 | 2024
318 | 2025
319 | 2026
320 | 2027
321 | 2028
322 | 2029
323 | 2030
324 | 2031
325 | 2032
326 | 2033
327 | 2034
328 | 2035
329 | 2036
330 | 2037
331 | 2038
332 | 2039
333 | 2040
334 | 2041
335 | 2042
336 | 2043
337 | 2044
338 | 2045
339 | 2046
340 | 2047
341 | 2048
342 | 2049
343 | 2050
344 | 2051
345 | 2052
346 | 2053
347 | 2054
348 | 2055
349 | 2056
350 | 2057
351 | 2058
352 | 2059
353 | 2060
354 | 2061
355 | 2062
356 | 2063
357 | 2064
358 | 2065
359 | 2066
360 | 2067
361 | 2068
362 | 2069
363 | 2070
364 | 2071
365 | 2072
366 | 2073
367 | 2074
368 | 2075
369 | 2076
370 | 2077
371 | 2078
372 | 2079
373 | 2080
374 | 2081
375 | 2082
376 | 2083
377 | 2084
378 | 2085
379 | 2086
380 | 2087
381 | 2088
382 | 2089
383 | 2090
384 | 2091
385 | 2092
386 | 2093
387 | 2094
388 | 2095
389 | 2096
390 | 2097
391 | 2098
392 | 2099
393 | 2100
394 | 2101
395 | 2102
396 | 2103
397 | 2104
398 | 2105
399 | 2106
400 | 2107
401 | 2108
402 | 2109
403 | 2110
404 | 2111
405 | 2112
406 | 2113
407 | 2114
408 | 2115
409 | 2116
410 | 2117
411 | 2118
412 | 2119
413 | 2120
414 | 2121
415 | 2122
416 | 2123
417 | 2124
418 | 2125
419 | 2126
420 | 2127
421 | 2128
422 | 2129
423 | 2130
424 | 2131
425 | 2132
426 | 2133
427 | 2134
428 | 2135
429 | 2136
430 | 2137
431 | 2138
432 | 2139
433 | 2140
434 | 2141
435 | 2142
436 | 2143
437 | 2144
438 | 2145
439 | 2146
440 | 2147
441 | 2148
442 | 2149
443 | 2150
444 | 2151
445 | 2152
446 | 2153
447 | 2154
448 | 2155
449 | 2156
450 | 2157
451 | 2158
452 | 2159
453 | 2160
454 | 2161
455 | 2162
456 | 2163
457 | 2164
458 | 2165
459 | 2166
460 | 2167
461 | 2168
462 | 2169
463 | 2170
464 | 2171
465 | 2172
466 | 2173
467 | 2174
468 | 2175
469 | 2176
470 | 2177
471 | 2178
472 | 2179
473 | 2180
474 | 2181
475 | 2182
476 | 2183
477 | 2184
478 | 2185
479 | 2186
480 | 2187
481 | 2188
482 | 2189
483 | 2190
484 | 2191
485 | 2192
486 | 2193
487 | 2194
488 | 2195
489 | 2196
490 | 2197
491 | 2198
492 | 2199
493 | 2200
494 | 2201
495 | 2202
496 | 2203
497 | 2204
498 | 2205
499 | 2206
500 | 2207
501 | 2208
502 | 2209
503 | 2210
504 | 2211
505 | 2212
506 | 2213
507 | 2214
508 | 2215
509 | 2216
510 | 2217
511 | 2218
512 | 2219
513 | 2220
514 | 2221
515 | 2222
516 | 2223
517 | 2224
518 | 2225
519 | 2226
520 | 2227
521 | 2228
522 | 2229
523 | 2230
524 | 2231
525 | 2232
526 | 2233
527 | 2234
528 | 2235
529 | 2236
530 | 2237
531 | 2238
532 | 2239
533 | 2240
534 | 2241
535 | 2242
536 | 2243
537 | 2244
538 | 2245
539 | 2246
540 | 2247
541 | 2248
542 | 2249
543 | 2250
544 | 2251
545 | 2252
546 | 2253
547 | 2254
548 | 2255
549 | 2256
550 | 2257
551 | 2258
552 | 2259
553 | 2260
554 | 2261
555 | 2262
556 | 2263
557 | 2264
558 | 2265
559 | 2266
560 | 2267
561 | 2268
562 | 2269
563 | 2270
564 | 2271
565 | 2272
566 | 2273
567 | 2274
568 | 2275
569 | 2276
570 | 2277
571 | 2278
572 | 2279
573 | 2280
574 | 2281
575 | 2282
576 | 2283
577 | 2284
578 | 2285
579 | 2286
580 | 2287
581 | 2288
582 | 2289
583 | 2290
584 | 2291
585 | 2292
586 | 2293
587 | 2294
588 | 2295
589 | 2296
590 | 2297
591 | 2298
592 | 2299
593 | 2300
594 | 2301
595 | 2302
596 | 2303
597 | 2304
598 | 2305
599 | 2306
600 | 2307
601 | 2308
602 | 2309
603 | 2310
604 | 2311
605 | 2312
606 | 2313
607 | 2314
608 | 2315
609 | 2316
610 | 2317
611 | 2318
612 | 2319
613 | 2320
614 | 2321
615 | 2322
616 | 2323
617 | 2324
618 | 2325
619 | 2326
620 | 2327
621 | 2328
622 | 2329
623 | 2330
624 | 2331
625 | 2332
626 | 2333
627 | 2334
628 | 2335
629 | 2336
630 | 2337
631 | 2338
632 | 2339
633 | 2340
634 | 2341
635 | 2342
636 | 2343
637 | 2344
638 | 2345
639 | 2346
640 | 2347
641 | 2348
642 | 2349
643 | 2350
644 | 2351
645 | 2352
646 | 2353
647 | 2354
648 | 2355
649 | 2356
650 | 2357
651 | 2358
652 | 2359
653 | 2360
654 | 2361
655 | 2362
656 | 2363
657 | 2364
658 | 2365
659 | 2366
660 | 2367
661 | 2368
662 | 2369
663 | 2370
664 | 2371
665 | 2372
666 | 2373
667 | 2374
668 | 2375
669 | 2376
670 | 2377
671 | 2378
672 | 2379
673 | 2380
674 | 2381
675 | 2382
676 | 2383
677 | 2384
678 | 2385
679 | 2386
680 | 2387
681 | 2388
682 | 2389
683 | 2390
684 | 2391
685 | 2392
686 | 2393
687 | 2394
688 | 2395
689 | 2396
690 | 2397
691 | 2398
692 | 2399
693 | 2400
694 | 2401
695 | 2402
696 | 2403
697 | 2404
698 | 2405
699 | 2406
700 | 2407
701 | 2408
702 | 2409
703 | 2410
704 | 2411
705 | 2412
706 | 2413
707 | 2414
708 | 2415
709 | 2416
710 | 2417
711 | 2418
712 | 2419
713 | 2420
714 | 2421
715 | 2422
716 | 2423
717 | 2424
718 | 2425
719 | 2426
720 | 2427
721 | 2428
722 | 2429
723 | 2430
724 | 2431
725 | 2432
726 | 2433
727 | 2434
728 | 2435
729 | 2436
730 | 2437
731 | 2438
732 | 2439
733 | 2440
734 | 2441
735 | 2442
736 | 2443
737 | 2444
738 | 2445
739 | 2446
740 | 2447
741 | 2448
742 | 2449
743 | 2450
744 | 2451
745 | 2452
746 | 2453
747 | 2454
748 | 2455
749 | 2456
750 | 2457
751 | 2458
752 | 2459
753 | 2460
754 | 2461
755 | 2462
756 | 2463
757 | 2464
758 | 2465
759 | 2466
760 | 2467
761 | 2468
762 | 2469
763 | 2470
764 | 2471
765 | 2472
766 | 2473
767 | 2474
768 | 2475
769 | 2476
770 | 2477
771 | 2478
772 | 2479
773 | 2480
774 | 2481
775 | 2482
776 | 2483
777 | 2484
778 | 2485
779 | 2486
780 | 2487
781 | 2488
782 | 2489
783 | 2490
784 | 2491
785 | 2492
786 | 2493
787 | 2494
788 | 2495
789 | 2496
790 | 2497
791 | 2498
792 | 2499
793 | 2500
794 | 2501
795 | 2502
796 | 2503
797 | 2504
798 | 2505
799 | 2506
800 | 2507
801 | 2508
802 | 2509
803 | 2510
804 | 2511
805 | 2512
806 | 2513
807 | 2514
808 | 2515
809 | 2516
810 | 2517
811 | 2518
812 | 2519
813 | 2520
814 | 2521
815 | 2522
816 | 2523
817 | 2524
818 | 2525
819 | 2526
820 | 2527
821 | 2528
822 | 2529
823 | 2530
824 | 2531
825 | 2532
826 | 2533
827 | 2534
828 | 2535
829 | 2536
830 | 2537
831 | 2538
832 | 2539
833 | 2540
834 | 2541
835 | 2542
836 | 2543
837 | 2544
838 | 2545
839 | 2546
840 | 2547
841 | 2548
842 | 2549
843 | 2550
844 | 2551
845 | 2552
846 | 2553
847 | 2554
848 | 2555
849 | 2556
850 | 2557
851 | 2558
852 | 2559
853 | 2560
854 | 2561
855 | 2562
856 | 2563
857 | 2564
858 | 2565
859 | 2566
860 | 2567
861 | 2568
862 | 2569
863 | 2570
864 | 2571
865 | 2572
866 | 2573
867 | 2574
868 | 2575
869 | 2576
870 | 2577
871 | 2578
872 | 2579
873 | 2580
874 | 2581
875 | 2582
876 | 2583
877 | 2584
878 | 2585
879 | 2586
880 | 2587
881 | 2588
882 | 2589
883 | 2590
884 | 2591
885 | 2592
886 | 2593
887 | 2594
888 | 2595
889 | 2596
890 | 2597
891 | 2598
892 | 2599
893 | 2600
894 | 2601
895 | 2602
896 | 2603
897 | 2604
898 | 2605
899 | 2606
900 | 2607
901 | 2608
902 | 2609
903 | 2610
904 | 2611
905 | 2612
906 | 2613
907 | 2614
908 | 2615
909 | 2616
910 | 2617
911 | 2618
912 | 2619
913 | 2620
914 | 2621
915 | 2622
916 | 2623
917 | 2624
918 | 2625
919 | 2626
920 | 2627
921 | 2628
922 | 2629
923 | 2630
924 | 2631
925 | 2632
926 | 2633
927 | 2634
928 | 2635
929 | 2636
930 | 2637
931 | 2638
932 | 2639
933 | 2640
934 | 2641
935 | 2642
936 | 2643
937 | 2644
938 | 2645
939 | 2646
940 | 2647
941 | 2648
942 | 2649
943 | 2650
944 | 2651
945 | 2652
946 | 2653
947 | 2654
948 | 2655
949 | 2656
950 | 2657
951 | 2658
952 | 2659
953 | 2660
954 | 2661
955 | 2662
956 | 2663
957 | 2664
958 | 2665
959 | 2666
960 | 2667
961 | 2668
962 | 2669
963 | 2670
964 | 2671
965 | 2672
966 | 2673
967 | 2674
968 | 2675
969 | 2676
970 | 2677
971 | 2678
972 | 2679
973 | 2680
974 | 2681
975 | 2682
976 | 2683
977 | 2684
978 | 2685
979 | 2686
980 | 2687
981 | 2688
982 | 2689
983 | 2690
984 | 2691
985 | 2692
986 | 2693
987 | 2694
988 | 2695
989 | 2696
990 | 2697
991 | 2698
992 | 2699
993 | 2700
994 | 2701
995 | 2702
996 | 2703
997 | 2704
998 | 2705
999 | 2706
1000 | 2707
1001 |
--------------------------------------------------------------------------------
/data/cora/label.txt:
--------------------------------------------------------------------------------
1 | 0 3
2 | 1 4
3 | 2 4
4 | 3 0
5 | 4 3
6 | 5 2
7 | 6 0
8 | 7 3
9 | 8 3
10 | 9 2
11 | 10 0
12 | 11 0
13 | 12 4
14 | 13 3
15 | 14 3
16 | 15 3
17 | 16 2
18 | 17 3
19 | 18 1
20 | 19 3
21 | 20 5
22 | 21 3
23 | 22 4
24 | 23 6
25 | 24 3
26 | 25 3
27 | 26 6
28 | 27 3
29 | 28 2
30 | 29 4
31 | 30 3
32 | 31 6
33 | 32 0
34 | 33 4
35 | 34 2
36 | 35 0
37 | 36 1
38 | 37 5
39 | 38 4
40 | 39 4
41 | 40 3
42 | 41 6
43 | 42 6
44 | 43 4
45 | 44 3
46 | 45 3
47 | 46 2
48 | 47 5
49 | 48 3
50 | 49 4
51 | 50 5
52 | 51 3
53 | 52 0
54 | 53 2
55 | 54 1
56 | 55 4
57 | 56 6
58 | 57 3
59 | 58 2
60 | 59 2
61 | 60 0
62 | 61 0
63 | 62 0
64 | 63 4
65 | 64 2
66 | 65 0
67 | 66 4
68 | 67 5
69 | 68 2
70 | 69 6
71 | 70 5
72 | 71 2
73 | 72 2
74 | 73 2
75 | 74 0
76 | 75 4
77 | 76 5
78 | 77 6
79 | 78 4
80 | 79 0
81 | 80 0
82 | 81 0
83 | 82 4
84 | 83 2
85 | 84 4
86 | 85 1
87 | 86 4
88 | 87 6
89 | 88 0
90 | 89 4
91 | 90 2
92 | 91 4
93 | 92 6
94 | 93 6
95 | 94 0
96 | 95 0
97 | 96 6
98 | 97 5
99 | 98 0
100 | 99 6
101 | 100 0
102 | 101 2
103 | 102 1
104 | 103 1
105 | 104 1
106 | 105 2
107 | 106 6
108 | 107 5
109 | 108 6
110 | 109 1
111 | 110 2
112 | 111 2
113 | 112 1
114 | 113 5
115 | 114 5
116 | 115 5
117 | 116 6
118 | 117 5
119 | 118 6
120 | 119 5
121 | 120 5
122 | 121 1
123 | 122 6
124 | 123 6
125 | 124 1
126 | 125 5
127 | 126 1
128 | 127 6
129 | 128 5
130 | 129 5
131 | 130 5
132 | 131 1
133 | 132 5
134 | 133 1
135 | 134 1
136 | 135 1
137 | 136 1
138 | 137 1
139 | 138 1
140 | 139 1
141 | 140 4
142 | 141 3
143 | 142 0
144 | 143 3
145 | 144 6
146 | 145 6
147 | 146 0
148 | 147 3
149 | 148 4
150 | 149 0
151 | 150 3
152 | 151 4
153 | 152 4
154 | 153 1
155 | 154 2
156 | 155 2
157 | 156 2
158 | 157 3
159 | 158 3
160 | 159 3
161 | 160 3
162 | 161 0
163 | 162 4
164 | 163 5
165 | 164 0
166 | 165 3
167 | 166 4
168 | 167 3
169 | 168 3
170 | 169 3
171 | 170 2
172 | 171 3
173 | 172 3
174 | 173 2
175 | 174 2
176 | 175 6
177 | 176 1
178 | 177 4
179 | 178 3
180 | 179 3
181 | 180 3
182 | 181 6
183 | 182 3
184 | 183 3
185 | 184 3
186 | 185 3
187 | 186 0
188 | 187 4
189 | 188 2
190 | 189 2
191 | 190 6
192 | 191 5
193 | 192 3
194 | 193 5
195 | 194 4
196 | 195 0
197 | 196 4
198 | 197 3
199 | 198 4
200 | 199 4
201 | 200 3
202 | 201 3
203 | 202 2
204 | 203 4
205 | 204 0
206 | 205 3
207 | 206 2
208 | 207 3
209 | 208 3
210 | 209 4
211 | 210 4
212 | 211 0
213 | 212 3
214 | 213 6
215 | 214 0
216 | 215 3
217 | 216 3
218 | 217 4
219 | 218 3
220 | 219 3
221 | 220 5
222 | 221 2
223 | 222 3
224 | 223 2
225 | 224 4
226 | 225 1
227 | 226 3
228 | 227 2
229 | 228 2
230 | 229 3
231 | 230 3
232 | 231 3
233 | 232 3
234 | 233 5
235 | 234 1
236 | 235 3
237 | 236 1
238 | 237 3
239 | 238 5
240 | 239 0
241 | 240 3
242 | 241 5
243 | 242 0
244 | 243 4
245 | 244 2
246 | 245 4
247 | 246 2
248 | 247 4
249 | 248 4
250 | 249 5
251 | 250 4
252 | 251 3
253 | 252 5
254 | 253 3
255 | 254 3
256 | 255 4
257 | 256 3
258 | 257 0
259 | 258 4
260 | 259 5
261 | 260 0
262 | 261 3
263 | 262 6
264 | 263 2
265 | 264 5
266 | 265 5
267 | 266 5
268 | 267 3
269 | 268 2
270 | 269 3
271 | 270 0
272 | 271 4
273 | 272 5
274 | 273 3
275 | 274 0
276 | 275 4
277 | 276 0
278 | 277 3
279 | 278 3
280 | 279 0
281 | 280 0
282 | 281 3
283 | 282 5
284 | 283 4
285 | 284 4
286 | 285 3
287 | 286 4
288 | 287 3
289 | 288 3
290 | 289 2
291 | 290 2
292 | 291 3
293 | 292 0
294 | 293 3
295 | 294 1
296 | 295 3
297 | 296 2
298 | 297 3
299 | 298 3
300 | 299 4
301 | 300 5
302 | 301 2
303 | 302 1
304 | 303 1
305 | 304 0
306 | 305 0
307 | 306 1
308 | 307 6
309 | 308 1
310 | 309 3
311 | 310 3
312 | 311 3
313 | 312 2
314 | 313 3
315 | 314 3
316 | 315 0
317 | 316 3
318 | 317 4
319 | 318 1
320 | 319 3
321 | 320 4
322 | 321 3
323 | 322 2
324 | 323 0
325 | 324 0
326 | 325 4
327 | 326 2
328 | 327 3
329 | 328 2
330 | 329 1
331 | 330 4
332 | 331 6
333 | 332 3
334 | 333 2
335 | 334 0
336 | 335 3
337 | 336 3
338 | 337 2
339 | 338 3
340 | 339 4
341 | 340 4
342 | 341 2
343 | 342 1
344 | 343 3
345 | 344 5
346 | 345 3
347 | 346 2
348 | 347 0
349 | 348 4
350 | 349 5
351 | 350 1
352 | 351 3
353 | 352 3
354 | 353 2
355 | 354 0
356 | 355 2
357 | 356 4
358 | 357 2
359 | 358 2
360 | 359 2
361 | 360 5
362 | 361 4
363 | 362 4
364 | 363 2
365 | 364 2
366 | 365 0
367 | 366 3
368 | 367 2
369 | 368 4
370 | 369 4
371 | 370 5
372 | 371 5
373 | 372 1
374 | 373 0
375 | 374 3
376 | 375 4
377 | 376 5
378 | 377 3
379 | 378 4
380 | 379 5
381 | 380 3
382 | 381 4
383 | 382 3
384 | 383 3
385 | 384 1
386 | 385 4
387 | 386 3
388 | 387 3
389 | 388 5
390 | 389 2
391 | 390 3
392 | 391 2
393 | 392 5
394 | 393 5
395 | 394 4
396 | 395 3
397 | 396 3
398 | 397 3
399 | 398 3
400 | 399 1
401 | 400 5
402 | 401 3
403 | 402 3
404 | 403 2
405 | 404 6
406 | 405 0
407 | 406 1
408 | 407 3
409 | 408 0
410 | 409 1
411 | 410 5
412 | 411 3
413 | 412 6
414 | 413 3
415 | 414 6
416 | 415 0
417 | 416 3
418 | 417 3
419 | 418 3
420 | 419 5
421 | 420 4
422 | 421 3
423 | 422 4
424 | 423 0
425 | 424 5
426 | 425 2
427 | 426 1
428 | 427 2
429 | 428 4
430 | 429 4
431 | 430 4
432 | 431 4
433 | 432 3
434 | 433 3
435 | 434 0
436 | 435 4
437 | 436 3
438 | 437 0
439 | 438 5
440 | 439 2
441 | 440 0
442 | 441 5
443 | 442 4
444 | 443 4
445 | 444 4
446 | 445 3
447 | 446 0
448 | 447 6
449 | 448 5
450 | 449 2
451 | 450 4
452 | 451 5
453 | 452 1
454 | 453 3
455 | 454 5
456 | 455 3
457 | 456 0
458 | 457 3
459 | 458 5
460 | 459 1
461 | 460 1
462 | 461 0
463 | 462 3
464 | 463 4
465 | 464 2
466 | 465 6
467 | 466 2
468 | 467 0
469 | 468 5
470 | 469 3
471 | 470 4
472 | 471 6
473 | 472 5
474 | 473 3
475 | 474 5
476 | 475 0
477 | 476 1
478 | 477 3
479 | 478 0
480 | 479 5
481 | 480 2
482 | 481 2
483 | 482 3
484 | 483 5
485 | 484 1
486 | 485 0
487 | 486 3
488 | 487 1
489 | 488 4
490 | 489 2
491 | 490 5
492 | 491 6
493 | 492 4
494 | 493 2
495 | 494 2
496 | 495 6
497 | 496 0
498 | 497 0
499 | 498 4
500 | 499 6
501 | 500 3
502 | 501 2
503 | 502 0
504 | 503 3
505 | 504 6
506 | 505 1
507 | 506 6
508 | 507 3
509 | 508 1
510 | 509 3
511 | 510 3
512 | 511 3
513 | 512 3
514 | 513 2
515 | 514 5
516 | 515 4
517 | 516 5
518 | 517 5
519 | 518 3
520 | 519 1
521 | 520 3
522 | 521 3
523 | 522 4
524 | 523 4
525 | 524 2
526 | 525 0
527 | 526 2
528 | 527 0
529 | 528 5
530 | 529 4
531 | 530 0
532 | 531 0
533 | 532 3
534 | 533 2
535 | 534 2
536 | 535 2
537 | 536 2
538 | 537 6
539 | 538 4
540 | 539 6
541 | 540 5
542 | 541 5
543 | 542 1
544 | 543 0
545 | 544 0
546 | 545 4
547 | 546 3
548 | 547 3
549 | 548 1
550 | 549 3
551 | 550 6
552 | 551 6
553 | 552 2
554 | 553 3
555 | 554 3
556 | 555 3
557 | 556 1
558 | 557 2
559 | 558 2
560 | 559 5
561 | 560 4
562 | 561 3
563 | 562 2
564 | 563 1
565 | 564 2
566 | 565 2
567 | 566 3
568 | 567 2
569 | 568 3
570 | 569 2
571 | 570 3
572 | 571 3
573 | 572 0
574 | 573 5
575 | 574 3
576 | 575 3
577 | 576 3
578 | 577 4
579 | 578 5
580 | 579 3
581 | 580 2
582 | 581 1
583 | 582 4
584 | 583 4
585 | 584 4
586 | 585 4
587 | 586 0
588 | 587 5
589 | 588 4
590 | 589 1
591 | 590 3
592 | 591 0
593 | 592 3
594 | 593 4
595 | 594 6
596 | 595 3
597 | 596 6
598 | 597 3
599 | 598 3
600 | 599 3
601 | 600 6
602 | 601 3
603 | 602 4
604 | 603 3
605 | 604 6
606 | 605 3
607 | 606 0
608 | 607 3
609 | 608 1
610 | 609 2
611 | 610 5
612 | 611 6
613 | 612 5
614 | 613 2
615 | 614 0
616 | 615 2
617 | 616 2
618 | 617 3
619 | 618 3
620 | 619 0
621 | 620 3
622 | 621 5
623 | 622 3
624 | 623 4
625 | 624 0
626 | 625 3
627 | 626 2
628 | 627 4
629 | 628 5
630 | 629 2
631 | 630 3
632 | 631 2
633 | 632 2
634 | 633 3
635 | 634 5
636 | 635 2
637 | 636 0
638 | 637 3
639 | 638 4
640 | 639 3
641 | 1708 3
642 | 1709 2
643 | 1710 2
644 | 1711 2
645 | 1712 2
646 | 1713 0
647 | 1714 2
648 | 1715 2
649 | 1716 2
650 | 1717 2
651 | 1718 2
652 | 1719 2
653 | 1720 2
654 | 1721 2
655 | 1722 2
656 | 1723 2
657 | 1724 2
658 | 1725 2
659 | 1726 2
660 | 1727 2
661 | 1728 3
662 | 1729 2
663 | 1730 2
664 | 1731 2
665 | 1732 2
666 | 1733 2
667 | 1734 2
668 | 1735 1
669 | 1736 2
670 | 1737 2
671 | 1738 2
672 | 1739 2
673 | 1740 2
674 | 1741 3
675 | 1742 2
676 | 1743 2
677 | 1744 2
678 | 1745 2
679 | 1746 2
680 | 1747 2
681 | 1748 2
682 | 1749 2
683 | 1750 2
684 | 1751 2
685 | 1752 2
686 | 1753 2
687 | 1754 2
688 | 1755 2
689 | 1756 2
690 | 1757 2
691 | 1758 2
692 | 1759 2
693 | 1760 2
694 | 1761 2
695 | 1762 2
696 | 1763 2
697 | 1764 5
698 | 1765 2
699 | 1766 2
700 | 1767 1
701 | 1768 1
702 | 1769 1
703 | 1770 1
704 | 1771 1
705 | 1772 1
706 | 1773 1
707 | 1774 4
708 | 1775 1
709 | 1776 1
710 | 1777 1
711 | 1778 1
712 | 1779 1
713 | 1780 1
714 | 1781 1
715 | 1782 1
716 | 1783 1
717 | 1784 1
718 | 1785 4
719 | 1786 1
720 | 1787 1
721 | 1788 1
722 | 1789 1
723 | 1790 1
724 | 1791 1
725 | 1792 3
726 | 1793 4
727 | 1794 4
728 | 1795 4
729 | 1796 4
730 | 1797 1
731 | 1798 1
732 | 1799 3
733 | 1800 1
734 | 1801 0
735 | 1802 3
736 | 1803 0
737 | 1804 2
738 | 1805 1
739 | 1806 3
740 | 1807 3
741 | 1808 3
742 | 1809 3
743 | 1810 3
744 | 1811 3
745 | 1812 3
746 | 1813 3
747 | 1814 3
748 | 1815 3
749 | 1816 3
750 | 1817 3
751 | 1818 3
752 | 1819 3
753 | 1820 3
754 | 1821 3
755 | 1822 3
756 | 1823 3
757 | 1824 5
758 | 1825 5
759 | 1826 5
760 | 1827 5
761 | 1828 5
762 | 1829 5
763 | 1830 2
764 | 1831 2
765 | 1832 2
766 | 1833 2
767 | 1834 1
768 | 1835 6
769 | 1836 6
770 | 1837 3
771 | 1838 0
772 | 1839 0
773 | 1840 5
774 | 1841 0
775 | 1842 5
776 | 1843 0
777 | 1844 3
778 | 1845 5
779 | 1846 3
780 | 1847 0
781 | 1848 0
782 | 1849 6
783 | 1850 0
784 | 1851 6
785 | 1852 3
786 | 1853 3
787 | 1854 1
788 | 1855 3
789 | 1856 1
790 | 1857 3
791 | 1858 3
792 | 1859 3
793 | 1860 3
794 | 1861 3
795 | 1862 3
796 | 1863 3
797 | 1864 3
798 | 1865 3
799 | 1866 3
800 | 1867 3
801 | 1868 3
802 | 1869 3
803 | 1870 3
804 | 1871 3
805 | 1872 3
806 | 1873 3
807 | 1874 3
808 | 1875 3
809 | 1876 3
810 | 1877 3
811 | 1878 5
812 | 1879 5
813 | 1880 5
814 | 1881 5
815 | 1882 5
816 | 1883 5
817 | 1884 5
818 | 1885 5
819 | 1886 2
820 | 1887 2
821 | 1888 2
822 | 1889 4
823 | 1890 4
824 | 1891 4
825 | 1892 0
826 | 1893 3
827 | 1894 3
828 | 1895 2
829 | 1896 5
830 | 1897 5
831 | 1898 5
832 | 1899 5
833 | 1900 6
834 | 1901 5
835 | 1902 5
836 | 1903 5
837 | 1904 5
838 | 1905 0
839 | 1906 4
840 | 1907 4
841 | 1908 4
842 | 1909 0
843 | 1910 0
844 | 1911 5
845 | 1912 0
846 | 1913 0
847 | 1914 6
848 | 1915 6
849 | 1916 6
850 | 1917 6
851 | 1918 6
852 | 1919 6
853 | 1920 0
854 | 1921 0
855 | 1922 0
856 | 1923 0
857 | 1924 3
858 | 1925 0
859 | 1926 0
860 | 1927 0
861 | 1928 3
862 | 1929 3
863 | 1930 0
864 | 1931 3
865 | 1932 3
866 | 1933 3
867 | 1934 3
868 | 1935 3
869 | 1936 3
870 | 1937 3
871 | 1938 3
872 | 1939 3
873 | 1940 3
874 | 1941 3
875 | 1942 3
876 | 1943 3
877 | 1944 3
878 | 1945 3
879 | 1946 3
880 | 1947 3
881 | 1948 3
882 | 1949 3
883 | 1950 3
884 | 1951 3
885 | 1952 3
886 | 1953 5
887 | 1954 5
888 | 1955 5
889 | 1956 5
890 | 1957 3
891 | 1958 5
892 | 1959 5
893 | 1960 5
894 | 1961 5
895 | 1962 5
896 | 1963 5
897 | 1964 4
898 | 1965 4
899 | 1966 4
900 | 1967 4
901 | 1968 4
902 | 1969 4
903 | 1970 4
904 | 1971 4
905 | 1972 6
906 | 1973 6
907 | 1974 5
908 | 1975 6
909 | 1976 6
910 | 1977 3
911 | 1978 5
912 | 1979 5
913 | 1980 5
914 | 1981 0
915 | 1982 5
916 | 1983 0
917 | 1984 4
918 | 1985 4
919 | 1986 3
920 | 1987 3
921 | 1988 3
922 | 1989 2
923 | 1990 2
924 | 1991 1
925 | 1992 3
926 | 1993 3
927 | 1994 3
928 | 1995 3
929 | 1996 3
930 | 1997 3
931 | 1998 5
932 | 1999 3
933 | 2000 3
934 | 2001 4
935 | 2002 4
936 | 2003 3
937 | 2004 3
938 | 2005 3
939 | 2006 3
940 | 2007 3
941 | 2008 3
942 | 2009 3
943 | 2010 0
944 | 2011 3
945 | 2012 3
946 | 2013 6
947 | 2014 3
948 | 2015 6
949 | 2016 0
950 | 2017 5
951 | 2018 0
952 | 2019 0
953 | 2020 4
954 | 2021 0
955 | 2022 6
956 | 2023 5
957 | 2024 5
958 | 2025 0
959 | 2026 1
960 | 2027 3
961 | 2028 3
962 | 2029 5
963 | 2030 6
964 | 2031 5
965 | 2032 3
966 | 2033 3
967 | 2034 4
968 | 2035 3
969 | 2036 3
970 | 2037 3
971 | 2038 3
972 | 2039 3
973 | 2040 4
974 | 2041 3
975 | 2042 3
976 | 2043 4
977 | 2044 3
978 | 2045 1
979 | 2046 1
980 | 2047 0
981 | 2048 1
982 | 2049 0
983 | 2050 6
984 | 2051 0
985 | 2052 0
986 | 2053 0
987 | 2054 0
988 | 2055 0
989 | 2056 0
990 | 2057 0
991 | 2058 5
992 | 2059 0
993 | 2060 5
994 | 2061 5
995 | 2062 5
996 | 2063 3
997 | 2064 3
998 | 2065 3
999 | 2066 3
1000 | 2067 3
1001 | 2068 0
1002 | 2069 0
1003 | 2070 0
1004 | 2071 2
1005 | 2072 0
1006 | 2073 0
1007 | 2074 0
1008 | 2075 3
1009 | 2076 3
1010 | 2077 3
1011 | 2078 3
1012 | 2079 1
1013 | 2080 1
1014 | 2081 1
1015 | 2082 1
1016 | 2083 2
1017 | 2084 1
1018 | 2085 1
1019 | 2086 1
1020 | 2087 1
1021 | 2088 1
1022 | 2089 0
1023 | 2090 1
1024 | 2091 3
1025 | 2092 1
1026 | 2093 1
1027 | 2094 1
1028 | 2095 1
1029 | 2096 1
1030 | 2097 0
1031 | 2098 0
1032 | 2099 0
1033 | 2100 5
1034 | 2101 5
1035 | 2102 5
1036 | 2103 5
1037 | 2104 3
1038 | 2105 5
1039 | 2106 1
1040 | 2107 1
1041 | 2108 3
1042 | 2109 6
1043 | 2110 6
1044 | 2111 5
1045 | 2112 6
1046 | 2113 2
1047 | 2114 3
1048 | 2115 3
1049 | 2116 0
1050 | 2117 3
1051 | 2118 3
1052 | 2119 3
1053 | 2120 4
1054 | 2121 4
1055 | 2122 4
1056 | 2123 4
1057 | 2124 3
1058 | 2125 3
1059 | 2126 3
1060 | 2127 4
1061 | 2128 3
1062 | 2129 3
1063 | 2130 4
1064 | 2131 0
1065 | 2132 6
1066 | 2133 0
1067 | 2134 6
1068 | 2135 6
1069 | 2136 0
1070 | 2137 0
1071 | 2138 3
1072 | 2139 3
1073 | 2140 3
1074 | 2141 3
1075 | 2142 3
1076 | 2143 1
1077 | 2144 1
1078 | 2145 1
1079 | 2146 3
1080 | 2147 3
1081 | 2148 3
1082 | 2149 3
1083 | 2150 5
1084 | 2151 6
1085 | 2152 3
1086 | 2153 4
1087 | 2154 6
1088 | 2155 0
1089 | 2156 0
1090 | 2157 6
1091 | 2158 6
1092 | 2159 6
1093 | 2160 6
1094 | 2161 6
1095 | 2162 3
1096 | 2163 3
1097 | 2164 6
1098 | 2165 6
1099 | 2166 5
1100 | 2167 2
1101 | 2168 1
1102 | 2169 2
1103 | 2170 1
1104 | 2171 0
1105 | 2172 0
1106 | 2173 6
1107 | 2174 6
1108 | 2175 2
1109 | 2176 3
1110 | 2177 3
1111 | 2178 5
1112 | 2179 0
1113 | 2180 0
1114 | 2181 0
1115 | 2182 0
1116 | 2183 0
1117 | 2184 5
1118 | 2185 5
1119 | 2186 0
1120 | 2187 3
1121 | 2188 5
1122 | 2189 0
1123 | 2190 6
1124 | 2191 3
1125 | 2192 6
1126 | 2193 0
1127 | 2194 0
1128 | 2195 0
1129 | 2196 0
1130 | 2197 0
1131 | 2198 0
1132 | 2199 0
1133 | 2200 0
1134 | 2201 0
1135 | 2202 0
1136 | 2203 0
1137 | 2204 3
1138 | 2205 3
1139 | 2206 3
1140 | 2207 3
1141 | 2208 1
1142 | 2209 6
1143 | 2210 1
1144 | 2211 0
1145 | 2212 3
1146 | 2213 3
1147 | 2214 3
1148 | 2215 3
1149 | 2216 3
1150 | 2217 6
1151 | 2218 1
1152 | 2219 0
1153 | 2220 2
1154 | 2221 2
1155 | 2222 4
1156 | 2223 4
1157 | 2224 4
1158 | 2225 4
1159 | 2226 4
1160 | 2227 5
1161 | 2228 6
1162 | 2229 3
1163 | 2230 3
1164 | 2231 0
1165 | 2232 0
1166 | 2233 0
1167 | 2234 0
1168 | 2235 5
1169 | 2236 4
1170 | 2237 4
1171 | 2238 4
1172 | 2239 4
1173 | 2240 4
1174 | 2241 3
1175 | 2242 3
1176 | 2243 3
1177 | 2244 3
1178 | 2245 3
1179 | 2246 0
1180 | 2247 3
1181 | 2248 4
1182 | 2249 4
1183 | 2250 4
1184 | 2251 1
1185 | 2252 1
1186 | 2253 3
1187 | 2254 1
1188 | 2255 1
1189 | 2256 5
1190 | 2257 1
1191 | 2258 3
1192 | 2259 4
1193 | 2260 4
1194 | 2261 4
1195 | 2262 4
1196 | 2263 4
1197 | 2264 4
1198 | 2265 4
1199 | 2266 0
1200 | 2267 0
1201 | 2268 0
1202 | 2269 5
1203 | 2270 5
1204 | 2271 5
1205 | 2272 5
1206 | 2273 5
1207 | 2274 0
1208 | 2275 5
1209 | 2276 3
1210 | 2277 0
1211 | 2278 6
1212 | 2279 2
1213 | 2280 0
1214 | 2281 5
1215 | 2282 3
1216 | 2283 3
1217 | 2284 5
1218 | 2285 5
1219 | 2286 5
1220 | 2287 5
1221 | 2288 5
1222 | 2289 4
1223 | 2290 4
1224 | 2291 0
1225 | 2292 4
1226 | 2293 0
1227 | 2294 4
1228 | 2295 0
1229 | 2296 3
1230 | 2297 4
1231 | 2298 4
1232 | 2299 4
1233 | 2300 1
1234 | 2301 3
1235 | 2302 3
1236 | 2303 3
1237 | 2304 3
1238 | 2305 3
1239 | 2306 4
1240 | 2307 2
1241 | 2308 3
1242 | 2309 3
1243 | 2310 3
1244 | 2311 0
1245 | 2312 0
1246 | 2313 2
1247 | 2314 3
1248 | 2315 3
1249 | 2316 3
1250 | 2317 3
1251 | 2318 1
1252 | 2319 1
1253 | 2320 3
1254 | 2321 0
1255 | 2322 1
1256 | 2323 4
1257 | 2324 1
1258 | 2325 1
1259 | 2326 1
1260 | 2327 1
1261 | 2328 1
1262 | 2329 1
1263 | 2330 0
1264 | 2331 1
1265 | 2332 0
1266 | 2333 0
1267 | 2334 2
1268 | 2335 4
1269 | 2336 4
1270 | 2337 4
1271 | 2338 3
1272 | 2339 3
1273 | 2340 3
1274 | 2341 4
1275 | 2342 0
1276 | 2343 3
1277 | 2344 3
1278 | 2345 3
1279 | 2346 3
1280 | 2347 0
1281 | 2348 3
1282 | 2349 3
1283 | 2350 4
1284 | 2351 4
1285 | 2352 4
1286 | 2353 4
1287 | 2354 4
1288 | 2355 4
1289 | 2356 0
1290 | 2357 4
1291 | 2358 3
1292 | 2359 2
1293 | 2360 0
1294 | 2361 3
1295 | 2362 4
1296 | 2363 5
1297 | 2364 0
1298 | 2365 2
1299 | 2366 2
1300 | 2367 3
1301 | 2368 3
1302 | 2369 3
1303 | 2370 3
1304 | 2371 3
1305 | 2372 2
1306 | 2373 3
1307 | 2374 5
1308 | 2375 5
1309 | 2376 4
1310 | 2377 1
1311 | 2378 4
1312 | 2379 4
1313 | 2380 4
1314 | 2381 3
1315 | 2382 4
1316 | 2383 4
1317 | 2384 0
1318 | 2385 4
1319 | 2386 4
1320 | 2387 4
1321 | 2388 5
1322 | 2389 2
1323 | 2390 2
1324 | 2391 2
1325 | 2392 2
1326 | 2393 4
1327 | 2394 6
1328 | 2395 6
1329 | 2396 6
1330 | 2397 6
1331 | 2398 3
1332 | 2399 4
1333 | 2400 4
1334 | 2401 4
1335 | 2402 1
1336 | 2403 3
1337 | 2404 0
1338 | 2405 3
1339 | 2406 3
1340 | 2407 5
1341 | 2408 0
1342 | 2409 2
1343 | 2410 3
1344 | 2411 3
1345 | 2412 3
1346 | 2413 3
1347 | 2414 3
1348 | 2415 2
1349 | 2416 4
1350 | 2417 4
1351 | 2418 0
1352 | 2419 0
1353 | 2420 3
1354 | 2421 2
1355 | 2422 6
1356 | 2423 6
1357 | 2424 0
1358 | 2425 3
1359 | 2426 3
1360 | 2427 3
1361 | 2428 5
1362 | 2429 1
1363 | 2430 3
1364 | 2431 4
1365 | 2432 4
1366 | 2433 2
1367 | 2434 4
1368 | 2435 4
1369 | 2436 4
1370 | 2437 3
1371 | 2438 3
1372 | 2439 2
1373 | 2440 2
1374 | 2441 2
1375 | 2442 2
1376 | 2443 2
1377 | 2444 2
1378 | 2445 2
1379 | 2446 2
1380 | 2447 2
1381 | 2448 2
1382 | 2449 0
1383 | 2450 2
1384 | 2451 2
1385 | 2452 2
1386 | 2453 0
1387 | 2454 6
1388 | 2455 6
1389 | 2456 5
1390 | 2457 6
1391 | 2458 6
1392 | 2459 3
1393 | 2460 2
1394 | 2461 6
1395 | 2462 3
1396 | 2463 4
1397 | 2464 4
1398 | 2465 4
1399 | 2466 2
1400 | 2467 6
1401 | 2468 6
1402 | 2469 0
1403 | 2470 0
1404 | 2471 3
1405 | 2472 0
1406 | 2473 4
1407 | 2474 4
1408 | 2475 3
1409 | 2476 2
1410 | 2477 3
1411 | 2478 1
1412 | 2479 6
1413 | 2480 6
1414 | 2481 5
1415 | 2482 3
1416 | 2483 4
1417 | 2484 3
1418 | 2485 5
1419 | 2486 3
1420 | 2487 1
1421 | 2488 1
1422 | 2489 3
1423 | 2490 4
1424 | 2491 5
1425 | 2492 2
1426 | 2493 3
1427 | 2494 3
1428 | 2495 3
1429 | 2496 4
1430 | 2497 5
1431 | 2498 4
1432 | 2499 0
1433 | 2500 3
1434 | 2501 3
1435 | 2502 0
1436 | 2503 2
1437 | 2504 1
1438 | 2505 1
1439 | 2506 5
1440 | 2507 2
1441 | 2508 3
1442 | 2509 3
1443 | 2510 5
1444 | 2511 0
1445 | 2512 2
1446 | 2513 3
1447 | 2514 2
1448 | 2515 2
1449 | 2516 5
1450 | 2517 5
1451 | 2518 4
1452 | 2519 3
1453 | 2520 4
1454 | 2521 3
1455 | 2522 2
1456 | 2523 2
1457 | 2524 4
1458 | 2525 2
1459 | 2526 4
1460 | 2527 5
1461 | 2528 5
1462 | 2529 3
1463 | 2530 2
1464 | 2531 3
1465 | 2532 1
1466 | 2533 0
1467 | 2534 3
1468 | 2535 3
1469 | 2536 4
1470 | 2537 5
1471 | 2538 4
1472 | 2539 3
1473 | 2540 3
1474 | 2541 3
1475 | 2542 3
1476 | 2543 3
1477 | 2544 0
1478 | 2545 1
1479 | 2546 2
1480 | 2547 4
1481 | 2548 4
1482 | 2549 4
1483 | 2550 3
1484 | 2551 3
1485 | 2552 3
1486 | 2553 5
1487 | 2554 2
1488 | 2555 3
1489 | 2556 2
1490 | 2557 2
1491 | 2558 2
1492 | 2559 3
1493 | 2560 2
1494 | 2561 2
1495 | 2562 0
1496 | 2563 4
1497 | 2564 4
1498 | 2565 3
1499 | 2566 3
1500 | 2567 3
1501 | 2568 3
1502 | 2569 3
1503 | 2570 3
1504 | 2571 3
1505 | 2572 3
1506 | 2573 3
1507 | 2574 3
1508 | 2575 0
1509 | 2576 0
1510 | 2577 3
1511 | 2578 0
1512 | 2579 3
1513 | 2580 0
1514 | 2581 2
1515 | 2582 3
1516 | 2583 4
1517 | 2584 1
1518 | 2585 2
1519 | 2586 5
1520 | 2587 4
1521 | 2588 3
1522 | 2589 3
1523 | 2590 3
1524 | 2591 1
1525 | 2592 5
1526 | 2593 3
1527 | 2594 4
1528 | 2595 3
1529 | 2596 2
1530 | 2597 2
1531 | 2598 1
1532 | 2599 3
1533 | 2600 3
1534 | 2601 3
1535 | 2602 3
1536 | 2603 3
1537 | 2604 6
1538 | 2605 3
1539 | 2606 3
1540 | 2607 3
1541 | 2608 6
1542 | 2609 3
1543 | 2610 3
1544 | 2611 3
1545 | 2612 2
1546 | 2613 3
1547 | 2614 2
1548 | 2615 4
1549 | 2616 2
1550 | 2617 4
1551 | 2618 2
1552 | 2619 2
1553 | 2620 1
1554 | 2621 5
1555 | 2622 6
1556 | 2623 4
1557 | 2624 3
1558 | 2625 3
1559 | 2626 3
1560 | 2627 2
1561 | 2628 5
1562 | 2629 3
1563 | 2630 3
1564 | 2631 4
1565 | 2632 3
1566 | 2633 3
1567 | 2634 3
1568 | 2635 3
1569 | 2636 3
1570 | 2637 4
1571 | 2638 6
1572 | 2639 0
1573 | 2640 3
1574 | 2641 2
1575 | 2642 2
1576 | 2643 2
1577 | 2644 5
1578 | 2645 4
1579 | 2646 4
1580 | 2647 4
1581 | 2648 4
1582 | 2649 6
1583 | 2650 3
1584 | 2651 2
1585 | 2652 2
1586 | 2653 0
1587 | 2654 2
1588 | 2655 2
1589 | 2656 2
1590 | 2657 2
1591 | 2658 2
1592 | 2659 3
1593 | 2660 4
1594 | 2661 4
1595 | 2662 4
1596 | 2663 3
1597 | 2664 3
1598 | 2665 4
1599 | 2666 4
1600 | 2667 3
1601 | 2668 3
1602 | 2669 3
1603 | 2670 4
1604 | 2671 4
1605 | 2672 4
1606 | 2673 4
1607 | 2674 4
1608 | 2675 4
1609 | 2676 3
1610 | 2677 4
1611 | 2678 4
1612 | 2679 4
1613 | 2680 4
1614 | 2681 4
1615 | 2682 4
1616 | 2683 4
1617 | 2684 4
1618 | 2685 2
1619 | 2686 3
1620 | 2687 3
1621 | 2688 3
1622 | 2689 2
1623 | 2690 6
1624 | 2691 2
1625 | 2692 3
1626 | 2693 3
1627 | 2694 4
1628 | 2695 4
1629 | 2696 3
1630 | 2697 3
1631 | 2698 3
1632 | 2699 3
1633 | 2700 3
1634 | 2701 3
1635 | 2702 0
1636 | 2703 3
1637 | 2704 3
1638 | 2705 3
1639 | 2706 3
1640 | 2707 3
1641 |
--------------------------------------------------------------------------------
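The block above is raw data rather than code or prose: each line appears to hold a node index followed by an integer class label in 0..6, matching the seven Cora classes. Below is a minimal parsing sketch, not the repository's own `loader.py`; the path `../data/cora/label.txt` and the helper name `read_label_file` are assumptions used only for illustration of the visible "<node_index> <class_label>" format.

```python
from collections import Counter

def read_label_file(path):
    """Return a dict mapping node index -> class label for a
    whitespace-separated "<node_index> <class_label>" file (assumed format)."""
    labels = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) != 2:
                continue  # skip blank or malformed trailing lines
            node, label = int(parts[0]), int(parts[1])
            labels[node] = label
    return labels

if __name__ == "__main__":
    # Hypothetical path; adjust to whichever split/label file is being read.
    labels = read_label_file("../data/cora/label.txt")
    print(len(labels), "labelled nodes")
    print(Counter(labels.values()))  # class distribution over the 7 classes
```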