├── .gitignore
├── GroupIM.png
├── LICENSE
├── README.md
├── data
│   └── weeplaces
│       ├── group_users.csv
│       ├── test_gi.csv
│       ├── test_ui_te.csv
│       ├── test_ui_tr.csv
│       ├── train_gi.csv
│       ├── train_ui.csv
│       ├── val_gi.csv
│       ├── val_ui_te.csv
│       └── val_ui_tr.csv
├── eval
│   ├── evaluate.py
│   └── metrics.py
├── models
│   ├── aggregators.py
│   ├── discriminator.py
│   ├── encoder.py
│   └── models.py
├── requirements.txt
├── train.py
└── utils
    ├── __init__.py
    ├── group_utils.py
    └── user_utils.py
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .vscode
3 | log/
4 |
5 | # Byte-compiled / optimized / DLL files
6 | __pycache__/
7 | *.py[cod]
8 | *$py.class
9 | *.pyc
10 |
11 | # C extensions
12 | *.so
13 |
14 | # Distribution / packaging
15 | .Python
16 | env/
17 | build/
18 | develop-eggs/
19 | dist/
20 | downloads/
21 | eggs/
22 | .eggs/
23 | lib/
24 | lib64/
25 | parts/
26 | sdist/
27 | var/
28 | wheels/
29 | *.egg-info/
30 | .installed.cfg
31 | *.egg
32 |
33 | # PyInstaller
34 | # Usually these files are written by a python script from a template
35 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
36 | *.manifest
37 | *.spec
38 |
39 | # Installer logs
40 | pip-log.txt
41 | pip-delete-this-directory.txt
42 |
43 | # Unit test / coverage reports
44 | htmlcov/
45 | .tox/
46 | .coverage
47 | .coverage.*
48 | .cache
49 | nosetests.xml
50 | coverage.xml
51 | *.cover
52 | .hypothesis/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 |
62 | # Flask stuff:
63 | instance/
64 | .webassets-cache
65 |
66 | # Scrapy stuff:
67 | .scrapy
68 |
69 | # Sphinx documentation
70 | docs/_build/
71 |
72 | # PyBuilder
73 | target/
74 |
75 | # Jupyter Notebook
76 | .ipynb_checkpoints
77 |
78 | # pyenv
79 | .python-version
80 |
81 | # celery beat schedule file
82 | celerybeat-schedule
83 |
84 | # SageMath parsed files
85 | *.sage.py
86 |
87 | # dotenv
88 | .env
89 |
90 | # virtualenv
91 | .venv
92 | venv/
93 | ENV/
94 |
95 | # Spyder project settings
96 | .spyderproject
97 | .spyproject
98 |
99 | # Rope project settings
100 | .ropeproject
101 |
102 | # mkdocs documentation
103 | /site
104 |
105 | # mypy
106 | .mypy_cache/
107 |
--------------------------------------------------------------------------------
/GroupIM.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CrowdDynamicsLab/GroupIM/899890e3c2486ffe40b202718e99cd37b8a85502/GroupIM.png
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2020, CrowdDynamicsLab
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## GroupIM: A Mutual Information Maximization Framework for Neural Group Recommendation
2 |
3 | ##### Contributors: [Aravind Sankar](http://asankar3.web.engr.illinois.edu/) (asankar3@illinois.edu).
4 |
5 | > Aravind Sankar, Yanhong Wu, Yuhang Wu, Wei Zhang, Hao Yang and Hari Sundaram,
6 | "GroupIM: A Mutual Information Maximization Framework for Neural Group Recommendation",
7 | International ACM SIGIR Conference on Research and Development in Information Retrieval,
8 | SIGIR 2020, Virtual Event, China, July 25-30, 2020.
9 |
10 |
11 | This repository contains a PyTorch implementation of Group Information Maximization (GroupIM), a neural framework for recommending items to groups of users.
12 | GroupIM has three modules: a user preference encoder, a group preference
13 | aggregator, and a training objective over user and group interactions with mutual information maximization. GroupIM
14 | supports arbitrary preference encoder and aggregator architectures for group recommendation.
15 |
16 | 
17 |
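At a glance, the flow through these three modules can be sketched as below. The sketch is purely illustrative: the stand-in layers, names, and shapes are assumptions for exposition, not the actual classes defined under ``models/``.

```python
# Illustrative sketch of the GroupIM data flow (stand-in modules, not the real ones).
import torch
import torch.nn as nn

n_items, dim, n_members = 1000, 64, 4

encoder = nn.Linear(n_items, dim)          # stand-in for the user preference encoder
aggregator = lambda u: u.mean(dim=0)       # stand-in for the group preference aggregator
discriminator = nn.Bilinear(dim, dim, 1)   # scores (user, group) pairs for MI estimation

user_histories = torch.rand(n_members, n_items)    # members' item interaction vectors
user_embeds = encoder(user_histories)              # [n_members, dim] user preference embeddings
group_embed = aggregator(user_embeds)              # [dim] group preference embedding
member_scores = discriminator(user_embeds,
                              group_embed.expand_as(user_embeds))  # one score per member
```

In the actual model, the discriminator's scores drive the mutual information maximization objective; see ``models/discriminator.py`` for the real implementation.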
18 | ### Requirements
19 | The code has been tested running under Python 3.6 with the following packages installed (along with their dependencies):
20 |
21 |
22 | ```
23 | numpy==1.18.1
24 | pandas==1.0.3
25 | scikit-learn==0.23.1
26 | scipy==1.4.1
27 | torch==1.1.0
28 | ```
29 | To guarantee that you have the right package versions, you can use Anaconda to set up a virtual environment and install the above packages.
30 |
31 | ### Input Format
32 |
33 | This repository includes one dataset, ``Weeplaces``, for POI recommendation to groups of users.
34 | The set of all groups is randomly partitioned into training (70%), validation
35 | (10%), and test (20%) sets, while the individual user-item interactions of all users are used for training.
36 | The validation and test sets contain strict ephemeral groups (i.e., specific
37 | combinations of users that do not occur in the training set).
38 |
39 |
40 |
41 | To use your own data, provide the following files (a minimal loading sketch follows this list):
42 |
43 | - ``train_ui.csv``: a csv file specifying the training set of user-item interactions (one
44 | pair per line) where each line contains a ``(u, i)`` pair.
45 |
46 | - ``val_ui_tr.csv, test_ui_tr.csv``: csv files specifying fold-in item interactions for the validation and test sets
47 | of users.
48 |
49 |
50 | - ``val_ui_te.csv, test_ui_te.csv``: csv files specifying held-out item interactions for the validation and test sets
51 | of users. This set may be used to evaluate user-level item recommendation performance.
52 |
53 |
54 | - ``train_gi.csv, val_gi.csv, test_gi.csv``: csv files specifying group-item interactions (one
55 | pair per line) where each line contains a `(g, i)` pair.
56 |
57 | - ``group_users.csv``: a csv file specifying group memberships of all users and groups in the dataset where each line contains a ``(g, u)`` pair indicating that user `u` belongs to group `g`.
58 |
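As a rough guide, the snippet below shows one way such files could be read into sparse interaction matrices. It is not part of the repository; the column names (``group``/``user``/``item``, following the ``group,item`` header of ``val_gi.csv``) and the helper itself are assumptions.

```python
# Hypothetical helper for reading (row, item) interaction CSVs into sparse matrices.
import numpy as np
import pandas as pd
import scipy.sparse as sp

def load_interactions(path, row_col, n_rows, n_items):
    """Read a (row, item) CSV into a binary sparse matrix of implicit feedback."""
    df = pd.read_csv(path)
    data = np.ones(len(df), dtype=np.float32)   # every observed pair counts as a positive
    return sp.csr_matrix((data, (df[row_col].values, df["item"].values)),
                         shape=(n_rows, n_items))

# e.g., group-item interactions of the validation split (matrix sizes are dataset-dependent):
# val_gi = load_interactions("data/weeplaces/val_gi.csv", "group", n_groups, n_items)
```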
59 | ### Repository Organization
60 | - ``data/`` contains the necessary input file(s) for each dataset in the specified format.
61 | - ``models/`` contains:
62 |     - GroupIM model (``models.py``);
63 |     - user preference encoder (``encoder.py``);
64 |     - group preference aggregator (``aggregators.py``);
65 |     - discriminator for mutual information estimation and maximization (``discriminator.py``)
66 |
67 |
68 |
69 | - ``utils/`` contains:
70 |     - mini-batch loaders for user-item interactions (``user_utils.py``);
71 |     - mini-batch loaders for group-item interactions (``group_utils.py``);
72 | - ``eval/`` contains:
73 |     - ranking metrics NDCG and Recall at K (``metrics.py``), illustrated by the sketch after this list;
74 |     - helper functions to evaluate recommendation performance (``evaluate.py``);
75 |
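For reference, standard binary-relevance versions of these metrics look roughly like the sketch below. This is an illustrative restatement, not the code in ``eval/metrics.py``; the truncated normalization of recall is one common choice.

```python
# Illustrative Recall@K and NDCG@K with binary relevance (not the repository's metrics.py).
import numpy as np

def recall_at_k(scores, heldout_items, k):
    """Fraction of held-out items retrieved in the top-K, with truncated normalization."""
    top_k = np.argsort(-scores)[:k]
    hits = len(set(top_k.tolist()) & set(heldout_items))
    return hits / min(k, len(heldout_items))

def ndcg_at_k(scores, heldout_items, k):
    """Binary-relevance NDCG@K of the ranking induced by ``scores``."""
    top_k = np.argsort(-scores)[:k]
    relevant = set(heldout_items)
    gains = np.array([1.0 if item in relevant else 0.0 for item in top_k])
    discounts = 1.0 / np.log2(np.arange(2, len(top_k) + 2))   # positions 1..K
    dcg = float((gains * discounts).sum())
    idcg = float(discounts[:min(k, len(heldout_items))].sum())
    return dcg / idcg if idcg > 0 else 0.0
```

Group-level (or user-level) evaluation would apply these per group (or per user) over the corresponding held-out items and average the results.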
76 | ### Running the Model
77 | To train and evaluate the model (e.g., on `weeplaces`), please run
78 | ```bash
79 | python train.py --dataset weeplaces --cuda --pretrain_user --pretrain_mi
80 | ```
81 |
82 | Note: The model is not deterministic. All the experimental results provided in the paper are averaged across multiple
83 | runs.
84 |
85 |
86 | ## Reference
87 | If you make use of this code or the GroupIM algorithm in your work, please cite the following paper:
88 |
89 | ```
90 | @inproceedings{sankar2020groupim,
91 | title = {GroupIM: A Mutual Information Maximization Framework for Neural Group Recommendation},
92 | author = {Sankar, Aravind and Wu, Yanhong and Wu, Yuhang and Zhang, Wei and Yang, Hao and Sundaram, Hari},
93 | doi = {10.1145/3397271.3401116},
94 | booktitle = {Proceedings of the 43rd International ACM SIGIR Conference on Research and Development in Information Retrieval},
95 | pages = {1279--1288},
96 | year = {2020}
97 | }
98 | ```
--------------------------------------------------------------------------------
/data/weeplaces/val_gi.csv:
--------------------------------------------------------------------------------
1 | group,item
2 | 15913,18744
3 | 15914,5310
4 | 15915,8560
5 | 15916,20561
6 | 15916,397
7 | 15917,6498
8 | 15917,12329
9 | 15918,10178
10 | 15919,7510
11 | 15920,3004
12 | 15920,1253
13 | 15920,5765
14 | 15921,16576
15 | 15922,3256
16 | 15922,18473
17 | 15922,11198
18 | 15923,9327
19 | 15924,4933
20 | 15925,24531
21 | 15925,22815
22 | 15925,14033
23 | 15926,681
24 | 15926,16786
25 | 15926,931
26 | 15927,5756
27 | 15927,19819
28 | 15928,7675
29 | 15928,11214
30 | 15929,3805
31 | 15930,20000
32 | 15931,4065
33 | 15931,8983
34 | 15932,16828
35 | 15933,8167
36 | 15933,19900
37 | 15933,8166
38 | 15934,5043
39 | 15935,9186
40 | 15936,11440
41 | 15937,4157
42 | 15937,9327
43 | 15937,10112
44 | 15937,18183
45 | 15938,2149
46 | 15938,21947
47 | 15939,18351
48 | 15940,17424
49 | 15941,404
50 | 15942,24815
51 | 15943,13062
52 | 15943,3209
53 | 15943,12125
54 | 15944,14207
55 | 15945,15167
56 | 15946,6428
57 | 15947,3504
58 | 15948,7832
59 | 15949,14353
60 | 15949,10814
61 | 15950,9327
62 | 15951,2246
63 | 15952,22522
64 | 15953,4860
65 | 15954,19093
66 | 15954,17241
67 | 15954,9159
68 | 15955,18583
69 | 15956,5092
70 | 15956,14834
71 | 15956,617
72 | 15956,2736
73 | 15956,7296
74 | 15956,15753
75 | 15956,17574
76 | 15956,18459
77 | 15956,20311
78 | 15956,8832
79 | 15956,12224
80 | 15956,7922
81 | 15956,21566
82 | 15956,6963
83 | 15956,609
84 | 15956,21081
85 | 15956,23116
86 | 15956,6646
87 | 15956,8146
88 | 15956,20346
89 | 15956,3295
90 | 15956,24333
91 | 15956,16133
92 | 15956,12365
93 | 15956,10214
94 | 15956,7255
95 | 15956,16132
96 | 15956,19418
97 | 15956,13298
98 | 15956,5584
99 | 15956,24398
100 | 15956,21356
101 | 15956,5855
102 | 15956,11203
103 | 15956,24651
104 | 15956,506
105 | 15957,17891
106 | 15958,4516
107 | 15959,47
108 | 15959,7645
109 | 15959,108
110 | 15959,14746
111 | 15960,18238
112 | 15960,24761
113 | 15961,10402
114 | 15962,10436
115 | 15963,21708
116 | 15964,22848
117 | 15965,21504
118 | 15965,6703
119 | 15965,1818
120 | 15965,21897
121 | 15965,21034
122 | 15965,630
123 | 15965,21920
124 | 15965,22790
125 | 15965,3912
126 | 15965,6878
127 | 15965,10952
128 | 15965,21331
129 | 15965,14217
130 | 15965,22765
131 | 15965,21606
132 | 15966,12739
133 | 15967,21296
134 | 15967,11753
135 | 15968,25047
136 | 15969,22032
137 | 15970,23572
138 | 15971,9159
139 | 15972,10104
140 | 15972,17851
141 | 15972,1509
142 | 15972,9265
143 | 15972,4185
144 | 15972,23389
145 | 15972,7224
146 | 15973,24094
147 | 15974,22316
148 | 15975,2080
149 | 15976,24503
150 | 15976,11282
151 | 15976,1761
152 | 15976,23950
153 | 15976,23095
154 | 15977,20715
155 | 15977,4216
156 | 15978,15009
157 | 15978,11961
158 | 15978,9456
159 | 15978,24224
160 | 15978,5701
161 | 15978,13034
162 | 15978,12880
163 | 15978,21404
164 | 15978,17908
165 | 15978,9327
166 | 15978,2670
167 | 15979,19624
168 | 15979,2875
169 | 15980,9327
170 | 15981,24670
171 | 15981,2847
172 | 15981,1999
173 | 15982,9327
174 | 15983,6987
175 | 15984,18890
176 | 15985,20561
177 | 15985,22529
178 | 15985,24936
179 | 15985,12275
180 | 15986,9168
181 | 15987,19586
182 | 15987,9288
183 | 15987,17060
184 | 15988,23962
185 | 15988,23064
186 | 15988,20917
187 | 15988,2964
188 | 15988,20355
189 | 15988,9417
190 | 15988,837
191 | 15988,11305
192 | 15989,8573
193 | 15990,7028
194 | 15991,21831
195 | 15991,6428
196 | 15992,2391
197 | 15993,1536
198 | 15994,22529
199 | 15994,16847
200 | 15995,24968
201 | 15996,7247
202 | 15996,14489
203 | 15996,23309
204 | 15997,9142
205 | 15998,8508
206 | 15999,6171
207 | 16000,15729
208 | 16000,3264
209 | 16000,16671
210 | 16000,18885
211 | 16000,10496
212 | 16000,16377
213 | 16000,19449
214 | 16000,7376
215 | 16000,452
216 | 16000,16651
217 | 16000,455
218 | 16000,456
219 | 16000,20084
220 | 16000,12922
221 | 16000,3524
222 | 16000,7250
223 | 16000,1601
224 | 16000,17544
225 | 16001,17880
226 | 16001,18655
227 | 16001,23908
228 | 16002,1253
229 | 16003,22313
230 | 16003,6986
231 | 16004,20305
232 | 16005,104
233 | 16005,15757
234 | 16005,10079
235 | 16005,9612
236 | 16005,8205
237 | 16006,2456
238 | 16007,24166
239 | 16008,21958
240 | 16009,11277
241 | 16010,13748
242 | 16010,5186
243 | 16010,65
244 | 16011,18746
245 | 16012,13716
246 | 16013,6335
247 | 16013,9779
248 | 16013,5651
249 | 16013,10393
250 | 16013,14858
251 | 16013,5229
252 | 16013,5922
253 | 16013,15968
254 | 16013,24930
255 | 16014,1212
256 | 16015,6602
257 | 16016,23161
258 | 16017,12982
259 | 16017,22423
260 | 16018,11050
261 | 16019,6887
262 | 16020,1079
263 | 16020,9903
264 | 16020,10428
265 | 16020,16168
266 | 16020,21317
267 | 16021,15182
268 | 16022,19646
269 | 16023,24049
270 | 16023,6949
271 | 16023,20561
272 | 16024,1253
273 | 16025,17568
274 | 16025,7490
275 | 16025,6312
276 | 16025,22700
277 | 16025,9464
278 | 16025,10810
279 | 16025,12526
280 | 16025,7405
281 | 16025,15844
282 | 16025,8004
283 | 16025,21026
284 | 16025,15166
285 | 16025,3215
286 | 16025,22400
287 | 16025,10263
288 | 16026,1934
289 | 16026,5444
290 | 16026,9219
291 | 16026,14045
292 | 16027,23161
293 | 16028,19889
294 | 16029,16758
295 | 16029,21832
296 | 16029,7849
297 | 16030,22842
298 | 16031,732
299 | 16032,8273
300 | 16032,19567
301 | 16032,10860
302 | 16032,6943
303 | 16032,13452
304 | 16032,16241
305 | 16032,15253
306 | 16032,17168
307 | 16032,6850
308 | 16032,19889
309 | 16032,21165
310 | 16032,16292
311 | 16032,17889
312 | 16032,4233
313 | 16032,9297
314 | 16032,12838
315 | 16032,16952
316 | 16032,16884
317 | 16032,9159
318 | 16032,6359
319 | 16032,23233
320 | 16032,2447
321 | 16032,8057
322 | 16032,2822
323 | 16032,230
324 | 16032,17212
325 | 16032,1934
326 | 16032,21598
327 | 16032,24503
328 | 16032,24951
329 | 16032,893
330 | 16032,1653
331 | 16032,22561
332 | 16032,3025
333 | 16032,16940
334 | 16033,11362
335 | 16033,11054
336 | 16034,9327
337 | 16035,2450
338 | 16036,5308
339 | 16037,9789
340 | 16037,6602
341 | 16037,22127
342 | 16037,21064
343 | 16037,23295
344 | 16038,24004
345 | 16039,19515
346 | 16040,8481
347 | 16041,1253
348 | 16042,4218
349 | 16043,6586
350 | 16043,19447
351 | 16043,17531
352 | 16043,3676
353 | 16043,2608
354 | 16043,1597
355 | 16044,17362
356 | 16044,12266
357 | 16045,20042
358 | 16046,14204
359 | 16047,24278
360 | 16047,21632
361 | 16048,21464
362 | 16049,15258
363 | 16050,1650
364 | 16051,20561
365 | 16051,7405
366 | 16051,297
367 | 16052,20127
368 | 16052,11489
369 | 16052,13062
370 | 16052,22934
371 | 16052,1379
372 | 16052,6594
373 | 16052,9944
374 | 16052,18696
375 | 16052,16210
376 | 16052,7223
377 | 16052,8149
378 | 16052,9759
379 | 16053,17953
380 | 16053,18876
381 | 16054,24512
382 | 16055,11172
383 | 16055,7191
384 | 16056,1240
385 | 16057,4891
386 | 16057,15917
387 | 16057,6903
388 | 16057,9896
389 | 16057,20235
390 | 16057,5155
391 | 16057,18224
392 | 16058,17254
393 | 16059,13845
394 | 16059,1212
395 | 16060,104
396 | 16060,12297
397 | 16061,9159
398 | 16062,13982
399 | 16062,21339
400 | 16063,1613
401 | 16064,1373
402 | 16064,8701
403 | 16064,9666
404 | 16065,6890
405 | 16065,13876
406 | 16065,81
407 | 16065,7777
408 | 16065,17914
409 | 16065,19696
410 | 16066,16110
411 | 16066,1340
412 | 16067,11653
413 | 16068,15194
414 | 16068,3369
415 | 16068,15036
416 | 16068,1475
417 | 16068,18305
418 | 16068,14096
419 | 16069,20042
420 | 16070,17573
421 | 16070,4476
422 | 16070,22587
423 | 16071,102
424 | 16072,1601
425 | 16072,25020
426 | 16072,16014
427 | 16072,8122
428 | 16072,15765
429 | 16072,18409
430 | 16072,6262
431 | 16072,19221
432 | 16073,4216
433 | 16073,9327
434 | 16074,12455
435 | 16075,12545
436 | 16076,24160
437 | 16076,24473
438 | 16076,548
439 | 16076,16311
440 | 16077,6428
441 | 16078,13971
442 | 16078,7596
443 | 16079,19694
444 | 16079,7435
445 | 16080,12324
446 | 16081,13283
447 | 16081,5034
448 | 16081,13468
449 | 16081,2185
450 | 16081,12185
451 | 16081,24989
452 | 16081,17414
453 | 16081,25027
454 | 16081,8499
455 | 16081,2501
456 | 16081,6296
457 | 16081,22510
458 | 16082,3692
459 | 16083,4273
460 | 16084,23024
461 | 16085,104
462 | 16085,2774
463 | 16085,9694
464 | 16086,9158
465 | 16087,1686
466 | 16087,3200
467 | 16087,17913
468 | 16087,16000
469 | 16087,7896
470 | 16087,18975
471 | 16087,7072
472 | 16087,14153
473 | 16087,10410
474 | 16087,19340
475 | 16087,13603
476 | 16087,13724
477 | 16087,18316
478 | 16087,19564
479 | 16087,17554
480 | 16087,14157
481 | 16087,2991
482 | 16087,6751
483 | 16087,14777
484 | 16087,7015
485 | 16087,581
486 | 16087,18131
487 | 16087,17410
488 | 16087,15757
489 | 16087,18710
490 | 16087,4218
491 | 16087,21956
492 | 16087,22549
493 | 16087,22525
494 | 16087,7574
495 | 16087,5239
496 | 16087,2375
497 | 16087,12767
498 | 16087,8254
499 | 16087,12475
500 | 16087,17441
501 | 16087,18028
502 | 16087,22705
503 | 16087,20607
504 | 16087,8369
505 | 16087,20561
506 | 16087,3111
507 | 16087,12275
508 | 16087,13711
509 | 16087,6695
510 | 16087,6181
511 | 16087,16229
512 | 16087,22176
513 | 16087,395
514 | 16087,11081
515 | 16087,5627
516 | 16087,18129
517 | 16087,15133
518 | 16087,18492
519 | 16087,17736
520 | 16087,3535
521 | 16087,8484
522 | 16087,6327
523 | 16087,10498
524 | 16087,4147
525 | 16087,12503
526 | 16087,10550
527 | 16087,22987
528 | 16087,19234
529 | 16087,7625
530 | 16087,10204
531 | 16087,18282
532 | 16087,4230
533 | 16087,2867
534 | 16087,6591
535 | 16087,2467
536 | 16088,19605
537 | 16089,22608
538 | 16089,10965
539 | 16090,23287
540 | 16091,21831
541 | 16092,8833
542 | 16092,7693
543 | 16092,7630
544 | 16092,16492
545 | 16092,13371
546 | 16092,3617
547 | 16092,20174
548 | 16092,2870
549 | 16092,8908
550 | 16093,14291
551 | 16094,9327
552 | 16095,19889
553 | 16096,8493
554 | 16096,17660
555 | 16097,8594
556 | 16098,9327
557 | 16099,13543
558 | 16099,13541
559 | 16099,13546
560 | 16100,19050
561 | 16101,1490
562 | 16101,11701
563 | 16102,10861
564 | 16102,9198
565 | 16103,9431
566 | 16104,22161
567 | 16105,18351
568 | 16106,22867
569 | 16106,19397
570 | 16106,5412
571 | 16106,6084
572 | 16106,1961
573 | 16106,2647
574 | 16106,24363
575 | 16106,8624
576 | 16106,7373
577 | 16106,5295
578 | 16106,13138
579 | 16106,5045
580 | 16106,10778
581 | 16106,8429
582 | 16106,22845
583 | 16106,21506
584 | 16106,12110
585 | 16106,7634
586 | 16106,9965
587 | 16106,5547
588 | 16106,24681
589 | 16106,13431
590 | 16106,5574
591 | 16106,20626
592 | 16106,13433
593 | 16107,19698
594 | 16108,6307
595 | 16108,6306
596 | 16109,14624
597 | 16110,18124
598 | 16111,8046
599 | 16111,10953
600 | 16112,17837
601 | 16113,3874
602 | 16114,13844
603 | 16115,17138
604 | 16116,22330
605 | 16117,3158
606 | 16118,12403
607 | 16119,20944
608 | 16120,10
609 | 16120,18689
610 | 16121,24250
611 | 16121,22529
612 | 16122,6338
613 | 16123,12051
614 | 16123,1832
615 | 16124,25077
616 | 16124,9267
617 | 16124,23132
618 | 16124,3823
619 | 16124,7081
620 | 16124,12481
621 | 16124,9024
622 | 16124,10020
623 | 16124,4981
624 | 16124,21723
625 | 16124,23127
626 | 16124,8837
627 | 16124,13819
628 | 16124,22080
629 | 16124,10434
630 | 16124,21329
631 | 16124,5843
632 | 16124,8718
633 | 16124,22618
634 | 16124,3203
635 | 16124,7772
636 | 16124,16712
637 | 16124,13667
638 | 16124,4922
639 | 16124,16688
640 | 16124,4343
641 | 16124,24511
642 | 16124,17658
643 | 16124,12556
644 | 16124,6925
645 | 16124,1987
646 | 16124,1122
647 | 16124,4250
648 | 16124,20940
649 | 16124,17368
650 | 16124,843
651 | 16125,18689
652 | 16125,19796
653 | 16126,15336
654 | 16127,12434
655 | 16128,10113
656 | 16129,14627
657 | 16130,19889
658 | 16130,4249
659 | 16130,5391
660 | 16130,17146
661 | 16130,22766
662 | 16131,16641
663 | 16132,21264
664 | 16133,19794
665 | 16133,4122
666 | 16134,5015
667 | 16135,2960
668 | 16136,9159
669 | 16137,24332
670 | 16137,9694
671 | 16138,18029
672 | 16138,21479
673 | 16139,14624
674 | 16140,6633
675 | 16141,18131
676 | 16142,12998
677 | 16143,21339
678 | 16144,22842
679 | 16145,18689
680 | 16146,2034
681 | 16146,22862
682 | 16146,2774
683 | 16146,3323
684 | 16146,5528
685 | 16146,14280
686 | 16146,14599
687 | 16146,24968
688 | 16147,10112
689 | 16148,23460
690 | 16149,5060
691 | 16150,9961
692 | 16150,21339
693 | 16151,3468
694 | 16152,11744
695 | 16153,13025
696 | 16154,12565
697 | 16154,15223
698 | 16154,9152
699 | 16154,7686
700 | 16154,9327
701 | 16155,9327
702 | 16156,19492
703 | 16156,15865
704 | 16156,19634
705 | 16156,23590
706 | 16157,23395
707 | 16158,3004
708 | 16159,3893
709 | 16159,22090
710 | 16160,6041
711 | 16161,1760
712 | 16162,19326
713 | 16163,9327
714 | 16164,17203
715 | 16165,10436
716 | 16166,21464
717 | 16167,21834
718 | 16167,19889
719 | 16167,24212
720 | 16167,22700
721 | 16168,3519
722 | 16168,5157
723 | 16168,23617
724 | 16168,5442
725 | 16169,11912
726 | 16170,2439
727 | 16171,6428
728 | 16172,17445
729 | 16173,6694
730 | 16174,9327
731 | 16175,16834
732 | 16175,19826
733 | 16175,5607
734 | 16176,6034
735 | 16176,12349
736 | 16177,3210
737 | 16177,6645
738 | 16178,5015
739 | 16178,20899
740 | 16178,1596
741 | 16178,18762
742 | 16178,14692
743 | 16179,9158
744 | 16180,17839
745 | 16181,19889
746 | 16182,17630
747 | 16183,21131
748 | 16183,17567
749 | 16183,23752
750 | 16183,20768
751 | 16183,9192
752 | 16184,22443
753 | 16184,20939
754 | 16185,5599
755 | 16185,6428
756 | 16185,1253
757 | 16186,10579
758 | 16187,1802
759 | 16187,21633
760 | 16188,9327
761 | 16189,21420
762 | 16189,8738
763 | 16190,24276
764 | 16190,19609
765 | 16190,22784
766 | 16191,24883
767 | 16191,22316
768 | 16191,15649
769 | 16191,11721
770 | 16191,16114
771 | 16191,18132
772 | 16191,18758
773 | 16191,20561
774 | 16191,22224
775 | 16191,22128
776 | 16191,10060
777 | 16191,4272
778 | 16191,23583
779 | 16191,22830
780 | 16191,1060
781 | 16192,24377
782 | 16192,24191
783 | 16193,19246
784 | 16194,15088
785 | 16194,146
786 | 16195,9159
787 | 16196,15391
788 | 16196,19915
789 | 16196,11534
790 | 16196,20433
791 | 16196,13311
792 | 16196,24829
793 | 16196,7098
794 | 16196,7313
795 | 16196,16504
796 | 16196,2760
797 | 16196,2749
798 | 16197,4860
799 | 16198,17736
800 | 16199,20216
801 | 16199,6428
802 | 16199,4745
803 | 16199,6993
804 | 16200,6444
805 | 16200,9841
806 | 16200,7106
807 | 16200,7473
808 | 16200,1470
809 | 16200,3734
810 | 16200,3434
811 | 16200,18960
812 | 16200,11274
813 | 16200,24979
814 | 16200,13152
815 | 16200,20204
816 | 16200,3523
817 | 16200,20287
818 | 16200,19007
819 | 16200,7301
820 | 16200,17191
821 | 16200,3583
822 | 16200,11706
823 | 16200,17205
824 | 16200,24856
825 | 16200,3666
826 | 16200,2173
827 | 16200,17192
828 | 16201,21464
829 | 16202,2819
830 | 16203,4288
831 | 16204,2366
832 | 16205,22299
833 | 16205,6126
834 | 16205,11566
835 | 16205,15962
836 | 16206,11681
837 | 16206,13029
838 | 16206,12275
839 | 16206,12987
840 | 16206,24158
841 | 16207,18351
842 | 16208,20834
843 | 16209,19682
844 | 16210,5486
845 | 16211,186
846 | 16212,8417
847 | 16213,20644
848 | 16214,15367
849 | 16215,5330
850 | 16215,14013
851 | 16216,9159
852 | 16216,14033
853 | 16217,22974
854 | 16217,21046
855 | 16217,4272
856 | 16218,2001
857 | 16218,12424
858 | 16218,23756
859 | 16218,9552
860 | 16218,3267
861 | 16218,25073
862 | 16218,4285
863 | 16218,19973
864 | 16218,9017
865 | 16218,14398
866 | 16219,22360
867 | 16219,24968
868 | 16219,7028
869 | 16220,502
870 | 16221,20599
871 | 16222,1966
872 | 16222,15871
873 | 16222,4272
874 | 16222,1618
875 | 16222,15366
876 | 16222,12060
877 | 16222,2158
878 | 16222,23737
879 | 16222,20728
880 | 16222,18028
881 | 16222,21662
882 | 16222,11210
883 | 16222,6947
884 | 16222,2373
885 | 16222,16976
886 | 16222,18163
887 | 16222,11873
888 | 16222,22995
889 | 16222,6936
890 | 16222,4891
891 | 16222,6391
892 | 16222,13800
893 | 16222,7615
894 | 16222,25045
895 | 16222,12041
896 | 16222,11047
897 | 16222,10563
898 | 16222,1964
899 | 16222,36
900 | 16222,6432
901 | 16222,14065
902 | 16222,369
903 | 16222,22490
904 | 16222,14970
905 | 16222,2414
906 | 16222,9312
907 | 16222,23024
908 | 16223,16885
909 | 16224,14808
910 | 16224,13845
911 | 16224,2034
912 | 16225,2456
913 | 16226,8753
914 | 16226,12108
915 | 16226,16526
916 | 16226,23568
917 | 16227,13339
918 | 16228,11328
919 | 16229,18951
920 | 16230,13206
921 | 16231,4376
922 | 16232,6555
923 | 16232,11266
924 | 16233,12800
925 | 16234,18823
926 | 16234,22753
927 | 16235,21037
928 | 16236,16881
929 | 16236,12065
930 | 16236,19889
931 | 16236,24049
932 | 16236,5308
933 | 16236,16161
934 | 16236,6349
935 | 16236,4675
936 | 16237,23578
937 | 16237,6319
938 | 16237,17410
939 | 16237,471
940 | 16238,20225
941 | 16238,20038
942 | 16239,894
943 | 16240,22157
944 | 16241,1223
945 | 16242,17936
946 | 16242,22406
947 | 16242,22265
948 | 16243,6319
949 | 16243,10396
950 | 16243,16894
951 | 16244,9108
952 | 16245,21420
953 | 16246,5521
954 | 16246,2868
955 | 16246,16577
956 | 16246,4518
957 | 16247,6585
958 | 16247,18864
959 | 16248,15009
960 | 16248,20849
961 | 16248,23227
962 | 16248,8539
963 | 16248,1181
964 | 16248,5864
965 | 16248,22287
966 | 16248,6823
967 | 16248,2670
968 | 16248,24353
969 | 16248,8064
970 | 16248,306
971 | 16248,7550
972 | 16248,24802
973 | 16248,24878
974 | 16248,7616
975 | 16248,5701
976 | 16248,22936
977 | 16248,23943
978 | 16248,20370
979 | 16248,13775
980 | 16248,14797
981 | 16248,6023
982 | 16248,22085
983 | 16248,9515
984 | 16248,15348
985 | 16248,24344
986 | 16248,13085
987 | 16248,20562
988 | 16248,8771
989 | 16248,8976
990 | 16249,7807
991 | 16250,115
992 | 16251,6805
993 | 16251,12500
994 | 16252,2276
995 | 16252,15683
996 | 16252,10779
997 | 16253,24981
998 | 16254,19954
999 | 16255,20297
1000 | 16255,7976
1001 | 16256,4581
1002 | 16257,4376
1003 | 16258,5191
1004 | 16259,10735
1005 | 16259,4107
1006 | 16259,13169
1007 | 16259,24166
1008 | 16259,14063
1009 | 16260,16781
1010 | 16261,9075
1011 | 16262,19738
1012 | 16263,8563
1013 | 16263,24211
1014 | 16264,12100
1015 | 16264,2627
1016 | 16265,10215
1017 | 16266,20024
1018 | 16266,20035
1019 | 16266,11970
1020 | 16266,20007
1021 | 16266,11880
1022 | 16266,12905
1023 | 16266,22915
1024 | 16267,6455
1025 | 16268,24744
1026 | 16269,23343
1027 | 16269,19954
1028 | 16269,21064
1029 | 16269,1549
1030 | 16269,8384
1031 | 16269,16691
1032 | 16270,22610
1033 | 16271,11741
1034 | 16272,6694
1035 | 16273,9814
1036 | 16273,1238
1037 | 16274,14346
1038 | 16275,11275
1039 | 16275,10872
1040 | 16275,3701
1041 | 16275,9207
1042 | 16276,18634
1043 | 16277,15167
1044 | 16277,16221
1045 | 16278,18402
1046 | 16278,18174
1047 | 16278,25026
1048 | 16278,6487
1049 | 16279,6428
1050 | 16280,10371
1051 | 16281,11615
1052 | 16282,9304
1053 | 16283,17837
1054 | 16284,17400
1055 | 16285,22579
1056 | 16285,8999
1057 | 16286,6414
1058 | 16287,18588
1059 | 16288,14225
1060 | 16288,19224
1061 | 16288,9472
1062 | 16288,9604
1063 | 16288,14730
1064 | 16288,11506
1065 | 16288,2893
1066 | 16288,2464
1067 | 16288,15532
1068 | 16288,18132
1069 | 16289,2034
1070 | 16290,13917
1071 | 16291,9939
1072 | 16292,24099
1073 | 16293,18689
1074 | 16294,404
1075 | 16295,12238
1076 | 16295,1481
1077 | 16295,11399
1078 | 16296,6347
1079 | 16297,21958
1080 | 16297,10810
1081 | 16297,9159
1082 | 16298,21957
1083 | 16298,12998
1084 | 16298,20561
1085 | 16298,19072
1086 | 16299,16994
1087 | 16299,18603
1088 | 16300,8908
1089 | 16300,9159
1090 | 16300,894
1091 | 16301,12747
1092 | 16302,18238
1093 | 16303,21591
1094 | 16304,3467
1095 | 16304,3486
1096 | 16305,18689
1097 | 16305,18008
1098 | 16305,5156
1099 | 16305,221
1100 | 16306,20055
1101 | 16307,23595
1102 | 16307,1794
1103 | 16307,22507
1104 | 16307,24000
1105 | 16307,6368
1106 | 16307,18737
1107 | 16307,20623
1108 | 16307,983
1109 | 16307,7860
1110 | 16307,17921
1111 | 16307,13670
1112 | 16307,13233
1113 | 16307,23006
1114 | 16307,15209
1115 | 16307,17341
1116 | 16307,1283
1117 | 16307,23134
1118 | 16307,22024
1119 | 16307,13660
1120 | 16307,6155
1121 | 16307,15679
1122 | 16307,4855
1123 | 16307,16240
1124 | 16307,24759
1125 | 16307,2019
1126 | 16307,20534
1127 | 16307,5022
1128 | 16307,22274
1129 | 16307,24784
1130 | 16307,19809
1131 | 16307,17671
1132 | 16308,7717
1133 | 16309,22120
1134 | 16309,5704
1135 | 16310,9561
1136 | 16311,1289
1137 | 16312,4675
1138 | 16313,8722
1139 | 16314,24191
1140 | 16315,15507
1141 | 16316,3101
1142 | 16317,464
1143 | 16318,7067
1144 | 16318,22009
1145 | 16318,9032
1146 | 16319,20465
1147 | 16320,17444
1148 | 16321,16972
1149 | 16322,10643
1150 | 16323,5015
1151 | 16324,17254
1152 | 16324,6993
1153 | 16325,14152
1154 | 16325,19302
1155 | 16326,22330
1156 | 16327,1796
1157 | 16327,6139
1158 | 16327,21466
1159 | 16328,15247
1160 | 16329,23504
1161 | 16330,9464
1162 | 16331,20269
1163 | 16332,9420
1164 | 16333,21039
1165 | 16334,19471
1166 | 16335,1482
1167 | 16336,24686
1168 | 16336,16943
1169 | 16336,2548
1170 | 16336,7997
1171 | 16336,23190
1172 | 16336,10767
1173 | 16336,15033
1174 | 16336,24163
1175 | 16336,6324
1176 | 16336,14821
1177 | 16336,18374
1178 | 16336,4619
1179 | 16336,15978
1180 | 16336,1093
1181 | 16336,23661
1182 | 16336,5546
1183 | 16336,20578
1184 | 16336,9938
1185 | 16336,8821
1186 | 16336,20992
1187 | 16336,11018
1188 | 16336,2708
1189 | 16336,6067
1190 | 16336,7737
1191 | 16336,20316
1192 | 16336,23158
1193 | 16336,18540
1194 | 16336,11602
1195 | 16336,6018
1196 | 16336,11603
1197 | 16336,12064
1198 | 16336,5648
1199 | 16336,800
1200 | 16336,13951
1201 | 16336,12837
1202 | 16336,1296
1203 | 16336,4547
1204 | 16336,16195
1205 | 16336,11691
1206 | 16336,12090
1207 | 16336,15600
1208 | 16336,10294
1209 | 16336,440
1210 | 16336,12234
1211 | 16336,5507
1212 | 16336,9866
1213 | 16336,13852
1214 | 16336,15813
1215 | 16336,12790
1216 | 16336,16872
1217 | 16336,15239
1218 | 16337,14624
1219 | 16337,12446
1220 | 16338,21458
1221 | 16339,18470
1222 | 16340,9159
1223 | 16341,6558
1224 | 16341,22172
1225 | 16342,19798
1226 | 16343,18583
1227 | 16344,2456
1228 | 16345,10420
1229 | 16346,8469
1230 | 16347,15506
1231 | 16348,6295
1232 | 16349,14624
1233 | 16350,19776
1234 | 16350,20762
1235 | 16350,6248
1236 | 16350,12174
1237 | 16351,17515
1238 | 16351,6694
1239 | 16352,21988
1240 | 16353,10860
1241 | 16354,5499
1242 | 16355,14890
1243 | 16355,15055
1244 | 16356,24050
1245 | 16357,18555
1246 | 16358,18689
1247 | 16359,12998
1248 | 16360,10794
1249 | 16361,18742
1250 | 16361,9215
1251 | 16362,20787
1252 | 16363,23024
1253 | 16364,1544
1254 | 16365,12184
1255 | 16365,9206
1256 | 16366,7037
1257 | 16367,19172
1258 | 16368,23962
1259 | 16369,21536
1260 | 16369,3692
1261 | 16369,16005
1262 | 16369,14054
1263 | 16369,7201
1264 | 16369,19095
1265 | 16369,21443
1266 | 16369,10593
1267 | 16369,21295
1268 | 16369,8852
1269 | 16369,4850
1270 | 16369,14881
1271 | 16370,12455
1272 | 16371,14573
1273 | 16372,10121
1274 | 16373,15520
1275 | 16373,18583
1276 | 16373,98
1277 | 16373,9327
1278 | 16374,1495
1279 | 16375,20561
1280 | 16375,4249
1281 | 16376,15227
1282 | 16377,16801
1283 | 16378,17056
1284 | 16379,4483
1285 | 16380,11246
1286 | 16380,24687
1287 | 16380,12002
1288 | 16381,19928
1289 | 16382,1713
1290 | 16382,16582
1291 | 16382,11621
1292 | 16382,24847
1293 | 16382,21753
1294 | 16383,9327
1295 | 16384,22814
1296 | 16384,22316
1297 | 16385,13724
1298 | 16386,15987
1299 | 16386,10934
1300 | 16386,9159
1301 | 16386,16233
1302 | 16387,3131
1303 | 16388,7297
1304 | 16389,20739
1305 | 16390,9589
1306 | 16391,16216
1307 | 16391,16025
1308 | 16391,8210
1309 | 16391,10986
1310 | 16391,11264
1311 | 16391,2677
1312 | 16391,10150
1313 | 16391,10714
1314 | 16391,23830
1315 | 16392,7764
1316 | 16393,9159
1317 | 16394,18779
1318 | 16394,21061
1319 | 16394,13389
1320 | 16395,9327
1321 | 16396,9335
1322 | 16397,196
1323 | 16398,6698
1324 | 16399,15216
1325 | 16400,6642
1326 | 16401,12610
1327 | 16401,1231
1328 | 16401,6563
1329 | 16401,18808
1330 | 16401,6967
1331 | 16401,14138
1332 | 16401,24300
1333 | 16401,19598
1334 | 16401,19484
1335 | 16402,19605
1336 | 16402,115
1337 | 16403,8861
1338 | 16404,4218
1339 | 16405,22493
1340 | 16405,20496
1341 | 16406,10769
1342 | 16407,18689
1343 | 16408,6041
1344 | 16409,925
1345 | 16410,1240
1346 | 16411,14481
1347 | 16412,19584
1348 | 16413,9159
1349 | 16414,9004
1350 | 16415,19796
1351 | 16416,24091
1352 | 16416,4999
1353 | 16416,14641
1354 | 16416,4166
1355 | 16416,16958
1356 | 16416,5339
1357 | 16416,3358
1358 | 16416,13365
1359 | 16416,12341
1360 | 16416,2045
1361 | 16417,916
1362 | 16417,19715
1363 | 16417,6784
1364 | 16418,10417
1365 | 16418,7421
1366 | 16419,1573
1367 | 16420,613
1368 | 16420,11320
1369 | 16420,2089
1370 | 16420,18494
1371 | 16420,1544
1372 | 16420,21327
1373 | 16420,15647
1374 | 16420,5867
1375 | 16420,11502
1376 | 16420,6864
1377 | 16420,6319
1378 | 16420,7618
1379 | 16420,24962
1380 | 16421,1210
1381 | 16422,4691
1382 | 16422,21579
1383 | 16422,21581
1384 | 16422,18954
1385 | 16422,16142
1386 | 16422,9672
1387 | 16422,19924
1388 | 16422,16391
1389 | 16422,18481
1390 | 16422,14041
1391 | 16423,8610
1392 | 16424,9742
1393 | 16424,21410
1394 | 16424,13061
1395 | 16424,6312
1396 | 16424,10381
1397 | 16424,5220
1398 | 16424,22951
1399 | 16424,10321
1400 | 16424,9591
1401 | 16424,2897
1402 | 16424,10611
1403 | 16424,96
1404 | 16424,2839
1405 | 16424,7412
1406 | 16424,11005
1407 | 16424,24814
1408 | 16424,10320
1409 | 16424,5793
1410 | 16424,23831
1411 | 16424,6920
1412 | 16424,230
1413 | 16424,10814
1414 | 16424,2246
1415 | 16424,24069
1416 | 16424,7773
1417 | 16424,16047
1418 | 16424,24390
1419 | 16424,14578
1420 | 16424,14096
1421 | 16424,3777
1422 | 16424,12501
1423 | 16424,24503
1424 | 16425,824
1425 | 16425,9595
1426 | 16426,19738
1427 | 16426,19469
1428 | 16426,4713
1429 | 16426,8894
1430 | 16427,19203
1431 | 16427,13006
1432 | 16428,9159
1433 | 16429,15009
1434 | 16430,4288
1435 | 16431,7138
1436 | 16432,9327
1437 | 16433,11093
1438 | 16434,12331
1439 | 16435,11566
1440 | 16436,4218
1441 | 16437,23173
1442 | 16438,18689
1443 | 16439,7685
1444 | 16439,13727
1445 | 16440,1151
1446 | 16440,12021
1447 | 16440,14346
1448 | 16440,20605
1449 | 16440,24450
1450 | 16440,12052
1451 | 16441,23660
1452 | 16441,17439
1453 | 16441,1358
1454 | 16442,21157
1455 | 16442,8244
1456 | 16442,997
1457 | 16443,8789
1458 | 16444,13100
1459 | 16444,18835
1460 | 16444,15048
1461 | 16444,24001
1462 | 16444,13525
1463 | 16444,21097
1464 | 16444,1068
1465 | 16444,19929
1466 | 16444,20437
1467 | 16444,14499
1468 | 16444,10478
1469 | 16444,7679
1470 | 16445,2365
1471 | 16445,17241
1472 | 16445,21005
1473 | 16446,9142
1474 | 16447,9327
1475 | 16448,4194
1476 | 16449,21339
1477 | 16450,9266
1478 | 16451,13947
1479 | 16452,3145
1480 | 16453,7480
1481 | 16454,9053
1482 | 16455,17642
1483 | 16455,15137
1484 | 16455,13654
1485 | 16455,13602
1486 | 16455,19322
1487 | 16455,19323
1488 | 16455,7149
1489 | 16455,3146
1490 | 16455,14977
1491 | 16455,14488
1492 | 16455,21406
1493 | 16455,3966
1494 | 16455,17269
1495 | 16456,3974
1496 | 16457,1524
1497 | 16458,1253
1498 | 16459,10643
1499 | 16459,23009
1500 | 16460,13811
1501 | 16460,21287
1502 | 16461,5161
1503 | 16462,12021
1504 | 16462,14324
1505 | 16462,19009
1506 | 16462,3335
1507 | 16462,24067
1508 | 16462,7092
1509 | 16462,10807
1510 | 16463,22810
1511 | 16463,21087
1512 | 16463,1795
1513 | 16463,3777
1514 | 16463,5264
1515 | 16463,18451
1516 | 16463,16165
1517 | 16463,3311
1518 | 16463,8091
1519 | 16463,2467
1520 | 16463,9924
1521 | 16463,4272
1522 | 16463,13524
1523 | 16463,18758
1524 | 16463,10095
1525 | 16463,22484
1526 | 16463,23783
1527 | 16463,12926
1528 | 16463,8566
1529 | 16463,25045
1530 | 16463,6499
1531 | 16463,23049
1532 | 16464,8725
1533 | 16465,9159
1534 | 16466,19121
1535 | 16467,3210
1536 | 16468,24587
1537 | 16469,8004
1538 | 16470,6696
1539 | 16471,1544
1540 | 16471,5436
1541 | 16472,2422
1542 | 16473,19866
1543 | 16474,19928
1544 | 16475,9327
1545 | 16476,12998
1546 | 16477,17834
1547 | 16478,6195
1548 | 16478,223
1549 | 16479,23024
1550 | 16480,4223
1551 | 16480,10486
1552 | 16481,16463
1553 | 16482,11197
1554 | 16483,21339
1555 | 16484,20216
1556 | 16485,732
1557 | 16486,21645
1558 | 16486,19923
1559 | 16487,2691
1560 | 16488,9293
1561 | 16488,1954
1562 | 16489,2178
1563 | 16490,10914
1564 | 16490,23927
1565 | 16491,8563
1566 | 16492,12061
1567 | 16492,3055
1568 | 16492,11586
1569 | 16492,12607
1570 | 16493,7174
1571 | 16494,19654
1572 | 16494,676
1573 | 16495,6312
1574 | 16495,23262
1575 | 16495,19889
1576 | 16495,12177
1577 | 16495,9911
1578 | 16495,19994
1579 | 16495,621
1580 | 16496,21958
1581 | 16497,15314
1582 | 16498,7239
1583 | 16499,17798
1584 | 16500,20561
1585 | 16500,22556
1586 | 16500,15452
1587 | 16500,3922
1588 | 16501,14796
1589 | 16501,12615
1590 | 16502,21427
1591 | 16503,4847
1592 | 16504,23114
1593 | 16504,19050
1594 | 16505,5948
1595 | 16505,18706
1596 | 16506,24453
1597 | 16507,24124
1598 | 16507,22000
1599 | 16508,15632
1600 | 16508,3255
1601 | 16509,12065
1602 | 16510,20966
1603 | 16511,21464
1604 | 16512,11655
1605 | 16513,10994
1606 | 16514,18351
1607 | 16515,3646
1608 | 16516,21831
1609 | 16517,14963
1610 | 16517,18080
1611 | 16517,9663
1612 | 16517,15851
1613 | 16517,921
1614 | 16517,7533
1615 | 16517,9664
1616 | 16517,13845
1617 | 16517,8447
1618 | 16517,742
1619 | 16517,20175
1620 | 16517,13066
1621 | 16517,13369
1622 | 16517,540
1623 | 16517,21646
1624 | 16517,16183
1625 | 16517,699
1626 | 16517,4821
1627 | 16518,17837
1628 | 16518,6041
1629 | 16518,13120
1630 | 16519,11985
1631 | 16520,16547
1632 | 16521,15472
1633 | 16521,10420
1634 | 16522,20000
1635 | 16523,19009
1636 | 16524,7191
1637 | 16525,9327
1638 | 16526,7472
1639 | 16526,16314
1640 | 16527,2641
1641 | 16527,17788
1642 | 16528,7721
1643 | 16529,7201
1644 | 16529,16005
1645 | 16529,21443
1646 | 16530,20871
1647 | 16530,20957
1648 | 16530,18484
1649 | 16530,24345
1650 | 16530,5666
1651 | 16530,6803
1652 | 16530,20583
1653 | 16531,4675
1654 | 16531,9159
1655 | 16532,21305
1656 | 16533,19385
1657 | 16534,6195
1658 | 16535,17568
1659 | 16536,6755
1660 | 16536,3402
1661 | 16536,3531
1662 | 16536,24855
1663 | 16536,15300
1664 | 16536,8251
1665 | 16536,3674
1666 | 16536,1606
1667 | 16536,15884
1668 | 16536,4138
1669 | 16536,11391
1670 | 16537,20351
1671 | 16537,24705
1672 | 16537,4832
1673 | 16537,17699
1674 | 16537,1730
1675 | 16537,12758
1676 | 16537,18442
1677 | 16537,17541
1678 | 16537,15185
1679 | 16537,1815
1680 | 16537,19774
1681 | 16537,776
1682 | 16537,12292
1683 | 16537,8244
1684 | 16537,19560
1685 | 16537,15463
1686 | 16537,8144
1687 | 16537,1682
1688 | 16537,10502
1689 | 16537,19929
1690 | 16537,20437
1691 | 16537,10618
1692 | 16537,15635
1693 | 16537,14094
1694 | 16537,18415
1695 | 16537,16014
1696 | 16537,585
1697 | 16537,4720
1698 | 16537,12893
1699 | 16537,17538
1700 | 16537,10719
1701 | 16537,11411
1702 | 16537,810
1703 | 16537,11391
1704 | 16537,9850
1705 | 16537,456
1706 | 16537,2971
1707 | 16537,23921
1708 | 16537,16560
1709 | 16537,15766
1710 | 16537,3967
1711 | 16537,15884
1712 | 16537,16445
1713 | 16537,15388
1714 | 16537,18198
1715 | 16537,17540
1716 | 16537,2960
1717 | 16537,2036
1718 | 16538,16537
1719 | 16538,16809
1720 | 16539,17837
1721 | 16539,21005
1722 | 16539,19889
1723 | 16539,1065
1724 | 16540,8712
1725 | 16540,8711
1726 | 16541,5308
1727 | 16541,7013
1728 | 16541,18990
1729 | 16541,4525
1730 | 16542,12207
1731 | 16542,8709
1732 | 16542,1253
1733 | 16542,21548
1734 | 16543,13164
1735 | 16544,18084
1736 | 16545,4218
1737 | 16546,22316
1738 | 16546,920
1739 | 16547,22371
1740 | 16547,7220
1741 | 16548,4511
1742 | 16549,287
1743 | 16550,19050
1744 | 16551,133
1745 | 16552,916
1746 | 16552,1253
1747 | 16553,4254
1748 | 16554,23588
1749 | 16554,12666
1750 | 16555,7510
1751 | 16556,23507
1752 | 16557,10701
1753 | 16557,2273
1754 | 16558,4218
1755 | 16559,7044
1756 | 16560,19919
1757 | 16560,19243
1758 | 16561,452
1759 | 16562,12010
1760 | 16562,13563
1761 | 16562,5268
1762 | 16562,14723
1763 | 16562,958
1764 | 16562,12277
1765 | 16563,8491
1766 | 16563,21615
1767 | 16563,12200
1768 | 16563,14651
1769 | 16563,22277
1770 | 16563,850
1771 | 16563,10360
1772 | 16563,19982
1773 | 16563,12418
1774 | 16564,4675
1775 | 16564,11586
1776 | 16564,13472
1777 | 16565,13892
1778 | 16566,9299
1779 | 16566,502
1780 | 16566,10266
1781 | 16567,12839
1782 | 16568,24347
1783 | 16569,3822
1784 | 16570,19794
1785 | 16571,21635
1786 | 16571,9757
1787 | 16571,8404
1788 | 16571,467
1789 | 16571,19246
1790 | 16571,23703
1791 | 16572,20561
1792 | 16573,16982
1793 | 16574,16362
1794 | 16574,1670
1795 | 16575,20431
1796 | 16575,17241
1797 | 16576,12494
1798 | 16577,9327
1799 | 16578,10112
1800 | 16579,22051
1801 | 16579,11721
1802 | 16579,5117
1803 | 16579,15114
1804 | 16579,17633
1805 | 16579,6851
1806 | 16579,840
1807 | 16579,22815
1808 | 16579,23024
1809 | 16579,18363
1810 | 16579,13990
1811 | 16579,10723
1812 | 16579,13169
1813 | 16580,115
1814 | 16581,20127
1815 | 16582,21747
1816 | 16582,843
1817 | 16582,13349
1818 | 16582,24431
1819 | 16583,90
1820 | 16584,5237
1821 | 16585,17779
1822 | 16586,3970
1823 | 16586,4218
1824 | 16586,17615
1825 | 16586,13239
1826 | 16586,15908
1827 | 16586,11746
1828 | 16586,1261
1829 | 16587,17630
1830 | 16588,2422
1831 | 16589,1253
1832 | 16590,9327
1833 | 16590,4216
1834 | 16591,9073
1835 | 16591,19368
1836 | 16592,15856
1837 | 16593,15917
1838 | 16594,20591
1839 | 16595,2619
1840 | 16596,11398
1841 | 16597,6208
1842 | 16597,22509
1843 | 16598,12230
1844 | 16598,18363
1845 | 16598,22205
1846 | 16598,1105
1847 | 16598,37
1848 | 16598,16779
1849 | 16598,16534
1850 | 16598,22391
1851 | 16598,12526
1852 | 16598,6041
1853 | 16598,1934
1854 | 16598,14276
1855 | 16598,1653
1856 | 16598,17938
1857 | 16598,15369
1858 | 16598,12463
1859 | 16598,894
1860 | 16598,16479
1861 | 16599,9119
1862 | 16600,19923
1863 | 16601,10074
1864 | 16601,9327
1865 | 16602,24474
1866 | 16602,7110
1867 | 16602,19729
1868 | 16603,6993
1869 | 16604,18142
1870 | 16604,2080
1871 | 16605,2820
1872 | 16606,5207
1873 | 16606,11383
1874 | 16607,20933
1875 | 16608,3364
1876 | 16608,228
1877 | 16609,15927
1878 | 16610,11701
1879 | 16611,19704
1880 | 16612,18624
1881 | 16613,2456
1882 | 16614,22653
1883 | 16614,15540
1884 | 16614,7592
1885 | 16614,19489
1886 | 16614,7394
1887 | 16614,20699
1888 | 16614,4678
1889 | 16614,5958
1890 | 16614,16397
1891 | 16614,22952
1892 | 16614,14315
1893 | 16614,19372
1894 | 16614,7090
1895 | 16614,2482
1896 | 16614,212
1897 | 16614,22824
1898 | 16614,19478
1899 | 16614,5422
1900 | 16614,22582
1901 | 16615,13790
1902 | 16615,12792
1903 | 16615,14362
1904 | 16615,20356
1905 | 16615,18466
1906 | 16616,805
1907 | 16617,20644
1908 | 16618,5319
1909 | 16619,7855
1910 | 16620,4272
1911 | 16621,1253
1912 | 16622,2456
1913 | 16623,11007
1914 | 16623,7105
1915 | 16624,806
1916 | 16624,14616
1917 | 16625,845
1918 | 16626,3529
1919 | 16627,11979
1920 | 16628,3283
1921 | 16629,24047
1922 | 16629,820
1923 | 16629,9084
1924 | 16629,6719
1925 | 16629,12391
1926 | 16629,10732
1927 | 16629,11154
1928 | 16629,13591
1929 | 16629,19708
1930 | 16629,1287
1931 | 16629,5894
1932 | 16629,20221
1933 | 16629,12997
1934 | 16629,5885
1935 | 16629,20014
1936 | 16629,11931
1937 | 16629,23325
1938 | 16629,3282
1939 | 16629,18449
1940 | 16629,20000
1941 | 16629,13588
1942 | 16629,824
1943 | 16629,9632
1944 | 16629,15735
1945 | 16629,13374
1946 | 16629,23018
1947 | 16629,3550
1948 | 16629,12039
1949 | 16629,20226
1950 | 16629,3610
1951 | 16629,19998
1952 | 16629,8890
1953 | 16629,12527
1954 | 16629,11480
1955 | 16629,11520
1956 | 16629,8598
1957 | 16629,833
1958 | 16629,5127
1959 | 16630,12039
1960 | 16631,22848
1961 | 16632,14575
1962 | 16632,22647
1963 | 16632,17700
1964 | 16632,4035
1965 | 16632,19641
1966 | 16632,19640
1967 | 16632,22470
1968 | 16632,4063
1969 | 16633,18013
1970 | 16634,19326
1971 | 16635,22316
1972 | 16636,18986
1973 | 16636,18792
1974 | 16636,24000
1975 | 16637,21339
1976 | 16638,9159
1977 | 16639,9671
1978 | 16639,12509
1979 | 16639,14324
1980 | 16639,24092
1981 | 16639,22591
1982 | 16640,1651
1983 | 16641,14346
1984 | 16642,9159
1985 | 16642,14261
1986 | 16643,1831
1987 | 16644,3477
1988 | 16644,23008
1989 | 16644,4809
1990 | 16644,2491
1991 | 16644,11613
1992 | 16644,16714
1993 | 16644,23506
1994 | 16644,447
1995 | 16645,5498
1996 | 16645,11998
1997 | 16645,19612
1998 | 16645,2933
1999 | 16645,1535
2000 | 16645,8586
2001 | 16645,1110
2002 | 16645,23857
2003 | 16645,18758
2004 | 16645,5290
2005 | 16645,22837
2006 | 16645,13472
2007 | 16645,7625
2008 | 16645,16571
2009 | 16645,20561
2010 | 16645,17613
2011 | 16645,23024
2012 | 16645,8610
2013 | 16645,12709
2014 | 16645,4230
2015 | 16645,19889
2016 | 16645,22965
2017 | 16645,2897
2018 | 16645,3769
2019 | 16645,6073
2020 | 16645,11209
2021 | 16645,1398
2022 | 16645,1482
2023 | 16645,3093
2024 | 16645,11681
2025 | 16645,2819
2026 | 16645,5594
2027 | 16645,24246
2028 | 16645,2707
2029 | 16645,14624
2030 | 16645,16822
2031 | 16645,2644
2032 | 16645,5308
2033 | 16645,14839
2034 | 16645,19559
2035 | 16645,24276
2036 | 16645,13990
2037 | 16645,10314
2038 | 16645,22069
2039 | 16645,8769
2040 | 16645,3027
2041 | 16645,3040
2042 | 16645,18131
2043 | 16645,17145
2044 | 16645,2536
2045 | 16645,21411
2046 | 16645,9819
2047 | 16645,4272
2048 | 16645,7431
2049 | 16645,14346
2050 | 16645,5007
2051 | 16645,23049
2052 | 16645,2537
2053 | 16645,12666
2054 | 16645,10260
2055 | 16645,15097
2056 | 16645,1288
2057 | 16645,1400
2058 | 16645,13989
2059 | 16645,14157
2060 | 16645,6957
2061 | 16645,11124
2062 | 16645,7838
2063 | 16645,1761
2064 | 16645,14239
2065 | 16645,16530
2066 | 16645,4793
2067 | 16645,11050
2068 | 16645,20377
2069 | 16645,1837
2070 | 16645,11598
2071 | 16645,2179
2072 | 16645,740
2073 | 16645,1839
2074 | 16645,14791
2075 | 16645,23502
2076 | 16645,17555
2077 | 16645,17480
2078 | 16645,10934
2079 | 16645,5251
2080 | 16645,14192
2081 | 16645,24201
2082 | 16645,16876
2083 | 16645,11566
2084 | 16645,9773
2085 | 16645,1446
2086 | 16645,17624
2087 | 16645,2293
2088 | 16645,7013
2089 | 16645,24158
2090 | 16645,11495
2091 | 16645,21970
2092 | 16645,8047
2093 | 16645,9312
2094 | 16645,1707
2095 | 16645,20605
2096 | 16645,18810
2097 | 16645,6654
2098 | 16645,23142
2099 | 16645,17342
2100 | 16645,237
2101 | 16645,396
2102 | 16645,4036
2103 | 16645,12647
2104 | 16645,4741
2105 | 16645,1560
2106 | 16645,17857
2107 | 16645,19279
2108 | 16645,17610
2109 | 16645,6327
2110 | 16645,10611
2111 | 16645,4274
2112 | 16645,24041
2113 | 16645,18965
2114 | 16645,22887
2115 | 16645,10770
2116 | 16646,4000
2117 | 16647,13878
2118 | 16647,12542
2119 | 16648,4655
2120 | 16648,21874
2121 | 16648,9518
2122 | 16649,17936
2123 | 16650,16161
2124 | 16651,24453
2125 | 16652,9733
2126 | 16653,2480
2127 | 16654,22917
2128 | 16655,22406
2129 | 16656,20561
2130 | 16657,13845
2131 | 16658,19743
2132 | 16658,9093
2133 | 16659,13956
2134 | 16659,7281
2135 | 16660,17482
2136 | 16661,19638
2137 | 16662,15515
2138 | 16663,13280
2139 | 16664,7469
2140 | 16664,14346
2141 | 16664,2259
2142 | 16665,4504
2143 | 16666,24330
2144 | 16667,8908
2145 | 16668,4825
2146 | 16669,20617
2147 | 16670,9034
2148 | 16671,10333
2149 | 16672,18
2150 | 16672,18898
2151 | 16673,14222
2152 | 16674,565
2153 | 16674,24996
2154 | 16674,19580
2155 | 16674,7411
2156 | 16674,12778
2157 | 16674,20027
2158 | 16674,23773
2159 | 16674,15825
2160 | 16674,20012
2161 | 16674,20016
2162 | 16674,11901
2163 | 16674,5596
2164 | 16674,2883
2165 | 16675,5701
2166 | 16675,9327
2167 | 16676,14627
2168 | 16677,15683
2169 | 16678,22755
2170 | 16678,6230
2171 | 16678,21688
2172 | 16678,10083
2173 | 16678,15991
2174 | 16678,22904
2175 | 16678,2659
2176 | 16678,19032
2177 | 16679,12443
2178 | 16680,19210
2179 | 16681,15609
2180 | 16682,9327
2181 | 16683,14624
2182 | 16683,21720
2183 | 16684,9007
2184 | 16684,18040
2185 | 16684,10873
2186 | 16684,3512
2187 | 16684,12494
2188 | 16685,5765
2189 | 16686,12883
2190 | 16686,21620
2191 | 16686,1350
2192 | 16686,22301
2193 | 16686,21430
2194 | 16686,11062
2195 | 16687,5317
2196 | 16688,21548
2197 | 16688,22128
2198 | 16688,18538
2199 | 16688,20561
2200 | 16688,24922
2201 | 16688,2089
2202 | 16689,4516
2203 | 16689,10511
2204 | 16689,21111
2205 | 16690,9327
2206 | 16691,9660
2207 | 16692,14561
2208 | 16693,8885
2209 | 16694,9159
2210 | 16695,21632
2211 | 16696,22529
2212 | 16697,22702
2213 | 16697,22720
2214 | 16697,22531
2215 | 16697,22606
2216 | 16697,18576
2217 | 16697,12103
2218 | 16697,22820
2219 | 16697,22040
2220 | 16698,13579
2221 | 16698,1659
2222 | 16699,3158
2223 | 16700,10121
2224 | 16701,9327
2225 | 16702,13845
2226 | 16702,9472
2227 | 16703,9359
2228 | 16703,11753
2229 | 16703,16168
2230 | 16703,1253
2231 | 16704,15410
2232 | 16705,19050
2233 | 16706,18738
2234 | 16706,15773
2235 | 16706,19027
2236 | 16706,5677
2237 | 16707,9327
2238 | 16708,1308
2239 | 16708,4673
2240 | 16708,17275
2241 | 16708,12559
2242 | 16709,6539
2243 | 16710,18858
2244 | 16711,9624
2245 | 16711,18763
2246 | 16711,90
2247 | 16712,11721
2248 | 16712,8674
2249 | 16713,2439
2250 | 16714,9483
2251 | 16714,2836
2252 | 16714,9159
2253 | 16715,21758
2254 | 16716,1253
2255 | 16717,22974
2256 | 16718,10598
2257 | 16718,22167
2258 | 16718,129
2259 | 16719,19455
2260 | 16720,12214
2261 | 16720,11119
2262 | 16720,24007
2263 | 16720,2262
2264 | 16721,20661
2265 | 16722,16996
2266 | 16723,16012
2267 | 16724,17709
2268 | 16724,9587
2269 | 16725,19098
2270 | 16725,19889
2271 | 16726,5893
2272 | 16727,4526
2273 | 16728,10112
2274 | 16729,13158
2275 | 16730,2330
2276 | 16730,4477
2277 | 16730,9845
2278 | 16730,16255
2279 | 16730,2686
2280 | 16730,708
2281 | 16730,3712
2282 | 16730,16723
2283 | 16730,23491
2284 | 16730,12306
2285 | 16730,7743
2286 | 16730,7846
2287 | 16730,17930
2288 | 16730,3021
2289 | 16730,21853
2290 | 16730,3093
2291 | 16730,2819
2292 | 16730,2645
2293 | 16730,21795
2294 | 16730,13819
2295 | 16730,23357
2296 | 16730,16751
2297 | 16730,8703
2298 | 16730,3675
2299 | 16730,8680
2300 | 16730,5025
2301 | 16731,20561
2302 | 16732,11888
2303 | 16733,12884
2304 | 16733,15799
2305 | 16733,3609
2306 | 16733,15017
2307 | 16734,21891
2308 | 16735,6428
2309 | 16736,19050
2310 | 16736,21831
2311 | 16736,18124
2312 | 16737,8417
2313 | 16738,22500
2314 | 16738,12177
2315 | 16738,3845
2316 | 16738,17585
2317 | 16738,13896
2318 | 16738,13901
2319 | 16738,3846
2320 | 16738,6591
2321 | 16738,12810
2322 | 16738,7254
2323 | 16738,10584
2324 | 16738,4927
2325 | 16738,19277
2326 | 16738,11905
2327 | 16738,15162
2328 | 16738,13711
2329 | 16738,16789
2330 | 16738,19889
2331 | 16738,14338
2332 | 16738,21716
2333 | 16738,18046
2334 | 16738,5723
2335 | 16738,19582
2336 | 16738,24448
2337 | 16738,22579
2338 | 16738,2121
2339 | 16738,8004
2340 | 16738,925
2341 | 16739,12435
2342 | 16740,20155
2343 | 16741,8592
2344 | 16742,9159
2345 | 16743,5308
2346 | 16744,13605
2347 | 16745,6041
2348 | 16746,7405
2349 | 16746,3101
2350 | 16746,16616
2351 | 16746,23774
2352 | 16746,18879
2353 | 16746,18989
2354 | 16746,3024
2355 | 16746,16779
2356 | 16746,15122
2357 | 16746,22830
2358 | 16746,24429
2359 | 16746,18028
2360 | 16746,22608
2361 | 16746,14723
2362 | 16747,15757
2363 | 16748,11959
2364 | 16748,779
2365 | 16748,6577
2366 | 16748,3583
2367 | 16748,20795
2368 | 16749,20437
2369 | 16749,15768
2370 | 16750,4615
2371 | 16750,7556
2372 | 16750,7291
2373 | 16750,998
2374 | 16750,15394
2375 | 16750,22932
2376 | 16751,15990
2377 | 16751,13477
2378 | 16751,15038
2379 | 16751,17230
2380 | 16751,5996
2381 | 16751,7414
2382 | 16751,2125
2383 | 16751,4330
2384 | 16751,13945
2385 | 16751,9496
2386 | 16751,3356
2387 | 16751,6967
2388 | 16751,13668
2389 | 16751,14607
2390 | 16751,13671
2391 | 16751,13684
2392 | 16751,15668
2393 | 16751,14804
2394 | 16751,24509
2395 | 16751,1077
2396 | 16751,10870
2397 | 16751,14044
2398 | 16751,6477
2399 | 16751,24110
2400 | 16751,20573
2401 | 16751,6186
2402 | 16751,13457
2403 | 16751,23872
2404 | 16752,4163
2405 | 16752,6455
2406 | 16753,2574
2407 | 16754,1079
2408 | 16754,19889
2409 | 16755,19954
2410 | 16756,4194
2411 | 16757,24166
2412 | 16758,19210
2413 | 16759,14549
2414 | 16760,24600
2415 | 16760,22868
2416 | 16760,7563
2417 | 16760,10825
2418 | 16760,16002
2419 | 16760,12228
2420 | 16760,13341
2421 | 16761,4254
2422 | 16762,7832
2423 | 16763,18040
2424 | 16764,22813
2425 | 16765,17837
2426 | 16766,16815
2427 | 16766,13208
2428 | 16766,15136
2429 | 16767,10838
2430 | 16768,14746
2431 | 16769,19929
2432 | 16770,5040
2433 | 16771,471
2434 | 16772,3158
2435 | 16773,4034
2436 | 16774,8004
2437 | 16775,12576
2438 | 16776,9083
2439 | 16776,19815
2440 | 16776,18296
2441 | 16776,2356
2442 | 16776,6526
2443 | 16776,4206
2444 | 16776,9549
2445 | 16776,13769
2446 | 16777,14554
2447 | 16778,1147
2448 | 16778,8219
2449 | 16778,20762
2450 | 16778,10154
2451 | 16778,24737
2452 | 16779,12882
2453 | 16780,9327
2454 | 16781,23523
2455 | 16781,10396
2456 | 16781,15717
2457 | 16782,18758
2458 | 16782,8587
2459 | 16782,21087
2460 | 16783,14627
2461 | 16784,14225
2462 | 16785,24429
2463 | 16786,17069
2464 | 16786,19524
2465 | 16786,24457
2466 | 16786,1644
2467 | 16786,7330
2468 | 16787,9410
2469 | 16788,18305
2470 | 16789,16547
2471 | 16790,10112
2472 | 16791,15661
2473 | 16791,17645
2474 | 16792,9543
2475 | 16793,11537
2476 | 16794,5237
2477 | 16795,8991
2478 | 16796,18830
2479 | 16797,15360
2480 | 16798,12481
2481 | 16798,9072
2482 | 16798,5936
2483 | 16799,6041
2484 | 16800,10707
2485 | 16801,17837
2486 | 16802,12455
2487 | 16802,18351
2488 | 16802,12341
2489 | 16802,58
2490 | 16803,5218
2491 | 16804,18010
2492 | 16805,22330
2493 | 16806,19009
2494 | 16807,20000
2495 | 16807,19570
2496 | 16807,6822
2497 | 16807,13657
2498 | 16807,23106
2499 | 16807,8600
2500 | 16807,7434
2501 | 16807,9255
2502 | 16808,21831
2503 | 16809,2014
2504 | 16810,1504
2505 | 16810,10924
2506 | 16810,24426
2507 | 16810,8504
2508 | 16811,16793
2509 | 16811,3566
2510 | 16811,19459
2511 | 16812,23284
2512 | 16813,1430
2513 | 16814,8594
2514 | 16815,17444
2515 | 16816,8168
2516 | 16817,21258
2517 | 16818,18689
2518 | 16819,1121
2519 | 16820,1002
2520 | 16821,25048
2521 | 16821,11404
2522 | 16822,3717
2523 | 16822,11538
2524 | 16823,11572
2525 | 16824,10245
2526 | 16825,19753
2527 | 16825,19703
2528 | 16825,10635
2529 | 16825,9872
2530 | 16825,20117
2531 | 16825,6515
2532 | 16825,15280
2533 | 16825,24824
2534 | 16825,15134
2535 | 16825,24701
2536 | 16825,1047
2537 | 16825,22711
2538 | 16825,21187
2539 | 16825,6510
2540 | 16825,7380
2541 | 16825,4049
2542 | 16825,755
2543 | 16825,14762
2544 | 16825,6340
2545 | 16825,9822
2546 | 16825,3262
2547 | 16825,2182
2548 | 16825,2423
2549 | 16826,14788
2550 | 16826,5564
2551 | 16827,10101
2552 | 16827,3106
2553 | 16828,21272
2554 | 16829,9327
2555 | 16829,20561
2556 | 16829,16732
2557 | 16830,14346
2558 | 16831,6635
2559 | 16831,14142
2560 | 16831,16662
2561 | 16831,22561
2562 | 16831,12577
2563 | 16831,17456
2564 | 16831,2554
2565 | 16831,9865
2566 | 16831,5620
2567 | 16831,1055
2568 | 16831,2599
2569 | 16831,13881
2570 | 16831,735
2571 | 16831,4675
2572 | 16831,1795
2573 | 16831,10137
2574 | 16831,5166
2575 | 16831,19889
2576 | 16831,14602
2577 | 16831,5997
2578 | 16831,12709
2579 | 16831,2447
2580 | 16831,2646
2581 | 16832,21250
2582 | 16833,13888
2583 | 16833,2562
2584 | 16833,870
2585 | 16833,3560
2586 | 16833,640
2587 | 16834,5206
2588 | 16835,2759
2589 | 16835,1160
2590 | 16835,13134
2591 | 16835,7206
2592 | 16836,11145
2593 | 16836,18875
2594 | 16836,15194
2595 | 16836,17705
2596 | 16837,23024
2597 | 16838,19050
2598 | 16839,9142
2599 | 16840,19267
2600 | 16841,23024
2601 | 16842,4745
2602 | 16843,6727
2603 | 16843,24276
2604 | 16844,20457
2605 | 16844,19954
2606 | 16845,3910
2607 | 16845,24475
2608 | 16846,7755
2609 | 16847,3158
2610 | 16847,14072
2611 | 16847,4218
2612 | 16848,21411
2613 | 16848,7040
2614 | 16848,20881
2615 | 16849,2742
2616 | 16850,1212
2617 | 16851,7654
2618 | 16852,931
2619 | 16853,12841
2620 | 16854,17779
2621 | 16855,10459
2622 | 16855,4048
2623 | 16855,5390
2624 | 16855,9195
2625 | 16855,12562
2626 | 16855,15539
2627 | 16855,24935
2628 | 16855,5779
2629 | 16855,2977
2630 | 16856,4457
2631 | 16856,8535
2632 | 16856,752
2633 | 16856,8443
2634 | 16856,16920
2635 | 16856,9877
2636 | 16856,4421
2637 | 16856,18167
2638 | 16856,1988
2639 | 16856,17863
2640 | 16856,12685
2641 | 16856,17213
2642 | 16856,15308
2643 | 16857,19524
2644 | 16857,6359
2645 | 16857,25063
2646 | 16857,471
2647 | 16857,21877
2648 | 16857,8795
2649 | 16857,15149
2650 | 16857,8415
2651 | 16857,16013
2652 | 16857,2246
2653 | 16857,15447
2654 | 16857,4433
2655 | 16857,7855
2656 | 16857,23600
2657 | 16857,2857
2658 | 16857,13959
2659 | 16857,6319
2660 | 16857,9991
2661 | 16857,12351
2662 | 16857,17410
2663 | 16857,4675
2664 | 16857,24512
2665 | 16858,22391
2666 | 16859,8645
2667 | 16860,10430
2668 | 16861,7220
2669 | 16862,19387
2670 | 16863,19050
2671 | 16864,20000
2672 | 16865,7510
2673 | 16866,15905
2674 | 16866,5973
2675 | 16866,7015
2676 | 16866,18928
2677 | 16866,19671
2678 | 16866,23590
2679 | 16866,13
2680 | 16866,8563
2681 | 16867,22331
2682 | 16868,2250
2683 | 16869,7730
2684 | 16869,19994
2685 | 16870,16604
2686 | 16870,18577
2687 | 16871,24041
2688 | 16872,16594
2689 | 16872,7630
2690 | 16873,6428
2691 | 16874,21339
2692 | 16875,14331
2693 | 16876,19401
2694 | 16876,21249
2695 | 16876,17689
2696 | 16876,8393
2697 | 16876,22467
2698 | 16876,1444
2699 | 16876,7791
2700 | 16876,15133
2701 | 16876,10919
2702 | 16876,20608
2703 | 16876,20086
2704 | 16877,15556
2705 | 16877,21339
2706 | 16878,14624
2707 | 16879,24827
2708 | 16879,6053
2709 | 16880,9789
2710 | 16881,7744
2711 | 16882,1253
2712 | 16883,4558
2713 | 16884,8320
2714 | 16884,19079
2715 | 16885,15876
2716 | 16886,13266
2717 | 16887,22161
2718 | 16888,23390
2719 | 16889,13120
2720 | 16890,1711
2721 | 16891,1183
2722 | 16891,14474
2723 | 16892,18689
2724 | 16893,6633
2725 | 16893,18029
2726 | 16894,9373
2727 | 16895,7783
2728 | 16896,4504
2729 | 16897,18689
2730 | 16898,7870
2731 | 16899,24158
2732 | 16900,2517
2733 | 16901,13082
2734 | 16902,14346
2735 | 16903,14331
2736 | 16904,16373
2737 | 16904,13303
2738 | 16904,19744
2739 | 16904,3261
2740 | 16904,4182
2741 | 16905,19263
2742 | 16906,20928
2743 | 16906,16136
2744 | 16907,891
2745 | 16907,17893
2746 | 16907,11376
2747 | 16907,20724
2748 | 16907,17168
2749 | 16908,23632
2750 | 16909,13910
2751 | 16910,22556
2752 | 16911,1737
2753 | 16911,18398
2754 | 16912,14410
2755 | 16913,9168
2756 | 16914,5040
2757 | 16914,23168
2758 | 16915,9327
2759 | 16916,10919
2760 | 16916,23308
2761 | 16916,2456
2762 | 16916,20473
2763 | 16916,15936
2764 | 16917,22129
2765 | 16918,7634
2766 | 16919,2857
2767 | 16919,20174
2768 | 16919,9754
2769 | 16919,21336
2770 | 16919,2447
2771 | 16919,11871
2772 | 16920,7138
2773 | 16921,23295
2774 | 16922,21393
2775 | 16923,7237
2776 | 16924,10398
2777 | 16925,19009
2778 | 16926,21411
2779 | 16927,19758
2780 | 16927,14410
2781 | 16928,19275
2782 | 16928,14652
2783 | 16928,12853
2784 | 16929,14541
2785 | 16930,15694
2786 | 16930,21627
2787 | 16931,17597
2788 | 16932,23024
2789 | 16933,5003
2790 | 16933,1099
2791 | 16933,4224
2792 | 16933,22196
2793 | 16933,22076
2794 | 16933,4171
2795 | 16933,2454
2796 | 16933,10085
2797 | 16933,17874
2798 | 16933,8013
2799 | 16933,19285
2800 | 16934,10643
2801 | 16935,24958
2802 | 16936,13082
2803 | 16937,11572
2804 | 16938,5194
2805 | 16939,805
2806 | 16940,21639
2807 | 16941,9327
2808 | 16942,15082
2809 | 16942,18559
2810 | 16943,9327
2811 | 16944,5308
2812 | 16945,19866
2813 | 16945,21954
2814 | 16946,23389
2815 | 16946,24968
2816 | 16946,1577
2817 | 16946,12275
2818 | 16946,12575
2819 | 16946,22128
2820 | 16946,20561
2821 | 16946,22458
2822 | 16946,6041
2823 | 16946,1122
2824 | 16946,21302
2825 | 16947,18689
2826 | 16948,17633
2827 | 16949,20042
2828 | 16950,11429
2829 | 16950,11876
2830 | 16950,8156
2831 | 16950,9307
2832 | 16950,17516
2833 | 16950,11512
2834 | 16951,19210
2835 | 16952,10306
2836 | 16953,10436
2837 | 16954,6920
2838 | 16955,22487
2839 | 16956,18470
2840 | 16957,12287
2841 | 16958,23575
2842 | 16959,10389
2843 | 16959,7191
2844 | 16960,18596
2845 | 16961,20652
2846 | 16962,21723
2847 | 16962,363
2848 | 16963,6378
2849 | 16964,10545
2850 | 16965,18927
2851 | 16965,12269
2852 | 16965,8302
2853 | 16965,6550
2854 | 16966,18689
2855 | 16967,8115
2856 | 16968,23060
2857 | 16969,19432
2858 | 16969,21392
2859 | 16970,23145
2860 | 16971,11716
2861 | 16972,19182
2862 | 16972,13608
2863 | 16973,17859
2864 | 16973,4282
2865 | 16974,4359
2866 | 16975,13085
2867 | 16976,4669
2868 | 16977,741
2869 | 16978,13845
2870 | 16978,15114
2871 | 16979,15166
2872 | 16979,3442
2873 | 16979,20561
2874 | 16979,22128
2875 | 16979,960
2876 | 16979,3849
2877 | 16979,10351
2878 | 16979,3400
2879 | 16979,14624
2880 | 16979,16815
2881 | 16979,22877
2882 | 16979,5025
2883 | 16979,18928
2884 | 16979,17903
2885 | 16979,3040
2886 | 16979,3028
2887 | 16979,8563
2888 | 16979,2089
2889 | 16979,7659
2890 | 16979,9711
2891 | 16979,18117
2892 | 16979,9291
2893 | 16979,22666
2894 | 16979,24956
2895 | 16979,14434
2896 | 16979,17303
2897 | 16979,10641
2898 | 16979,5231
2899 | 16979,266
2900 | 16979,16976
2901 | 16979,19388
2902 | 16979,17930
2903 | 16979,9288
2904 | 16979,21998
2905 | 16979,20431
2906 | 16979,868
2907 | 16979,538
2908 | 16979,20150
2909 | 16979,15747
2910 | 16979,6949
2911 | 16979,18943
2912 | 16979,8908
2913 | 16979,2529
2914 | 16979,6450
2915 | 16980,21509
2916 | 16980,24962
2917 | 16981,12333
2918 | 16982,11074
2919 | 16983,19112
2920 | 16984,22489
2921 | 16984,17870
2922 | 16985,2456
2923 | 16986,14069
2924 | 16987,4390
2925 | 16987,10430
2926 | 16987,1253
2927 | 16987,15673
2928 | 16987,20525
2929 | 16987,5040
2930 | 16987,9327
2931 | 16987,4359
2932 | 16987,12763
2933 | 16987,20827
2934 | 16987,11793
2935 | 16987,361
2936 | 16987,8585
2937 | 16987,22587
2938 | 16987,15492
2939 | 16987,21969
2940 | 16988,11887
2941 | 16989,9266
2942 | 16990,20313
2943 | 16991,14529
2944 | 16992,9120
2945 | 16992,3289
2946 | 16993,12275
2947 | 16994,3158
2948 | 16995,23123
2949 | 16995,14440
2950 | 16995,16365
2951 | 16996,5291
2952 | 16997,13448
2953 | 16998,11604
2954 | 16998,16161
2955 | 16998,12545
2956 | 16999,23039
2957 | 17000,20849
2958 | 17001,19101
2959 | 17001,16103
2960 | 17001,7618
2961 | 17002,21411
2962 | 17003,2365
2963 | 17004,18531
2964 | 17005,18707
2965 | 17005,7395
2966 | 17005,2482
2967 | 17005,2481
2968 | 17005,5961
2969 | 17005,7650
2970 | 17005,24657
2971 | 17005,12754
2972 | 17005,12196
2973 | 17005,9051
2974 | 17005,17022
2975 | 17005,1058
2976 | 17005,17639
2977 | 17005,8674
2978 | 17005,3082
2979 | 17005,9472
2980 | 17005,7780
2981 | 17005,16321
2982 | 17005,17633
2983 | 17005,23187
2984 | 17005,2774
2985 | 17005,2391
2986 | 17005,7824
2987 | 17005,17915
2988 | 17005,7813
2989 | 17005,7329
2990 | 17005,9166
2991 | 17005,15965
2992 | 17005,16424
2993 | 17005,4322
2994 | 17005,2034
2995 | 17005,17077
2996 | 17005,2456
2997 | 17005,14696
2998 | 17005,12988
2999 | 17005,16804
3000 | 17005,24994
3001 | 17005,6164
3002 | 17006,22128
3003 | 17007,22210
3004 | 17008,11421
3005 | 17008,14408
3006 | 17009,6428
3007 | 17009,20617
3008 | 17009,16161
3009 | 17009,19224
3010 | 17009,20216
3011 | 17009,4218
3012 | 17010,6316
3013 | 17010,9251
3014 | 17010,17794
3015 | 17010,15604
3016 | 17010,4296
3017 | 17011,17300
3018 | 17012,22989
3019 | 17012,19564
3020 | 17012,13711
3021 | 17013,11335
3022 | 17014,9538
3023 | 17015,6428
3024 | 17016,13411
3025 | 17016,12540
3026 | 17016,2850
3027 | 17016,20561
3028 | 17016,17040
3029 | 17016,230
3030 | 17016,23993
3031 | 17016,20814
3032 | 17016,19000
3033 | 17016,1014
3034 | 17016,12647
3035 | 17016,17835
3036 | 17016,21834
3037 | 17016,22128
3038 | 17016,9705
3039 | 17016,23523
3040 | 17016,20134
3041 | 17016,23487
3042 | 17016,12998
3043 | 17016,22754
3044 | 17017,10121
3045 | 17018,2729
3046 | 17019,19954
3047 | 17019,1699
3048 | 17019,13577
3049 | 17019,8072
3050 | 17019,16414
3051 | 17020,10398
3052 | 17021,10707
3053 | 17022,4791
3054 | 17022,14923
3055 | 17022,24286
3056 | 17023,16572
3057 | 17024,9021
3058 | 17024,17785
3059 | 17025,12499
3060 | 17025,1413
3061 | 17025,21575
3062 | 17025,9148
3063 | 17026,24262
3064 | 17026,17181
3065 | 17027,17155
3066 | 17027,23291
3067 | 17027,15010
3068 | 17027,23861
3069 | 17027,20566
3070 | 17027,8062
3071 | 17027,20782
3072 | 17027,23925
3073 | 17027,6304
3074 | 17028,12481
3075 | 17029,17490
3076 | 17030,21958
3077 | 17031,17227
3078 | 17032,15336
3079 | 17033,10707
3080 | 17034,12494
3081 | 17034,5323
3082 | 17035,9162
3083 | 17036,24707
3084 | 17037,18399
3085 | 17038,19682
3086 | 17039,7674
3087 | 17039,16808
3088 | 17039,23984
3089 | 17039,24459
3090 | 17039,7667
3091 | 17039,5774
3092 | 17039,14175
3093 | 17040,21295
3094 | 17041,17964
3095 | 17042,9158
3096 | 17043,6314
3097 | 17044,11150
3098 | 17045,18736
3099 | 17045,4789
3100 | 17045,19644
3101 | 17045,11847
3102 | 17046,18689
3103 | 17047,16384
3104 | 17048,5308
3105 | 17049,4297
3106 | 17050,12481
3107 | 17051,15511
3108 | 17052,19638
3109 | 17052,10769
3110 | 17053,16718
3111 | 17054,15688
3112 | 17054,13811
3113 | 17054,104
3114 | 17055,19923
3115 | 17056,10799
3116 | 17057,24546
3117 | 17058,20617
3118 | 17059,23620
3119 | 17059,4032
3120 | 17059,3321
3121 | 17059,10325
3122 | 17059,13989
3123 | 17059,4675
3124 | 17059,20881
3125 | 17060,10088
3126 | 17061,19492
3127 | 17062,5141
3128 | 17062,12470
3129 | 17062,13254
3130 | 17062,5142
3131 | 17062,22969
3132 | 17062,8444
3133 | 17062,8054
3134 | 17062,9889
3135 | 17063,5820
3136 | 17064,18763
3137 | 17065,8266
3138 | 17066,13920
3139 | 17066,17135
3140 | 17066,12145
3141 | 17066,12108
3142 | 17066,1838
3143 | 17066,23568
3144 | 17066,2200
3145 | 17066,23151
3146 | 17066,3065
3147 | 17067,19729
3148 | 17068,6993
3149 | 17069,18301
3150 | 17070,13910
3151 | 17071,2838
3152 | 17072,2688
3153 | 17073,10769
3154 | 17074,18729
3155 | 17074,14006
3156 | 17075,22356
3157 | 17076,20216
3158 | 17077,15808
3159 | 17077,2246
3160 | 17077,9159
3161 | 17078,8308
3162 | 17078,23575
3163 | 17078,17221
3164 | 17079,17263
3165 | 17080,13273
3166 | 17080,23518
3167 | 17080,9190
3168 | 17080,22702
3169 | 17080,11157
3170 | 17080,4355
3171 | 17081,20277
3172 | 17081,24864
3173 | 17082,106
3174 | 17082,9271
3175 | 17082,1927
3176 | 17083,6993
3177 | 17084,5463
3178 | 17084,21933
3179 | 17084,24473
3180 | 17084,16161
3181 | 17085,23161
3182 | 17086,18849
3183 | 17087,16479
3184 | 17087,24201
3185 | 17087,22815
3186 | 17087,21957
3187 | 17087,22705
3188 | 17087,9705
3189 | 17087,12065
3190 | 17087,11453
3191 | 17087,18117
3192 | 17087,16564
3193 | 17087,21932
3194 | 17087,17837
3195 | 17087,8610
3196 | 17087,4032
3197 | 17087,3321
3198 | 17087,2246
3199 | 17087,3777
3200 | 17087,9305
3201 | 17088,21336
3202 | 17089,18689
3203 | 17090,18429
3204 | 17090,8701
3205 | 17090,12466
3206 | 17091,8290
3207 | 17092,4272
3208 | 17093,3815
3209 | 17094,5792
3210 | 17095,18587
3211 | 17096,18689
3212 | 17097,17145
3213 | 17098,20144
3214 | 17099,18624
3215 | 17100,14513
3216 | 17101,1332
3217 | 17101,9043
3218 | 17101,16735
3219 | 17101,6565
3220 | 17101,7925
3221 | 17101,10849
3222 | 17102,24765
3223 | 17103,23590
3224 | 17104,7158
3225 | 17105,12545
3226 | 17106,2088
3227 | 17106,24376
3228 | 17106,20144
3229 | 17106,4459
3230 | 17107,6813
3231 | 17108,13475
3232 | 17109,17722
3233 | 17110,14157
3234 | 17110,15699
3235 | 17110,17164
3236 | 17111,14346
3237 | 17112,6428
3238 | 17113,21226
3239 | 17113,19497
3240 | 17113,6277
3241 | 17113,15254
3242 | 17113,6142
3243 | 17113,22945
3244 | 17113,6735
3245 | 17113,364
3246 | 17113,20492
3247 | 17113,12545
3248 | 17113,1819
3249 | 17113,23534
3250 | 17113,12261
3251 | 17113,22549
3252 | 17113,6018
3253 | 17113,1253
3254 | 17113,1970
3255 | 17113,17374
3256 | 17113,114
3257 | 17113,21933
3258 | 17113,22053
3259 | 17113,18583
3260 | 17113,22530
3261 | 17113,4450
3262 | 17113,12712
3263 | 17113,6624
3264 | 17113,21854
3265 | 17113,23107
3266 | 17113,21438
3267 | 17113,8896
3268 | 17113,17667
3269 | 17113,548
3270 | 17113,3802
3271 | 17113,20845
3272 | 17113,15433
3273 | 17113,8076
3274 | 17113,5012
3275 | 17113,23581
3276 | 17113,2387
3277 | 17113,24473
3278 | 17113,7732
3279 | 17113,22818
3280 | 17113,12756
3281 | 17114,24078
3282 | 17115,11537
3283 | 17116,4218
3284 | 17117,6854
3285 | 17117,1253
3286 | 17117,14514
3287 | 17117,10079
3288 | 17117,916
3289 | 17117,5450
3290 | 17117,16311
3291 | 17117,11566
3292 | 17117,2729
3293 | 17117,12879
3294 | 17117,19715
3295 | 17117,10071
3296 | 17117,5762
3297 | 17117,10911
3298 | 17117,7219
3299 | 17117,24473
3300 | 17118,7686
3301 | 17119,17585
3302 | 17120,19741
3303 | 17121,6588
3304 | 17122,18211
3305 | 17123,24968
3306 | 17124,14072
3307 | 17125,9158
3308 | 17126,234
3309 | 17126,12230
3310 | 17127,18689
3311 | 17128,343
3312 | 17129,3188
3313 | 17129,4441
3314 | 17129,4300
3315 | 17130,19725
3316 | 17130,16593
3317 | 17130,13914
3318 | 17130,17708
3319 | 17131,5015
3320 | 17132,10971
3321 | 17132,7999
3322 | 17133,3452
3323 | 17134,21828
3324 | 17135,11527
3325 | 17136,6602
3326 | 17137,18689
3327 | 17138,9327
3328 | 17139,7813
3329 | 17139,12193
3330 | 17139,18544
3331 | 17139,19613
3332 | 17139,13845
3333 | 17139,9408
3334 | 17139,7066
3335 | 17139,23152
3336 | 17139,2034
3337 | 17139,5206
3338 | 17140,19491
3339 | 17140,612
3340 | 17140,11871
3341 | 17141,22265
3342 | 17142,22390
3343 | 17143,104
3344 | 17144,18583
3345 | 17145,7471
3346 | 17146,4589
3347 | 17146,14199
3348 | 17146,9032
3349 | 17146,20613
3350 | 17146,2222
3351 | 17146,22712
3352 | 17146,22335
3353 | 17146,6115
3354 | 17147,4581
3355 | 17148,21957
3356 | 17149,20000
3357 | 17150,20011
3358 | 17151,10860
3359 | 17152,18029
3360 | 17153,17837
3361 | 17154,19954
3362 | 17155,18010
3363 | 17156,22005
3364 | 17157,4218
3365 | 17158,4249
3366 | 17158,11586
3367 | 17159,12453
3368 | 17160,6962
3369 | 17161,9579
3370 | 17161,16461
3371 | 17162,5150
3372 | 17162,17241
3373 | 17162,7728
3374 | 17163,22242
3375 | 17164,23293
3376 | 17164,21683
3377 | 17164,18984
3378 | 17164,8718
3379 | 17164,7853
3380 | 17164,7852
3381 | 17164,13837
3382 | 17164,23112
3383 | 17165,2276
3384 | 17166,24512
3385 | 17167,22420
3386 | 17167,19237
3387 | 17168,7192
3388 | 17169,6029
3389 | 17170,17180
3390 | 17170,4989
3391 | 17171,22705
3392 | 17171,20125
3393 | 17172,8027
3394 | 17173,14410
3395 | 17174,20058
3396 | 17175,21087
3397 | 17175,22042
3398 | 17175,22084
3399 | 17175,7040
3400 | 17175,24922
3401 | 17176,22330
3402 | 17177,19741
3403 | 17178,10937
3404 | 17179,21339
3405 | 17180,7980
3406 | 17180,2268
3407 | 17180,13006
3408 | 17180,4818
3409 | 17181,9826
3410 | 17182,14280
3411 | 17183,24276
3412 | 17184,20000
3413 | 17185,19753
3414 | 17186,534
3415 | 17186,12781
3416 | 17186,19296
3417 | 17187,16677
3418 | 17188,17837
3419 | 17188,15808
3420 | 17188,963
3421 | 17188,12065
3422 | 17189,20967
3423 | 17190,1231
3424 | 17190,1234
3425 | 17190,18260
3426 | 17190,22059
3427 | 17191,14723
3428 | 17192,186
3429 | 17193,24726
3430 | 17194,9162
3431 | 17195,8500
3432 | 17195,12432
3433 | 17196,53
3434 | 17197,3135
3435 | 17198,845
3436 | 17198,16972
3437 | 17199,23179
3438 | 17199,1666
3439 | 17199,390
3440 | 17200,17179
3441 | 17200,21446
3442 | 17200,8732
3443 | 17200,10471
3444 | 17200,22705
3445 | 17200,18317
3446 | 17200,24511
3447 | 17200,7256
3448 | 17200,13145
3449 | 17200,24503
3450 | 17200,17480
3451 | 17200,1370
3452 | 17200,12387
3453 | 17200,11461
3454 | 17200,20430
3455 | 17200,740
3456 | 17200,6486
3457 | 17201,6665
3458 | 17202,17977
3459 | 17202,5002
3460 | 17202,21663
3461 | 17202,3920
3462 | 17202,18524
3463 | 17203,9714
3464 | 17204,18879
3465 | 17205,19455
3466 | 17206,14072
3467 | 17207,8674
3468 | 17207,8223
3469 | 17207,2774
3470 | 17207,8634
3471 | 17207,143
3472 | 17207,1212
3473 | 17207,13993
3474 | 17207,21274
3475 | 17208,7240
3476 | 17208,13230
3477 | 17209,21250
3478 | 17210,15635
3479 | 17211,5269
3480 | 17212,10389
3481 | 17213,15396
3482 | 17214,2788
3483 | 17215,13006
3484 | 17216,14280
3485 | 17216,18729
3486 | 17217,22815
3487 | 17218,14554
3488 | 17219,14157
3489 | 17219,6080
3490 | 17219,21587
3491 | 17220,18351
3492 | 17221,12998
3493 | 17222,9312
3494 | 17223,13787
3495 | 17223,19210
3496 | 17223,9169
3497 | 17223,14927
3498 | 17223,4446
3499 | 17223,3645
3500 | 17223,1536
3501 | 17223,10530
3502 | 17223,18480
3503 | 17223,20907
3504 | 17223,12434
3505 | 17223,22556
3506 | 17223,19586
3507 | 17223,16235
3508 | 17223,23966
3509 | 17223,1322
3510 | 17223,6201
3511 | 17223,264
3512 | 17223,13991
3513 | 17223,15122
3514 | 17223,11852
3515 | 17223,13611
3516 | 17223,6768
3517 | 17223,10907
3518 | 17223,14157
3519 | 17223,7357
3520 | 17223,17146
3521 | 17223,10098
3522 | 17223,14923
3523 | 17223,19671
3524 | 17223,6654
3525 | 17223,2009
3526 | 17223,17573
3527 | 17223,17593
3528 | 17223,4147
3529 | 17223,3546
3530 | 17223,8502
3531 | 17223,13065
3532 | 17223,18538
3533 | 17223,13990
3534 | 17223,6908
3535 | 17223,6864
3536 | 17223,22784
3537 | 17223,13466
3538 | 17223,22588
3539 | 17223,5007
3540 | 17224,2178
3541 | 17225,1298
3542 | 17226,9150
3543 | 17227,11655
3544 | 17228,15780
3545 | 17228,802
3546 | 17228,16198
3547 | 17229,18689
3548 | 17230,6041
3549 | 17231,9166
3550 | 17231,5206
3551 | 17232,18624
3552 | 17233,23703
3553 | 17234,22110
3554 | 17235,9166
3555 | 17236,21595
3556 | 17236,12290
3557 | 17236,21848
3558 | 17237,8263
3559 | 17238,13845
3560 | 17238,22316
3561 | 17239,10262
3562 | 17240,14618
3563 | 17241,24968
3564 | 17241,22814
3565 | 17241,6428
3566 | 17241,9393
3567 | 17241,14280
3568 | 17242,19800
3569 | 17243,10627
3570 | 17244,20216
3571 | 17245,1253
3572 | 17246,7783
3573 | 17246,21177
3574 | 17247,18689
3575 | 17248,19326
3576 | 17249,17257
3577 | 17250,14782
3578 | 17251,7674
3579 | 17252,12091
3580 | 17252,18351
3581 | 17253,15757
3582 | 17253,9327
3583 | 17253,6049
3584 | 17254,9327
3585 | 17255,2748
3586 | 17255,2820
3587 | 17256,22529
3588 | 17257,22776
3589 | 17257,24589
3590 | 17257,18503
3591 | 17257,13803
3592 | 17258,90
3593 | 17259,12545
3594 | 17260,3649
3595 | 17260,21047
3596 | 17260,16358
3597 | 17260,21594
3598 | 17260,13431
3599 | 17260,20626
3600 | 17260,21343
3601 | 17260,1889
3602 | 17260,2612
3603 | 17261,1206
3604 | 17261,142
3605 | 17261,22514
3606 | 17261,10111
3607 | 17261,8202
3608 | 17261,18622
3609 | 17262,10542
3610 | 17262,4144
3611 | 17262,7464
3612 | 17262,5959
3613 | 17263,23168
3614 | 17264,22784
3615 | 17265,21046
3616 | 17265,21701
3617 | 17265,55
3618 | 17265,19009
3619 | 17265,1088
3620 | 17265,188
3621 | 17265,10814
3622 | 17265,7240
3623 | 17266,22128
3624 | 17267,20661
3625 | 17268,14612
3626 | 17268,1544
3627 | 17268,3775
3628 | 17269,22960
3629 | 17270,19705
3630 | 17270,18823
3631 | 17271,9327
3632 | 17272,2409
3633 | 17273,8807
3634 | 17274,1253
3635 | 17274,9327
3636 | 17275,17837
3637 | 17276,261
3638 | 17277,21339
3639 | 17278,404
3640 | 17279,18282
3641 | 17280,2857
3642 | 17280,23487
3643 | 17280,14568
3644 | 17281,6607
3645 | 17281,3718
3646 | 17281,19552
3647 | 17281,1219
3648 | 17281,19453
3649 | 17281,20338
3650 | 17281,4738
3651 | 17281,1903
3652 | 17281,15721
3653 | 17281,16663
3654 | 17281,22299
3655 | 17281,2091
3656 | 17281,4405
3657 | 17281,16507
3658 | 17281,21795
3659 | 17281,14189
3660 | 17281,8556
3661 | 17281,6320
3662 | 17281,21808
3663 | 17281,9176
3664 | 17281,19191
3665 | 17281,14649
3666 | 17281,25077
3667 | 17282,13011
3668 | 17282,20561
3669 | 17282,2494
3670 | 17282,22591
3671 | 17282,21932
3672 | 17282,1105
3673 | 17282,3636
3674 | 17282,19124
3675 | 17282,10112
3676 | 17282,10121
3677 | 17282,4860
3678 | 17282,22588
3679 | 17282,1924
3680 | 17282,16072
3681 | 17282,12540
3682 | 17282,11566
3683 | 17283,11124
3684 | 17284,20042
3685 | 17284,19996
3686 | 17285,2456
3687 | 17286,8469
3688 | 17287,9327
3689 | 17288,16314
3690 | 17289,11204
3691 | 17289,838
3692 | 17289,16311
3693 | 17290,16305
3694 | 17291,11000
3695 | 17292,21464
3696 | 17293,14390
3697 | 17294,9332
3698 | 17295,19934
3699 | 17295,653
3700 | 17295,9809
3701 | 17296,21614
3702 | 17296,6018
3703 | 17296,20216
3704 | 17296,6428
3705 | 17296,1201
3706 | 17296,6019
3707 | 17297,2820
3708 | 17298,11663
3709 | 17299,18689
3710 | 17300,165
3711 | 17301,2759
3712 | 17301,1160
3713 | 17301,13134
3714 | 17301,20228
3715 | 17301,15767
3716 | 17302,25073
3717 | 17302,25072
3718 | 17303,12958
3719 | 17303,8908
3720 | 17304,13167
3721 | 17304,13605
3722 | 17304,104
3723 | 17304,6018
3724 | 17304,4654
3725 | 17304,15430
3726 | 17304,17825
3727 | 17304,14422
3728 | 17304,22399
3729 | 17304,21004
3730 | 17304,22824
3731 | 17304,2941
3732 | 17304,23275
3733 | 17304,4792
3734 | 17305,15958
3735 | 17305,22627
3736 | 17306,14001
3737 | 17306,20597
3738 | 17307,17040
3739 | 17308,18877
3740 | 17308,1036
3741 | 17308,18027
3742 | 17308,10706
3743 | 17308,16547
3744 | 17309,13085
3745 | 17310,22818
3746 | 17311,3777
3747 | 17312,15513
3748 | 17313,7029
3749 | 17313,15370
3750 | 17314,12061
3751 | 17314,4722
3752 | 17315,13685
3753 | 17316,23759
3754 | 17316,12489
3755 | 17316,12017
3756 | 17317,18689
3757 | 17318,19923
3758 | 17318,15337
3759 | 17318,22537
3760 | 17318,19700
3761 | 17318,1458
3762 | 17318,5527
3763 | 17319,23285
3764 | 17320,7405
3765 | 17321,6428
3766 | 17322,16904
3767 | 17322,18582
3768 | 17323,4697
3769 | 17324,3923
3770 | 17324,10271
3771 | 17325,6041
3772 | 17326,16426
3773 | 17327,14869
3774 | 17327,17046
3775 | 17327,6093
3776 | 17327,20942
3777 | 17327,21613
3778 | 17327,22410
3779 | 17327,20314
3780 | 17327,18651
3781 | 17328,24462
3782 | 17328,14671
3783 | 17329,8500
3784 | 17330,6312
3785 | 17330,23060
3786 | 17331,19237
3787 | 17331,23024
3788 | 17331,19889
3789 | 17332,10371
3790 | 17333,9327
3791 | 17334,5973
3792 | 17334,20897
3793 | 17335,19145
3794 | 17336,12145
3795 | 17336,24085
3796 | 17337,13836
3797 | 17337,10796
3798 | 17338,19656
3799 | 17338,8136
3800 | 17339,22157
3801 | 17339,1928
3802 | 17339,12575
3803 | 17339,20561
3804 | 17339,17837
3805 | 17339,5266
3806 | 17340,502
3807 | 17340,19966
3808 | 17341,11376
3809 | 17342,15505
3810 | 17343,11871
3811 | 17344,18689
3812 | 17345,20042
3813 | 17346,7787
3814 | 17347,12058
3815 | 17348,461
3816 | 17349,18238
3817 | 17350,5965
3818 | 17351,19889
3819 | 17352,3364
3820 | 17353,14276
3821 | 17354,2997
3822 | 17354,20438
3823 | 17355,9714
3824 | 17356,22824
3825 | 17357,4341
3826 | 17357,8049
3827 | 17358,21464
3828 | 17359,21464
3829 | 17360,2729
3830 | 17360,9599
3831 | 17361,19796
3832 | 17361,18689
3833 | 17362,3944
3834 | 17363,0
3835 | 17364,19113
3836 | 17364,19203
3837 | 17365,19019
3838 | 17366,5393
3839 | 17367,19153
3840 | 17368,4359
3841 | 17369,19224
3842 | 17369,4648
3843 | 17369,7832
3844 | 17369,8398
3845 | 17369,11376
3846 | 17369,19715
3847 | 17370,7510
3848 | 17371,18555
3849 | 17371,20227
3850 | 17371,8337
3851 | 17372,3158
3852 | 17373,13965
3853 | 17373,6744
3854 | 17373,24073
3855 | 17373,22917
3856 | 17373,9039
3857 | 17374,29
3858 | 17375,20561
3859 | 17376,14990
3860 | 17377,11108
3861 | 17377,3833
3862 | 17377,23766
3863 | 17378,11806
3864 | 17379,7907
3865 | 17379,6366
3866 | 17379,10279
3867 | 17380,16555
3868 | 17381,5300
3869 | 17381,4282
3870 | 17381,1646
3871 | 17381,17211
3872 | 17381,1012
3873 | 17382,8807
3874 | 17382,3158
3875 | 17383,18624
3876 | 17384,10389
3877 | 17385,17659
3878 | 17386,7510
3879 | 17386,11108
3880 | 17386,22878
3881 | 17386,8789
3882 | 17387,9346
3883 | 17388,15472
3884 | 17389,21339
3885 | 17390,15554
3886 | 17391,12275
3887 | 17392,7490
3888 | 17393,642
3889 | 17393,2870
3890 | 17393,9089
3891 | 17393,9392
3892 | 17393,19136
3893 | 17393,9434
3894 | 17393,12102
3895 | 17393,12887
3896 | 17393,2525
3897 | 17393,1482
3898 | 17393,1439
3899 | 17394,23789
3900 | 17395,11721
3901 | 17396,9167
3902 | 17397,9327
3903 | 17398,18710
3904 | 17399,18689
3905 | 17400,21828
3906 | 17401,16866
3907 | 17401,15865
3908 | 17402,6319
3909 | 17403,15876
3910 | 17403,11040
3911 | 17404,21464
3912 | 17405,9208
3913 | 17405,20335
3914 | 17406,11699
3915 | 17407,14507
3916 | 17407,18549
3917 | 17408,21339
3918 | 17409,14077
3919 | 17409,24546
3920 | 17410,24664
3921 | 17411,15555
3922 | 17411,13673
3923 | 17411,24278
3924 | 17412,12024
3925 | 17413,13469
3926 | 17413,21743
3927 | 17413,16654
3928 | 17414,17343
3929 | 17414,10612
3930 | 17415,11572
3931 | 17416,4218
3932 | 17417,4152
3933 | 17417,21535
3934 | 17418,6041
3935 | 17419,19900
3936 | 17420,12481
3937 | 17420,20783
3938 | 17420,11655
3939 | 17420,21470
3940 | 17420,2866
3941 | 17420,10348
3942 | 17421,11572
3943 | 17422,9159
3944 | 17423,20042
3945 | 17424,13982
3946 | 17425,17688
3947 | 17426,21953
3948 | 17426,12713
3949 | 17426,10931
3950 | 17426,4677
3951 | 17426,14480
3952 | 17426,17952
3953 | 17427,18428
3954 | 17427,21793
3955 | 17427,21439
3956 | 17427,17579
3957 | 17428,9265
3958 | 17429,10898
3959 | 17430,17792
3960 | 17431,24679
3961 | 17431,21826
3962 | 17431,1660
3963 | 17431,1384
3964 | 17431,11606
3965 | 17431,18685
3966 | 17432,3942
3967 | 17433,20000
3968 | 17434,16161
3969 | 17435,4232
3970 | 17436,21734
3971 | 17436,23558
3972 | 17437,18634
3973 | 17438,21254
3974 | 17438,8229
3975 | 17438,612
3976 | 17438,6626
3977 | 17439,19240
3978 | 17440,7352
3979 | 17441,15505
3980 | 17441,11914
3981 | 17441,2512
3982 | 17441,17458
3983 | 17441,11699
3984 | 17441,24707
3985 | 17442,9356
3986 | 17442,19923
3987 | 17443,12785
3988 | 17443,6556
3989 | 17443,5669
3990 | 17443,16972
3991 | 17443,1827
3992 | 17443,845
3993 | 17443,11929
3994 | 17443,21487
3995 | 17443,18610
3996 | 17443,18341
3997 | 17444,18996
3998 | 17445,11358
3999 | 17445,8961
4000 | 17446,15627
4001 | 17446,10760
4002 | 17447,16168
4003 | 17447,21079
4004 | 17447,20376
4005 | 17447,8379
4006 | 17448,20150
4007 | 17448,1370
4008 | 17448,12416
4009 | 17448,6497
4010 | 17448,17835
4011 | 17448,872
4012 | 17448,24962
4013 | 17448,21932
4014 | 17448,18286
4015 | 17448,10884
4016 | 17448,22686
4017 | 17448,14346
4018 | 17448,1265
4019 | 17448,3342
4020 | 17448,17875
4021 | 17448,20697
4022 | 17448,24276
4023 | 17448,6498
4024 | 17448,23583
4025 | 17448,10606
4026 | 17448,17803
4027 | 17448,20546
4028 | 17449,22330
4029 | 17450,20561
4030 | 17451,19796
4031 | 17452,22529
4032 | 17452,4675
4033 | 17452,4218
4034 | 17452,16544
4035 | 17452,19009
4036 | 17452,18925
4037 | 17452,23946
4038 | 17453,10715
4039 | 17454,8991
4040 | 17455,21589
4041 | 17456,16925
4042 | 17456,14489
4043 | 17456,20337
4044 | 17456,18758
4045 | 17456,17179
4046 | 17457,12998
4047 | 17458,3106
4048 | 17459,5560
4049 | 17460,6041
4050 | 17461,5040
4051 | 17462,20319
4052 | 17463,24683
4053 | 17463,10280
4054 | 17464,21140
4055 | 17465,2860
4056 | 17466,1253
4057 | 17467,11705
4058 | 17468,1711
4059 | 17469,23024
4060 | 17469,14624
4061 | 17470,1544
4062 | 17471,22352
4063 | 17472,21824
4064 | 17472,14446
4065 | 17473,10873
4066 | 17474,13
4067 | 17475,18634
4068 | 17476,12455
4069 | 17477,8610
4070 | 17478,644
4071 | 17479,10839
4072 | 17479,3060
4073 | 17479,3128
4074 | 17479,15337
4075 | 17479,18062
4076 | 17479,1057
4077 | 17479,11713
4078 | 17479,1937
4079 | 17479,24968
4080 | 17479,18372
4081 | 17479,2456
4082 | 17479,2068
4083 | 17479,23839
4084 | 17479,2589
4085 | 17479,13795
4086 | 17479,20894
4087 | 17479,53
4088 | 17479,23793
4089 | 17480,20216
4090 | 17481,4675
4091 | 17481,6319
4092 | 17481,11126
4093 | 17481,19492
4094 | 17481,14270
4095 | 17481,21322
4096 | 17482,12100
4097 | 17483,9327
4098 | 17484,7424
4099 | 17485,22371
4100 | 17485,23110
4101 | 17485,20539
4102 | 17485,24245
4103 | 17486,19210
4104 | 17487,815
4105 | 17487,22665
4106 | 17487,10780
4107 | 17488,8305
4108 | 17488,18396
4109 | 17488,17480
4110 | 17488,12158
4111 | 17488,17554
4112 | 17488,19889
4113 | 17488,2663
4114 | 17488,11671
4115 | 17488,1979
4116 | 17488,12410
4117 | 17488,19903
4118 | 17488,2582
4119 | 17489,19210
4120 | 17490,23152
4121 | 17491,24453
4122 | 17492,22337
4123 | 17493,357
4124 | 17493,12591
4125 | 17493,6778
4126 | 17493,18536
4127 | 17493,3702
4128 | 17494,11245
4129 | 17494,19875
4130 | 17494,3598
4131 | 17494,19546
4132 | 17494,22291
4133 | 17494,5359
4134 | 17494,442
4135 | 17494,19835
4136 | 17494,8068
4137 | 17494,23892
4138 | 17494,9020
4139 | 17494,6389
4140 | 17494,18956
4141 | 17494,1282
4142 | 17494,20298
4143 | 17494,16633
4144 | 17494,19886
4145 | 17494,14463
4146 | 17494,8426
4147 | 17494,19081
4148 | 17494,3842
4149 | 17494,7221
4150 | 17494,3143
4151 | 17494,19753
4152 | 17494,8424
4153 | 17494,16632
4154 | 17494,8362
4155 | 17494,22761
4156 | 17494,14350
4157 | 17494,6183
4158 | 17494,8628
4159 | 17494,5762
4160 | 17495,18351
4161 | 17495,18634
4162 | 17495,16077
4163 | 17495,18632
4164 | 17495,10175
4165 | 17495,10407
4166 | 17495,11095
4167 | 17495,6596
4168 | 17495,15896
4169 | 17495,4457
4170 | 17495,22741
4171 | 17495,23617
4172 | 17495,16641
4173 | 17495,15571
4174 | 17495,1781
4175 | 17495,7995
4176 | 17495,2358
4177 | 17495,7348
4178 | 17495,7253
4179 | 17495,9171
4180 | 17495,24547
4181 | 17495,19969
4182 | 17495,5731
4183 | 17495,3672
4184 | 17495,13912
4185 | 17495,23598
4186 | 17495,13414
4187 | 17495,710
4188 | 17495,9086
4189 | 17495,14526
4190 | 17495,13616
4191 | 17495,22083
4192 | 17495,9123
4193 | 17495,19914
4194 | 17495,16561
4195 | 17495,10449
4196 | 17496,18689
4197 | 17497,22842
4198 | 17498,5496
4199 | 17499,261
4200 | 17499,14927
4201 | 17499,22330
4202 | 17499,24298
4203 | 17499,7552
4204 | 17499,17170
4205 | 17500,19954
4206 | 17501,14359
4207 | 17501,21450
4208 | 17501,22319
4209 | 17501,17837
4210 | 17502,22556
4211 | 17503,9159
4212 | 17504,13025
4213 | 17505,9159
4214 | 17506,6953
4215 | 17507,6925
4216 | 17508,19321
4217 | 17508,415
4218 | 17508,8268
4219 | 17508,17847
4220 | 17508,9290
4221 | 17508,5730
4222 | 17509,18689
4223 | 17510,21677
4224 | 17510,8285
4225 | 17510,16870
4226 | 17510,18104
4227 | 17510,10913
4228 | 17510,11587
4229 | 17510,4992
4230 | 17510,12390
4231 | 17510,15815
4232 | 17510,10575
4233 | 17510,2805
4234 | 17510,19678
4235 | 17510,13978
4236 | 17510,9631
4237 | 17510,2801
4238 | 17510,23319
4239 | 17510,4193
4240 | 17510,21529
4241 | 17510,14095
4242 | 17510,8336
4243 | 17510,9011
4244 | 17511,6406
4245 | 17511,22098
4246 | 17512,7349
4247 | 17513,7028
4248 | 17514,23485
4249 | 17515,2741
4250 | 17516,18689
4251 | 17517,21932
4252 | 17518,17066
4253 | 17519,1644
4254 | 17519,16320
4255 | 17519,2080
4256 | 17519,20814
4257 | 17519,20807
4258 | 17519,9383
4259 | 17519,12655
4260 | 17519,16015
4261 | 17519,10481
4262 | 17520,23636
4263 | 17521,5956
4264 | 17521,15420
4265 | 17521,5561
4266 | 17521,23164
4267 | 17521,8531
4268 | 17521,7299
4269 | 17522,15729
4270 | 17522,12046
4271 | 17522,16377
4272 | 17522,3524
4273 | 17522,4828
4274 | 17522,16978
4275 | 17522,7376
4276 | 17523,14366
4277 | 17523,17899
4278 | 17523,12489
4279 | 17523,12474
4280 | 17523,6424
4281 | 17523,16787
4282 | 17523,17232
4283 | 17524,18649
4284 | 17524,23846
4285 | 17525,9855
4286 | 17525,13156
4287 | 17525,9452
4288 | 17525,8465
4289 | 17526,7973
4290 | 17527,22041
4291 | 17528,10318
4292 | 17529,10268
4293 | 17529,21214
4294 | 17529,741
4295 | 17530,6145
4296 | 17530,3169
4297 | 17530,3895
4298 | 17530,24913
4299 | 17530,23764
4300 | 17530,11241
4301 | 17530,10932
4302 | 17530,8975
4303 | 17530,16442
4304 | 17530,703
4305 | 17530,6238
4306 | 17530,7460
4307 | 17530,4554
4308 | 17530,21985
4309 | 17530,6941
4310 | 17531,16076
4311 | 17532,10112
4312 | 17533,4337
4313 | 17534,1844
4314 | 17534,3396
4315 | 17534,23755
4316 | 17534,2104
4317 | 17534,9590
4318 | 17535,14836
4319 | 17536,10718
4320 | 17537,1653
4321 | 17538,19093
4322 | 17539,21820
4323 | 17540,14624
4324 | 17541,18351
4325 | 17542,1253
4326 | 17542,13473
4327 | 17542,10079
4328 | 17542,21196
4329 | 17542,21927
4330 | 17542,7832
4331 | 17543,3130
4332 | 17544,14489
4333 | 17544,14865
4334 | 17544,16925
4335 | 17544,19672
4336 | 17544,20604
4337 | 17544,23590
4338 | 17544,14746
4339 | 17545,18864
4340 | 17545,10643
4341 | 17545,3815
4342 | 17546,9159
4343 | 17547,0
4344 | 17548,13982
4345 | 17549,18010
4346 | 17550,8055
4347 | 17550,1144
4348 | 17550,18831
4349 | 17551,8480
4350 | 17551,428
4351 | 17552,16779
4352 | 17552,24266
4353 | 17553,923
4354 | 17553,19153
4355 | 17553,16711
4356 | 17554,5832
4357 | 17555,8940
4358 | 17556,17797
4359 | 17556,14185
4360 | 17556,15357
4361 | 17556,24673
4362 | 17556,24359
4363 | 17557,9158
4364 | 17558,9445
4365 | 17558,10084
4366 | 17558,9418
4367 | 17559,24851
4368 | 17560,19210
4369 | 17561,23766
4370 | 17562,19090
4371 | 17563,6279
4372 | 17564,14532
4373 | 17565,2479
4374 | 17565,9671
4375 | 17565,16726
4376 | 17565,18404
4377 | 17565,24749
4378 | 17565,908
4379 | 17565,948
4380 | 17565,10436
4381 | 17565,5459
4382 | 17565,18862
4383 | 17566,8620
4384 | 17567,7533
4385 | 17568,24993
4386 | 17569,6018
4387 | 17570,4218
4388 | 17571,16076
4389 | 17572,894
4390 | 17573,21339
4391 | 17573,13874
4392 | 17573,104
4393 | 17574,14346
4394 | 17575,10769
4395 | 17576,10117
4396 | 17576,11205
4397 | 17577,4218
4398 | 17578,20966
4399 | 17579,22406
4400 | 17580,5545
4401 | 17580,17583
4402 | 17580,16757
4403 | 17580,1212
4404 | 17580,10692
4405 | 17580,4620
4406 | 17580,16266
4407 | 17581,7200
4408 | 17581,8155
4409 | 17582,9152
4410 | 17583,12753
4411 | 17584,22796
4412 | 17584,12283
4413 | 17585,21097
4414 | 17586,12275
4415 | 17586,2357
4416 | 17586,23583
4417 | 17587,17598
4418 | 17587,8971
4419 | 17588,2422
4420 | 17589,22090
4421 | 17589,3835
4422 | 17590,8610
4423 | 17591,7510
4424 | 17592,5548
4425 | 17592,2500
4426 | 17592,4383
4427 | 17593,9159
4428 | 17594,7510
4429 | 17594,169
4430 | 17594,23766
4431 | 17595,23500
4432 | 17596,9327
4433 | 17597,7651
4434 | 17598,20307
4435 | 17599,5516
4436 | 17600,6403
4437 | 17600,14651
4438 | 17601,12526
4439 | 17601,19524
4440 | 17601,6364
4441 | 17602,8840
4442 | 17602,5308
4443 | 17603,18266
4444 | 17604,6602
4445 | 17605,20500
4446 | 17606,21831
4447 | 17606,1253
4448 | 17607,23342
4449 | 17608,18351
4450 | 17608,1253
4451 | 17609,10643
4452 | 17610,6993
4453 | 17611,23828
4454 | 17611,22010
4455 | 17611,21470
4456 | 17612,8885
4457 | 17613,3522
4458 | 17613,10369
4459 | 17614,20229
4460 | 17614,6065
4461 | 17615,18351
4462 | 17615,6854
4463 | 17616,4697
4464 | 17617,4476
4465 | 17618,17959
4466 | 17619,22033
4467 | 17620,21339
4468 | 17621,5308
4469 | 17622,13982
4470 | 17623,7651
4471 | 17624,12125
4472 | 17625,14715
4473 | 17625,22272
4474 | 17625,14788
4475 | 17625,20617
4476 | 17625,16161
4477 | 17625,12455
4478 | 17625,18504
4479 | 17626,5077
4480 | 17627,22316
4481 | 17628,21339
4482 | 17629,20644
4483 | 17630,9086
4484 | 17631,21339
4485 | 17632,8563
4486 | 17633,19889
4487 | 17634,15501
4488 | 17634,14411
4489 | 17634,20639
4490 | 17634,8276
4491 | 17634,13752
4492 | 17634,11089
4493 | 17634,7999
4494 | 17634,13004
4495 | 17635,7297
4496 | 17636,19050
4497 | 17637,10545
4498 | 17638,12684
4499 | 17639,9312
4500 | 17640,7787
4501 | 17641,18351
4502 | 17642,5511
4503 | 17642,8662
4504 | 17642,12636
4505 | 17642,6045
4506 | 17642,19014
4507 | 17643,3158
4508 | 17643,10121
4509 | 17643,22154
4510 | 17643,2788
4511 | 17643,23606
4512 | 17643,13918
4513 | 17643,5040
4514 | 17644,19241
4515 | 17644,7683
4516 | 17644,17832
4517 | 17645,14504
4518 | 17645,22529
4519 | 17646,22265
4520 | 17647,16969
4521 | 17648,12175
4522 | 17649,23247
4523 | 17650,7140
4524 | 17650,4965
4525 | 17650,16849
4526 | 17650,12425
4527 | 17650,12967
4528 | 17650,23107
4529 | 17650,16558
4530 | 17650,20336
4531 | 17650,8076
4532 | 17650,12545
4533 | 17650,8583
4534 | 17650,10650
4535 | 17650,21933
4536 | 17650,20720
4537 | 17650,21307
4538 | 17650,20940
4539 | 17650,23828
4540 | 17650,8355
4541 | 17650,22276
4542 | 17650,17802
4543 | 17650,8813
4544 | 17650,18917
4545 | 17650,21329
4546 | 17650,9746
4547 | 17650,23989
4548 | 17651,5295
4549 | 17652,6041
4550 | 17653,12472
4551 | 17654,12696
4552 | 17654,6854
4553 | 17655,15170
4554 | 17655,8049
4555 | 17655,1238
4556 | 17655,4341
4557 | 17655,24044
4558 | 17656,22175
4559 | 17656,21124
4560 | 17656,22268
4561 | 17656,15939
4562 | 17656,12516
4563 | 17656,25055
4564 | 17656,21401
4565 | 17656,16383
4566 | 17656,1589
4567 | 17656,23877
4568 | 17656,14574
4569 | 17656,18690
4570 | 17656,433
4571 | 17656,23907
4572 | 17656,22344
4573 | 17656,3334
4574 | 17656,22857
4575 | 17656,11084
4576 | 17656,4007
4577 | 17656,14439
4578 | 17656,13384
4579 | 17657,20216
4580 | 17658,7624
4581 | 17659,2788
4582 | 17659,22204
4583 | 17660,5040
4584 | 17661,8983
4585 | 17662,16641
4586 | 17662,2977
4587 | 17662,2551
4588 | 17663,18875
4589 | 17664,18849
4590 | 17665,5548
4591 | 17665,2820
4592 | 17665,16291
4593 | 17665,230
4594 | 17665,10902
4595 | 17665,13491
4596 | 17665,14624
4597 | 17665,9433
4598 | 17666,18710
4599 | 17667,10420
4600 | 17668,19866
4601 | 17669,24378
4602 | 17670,13932
4603 | 17670,14624
4604 | 17670,24503
4605 | 17670,10288
4606 | 17671,3354
4607 | 17672,16
4608 | 17672,15413
4609 | 17672,2604
4610 | 17672,4406
4611 | 17672,20785
4612 | 17673,8499
4613 | 17673,12185
4614 | 17673,4349
4615 | 17673,10718
4616 | 17674,2404
4617 | 17675,15352
4618 | 17675,24973
4619 | 17675,20550
4620 | 17676,3513
4621 | 17676,49
4622 | 17677,9788
4623 | 17678,4359
4624 | 17679,6050
4625 | 17680,23805
4626 | 17680,8929
4627 | 17680,23169
4628 | 17680,22187
4629 | 17680,21813
4630 | 17680,24796
4631 | 17680,3353
4632 | 17680,8382
4633 | 17680,10300
4634 | 17681,1160
4635 | 17681,2759
4636 | 17681,13134
4637 | 17681,7383
4638 | 17681,21756
4639 | 17681,1582
4640 | 17682,14125
4641 | 17683,502
4642 | 17683,3175
4643 | 17683,24209
4644 | 17683,5646
4645 | 17683,1633
4646 | 17684,18351
4647 | 17685,10873
4648 | 17686,11744
4649 | 17687,12575
4650 | 17687,11399
4651 | 17687,1197
4652 | 17688,19491
4653 | 17689,14349
4654 | 17690,24583
4655 | 17691,7663
4656 | 17692,14470
4657 | 17693,6219
4658 | 17694,16161
4659 | 17695,4160
4660 | 17696,3374
4661 | 17697,23961
4662 | 17698,18665
4663 | 17698,5791
4664 | 17699,12726
4665 | 17699,14700
4666 | 17699,8935
4667 | 17700,17665
4668 | 17701,23590
4669 | 17701,17837
4670 | 17702,21831
4671 | 17702,14851
4672 | 17703,15073
4673 | 17704,20561
4674 | 17705,20430
4675 | 17706,21734
4676 | 17707,1040
4677 | 17708,7510
4678 | 17709,4359
4679 | 17710,24276
4680 | 17711,6041
4681 | 17712,805
4682 | 17713,21831
4683 | 17714,9327
4684 | 17715,14186
4685 | 17716,11655
4686 | 17717,1028
4687 | 17718,17300
4688 | 17719,6364
4689 | 17720,17913
4690 | 17721,12573
4691 | 17721,3311
4692 | 17721,18538
4693 | 17722,4699
4694 | 17722,10420
4695 | 17723,11226
4696 | 17724,17576
4697 | 17725,11886
4698 | 17725,12398
4699 | 17725,8661
4700 | 17726,11231
4701 | 17727,9159
4702 | 17728,3158
4703 | 17729,20966
4704 | 17730,7006
4705 | 17730,1856
4706 | 17731,1299
4707 | 17731,1297
4708 | 17731,1455
4709 | 17732,17195
4710 | 17733,22265
4711 | 17734,2939
4712 | 17734,18659
4713 | 17735,11603
4714 | 17736,20224
4715 | 17736,18331
4716 | 17737,1158
4717 | 17737,12266
4718 | 17737,24211
4719 | 17738,11359
4720 | 17739,14472
4721 | 17739,1328
4722 | 17739,18351
4723 | 17740,19211
4724 | 17741,4924
4725 | 17742,10539
4726 | 17742,13079
4727 | 17743,395
4728 | 17743,23053
4729 | 17744,9610
4730 | 17745,19210
4731 | 17745,10611
4732 | 17745,24276
4733 | 17745,840
4734 | 17745,14153
4735 | 17745,6494
4736 | 17745,2158
4737 | 17745,8004
4738 | 17745,8560
4739 | 17745,3844
4740 | 17745,3043
4741 | 17745,24621
4742 | 17745,9154
4743 | 17745,19009
4744 | 17745,16865
4745 | 17745,17410
4746 | 17745,19671
4747 | 17745,9543
4748 | 17745,17228
4749 | 17745,16065
4750 | 17745,3401
4751 | 17745,3019
4752 | 17745,2819
4753 | 17745,14002
4754 | 17745,21420
4755 | 17745,14824
4756 | 17746,806
4757 | 17747,19796
4758 | 17748,10121
4759 | 17748,20617
4760 | 17748,14181
4761 | 17748,1253
4762 | 17749,4581
4763 | 17750,6832
4764 | 17751,19015
4765 | 17751,3917
4766 | 17751,8814
4767 | 17751,17629
4768 | 17751,17967
4769 | 17751,9927
4770 | 17751,17143
4771 | 17751,17451
4772 | 17751,6606
4773 | 17751,7057
4774 | 17751,511
4775 | 17751,14772
4776 | 17751,15676
4777 | 17751,8674
4778 | 17752,8999
4779 | 17753,17575
4780 | 17754,23788
4781 | 17754,3941
4782 | 17754,14494
4783 | 17754,6578
4784 | 17754,761
4785 | 17754,17158
4786 | 17754,16405
4787 | 17754,6715
4788 | 17754,20309
4789 | 17754,10561
4790 | 17754,22652
4791 | 17754,4179
4792 | 17754,14679
4793 | 17754,4669
4794 | 17754,15817
4795 | 17754,18343
4796 | 17754,16042
4797 | 17754,9985
4798 | 17754,18609
4799 | 17754,18340
4800 | 17754,15559
4801 | 17754,23521
4802 | 17755,3158
4803 | 17756,15727
4804 | 17757,19210
4805 | 17758,7389
4806 | 17759,2875
4807 | 17760,21339
4808 | 17761,1483
4809 | 17762,18848
4810 | 17763,994
4811 | 17763,8563
4812 | 17763,25063
4813 | 17763,1065
4814 | 17763,2620
4815 | 17763,967
4816 | 17763,17983
4817 | 17763,6327
4818 | 17763,14632
4819 | 17763,14633
4820 | 17763,20663
4821 | 17763,11297
4822 | 17763,1618
4823 | 17763,21410
4824 | 17763,7094
4825 | 17764,9327
4826 | 17765,20815
4827 | 17766,9152
4828 | 17767,9159
4829 | 17768,5177
4830 | 17769,20617
4831 | 17770,19900
4832 | 17771,10430
4833 | 17771,22128
4834 | 17772,1121
4835 | 17773,24887
4836 | 17773,10643
4837 | 17774,1591
4838 | 17775,13952
4839 | 17775,5057
4840 | 17775,14420
4841 | 17775,15045
4842 | 17775,6259
4843 | 17775,15800
4844 | 17775,4137
4845 | 17776,4077
4846 | 17776,9434
4847 | 17776,3509
4848 | 17776,764
4849 | 17776,16228
4850 | 17776,16938
4851 | 17776,9998
4852 | 17776,5073
4853 | 17776,17592
4854 | 17776,24075
4855 | 17776,24622
4856 | 17776,18758
4857 | 17776,22013
4858 | 17776,5589
4859 | 17777,2815
4860 | 17777,7355
4861 | 17777,3390
4862 | 17778,21957
4863 | 17778,4218
4864 | 17779,11045
4865 | 17780,19050
4866 | 17781,22975
4867 | 17782,0
4868 | 17783,12747
4869 | 17783,21247
4870 | 17784,20000
4871 | 17784,20042
4872 | 17785,14221
4873 | 17785,13
4874 | 17785,10810
4875 | 17785,20310
4876 | 17785,2067
4877 | 17785,968
4878 | 17785,20068
4879 | 17785,22822
4880 | 17785,7344
4881 | 17785,17837
4882 | 17785,15036
4883 | 17785,12810
4884 | 17785,1092
4885 | 17785,2188
4886 | 17785,19889
4887 | 17785,8560
4888 | 17785,8563
4889 | 17785,4860
4890 | 17785,9705
4891 | 17785,21819
4892 | 17785,397
4893 | 17785,22587
4894 | 17785,17480
4895 | 17785,1560
4896 | 17785,18649
4897 | 17785,19310
4898 | 17785,4447
4899 | 17785,396
4900 | 17786,22556
4901 | 17786,9714
4902 | 17786,20561
4903 | 17786,17040
4904 | 17786,10371
4905 | 17787,4328
4906 | 17788,20042
4907 | 17788,20038
4908 | 17789,6428
4909 | 17790,8701
4910 | 17791,19036
4911 | 17791,12823
4912 | 17791,7239
4913 | 17791,10307
4914 | 17791,3946
4915 | 17791,5433
4916 | 17792,21336
4917 | 17793,10364
4918 | 17794,2641
4919 | 17794,4965
4920 | 17794,20582
4921 | 17794,24078
4922 | 17795,23775
4923 | 17795,12249
4924 | 17795,24567
4925 | 17795,20374
4926 | 17795,8481
4927 | 17795,10092
4928 | 17795,14243
4929 | 17795,17094
4930 | 17795,7370
4931 | 17795,10915
4932 | 17795,8958
4933 | 17795,23834
4934 | 17796,19241
4935 | 17797,8676
4936 | 17798,20354
4937 | 17799,9327
4938 | 17800,16065
4939 | 17801,19889
4940 | 17802,21261
4941 | 17803,4428
4942 | 17804,19210
4943 | 17804,23590
4944 | 17805,3612
4945 | 17806,8208
4946 | 17807,14584
4947 | 17808,14618
4948 | 17809,18351
4949 | 17809,1253
4950 | 17810,6041
4951 | 17810,20561
4952 | 17810,16779
4953 | 17810,24460
4954 | 17811,2799
4955 | 17812,22394
4956 | 17812,10285
4957 | 17812,6874
4958 | 17812,5073
4959 | 17812,6287
4960 | 17812,18339
4961 | 17812,20681
4962 | 17812,2351
4963 | 17812,7445
4964 | 17812,16547
4965 | 17812,6715
4966 | 17813,23675
4967 | 17813,10710
4968 | 17813,4469
4969 | 17813,1248
4970 | 17813,17610
4971 | 17813,9320
4972 | 17813,5328
4973 | 17813,11701
4974 | 17813,21094
4975 | 17813,4811
4976 | 17813,5786
4977 | 17813,2160
4978 | 17813,2873
4979 | 17813,14130
4980 | 17813,20547
4981 | 17813,16065
4982 | 17813,16024
4983 | 17814,815
4984 | 17814,13547
4985 | 17814,5807
4986 | 17814,22457
4987 | 17814,25013
4988 | 17814,21908
4989 | 17814,7668
4990 | 17814,20400
4991 | 17814,11593
4992 | 17814,21875
4993 | 17814,12016
4994 | 17814,11536
4995 | 17815,19210
4996 | 17815,12005
4997 | 17815,16865
4998 | 17815,14560
4999 | 17815,6327
5000 | 17815,21411
5001 | 17815,5870
5002 | 17816,22406
5003 | 17817,24054
5004 | 17817,9449
5005 | 17817,8648
5006 | 17818,19455
5007 | 17819,14027
5008 | 17820,111
5009 | 17821,7033
5010 | 17822,20431
5011 | 17823,13015
5012 | 17823,9717
5013 | 17823,18495
5014 | 17823,21795
5015 | 17823,13596
5016 | 17823,7188
5017 | 17823,23132
5018 | 17823,5026
5019 | 17823,2729
5020 | 17823,7798
5021 | 17823,739
5022 | 17824,4218
5023 | 17825,4675
5024 | 17826,9043
5025 | 17826,16734
5026 | 17827,24968
5027 | 17827,18351
5028 | 17828,12297
5029 | 17829,10038
5030 | 17830,3416
5031 | 17831,17021
5032 | 17831,24959
5033 | 17831,24704
5034 | 17832,19210
5035 | 17833,22393
5036 | 17834,18898
5037 | 17835,6363
5038 | 17836,19224
5039 | 17837,24049
5040 | 17837,6428
5041 | 17838,17950
5042 | 17839,7037
5043 | 17840,19605
5044 | 17840,21556
5045 | 17840,2380
5046 | 17840,22566
5047 | 17840,1913
5048 | 17840,18721
5049 | 17840,23573
5050 | 17840,5685
5051 | 17840,22221
5052 | 17840,2647
5053 | 17841,824
5054 | 17842,6450
5055 | 17842,4675
5056 | 17843,6018
5057 | 17843,4913
5058 | 17843,12447
5059 | 17843,20929
5060 | 17843,7277
5061 | 17843,8676
5062 | 17843,7153
5063 | 17843,19633
5064 | 17843,4444
5065 | 17843,23083
5066 | 17843,1048
5067 | 17843,19192
5068 | 17843,19203
5069 | 17843,23965
5070 | 17844,4949
5071 | 17845,10327
5072 | 17845,10061
5073 | 17846,8004
5074 | 17847,11655
5075 | 17848,10112
5076 | 17848,19009
5077 | 17849,6428
5078 | 17849,894
5079 | 17850,6314
5080 | 17851,12475
5081 | 17852,9356
5082 | 17853,4675
5083 | 17854,7510
5084 | 17855,22728
5085 | 17855,4201
5086 | 17855,22409
5087 | 17855,18387
5088 | 17855,9688
5089 | 17855,22123
5090 | 17855,24787
5091 | 17855,21206
5092 | 17855,22066
5093 | 17855,9690
5094 | 17855,21419
5095 | 17855,1672
5096 | 17855,17438
5097 | 17855,21641
5098 | 17855,16264
5099 | 17855,12284
5100 | 17855,823
5101 | 17855,15289
5102 | 17855,2865
5103 | 17855,20650
5104 | 17855,3573
5105 | 17855,20000
5106 | 17855,19086
5107 | 17855,8716
5108 | 17855,14655
5109 | 17855,22446
5110 | 17855,24383
5111 | 17855,22503
5112 | 17855,4550
5113 | 17855,2263
5114 | 17855,25033
5115 | 17855,8673
5116 | 17855,3348
5117 | 17855,8576
5118 | 17855,4391
5119 | 17855,15887
5120 | 17855,8202
5121 | 17855,12437
5122 | 17855,23690
5123 | 17855,22325
5124 | 17855,5354
5125 | 17855,10262
5126 | 17855,19158
5127 | 17855,15272
5128 | 17855,24707
5129 | 17855,17291
5130 | 17855,10551
5131 | 17855,19641
5132 | 17855,21699
5133 | 17855,1650
5134 | 17855,11690
5135 | 17855,21071
5136 | 17855,16743
5137 | 17855,10545
5138 | 17855,12465
5139 | 17855,6279
5140 | 17855,18538
5141 | 17855,289
5142 | 17855,7405
5143 | 17855,16959
5144 | 17855,24113
5145 | 17855,3691
5146 | 17855,973
5147 | 17855,21669
5148 | 17855,4043
5149 | 17855,21871
5150 | 17855,19779
5151 | 17855,24065
5152 | 17855,19160
5153 | 17855,22244
5154 | 17855,972
5155 | 17855,22633
5156 | 17855,22546
5157 | 17855,21967
5158 | 17855,22491
5159 | 17855,11171
5160 | 17855,22265
5161 | 17855,21787
5162 | 17855,4577
5163 | 17855,15692
5164 | 17855,9347
5165 | 17855,16364
5166 | 17855,18533
5167 | 17855,24743
5168 | 17855,22406
5169 | 17855,16272
5170 | 17856,20042
5171 | 17857,14429
5172 | 17857,17257
5173 | 17857,24262
5174 | 17857,15494
5175 | 17857,592
5176 | 17857,11959
5177 | 17857,17181
5178 | 17857,18947
5179 | 17858,5986
5180 | 17858,2597
5181 | 17858,3492
5182 | 17859,6050
5183 | 17860,4177
5184 | 17861,20013
5185 | 17862,2857
5186 | 17862,18618
5187 | 17862,9159
5188 | 17863,22587
5189 | 17864,12998
5190 | 17865,2399
5191 | 17866,24761
5192 | 17867,14927
5193 | 17867,90
5194 | 17867,19682
5195 | 17868,16779
5196 | 17869,23147
5197 | 17870,4216
5198 | 17870,17661
5199 | 17870,13158
5200 | 17871,3944
5201 | 17872,6041
5202 | 17873,24333
5203 | 17873,16133
5204 | 17873,2103
5205 | 17873,17564
5206 | 17873,7493
5207 | 17873,7765
5208 | 17873,6731
5209 | 17873,20616
5210 | 17873,11701
5211 | 17873,20819
5212 | 17873,2736
5213 | 17873,24651
5214 | 17874,8417
5215 | 17875,15691
5216 | 17876,17988
5217 | 17876,8249
5218 | 17876,3108
5219 | 17876,5759
5220 | 17876,23541
5221 | 17876,10532
5222 | 17876,24677
5223 | 17877,6428
5224 | 17878,4218
5225 | 17879,9159
5226 | 17880,7125
5227 | 17880,20598
5228 | 17880,1644
5229 | 17880,21957
5230 | 17880,2350
5231 | 17880,21336
5232 | 17880,9042
5233 | 17880,5437
5234 | 17881,201
5235 | 17881,6898
5236 | 17882,15300
5237 | 17883,7407
5238 | 17883,23679
5239 | 17883,19614
5240 | 17883,17756
5241 | 17883,23822
5242 | 17883,18001
5243 | 17884,23837
5244 | 17885,11842
5245 | 17886,3605
5246 | 17886,14903
5247 | 17887,25073
5248 | 17888,14410
5249 | 17889,18746
5250 | 17890,8417
5251 | 17891,20834
5252 | 17892,20617
5253 | 17893,15167
5254 | 17894,1499
5255 | 17894,20354
5256 | 17895,20605
5257 | 17896,19900
5258 | 17897,19567
5259 | 17897,14844
5260 | 17897,1303
5261 | 17897,3880
5262 | 17897,22983
5263 | 17897,11462
5264 | 17897,674
5265 | 17898,23681
5266 | 17898,14793
5267 | 17899,19263
5268 | 17900,18898
5269 | 17901,9327
5270 | 17902,20966
5271 | 17902,613
5272 | 17903,894
5273 | 17903,6329
5274 | 17904,14808
5275 | 17904,12297
5276 | 17905,20616
5277 | 17905,20466
5278 | 17905,2103
5279 | 17905,17701
5280 | 17905,23356
5281 | 17905,1145
5282 | 17905,4837
5283 | 17905,13298
5284 | 17905,23406
5285 | 17905,22319
5286 | 17905,24746
5287 | 17905,2736
5288 | 17905,15753
5289 | 17905,22919
5290 | 17905,6043
5291 | 17905,13318
5292 | 17906,17630
5293 | 17906,11699
5294 | 17906,9399
5295 | 17906,5778
5296 | 17907,13365
5297 | 17908,21824
5298 | 17908,18016
5299 | 17909,2939
5300 | 17909,8609
5301 | 17910,23132
5302 | 17911,5308
5303 | 17912,8852
5304 | 17912,17576
5305 | 17913,21064
5306 | 17913,8384
5307 | 17914,5515
5308 | 17915,9327
5309 | 17916,6854
5310 | 17917,10769
5311 | 17917,7971
5312 | 17918,19638
5313 | 17918,19639
5314 | 17918,14506
5315 | 17919,22385
5316 | 17920,16339
5317 | 17920,6533
5318 | 17921,8455
5319 | 17922,11502
5320 | 17923,17535
5321 | 17923,23630
5322 | 17923,2591
5323 | 17923,23898
5324 | 17923,1721
5325 | 17923,19540
5326 | 17923,23130
5327 | 17923,13312
5328 | 17923,17539
5329 | 17923,11562
5330 | 17923,17549
5331 | 17923,4345
5332 | 17923,15391
5333 | 17923,1457
5334 | 17924,12369
5335 | 17925,12275
5336 | 17926,10871
5337 | 17927,14125
5338 | 17928,7389
5339 | 17929,12297
5340 | 17930,13788
5341 | 17930,10121
5342 | 17930,20216
5343 | 17930,10112
5344 | 17930,23148
5345 | 17930,1740
5346 | 17930,10650
5347 | 17930,3158
5348 | 17930,18763
5349 | 17930,15609
5350 | 17930,19050
5351 | 17930,16510
5352 | 17930,10930
5353 | 17930,21186
5354 | 17930,10983
5355 | 17930,1253
5356 | 17930,16425
5357 | 17930,23793
5358 | 17930,6851
5359 | 17930,11448
5360 | 17930,23716
5361 | 17930,4196
5362 | 17930,7780
5363 | 17930,15114
5364 | 17930,16186
5365 | 17930,10919
5366 | 17930,1725
5367 | 17930,18351
5368 | 17930,14991
5369 | 17930,24539
5370 | 17930,13167
5371 | 17930,18319
5372 | 17930,9327
5373 | 17930,7832
5374 | 17930,15908
5375 | 17930,7242
5376 | 17930,12713
5377 | 17930,22635
5378 | 17930,8076
5379 | 17930,7650
5380 | 17930,1390
5381 | 17930,14552
5382 | 17930,3303
5383 | 17930,2507
5384 | 17930,18775
5385 | 17930,13811
5386 | 17930,4879
5387 | 17930,18047
5388 | 17930,25036
5389 | 17930,21436
5390 | 17930,4530
5391 | 17930,4872
5392 | 17930,11338
5393 | 17930,9459
5394 | 17930,590
5395 | 17930,24970
5396 | 17930,20778
5397 | 17930,19401
5398 | 17930,17101
5399 | 17930,4484
5400 | 17930,4322
5401 | 17930,20848
5402 | 17930,24513
5403 | 17930,11336
5404 | 17930,7317
5405 | 17930,18544
5406 | 17930,21572
5407 | 17930,24294
5408 | 17930,12195
5409 | 17930,12624
5410 | 17930,12716
5411 | 17930,9216
5412 | 17930,19372
5413 | 17930,713
5414 | 17930,6819
5415 | 17930,25069
5416 | 17930,10951
5417 | 17930,332
5418 | 17930,5618
5419 | 17930,2790
5420 | 17930,22181
5421 | 17930,18710
5422 | 17930,4257
5423 | 17930,8807
5424 | 17930,10610
5425 | 17930,13645
5426 | 17930,2083
5427 | 17930,6808
5428 | 17930,4906
5429 | 17930,22360
5430 | 17930,19118
5431 | 17930,6919
5432 | 17930,2941
5433 | 17930,104
5434 | 17930,13369
5435 | 17930,11514
5436 | 17930,5278
5437 | 17930,11341
5438 | 17930,9737
5439 | 17930,22438
5440 | 17930,2842
5441 | 17930,16770
5442 | 17930,12196
5443 | 17930,12439
5444 | 17930,20731
5445 | 17930,4862
5446 | 17930,5958
5447 | 17930,554
5448 | 17930,20780
5449 | 17930,11676
5450 | 17930,5243
5451 | 17930,983
5452 | 17930,6018
5453 | 17930,14965
5454 | 17930,214
5455 | 17930,16845
5456 | 17930,1212
5457 | 17930,1945
5458 | 17930,7394
5459 | 17930,6322
5460 | 17930,16740
5461 | 17930,20379
5462 | 17930,13846
5463 | 17930,2068
5464 | 17930,18368
5465 | 17930,22076
5466 | 17930,18501
5467 | 17930,15863
5468 | 17930,7265
5469 | 17930,21250
5470 | 17930,15797
5471 | 17930,23678
5472 | 17930,11575
5473 | 17930,10689
5474 | 17930,8233
5475 | 17930,17572
5476 | 17930,6653
5477 | 17930,4190
5478 | 17930,20232
5479 | 17930,9211
5480 | 17930,21426
5481 | 17930,22152
5482 | 17930,143
5483 | 17930,7395
5484 | 17930,13993
5485 | 17930,10104
5486 | 17930,20458
5487 | 17930,13396
5488 | 17930,19289
5489 | 17930,10545
5490 | 17930,22705
5491 | 17930,3093
5492 | 17930,12987
5493 | 17930,18538
5494 | 17930,13365
5495 | 17930,897
5496 | 17930,5926
5497 | 17930,12279
5498 | 17930,15416
5499 | 17930,1686
5500 | 17930,3032
5501 | 17930,19674
5502 | 17930,12392
5503 | 17930,13566
5504 | 17931,9756
5505 | 17931,2742
5506 | 17932,1391
5507 | 17933,23305
5508 | 17934,21157
5509 | 17935,4249
5510 | 17936,22168
5511 | 17937,1691
5512 | 17938,20126
5513 | 17938,17410
5514 | 17938,23024
5515 | 17939,21339
5516 | 17940,21831
5517 | 17941,23734
5518 | 17941,5705
5519 | 17941,10350
5520 | 17941,5495
5521 | 17941,18779
5522 | 17941,1699
5523 | 17941,22303
5524 | 17941,19174
5525 | 17941,8082
5526 | 17941,15058
5527 | 17941,7617
5528 | 17941,23121
5529 | 17941,7070
5530 | 17941,21366
5531 | 17941,1280
5532 | 17941,24233
5533 | 17941,21061
5534 | 17941,9048
5535 | 17941,19195
5536 | 17941,616
5537 | 17941,16414
5538 | 17941,21775
5539 | 17941,24678
5540 | 17941,12300
5541 | 17941,9789
5542 | 17941,11909
5543 | 17941,22613
5544 | 17941,9030
5545 | 17941,7870
5546 | 17941,10215
5547 | 17941,9537
5548 | 17941,22487
5549 | 17941,22704
5550 | 17941,7509
5551 | 17941,15966
5552 | 17941,15347
5553 | 17941,19607
5554 | 17941,9206
5555 | 17941,21584
5556 | 17941,17239
5557 | 17941,826
5558 | 17941,21585
5559 | 17941,18591
5560 | 17941,6871
5561 | 17941,14971
5562 | 17941,11883
5563 | 17941,23599
5564 | 17941,18133
5565 | 17941,23763
5566 | 17941,768
5567 | 17942,20504
5568 | 17943,11143
5569 | 17944,4043
5570 | 17945,6327
5571 | 17945,1079
5572 | 17945,12380
5573 | 17946,4550
5574 | 17946,22738
5575 | 17947,17703
5576 | 17947,657
5577 | 17948,9220
5578 | 17948,4661
5579 | 17949,9244
5580 | 17949,19628
5581 | 17949,18977
5582 | 17949,19408
5583 | 17949,19873
5584 | 17949,14421
5585 | 17949,17623
5586 | 17949,10905
5587 | 17949,22917
5588 | 17949,11570
5589 | 17949,6602
5590 | 17949,2555
5591 | 17949,637
5592 | 17949,8705
5593 | 17949,14032
5594 | 17949,19690
5595 | 17949,1100
5596 | 17949,24313
5597 | 17949,2106
5598 | 17949,1123
5599 | 17949,6729
5600 | 17950,13395
5601 | 17950,10462
5602 | 17950,19476
5603 | 17950,16136
5604 | 17950,13506
5605 | 17951,22815
5606 | 17952,7988
5607 | 17953,11772
5608 | 17953,21806
5609 | 17954,14601
5610 | 17954,7412
5611 | 17955,1201
5612 | 17955,22204
5613 | 17955,10364
5614 | 17956,17837
5615 | 17956,13120
5616 | 17957,17722
5617 | 17958,2933
5618 | 17959,17600
5619 | 17959,9691
5620 | 17959,6504
5621 | 17959,5434
5622 | 17960,24968
5623 | 17961,12304
5624 | 17961,14637
5625 | 17962,19910
5626 | 17963,11643
5627 | 17963,9533
5628 | 17963,15
5629 | 17963,14368
5630 | 17963,4403
5631 | 17963,16871
5632 | 17963,3925
5633 | 17963,23247
5634 | 17963,22519
5635 | 17963,17484
5636 | 17963,15981
5637 | 17964,19036
5638 | 17964,22848
5639 | 17964,12823
5640 | 17964,7239
5641 | 17964,10307
5642 | 17964,3946
5643 | 17964,5433
5644 | 17965,14489
5645 | 17966,13521
5646 | 17966,2850
5647 | 17966,2470
5648 | 17967,16036
5649 | 17968,9265
5650 | 17968,14346
5651 | 17969,76
5652 | 17970,24960
5653 | 17971,21997
5654 | 17972,15871
5655 | 17972,19210
5656 | 17972,14227
5657 | 17972,14246
5658 | 17972,14260
5659 | 17972,12618
5660 | 17973,19682
5661 | 17973,2819
5662 | 17974,20904
5663 | 17975,4428
5664 | 17976,3845
5665 | 17976,24141
5666 | 17976,14936
5667 | 17977,13618
5668 | 17977,22548
5669 | 17977,1072
5670 | 17977,4401
5671 | 17977,5593
5672 | 17977,12070
5673 | 17978,19866
5674 | 17979,22595
5675 | 17980,1841
5676 | 17981,732
5677 | 17982,9624
5678 | 17983,18094
5679 | 17983,18551
5680 | 17984,20227
5681 | 17985,16665
5682 | 17986,10121
5683 | 17987,14015
5684 | 17987,9559
5685 | 17987,16753
5686 | 17987,4550
5687 | 17988,941
5688 | 17989,22140
5689 | 17990,3066
5690 | 17990,19203
5691 | 17990,22480
5692 | 17990,6593
5693 | 17990,12913
5694 | 17990,11592
5695 | 17990,12341
5696 | 17990,16750
5697 | 17990,6018
5698 | 17990,18827
5699 | 17991,2175
5700 | 17992,5867
5701 | 17993,4882
5702 | 17993,13260
5703 | 17993,2485
5704 | 17993,19273
5705 | 17994,17780
5706 | 17995,6428
5707 | 17996,7054
5708 | 17996,16029
5709 | 17996,21336
5710 | 17997,4498
5711 | 17997,12475
5712 | 17998,121
5713 | 17998,18351
5714 | 17999,6428
5715 | 18000,8004
5716 | 18000,23235
5717 | 18001,15744
5718 | 18001,19590
5719 | 18001,2948
5720 | 18001,4917
5721 | 18001,24032
5722 | 18002,8610
5723 | 18003,22340
5724 | 18004,13078
5725 | 18005,19889
5726 | 18005,12065
5727 | 18005,5269
5728 | 18006,17351
5729 | 18006,20663
5730 | 18007,9342
5731 | 18008,24448
5732 | 18008,22579
5733 | 18008,7409
5734 | 18008,13735
5735 | 18009,14331
5736 | 18009,15196
5737 | 18010,3272
5738 | 18011,13230
5739 | 18012,4218
5740 | 18013,19889
5741 | 18014,22000
5742 | 18015,21758
5743 | 18015,18876
5744 | 18015,23170
5745 | 18016,12987
5746 | 18017,9108
5747 | 18018,230
5748 | 18018,18230
5749 | 18018,22958
5750 | 18018,16779
5751 | 18018,10436
5752 | 18018,3220
5753 | 18018,15270
5754 | 18018,18351
5755 | 18018,6200
5756 | 18019,13362
5757 | 18019,14903
5758 | 18019,16830
5759 | 18020,15218
5760 | 18021,143
5761 | 18022,657
5762 | 18023,9266
5763 | 18024,23707
5764 | 18025,18319
5765 | 18026,7082
5766 | 18026,18718
5767 | 18026,21736
5768 | 18026,22007
5769 | 18027,2456
5770 | 18028,4760
5771 | 18029,8646
5772 | 18030,19191
5773 | 18030,10020
5774 | 18031,9513
5775 | 18031,6395
5776 | 18031,1212
5777 | 18031,9529
5778 | 18031,9265
5779 | 18031,17834
5780 | 18032,11419
5781 | 18033,9006
5782 | 18033,17645
5783 | 18033,2904
5784 | 18034,14346
5785 | 18034,9159
5786 | 18035,1975
5787 | 18036,20561
5788 | 18037,18132
5789 | 18037,10542
5790 | 18037,7491
5791 | 18038,12455
5792 | 18039,3397
5793 | 18040,15036
5794 | 18040,11927
5795 | 18040,14000
5796 | 18040,13343
5797 | 18040,17722
5798 | 18040,9464
5799 | 18040,22042
5800 | 18040,11885
5801 | 18040,2365
5802 | 18040,19503
5803 | 18040,7040
5804 | 18040,14660
5805 | 18040,10680
5806 | 18040,17161
5807 | 18040,19682
5808 | 18040,22830
5809 | 18040,11762
5810 | 18040,648
5811 | 18040,8908
5812 | 18040,10965
5813 | 18040,10860
5814 | 18040,24298
5815 | 18040,20575
5816 | 18040,19277
5817 | 18040,621
5818 | 18040,22476
5819 | 18040,24936
5820 | 18040,24276
5821 | 18040,2821
5822 | 18040,12851
5823 | 18040,23774
5824 | 18040,24922
5825 | 18040,18538
5826 | 18040,1934
5827 | 18040,13524
5828 | 18040,9517
5829 | 18040,22050
5830 | 18040,6409
5831 | 18040,3055
5832 | 18040,22490
5833 | 18041,11841
5834 | 18042,23234
5835 | 18042,21612
5836 | 18043,24613
5837 | 18043,15658
5838 | 18043,10979
5839 | 18043,11929
5840 | 18043,4164
5841 | 18043,20403
5842 | 18043,156
5843 | 18043,11426
5844 | 18043,19141
5845 | 18043,10889
5846 | 18043,11428
5847 | 18044,1078
5848 | 18044,9372
5849 | 18045,22858
5850 | 18045,182
5851 | 18045,22786
5852 | 18046,14624
5853 | 18047,20561
5854 | 18048,7510
5855 | 18049,19706
5856 | 18049,20153
5857 | 18049,21932
5858 | 18049,15808
5859 | 18049,12014
5860 | 18050,15410
5861 | 18051,23591
5862 | 18052,20989
5863 | 18053,14058
5864 | 18054,15126
5865 | 18055,15494
5866 | 18056,19889
5867 | 18057,11223
5868 | 18057,14911
5869 | 18057,1962
5870 | 18057,5138
5871 | 18058,7141
5872 | 18059,9159
5873 | 18060,9200
5874 | 18060,24010
5875 | 18060,19515
5876 | 18061,23024
5877 | 18062,9799
5878 | 18063,16161
5879 | 18064,11109
5880 | 18065,17837
5881 | 18066,3969
5882 | 18066,4253
5883 | 18066,15635
5884 | 18066,15864
5885 | 18066,21755
5886 | 18066,19929
5887 | 18066,13122
5888 | 18066,10495
5889 | 18066,319
5890 | 18066,15885
5891 | 18066,4828
5892 | 18066,2757
5893 | 18066,1610
5894 | 18066,12701
5895 | 18066,18899
5896 | 18066,15309
5897 | 18067,20561
5898 | 18068,10703
5899 | 18069,10122
5900 | 18069,24162
5901 | 18069,180
5902 | 18069,20232
5903 | 18069,1391
5904 | 18069,2952
5905 | 18069,24426
5906 | 18069,16545
5907 | 18069,24054
5908 | 18069,16636
5909 | 18069,23826
5910 | 18069,19285
5911 | 18069,9394
5912 | 18069,18528
5913 | 18069,2231
5914 | 18070,8500
5915 | 18071,24916
5916 | 18072,19900
5917 | 18072,11985
5918 | 18072,4816
5919 | 18073,13627
5920 | 18073,7239
5921 | 18073,115
5922 | 18074,20431
5923 | 18074,21819
5924 | 18074,8908
5925 | 18074,20561
5926 | 18074,6922
5927 | 18074,23846
5928 | 18074,13989
5929 | 18074,17066
5930 | 18074,23575
5931 | 18074,1644
5932 | 18074,10840
5933 | 18074,18758
5934 | 18074,9882
5935 | 18074,5808
5936 | 18074,10860
5937 | 18074,24503
5938 | 18074,6319
5939 | 18074,4479
5940 | 18074,2246
5941 | 18074,6286
5942 | 18074,7728
5943 | 18074,17607
5944 | 18074,3110
5945 | 18074,23761
5946 | 18074,261
5947 | 18074,21587
5948 | 18075,115
5949 | 18076,8610
5950 | 18077,9375
5951 | 18077,22153
5952 | 18078,18131
5953 | 18079,1692
5954 | 18079,1069
5955 | 18080,15410
5956 | 18080,21841
5957 | 18080,19014
5958 | 18081,5054
5959 | 18081,22316
5960 | 18082,11412
5961 | 18082,13779
5962 | 18082,9601
5963 | 18082,12508
5964 | 18082,13151
5965 | 18082,22760
5966 | 18082,10549
5967 | 18082,21654
5968 | 18083,9943
5969 | 18083,24146
5970 | 18083,12129
5971 | 18083,1443
5972 | 18083,4177
5973 | 18083,20383
5974 | 18083,15691
5975 | 18084,3158
5976 | 18085,19638
5977 | 18086,19564
5978 | 18087,21336
5979 | 18087,12892
5980 | 18088,8113
5981 | 18088,21062
5982 | 18088,18889
5983 | 18088,18908
5984 | 18088,5959
5985 | 18088,24256
5986 | 18088,18555
5987 | 18088,4864
5988 | 18088,5554
5989 | 18089,5820
5990 | 18089,3034
5991 | 18090,852
5992 | 18090,18351
5993 | 18091,1253
5994 | 18092,18689
5995 | 18093,22529
5996 | 18093,12930
5997 | 18093,19119
5998 | 18093,15183
5999 | 18094,23523
6000 | 18095,9398
6001 | 18096,24219
6002 | 18097,21525
6003 | 18097,397
6004 | 18098,8952
6005 | 18098,8263
6006 | 18098,21339
6007 | 18099,17444
6008 | 18100,12892
6009 | 18101,18555
6010 | 18101,7686
6011 | 18101,6464
6012 | 18102,24049
6013 | 18103,18689
6014 | 18103,19796
6015 | 18104,20216
6016 | 18105,502
6017 | 18106,20954
6018 | 18106,9955
6019 | 18106,11083
6020 | 18106,18545
6021 | 18106,1307
6022 | 18107,16076
6023 | 18108,19161
6024 | 18109,6018
6025 | 18110,20561
6026 | 18111,3788
6027 | 18112,13844
6028 | 18113,16524
6029 | 18113,16638
6030 | 18113,29
6031 | 18113,11695
6032 | 18113,15941
6033 | 18113,21802
6034 | 18113,15845
6035 | 18113,18099
6036 | 18113,24487
6037 | 18113,19410
6038 | 18114,4428
6039 | 18114,14686
6040 | 18114,331
6041 | 18114,17275
6042 | 18114,11958
6043 | 18114,15984
6044 | 18114,15186
6045 | 18114,15555
6046 | 18114,341
6047 | 18115,9108
6048 | 18116,22522
6049 | 18117,14271
6050 | 18117,20981
6051 | 18117,5858
6052 | 18117,5152
6053 | 18117,18429
6054 | 18117,1373
6055 | 18117,15918
6056 | 18117,7938
6057 | 18117,6461
6058 | 18117,10677
6059 | 18117,2653
6060 | 18117,13845
6061 | 18117,3540
6062 | 18117,18898
6063 | 18117,18351
6064 | 18117,16641
6065 | 18117,12912
6066 | 18117,1271
6067 | 18117,14973
6068 | 18117,18474
6069 | 18117,12467
6070 | 18117,15517
6071 | 18117,21198
6072 | 18117,969
6073 | 18117,11781
6074 | 18117,398
6075 | 18117,6138
6076 | 18117,3757
6077 | 18117,6420
6078 | 18117,3758
6079 | 18117,16612
6080 | 18117,2331
6081 | 18117,1754
6082 | 18117,4799
6083 | 18117,10275
6084 | 18117,1906
6085 | 18117,9551
6086 | 18117,16441
6087 | 18117,22469
6088 | 18117,11338
6089 | 18117,18515
6090 | 18117,2056
6091 | 18117,5536
6092 | 18117,5245
6093 | 18117,24974
6094 | 18117,17875
6095 | 18117,6020
6096 | 18118,7030
6097 | 18119,9159
6098 | 18120,10162
6099 | 18121,1138
6100 | 18121,1683
6101 | 18121,4833
6102 | 18121,10666
6103 | 18121,12841
6104 | 18121,9245
6105 | 18121,3716
6106 | 18121,14361
6107 | 18121,7840
6108 | 18121,3218
6109 | 18121,3622
6110 | 18121,3564
6111 | 18121,1492
6112 | 18121,7084
6113 | 18121,2315
6114 | 18121,8933
6115 | 18121,6749
6116 | 18121,19830
6117 | 18121,20184
6118 | 18121,3733
6119 | 18121,23749
6120 | 18121,3080
6121 | 18121,2446
6122 | 18121,11138
6123 | 18121,14954
6124 | 18121,673
6125 | 18121,2081
6126 | 18121,7654
6127 | 18121,20183
6128 | 18122,471
6129 | 18123,3974
6130 | 18124,3303
6131 | 18125,10110
6132 | 18125,12700
6133 | 18126,664
6134 | 18127,21464
6135 | 18128,14476
6136 | 18129,126
6137 | 18130,5035
6138 | 18131,22842
6139 | 18132,12275
6140 | 18133,12552
6141 | 18133,4990
6142 | 18134,9377
6143 | 18134,22587
6144 | 18135,9006
6145 | 18136,15673
6146 | 18136,14672
6147 | 18137,1435
6148 | 18137,19902
6149 | 18137,6834
6150 | 18137,14188
6151 | 18137,18354
6152 | 18137,1814
6153 | 18137,8066
6154 | 18137,8446
6155 | 18137,24137
6156 | 18137,9982
6157 | 18137,4679
6158 | 18137,18182
6159 | 18137,15597
6160 | 18137,4508
6161 | 18137,18351
6162 | 18137,18656
6163 | 18137,7506
6164 | 18137,12341
6165 | 18137,24009
6166 | 18137,22589
6167 | 18137,117
6168 | 18137,19867
6169 | 18137,12590
6170 | 18137,17878
6171 | 18137,12448
6172 | 18137,2354
6173 | 18137,5552
6174 | 18137,12943
6175 | 18137,4604
6176 | 18137,21511
6177 | 18137,10291
6178 | 18137,7507
6179 | 18137,2841
6180 | 18137,10293
6181 | 18137,18365
6182 | 18137,13749
6183 | 18137,8452
6184 | 18137,24883
6185 | 18137,19926
6186 | 18137,10289
6187 | 18137,15429
6188 | 18137,17741
6189 | 18137,21679
6190 | 18137,20216
6191 | 18137,8973
6192 | 18137,4218
6193 | 18137,19715
6194 | 18137,3158
6195 | 18137,7722
6196 | 18137,2390
6197 | 18137,15696
6198 | 18137,24473
6199 | 18137,9382
6200 | 18137,17093
6201 | 18137,15069
6202 | 18137,7244
6203 | 18137,8896
6204 | 18137,1722
6205 | 18137,22425
6206 | 18137,12695
6207 | 18137,16161
6208 | 18137,16076
6209 | 18137,13158
6210 | 18137,1253
6211 | 18137,21339
6212 | 18137,24072
6213 | 18137,5040
6214 | 18137,15579
6215 | 18137,12
6216 | 18137,17693
6217 | 18137,15843
6218 | 18137,10160
6219 | 18137,8256
6220 | 18137,19903
6221 | 18137,8378
6222 | 18137,23833
6223 | 18137,17759
6224 | 18137,285
6225 | 18137,12525
6226 | 18137,18183
6227 | 18137,11302
6228 | 18137,3865
6229 | 18137,10282
6230 | 18137,24497
6231 | 18137,6014
6232 | 18137,24492
6233 | 18137,7091
6234 | 18137,24931
6235 | 18137,8197
6236 | 18137,3153
6237 | 18137,4454
6238 | 18138,22316
6239 | 18139,22818
6240 | 18140,14782
6241 | 18141,20379
6242 | 18142,20249
6243 | 18143,21957
6244 | 18144,18028
6245 | 18144,11050
6246 | 18144,12475
6247 | 18144,13977
6248 | 18145,14532
6249 | 18145,5085
6250 | 18146,9671
6251 | 18147,20127
6252 | 18148,7992
6253 | 18148,14618
6254 | 18149,12311
6255 | 18149,13515
6256 | 18150,1253
6257 | 18151,2788
6258 | 18152,11000
6259 | 18152,7651
6260 | 18152,10396
6261 | 18152,16687
6262 | 18152,22128
6263 | 18152,1963
6264 | 18152,3215
6265 | 18152,18201
6266 | 18152,6081
6267 | 18152,15404
6268 | 18152,20820
6269 | 18152,21102
6270 | 18152,2164
6271 | 18152,7334
6272 | 18152,3024
6273 | 18153,14624
6274 | 18154,24450
6275 | 18155,19460
6276 | 18156,23024
6277 | 18156,12275
6278 | 18157,845
6279 | 18158,8417
6280 | 18159,19889
6281 | 18159,9939
6282 | 18159,4887
6283 | 18160,9854
6284 | 18161,2788
6285 | 18162,10077
6286 | 18163,24473
6287 | 18163,13158
6288 | 18163,7671
6289 | 18163,16509
6290 | 18163,10517
6291 | 18163,18555
6292 | 18164,17734
6293 | 18165,19427
6294 | 18165,23528
6295 | 18165,815
6296 | 18165,8336
6297 | 18165,5921
6298 | 18165,12094
6299 | 18165,6399
6300 | 18165,24198
6301 | 18165,16781
6302 | 18166,15865
6303 | 18167,3407
6304 | 18167,3392
6305 | 18167,14501
6306 | 18168,9266
6307 | 18169,6993
6308 | 18170,1160
6309 | 18171,20661
6310 | 18171,13690
6311 | 18171,18341
6312 | 18172,23470
6313 | 18172,19830
6314 | 18172,23482
6315 | 18172,19841
6316 | 18173,22210
6317 | 18174,18351
6318 | 18175,19006
6319 | 18176,15609
6320 | 18176,2603
6321 | 18176,9265
6322 | 18177,19790
6323 | 18178,14148
6324 | 18179,4218
6325 | 18180,9161
6326 | 18181,19776
6327 | 18182,13462
6328 | 18182,14772
6329 | 18182,5966
6330 | 18182,22152
6331 | 18182,1461
6332 | 18182,14115
6333 | 18183,20216
6334 | 18184,15065
6335 | 18185,16564
6336 | 18185,12502
6337 | 18185,1244
6338 |
--------------------------------------------------------------------------------
/eval/evaluate.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | from eval import metrics
4 | import gc
5 |
6 |
7 | def evaluate_user(model, eval_loader, device, mode='pretrain'):
8 | """ evaluate model on recommending items to users (primarily during pre-training step) """
9 | model.eval()
10 | eval_loss = 0.0
11 | n100_list, r20_list, r50_list = [], [], []
12 | eval_preds = []
13 | with torch.no_grad():
14 | for batch_index, eval_data in enumerate(eval_loader):
15 | eval_data = [x.to(device, non_blocking=True) for x in eval_data]
16 | (users, fold_in_items, held_out_items) = eval_data
17 | fold_in_items = fold_in_items.to(device)
18 | if mode == 'pretrain':
19 | recon_batch, emb = model.user_preference_encoder.pre_train_forward(fold_in_items)
20 | else:
21 | recon_batch = model.group_predictor(model.user_preference_encoder(fold_in_items))
22 |
23 | loss = model.multinomial_loss(recon_batch, held_out_items)
24 | eval_loss += loss.item()
25 | fold_in_items = fold_in_items.cpu().numpy()
26 | recon_batch = torch.softmax(recon_batch, 1) # softmax over the item set to get normalized scores.
27 | recon_batch[fold_in_items.nonzero()] = -np.inf
28 |
29 | n100 = metrics.ndcg_binary_at_k_batch_torch(recon_batch, held_out_items, 100, device=device)
30 | r20 = metrics.recall_at_k_batch_torch(recon_batch, held_out_items, 20)
31 | r50 = metrics.recall_at_k_batch_torch(recon_batch, held_out_items, 50)
32 |
33 | n100_list.append(n100)
34 | r20_list.append(r20)
35 | r50_list.append(r50)
36 |
37 | eval_preds.append(recon_batch.cpu().numpy())
38 | del users, fold_in_items, held_out_items, recon_batch
39 | gc.collect()
40 | num_batches = max(1, len(eval_loader.dataset) / eval_loader.batch_size)
41 | eval_loss /= num_batches
42 | n100_list = torch.cat(n100_list)
43 | r20_list = torch.cat(r20_list)
44 | r50_list = torch.cat(r50_list)
45 | return eval_loss, torch.mean(n100_list), torch.mean(r20_list), torch.mean(r50_list), np.array(eval_preds)
46 |
47 |
48 | def evaluate_group(model, eval_group_loader, device):
49 | """ evaluate model on recommending items to groups """
50 | model.eval()
51 | eval_loss = 0.0
52 | n100_list, r20_list, r50_list = [], [], []
53 | eval_preds = []
54 |
55 | with torch.no_grad():
56 | for batch_idx, data in enumerate(eval_group_loader):
57 | data = [x.to(device, non_blocking=True) for x in data]
58 | group, group_users, group_mask, group_items, user_items = data
59 |             recon_batch, _ = model(group, group_users, group_mask, user_items)  # in eval mode, forward returns (group_logits, group_embed)
60 |
61 | loss = model.multinomial_loss(recon_batch, group_items)
62 | eval_loss += loss.item()
63 | result = recon_batch.softmax(1) # softmax over the item set to get normalized scores.
64 | heldout_data = group_items
65 |
66 | r20 = metrics.recall_at_k_batch_torch(result, heldout_data, 20)
67 | r50 = metrics.recall_at_k_batch_torch(result, heldout_data, 50)
68 | n100 = metrics.ndcg_binary_at_k_batch_torch(result, heldout_data, 100, device=device)
69 |
70 | n100_list.append(n100)
71 | r20_list.append(r20)
72 | r50_list.append(r50)
73 |
74 | eval_preds.append(recon_batch.cpu().numpy())
75 | del group, group_users, group_mask, group_items, user_items
76 | gc.collect()
77 |
78 | n100_list = torch.cat(n100_list)
79 | r20_list = torch.cat(r20_list)
80 | r50_list = torch.cat(r50_list)
81 | return eval_loss, torch.mean(n100_list), torch.mean(r20_list), torch.mean(r50_list), np.array(eval_preds)
82 |
83 |
--------------------------------------------------------------------------------
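A minimal sketch (toy tensors only, no model or data loader) of the exclusion step used in evaluate_user above: after the softmax, the scores of fold-in items are pushed to -inf so that only held-out items can reach the top-k ranking.

import numpy as np
import torch

scores = torch.rand(2, 5)                                  # [B, I] raw item scores for 2 users
fold_in = np.array([[1, 0, 0, 1, 0],
                    [0, 1, 0, 0, 0]], dtype=np.float32)    # items each user already interacted with

scores = torch.softmax(scores, dim=1)                      # normalized item scores
scores[fold_in.nonzero()] = -np.inf                        # exclude fold-in items from the ranking
print(torch.topk(scores, k=2, dim=1).indices)              # top-k over held-out candidates only
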
/eval/metrics.py:
--------------------------------------------------------------------------------
1 | import torch
2 |
3 |
4 | def ndcg_binary_at_k_batch_torch(X_pred, heldout_batch, k=100, device='cpu'):
5 | """
6 |     Normalized Discounted Cumulative Gain@k for predictions [B, I] and ground-truth [B, I], with binary relevance.
7 | ASSUMPTIONS: all the 0's in heldout_batch indicate 0 relevance.
8 | """
9 |
10 | batch_users = X_pred.shape[0] # batch_size
11 | _, idx_topk = torch.topk(X_pred, k, dim=1, sorted=True)
12 | tp = 1. / torch.log2(torch.arange(2, k + 2, device=device).float())
13 | heldout_batch_nonzero = (heldout_batch > 0).float()
14 | DCG = (heldout_batch_nonzero[torch.arange(batch_users, device=device).unsqueeze(1), idx_topk] * tp).sum(dim=1)
15 | heldout_nonzero = (heldout_batch > 0).sum(dim=1) # num. of non-zero items per batch. [B]
16 | IDCG = torch.tensor([(tp[:min(n, k)]).sum() for n in heldout_nonzero]).to(device)
17 | return DCG / IDCG
18 |
19 |
20 | def recall_at_k_batch_torch(X_pred, heldout_batch, k=100):
21 | """
22 | Recall@k for predictions [B, I] and ground-truth [B, I].
23 | """
24 | batch_users = X_pred.shape[0]
25 | _, topk_indices = torch.topk(X_pred, k, dim=1, sorted=False) # [B, K]
26 | X_pred_binary = torch.zeros_like(X_pred)
27 | if torch.cuda.is_available():
28 | X_pred_binary = X_pred_binary.cuda()
29 | X_pred_binary[torch.arange(batch_users).unsqueeze(1), topk_indices] = 1
30 |     X_true_binary = (heldout_batch > 0).float()  # [B, I]
31 | k_tensor = torch.tensor([k], dtype=torch.float32)
32 | if torch.cuda.is_available():
33 | X_true_binary = X_true_binary.cuda()
34 | k_tensor = k_tensor.cuda()
35 | tmp = (X_true_binary * X_pred_binary).sum(dim=1).float()
36 | recall = tmp / torch.min(k_tensor, X_true_binary.sum(dim=1).float())
37 | return recall
38 |
--------------------------------------------------------------------------------
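A quick sanity check of the two metrics above on toy [B, I] tensors; a hedged sketch that assumes it is run from the repository root so that the eval package resolves.

import torch
from eval import metrics

X_pred = torch.tensor([[0.9, 0.1, 0.8, 0.3],
                       [0.2, 0.7, 0.1, 0.6]])
heldout = torch.tensor([[1., 0., 0., 1.],
                        [0., 1., 0., 0.]])

# User 0 places one of its two held-out items in the top 2; user 1 places its only one first.
print(metrics.ndcg_binary_at_k_batch_torch(X_pred, heldout, k=2))   # approx. [0.613, 1.0]
print(metrics.recall_at_k_batch_torch(X_pred, heldout, k=2))        # [0.5, 1.0]
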
/models/aggregators.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 |
4 |
5 | class MaxPoolAggregator(nn.Module):
6 | """ Group Preference Aggregator implemented as max pooling over group member embeddings """
7 |
8 | def __init__(self, input_dim, output_dim, drop_ratio=0):
9 | super(MaxPoolAggregator, self).__init__()
10 |
11 | self.mlp = nn.Sequential(
12 | nn.Linear(input_dim, output_dim, bias=True),
13 | nn.ReLU(),
14 | nn.Dropout(drop_ratio)
15 | )
16 | nn.init.xavier_uniform_(self.mlp[0].weight)
17 | if self.mlp[0].bias is not None:
18 | self.mlp[0].bias.data.fill_(0.0)
19 |
20 | def forward(self, x, mask, mlp=False):
21 | """ max pooling aggregator:
22 | :param x: [B, G, D] group member embeddings
23 | :param mask: [B, G] -inf/0 for absent/present
24 | :param mlp: flag to add a linear layer before max pooling
25 | """
26 | if mlp:
27 | h = torch.tanh(self.mlp(x))
28 | else:
29 | h = x
30 |
31 | if mask is None:
32 |             return torch.max(h, dim=1).values  # return the pooled values, not the (values, indices) tuple
33 | else:
34 | res = torch.max(h + mask.unsqueeze(2), dim=1)
35 | return res.values
36 |
37 |
38 | # mask: -inf/0 for absent/present.
39 | class MeanPoolAggregator(nn.Module):
40 | """ Group Preference Aggregator implemented as mean pooling over group member embeddings """
41 |
42 | def __init__(self, input_dim, output_dim, drop_ratio=0):
43 | super(MeanPoolAggregator, self).__init__()
44 | self.mlp = nn.Sequential(
45 | nn.Linear(input_dim, output_dim, bias=True),
46 | nn.ReLU(),
47 | nn.Dropout(drop_ratio)
48 | )
49 | nn.init.xavier_uniform_(self.mlp[0].weight)
50 | if self.mlp[0].bias is not None:
51 | self.mlp[0].bias.data.fill_(0.0)
52 |
53 | def forward(self, x, mask, mlp=False):
54 | """ mean pooling aggregator:
55 | :param x: [B, G, D] group member embeddings
56 | :param mask: [B, G] -inf/0 for absent/present
57 | :param mlp: flag to add a linear layer before mean pooling
58 | """
59 | if mlp:
60 | h = torch.tanh(self.mlp(x))
61 | else:
62 | h = x
63 | if mask is None:
64 | return torch.mean(h, dim=1)
65 | else:
66 | mask = torch.exp(mask)
67 | res = torch.sum(h * mask.unsqueeze(2), dim=1) / mask.sum(1).unsqueeze(1)
68 | return res
69 |
70 |
71 | class AttentionAggregator(nn.Module):
72 | """ Group Preference Aggregator implemented as attention over group member embeddings """
73 |
74 | def __init__(self, input_dim, output_dim, drop_ratio=0):
75 | super(AttentionAggregator, self).__init__()
76 | self.mlp = nn.Sequential(
77 | nn.Linear(input_dim, output_dim, bias=True),
78 | nn.ReLU(),
79 | nn.Dropout(drop_ratio)
80 | )
81 |
82 | self.attention = nn.Linear(output_dim, 1)
83 | self.drop = nn.Dropout(drop_ratio)
84 | nn.init.xavier_uniform_(self.mlp[0].weight)
85 | if self.mlp[0].bias is not None:
86 | self.mlp[0].bias.data.fill_(0.0)
87 |
88 | def forward(self, x, mask, mlp=False):
89 | """ attentive aggregator:
90 | :param x: [B, G, D] group member embeddings
91 | :param mask: [B, G] -inf/0 for absent/present
92 | :param mlp: flag to add a linear layer before attention
93 | """
94 | if mlp:
95 | h = torch.tanh(self.mlp(x))
96 | else:
97 | h = x
98 |
99 | attention_out = torch.tanh(self.attention(h))
100 | if mask is None:
101 | weight = torch.softmax(attention_out, dim=1)
102 | else:
103 | weight = torch.softmax(attention_out + mask.unsqueeze(2), dim=1)
104 | ret = torch.matmul(h.transpose(2, 1), weight).squeeze(2)
105 | return ret
106 |
--------------------------------------------------------------------------------
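All three aggregators above share the same additive mask convention: a [B, G] tensor holding 0 for a real group member and -inf for a padded slot. A minimal sketch with toy embeddings showing that MeanPoolAggregator ignores the padded slot (assumes the repository root is on the Python path).

import torch
from models.aggregators import MeanPoolAggregator

B, G, D = 1, 3, 4
x = torch.randn(B, G, D)                          # member embeddings; the last slot is padding
mask = torch.tensor([[0., 0., float('-inf')]])    # 0 = present, -inf = absent

agg = MeanPoolAggregator(D, D)
out = agg(x, mask)                                # [B, D]
# exp(mask) = [1, 1, 0], so the padded member contributes nothing to the mean.
assert torch.allclose(out, x[:, :2].mean(dim=1))
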
/models/discriminator.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 |
4 |
5 | class Discriminator(nn.Module):
6 | """ Discriminator for Mutual Information Estimation and Maximization, implemented with bilinear layers and
7 | binary cross-entropy loss training """
8 |
9 | def __init__(self, embedding_dim=64):
10 | super(Discriminator, self).__init__()
11 | self.embedding_dim = embedding_dim
12 |
13 | self.fc_layer = torch.nn.Linear(self.embedding_dim, self.embedding_dim, bias=True)
14 | nn.init.xavier_uniform_(self.fc_layer.weight)
15 | nn.init.zeros_(self.fc_layer.bias)
16 |
17 | self.bilinear_layer = nn.Bilinear(self.embedding_dim, self.embedding_dim, 1) # output_dim = 1 => single score.
18 | nn.init.zeros_(self.bilinear_layer.weight)
19 | nn.init.zeros_(self.bilinear_layer.bias)
20 |
21 | self.bce_loss = nn.BCEWithLogitsLoss()
22 |
23 | def forward(self, group_inputs, user_inputs, group_mask):
24 | """ bilinear discriminator:
25 |         :param group_inputs: [B, D] group embeddings
26 |         :param user_inputs: [B, n_samples, D] user embeddings, where n_samples is either G or # negs
27 | :param group_mask: [B, G]
28 | """
29 | # FC + activation.
30 | group_encoded = self.fc_layer(group_inputs) # [B, D]
31 | group_embed = torch.tanh(group_encoded) # [B, D]
32 |
33 | # FC + activation.
34 | user_pref_embedding = self.fc_layer(user_inputs)
35 | user_embed = torch.tanh(user_pref_embedding) # [B, n_samples, D]
36 |
37 | return self.bilinear_layer(user_embed, group_embed.unsqueeze(1).repeat(1, user_inputs.shape[1], 1))
38 |
39 | def mi_loss(self, scores_group, group_mask, scores_corrupted, device='cpu'):
40 | """ binary cross-entropy loss over (group, user) pairs for discriminator training
41 |         :param scores_group: [B, G, 1]
42 |         :param group_mask: [B, G]
43 |         :param scores_corrupted: [B, N, 1]
44 | :param device (cpu/gpu)
45 | """
46 | batch_size = scores_group.shape[0]
47 | pos_size, neg_size = scores_group.shape[1], scores_corrupted.shape[1]
48 |
49 | one_labels = torch.ones(batch_size, pos_size).to(device) # [B, G]
50 | zero_labels = torch.zeros(batch_size, neg_size).to(device) # [B, N]
51 |
52 | labels = torch.cat((one_labels, zero_labels), 1) # [B, G+N]
53 | logits = torch.cat((scores_group, scores_corrupted), 1).squeeze(2) # [B, G + N]
54 |
55 | mask = torch.cat((torch.exp(group_mask), torch.ones([batch_size, neg_size]).to(device)),
56 | 1) # torch.exp(.) to binarize since original mask has -inf.
57 |
58 | mi_loss = self.bce_loss(logits * mask, labels * mask) * (batch_size * (pos_size + neg_size)) \
59 | / (torch.exp(group_mask).sum() + batch_size * neg_size)
60 |
61 | return mi_loss
62 |
--------------------------------------------------------------------------------
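A minimal sketch of the MI objective above on toy embeddings: scores of observed (group, member) pairs are labelled 1, scores of corrupted (group, negative user) pairs are labelled 0, and padded member slots are zeroed out through exp(group_mask). All shapes and values here are illustrative only.

import torch
from models.discriminator import Discriminator

B, G, N, D = 2, 3, 4, 8
disc = Discriminator(embedding_dim=D)

group_embed = torch.randn(B, D)                   # group representations
member_embed = torch.randn(B, G, D)               # observed member embeddings
corrupt_embed = torch.randn(B, N, D)              # negative (corrupted) user embeddings
group_mask = torch.zeros(B, G)
group_mask[0, -1] = float('-inf')                 # first group has one padded slot

scores_obs = disc(group_embed, member_embed, group_mask)     # [B, G, 1]
scores_neg = disc(group_embed, corrupt_embed, group_mask)    # [B, N, 1]
print(disc.mi_loss(scores_obs, group_mask, scores_neg, device='cpu'))
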
/models/encoder.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.nn.functional as F
4 |
5 |
6 | class Encoder(nn.Module):
7 | """ User Preference Encoder implemented as fully connected layers over binary bag-of-words vector
8 | (over item set) per user """
9 |
10 | def __init__(self, n_items, user_layers, embedding_dim, drop_ratio):
11 | super(Encoder, self).__init__()
12 | self.n_items = n_items
13 | self.embedding_dim = embedding_dim
14 | self.drop = nn.Dropout(drop_ratio)
15 | self.user_preference_encoder = torch.nn.ModuleList() # user individual preference encoder layers.
16 |
17 | for idx, (in_size, out_size) in enumerate(zip([self.n_items] + user_layers[:-1], user_layers)):
18 | layer = torch.nn.Linear(in_size, out_size, bias=True)
19 | nn.init.xavier_uniform_(layer.weight)
20 | nn.init.zeros_(layer.bias)
21 | self.user_preference_encoder.append(layer)
22 |
23 | self.transform_layer = nn.Linear(self.embedding_dim, self.embedding_dim)
24 | nn.init.xavier_uniform_(self.transform_layer.weight)
25 | nn.init.zeros_(self.transform_layer.bias)
26 |
27 | self.user_predictor = nn.Linear(self.embedding_dim, self.n_items, bias=False) # item embedding for pre-training
28 | nn.init.xavier_uniform_(self.user_predictor.weight)
29 |
30 | def pre_train_forward(self, user_items):
31 | """ user individual preference encoder (excluding final layer) for user-item pre-training
32 | :param user_items: [B, G, I] or [B, I]
33 | """
34 | user_items_norm = F.normalize(user_items) # [B, G, I] or [B, I]
35 | user_pref_embedding = self.drop(user_items_norm)
36 |         for idx in range(len(self.user_preference_encoder)):
37 | user_pref_embedding = self.user_preference_encoder[idx](user_pref_embedding) # [B, G, D] or [B, D]
38 | user_pref_embedding = torch.tanh(user_pref_embedding) # [B, G, D] or [B, D]
39 |
40 |         logits = self.user_predictor(user_pref_embedding)  # [B, G, I] or [B, I]
41 | return logits, user_pref_embedding
42 |
43 | def forward(self, user_items):
44 | """ user individual preference encoder
45 | :param user_items: [B, G, I]
46 | """
47 | _, user_embeds = self.pre_train_forward(user_items) # [B, G, D]
48 | user_embeds = torch.tanh(self.transform_layer(user_embeds)) # [B, G, D]
49 | return user_embeds
50 |
--------------------------------------------------------------------------------
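A minimal sketch of the encoder's two call paths on a toy binary bag-of-items input: pre_train_forward returns item logits plus the embedding used during user-item pre-training, while forward adds the transform layer used by the group model (assumes the repository root is on the Python path).

import torch
from models.encoder import Encoder

n_items, emb = 10, 4
enc = Encoder(n_items, user_layers=[emb], embedding_dim=emb, drop_ratio=0.0)

user_items = torch.zeros(2, n_items)
user_items[0, [1, 3]] = 1.0                       # binary item vector for user 0
user_items[1, [0, 7, 9]] = 1.0                    # binary item vector for user 1

logits, embed = enc.pre_train_forward(user_items)
print(logits.shape, embed.shape)                  # torch.Size([2, 10]) torch.Size([2, 4])
print(enc(user_items).shape)                      # torch.Size([2, 4])
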
/models/models.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch.nn.functional as F
3 | import torch
4 | from models.aggregators import MaxPoolAggregator, AttentionAggregator, MeanPoolAggregator
5 | from models.discriminator import Discriminator
6 | from models.encoder import Encoder
7 |
8 |
9 | class GroupIM(nn.Module):
10 | """
11 | GroupIM framework for Group Recommendation:
12 | (a) User Preference encoding: user_preference_encoder
13 | (b) Group Aggregator: preference_aggregator
14 | (c) InfoMax Discriminator: discriminator
15 | """
16 |
17 | def __init__(self, n_items, user_layers, lambda_mi=0.1, drop_ratio=0.4, aggregator_type='attention'):
18 | super(GroupIM, self).__init__()
19 | self.n_items = n_items
20 | self.lambda_mi = lambda_mi
21 | self.drop = nn.Dropout(drop_ratio)
22 | self.embedding_dim = user_layers[-1]
23 | self.aggregator_type = aggregator_type
24 |
25 | self.user_preference_encoder = Encoder(self.n_items, user_layers, self.embedding_dim, drop_ratio)
26 |
27 | if self.aggregator_type == 'maxpool':
28 | self.preference_aggregator = MaxPoolAggregator(self.embedding_dim, self.embedding_dim)
29 | elif self.aggregator_type == 'meanpool':
30 | self.preference_aggregator = MeanPoolAggregator(self.embedding_dim, self.embedding_dim)
31 | elif self.aggregator_type == 'attention':
32 | self.preference_aggregator = AttentionAggregator(self.embedding_dim, self.embedding_dim)
33 | else:
34 | raise NotImplementedError("Aggregator type {} not implemented ".format(self.aggregator_type))
35 |
36 | self.group_predictor = nn.Linear(self.embedding_dim, self.n_items, bias=False)
37 | nn.init.xavier_uniform_(self.group_predictor.weight)
38 |
39 | self.discriminator = Discriminator(embedding_dim=self.embedding_dim)
40 |
41 | for m in self.modules():
42 | if isinstance(m, nn.Linear):
43 | nn.init.xavier_uniform_(m.weight)
44 | if isinstance(m, nn.Embedding):
45 | nn.init.xavier_uniform_(m.weight)
46 |
47 | def forward(self, group, group_users, group_mask, user_items):
48 | """ compute group embeddings and item recommendations by user preference encoding, group aggregation and
49 | item prediction
50 |
51 | :param group: [B] group id
52 | :param group_users: [B, G] group user ids with padding
53 | :param group_mask: [B, G] -inf/0 for absent/present user
54 | :param user_items: [B, G, I] individual item interactions of group members
55 |
56 | """
57 | user_pref_embeds = self.user_preference_encoder(user_items)
58 | group_embed = self.preference_aggregator(user_pref_embeds, group_mask, mlp=False) # [B, D]
59 | group_logits = self.group_predictor(group_embed) # [B, I]
60 |
61 |         if self.training:  # discriminator scores are only needed during training
62 |             obs_user_embeds = self.user_preference_encoder(user_items)  # [B, G, D]
63 |             scores_ug = self.discriminator(group_embed, obs_user_embeds, group_mask).detach()  # [B, G, 1]
64 | return group_logits, group_embed, scores_ug
65 | else:
66 | return group_logits, group_embed
67 |
68 | def multinomial_loss(self, logits, items):
69 | """ multinomial likelihood with softmax over item set """
70 | return -torch.mean(torch.sum(F.log_softmax(logits, 1) * items, -1))
71 |
72 | def user_loss(self, user_logits, user_items):
73 | return self.multinomial_loss(user_logits, user_items)
74 |
75 | def infomax_group_loss(self, group_logits, group_embeds, scores_ug, group_mask, group_items, user_items,
76 | corrupted_user_items, device='cpu'):
77 | """ loss function with three terms: L_G, L_UG, L_MI
78 |         :param group_logits: [B, I] group item predictions
79 |         :param group_embeds: [B, D] group embedding
80 |         :param scores_ug: [B, G, 1] discriminator scores for group members
81 | :param group_mask: [B, G] -inf/0 for absent/present user
82 | :param group_items: [B, I] item interactions of group
83 | :param user_items: [B, G, I] individual item interactions of group members
84 | :param corrupted_user_items: [B, N, I] individual item interactions of negative user samples
85 | :param device: cpu/gpu
86 | """
87 |
88 | group_user_embeds = self.user_preference_encoder(user_items) # [B, G, D]
89 | corrupt_user_embeds = self.user_preference_encoder(corrupted_user_items) # [B, N, D]
90 |
91 |         scores_observed = self.discriminator(group_embeds, group_user_embeds, group_mask)  # [B, G, 1]
92 |         scores_corrupted = self.discriminator(group_embeds, corrupt_user_embeds, group_mask)  # [B, N, 1]
93 |
94 | mi_loss = self.discriminator.mi_loss(scores_observed, group_mask, scores_corrupted, device=device)
95 |
96 | ui_sum = user_items.sum(2, keepdim=True) # [B, G]
97 | user_items_norm = user_items / torch.max(torch.ones_like(ui_sum), ui_sum) # [B, G, I]
98 | gi_sum = group_items.sum(1, keepdim=True)
99 | group_items_norm = group_items / torch.max(torch.ones_like(gi_sum), gi_sum) # [B, I]
100 | assert scores_ug.requires_grad is False
101 |
102 | group_mask_zeros = torch.exp(group_mask).unsqueeze(2) # [B, G, 1]
103 | scores_ug = torch.sigmoid(scores_ug) # [B, G, 1]
104 |
105 | user_items_norm = torch.sum(user_items_norm * scores_ug * group_mask_zeros, dim=1) / group_mask_zeros.sum(1)
106 | user_group_loss = self.multinomial_loss(group_logits, user_items_norm)
107 | group_loss = self.multinomial_loss(group_logits, group_items_norm)
108 |
109 | return mi_loss, user_group_loss, group_loss
110 |
111 | def loss(self, group_logits, summary_embeds, scores_ug, group_mask, group_items, user_items, corrupted_user_items,
112 | device='cpu'):
113 | """ L_G + lambda L_UG + L_MI """
114 | mi_loss, user_group_loss, group_loss = self.infomax_group_loss(group_logits, summary_embeds, scores_ug,
115 | group_mask, group_items, user_items,
116 | corrupted_user_items, device)
117 |
118 | return group_loss + mi_loss + self.lambda_mi * user_group_loss
119 |
--------------------------------------------------------------------------------
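A minimal end-to-end sketch of the objective L_G + lambda * L_UG + L_MI on toy tensors (two groups of up to three members, five negative users per group), wired the same way train.py calls forward() and loss(). All sizes here are illustrative only.

import torch
from models.models import GroupIM

n_items, emb, B, G, N = 12, 8, 2, 3, 5
model = GroupIM(n_items, user_layers=[emb], lambda_mi=1.0, drop_ratio=0.0)
model.train()

group = torch.arange(B)                                        # [B] group ids
group_users = torch.zeros(B, G, dtype=torch.long)              # [B, G] padded member ids
group_mask = torch.zeros(B, G)
group_mask[1, -1] = float('-inf')                              # second group has a padded slot
user_items = torch.randint(0, 2, (B, G, n_items)).float()      # member interactions
group_items = torch.randint(0, 2, (B, n_items)).float()        # group interactions
corrupted = torch.randint(0, 2, (B, N, n_items)).float()       # negative user interactions

logits, embeds, scores_ug = model(group, group_users, group_mask, user_items)
loss = model.loss(logits, embeds, scores_ug, group_mask, group_items,
                  user_items, corrupted, device='cpu')
loss.backward()
print(float(loss))
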
/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy==1.18.1
2 | pandas==1.0.3
3 | scikit-learn==0.23.1
4 | scipy==1.4.1
5 | seaborn==0.10.1
6 | torch==1.1.0
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import time
3 | import gc
4 | import os
5 | import numpy as np
6 | import torch
7 | from torch.utils.data import DataLoader
8 |
9 | from models.models import GroupIM
10 | from utils.user_utils import TrainUserDataset, EvalUserDataset
11 | from utils.group_utils import TrainGroupDataset, EvalGroupDataset
12 | from eval.evaluate import evaluate_user, evaluate_group
13 |
14 | if torch.cuda.is_available():
15 | os.system('nvidia-smi -q -d Memory |grep -A4 GPU|grep Free >tmp')
16 | memory_available = [int(x.split()[2]) for x in open('tmp', 'r').readlines()]
17 | gpu_id = int(np.argmax(memory_available))
18 | torch.cuda.set_device(gpu_id)
19 |
20 | parser = argparse.ArgumentParser(description='PyTorch GroupIM: Group Information Maximization for Group Recommendation')
21 | parser.add_argument('--dataset', type=str, default='weeplaces', help='Name of dataset')
22 |
23 | # Training settings.
24 | parser.add_argument('--lr', type=float, default=5e-3, help='initial learning rate')
25 | parser.add_argument('--wd', type=float, default=0.00, help='weight decay coefficient')
26 | parser.add_argument('--lambda_mi', type=float, default=1.0, help='MI lambda hyper param')
27 | parser.add_argument('--drop_ratio', type=float, default=0.4, help='Dropout ratio')
28 | parser.add_argument('--batch_size', type=int, default=256, help='batch size')
29 | parser.add_argument('--epochs', type=int, default=200, help='maximum # training epochs')
30 | parser.add_argument('--eval_freq', type=int, default=5, help='frequency to evaluate performance on validation set')
31 |
32 | # Model settings.
33 | parser.add_argument('--emb_size', type=int, default=64, help='layer size')
34 | parser.add_argument('--aggregator', type=str, default='attention', help='choice of group preference aggregator',
35 | choices=['maxpool', 'meanpool', 'attention'])
36 | parser.add_argument('--negs_per_group', type=int, default=5, help='# negative users sampled per group')
37 |
38 | # Pre-training settings.
39 | parser.add_argument('--pretrain_user', action='store_true', help='Pre-train user encoder on user-item interactions')
40 | parser.add_argument('--pretrain_mi', action='store_true', help='Pre-train MI estimator for a few epochs')
41 | parser.add_argument('--pretrain_epochs', type=int, default=100, help='# pre-train epochs for user encoder layer')
42 |
43 | parser.add_argument('--cuda', action='store_true', help='use CUDA')
44 | parser.add_argument('--seed', type=int, default=1111, help='random seed for reproducibility')
45 |
46 | # Model save file parameters.
47 | parser.add_argument('--save', type=str, default='model_user.pt', help='path to save the final model')
48 | parser.add_argument('--save_group', type=str, default='model_group.pt', help='path to save the final model')
49 |
50 | args = parser.parse_args()
51 |
52 | torch.manual_seed(args.seed) # Set the random seed manually for reproducibility.
53 |
54 | if torch.cuda.is_available():
55 | if not args.cuda:
56 | print("WARNING: You have a CUDA device, so you should probably run with --cuda")
57 |
58 | ###############################################################################
59 | # Load data
60 | ###############################################################################
61 |
62 | train_params = {'batch_size': args.batch_size, 'shuffle': False, 'num_workers': 6, 'pin_memory': True}
63 | eval_params = {'batch_size': args.batch_size, 'shuffle': False, 'num_workers': 6, 'pin_memory': True}
64 | device = torch.device("cuda" if args.cuda else "cpu")
65 |
66 | # Define train/val/test datasets on user interactions.
67 | train_dataset = TrainUserDataset(args.dataset) # train dataset for user-item interactions.
68 | n_items, n_users = train_dataset.n_items, train_dataset.n_users
69 | val_dataset = EvalUserDataset(args.dataset, n_items, datatype='val')
70 | test_dataset = EvalUserDataset(args.dataset, n_items, datatype='test')
71 |
72 | # Define train/val/test datasets on group and user interactions.
73 | train_group_dataset = TrainGroupDataset(args.dataset, n_items, args.negs_per_group)
74 | padding_idx = train_group_dataset.padding_idx
75 | val_group_dataset = EvalGroupDataset(args.dataset, n_items, padding_idx, datatype='val')
76 | test_group_dataset = EvalGroupDataset(args.dataset, n_items, padding_idx, datatype='test')
77 |
78 | # Define data loaders on user interactions.
79 | train_loader = DataLoader(train_dataset, **train_params)
80 | val_loader = DataLoader(val_dataset, **eval_params)
81 | test_loader = DataLoader(test_dataset, **eval_params)
82 |
83 | # Define data loaders on group interactions.
84 | train_group_loader = DataLoader(train_group_dataset, **train_params)
85 | val_group_loader = DataLoader(val_group_dataset, **eval_params)
86 | test_group_loader = DataLoader(test_group_dataset, **eval_params)
87 |
88 | ###############################################################################
89 | # Build the model
90 | ###############################################################################
91 |
92 | user_layers = [args.emb_size] # user encoder layer configuration is tunable.
93 |
94 | model = GroupIM(n_items, user_layers, drop_ratio=args.drop_ratio, aggregator_type=args.aggregator,
95 | lambda_mi=args.lambda_mi).to(device)
96 | optimizer_gr = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)
97 |
98 | best_user_n100, best_group_n100 = -np.inf, -np.inf
99 |
100 | print("args", args)
101 | # At any point you can hit Ctrl + C to break out of training early.
102 | try:
103 | if args.pretrain_user:
104 | optimizer_ur = torch.optim.Adam(model.parameters(), lr=0.01, weight_decay=args.wd)
105 | print("Pre-training model on user-item interactions")
106 | for epoch in range(0, args.pretrain_epochs):
107 | epoch_start_time = time.time()
108 | model.train()
109 | train_user_loss = 0.0
110 | start_time = time.time()
111 |
112 | for batch_index, data in enumerate(train_loader):
113 | optimizer_ur.zero_grad()
114 | data = [x.to(device, non_blocking=True) for x in data]
115 | (train_users, train_items) = data
116 | user_logits, user_embeds = model.user_preference_encoder.pre_train_forward(train_items)
117 | user_loss = model.user_loss(user_logits, train_items)
118 | user_loss.backward()
119 | train_user_loss += user_loss.item()
120 | optimizer_ur.step()
121 | del train_users, train_items, user_logits, user_embeds
122 | elapsed = time.time() - start_time
123 | print('| epoch {:3d} | time {:4.2f} | loss {:4.2f}'.format(epoch + 1, elapsed,
124 | train_user_loss / len(train_loader)))
125 | if epoch % args.eval_freq == 0:
126 | val_loss, n100, r20, r50, _ = evaluate_user(model, val_loader, device, mode='pretrain')
127 |
128 | if n100 > best_user_n100:
129 | torch.save(model.state_dict(), args.save)
130 | best_user_n100 = n100
131 |
132 | print("Load best pre-trained user encoder")
133 | model.load_state_dict(torch.load(args.save))
134 | model = model.to(device)
135 |
136 | val_loss, n100, r20, r50, _ = evaluate_user(model, val_loader, device, mode='pretrain')
137 | print('=' * 89)
138 | print('| User evaluation | val loss {:4.4f} | n100 {:4.4f} | r20 {:4.4f} | '
139 | 'r50 {:4.4f}'.format(val_loss, n100, r20, r50))
140 | print("Initializing group recommender with pre-train user encoder")
141 | # Initialize the group predictor (item embedding) weight based on the pre-trained user predictor.
142 | model.group_predictor.weight.data = model.user_preference_encoder.user_predictor.weight.data
143 |
144 | if args.pretrain_mi:
145 | # pre-train MI estimator.
146 | for epoch in range(0, 10):
147 | model.train()
148 | t = time.time()
149 | mi_epoch_loss = 0.0
150 | for batch_index, data in enumerate(train_group_loader):
151 | data = [x.to(device, non_blocking=True) for x in data]
152 | group, group_users, group_mask, group_items, user_items, corrupted_user_items = data
153 | optimizer_gr.zero_grad()
154 | model.zero_grad()
155 | model.train()
156 | _, group_embeds, _ = model(group, group_users, group_mask, user_items)
157 | obs_user_embed = model.user_preference_encoder(user_items).detach() # [B, G, D]
158 | corrupted_user_embed = model.user_preference_encoder(corrupted_user_items).detach() # [B, # negs, D]
159 |
160 | scores_observed = model.discriminator(group_embeds, obs_user_embed, group_mask) # [B, G]
161 | scores_corrupted = model.discriminator(group_embeds, corrupted_user_embed, group_mask) # [B, # negs]
162 |
163 | mi_loss = model.discriminator.mi_loss(scores_observed, group_mask, scores_corrupted, device=device)
164 | mi_loss.backward()
165 | optimizer_gr.step()
166 | mi_epoch_loss += mi_loss
167 | del group, group_users, group_mask, group_items, user_items, corrupted_user_items, \
168 | obs_user_embed, corrupted_user_embed
169 | gc.collect()
170 | print("MI loss: {}".format(float(mi_epoch_loss) / len(train_group_loader)))
171 |
172 | optimizer_gr = torch.optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)
173 |
174 | for epoch in range(0, args.epochs):
175 | epoch_start_time = time.time()
176 | model.train()
177 | train_group_epoch_loss = 0.0
178 | for batch_index, data in enumerate(train_group_loader):
179 | data = [x.to(device, non_blocking=True) for x in data]
180 | group, group_users, group_mask, group_items, user_items, corrupted_user_items = data
181 | optimizer_gr.zero_grad()
182 | model.zero_grad()
183 | group_logits, group_embeds, scores_ug = model(group.squeeze(), group_users, group_mask, user_items)
184 | group_loss = model.loss(group_logits, group_embeds, scores_ug, group_mask, group_items, user_items,
185 | corrupted_user_items, device=device)
186 | group_loss.backward()
187 | train_group_epoch_loss += group_loss.item()
188 | optimizer_gr.step()
189 | del group, group_users, group_mask, group_items, user_items, corrupted_user_items, \
190 | group_logits, group_embeds, scores_ug
191 |
192 | gc.collect()
193 |
194 | print("Train loss: {}".format(float(train_group_epoch_loss) / len(train_group_loader)))
195 |
196 | if epoch % args.eval_freq == 0:
197 | # Group evaluation.
198 | val_loss_group, n100_group, r20_group, r50_group, _ = evaluate_group(model, val_group_loader, device)
199 |
200 | print('-' * 89)
201 | print('| end of epoch {:3d} | time: {:4.2f}s | n100 (group) {:5.4f} | r20 (group) {:5.4f} | r50 (group) '
202 | '{:5.4f}'.format(epoch + 1, time.time() - epoch_start_time, n100_group, r20_group, r50_group))
203 | print('-' * 89)
204 |
205 | # Save the model if the n100 is the best we've seen so far.
206 | if n100_group > best_group_n100:
207 | with open(args.save_group, 'wb') as f:
208 | torch.save(model, f)
209 | best_group_n100 = n100_group
210 |
211 | except KeyboardInterrupt:
212 | print('-' * 89)
213 | print('Exiting from training early')
214 |
215 | # Load the best saved model.
216 | with open(args.save_group, 'rb') as f:
217 |     model = torch.load(f, map_location=device)  # load onto the configured device (cpu or cuda)
218 | model = model.to(device)
219 |
220 | # Best validation evaluation
221 | val_loss, n100, r20, r50, _ = evaluate_user(model, val_loader, device, mode='group')
222 | print('=' * 89)
223 | print('| User evaluation | val loss {:4.4f} | n100 {:4.4f} | r20 {:4.4f} | r50 {:4.4f}'
224 | .format(val_loss, n100, r20, r50))
225 |
226 | # Test evaluation
227 | test_loss, n100, r20, r50, _ = evaluate_user(model, test_loader, device, mode='group')
228 | print('=' * 89)
229 | print('| User evaluation | test loss {:4.4f} | n100 {:4.4f} | r20 {:4.4f} | r50 {:4.4f}'
230 | .format(test_loss, n100, r20, r50))
231 |
232 | print('=' * 89)
233 | _, n100_group, r20_group, r50_group, _ = evaluate_group(model, val_group_loader, device)
234 | print('| Group evaluation (val) | n100 (group) {:4.4f} | r20 (group) {:4.4f} | r50 (group) {:4.4f}'
235 | .format(n100_group, r20_group, r50_group))
236 |
237 | print('=' * 89)
238 | _, n100_group, r20_group, r50_group, _ = evaluate_group(model, test_group_loader, device)
239 | print('| Group evaluation (test) | n100 (group) {:4.4f} | r20 (group) {:4.4f} | r50 (group) {:4.4f}'
240 | .format(n100_group, r20_group, r50_group))
241 |
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CrowdDynamicsLab/GroupIM/899890e3c2486ffe40b202718e99cd37b8a85502/utils/__init__.py
--------------------------------------------------------------------------------
/utils/group_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import numpy as np
4 | import pandas as pd
5 | import torch
6 | import scipy.sparse as sp
7 | from sklearn.preprocessing import normalize
8 | from torch.utils import data
9 |
10 |
11 | class TrainGroupDataset(data.Dataset):
12 | """ Train Group Data Loader: load training group-item interactions and individual user item interactions """
13 |
14 | def __init__(self, dataset, n_items, negs_per_group):
15 | self.dataset = dataset
16 | self.n_items = n_items
17 | self.negs_per_group = negs_per_group
18 | self.data_dir = os.path.join('data/', dataset)
19 | self.user_data = self._load_user_data()
20 | self.group_data, self.group_users = self._load_group_data()
21 | self.group_inputs = [self.user_data[self.group_users[g]] for g in self.groups_list]
22 |
23 | def __len__(self):
24 | return len(self.groups_list)
25 |
26 | def get_corrupted_users(self, group):
27 | """ negative user sampling per group (eta balances item-biased and random sampling) """
28 | eta = 0.5
29 | p = np.ones(self.n_users + 1)
30 | p[self.group_users[group]] = 0
31 | p = normalize([p], norm='l1')[0]
32 | item_biased = normalize(self.user_data[:, self.group_data[group].indices].sum(1).squeeze(), norm='l1')[0]
33 | p = eta * item_biased + (1 - eta) * p
34 | negative_users = torch.multinomial(torch.from_numpy(p), self.negs_per_group)
35 | return negative_users
36 |
37 | def __getitem__(self, index):
38 | """ load group_id, padded group users, mask, group items, group member items, negative user items """
39 | group = self.groups_list[index]
40 | user_ids = torch.from_numpy(np.array(self.group_users[group], np.int32)) # [G] group member ids
41 | group_items = torch.from_numpy(self.group_data[group].toarray().squeeze()) # [I] items per group
42 |
43 | corrupted_group = self.get_corrupted_users(group) # [# negs]
44 | corrupted_user_items = torch.from_numpy(self.user_data[corrupted_group].toarray().squeeze()) # [# negs, I]
45 |
46 | # group mask to create fixed-size padded groups.
47 | group_length = self.max_group_size - list(user_ids).count(self.padding_idx)
48 | group_mask = torch.from_numpy(np.concatenate([np.zeros(group_length, dtype=np.float32), (-1) * np.inf *
49 | np.ones(self.max_group_size - group_length,
50 | dtype=np.float32)])) # [G]
51 |
52 | user_items = torch.from_numpy(self.group_inputs[group].toarray()) # [G, |I|] group member items
53 |
54 | return torch.tensor([group]), user_ids, group_mask, group_items, user_items, corrupted_user_items
55 |
56 | def _load_group_data(self):
57 | """ load training group-item interactions as a sparse matrix and user-group memberships """
58 | path_ug = os.path.join(self.data_dir, 'group_users.csv')
59 | path_gi = os.path.join(self.data_dir, 'train_gi.csv')
60 |
61 | df_gi = pd.read_csv(path_gi) # load training group-item interactions.
62 | start_idx, end_idx = df_gi['group'].min(), df_gi['group'].max()
63 | self.n_groups = end_idx - start_idx + 1
64 | rows_gi, cols_gi = df_gi['group'] - start_idx, df_gi['item']
65 |
66 | data_gi = sp.csr_matrix((np.ones_like(rows_gi), (rows_gi, cols_gi)), dtype='float32',
67 | shape=(self.n_groups, self.n_items)) # [# groups, I] sparse matrix.
68 |
69 | df_ug = pd.read_csv(path_ug).astype(int) # load user-group memberships.
70 | df_ug_train = df_ug[df_ug.group.isin(range(start_idx, end_idx + 1))]
71 | df_ug_train = df_ug_train.sort_values('group') # sort in ascending order of group ids.
72 | self.max_group_size = df_ug_train.groupby('group').size().max() # max group size denoted by G
73 |
74 | g_u_list_train = df_ug_train.groupby('group')['user'].apply(list).reset_index()
75 | g_u_list_train['user'] = list(map(lambda x: x + [self.padding_idx] * (self.max_group_size - len(x)),
76 | g_u_list_train.user))
77 | data_gu = np.squeeze(np.array(g_u_list_train[['user']].values.tolist())) # [# groups, G] with padding.
78 | self.groups_list = list(range(0, end_idx - start_idx + 1))
79 |
80 | assert len(df_ug_train['group'].unique()) == self.n_groups
81 | print("# training groups: {}, # max train group size: {}".format(self.n_groups, self.max_group_size))
82 |
83 | return data_gi, data_gu
84 |
85 | def _load_user_data(self):
86 | """ load user-item interactions of all users that appear in training groups, as a sparse matrix """
87 | df_ui = pd.DataFrame()
88 | train_path_ui = os.path.join(self.data_dir, 'train_ui.csv')
89 | df_train_ui = pd.read_csv(train_path_ui)
90 | df_ui = df_ui.append(df_train_ui)
91 |
92 | # include users from the (fold-in item set) of validation and test sets of user-item data.
93 | val_path_ui = os.path.join(self.data_dir, 'val_ui_tr.csv')
94 | df_val_ui = pd.read_csv(val_path_ui)
95 | df_ui = df_ui.append(df_val_ui)
96 |
97 | test_path_ui = os.path.join(self.data_dir, 'test_ui_tr.csv')
98 | df_test_ui = pd.read_csv(test_path_ui)
99 | df_ui = df_ui.append(df_test_ui)
100 |
101 | self.n_users = df_ui['user'].max() + 1
102 | self.padding_idx = self.n_users # padding idx for user when creating groups of fixed size.
103 | assert self.n_items == df_ui['item'].max() + 1
104 | rows_ui, cols_ui = df_ui['user'], df_ui['item']
105 |
106 | data_ui = sp.csr_matrix((np.ones_like(rows_ui), (rows_ui, cols_ui)), dtype='float32',
107 | shape=(self.n_users + 1, self.n_items)) # [U, I] sparse matrix
108 | return data_ui
109 |
110 |
111 | class EvalGroupDataset(data.Dataset):
112 | """ Eval Group Data Loader: load val/test group-item interactions and individual user item interactions """
113 |
114 | def __init__(self, dataset, n_items, padding_idx, datatype='val'):
115 | self.dataset = dataset
116 | self.padding_idx = padding_idx
117 | self.n_items = n_items
118 | self.data_dir = os.path.join('data/', dataset)
119 | self.eval_groups_list = []
120 | self.user_data = self._load_user_data(datatype)
121 | self.eval_group_data, self.eval_group_users = self._load_group_data(datatype)
122 |
123 | def __len__(self):
124 | return len(self.eval_groups_list)
125 |
126 | def __getitem__(self, index):
127 | """ load group_id, padded group users, mask, group items, group member items """
128 | group = self.eval_groups_list[index]
129 | user_ids = self.eval_group_users[group] # [G]
130 | length = self.max_gsize - list(user_ids).count(self.padding_idx)
131 | mask = torch.from_numpy(np.concatenate([np.zeros(length, dtype=np.float32), (-1) * np.inf *
132 | np.ones(self.max_gsize - length, dtype=np.float32)])) # [G]
133 | group_items = torch.from_numpy(self.eval_group_data[group].toarray().squeeze()) # [I]
134 | user_items = torch.from_numpy(self.user_data[user_ids].toarray().squeeze()) # [G, I]
135 |
136 | return torch.tensor([group]), torch.tensor(user_ids), mask, group_items, user_items
137 |
138 | def _load_user_data(self, datatype):
139 | """ load all user-item interactions of users that occur in val/test groups, as a sparse matrix """
140 | df_ui = pd.DataFrame()
141 | train_path_ui = os.path.join(self.data_dir, 'train_ui.csv')
142 | df_train_ui = pd.read_csv(train_path_ui)
143 | df_ui = df_ui.append(df_train_ui)
144 |
145 | val_path_ui = os.path.join(self.data_dir, 'val_ui_tr.csv')
146 | df_val_ui = pd.read_csv(val_path_ui)
147 | df_ui = df_ui.append(df_val_ui)
148 |
149 | if datatype == 'val' or datatype == 'test':
150 | # include eval user set (tr) items (since they might occur in evaluation set)
151 | test_path_ui = os.path.join(self.data_dir, 'test_ui_tr.csv')
152 | df_test_ui = pd.read_csv(test_path_ui)
153 | df_ui = df_ui.append(df_test_ui)
154 |
155 | n_users = df_ui['user'].max() + 1
156 | assert self.n_items == df_ui['item'].max() + 1
157 | rows_ui, cols_ui = df_ui['user'], df_ui['item']
158 | data_ui = sp.csr_matrix((np.ones_like(rows_ui), (rows_ui, cols_ui)), dtype='float32',
159 | shape=(n_users + 1, self.n_items)) # [# users, I] sparse matrix
160 | return data_ui
161 |
162 | def _load_group_data(self, datatype):
163 | """ load val/test group-item interactions as a sparse matrix and user-group memberships """
164 | path_ug = os.path.join(self.data_dir, 'group_users.csv')
165 | path_gi = os.path.join(self.data_dir, '{}_gi.csv'.format(datatype))
166 |
167 | df_gi = pd.read_csv(path_gi) # load group-item interactions
168 | start_idx, end_idx = df_gi['group'].min(), df_gi['group'].max()
169 | self.n_groups = end_idx - start_idx + 1
170 | rows_gi, cols_gi = df_gi['group'] - start_idx, df_gi['item']
171 | data_gi = sp.csr_matrix((np.ones_like(rows_gi), (rows_gi, cols_gi)), dtype='float32',
172 | shape=(self.n_groups, self.n_items)) # [# eval groups, I] sparse matrix
173 |
174 | df_ug = pd.read_csv(path_ug) # load user-group memberships
175 | df_ug_eval = df_ug[df_ug.group.isin(range(start_idx, end_idx + 1))]
176 | df_ug_eval = df_ug_eval.sort_values('group') # sort in ascending order of group ids
177 | self.max_gsize = df_ug_eval.groupby('group').size().max() # max group size denoted by G
178 | g_u_list_eval = df_ug_eval.groupby('group')['user'].apply(list).reset_index()
179 | g_u_list_eval['user'] = list(map(lambda x: x + [self.padding_idx] * (self.max_gsize - len(x)),
180 | g_u_list_eval.user))
181 | data_gu = np.squeeze(np.array(g_u_list_eval[['user']].values.tolist(), dtype=np.int32)) # [# groups, G]
182 | self.eval_groups_list = list(range(0, end_idx - start_idx + 1))
183 | return data_gi, data_gu
184 |
--------------------------------------------------------------------------------
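A minimal sketch of the fixed-size padding scheme shared by both datasets above: a group's member list is right-padded with padding_idx up to the maximum group size G, and the matching additive mask holds 0 for real members and -inf for padding. The concrete numbers below are hypothetical.

import numpy as np
import torch

padding_idx, max_group_size = 100, 4              # hypothetical values
members = [7, 23, 42]                             # a toy group with three members

user_ids = members + [padding_idx] * (max_group_size - len(members))
group_length = max_group_size - user_ids.count(padding_idx)
group_mask = torch.from_numpy(np.concatenate([
    np.zeros(group_length, dtype=np.float32),
    (-1) * np.inf * np.ones(max_group_size - group_length, dtype=np.float32)]))

print(user_ids)                                   # [7, 23, 42, 100]
print(group_mask)                                 # tensor([0., 0., 0., -inf])
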
/utils/user_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import numpy as np
4 | import pandas as pd
5 | import torch
6 | import scipy.sparse as sp
7 | from torch.utils import data
8 |
9 |
10 | class TrainUserDataset(data.Dataset):
11 | """ Train User Data Loader: load training user-item interactions """
12 |
13 | def __init__(self, dataset):
14 | self.dataset = dataset
15 | self.data_dir = os.path.join('data/', dataset)
16 | self.train_data_ui = self._load_train_data()
17 | self.user_list = list(range(self.n_users))
18 |
19 | def __len__(self):
20 | return len(self.user_list)
21 |
22 | def __getitem__(self, index):
23 | """ load user_id, binary vector over items """
24 | user = self.user_list[index]
25 | user_items = torch.from_numpy(self.train_data_ui[user, :].toarray()).squeeze() # [I]
26 | return torch.from_numpy(np.array([user], dtype=np.int32)), user_items
27 |
28 | def _load_train_data(self):
29 | """ load training user-item interactions as a sparse matrix """
30 | path_ui = os.path.join(self.data_dir, 'train_ui.csv')
31 | df_ui = pd.read_csv(path_ui)
32 | self.n_users, self.n_items = df_ui['user'].max() + 1, df_ui['item'].max() + 1
33 | rows_ui, cols_ui = df_ui['user'], df_ui['item']
34 | data_ui = sp.csr_matrix((np.ones_like(rows_ui), (rows_ui, cols_ui)), dtype='float32',
35 | shape=(self.n_users, self.n_items)) # [# train users, I] sparse matrix
36 |
37 | print("# train users", self.n_users, "# items", self.n_items)
38 | return data_ui
39 |
40 |
41 | class EvalUserDataset(data.Dataset):
42 | """ Eval User Data Loader: load val/test user-item interactions with fold-in and held-out item sets """
43 |
44 | def __init__(self, dataset, n_items, datatype='val'):
45 | self.dataset = dataset
46 | self.n_items = n_items
47 | self.data_dir = os.path.join('data/', dataset)
48 | self.data_tr, self.data_te = self._load_tr_te_data(datatype)
49 |
50 | def __len__(self):
51 | return len(self.user_list)
52 |
53 | def __getitem__(self, index):
54 | """ load user_id, fold-in items, held-out items """
55 | user = self.user_list[index]
56 | fold_in, held_out = self.data_tr[user, :].toarray(), self.data_te[user, :].toarray() # [I], [I]
57 |         return user, torch.from_numpy(fold_in).squeeze(), held_out.squeeze()  # user, fold-in items, held-out items.
58 |
59 | def _load_tr_te_data(self, datatype='val'):
60 | """ load user-item interactions of val/test user sets as two sparse matrices of fold-in and held-out items """
61 | ui_tr_path = os.path.join(self.data_dir, '{}_ui_tr.csv'.format(datatype))
62 | ui_te_path = os.path.join(self.data_dir, '{}_ui_te.csv'.format(datatype))
63 |
64 | ui_df_tr, ui_df_te = pd.read_csv(ui_tr_path), pd.read_csv(ui_te_path)
65 |
66 | start_idx = min(ui_df_tr['user'].min(), ui_df_te['user'].min())
67 | end_idx = max(ui_df_tr['user'].max(), ui_df_te['user'].max())
68 |
69 | rows_tr, cols_tr = ui_df_tr['user'] - start_idx, ui_df_tr['item']
70 | rows_te, cols_te = ui_df_te['user'] - start_idx, ui_df_te['item']
71 | self.user_list = list(range(0, end_idx - start_idx + 1))
72 |
73 | ui_data_tr = sp.csr_matrix((np.ones_like(rows_tr), (rows_tr, cols_tr)), dtype='float32',
74 | shape=(end_idx - start_idx + 1, self.n_items)) # [# eval users, I] sparse matrix
75 | ui_data_te = sp.csr_matrix((np.ones_like(rows_te), (rows_te, cols_te)), dtype='float32',
76 | shape=(end_idx - start_idx + 1, self.n_items)) # [# eval users, I] sparse matrix
77 | return ui_data_tr, ui_data_te
78 |
--------------------------------------------------------------------------------