├── .gitattributes
├── .github
│   └── workflows
│       └── ci.yml
├── .gitignore
├── LICENSE
├── README.md
├── data
│   ├── flight
│   │   ├── brazil-airports.edgelist
│   │   ├── europe-airports.edgelist
│   │   ├── labels-brazil-airports.txt
│   │   ├── labels-europe-airports.txt
│   │   ├── labels-usa-airports.txt
│   │   └── usa-airports.edgelist
│   └── wiki
│       ├── Wiki_category.txt
│       ├── Wiki_edgelist.txt
│       └── wiki_labels.txt
├── examples
│   ├── alias.py
│   ├── deepwalk_wiki.py
│   ├── line_wiki.py
│   ├── node2vec_flight.py
│   ├── node2vec_wiki.py
│   ├── sdne_wiki.py
│   └── struc2vec_flight.py
├── ge
│   ├── __init__.py
│   ├── alias.py
│   ├── classify.py
│   ├── models
│   │   ├── __init__.py
│   │   ├── deepwalk.py
│   │   ├── line.py
│   │   ├── node2vec.py
│   │   ├── sdne.py
│   │   └── struc2vec.py
│   ├── utils.py
│   └── walker.py
├── pics
│   ├── code.png
│   ├── deepctrbot.png
│   ├── edge_list.png
│   └── weichennote.png
├── setup.py
└── tests
    ├── Wiki_edgelist.txt
    ├── __init__.py
    ├── deepwalk_test.py
    ├── line_test.py
    ├── node2vec_test.py
    ├── sdne_test.py
    └── struct2vec_test.py
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
name: CI

on:
  push:
    paths:
      - 'ge/*'
      - 'tests/*'
  pull_request:
    paths:
      - 'ge/*'
      - 'tests/*'

jobs:
  build:

    runs-on: ubuntu-latest
    timeout-minutes: 180
    strategy:
      matrix:
        python-version: [3.6, 3.7, 3.8]
        tf-version: [1.4.0, 1.15.0, 2.5.0, 2.6.0, 2.7.0, 2.8.0, 2.9.0]

        exclude:
          - python-version: 3.7
            tf-version: 1.4.0
          - python-version: 3.7
            tf-version: 1.15.0
          - python-version: 3.8
            tf-version: 1.4.0
          - python-version: 3.8
            tf-version: 1.14.0
          - python-version: 3.8
            tf-version: 1.15.0
          - python-version: 3.6
            tf-version: 2.7.0
          - python-version: 3.6
            tf-version: 2.8.0
          - python-version: 3.6
            tf-version: 2.9.0
          - python-version: 3.9
            tf-version: 1.4.0
          - python-version: 3.9
            tf-version: 1.15.0
          - python-version: 3.9
            tf-version: 2.2.0
    steps:

      - uses: actions/checkout@v3

      - name: Setup python environment
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          pip3 install -q tensorflow==${{ matrix.tf-version }}
          pip install -q protobuf==3.19.0
          pip install -q requests
          pip install -e .
      - name: Test with pytest
        timeout-minutes: 180
        run: |
          pip install -q pytest
          pip install -q pytest-cov
          pip install -q python-coveralls
          pytest --cov=ge --cov-report=xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3.1.0
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ./coverage.xml
          flags: pytest
          name: py${{ matrix.python-version }}-tf${{ matrix.tf-version }}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .nox/
42 | .coverage
43 | .coverage.*
44 | .cache
45 | nosetests.xml
46 | coverage.xml
47 | *.cover
48 | .hypothesis/
49 | .pytest_cache/
50 |
51 | # Translations
52 | *.mo
53 | *.pot
54 |
55 | # Django stuff:
56 | *.log
57 | local_settings.py
58 | db.sqlite3
59 |
60 | # Flask stuff:
61 | instance/
62 | .webassets-cache
63 |
64 | # Scrapy stuff:
65 | .scrapy
66 |
67 | # Sphinx documentation
68 | docs/_build/
69 |
70 | # PyBuilder
71 | target/
72 |
73 | # Jupyter Notebook
74 | .ipynb_checkpoints
75 |
76 | # IPython
77 | profile_default/
78 | ipython_config.py
79 |
80 | # pyenv
81 | .python-version
82 |
83 | # celery beat schedule file
84 | celerybeat-schedule
85 |
86 | # SageMath parsed files
87 | *.sage.py
88 |
89 | # Environments
90 | .env
91 | .venv
92 | env/
93 | venv/
94 | ENV/
95 | env.bak/
96 | venv.bak/
97 |
98 | # Spyder project settings
99 | .spyderproject
100 | .spyproject
101 |
102 | # Rope project settings
103 | .ropeproject
104 |
105 | # mkdocs documentation
106 | /site
107 |
108 | # mypy
109 | .mypy_cache/
110 | .dmypy.json
111 | dmypy.json
112 |
113 | # Pyre type checker
114 | .pyre/
115 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Weichen Shen
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# GraphEmbedding

[GitHub Issues](https://github.com/shenweichen/graphembedding/issues) ·
[codecov](https://codecov.io/gh/shenweichen/graphembedding) ·
[Codacy](https://www.codacy.com/gh/shenweichen/GraphEmbedding/dashboard?utm_source=github.com&utm_medium=referral&utm_content=shenweichen/GraphEmbedding&utm_campaign=Badge_Grade) ·
[Discussion Group](./README.md#discussion-group--related-projects)

# Method

| Model     | Paper                                                                                                                       | Note                                                                                                    |
| :-------: | :-------------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------- |
| DeepWalk  | [KDD 2014][DeepWalk: Online Learning of Social Representations](http://www.perozzi.net/publications/14_kdd_deepwalk.pdf)   | [DeepWalk: algorithm, implementation and application (Chinese)](https://zhuanlan.zhihu.com/p/56380812)   |
| LINE      | [WWW 2015][LINE: Large-scale Information Network Embedding](https://arxiv.org/pdf/1503.03578.pdf)                            | [LINE: algorithm, implementation and application (Chinese)](https://zhuanlan.zhihu.com/p/56478167)       |
| Node2Vec  | [KDD 2016][node2vec: Scalable Feature Learning for Networks](https://www.kdd.org/kdd2016/papers/files/rfp0218-groverA.pdf)  | [Node2Vec: algorithm, implementation and application (Chinese)](https://zhuanlan.zhihu.com/p/56542707)   |
| SDNE      | [KDD 2016][Structural Deep Network Embedding](https://www.kdd.org/kdd2016/papers/files/rfp0191-wangAemb.pdf)                 | [SDNE: algorithm, implementation and application (Chinese)](https://zhuanlan.zhihu.com/p/56637181)       |
| Struc2Vec | [KDD 2017][struc2vec: Learning Node Representations from Structural Identity](https://arxiv.org/pdf/1704.03165.pdf)          | [Struc2Vec: algorithm, implementation and application (Chinese)](https://zhuanlan.zhihu.com/p/56733145)  |

# How to run examples

1. Clone the repo and make sure `tensorflow` or `tensorflow-gpu` is installed on your local machine.
2. Run the following commands:

```bash
python setup.py install
cd examples
python deepwalk_wiki.py
```

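If you plan to modify the library itself, an editable install (the same command the CI workflow uses) is more convenient, since code changes take effect without re-running `setup.py`:

```bash
pip install -e .
```
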
## Discussion Group & Related Projects

- WeChat Official Account (公众号): 浅梦学习笔记
- WeChat (微信): deepctrbot

# Usage

The design and implementation follow a simple principle (**graph in, embedding out**) as much as possible.

## Input format

We use `networkx` to create graphs. The input is a plain-text edge list, one edge per line:
`node1 node2 <edge_weight>`

![](pics/edge_list.png)
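
For example, a minimal weighted edge list (the node ids and weights below are made up) looks like:

```
0 1 1
0 2 2
1 2 1
```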
## DeepWalk

```python
import networkx as nx
from ge import DeepWalk

G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt', create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])  # read graph

model = DeepWalk(G, walk_length=10, num_walks=80, workers=1)  # init model
model.train(window_size=5, iter=3)  # train model
embeddings = model.get_embeddings()  # get embedding vectors
```

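The returned `embeddings` map node ids to vectors, which you can feed to any downstream classifier. A minimal sketch of node-classification evaluation using the bundled `ge/classify.py` helper; `read_node_label`, `Classifier`, and `split_train_evaluate` are assumed interfaces of that module, which is not shown in this section:

```python
from sklearn.linear_model import LogisticRegression

from ge.classify import read_node_label, Classifier  # assumed interface of ge/classify.py

# wiki_labels.txt maps each node id to a class label
X, Y = read_node_label('../data/wiki/wiki_labels.txt')
clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
clf.split_train_evaluate(X, Y, 0.8)  # train on 80% of nodes, evaluate on the rest
```
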
## LINE

```python
import networkx as nx
from ge import LINE

G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt', create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])  # read graph

model = LINE(G, embedding_size=128, order='second')  # init model; order can be 'first', 'second' or 'all'
model.train(batch_size=1024, epochs=50, verbose=2)  # train model
embeddings = model.get_embeddings()  # get embedding vectors
```
## Node2Vec

```python
import networkx as nx
from ge import Node2Vec

G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt',
                     create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])  # read graph

model = Node2Vec(G, walk_length=10, num_walks=80, p=0.25, q=4, workers=1)  # init model
model.train(window_size=5, iter=3)  # train model
embeddings = model.get_embeddings()  # get embedding vectors
```
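
The return parameter `p` and in-out parameter `q` bias the walk, as described in the node2vec paper: the setting above (`p=0.25`, `q=4`) keeps each walk close to its starting neighborhood. With `p = q = 1` the walk is an unbiased random walk, so Node2Vec reduces to DeepWalk; a quick equivalence check on your own graph:

```python
# With p = q = 1 the transition probabilities are uniform over neighbors,
# so this should behave like the DeepWalk model above (up to sampling randomness).
baseline = Node2Vec(G, walk_length=10, num_walks=80, p=1, q=1, workers=1)
baseline.train(window_size=5, iter=3)
```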
## SDNE

```python
import networkx as nx
from ge import SDNE

G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt', create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])  # read graph

model = SDNE(G, hidden_size=[256, 128])  # init model
model.train(batch_size=3000, epochs=40, verbose=2)  # train model
embeddings = model.get_embeddings()  # get embedding vectors
```

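Every model exposes the same `get_embeddings()` call, so the vectors can be inspected with standard tooling. A minimal visualization sketch, assuming `get_embeddings()` returns a dict mapping node id to vector (as the examples above suggest) and that `scikit-learn` and `matplotlib` are installed:

```python
import numpy as np
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE

nodes = list(embeddings.keys())
vectors = np.array([embeddings[n] for n in nodes])

coords = TSNE(n_components=2).fit_transform(vectors)  # project embeddings to 2-D
plt.scatter(coords[:, 0], coords[:, 1], s=5)
plt.show()
```
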
## Struc2Vec

```python
import networkx as nx
from ge import Struc2Vec

G = nx.read_edgelist('../data/flight/brazil-airports.edgelist', create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])  # read graph

model = Struc2Vec(G, walk_length=10, num_walks=80, workers=4, verbose=40)  # init model
model.train(window_size=5, iter=3)  # train model
embeddings = model.get_embeddings()  # get embedding vectors
```

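Unlike the proximity-based models above, struc2vec embeds *structural roles*: nodes whose local topology looks alike get similar vectors even if they are far apart in the graph. That is why this example switches from the wiki graph to the airport graphs under `data/flight/`, whose label files (e.g. `labels-brazil-airports.txt`, classes 0–3) are meant to reflect an airport's structural role rather than its community.
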
--------------------------------------------------------------------------------
/data/flight/brazil-airports.edgelist:
--------------------------------------------------------------------------------
1 | 7 77
2 | 29 50
3 | 3 35
4 | 9 84
5 | 25 82
6 | 6 28
7 | 64 51
8 | 108 74
9 | 19 4
10 | 2 84
11 | 7 25
12 | 56 25
13 | 24 66
14 | 23 26
15 | 30 51
16 | 10 61
17 | 50 84
18 | 65 84
19 | 53 53
20 | 9 0
21 | 30 15
22 | 45 35
23 | 10 7
24 | 87 52
25 | 106 19
26 | 70 53
27 | 58 130
28 | 6 98
29 | 103 80
30 | 7 19
31 | 36 40
32 | 25 15
33 | 15 4
34 | 4 50
35 | 2 27
36 | 7 98
37 | 9 123
38 | 1 54
39 | 58 65
40 | 4 5
41 | 96 77
42 | 9 127
43 | 50 27
44 | 1 120
45 | 55 2
46 | 79 1
47 | 3 40
48 | 36 67
49 | 75 15
50 | 67 7
51 | 0 67
52 | 61 69
53 | 9 39
54 | 6 41
55 | 36 41
56 | 49 71
57 | 31 51
58 | 46 2
59 | 49 67
60 | 9 9
61 | 57 7
62 | 6 67
63 | 36 5
64 | 34 3
65 | 58 4
66 | 30 69
67 | 80 25
68 | 61 25
69 | 10 66
70 | 15 71
71 | 36 29
72 | 97 25
73 | 2 18
74 | 69 58
75 | 71 71
76 | 9 124
77 | 51 5
78 | 54 54
79 | 43 91
80 | 38 51
81 | 29 4
82 | 40 51
83 | 30 27
84 | 4 42
85 | 34 70
86 | 7 15
87 | 22 45
88 | 61 94
89 | 3 55
90 | 18 5
91 | 0 84
92 | 25 70
93 | 4 48
94 | 117 71
95 | 15 70
96 | 9 50
97 | 66 37
98 | 3 68
99 | 54 64
100 | 58 15
101 | 106 5
102 | 45 47
103 | 10 19
104 | 25 25
105 | 49 52
106 | 63 66
107 | 0 5
108 | 24 2
109 | 98 25
110 | 83 87
111 | 27 31
112 | 21 77
113 | 54 21
114 | 38 40
115 | 2 7
116 | 74 30
117 | 29 29
118 | 3 22
119 | 77 3
120 | 10 71
121 | 8 8
122 | 6 1
123 | 30 30
124 | 87 87
125 | 9 121
126 | 7 4
127 | 65 5
128 | 5 20
129 | 3 60
130 | 24 87
131 | 6 27
132 | 54 33
133 | 112 24
134 | 129 9
135 | 61 49
136 | 5 71
137 | 74 75
138 | 75 75
139 | 6 61
140 | 36 37
141 | 2 117
142 | 108 108
143 | 125 125
144 | 87 49
145 | 9 29
146 | 80 80
147 | 6 87
148 | 36 3
149 | 1 103
150 | 27 52
151 | 127 42
152 | 61 5
153 | 41 71
154 | 74 63
155 | 69 69
156 | 25 4
157 | 36 8
158 | 78 25
159 | 2 30
160 | 0 50
161 | 74 25
162 | 3 31
163 | 1 51
164 | 58 68
165 | 30 5
166 | 40 5
167 | 2 56
168 | 50 30
169 | 9 90
170 | 66 29
171 | 87 74
172 | 42 43
173 | 15 84
174 | 7 27
175 | 50 50
176 | 29 42
177 | 18 17
178 | 32 50
179 | 70 70
180 | 6 52
181 | 36 44
182 | 6 21
183 | 8 7
184 | 50 82
185 | 122 126
186 | 61 36
187 | 56 40
188 | 36 10
189 | 10 5
190 | 65 65
191 | 63 50
192 | 61 30
193 | 0 19
194 | 94 117
195 | 97 6
196 | 2 25
197 | 94 94
198 | 74 0
199 | 98 5
200 | 74 74
201 | 3 4
202 | 18 84
203 | 95 4
204 | 25 55
205 | 67 120
206 | 54 25
207 | 28 4
208 | 77 15
209 | 51 36
210 | 40 31
211 | 4 45
212 | 1 46
213 | 3 48
214 | 9 37
215 | 24 75
216 | 36 43
217 | 2 71
218 | 9 128
219 | 22 2
220 | 37 42
221 | 61 61
222 | 27 117
223 | 42 39
224 | 25 101
225 | 3 65
226 | 69 45
227 | 2 97
228 | 15 97
229 | 54 27
230 | 80 27
231 | 45 0
232 | 63 15
233 | 36 25
234 | 45 52
235 | 36 50
236 | 15 15
237 | 63 1
238 | 7 49
239 | 0 0
240 | 50 41
241 | 3 13
242 | 51 7
243 | 15 53
244 | 28 3
245 | 122 124
246 | 31 61
247 | 5 24
248 | 101 27
249 | 2 36
250 | 36 38
251 | 65 2
252 | 79 54
253 | 3 57
254 | 27 84
255 | 24 82
256 | 25 68
257 | 47 25
258 | 36 82
259 | 43 9
260 | 41 41
261 | 0 2
262 | 79 92
263 | 33 26
264 | 58 120
265 | 50 102
266 | 25 110
267 | 33 84
268 | 3 70
269 | 36 56
270 | 60 45
271 | 2 120
272 | 67 58
273 | 7 61
274 | 68 61
275 | 24 30
276 | 5 68
277 | 6 82
278 | 45 45
279 | 25 31
280 | 37 91
281 | 0 7
282 | 74 50
283 | 9 82
284 | 51 9
285 | 25 1
286 | 15 50
287 | 21 75
288 | 2 5
289 | 7 22
290 | 83 83
291 | 3 24
292 | 77 1
293 | 10 69
294 | 6 7
295 | 123 123
296 | 88 7
297 | 8 51
298 | 2 63
299 | 31 5
300 | 79 63
301 | 3 62
302 | 77 27
303 | 92 21
304 | 25 77
305 | 6 25
306 | 1 84
307 | 84 99
308 | 63 67
309 | 6 83
310 | 94 7
311 | 127 127
312 | 23 25
313 | 0 71
314 | 3 79
315 | 36 39
316 | 1 67
317 | 8 2
318 | 120 50
319 | 44 39
320 | 89 89
321 | 62 4
322 | 45 2
323 | 72 5
324 | 54 97
325 | 53 87
326 | 1 31
327 | 49 5
328 | 97 3
329 | 93 93
330 | 7 97
331 | 74 7
332 | 66 91
333 | 79 30
334 | 3 1
335 | 49 49
336 | 102 2
337 | 85 74
338 | 31 42
339 | 5 27
340 | 79 4
341 | 3 39
342 | 101 23
343 | 96 51
344 | 120 84
345 | 36 0
346 | 58 53
347 | 0 64
348 | 53 48
349 | 29 40
350 | 51 68
351 | 6 42
352 | 45 5
353 | 1 68
354 | 2 74
355 | 8 9
356 | 50 80
357 | 64 21
358 | 9 4
359 | 24 108
360 | 66 42
361 | 60 25
362 | 108 3
363 | 30 70
364 | 61 28
365 | 76 84
366 | 7 64
367 | 54 63
368 | 25 19
369 | 21 93
370 | 53 45
371 | 6 120
372 | 77 51
373 | 25 53
374 | 15 46
375 | 21 71
376 | 50 5
377 | 67 51
378 | 113 51
379 | 9 65
380 | 0 30
381 | 46 71
382 | 10 42
383 | 8 42
384 | 7 18
385 | 3 50
386 | 29 39
387 | 24 77
388 | 25 65
389 | 81 108
390 | 4 53
391 | 1 93
392 | 2 69
393 | 15 69
394 | 7 40
395 | 22 0
396 | 29 91
397 | 33 24
398 | 25 107
399 | 6 71
400 | 3 67
401 | 54 77
402 | 10 10
403 | 24 25
404 | 3 105
405 | 45 50
406 | 58 77
407 | 68 15
408 | 58 108
409 | 80 51
410 | 7 87
411 | 24 63
412 | 53 5
413 | 3 15
414 | 10 38
415 | 75 4
416 | 58 84
417 | 28 29
418 | 76 7
419 | 5 13
420 | 79 10
421 | 3 21
422 | 9 74
423 | 6 2
424 | 30 31
425 | 41 5
426 | 29 58
427 | 3 59
428 | 7 130
429 | 24 84
430 | 6 36
431 | 60 49
432 | 50 66
433 | 94 2
434 | 9 41
435 | 74 76
436 | 122 66
437 | 3 72
438 | 36 58
439 | 93 55
440 | 7 63
441 | 75 63
442 | 2 61
443 | 23 15
444 | 10 31
445 | 64 64
446 | 26 26
447 | 0 25
448 | 36 71
449 | 1 4
450 | 66 74
451 | 103 21
452 | 2 3
453 | 0 63
454 | 29 25
455 | 25 41
456 | 6 5
457 | 21 51
458 | 65 130
459 | 65 27
460 | 84 87
461 | 40 9
462 | 56 50
463 | 5 16
464 | 113 7
465 | 25 83
466 | 6 31
467 | 64 50
468 | 36 91
469 | 19 5
470 | 67 31
471 | 7 30
472 | 63 77
473 | 113 25
474 | 94 5
475 | 18 18
476 | 23 27
477 | 6 49
478 | 21 7
479 | 2 113
480 | 58 18
481 | 0 97
482 | 79 77
483 | 122 122
484 | 77 77
485 | 121 122
486 | 71 27
487 | 45 36
488 | 130 84
489 | 23 23
490 | 7 69
491 | 42 91
492 | 62 30
493 | 99 76
494 | 69 65
495 | 15 5
496 | 97 1
497 | 7 99
498 | 24 51
499 | 3 3
500 | 51 61
501 | 25 50
502 | 58 64
503 | 46 46
504 | 95 69
505 | 22 51
506 | 3 41
507 | 67 2
508 | 25 84
509 | 6 22
510 | 45 105
511 | 58 51
512 | 76 33
513 | 22 21
514 | 64 1
515 | 68 7
516 | 66 51
517 | 6 40
518 | 30 53
519 | 45 3
520 | 32 24
521 | 80 2
522 | 9 10
523 | 25 96
524 | 81 3
525 | 119 66
526 | 10 1
527 | 58 7
528 | 21 113
529 | 22 97
530 | 10 94
531 | 7 66
532 | 32 58
533 | 38 4
534 | 96 27
535 | 2 21
536 | 39 65
537 | 24 58
538 | 9 125
539 | 51 2
540 | 2 15
541 | 9 126
542 | 33 75
543 | 46 55
544 | 79 15
545 | 9 71
546 | 10 95
547 | 25 93
548 | 6 9
549 | 4 41
550 | 7 106
551 | 84 67
552 | 58 58
553 | 41 2
554 | 46 25
555 | 3 52
556 | 18 4
557 | 24 79
558 | 25 71
559 | 34 35
560 | 8 50
561 | 7 42
562 | 46 3
563 | 94 25
564 | 18 30
565 | 74 67
566 | 66 36
567 | 6 69
568 | 3 69
569 | 84 23
570 | 67 57
571 | 106 4
572 | 24 27
573 | 66 82
574 | 3 107
575 | 45 48
576 | 25 26
577 | 0 4
578 | 24 1
579 | 123 125
580 | 1 1
581 | 64 97
582 | 41 39
583 | 28 31
584 | 120 120
585 | 0 58
586 | 5 11
587 | 77 4
588 | 1 71
589 | 54 84
590 | 6 0
591 | 7 5
592 | 65 6
593 | 5 21
594 | 79 50
595 | 3 61
596 | 77 30
597 | 24 86
598 | 15 1
599 | 23 6
600 | 10 41
601 | 67 24
602 | 22 25
603 | 7 80
604 | 94 0
605 | 18 25
606 | 42 42
607 | 47 3
608 | 70 2
609 | 36 36
610 | 10 51
611 | 58 9
612 | 45 21
613 | 18 51
614 | 10 120
615 | 10 29
616 | 41 28
617 | 8 25
618 | 0 27
619 | 57 51
620 | 98 19
621 | 25 5
622 | 98 2
623 | 2 1
624 | 74 24
625 | 6 130
626 | 58 71
627 | 30 4
628 | 4 31
629 | 84 111
630 | 50 97
631 | 63 97
632 | 59 48
633 | 9 91
634 | 57 49
635 | 6 29
636 | 21 27
637 | 36 69
638 | 0 77
639 | 0 75
640 | 29 43
641 | 127 123
642 | 65 51
643 | 4 67
644 | 10 58
645 | 32 3
646 | 79 79
647 | 9 7
648 | 5 73
649 | 38 41
650 | 36 4
651 | 36 9
652 | 10 4
653 | 71 84
654 | 52 58
655 | 30 67
656 | 69 1
657 | 9 63
658 | 7 71
659 | 24 15
660 | 5 83
661 | 71 51
662 | 120 3
663 | 51 25
664 | 79 71
665 | 125 89
666 | 7 93
667 | 24 53
668 | 98 4
669 | 3 5
670 | 9 122
671 | 51 63
672 | 25 48
673 | 75 51
674 | 21 58
675 | 90 129
676 | 2 50
677 | 0 46
678 | 61 106
679 | 1 47
680 | 57 58
681 | 43 123
682 | 41 31
683 | 50 51
684 | 69 67
685 | 3 49
686 | 59 60
687 | 83 27
688 | 6 46
689 | 36 42
690 | 38 106
691 | 58 27
692 | 7 47
693 | 37 43
694 | 98 98
695 | 25 102
696 | 6 64
697 | 3 94
698 | 29 53
699 | 10 15
700 | 2 96
701 | 58 5
702 | 70 45
703 | 42 64
704 | 48 51
705 | 43 125
706 | 119 82
707 | 9 1
708 | 87 5
709 | 0 15
710 | 66 0
711 | 74 3
712 | 3 10
713 | 75 3
714 | 21 67
715 | 88 25
716 | 28 2
717 | 45 15
718 | 51 42
719 | 75 25
720 | 6 15
721 | 64 2
722 | 17 5
723 | 31 29
724 | 24 81
725 | 25 69
726 | 40 43
727 | 123 89
728 | 74 71
729 | 50 71
730 | 79 93
731 | 94 31
732 | 24 4
733 | 3 71
734 | 79 23
735 | 5 69
736 | 45 46
737 | 51 84
738 | 15 21
739 | 39 56
740 | 22 80
741 | 7 83
742 | 74 53
743 | 6 119
744 | 15 51
745 | 75 77
746 | 28 25
747 | 71 4
748 | 6 51
749 | 79 22
750 | 3 25
751 | 77 2
752 | 51 51
753 | 6 6
754 | 23 3
755 | 84 84
756 | 76 25
757 | 7 7
758 | 71 30
759 | 3 63
760 | 6 24
761 | 45 115
762 | 108 87
763 | 10 82
764 | 27 130
765 | 42 40
766 | 6 50
767 | 45 13
768 | 119 10
769 | 0 23
770 | 37 39
771 | 89 90
772 | 6 84
773 | 3 82
774 | 1 102
775 | 15 18
776 | 30 94
777 | 61 4
778 | 24 10
779 | 5 104
780 | 77 33
781 | 8 65
782 | 27 33
783 | 19 61
784 | 2 31
785 | 0 51
786 | 96 96
787 | 3 30
788 | 1 50
789 | 21 63
790 | 54 7
791 | 2 57
792 | 50 31
793 | 45 116
794 | 79 5
795 | 9 89
796 | 25 87
797 | 6 19
798 | 21 25
799 | 88 51
800 | 75 67
801 | 0 79
802 | 5 38
803 | 57 25
804 | 6 53
805 | 45 6
806 | 66 127
807 | 58 30
808 | 7 48
809 | 9 5
810 | 25 99
811 | 6 79
812 | 10 2
813 | 15 27
814 | 4 4
815 | 45 1
816 | 81 32
817 | 75 54
818 | 24 9
819 | 127 125
820 | 50 60
821 | 74 1
822 | 41 69
823 | 126 66
824 | 3 7
825 | 66 123
826 | 99 27
827 | 30 13
828 | 28 5
829 | 2 48
830 | 85 84
831 | 31 8
832 | 5 5
833 | 79 2
834 | 9 66
835 | 51 39
836 | 6 10
837 | 19 41
838 | 36 119
839 | 2 42
840 | 4 69
841 | 3 51
842 | 25 66
843 | 10 98
844 | 19 19
845 | 2 68
846 | 58 25
847 | 7 41
848 | 56 9
849 | 7 84
850 | 33 25
851 | 25 100
852 | 90 90
853 | 3 64
854 | 45 25
855 | 31 71
856 | 24 24
857 | 62 5
858 | 45 51
859 | 45 27
860 | 0 1
861 | 51 120
862 | 53 26
863 | 27 27
864 | 25 63
865 | 58 87
866 | 6 18
867 | 66 4
868 | 5 14
869 | 70 84
870 | 3 18
871 | 97 84
872 | 82 82
873 | 45 78
874 | 66 66
875 | 31 31
876 | 55 50
877 | 3 56
878 | 27 87
879 | 24 83
880 | 66 6
881 | 6 39
882 | 21 21
883 | 4 51
884 | 60 48
885 | 84 53
886 | 46 7
887 | 59 45
888 | 25 109
889 | 6 57
890 | 3 73
891 | 60 46
892 | 67 71
893 | 28 71
894 | 9 25
895 | 5 67
896 | 7 102
897 | 36 7
898 | 10 30
899 | 25 30
900 | 114 25
901 | 68 28
902 | 70 60
903 | 0 24
904 | 74 51
905 | 70 48
906 | 7 120
907 | 1 5
908 | 80 79
909 | 2 2
910 | 7 107
911 | 48 24
912 | 3 27
913 | 51 53
914 | 6 4
915 | 30 1
916 | 2 60
917 | 76 27
918 | 7 1
919 | 53 55
920 | 92 22
921 | 66 1
922 | 6 30
923 | 19 2
924 | 7 31
925 | 97 92
926 | 94 4
927 | 83 84
928 | 51 69
929 | 23 24
930 | 40 58
931 | 10 63
932 | 7 53
933 | 37 37
934 | 59 24
935 | 69 27
936 | 6 74
937 | 3 84
938 | 10 25
939 | 2 106
940 | 0 31
941 | 74 58
942 | 96 3
943 | 53 28
944 | 49 4
945 | 61 45
946 | 7 96
947 | 24 50
948 | 79 25
949 | 3 0
950 | 10 77
951 | 127 89
952 | 42 5
953 | 4 27
954 | 79 7
955 | 3 38
956 | 125 121
957 | 36 65
958 | 58 50
959 | 79 45
960 | 21 1
961 | 76 76
962 | 60 60
963 | 58 28
964 | 7 50
965 | 97 79
966 | 80 1
967 | 79 75
968 | 57 1
969 | 45 84
970 | 89 124
971 | 10 0
972 | 58 6
973 | 39 40
974 | 63 63
975 | 124 123
976 | 23 7
977 | 48 48
978 | 54 76
979 | 1 23
980 | 122 89
981 | 74 15
982 | 3 9
983 | 77 50
984 | 15 41
985 | 4 2
986 | 28 7
987 | 84 32
988 | 22 53
989 | 91 91
990 | 58 31
991 | 51 41
992 | 6 8
993 | 53 27
994 | 4 40
995 | 2 40
996 | 55 55
997 | 3 53
998 | 9 42
999 | 25 64
1000 | 40 40
1001 | 21 10
1002 | 1 92
1003 | 2 66
1004 | 22 1
1005 | 55 45
1006 | 61 58
1007 | 6 68
1008 | 3 66
1009 | 69 71
1010 | 58 1
1011 | 64 84
1012 | 42 58
1013 | 90 66
1014 | 45 49
1015 | 25 27
1016 | 22 93
1017 | 50 53
1018 | 36 31
1019 | 75 7
1020 | 4 84
1021 | 28 30
1022 | 2 9
1023 | 5 12
1024 | 79 21
1025 | 53 2
1026 | 27 51
1027 | 25 39
1028 | 6 3
1029 | 4 23
1030 | 65 7
1031 | 66 58
1032 | 79 51
1033 | 3 58
1034 | 77 31
1035 | 24 85
1036 | 25 73
1037 | 38 69
1038 | 45 118
1039 | 10 40
1040 | 2 93
1041 | 67 25
1042 | 50 67
1043 | 46 5
1044 | 74 77
1045 | 3 29
1046 | 119 9
1047 | 52 5
1048 | 97 51
1049 | 78 79
1050 | 27 70
1051 | 9 31
1052 | 0 21
1053 | 53 71
1054 | 10 28
1055 | 66 81
1056 | 67 67
1057 | 41 42
1058 | 26 27
1059 | 41 65
1060 | 24 7
1061 | 6 38
1062 | 1 27
1063 | 29 37
1064 | 29 5
1065 | 78 27
1066 | 45 7
1067 | 74 27
1068 | 111 3
1069 | 25 40
1070 | 49 27
1071 | 21 50
1072 | 54 0
1073 | 28 51
1074 | 2 58
1075 |
--------------------------------------------------------------------------------
/data/flight/labels-brazil-airports.txt:
--------------------------------------------------------------------------------
1 | node label
2 | 0 0
3 | 1 0
4 | 2 0
5 | 3 0
6 | 4 0
7 | 5 0
8 | 6 0
9 | 7 0
10 | 8 1
11 | 9 0
12 | 10 0
13 | 11 3
14 | 12 3
15 | 13 2
16 | 14 3
17 | 15 0
18 | 16 3
19 | 17 3
20 | 18 1
21 | 19 1
22 | 20 3
23 | 21 0
24 | 22 1
25 | 23 1
26 | 24 0
27 | 25 0
28 | 26 1
29 | 27 0
30 | 28 0
31 | 29 1
32 | 30 0
33 | 31 0
34 | 32 2
35 | 33 1
36 | 34 3
37 | 35 3
38 | 36 0
39 | 37 2
40 | 38 1
41 | 39 1
42 | 40 1
43 | 41 0
44 | 42 1
45 | 43 2
46 | 44 3
47 | 45 1
48 | 46 2
49 | 47 2
50 | 48 1
51 | 49 1
52 | 50 0
53 | 51 0
54 | 52 1
55 | 53 0
56 | 54 1
57 | 55 2
58 | 56 2
59 | 57 2
60 | 58 0
61 | 59 2
62 | 60 1
63 | 61 0
64 | 62 2
65 | 63 1
66 | 64 0
67 | 65 1
68 | 66 0
69 | 67 3
70 | 68 3
71 | 69 0
72 | 70 1
73 | 71 0
74 | 72 3
75 | 73 3
76 | 74 0
77 | 75 1
78 | 76 2
79 | 77 0
80 | 78 2
81 | 79 0
82 | 80 1
83 | 81 2
84 | 82 1
85 | 83 2
86 | 84 1
87 | 85 2
88 | 86 2
89 | 87 1
90 | 88 3
91 | 89 2
92 | 90 2
93 | 91 2
94 | 92 2
95 | 93 1
96 | 94 1
97 | 95 3
98 | 96 1
99 | 97 1
100 | 98 1
101 | 99 2
102 | 100 2
103 | 101 2
104 | 102 2
105 | 103 2
106 | 104 3
107 | 105 3
108 | 106 2
109 | 107 3
110 | 108 3
111 | 109 3
112 | 110 3
113 | 111 3
114 | 112 3
115 | 113 2
116 | 114 3
117 | 115 3
118 | 116 3
119 | 117 3
120 | 118 3
121 | 119 2
122 | 120 3
123 | 121 3
124 | 122 3
125 | 123 2
126 | 124 3
127 | 125 2
128 | 126 3
129 | 127 1
130 | 128 3
131 | 129 3
132 | 130 2
133 |
--------------------------------------------------------------------------------
/data/flight/labels-europe-airports.txt:
--------------------------------------------------------------------------------
1 | node label
2 | 0 1
3 | 1 1
4 | 2 2
5 | 3 1
6 | 4 0
7 | 5 0
8 | 6 0
9 | 7 0
10 | 8 0
11 | 9 0
12 | 10 0
13 | 11 0
14 | 12 0
15 | 13 1
16 | 14 0
17 | 15 0
18 | 16 0
19 | 17 1
20 | 18 0
21 | 19 0
22 | 20 0
23 | 21 0
24 | 22 0
25 | 23 0
26 | 24 1
27 | 25 1
28 | 26 0
29 | 27 0
30 | 28 3
31 | 29 0
32 | 30 1
33 | 31 0
34 | 32 1
35 | 33 0
36 | 34 0
37 | 35 0
38 | 36 0
39 | 37 0
40 | 38 0
41 | 39 0
42 | 40 0
43 | 41 0
44 | 42 3
45 | 43 0
46 | 44 0
47 | 45 0
48 | 46 1
49 | 47 0
50 | 48 3
51 | 49 1
52 | 50 0
53 | 51 0
54 | 52 1
55 | 53 0
56 | 54 0
57 | 55 0
58 | 56 0
59 | 57 0
60 | 58 0
61 | 59 1
62 | 60 0
63 | 61 0
64 | 62 0
65 | 63 0
66 | 64 0
67 | 65 0
68 | 66 0
69 | 67 0
70 | 68 1
71 | 69 1
72 | 70 0
73 | 71 0
74 | 72 0
75 | 73 0
76 | 74 0
77 | 75 0
78 | 76 1
79 | 77 0
80 | 78 0
81 | 79 0
82 | 80 0
83 | 81 2
84 | 82 0
85 | 83 0
86 | 84 1
87 | 85 2
88 | 86 0
89 | 87 0
90 | 88 0
91 | 89 0
92 | 90 0
93 | 91 1
94 | 92 1
95 | 93 0
96 | 94 0
97 | 95 1
98 | 96 0
99 | 97 0
100 | 98 1
101 | 99 0
102 | 100 0
103 | 101 0
104 | 102 1
105 | 103 0
106 | 104 0
107 | 105 0
108 | 106 0
109 | 107 2
110 | 108 3
111 | 109 3
112 | 110 1
113 | 111 2
114 | 112 3
115 | 113 2
116 | 114 1
117 | 115 3
118 | 116 2
119 | 117 1
120 | 118 1
121 | 119 0
122 | 120 1
123 | 121 1
124 | 122 2
125 | 123 3
126 | 124 3
127 | 125 1
128 | 126 3
129 | 127 3
130 | 128 3
131 | 129 1
132 | 130 1
133 | 131 1
134 | 132 1
135 | 133 1
136 | 134 1
137 | 135 2
138 | 136 1
139 | 137 2
140 | 138 1
141 | 139 0
142 | 140 1
143 | 141 0
144 | 142 0
145 | 143 0
146 | 144 2
147 | 145 3
148 | 146 0
149 | 147 1
150 | 148 2
151 | 149 2
152 | 150 3
153 | 151 1
154 | 152 2
155 | 153 1
156 | 154 1
157 | 155 0
158 | 156 3
159 | 157 3
160 | 158 1
161 | 159 1
162 | 160 2
163 | 161 2
164 | 162 1
165 | 163 0
166 | 164 3
167 | 165 1
168 | 166 1
169 | 167 0
170 | 168 1
171 | 169 1
172 | 170 0
173 | 171 0
174 | 172 0
175 | 173 1
176 | 174 1
177 | 175 0
178 | 176 2
179 | 177 1
180 | 178 0
181 | 179 1
182 | 180 3
183 | 181 2
184 | 182 1
185 | 183 0
186 | 184 1
187 | 185 1
188 | 186 2
189 | 187 1
190 | 188 2
191 | 189 2
192 | 190 2
193 | 191 1
194 | 192 2
195 | 193 2
196 | 194 1
197 | 195 1
198 | 196 3
199 | 197 2
200 | 198 2
201 | 199 2
202 | 200 2
203 | 201 2
204 | 202 1
205 | 203 2
206 | 204 2
207 | 205 3
208 | 206 3
209 | 207 2
210 | 208 1
211 | 209 3
212 | 210 3
213 | 211 3
214 | 212 3
215 | 213 2
216 | 214 1
217 | 215 1
218 | 216 2
219 | 217 2
220 | 218 2
221 | 219 3
222 | 220 3
223 | 221 0
224 | 222 3
225 | 223 2
226 | 224 2
227 | 225 3
228 | 226 3
229 | 227 3
230 | 228 1
231 | 229 2
232 | 230 2
233 | 231 1
234 | 232 3
235 | 233 2
236 | 234 3
237 | 235 3
238 | 236 3
239 | 237 1
240 | 238 3
241 | 239 2
242 | 240 0
243 | 241 1
244 | 242 1
245 | 243 1
246 | 244 0
247 | 245 2
248 | 246 1
249 | 247 2
250 | 248 1
251 | 249 1
252 | 250 3
253 | 251 2
254 | 252 1
255 | 253 3
256 | 254 2
257 | 255 3
258 | 256 3
259 | 257 2
260 | 258 3
261 | 259 3
262 | 260 2
263 | 261 3
264 | 262 3
265 | 263 3
266 | 264 2
267 | 265 2
268 | 266 2
269 | 267 2
270 | 268 3
271 | 269 3
272 | 270 3
273 | 271 2
274 | 272 1
275 | 273 2
276 | 274 3
277 | 275 2
278 | 276 3
279 | 277 2
280 | 278 1
281 | 279 3
282 | 280 1
283 | 281 2
284 | 282 2
285 | 283 3
286 | 284 2
287 | 285 1
288 | 286 3
289 | 287 3
290 | 288 3
291 | 289 3
292 | 290 1
293 | 291 1
294 | 292 0
295 | 293 3
296 | 294 2
297 | 295 2
298 | 296 3
299 | 297 2
300 | 298 3
301 | 299 1
302 | 300 3
303 | 301 3
304 | 302 2
305 | 303 2
306 | 304 3
307 | 305 2
308 | 306 3
309 | 307 2
310 | 308 1
311 | 309 2
312 | 310 1
313 | 311 3
314 | 312 2
315 | 313 2
316 | 314 1
317 | 315 3
318 | 316 3
319 | 317 3
320 | 318 1
321 | 319 3
322 | 320 2
323 | 321 3
324 | 322 3
325 | 323 3
326 | 324 3
327 | 325 2
328 | 326 2
329 | 327 2
330 | 328 3
331 | 329 3
332 | 330 3
333 | 331 2
334 | 332 3
335 | 333 2
336 | 334 2
337 | 335 3
338 | 336 3
339 | 337 3
340 | 338 3
341 | 339 3
342 | 340 3
343 | 341 3
344 | 342 3
345 | 343 2
346 | 344 2
347 | 345 2
348 | 346 2
349 | 347 2
350 | 348 3
351 | 349 1
352 | 350 2
353 | 351 2
354 | 352 3
355 | 353 2
356 | 354 0
357 | 355 1
358 | 356 2
359 | 357 3
360 | 358 3
361 | 359 3
362 | 360 1
363 | 361 1
364 | 362 2
365 | 363 1
366 | 364 2
367 | 365 1
368 | 366 1
369 | 367 3
370 | 368 2
371 | 369 3
372 | 370 2
373 | 371 2
374 | 372 3
375 | 373 2
376 | 374 1
377 | 375 1
378 | 376 1
379 | 377 2
380 | 378 1
381 | 379 2
382 | 380 2
383 | 381 3
384 | 382 3
385 | 383 3
386 | 384 3
387 | 385 3
388 | 386 3
389 | 387 3
390 | 388 3
391 | 389 2
392 | 390 1
393 | 391 2
394 | 392 2
395 | 393 3
396 | 394 1
397 | 395 1
398 | 396 3
399 | 397 2
400 | 398 2
401 |
--------------------------------------------------------------------------------
/data/flight/labels-usa-airports.txt:
--------------------------------------------------------------------------------
1 | node label
2 | 10241 1
3 | 10243 2
4 | 10245 0
5 | 16390 1
6 | 10247 1
7 | 12297 2
8 | 16727 3
9 | 10257 0
10 | 12307 2
11 | 16729 3
12 | 12314 3
13 | 12315 2
14 | 10268 1
15 | 10272 1
16 | 12321 1
17 | 12323 0
18 | 12324 3
19 | 12294 2
20 | 10278 2
21 | 10279 0
22 | 11612 0
23 | 16428 1
24 | 16429 2
25 | 12335 1
26 | 12339 0
27 | 12638 2
28 | 12343 1
29 | 16441 3
30 | 10298 1
31 | 10299 0
32 | 12348 2
33 | 14397 2
34 | 10304 1
35 | 15030 3
36 | 16454 3
37 | 12641 2
38 | 10313 2
39 | 11834 2
40 | 12363 1
41 | 14412 3
42 | 12365 1
43 | 16463 3
44 | 10322 2
45 | 14691 3
46 | 10324 2
47 | 10325 2
48 | 12375 1
49 | 14424 3
50 | 10329 2
51 | 10333 1
52 | 14430 3
53 | 16479 2
54 | 15376 0
55 | 14435 3
56 | 12388 3
57 | 12389 0
58 | 12390 2
59 | 12391 0
60 | 12392 3
61 | 12394 3
62 | 16743 3
63 | 10348 3
64 | 12397 0
65 | 14447 3
66 | 16744 3
67 | 12402 0
68 | 14457 0
69 | 12412 1
70 | 16746 3
71 | 16515 3
72 | 10372 0
73 | 14470 1
74 | 15041 0
75 | 16520 3
76 | 14474 1
77 | 14475 2
78 | 10380 3
79 | 14477 2
80 | 16527 3
81 | 10385 3
82 | 12436 3
83 | 14485 1
84 | 14487 0
85 | 14488 1
86 | 12441 0
87 | 14492 0
88 | 10397 0
89 | 16543 3
90 | 12448 0
91 | 12451 0
92 | 10405 1
93 | 12455 1
94 | 10408 0
95 | 10409 1
96 | 10414 1
97 | 15389 0
98 | 14512 0
99 | 14736 2
100 | 10419 3
101 | 15154 2
102 | 10423 0
103 | 14520 1
104 | 12207 1
105 | 14524 0
106 | 14259 1
107 | 12478 0
108 | 10431 0
109 | 12320 2
110 | 10434 0
111 | 15051 1
112 | 12484 3
113 | 14533 2
114 | 14534 1
115 | 16584 2
116 | 11980 0
117 | 14539 1
118 | 12492 1
119 | 12494 1
120 | 14543 1
121 | 14254 0
122 | 10275 0
123 | 14551 2
124 | 14553 2
125 | 16603 2
126 | 10460 3
127 | 14557 3
128 | 12511 1
129 | 10466 0
130 | 12078 3
131 | 10469 0
132 | 12519 1
133 | 14716 0
134 | 14570 0
135 | 12523 0
136 | 14572 2
137 | 14574 0
138 | 15741 0
139 | 14576 0
140 | 14577 2
141 | 12206 0
142 | 15011 2
143 | 14582 1
144 | 16634 3
145 | 14588 0
146 | 15061 2
147 | 12544 0
148 | 16643 3
149 | 12549 1
150 | 11697 0
151 | 16647 1
152 | 12553 0
153 | 16651 3
154 | 11637 0
155 | 16654 3
156 | 10511 3
157 | 14262 0
158 | 14617 3
159 | 14618 2
160 | 12571 2
161 | 16668 3
162 | 11057 0
163 | 12574 2
164 | 12576 3
165 | 10529 0
166 | 14627 1
167 | 14628 2
168 | 14630 2
169 | 10535 2
170 | 16680 3
171 | 14633 0
172 | 16682 3
173 | 14635 0
174 | 10540 1
175 | 12591 2
176 | 13704 2
177 | 15411 0
178 | 14046 2
179 | 12598 3
180 | 10551 0
181 | 14648 1
182 | 15412 0
183 | 16698 3
184 | 16737 3
185 | 10557 1
186 | 10558 1
187 | 10559 1
188 | 14656 2
189 | 10561 0
190 | 10562 2
191 | 16709 3
192 | 16710 3
193 | 12615 1
194 | 10569 2
195 | 16715 3
196 | 12621 2
197 | 12622 3
198 | 14672 2
199 | 10577 0
200 | 14674 0
201 | 16723 3
202 | 16724 3
203 | 10581 0
204 | 14679 0
205 | 16728 3
206 | 12633 2
207 | 16730 3
208 | 14683 0
209 | 14685 0
210 | 10590 0
211 | 10221 1
212 | 16736 3
213 | 14689 0
214 | 16738 3
215 | 16739 3
216 | 16741 3
217 | 16742 3
218 | 10599 0
219 | 14696 0
220 | 12649 1
221 | 14698 0
222 | 12652 1
223 | 12653 2
224 | 14704 0
225 | 14706 3
226 | 14709 0
227 | 14711 0
228 | 14712 2
229 | 10617 3
230 | 10620 0
231 | 14718 1
232 | 12671 2
233 | 14400 3
234 | 10627 0
235 | 12676 2
236 | 10630 2
237 | 10631 1
238 | 15497 1
239 | 14730 0
240 | 10640 1
241 | 14738 1
242 | 10643 1
243 | 10990 0
244 | 10647 1
245 | 14744 3
246 | 14745 1
247 | 14747 0
248 | 15325 2
249 | 10654 1
250 | 14751 3
251 | 12704 1
252 | 12705 2
253 | 12708 1
254 | 12709 1
255 | 14761 0
256 | 10666 0
257 | 10994 0
258 | 10670 2
259 | 12719 1
260 | 12720 2
261 | 12721 1
262 | 14771 0
263 | 10676 0
264 | 14750 3
265 | 11337 0
266 | 12728 1
267 | 10996 2
268 | 14778 3
269 | 10683 1
270 | 10685 0
271 | 14783 0
272 | 12737 2
273 | 10693 0
274 | 12743 2
275 | 14792 3
276 | 12745 2
277 | 14794 0
278 | 12748 2
279 | 14797 1
280 | 11341 3
281 | 14802 1
282 | 12755 2
283 | 14804 1
284 | 14805 1
285 | 12758 0
286 | 10713 0
287 | 10715 3
288 | 14812 3
289 | 14814 0
290 | 10661 0
291 | 14816 2
292 | 10721 0
293 | 14819 1
294 | 12772 2
295 | 12773 2
296 | 12774 3
297 | 10728 1
298 | 10731 1
299 | 14828 0
300 | 12370 2
301 | 12782 2
302 | 14831 0
303 | 12784 2
304 | 12785 2
305 | 10739 1
306 | 10744 1
307 | 14842 0
308 | 10747 0
309 | 10754 0
310 | 14853 2
311 | 16725 2
312 | 12807 1
313 | 14856 1
314 | 10327 2
315 | 12815 1
316 | 10770 2
317 | 12819 0
318 | 14869 0
319 | 12822 1
320 | 10775 1
321 | 10778 3
322 | 10779 1
323 | 10781 0
324 | 10783 1
325 | 14880 2
326 | 10785 0
327 | 14769 3
328 | 10792 0
329 | 12841 1
330 | 12844 1
331 | 14893 0
332 | 14895 1
333 | 10800 0
334 | 14897 2
335 | 14898 2
336 | 12851 1
337 | 12853 1
338 | 10589 1
339 | 12855 1
340 | 14905 0
341 | 12635 2
342 | 14908 0
343 | 10815 2
344 | 10817 3
345 | 12866 2
346 | 10819 1
347 | 12868 2
348 | 10821 0
349 | 12870 3
350 | 14919 1
351 | 14922 3
352 | 12878 1
353 | 13795 0
354 | 12882 2
355 | 12883 2
356 | 12884 0
357 | 10838 3
358 | 12888 1
359 | 12889 0
360 | 14938 3
361 | 12891 0
362 | 12892 0
363 | 15023 0
364 | 14942 1
365 | 14943 1
366 | 14944 3
367 | 10849 0
368 | 12898 0
369 | 12899 1
370 | 12902 1
371 | 14952 0
372 | 10857 3
373 | 14955 0
374 | 15841 1
375 | 14960 0
376 | 12915 0
377 | 10868 0
378 | 12917 0
379 | 13076 0
380 | 10874 0
381 | 10687 3
382 | 14972 3
383 | 10347 3
384 | 12932 1
385 | 12054 3
386 | 12934 3
387 | 14785 3
388 | 14986 0
389 | 14987 2
390 | 10349 1
391 | 12944 2
392 | 12945 0
393 | 11615 1
394 | 12947 3
395 | 14996 2
396 | 12951 0
397 | 12953 0
398 | 12954 0
399 | 12955 3
400 | 12956 3
401 | 10910 2
402 | 12847 1
403 | 15008 1
404 | 11376 2
405 | 15010 3
406 | 14107 0
407 | 15012 2
408 | 12965 2
409 | 10918 1
410 | 15016 0
411 | 11719 1
412 | 15020 3
413 | 10925 1
414 | 10926 1
415 | 10927 3
416 | 15024 0
417 | 13768 1
418 | 10930 1
419 | 15027 0
420 | 10184 1
421 | 10933 2
422 | 12982 0
423 | 12983 2
424 | 16731 3
425 | 12992 0
426 | 10945 3
427 | 10946 3
428 | 15043 2
429 | 15045 2
430 | 10950 1
431 | 14113 1
432 | 15048 1
433 | 15049 1
434 | 13002 1
435 | 13003 1
436 | 11042 0
437 | 15054 3
438 | 11617 0
439 | 13008 3
440 | 10961 1
441 | 10965 2
442 | 10967 1
443 | 13016 2
444 | 15069 3
445 | 15070 0
446 | 13024 2
447 | 15074 1
448 | 10980 0
449 | 13029 0
450 | 10982 3
451 | 13032 1
452 | 13034 0
453 | 10140 0
454 | 15085 1
455 | 15086 2
456 | 15090 3
457 | 15091 2
458 | 13044 3
459 | 15093 3
460 | 13046 3
461 | 10999 3
462 | 15096 0
463 | 11002 3
464 | 11003 0
465 | 13052 3
466 | 14295 2
467 | 13055 2
468 | 15108 3
469 | 11013 1
470 | 11278 0
471 | 11619 2
472 | 13072 2
473 | 15370 0
474 | 11027 1
475 | 15124 1
476 | 13077 2
477 | 14468 1
478 | 11036 2
479 | 13087 2
480 | 11041 2
481 | 15138 1
482 | 10139 1
483 | 10938 2
484 | 11049 0
485 | 11050 3
486 | 11053 2
487 | 15153 1
488 | 11058 3
489 | 13109 1
490 | 13111 2
491 | 15160 2
492 | 13871 0
493 | 11066 0
494 | 13115 3
495 | 13117 3
496 | 11007 2
497 | 13121 1
498 | 15171 3
499 | 11076 1
500 | 13125 3
501 | 12769 1
502 | 15177 2
503 | 16345 2
504 | 12087 2
505 | 11085 1
506 | 11086 3
507 | 13139 0
508 | 11092 1
509 | 11097 0
510 | 11098 2
511 | 13151 3
512 | 11106 3
513 | 11067 0
514 | 11109 0
515 | 13158 0
516 | 11111 0
517 | 15215 0
518 | 15855 1
519 | 11122 0
520 | 11123 1
521 | 13873 0
522 | 11126 1
523 | 13176 1
524 | 15167 1
525 | 13182 1
526 | 11135 3
527 | 13184 0
528 | 15235 3
529 | 15236 2
530 | 13192 3
531 | 10732 0
532 | 11146 0
533 | 13195 3
534 | 13196 1
535 | 15245 1
536 | 11150 0
537 | 15248 2
538 | 15249 0
539 | 15250 2
540 | 13203 2
541 | 13204 0
542 | 14489 0
543 | 15257 1
544 | 13211 1
545 | 15511 3
546 | 11165 3
547 | 13219 3
548 | 13221 2
549 | 11174 3
550 | 11176 3
551 | 10396 1
552 | 13226 3
553 | 13127 0
554 | 15278 2
555 | 14493 2
556 | 13232 0
557 | 15282 1
558 | 11188 3
559 | 13241 1
560 | 11423 0
561 | 13244 0
562 | 11197 1
563 | 13246 3
564 | 15295 0
565 | 11200 2
566 | 10400 3
567 | 11203 0
568 | 13255 2
569 | 15304 0
570 | 13259 3
571 | 11214 1
572 | 13264 0
573 | 14150 1
574 | 15323 0
575 | 13277 0
576 | 11230 3
577 | 11233 1
578 | 13282 1
579 | 14843 0
580 | 10065 2
581 | 13891 0
582 | 11241 2
583 | 13290 0
584 | 15339 2
585 | 13292 3
586 | 11245 2
587 | 14559 2
588 | 13295 2
589 | 15344 3
590 | 13297 3
591 | 11252 0
592 | 13303 0
593 | 11259 0
594 | 15356 0
595 | 11267 0
596 | 10141 1
597 | 13484 1
598 | 11274 0
599 | 12119 1
600 | 11764 1
601 | 15374 1
602 | 11280 2
603 | 11282 3
604 | 11283 1
605 | 15380 0
606 | 15381 1
607 | 11288 1
608 | 15385 2
609 | 13487 0
610 | 11292 0
611 | 13341 1
612 | 13342 0
613 | 13344 1
614 | 10416 1
615 | 11298 0
616 | 13347 0
617 | 13348 1
618 | 11833 1
619 | 15401 1
620 | 14855 2
621 | 11308 0
622 | 15406 1
623 | 13360 0
624 | 13361 3
625 | 11315 1
626 | 13061 0
627 | 11317 2
628 | 13367 0
629 | 13369 1
630 | 15422 3
631 | 11445 1
632 | 13377 0
633 | 15427 2
634 | 13381 2
635 | 12129 1
636 | 11336 1
637 | 15433 3
638 | 13495 0
639 | 13388 1
640 | 13389 3
641 | 11789 2
642 | 15440 3
643 | 10424 3
644 | 16540 2
645 | 15203 3
646 | 13397 2
647 | 15446 2
648 | 15447 2
649 | 15448 2
650 | 14863 2
651 | 11450 3
652 | 15454 1
653 | 10085 2
654 | 15458 3
655 | 14182 2
656 | 14222 1
657 | 13415 2
658 | 13418 2
659 | 13422 0
660 | 11375 2
661 | 13424 2
662 | 13502 0
663 | 15478 1
664 | 15481 2
665 | 13434 1
666 | 11388 3
667 | 11391 2
668 | 11392 1
669 | 13504 1
670 | 13200 1
671 | 11778 0
672 | 11399 1
673 | 11401 1
674 | 13450 3
675 | 14871 2
676 | 14530 2
677 | 15502 1
678 | 15504 2
679 | 11411 1
680 | 13461 1
681 | 13463 3
682 | 13464 2
683 | 13467 1
684 | 11421 2
685 | 16581 2
686 | 14875 1
687 | 13476 0
688 | 15525 3
689 | 11430 3
690 | 11431 2
691 | 11433 0
692 | 15532 2
693 | 13485 0
694 | 13486 0
695 | 14877 1
696 | 11441 1
697 | 13490 1
698 | 15539 3
699 | 13493 3
700 | 11447 1
701 | 15546 3
702 | 12831 1
703 | 15548 3
704 | 11453 2
705 | 15550 3
706 | 14197 2
707 | 11456 2
708 | 13505 3
709 | 15554 1
710 | 13507 2
711 | 11463 2
712 | 16588 2
713 | 11468 1
714 | 11470 0
715 | 11471 1
716 | 13520 1
717 | 15569 1
718 | 15570 1
719 | 14542 2
720 | 11479 2
721 | 11481 0
722 | 15579 1
723 | 11484 2
724 | 15581 1
725 | 15582 1
726 | 11487 2
727 | 13536 2
728 | 13537 3
729 | 11492 1
730 | 13541 0
731 | 13543 1
732 | 13546 3
733 | 11503 0
734 | 15601 2
735 | 11506 3
736 | 13555 3
737 | 10135 0
738 | 11510 2
739 | 15607 0
740 | 11512 2
741 | 15231 2
742 | 13564 2
743 | 11517 2
744 | 13525 3
745 | 15232 3
746 | 15618 2
747 | 11525 1
748 | 13574 2
749 | 10613 1
750 | 15624 0
751 | 13577 0
752 | 15626 0
753 | 13579 1
754 | 15628 1
755 | 11535 1
756 | 12250 1
757 | 11537 0
758 | 15634 3
759 | 13587 3
760 | 11540 0
761 | 11140 0
762 | 11823 0
763 | 11550 1
764 | 13599 2
765 | 15650 3
766 | 11555 0
767 | 11559 2
768 | 15656 2
769 | 11563 0
770 | 10478 2
771 | 13619 3
772 | 11577 0
773 | 10463 3
774 | 13194 1
775 | 13630 3
776 | 10615 2
777 | 11588 1
778 | 12854 2
779 | 15798 1
780 | 11592 3
781 | 16553 3
782 | 12173 0
783 | 11603 0
784 | 15700 2
785 | 13198 0
786 | 13655 2
787 | 13540 3
788 | 15706 3
789 | 15707 2
790 | 15708 2
791 | 15709 1
792 | 14565 2
793 | 15713 3
794 | 11618 0
795 | 13667 3
796 | 12857 2
797 | 11624 0
798 | 15721 1
799 | 13674 2
800 | 11627 3
801 | 13676 2
802 | 15725 2
803 | 11630 0
804 | 11633 3
805 | 13682 3
806 | 15731 2
807 | 13685 3
808 | 11638 0
809 | 11641 0
810 | 11643 2
811 | 13693 2
812 | 11646 2
813 | 11648 0
814 | 11299 3
815 | 11650 3
816 | 12663 2
817 | 13700 3
818 | 11656 3
819 | 10476 2
820 | 15754 2
821 | 11659 3
822 | 11842 2
823 | 13710 1
824 | 11663 3
825 | 12184 0
826 | 13714 2
827 | 13715 2
828 | 11669 3
829 | 14314 0
830 | 11673 2
831 | 11675 2
832 | 10317 3
833 | 15774 3
834 | 12869 3
835 | 13398 1
836 | 11684 1
837 | 13736 3
838 | 15785 1
839 | 11695 0
840 | 15793 2
841 | 15794 1
842 | 11699 0
843 | 11028 2
844 | 13756 2
845 | 11711 2
846 | 11714 3
847 | 14467 3
848 | 13767 1
849 | 11720 2
850 | 11721 0
851 | 13770 1
852 | 11511 2
853 | 13772 3
854 | 11725 1
855 | 11726 1
856 | 11728 2
857 | 15825 2
858 | 15939 3
859 | 11732 3
860 | 13781 3
861 | 13785 3
862 | 13788 3
863 | 15838 3
864 | 13689 2
865 | 14371 3
866 | 15842 3
867 | 15843 2
868 | 13796 0
869 | 14303 0
870 | 15847 3
871 | 11493 2
872 | 11756 3
873 | 15853 2
874 | 13806 2
875 | 15599 2
876 | 15856 2
877 | 13809 2
878 | 11762 1
879 | 11344 2
880 | 15861 1
881 | 15862 1
882 | 15273 3
883 | 11415 3
884 | 15163 3
885 | 10154 0
886 | 11775 0
887 | 16498 2
888 | 13826 2
889 | 15275 3
890 | 13829 1
891 | 13830 0
892 | 15879 3
893 | 13832 1
894 | 15881 3
895 | 15882 3
896 | 15885 2
897 | 11790 3
898 | 15887 1
899 | 13827 3
900 | 13841 3
901 | 13230 0
902 | 14100 0
903 | 15897 1
904 | 13851 0
905 | 11867 1
906 | 11813 3
907 | 13862 1
908 | 13863 2
909 | 13865 3
910 | 13791 3
911 | 15919 0
912 | 11824 3
913 | 11825 3
914 | 11827 2
915 | 11828 3
916 | 14601 3
917 | 13881 3
918 | 15930 3
919 | 10267 3
920 | 13884 2
921 | 15934 2
922 | 11840 3
923 | 12896 0
924 | 13890 3
925 | 11531 1
926 | 11844 1
927 | 11845 1
928 | 13894 1
929 | 14945 1
930 | 12124 1
931 | 13898 2
932 | 13899 3
933 | 13731 2
934 | 15534 3
935 | 11856 3
936 | 14102 1
937 | 11859 2
938 | 11331 3
939 | 11193 0
940 | 11865 1
941 | 15057 2
942 | 12559 1
943 | 11869 0
944 | 11870 3
945 | 11871 3
946 | 15971 3
947 | 15973 3
948 | 11879 1
949 | 10011 1
950 | 14268 2
951 | 13930 0
952 | 13931 0
953 | 11413 0
954 | 13933 0
955 | 13934 2
956 | 15293 1
957 | 13936 3
958 | 11198 3
959 | 13942 1
960 | 15991 1
961 | 11898 0
962 | 11899 2
963 | 13949 2
964 | 11905 1
965 | 13459 0
966 | 11883 3
967 | 11908 1
968 | 13958 2
969 | 11495 2
970 | 11884 0
971 | 13964 1
972 | 13763 3
973 | 11885 3
974 | 11921 0
975 | 13970 0
976 | 11924 2
977 | 15982 3
978 | 11545 2
979 | 11931 1
980 | 13983 1
981 | 13984 2
982 | 11938 3
983 | 13987 2
984 | 11941 1
985 | 11947 3
986 | 12332 3
987 | 11952 1
988 | 13256 0
989 | 14004 2
990 | 12264 0
991 | 14006 1
992 | 11538 3
993 | 12916 2
994 | 14013 2
995 | 14646 3
996 | 14965 3
997 | 12235 2
998 | 11973 0
999 | 11975 3
1000 | 14024 3
1001 | 14025 0
1002 | 14027 0
1003 | 14028 2
1004 | 11982 0
1005 | 11986 0
1006 | 15310 3
1007 | 11992 2
1008 | 12771 2
1009 | 11994 3
1010 | 16091 1
1011 | 11996 0
1012 | 11997 1
1013 | 13105 1
1014 | 14288 1
1015 | 14050 2
1016 | 12003 0
1017 | 16101 0
1018 | 14054 1
1019 | 12007 0
1020 | 14057 0
1021 | 12583 2
1022 | 12012 1
1023 | 14062 2
1024 | 12016 0
1025 | 12018 3
1026 | 16681 3
1027 | 12244 1
1028 | 12028 3
1029 | 16665 3
1030 | 12245 1
1031 | 14081 1
1032 | 14082 0
1033 | 12587 3
1034 | 10923 2
1035 | 16133 0
1036 | 12039 3
1037 | 14088 1
1038 | 15472 2
1039 | 14091 3
1040 | 14092 0
1041 | 15063 3
1042 | 14098 0
1043 | 11977 0
1044 | 10005 2
1045 | 10006 3
1046 | 12055 2
1047 | 10543 3
1048 | 14108 0
1049 | 14109 1
1050 | 10015 3
1051 | 10016 2
1052 | 10017 3
1053 | 11227 1
1054 | 12068 3
1055 | 10886 3
1056 | 11995 0
1057 | 14119 2
1058 | 14120 1
1059 | 10204 1
1060 | 14122 0
1061 | 14125 1
1062 | 10030 3
1063 | 12079 3
1064 | 10033 3
1065 | 14130 1
1066 | 14131 1
1067 | 10039 3
1068 | 10040 3
1069 | 10041 3
1070 | 10042 2
1071 | 12255 1
1072 | 12094 0
1073 | 10050 3
1074 | 10052 3
1075 | 10053 3
1076 | 11367 2
1077 | 12103 3
1078 | 10056 3
1079 | 12599 2
1080 | 12108 2
1081 | 10064 3
1082 | 16696 3
1083 | 14307 0
1084 | 14695 3
1085 | 14167 2
1086 | 16218 0
1087 | 10076 2
1088 | 12222 2
1089 | 12127 3
1090 | 14992 1
1091 | 12131 2
1092 | 12132 3
1093 | 13535 2
1094 | 12134 3
1095 | 14993 2
1096 | 12263 3
1097 | 12141 2
1098 | 13629 3
1099 | 14193 0
1100 | 12147 3
1101 | 12149 2
1102 | 10361 1
1103 | 12155 3
1104 | 12156 0
1105 | 16567 1
1106 | 13771 2
1107 | 14112 0
1108 | 16704 3
1109 | 14044 3
1110 | 14218 2
1111 | 12171 1
1112 | 12610 1
1113 | 12174 1
1114 | 12175 1
1115 | 12177 1
1116 | 11587 1
1117 | 14229 2
1118 | 14231 1
1119 | 10136 0
1120 | 12185 2
1121 | 14234 1
1122 | 12187 2
1123 | 14236 2
1124 | 14237 1
1125 | 12190 1
1126 | 12191 0
1127 | 13296 0
1128 | 10146 1
1129 | 12195 1
1130 | 12197 0
1131 | 14321 0
1132 | 12779 1
1133 | 11522 3
1134 | 10155 0
1135 | 14252 0
1136 | 10157 0
1137 | 10158 0
1138 | 14255 2
1139 | 14256 1
1140 | 13947 3
1141 | 12211 2
1142 | 10165 1
1143 | 12214 1
1144 | 16311 2
1145 | 12217 0
1146 | 10170 0
1147 | 10171 2
1148 | 10172 3
1149 | 14270 3
1150 | 12223 0
1151 | 14273 2
1152 | 14275 1
1153 | 12228 2
1154 | 12232 3
1155 | 10185 0
1156 | 14282 2
1157 | 14283 3
1158 | 11953 0
1159 | 16336 3
1160 | 13501 2
1161 | 10194 1
1162 | 16339 2
1163 | 16340 2
1164 | 14670 3
1165 | 10198 2
1166 | 13433 0
1167 | 11961 2
1168 | 16346 2
1169 | 12252 2
1170 | 12253 1
1171 | 10917 1
1172 | 10208 0
1173 | 16720 3
1174 | 14306 1
1175 | 13307 2
1176 | 14320 2
1177 | 16721 3
1178 | 10216 2
1179 | 12265 0
1180 | 12266 0
1181 | 16363 3
1182 | 12268 1
1183 | 16722 3
1184 | 11702 3
1185 | 10224 3
1186 | 10225 1
1187 | 12278 0
1188 | 12280 0
1189 | 14332 3
1190 | 10237 2
1191 | 16353 2
1192 |
--------------------------------------------------------------------------------
/data/wiki/Wiki_category.txt:
--------------------------------------------------------------------------------
1 | 0 8
2 | 1 8
3 | 2 8
4 | 3 8
5 | 4 8
6 | 5 8
7 | 6 8
8 | 7 8
9 | 8 8
10 | 9 8
11 | 10 8
12 | 11 1
13 | 12 1
14 | 13 1
15 | 14 1
16 | 15 1
17 | 16 1
18 | 17 1
19 | 18 1
20 | 19 1
21 | 20 1
22 | 21 1
23 | 22 1
24 | 23 1
25 | 24 1
26 | 25 1
27 | 26 1
28 | 27 1
29 | 28 1
30 | 29 1
31 | 30 1
32 | 31 1
33 | 32 1
34 | 33 1
35 | 34 1
36 | 35 1
37 | 36 1
38 | 37 1
39 | 38 1
40 | 39 1
41 | 40 1
42 | 41 1
43 | 42 1
44 | 43 1
45 | 44 1
46 | 45 1
47 | 46 1
48 | 47 1
49 | 48 1
50 | 49 1
51 | 50 1
52 | 51 1
53 | 52 1
54 | 53 1
55 | 54 1
56 | 55 1
57 | 56 1
58 | 57 1
59 | 58 1
60 | 59 1
61 | 60 1
62 | 61 1
63 | 62 1
64 | 63 1
65 | 64 1
66 | 65 1
67 | 66 1
68 | 67 1
69 | 68 1
70 | 69 1
71 | 70 1
72 | 71 1
73 | 72 1
74 | 73 1
75 | 74 1
76 | 75 1
77 | 76 1
78 | 77 1
79 | 78 1
80 | 79 1
81 | 80 1
82 | 81 1
83 | 82 1
84 | 83 1
85 | 84 1
86 | 85 1
87 | 86 1
88 | 87 1
89 | 88 1
90 | 89 1
91 | 90 1
92 | 91 1
93 | 92 1
94 | 93 1
95 | 94 1
96 | 95 1
97 | 96 1
98 | 97 1
99 | 98 1
100 | 99 1
101 | 100 1
102 | 101 1
103 | 102 1
104 | 103 1
105 | 104 1
106 | 105 1
107 | 106 1
108 | 107 1
109 | 108 1
110 | 109 1
111 | 110 1
112 | 111 1
113 | 112 1
114 | 113 1
115 | 114 1
116 | 115 1
117 | 116 1
118 | 117 1
119 | 118 1
120 | 119 1
121 | 120 1
122 | 121 1
123 | 122 1
124 | 123 1
125 | 124 1
126 | 125 1
127 | 126 1
128 | 127 1
129 | 128 1
130 | 129 1
131 | 130 1
132 | 131 1
133 | 132 1
134 | 133 1
135 | 134 1
136 | 135 1
137 | 136 1
138 | 137 1
139 | 138 1
140 | 139 1
141 | 140 1
142 | 141 1
143 | 142 1
144 | 143 1
145 | 144 1
146 | 145 1
147 | 146 1
148 | 147 1
149 | 148 1
150 | 149 1
151 | 150 1
152 | 151 1
153 | 152 1
154 | 153 1
155 | 154 1
156 | 155 1
157 | 156 1
158 | 157 1
159 | 158 1
160 | 159 1
161 | 160 1
162 | 161 1
163 | 162 1
164 | 163 1
165 | 164 1
166 | 165 1
167 | 166 1
168 | 167 1
169 | 168 1
170 | 169 1
171 | 170 1
172 | 171 1
173 | 172 1
174 | 173 1
175 | 174 1
176 | 175 1
177 | 176 1
178 | 177 1
179 | 178 1
180 | 179 1
181 | 180 1
182 | 181 1
183 | 182 1
184 | 183 1
185 | 184 1
186 | 185 1
187 | 186 1
188 | 187 1
189 | 188 1
190 | 189 1
191 | 190 0
192 | 191 0
193 | 192 0
194 | 193 0
195 | 194 0
196 | 195 0
197 | 196 0
198 | 197 0
199 | 198 0
200 | 199 0
201 | 200 0
202 | 201 0
203 | 202 0
204 | 203 0
205 | 204 0
206 | 205 0
207 | 206 0
208 | 207 0
209 | 208 0
210 | 209 0
211 | 210 0
212 | 211 0
213 | 212 0
214 | 213 1
215 | 214 1
216 | 215 1
217 | 216 1
218 | 217 1
219 | 218 1
220 | 219 1
221 | 220 1
222 | 221 1
223 | 222 1
224 | 223 1
225 | 224 1
226 | 225 1
227 | 226 1
228 | 227 1
229 | 228 1
230 | 229 1
231 | 230 1
232 | 231 1
233 | 232 1
234 | 233 1
235 | 234 1
236 | 235 1
237 | 236 1
238 | 237 1
239 | 238 1
240 | 239 1
241 | 240 1
242 | 241 1
243 | 242 1
244 | 243 1
245 | 244 1
246 | 245 1
247 | 246 1
248 | 247 1
249 | 248 6
250 | 249 6
251 | 250 6
252 | 251 6
253 | 252 6
254 | 253 6
255 | 254 6
256 | 255 6
257 | 256 6
258 | 257 6
259 | 258 6
260 | 259 6
261 | 260 6
262 | 261 6
263 | 262 6
264 | 263 6
265 | 264 6
266 | 265 5
267 | 266 5
268 | 267 5
269 | 268 5
270 | 269 5
271 | 270 5
272 | 271 5
273 | 272 5
274 | 273 5
275 | 274 5
276 | 275 5
277 | 276 5
278 | 277 5
279 | 278 5
280 | 279 5
281 | 280 5
282 | 281 5
283 | 282 5
284 | 283 5
285 | 284 5
286 | 285 5
287 | 286 5
288 | 287 5
289 | 288 5
290 | 289 5
291 | 290 5
292 | 291 5
293 | 292 5
294 | 293 5
295 | 294 5
296 | 295 5
297 | 296 5
298 | 297 5
299 | 298 5
300 | 299 5
301 | 300 5
302 | 301 5
303 | 302 5
304 | 303 5
305 | 304 5
306 | 305 5
307 | 306 5
308 | 307 5
309 | 308 5
310 | 309 5
311 | 310 5
312 | 311 5
313 | 312 5
314 | 313 5
315 | 314 5
316 | 315 5
317 | 316 5
318 | 317 5
319 | 318 13
320 | 319 13
321 | 320 13
322 | 321 13
323 | 322 13
324 | 323 13
325 | 324 13
326 | 325 13
327 | 326 13
328 | 327 13
329 | 328 13
330 | 329 13
331 | 330 13
332 | 331 13
333 | 332 13
334 | 333 13
335 | 334 13
336 | 335 13
337 | 336 13
338 | 337 13
339 | 338 13
340 | 339 13
341 | 340 13
342 | 341 13
343 | 342 13
344 | 343 13
345 | 344 13
346 | 345 13
347 | 346 13
348 | 347 13
349 | 348 6
350 | 349 6
351 | 350 6
352 | 351 6
353 | 352 6
354 | 353 6
355 | 354 6
356 | 355 6
357 | 356 6
358 | 357 6
359 | 358 6
360 | 359 6
361 | 360 6
362 | 361 6
363 | 362 6
364 | 363 6
365 | 364 6
366 | 365 6
367 | 366 6
368 | 367 6
369 | 368 6
370 | 369 6
371 | 370 6
372 | 371 6
373 | 372 6
374 | 373 6
375 | 374 6
376 | 375 6
377 | 376 6
378 | 377 12
379 | 378 12
380 | 379 12
381 | 380 12
382 | 381 12
383 | 382 12
384 | 383 12
385 | 384 12
386 | 385 12
387 | 386 15
388 | 387 15
389 | 388 15
390 | 389 15
391 | 390 15
392 | 391 15
393 | 392 15
394 | 393 15
395 | 394 15
396 | 395 15
397 | 396 15
398 | 397 15
399 | 398 15
400 | 399 15
401 | 400 15
402 | 401 15
403 | 402 15
404 | 403 15
405 | 404 15
406 | 405 15
407 | 406 15
408 | 407 15
409 | 408 15
410 | 409 15
411 | 410 15
412 | 411 15
413 | 412 15
414 | 413 15
415 | 414 15
416 | 415 15
417 | 416 15
418 | 417 15
419 | 418 15
420 | 419 15
421 | 420 15
422 | 421 15
423 | 422 15
424 | 423 15
425 | 424 15
426 | 425 15
427 | 426 15
428 | 427 15
429 | 428 15
430 | 429 15
431 | 430 15
432 | 431 15
433 | 432 15
434 | 433 15
435 | 434 15
436 | 435 15
437 | 436 15
438 | 437 15
439 | 438 15
440 | 439 15
441 | 440 15
442 | 441 15
443 | 442 15
444 | 443 15
445 | 444 15
446 | 445 15
447 | 446 15
448 | 447 15
449 | 448 15
450 | 449 15
451 | 450 15
452 | 451 15
453 | 452 15
454 | 453 15
455 | 454 15
456 | 455 15
457 | 456 15
458 | 457 15
459 | 458 15
460 | 459 15
461 | 460 15
462 | 461 15
463 | 462 15
464 | 463 15
465 | 464 15
466 | 465 15
467 | 466 15
468 | 467 15
469 | 468 15
470 | 469 15
471 | 470 15
472 | 471 15
473 | 472 15
474 | 473 15
475 | 474 15
476 | 475 15
477 | 476 15
478 | 477 15
479 | 478 15
480 | 479 15
481 | 480 15
482 | 481 15
483 | 482 15
484 | 483 15
485 | 484 15
486 | 485 15
487 | 486 15
488 | 487 15
489 | 488 15
490 | 489 15
491 | 490 15
492 | 491 15
493 | 492 15
494 | 493 15
495 | 494 15
496 | 495 15
497 | 496 15
498 | 497 15
499 | 498 15
500 | 499 15
501 | 500 15
502 | 501 15
503 | 502 15
504 | 503 15
505 | 504 15
506 | 505 15
507 | 506 15
508 | 507 15
509 | 508 15
510 | 509 15
511 | 510 15
512 | 511 15
513 | 512 15
514 | 513 15
515 | 514 15
516 | 515 15
517 | 516 15
518 | 517 15
519 | 518 15
520 | 519 15
521 | 520 15
522 | 521 15
523 | 522 15
524 | 523 15
525 | 524 15
526 | 525 15
527 | 526 15
528 | 527 15
529 | 528 15
530 | 529 15
531 | 530 15
532 | 531 15
533 | 532 15
534 | 533 15
535 | 534 15
536 | 535 15
537 | 536 15
538 | 537 15
539 | 538 15
540 | 539 15
541 | 540 15
542 | 541 16
543 | 542 16
544 | 543 16
545 | 544 16
546 | 545 16
547 | 546 16
548 | 547 16
549 | 548 16
550 | 549 16
551 | 550 16
552 | 551 16
553 | 552 16
554 | 553 16
555 | 554 16
556 | 555 16
557 | 556 16
558 | 557 16
559 | 558 16
560 | 559 16
561 | 560 16
562 | 561 16
563 | 562 16
564 | 563 16
565 | 564 16
566 | 565 16
567 | 566 16
568 | 567 16
569 | 568 16
570 | 569 16
571 | 570 16
572 | 571 16
573 | 572 16
574 | 573 16
575 | 574 16
576 | 575 16
577 | 576 16
578 | 577 16
579 | 578 16
580 | 579 16
581 | 580 16
582 | 581 16
583 | 582 16
584 | 583 16
585 | 584 16
586 | 585 16
587 | 586 16
588 | 587 16
589 | 588 16
590 | 589 16
591 | 590 16
592 | 591 16
593 | 592 16
594 | 593 16
595 | 594 16
596 | 595 16
597 | 596 16
598 | 597 16
599 | 598 16
600 | 599 16
601 | 600 16
602 | 601 16
603 | 602 16
604 | 603 16
605 | 604 16
606 | 605 16
607 | 606 16
608 | 607 16
609 | 608 16
610 | 609 16
611 | 610 16
612 | 611 16
613 | 612 16
614 | 613 16
615 | 614 16
616 | 615 16
617 | 616 16
618 | 617 16
619 | 618 16
620 | 619 16
621 | 620 16
622 | 621 16
623 | 622 16
624 | 623 16
625 | 624 16
626 | 625 16
627 | 626 16
628 | 627 16
629 | 628 16
630 | 629 16
631 | 630 16
632 | 631 16
633 | 632 16
634 | 633 16
635 | 634 16
636 | 635 16
637 | 636 16
638 | 637 16
639 | 638 16
640 | 639 16
641 | 640 16
642 | 641 16
643 | 642 16
644 | 643 16
645 | 644 16
646 | 645 16
647 | 646 16
648 | 647 16
649 | 648 16
650 | 649 1
651 | 650 1
652 | 651 1
653 | 652 1
654 | 653 1
655 | 654 1
656 | 655 1
657 | 656 1
658 | 657 1
659 | 658 1
660 | 659 1
661 | 660 1
662 | 661 1
663 | 662 1
664 | 663 1
665 | 664 1
666 | 665 1
667 | 666 1
668 | 667 1
669 | 668 1
670 | 669 1
671 | 670 1
672 | 671 1
673 | 672 1
674 | 673 1
675 | 674 1
676 | 675 1
677 | 676 1
678 | 677 1
679 | 678 1
680 | 679 1
681 | 680 1
682 | 681 1
683 | 682 1
684 | 683 1
685 | 684 1
686 | 685 1
687 | 686 1
688 | 687 1
689 | 688 1
690 | 689 1
691 | 690 1
692 | 691 1
693 | 692 1
694 | 693 1
695 | 694 1
696 | 695 1
697 | 696 1
698 | 697 1
699 | 698 1
700 | 699 1
701 | 700 1
702 | 701 1
703 | 702 1
704 | 703 1
705 | 704 1
706 | 705 1
707 | 706 1
708 | 707 1
709 | 708 1
710 | 709 1
711 | 710 1
712 | 711 1
713 | 712 1
714 | 713 1
715 | 714 1
716 | 715 1
717 | 716 1
718 | 717 1
719 | 718 1
720 | 719 1
721 | 720 1
722 | 721 1
723 | 722 1
724 | 723 1
725 | 724 1
726 | 725 1
727 | 726 1
728 | 727 1
729 | 728 1
730 | 729 1
731 | 730 1
732 | 731 1
733 | 732 1
734 | 733 1
735 | 734 1
736 | 735 1
737 | 736 1
738 | 737 1
739 | 738 1
740 | 739 1
741 | 740 1
742 | 741 1
743 | 742 1
744 | 743 1
745 | 744 1
746 | 745 1
747 | 746 1
748 | 747 1
749 | 748 1
750 | 749 1
751 | 750 1
752 | 751 1
753 | 752 1
754 | 753 1
755 | 754 1
756 | 755 1
757 | 756 1
758 | 757 1
759 | 758 1
760 | 759 1
761 | 760 1
762 | 761 1
763 | 762 1
764 | 763 1
765 | 764 1
766 | 765 1
767 | 766 1
768 | 767 1
769 | 768 1
770 | 769 1
771 | 770 1
772 | 771 1
773 | 772 1
774 | 773 1
775 | 774 1
776 | 775 1
777 | 776 1
778 | 777 1
779 | 778 1
780 | 779 1
781 | 780 1
782 | 781 1
783 | 782 1
784 | 783 1
785 | 784 1
786 | 785 1
787 | 786 1
788 | 787 1
789 | 788 1
790 | 789 1
791 | 790 1
792 | 791 1
793 | 792 1
794 | 793 1
795 | 794 1
796 | 795 1
797 | 796 1
798 | 797 1
799 | 798 1
800 | 799 1
801 | 800 1
802 | 801 1
803 | 802 1
804 | 803 1
805 | 804 1
806 | 805 1
807 | 806 1
808 | 807 1
809 | 808 1
810 | 809 1
811 | 810 1
812 | 811 1
813 | 812 1
814 | 813 1
815 | 814 1
816 | 815 1
817 | 816 1
818 | 817 1
819 | 818 1
820 | 819 1
821 | 820 1
822 | 821 1
823 | 822 1
824 | 823 1
825 | 824 1
826 | 825 1
827 | 826 1
828 | 827 1
829 | 828 1
830 | 829 1
831 | 830 1
832 | 831 1
833 | 832 1
834 | 833 1
835 | 834 1
836 | 835 1
837 | 836 1
838 | 837 1
839 | 838 1
840 | 839 1
841 | 840 1
842 | 841 9
843 | 842 9
844 | 843 9
845 | 844 9
846 | 845 9
847 | 846 9
848 | 847 9
849 | 848 9
850 | 849 9
851 | 850 9
852 | 851 9
853 | 852 10
854 | 853 10
855 | 854 10
856 | 855 10
857 | 856 10
858 | 857 10
859 | 858 10
860 | 859 10
861 | 860 10
862 | 861 10
863 | 862 10
864 | 863 10
865 | 864 10
866 | 865 10
867 | 866 10
868 | 867 10
869 | 868 10
870 | 869 10
871 | 870 10
872 | 871 10
873 | 872 10
874 | 873 10
875 | 874 10
876 | 875 10
877 | 876 10
878 | 877 10
879 | 878 10
880 | 879 10
881 | 880 10
882 | 881 10
883 | 882 5
884 | 883 5
885 | 884 5
886 | 885 5
887 | 886 5
888 | 887 5
889 | 888 5
890 | 889 5
891 | 890 5
892 | 891 5
893 | 892 5
894 | 893 5
895 | 894 5
896 | 895 5
897 | 896 5
898 | 897 5
899 | 898 5
900 | 899 5
901 | 900 5
902 | 901 5
903 | 902 5
904 | 903 5
905 | 904 5
906 | 905 5
907 | 906 5
908 | 907 5
909 | 908 5
910 | 909 5
911 | 910 5
912 | 911 5
913 | 912 5
914 | 913 5
915 | 914 5
916 | 915 5
917 | 916 5
918 | 917 5
919 | 918 5
920 | 919 5
921 | 920 5
922 | 921 5
923 | 922 5
924 | 923 5
925 | 924 5
926 | 925 5
927 | 926 5
928 | 927 5
929 | 928 5
930 | 929 5
931 | 930 5
932 | 931 5
933 | 932 5
934 | 933 5
935 | 934 5
936 | 935 5
937 | 936 5
938 | 937 5
939 | 938 5
940 | 939 5
941 | 940 5
942 | 941 5
943 | 942 5
944 | 943 5
945 | 944 5
946 | 945 5
947 | 946 5
948 | 947 5
949 | 948 5
950 | 949 5
951 | 950 5
952 | 951 5
953 | 952 5
954 | 953 5
955 | 954 5
956 | 955 5
957 | 956 5
958 | 957 5
959 | 958 5
960 | 959 5
961 | 960 5
962 | 961 5
963 | 962 5
964 | 963 5
965 | 964 5
966 | 965 5
967 | 966 5
968 | 967 5
969 | 968 5
970 | 969 5
971 | 970 5
972 | 971 5
973 | 972 5
974 | 973 5
975 | 974 5
976 | 975 5
977 | 976 5
978 | 977 5
979 | 978 5
980 | 979 5
981 | 980 5
982 | 981 5
983 | 982 5
984 | 983 5
985 | 984 5
986 | 985 5
987 | 986 5
988 | 987 5
989 | 988 5
990 | 989 5
991 | 990 5
992 | 991 5
993 | 992 5
994 | 993 5
995 | 994 5
996 | 995 5
997 | 996 5
998 | 997 5
999 | 998 5
1000 | 999 5
1001 | 1000 5
1002 | 1001 5
1003 | 1002 5
1004 | 1003 5
1005 | 1004 3
1006 | 1005 3
1007 | 1006 3
1008 | 1007 3
1009 | 1008 3
1010 | 1009 3
1011 | 1010 3
1012 | 1011 3
1013 | 1012 3
1014 | 1013 3
1015 | 1014 3
1016 | 1015 3
1017 | 1016 3
1018 | 1017 3
1019 | 1018 3
1020 | 1019 3
1021 | 1020 3
1022 | 1021 5
1023 | 1022 5
1024 | 1023 5
1025 | 1024 5
1026 | 1025 5
1027 | 1026 5
1028 | 1027 5
1029 | 1028 5
1030 | 1029 5
1031 | 1030 5
1032 | 1031 5
1033 | 1032 5
1034 | 1033 5
1035 | 1034 5
1036 | 1035 5
1037 | 1036 5
1038 | 1037 5
1039 | 1038 5
1040 | 1039 5
1041 | 1040 5
1042 | 1041 5
1043 | 1042 5
1044 | 1043 5
1045 | 1044 5
1046 | 1045 5
1047 | 1046 5
1048 | 1047 5
1049 | 1048 5
1050 | 1049 5
1051 | 1050 5
1052 | 1051 5
1053 | 1052 5
1054 | 1053 5
1055 | 1054 5
1056 | 1055 5
1057 | 1056 5
1058 | 1057 5
1059 | 1058 5
1060 | 1059 5
1061 | 1060 5
1062 | 1061 5
1063 | 1062 5
1064 | 1063 5
1065 | 1064 5
1066 | 1065 5
1067 | 1066 5
1068 | 1067 5
1069 | 1068 5
1070 | 1069 5
1071 | 1070 5
1072 | 1071 5
1073 | 1072 5
1074 | 1073 5
1075 | 1074 5
1076 | 1075 5
1077 | 1076 5
1078 | 1077 5
1079 | 1078 5
1080 | 1079 5
1081 | 1080 5
1082 | 1081 5
1083 | 1082 5
1084 | 1083 5
1085 | 1084 5
1086 | 1085 5
1087 | 1086 5
1088 | 1087 5
1089 | 1088 5
1090 | 1089 5
1091 | 1090 5
1092 | 1091 5
1093 | 1092 5
1094 | 1093 5
1095 | 1094 5
1096 | 1095 5
1097 | 1096 5
1098 | 1097 5
1099 | 1098 5
1100 | 1099 5
1101 | 1100 5
1102 | 1101 5
1103 | 1102 5
1104 | 1103 5
1105 | 1104 5
1106 | 1105 5
1107 | 1106 5
1108 | 1107 5
1109 | 1108 5
1110 | 1109 5
1111 | 1110 5
1112 | 1111 5
1113 | 1112 5
1114 | 1113 5
1115 | 1114 5
1116 | 1115 5
1117 | 1116 5
1118 | 1117 5
1119 | 1118 5
1120 | 1119 5
1121 | 1120 5
1122 | 1121 5
1123 | 1122 5
1124 | 1123 5
1125 | 1124 5
1126 | 1125 5
1127 | 1126 5
1128 | 1127 5
1129 | 1128 5
1130 | 1129 5
1131 | 1130 5
1132 | 1131 5
1133 | 1132 5
1134 | 1133 5
1135 | 1134 5
1136 | 1135 5
1137 | 1136 5
1138 | 1137 5
1139 | 1138 5
1140 | 1139 5
1141 | 1140 5
1142 | 1141 5
1143 | 1142 5
1144 | 1143 5
1145 | 1144 5
1146 | 1145 5
1147 | 1146 5
1148 | 1147 5
1149 | 1148 5
1150 | 1149 5
1151 | 1150 5
1152 | 1151 5
1153 | 1152 5
1154 | 1153 5
1155 | 1154 5
1156 | 1155 5
1157 | 1156 5
1158 | 1157 5
1159 | 1158 5
1160 | 1159 5
1161 | 1160 5
1162 | 1161 5
1163 | 1162 5
1164 | 1163 5
1165 | 1164 5
1166 | 1165 5
1167 | 1166 5
1168 | 1167 5
1169 | 1168 5
1170 | 1169 5
1171 | 1170 5
1172 | 1171 5
1173 | 1172 5
1174 | 1173 5
1175 | 1174 5
1176 | 1175 5
1177 | 1176 5
1178 | 1177 5
1179 | 1178 5
1180 | 1179 5
1181 | 1180 5
1182 | 1181 5
1183 | 1182 5
1184 | 1183 5
1185 | 1184 5
1186 | 1185 5
1187 | 1186 5
1188 | 1187 5
1189 | 1188 5
1190 | 1189 5
1191 | 1190 5
1192 | 1191 5
1193 | 1192 5
1194 | 1193 5
1195 | 1194 5
1196 | 1195 5
1197 | 1196 5
1198 | 1197 5
1199 | 1198 5
1200 | 1199 5
1201 | 1200 5
1202 | 1201 5
1203 | 1202 5
1204 | 1203 5
1205 | 1204 5
1206 | 1205 5
1207 | 1206 5
1208 | 1207 11
1209 | 1208 11
1210 | 1209 11
1211 | 1210 11
1212 | 1211 11
1213 | 1212 11
1214 | 1213 11
1215 | 1214 11
1216 | 1215 11
1217 | 1216 11
1218 | 1217 11
1219 | 1218 11
1220 | 1219 11
1221 | 1220 11
1222 | 1221 11
1223 | 1222 11
1224 | 1223 11
1225 | 1224 11
1226 | 1225 11
1227 | 1226 11
1228 | 1227 11
1229 | 1228 11
1230 | 1229 11
1231 | 1230 11
1232 | 1231 11
1233 | 1232 11
1234 | 1233 11
1235 | 1234 11
1236 | 1235 11
1237 | 1236 11
1238 | 1237 11
1239 | 1238 11
1240 | 1239 11
1241 | 1240 11
1242 | 1241 11
1243 | 1242 11
1244 | 1243 11
1245 | 1244 11
1246 | 1245 11
1247 | 1246 11
1248 | 1247 11
1249 | 1248 11
1250 | 1249 11
1251 | 1250 11
1252 | 1251 11
1253 | 1252 11
1254 | 1253 11
1255 | 1254 11
1256 | 1255 11
1257 | 1256 11
1258 | 1257 11
1259 | 1258 11
1260 | 1259 11
1261 | 1260 11
1262 | 1261 11
1263 | 1262 11
1264 | 1263 11
1265 | 1264 11
1266 | 1265 11
1267 | 1266 11
1268 | 1267 11
1269 | 1268 11
1270 | 1269 11
1271 | 1270 11
1272 | 1271 11
1273 | 1272 11
1274 | 1273 11
1275 | 1274 11
1276 | 1275 11
1277 | 1276 11
1278 | 1277 11
1279 | 1278 11
1280 | 1279 11
1281 | 1280 11
1282 | 1281 11
1283 | 1282 11
1284 | 1283 11
1285 | 1284 11
1286 | 1285 11
1287 | 1286 11
1288 | 1287 11
1289 | 1288 11
1290 | 1289 11
1291 | 1290 11
1292 | 1291 11
1293 | 1292 11
1294 | 1293 11
1295 | 1294 11
1296 | 1295 11
1297 | 1296 11
1298 | 1297 11
1299 | 1298 11
1300 | 1299 11
1301 | 1300 11
1302 | 1301 11
1303 | 1302 11
1304 | 1303 11
1305 | 1304 11
1306 | 1305 11
1307 | 1306 11
1308 | 1307 11
1309 | 1308 11
1310 | 1309 11
1311 | 1310 11
1312 | 1311 11
1313 | 1312 11
1314 | 1313 11
1315 | 1314 11
1316 | 1315 11
1317 | 1316 11
1318 | 1317 11
1319 | 1318 11
1320 | 1319 11
1321 | 1320 11
1322 | 1321 11
1323 | 1322 11
1324 | 1323 11
1325 | 1324 11
1326 | 1325 11
1327 | 1326 11
1328 | 1327 11
1329 | 1328 11
1330 | 1329 11
1331 | 1330 11
1332 | 1331 11
1333 | 1332 11
1334 | 1333 11
1335 | 1334 11
1336 | 1335 11
1337 | 1336 11
1338 | 1337 11
1339 | 1338 11
1340 | 1339 11
1341 | 1340 11
1342 | 1341 11
1343 | 1342 11
1344 | 1343 11
1345 | 1344 11
1346 | 1345 11
1347 | 1346 11
1348 | 1347 11
1349 | 1348 11
1350 | 1349 11
1351 | 1350 11
1352 | 1351 11
1353 | 1352 11
1354 | 1353 11
1355 | 1354 11
1356 | 1355 11
1357 | 1356 11
1358 | 1357 11
1359 | 1358 11
1360 | 1359 11
1361 | 1360 11
1362 | 1361 11
1363 | 1362 11
1364 | 1363 11
1365 | 1364 11
1366 | 1365 11
1367 | 1366 11
1368 | 1367 11
1369 | 1368 11
1370 | 1369 11
1371 | 1370 11
1372 | 1371 11
1373 | 1372 11
1374 | 1373 11
1375 | 1374 11
1376 | 1375 11
1377 | 1376 11
1378 | 1377 11
1379 | 1378 11
1380 | 1379 11
1381 | 1380 11
1382 | 1381 11
1383 | 1382 11
1384 | 1383 11
1385 | 1384 11
1386 | 1385 11
1387 | 1386 4
1388 | 1387 4
1389 | 1388 4
1390 | 1389 4
1391 | 1390 4
1392 | 1391 4
1393 | 1392 4
1394 | 1393 4
1395 | 1394 4
1396 | 1395 4
1397 | 1396 3
1398 | 1397 3
1399 | 1398 3
1400 | 1399 3
1401 | 1400 3
1402 | 1401 3
1403 | 1402 3
1404 | 1403 3
1405 | 1404 3
1406 | 1405 3
1407 | 1406 3
1408 | 1407 3
1409 | 1408 3
1410 | 1409 3
1411 | 1410 3
1412 | 1411 3
1413 | 1412 3
1414 | 1413 3
1415 | 1414 3
1416 | 1415 3
1417 | 1416 3
1418 | 1417 3
1419 | 1418 3
1420 | 1419 3
1421 | 1420 3
1422 | 1421 3
1423 | 1422 3
1424 | 1423 3
1425 | 1424 3
1426 | 1425 3
1427 | 1426 3
1428 | 1427 3
1429 | 1428 3
1430 | 1429 3
1431 | 1430 3
1432 | 1431 3
1433 | 1432 3
1434 | 1433 3
1435 | 1434 3
1436 | 1435 3
1437 | 1436 3
1438 | 1437 3
1439 | 1438 3
1440 | 1439 3
1441 | 1440 3
1442 | 1441 3
1443 | 1442 3
1444 | 1443 3
1445 | 1444 3
1446 | 1445 3
1447 | 1446 3
1448 | 1447 3
1449 | 1448 3
1450 | 1449 3
1451 | 1450 3
1452 | 1451 3
1453 | 1452 3
1454 | 1453 3
1455 | 1454 3
1456 | 1455 3
1457 | 1456 3
1458 | 1457 3
1459 | 1458 3
1460 | 1459 3
1461 | 1460 3
1462 | 1461 3
1463 | 1462 3
1464 | 1463 3
1465 | 1464 3
1466 | 1465 3
1467 | 1466 3
1468 | 1467 3
1469 | 1468 3
1470 | 1469 3
1471 | 1470 3
1472 | 1471 3
1473 | 1472 3
1474 | 1473 3
1475 | 1474 3
1476 | 1475 3
1477 | 1476 3
1478 | 1477 3
1479 | 1478 3
1480 | 1479 3
1481 | 1480 3
1482 | 1481 3
1483 | 1482 0
1484 | 1483 0
1485 | 1484 0
1486 | 1485 0
1487 | 1486 0
1488 | 1487 0
1489 | 1488 0
1490 | 1489 0
1491 | 1490 0
1492 | 1491 0
1493 | 1492 0
1494 | 1493 0
1495 | 1494 0
1496 | 1495 0
1497 | 1496 0
1498 | 1497 0
1499 | 1498 0
1500 | 1499 0
1501 | 1500 0
1502 | 1501 0
1503 | 1502 0
1504 | 1503 0
1505 | 1504 0
1506 | 1505 0
1507 | 1506 0
1508 | 1507 0
1509 | 1508 0
1510 | 1509 0
1511 | 1510 0
1512 | 1511 0
1513 | 1512 0
1514 | 1513 0
1515 | 1514 0
1516 | 1515 0
1517 | 1516 0
1518 | 1517 0
1519 | 1518 0
1520 | 1519 0
1521 | 1520 0
1522 | 1521 0
1523 | 1522 0
1524 | 1523 0
1525 | 1524 0
1526 | 1525 0
1527 | 1526 0
1528 | 1527 0
1529 | 1528 0
1530 | 1529 0
1531 | 1530 0
1532 | 1531 0
1533 | 1532 0
1534 | 1533 0
1535 | 1534 0
1536 | 1535 0
1537 | 1536 0
1538 | 1537 0
1539 | 1538 0
1540 | 1539 0
1541 | 1540 0
1542 | 1541 0
1543 | 1542 0
1544 | 1543 0
1545 | 1544 0
1546 | 1545 0
1547 | 1546 0
1548 | 1547 0
1549 | 1548 0
1550 | 1549 0
1551 | 1550 0
1552 | 1551 7
1553 | 1552 7
1554 | 1553 7
1555 | 1554 7
1556 | 1555 7
1557 | 1556 7
1558 | 1557 7
1559 | 1558 7
1560 | 1559 7
1561 | 1560 7
1562 | 1561 7
1563 | 1562 7
1564 | 1563 7
1565 | 1564 7
1566 | 1565 7
1567 | 1566 7
1568 | 1567 7
1569 | 1568 7
1570 | 1569 7
1571 | 1570 7
1572 | 1571 7
1573 | 1572 7
1574 | 1573 7
1575 | 1574 7
1576 | 1575 7
1577 | 1576 7
1578 | 1577 7
1579 | 1578 7
1580 | 1579 7
1581 | 1580 7
1582 | 1581 7
1583 | 1582 7
1584 | 1583 7
1585 | 1584 7
1586 | 1585 7
1587 | 1586 7
1588 | 1587 7
1589 | 1588 7
1590 | 1589 7
1591 | 1590 7
1592 | 1591 7
1593 | 1592 7
1594 | 1593 7
1595 | 1594 7
1596 | 1595 7
1597 | 1596 7
1598 | 1597 7
1599 | 1598 7
1600 | 1599 7
1601 | 1600 7
1602 | 1601 7
1603 | 1602 7
1604 | 1603 7
1605 | 1604 8
1606 | 1605 8
1607 | 1606 8
1608 | 1607 8
1609 | 1608 8
1610 | 1609 8
1611 | 1610 8
1612 | 1611 8
1613 | 1612 8
1614 | 1613 8
1615 | 1614 8
1616 | 1615 8
1617 | 1616 8
1618 | 1617 8
1619 | 1618 8
1620 | 1619 8
1621 | 1620 8
1622 | 1621 8
1623 | 1622 8
1624 | 1623 8
1625 | 1624 8
1626 | 1625 8
1627 | 1626 8
1628 | 1627 8
1629 | 1628 8
1630 | 1629 8
1631 | 1630 8
1632 | 1631 8
1633 | 1632 8
1634 | 1633 8
1635 | 1634 8
1636 | 1635 8
1637 | 1636 8
1638 | 1637 8
1639 | 1638 8
1640 | 1639 8
1641 | 1640 8
1642 | 1641 8
1643 | 1642 8
1644 | 1643 8
1645 | 1644 8
1646 | 1645 8
1647 | 1646 8
1648 | 1647 8
1649 | 1648 8
1650 | 1649 8
1651 | 1650 8
1652 | 1651 8
1653 | 1652 8
1654 | 1653 8
1655 | 1654 8
1656 | 1655 8
1657 | 1656 8
1658 | 1657 8
1659 | 1658 8
1660 | 1659 8
1661 | 1660 8
1662 | 1661 8
1663 | 1662 8
1664 | 1663 8
1665 | 1664 8
1666 | 1665 8
1667 | 1666 8
1668 | 1667 8
1669 | 1668 8
1670 | 1669 8
1671 | 1670 8
1672 | 1671 8
1673 | 1672 8
1674 | 1673 8
1675 | 1674 8
1676 | 1675 8
1677 | 1676 8
1678 | 1677 8
1679 | 1678 8
1680 | 1679 8
1681 | 1680 8
1682 | 1681 8
1683 | 1682 8
1684 | 1683 8
1685 | 1684 8
1686 | 1685 8
1687 | 1686 8
1688 | 1687 2
1689 | 1688 2
1690 | 1689 2
1691 | 1690 2
1692 | 1691 2
1693 | 1692 2
1694 | 1693 2
1695 | 1694 2
1696 | 1695 2
1697 | 1696 2
1698 | 1697 2
1699 | 1698 2
1700 | 1699 2
1701 | 1700 2
1702 | 1701 2
1703 | 1702 2
1704 | 1703 2
1705 | 1704 2
1706 | 1705 2
1707 | 1706 2
1708 | 1707 2
1709 | 1708 2
1710 | 1709 2
1711 | 1710 2
1712 | 1711 2
1713 | 1712 2
1714 | 1713 2
1715 | 1714 2
1716 | 1715 2
1717 | 1716 2
1718 | 1717 2
1719 | 1718 2
1720 | 1719 2
1721 | 1720 2
1722 | 1721 2
1723 | 1722 2
1724 | 1723 2
1725 | 1724 2
1726 | 1725 2
1727 | 1726 2
1728 | 1727 2
1729 | 1728 2
1730 | 1729 2
1731 | 1730 2
1732 | 1731 2
1733 | 1732 2
1734 | 1733 2
1735 | 1734 2
1736 | 1735 2
1737 | 1736 2
1738 | 1737 2
1739 | 1738 2
1740 | 1739 2
1741 | 1740 2
1742 | 1741 2
1743 | 1742 2
1744 | 1743 2
1745 | 1744 2
1746 | 1745 2
1747 | 1746 2
1748 | 1747 2
1749 | 1748 2
1750 | 1749 2
1751 | 1750 2
1752 | 1751 2
1753 | 1752 2
1754 | 1753 2
1755 | 1754 2
1756 | 1755 2
1757 | 1756 2
1758 | 1757 2
1759 | 1758 2
1760 | 1759 2
1761 | 1760 2
1762 | 1761 2
1763 | 1762 2
1764 | 1763 2
1765 | 1764 2
1766 | 1765 2
1767 | 1766 2
1768 | 1767 2
1769 | 1768 2
1770 | 1769 2
1771 | 1770 2
1772 | 1771 2
1773 | 1772 2
1774 | 1773 2
1775 | 1774 2
1776 | 1775 2
1777 | 1776 2
1778 | 1777 2
1779 | 1778 2
1780 | 1779 2
1781 | 1780 2
1782 | 1781 2
1783 | 1782 2
1784 | 1783 2
1785 | 1784 2
1786 | 1785 2
1787 | 1786 2
1788 | 1787 2
1789 | 1788 2
1790 | 1789 2
1791 | 1790 2
1792 | 1791 2
1793 | 1792 2
1794 | 1793 2
1795 | 1794 2
1796 | 1795 2
1797 | 1796 2
1798 | 1797 2
1799 | 1798 2
1800 | 1799 2
1801 | 1800 2
1802 | 1801 2
1803 | 1802 2
1804 | 1803 2
1805 | 1804 2
1806 | 1805 2
1807 | 1806 2
1808 | 1807 2
1809 | 1808 2
1810 | 1809 2
1811 | 1810 2
1812 | 1811 2
1813 | 1812 2
1814 | 1813 2
1815 | 1814 2
1816 | 1815 2
1817 | 1816 2
1818 | 1817 2
1819 | 1818 2
1820 | 1819 2
1821 | 1820 2
1822 | 1821 2
1823 | 1822 2
1824 | 1823 2
1825 | 1824 2
1826 | 1825 2
1827 | 1826 2
1828 | 1827 2
1829 | 1828 2
1830 | 1829 2
1831 | 1830 2
1832 | 1831 2
1833 | 1832 2
1834 | 1833 2
1835 | 1834 2
1836 | 1835 2
1837 | 1836 2
1838 | 1837 2
1839 | 1838 2
1840 | 1839 2
1841 | 1840 2
1842 | 1841 2
1843 | 1842 2
1844 | 1843 2
1845 | 1844 2
1846 | 1845 2
1847 | 1846 2
1848 | 1847 2
1849 | 1848 2
1850 | 1849 2
1851 | 1850 2
1852 | 1851 2
1853 | 1852 2
1854 | 1853 2
1855 | 1854 2
1856 | 1855 2
1857 | 1856 2
1858 | 1857 2
1859 | 1858 2
1860 | 1859 2
1861 | 1860 2
1862 | 1861 2
1863 | 1862 2
1864 | 1863 2
1865 | 1864 2
1866 | 1865 2
1867 | 1866 2
1868 | 1867 2
1869 | 1868 2
1870 | 1869 2
1871 | 1870 2
1872 | 1871 2
1873 | 1872 2
1874 | 1873 2
1875 | 1874 2
1876 | 1875 2
1877 | 1876 2
1878 | 1877 2
1879 | 1878 2
1880 | 1879 2
1881 | 1880 2
1882 | 1881 2
1883 | 1882 2
1884 | 1883 2
1885 | 1884 2
1886 | 1885 2
1887 | 1886 2
1888 | 1887 2
1889 | 1888 2
1890 | 1889 2
1891 | 1890 14
1892 | 1891 14
1893 | 1892 14
1894 | 1893 14
1895 | 1894 14
1896 | 1895 14
1897 | 1896 14
1898 | 1897 14
1899 | 1898 14
1900 | 1899 14
1901 | 1900 14
1902 | 1901 14
1903 | 1902 14
1904 | 1903 14
1905 | 1904 14
1906 | 1905 14
1907 | 1906 14
1908 | 1907 14
1909 | 1908 14
1910 | 1909 14
1911 | 1910 14
1912 | 1911 14
1913 | 1912 14
1914 | 1913 14
1915 | 1914 14
1916 | 1915 14
1917 | 1916 14
1918 | 1917 14
1919 | 1918 14
1920 | 1919 14
1921 | 1920 14
1922 | 1921 14
1923 | 1922 14
1924 | 1923 14
1925 | 1924 14
1926 | 1925 14
1927 | 1926 14
1928 | 1927 14
1929 | 1928 14
1930 | 1929 14
1931 | 1930 14
1932 | 1931 14
1933 | 1932 14
1934 | 1933 14
1935 | 1934 14
1936 | 1935 14
1937 | 1936 14
1938 | 1937 14
1939 | 1938 14
1940 | 1939 14
1941 | 1940 14
1942 | 1941 14
1943 | 1942 14
1944 | 1943 14
1945 | 1944 14
1946 | 1945 14
1947 | 1946 14
1948 | 1947 14
1949 | 1948 14
1950 | 1949 14
1951 | 1950 14
1952 | 1951 14
1953 | 1952 14
1954 | 1953 14
1955 | 1954 14
1956 | 1955 14
1957 | 1956 14
1958 | 1957 14
1959 | 1958 14
1960 | 1959 14
1961 | 1960 14
1962 | 1961 14
1963 | 1962 14
1964 | 1963 14
1965 | 1964 14
1966 | 1965 14
1967 | 1966 14
1968 | 1967 6
1969 | 1968 6
1970 | 1969 6
1971 | 1970 6
1972 | 1971 6
1973 | 1972 6
1974 | 1973 6
1975 | 1974 6
1976 | 1975 6
1977 | 1976 6
1978 | 1977 6
1979 | 1978 6
1980 | 1979 6
1981 | 1980 6
1982 | 1981 6
1983 | 1982 6
1984 | 1983 6
1985 | 1984 6
1986 | 1985 6
1987 | 1986 6
1988 | 1987 6
1989 | 1988 6
1990 | 1989 6
1991 | 1990 6
1992 | 1991 6
1993 | 1992 6
1994 | 1993 6
1995 | 1994 6
1996 | 1995 6
1997 | 1996 6
1998 | 1997 6
1999 | 1998 6
2000 | 1999 6
2001 | 2000 6
2002 | 2001 6
2003 | 2002 6
2004 | 2003 6
2005 | 2004 6
2006 | 2005 6
2007 | 2006 6
2008 | 2007 6
2009 | 2008 6
2010 | 2009 6
2011 | 2010 6
2012 | 2011 6
2013 | 2012 6
2014 | 2013 6
2015 | 2014 6
2016 | 2015 6
2017 | 2016 6
2018 | 2017 6
2019 | 2018 6
2020 | 2019 6
2021 | 2020 6
2022 | 2021 6
2023 | 2022 6
2024 | 2023 6
2025 | 2024 6
2026 | 2025 6
2027 | 2026 6
2028 | 2027 6
2029 | 2028 6
2030 | 2029 6
2031 | 2030 6
2032 | 2031 6
2033 | 2032 6
2034 | 2033 6
2035 | 2034 6
2036 | 2035 6
2037 | 2036 6
2038 | 2037 6
2039 | 2038 6
2040 | 2039 6
2041 | 2040 6
2042 | 2041 6
2043 | 2042 6
2044 | 2043 6
2045 | 2044 6
2046 | 2045 6
2047 | 2046 6
2048 | 2047 6
2049 | 2048 6
2050 | 2049 6
2051 | 2050 6
2052 | 2051 6
2053 | 2052 6
2054 | 2053 6
2055 | 2054 6
2056 | 2055 6
2057 | 2056 6
2058 | 2057 6
2059 | 2058 6
2060 | 2059 6
2061 | 2060 6
2062 | 2061 6
2063 | 2062 6
2064 | 2063 6
2065 | 2064 6
2066 | 2065 6
2067 | 2066 6
2068 | 2067 6
2069 | 2068 6
2070 | 2069 6
2071 | 2070 6
2072 | 2071 6
2073 | 2072 6
2074 | 2073 6
2075 | 2074 6
2076 | 2075 6
2077 | 2076 6
2078 | 2077 6
2079 | 2078 10
2080 | 2079 10
2081 | 2080 10
2082 | 2081 10
2083 | 2082 10
2084 | 2083 10
2085 | 2084 10
2086 | 2085 10
2087 | 2086 10
2088 | 2087 10
2089 | 2088 10
2090 | 2089 10
2091 | 2090 10
2092 | 2091 10
2093 | 2092 10
2094 | 2093 10
2095 | 2094 10
2096 | 2095 10
2097 | 2096 10
2098 | 2097 10
2099 | 2098 10
2100 | 2099 10
2101 | 2100 10
2102 | 2101 10
2103 | 2102 10
2104 | 2103 10
2105 | 2104 10
2106 | 2105 10
2107 | 2106 10
2108 | 2107 10
2109 | 2108 10
2110 | 2109 10
2111 | 2110 10
2112 | 2111 10
2113 | 2112 10
2114 | 2113 10
2115 | 2114 10
2116 | 2115 10
2117 | 2116 10
2118 | 2117 10
2119 | 2118 10
2120 | 2119 10
2121 | 2120 10
2122 | 2121 10
2123 | 2122 10
2124 | 2123 10
2125 | 2124 10
2126 | 2125 10
2127 | 2126 10
2128 | 2127 10
2129 | 2128 10
2130 | 2129 10
2131 | 2130 10
2132 | 2131 10
2133 | 2132 10
2134 | 2133 10
2135 | 2134 10
2136 | 2135 10
2137 | 2136 10
2138 | 2137 10
2139 | 2138 10
2140 | 2139 10
2141 | 2140 10
2142 | 2141 10
2143 | 2142 10
2144 | 2143 10
2145 | 2144 10
2146 | 2145 10
2147 | 2146 10
2148 | 2147 10
2149 | 2148 10
2150 | 2149 10
2151 | 2150 10
2152 | 2151 10
2153 | 2152 10
2154 | 2153 10
2155 | 2154 10
2156 | 2155 10
2157 | 2156 10
2158 | 2157 10
2159 | 2158 10
2160 | 2159 10
2161 | 2160 10
2162 | 2161 10
2163 | 2162 10
2164 | 2163 10
2165 | 2164 10
2166 | 2165 10
2167 | 2166 10
2168 | 2167 10
2169 | 2168 10
2170 | 2169 10
2171 | 2170 10
2172 | 2171 10
2173 | 2172 10
2174 | 2173 10
2175 | 2174 10
2176 | 2175 10
2177 | 2176 10
2178 | 2177 10
2179 | 2178 10
2180 | 2179 10
2181 | 2180 10
2182 | 2181 10
2183 | 2182 10
2184 | 2183 10
2185 | 2184 10
2186 | 2185 10
2187 | 2186 10
2188 | 2187 10
2189 | 2188 10
2190 | 2189 10
2191 | 2190 10
2192 | 2191 10
2193 | 2192 10
2194 | 2193 10
2195 | 2194 10
2196 | 2195 10
2197 | 2196 10
2198 | 2197 10
2199 | 2198 10
2200 | 2199 10
2201 | 2200 10
2202 | 2201 10
2203 | 2202 10
2204 | 2203 10
2205 | 2204 10
2206 | 2205 10
2207 | 2206 10
2208 | 2207 10
2209 | 2208 10
2210 | 2209 10
2211 | 2210 10
2212 | 2211 10
2213 | 2212 10
2214 | 2213 10
2215 | 2214 10
2216 | 2215 10
2217 | 2216 10
2218 | 2217 10
2219 | 2218 10
2220 | 2219 10
2221 | 2220 10
2222 | 2221 10
2223 | 2222 10
2224 | 2223 10
2225 | 2224 10
2226 | 2225 10
2227 | 2226 10
2228 | 2227 10
2229 | 2228 10
2230 | 2229 10
2231 | 2230 10
2232 | 2231 10
2233 | 2232 10
2234 | 2233 10
2235 | 2234 10
2236 | 2235 10
2237 | 2236 10
2238 | 2237 10
2239 | 2238 10
2240 | 2239 10
2241 | 2240 10
2242 | 2241 10
2243 | 2242 10
2244 | 2243 10
2245 | 2244 10
2246 | 2245 10
2247 | 2246 10
2248 | 2247 10
2249 | 2248 10
2250 | 2249 10
2251 | 2250 10
2252 | 2251 10
2253 | 2252 10
2254 | 2253 10
2255 | 2254 10
2256 | 2255 10
2257 | 2256 10
2258 | 2257 10
2259 | 2258 10
2260 | 2259 10
2261 | 2260 10
2262 | 2261 10
2263 | 2262 10
2264 | 2263 10
2265 | 2264 10
2266 | 2265 10
2267 | 2266 10
2268 | 2267 10
2269 | 2268 10
2270 | 2269 10
2271 | 2270 10
2272 | 2271 10
2273 | 2272 10
2274 | 2273 10
2275 | 2274 10
2276 | 2275 10
2277 | 2276 10
2278 | 2277 10
2279 | 2278 10
2280 | 2279 10
2281 | 2280 10
2282 | 2281 10
2283 | 2282 10
2284 | 2283 10
2285 | 2284 10
2286 | 2285 10
2287 | 2286 10
2288 | 2287 10
2289 | 2288 10
2290 | 2289 10
2291 | 2290 10
2292 | 2291 10
2293 | 2292 10
2294 | 2293 10
2295 | 2294 10
2296 | 2295 10
2297 | 2296 10
2298 | 2297 10
2299 | 2298 10
2300 | 2299 10
2301 | 2300 10
2302 | 2301 10
2303 | 2302 10
2304 | 2303 10
2305 | 2304 10
2306 | 2305 10
2307 | 2306 10
2308 | 2307 10
2309 | 2308 10
2310 | 2309 10
2311 | 2310 10
2312 | 2311 10
2313 | 2312 10
2314 | 2313 10
2315 | 2314 10
2316 | 2315 10
2317 | 2316 10
2318 | 2317 15
2319 | 2318 15
2320 | 2319 15
2321 | 2320 15
2322 | 2321 15
2323 | 2322 15
2324 | 2323 15
2325 | 2324 15
2326 | 2325 15
2327 | 2326 15
2328 | 2327 15
2329 | 2328 15
2330 | 2329 15
2331 | 2330 15
2332 | 2331 15
2333 | 2332 15
2334 | 2333 15
2335 | 2334 15
2336 | 2335 15
2337 | 2336 15
2338 | 2337 15
2339 | 2338 15
2340 | 2339 15
2341 | 2340 15
2342 | 2341 15
2343 | 2342 15
2344 | 2343 15
2345 | 2344 15
2346 | 2345 15
2347 | 2346 15
2348 | 2347 15
2349 | 2348 15
2350 | 2349 15
2351 | 2350 15
2352 | 2351 15
2353 | 2352 15
2354 | 2353 15
2355 | 2354 15
2356 | 2355 15
2357 | 2356 15
2358 | 2357 15
2359 | 2358 15
2360 | 2359 15
2361 | 2360 15
2362 | 2361 15
2363 | 2362 15
2364 | 2363 15
2365 | 2364 15
2366 | 2365 15
2367 | 2366 15
2368 | 2367 15
2369 | 2368 15
2370 | 2369 15
2371 | 2370 15
2372 | 2371 15
2373 | 2372 15
2374 | 2373 15
2375 | 2374 15
2376 | 2375 15
2377 | 2376 15
2378 | 2377 15
2379 | 2378 15
2380 | 2379 15
2381 | 2380 15
2382 | 2381 15
2383 | 2382 15
2384 | 2383 15
2385 | 2384 15
2386 | 2385 15
2387 | 2386 15
2388 | 2387 15
2389 | 2388 15
2390 | 2389 15
2391 | 2390 8
2392 | 2391 8
2393 | 2392 8
2394 | 2393 8
2395 | 2394 8
2396 | 2395 8
2397 | 2396 8
2398 | 2397 8
2399 | 2398 8
2400 | 2399 8
2401 | 2400 8
2402 | 2401 8
2403 | 2402 8
2404 | 2403 8
2405 | 2404 8
2406 |
--------------------------------------------------------------------------------
/data/wiki/wiki_labels.txt:
--------------------------------------------------------------------------------
1 | 0 8
2 | 1 8
3 | 2 8
4 | 3 8
5 | 4 8
6 | 5 8
7 | 6 8
8 | 7 8
9 | 8 8
10 | 9 8
11 | 10 8
12 | 11 1
13 | 12 1
14 | 13 1
15 | 14 1
16 | 15 1
17 | 16 1
18 | 17 1
19 | 18 1
20 | 19 1
21 | 20 1
22 | 21 1
23 | 22 1
24 | 23 1
25 | 24 1
26 | 25 1
27 | 26 1
28 | 27 1
29 | 28 1
30 | 29 1
31 | 30 1
32 | 31 1
33 | 32 1
34 | 33 1
35 | 34 1
36 | 35 1
37 | 36 1
38 | 37 1
39 | 38 1
40 | 39 1
41 | 40 1
42 | 41 1
43 | 42 1
44 | 43 1
45 | 44 1
46 | 45 1
47 | 46 1
48 | 47 1
49 | 48 1
50 | 49 1
51 | 50 1
52 | 51 1
53 | 52 1
54 | 53 1
55 | 54 1
56 | 55 1
57 | 56 1
58 | 57 1
59 | 58 1
60 | 59 1
61 | 60 1
62 | 61 1
63 | 62 1
64 | 63 1
65 | 64 1
66 | 65 1
67 | 66 1
68 | 67 1
69 | 68 1
70 | 69 1
71 | 70 1
72 | 71 1
73 | 72 1
74 | 73 1
75 | 74 1
76 | 75 1
77 | 76 1
78 | 77 1
79 | 78 1
80 | 79 1
81 | 80 1
82 | 81 1
83 | 82 1
84 | 83 1
85 | 84 1
86 | 85 1
87 | 86 1
88 | 87 1
89 | 88 1
90 | 89 1
91 | 90 1
92 | 91 1
93 | 92 1
94 | 93 1
95 | 94 1
96 | 95 1
97 | 96 1
98 | 97 1
99 | 98 1
100 | 99 1
101 | 100 1
102 | 101 1
103 | 102 1
104 | 103 1
105 | 104 1
106 | 105 1
107 | 106 1
108 | 107 1
109 | 108 1
110 | 109 1
111 | 110 1
112 | 111 1
113 | 112 1
114 | 113 1
115 | 114 1
116 | 115 1
117 | 116 1
118 | 117 1
119 | 118 1
120 | 119 1
121 | 120 1
122 | 121 1
123 | 122 1
124 | 123 1
125 | 124 1
126 | 125 1
127 | 126 1
128 | 127 1
129 | 128 1
130 | 129 1
131 | 130 1
132 | 131 1
133 | 132 1
134 | 133 1
135 | 134 1
136 | 135 1
137 | 136 1
138 | 137 1
139 | 138 1
140 | 139 1
141 | 140 1
142 | 141 1
143 | 142 1
144 | 143 1
145 | 144 1
146 | 145 1
147 | 146 1
148 | 147 1
149 | 148 1
150 | 149 1
151 | 150 1
152 | 151 1
153 | 152 1
154 | 153 1
155 | 154 1
156 | 155 1
157 | 156 1
158 | 157 1
159 | 158 1
160 | 159 1
161 | 160 1
162 | 161 1
163 | 162 1
164 | 163 1
165 | 164 1
166 | 165 1
167 | 166 1
168 | 167 1
169 | 168 1
170 | 169 1
171 | 170 1
172 | 171 1
173 | 172 1
174 | 173 1
175 | 174 1
176 | 175 1
177 | 176 1
178 | 177 1
179 | 178 1
180 | 179 1
181 | 180 1
182 | 181 1
183 | 182 1
184 | 183 1
185 | 184 1
186 | 185 1
187 | 186 1
188 | 187 1
189 | 188 1
190 | 189 1
191 | 190 0
192 | 191 0
193 | 192 0
194 | 193 0
195 | 194 0
196 | 195 0
197 | 196 0
198 | 197 0
199 | 198 0
200 | 199 0
201 | 200 0
202 | 201 0
203 | 202 0
204 | 203 0
205 | 204 0
206 | 205 0
207 | 206 0
208 | 207 0
209 | 208 0
210 | 209 0
211 | 210 0
212 | 211 0
213 | 212 0
214 | 213 1
215 | 214 1
216 | 215 1
217 | 216 1
218 | 217 1
219 | 218 1
220 | 219 1
221 | 220 1
222 | 221 1
223 | 222 1
224 | 223 1
225 | 224 1
226 | 225 1
227 | 226 1
228 | 227 1
229 | 228 1
230 | 229 1
231 | 230 1
232 | 231 1
233 | 232 1
234 | 233 1
235 | 234 1
236 | 235 1
237 | 236 1
238 | 237 1
239 | 238 1
240 | 239 1
241 | 240 1
242 | 241 1
243 | 242 1
244 | 243 1
245 | 244 1
246 | 245 1
247 | 246 1
248 | 247 1
249 | 248 6
250 | 249 6
251 | 250 6
252 | 251 6
253 | 252 6
254 | 253 6
255 | 254 6
256 | 255 6
257 | 256 6
258 | 257 6
259 | 258 6
260 | 259 6
261 | 260 6
262 | 261 6
263 | 262 6
264 | 263 6
265 | 264 6
266 | 265 5
267 | 266 5
268 | 267 5
269 | 268 5
270 | 269 5
271 | 270 5
272 | 271 5
273 | 272 5
274 | 273 5
275 | 274 5
276 | 275 5
277 | 276 5
278 | 277 5
279 | 278 5
280 | 279 5
281 | 280 5
282 | 281 5
283 | 282 5
284 | 283 5
285 | 284 5
286 | 285 5
287 | 286 5
288 | 287 5
289 | 288 5
290 | 289 5
291 | 290 5
292 | 291 5
293 | 292 5
294 | 293 5
295 | 294 5
296 | 295 5
297 | 296 5
298 | 297 5
299 | 298 5
300 | 299 5
301 | 300 5
302 | 301 5
303 | 302 5
304 | 303 5
305 | 304 5
306 | 305 5
307 | 306 5
308 | 307 5
309 | 308 5
310 | 309 5
311 | 310 5
312 | 311 5
313 | 312 5
314 | 313 5
315 | 314 5
316 | 315 5
317 | 316 5
318 | 317 5
319 | 318 13
320 | 319 13
321 | 320 13
322 | 321 13
323 | 322 13
324 | 323 13
325 | 324 13
326 | 325 13
327 | 326 13
328 | 327 13
329 | 328 13
330 | 329 13
331 | 330 13
332 | 331 13
333 | 332 13
334 | 333 13
335 | 334 13
336 | 335 13
337 | 336 13
338 | 337 13
339 | 338 13
340 | 339 13
341 | 340 13
342 | 341 13
343 | 342 13
344 | 343 13
345 | 344 13
346 | 345 13
347 | 346 13
348 | 347 13
349 | 348 6
350 | 349 6
351 | 350 6
352 | 351 6
353 | 352 6
354 | 353 6
355 | 354 6
356 | 355 6
357 | 356 6
358 | 357 6
359 | 358 6
360 | 359 6
361 | 360 6
362 | 361 6
363 | 362 6
364 | 363 6
365 | 364 6
366 | 365 6
367 | 366 6
368 | 367 6
369 | 368 6
370 | 369 6
371 | 370 6
372 | 371 6
373 | 372 6
374 | 373 6
375 | 374 6
376 | 375 6
377 | 376 6
378 | 377 12
379 | 378 12
380 | 379 12
381 | 380 12
382 | 381 12
383 | 382 12
384 | 383 12
385 | 384 12
386 | 385 12
387 | 386 15
388 | 387 15
389 | 388 15
390 | 389 15
391 | 390 15
392 | 391 15
393 | 392 15
394 | 393 15
395 | 394 15
396 | 395 15
397 | 396 15
398 | 397 15
399 | 398 15
400 | 399 15
401 | 400 15
402 | 401 15
403 | 402 15
404 | 403 15
405 | 404 15
406 | 405 15
407 | 406 15
408 | 407 15
409 | 408 15
410 | 409 15
411 | 410 15
412 | 411 15
413 | 412 15
414 | 413 15
415 | 414 15
416 | 415 15
417 | 416 15
418 | 417 15
419 | 418 15
420 | 419 15
421 | 420 15
422 | 421 15
423 | 422 15
424 | 423 15
425 | 424 15
426 | 425 15
427 | 426 15
428 | 427 15
429 | 428 15
430 | 429 15
431 | 430 15
432 | 431 15
433 | 432 15
434 | 433 15
435 | 434 15
436 | 435 15
437 | 436 15
438 | 437 15
439 | 438 15
440 | 439 15
441 | 440 15
442 | 441 15
443 | 442 15
444 | 443 15
445 | 444 15
446 | 445 15
447 | 446 15
448 | 447 15
449 | 448 15
450 | 449 15
451 | 450 15
452 | 451 15
453 | 452 15
454 | 453 15
455 | 454 15
456 | 455 15
457 | 456 15
458 | 457 15
459 | 458 15
460 | 459 15
461 | 460 15
462 | 461 15
463 | 462 15
464 | 463 15
465 | 464 15
466 | 465 15
467 | 466 15
468 | 467 15
469 | 468 15
470 | 469 15
471 | 470 15
472 | 471 15
473 | 472 15
474 | 473 15
475 | 474 15
476 | 475 15
477 | 476 15
478 | 477 15
479 | 478 15
480 | 479 15
481 | 480 15
482 | 481 15
483 | 482 15
484 | 483 15
485 | 484 15
486 | 485 15
487 | 486 15
488 | 487 15
489 | 488 15
490 | 489 15
491 | 490 15
492 | 491 15
493 | 492 15
494 | 493 15
495 | 494 15
496 | 495 15
497 | 496 15
498 | 497 15
499 | 498 15
500 | 499 15
501 | 500 15
502 | 501 15
503 | 502 15
504 | 503 15
505 | 504 15
506 | 505 15
507 | 506 15
508 | 507 15
509 | 508 15
510 | 509 15
511 | 510 15
512 | 511 15
513 | 512 15
514 | 513 15
515 | 514 15
516 | 515 15
517 | 516 15
518 | 517 15
519 | 518 15
520 | 519 15
521 | 520 15
522 | 521 15
523 | 522 15
524 | 523 15
525 | 524 15
526 | 525 15
527 | 526 15
528 | 527 15
529 | 528 15
530 | 529 15
531 | 530 15
532 | 531 15
533 | 532 15
534 | 533 15
535 | 534 15
536 | 535 15
537 | 536 15
538 | 537 15
539 | 538 15
540 | 539 15
541 | 540 15
542 | 541 16
543 | 542 16
544 | 543 16
545 | 544 16
546 | 545 16
547 | 546 16
548 | 547 16
549 | 548 16
550 | 549 16
551 | 550 16
552 | 551 16
553 | 552 16
554 | 553 16
555 | 554 16
556 | 555 16
557 | 556 16
558 | 557 16
559 | 558 16
560 | 559 16
561 | 560 16
562 | 561 16
563 | 562 16
564 | 563 16
565 | 564 16
566 | 565 16
567 | 566 16
568 | 567 16
569 | 568 16
570 | 569 16
571 | 570 16
572 | 571 16
573 | 572 16
574 | 573 16
575 | 574 16
576 | 575 16
577 | 576 16
578 | 577 16
579 | 578 16
580 | 579 16
581 | 580 16
582 | 581 16
583 | 582 16
584 | 583 16
585 | 584 16
586 | 585 16
587 | 586 16
588 | 587 16
589 | 588 16
590 | 589 16
591 | 590 16
592 | 591 16
593 | 592 16
594 | 593 16
595 | 594 16
596 | 595 16
597 | 596 16
598 | 597 16
599 | 598 16
600 | 599 16
601 | 600 16
602 | 601 16
603 | 602 16
604 | 603 16
605 | 604 16
606 | 605 16
607 | 606 16
608 | 607 16
609 | 608 16
610 | 609 16
611 | 610 16
612 | 611 16
613 | 612 16
614 | 613 16
615 | 614 16
616 | 615 16
617 | 616 16
618 | 617 16
619 | 618 16
620 | 619 16
621 | 620 16
622 | 621 16
623 | 622 16
624 | 623 16
625 | 624 16
626 | 625 16
627 | 626 16
628 | 627 16
629 | 628 16
630 | 629 16
631 | 630 16
632 | 631 16
633 | 632 16
634 | 633 16
635 | 634 16
636 | 635 16
637 | 636 16
638 | 637 16
639 | 638 16
640 | 639 16
641 | 640 16
642 | 641 16
643 | 642 16
644 | 643 16
645 | 644 16
646 | 645 16
647 | 646 16
648 | 647 16
649 | 648 16
650 | 649 1
651 | 650 1
652 | 651 1
653 | 652 1
654 | 653 1
655 | 654 1
656 | 655 1
657 | 656 1
658 | 657 1
659 | 658 1
660 | 659 1
661 | 660 1
662 | 661 1
663 | 662 1
664 | 663 1
665 | 664 1
666 | 665 1
667 | 666 1
668 | 667 1
669 | 668 1
670 | 669 1
671 | 670 1
672 | 671 1
673 | 672 1
674 | 673 1
675 | 674 1
676 | 675 1
677 | 676 1
678 | 677 1
679 | 678 1
680 | 679 1
681 | 680 1
682 | 681 1
683 | 682 1
684 | 683 1
685 | 684 1
686 | 685 1
687 | 686 1
688 | 687 1
689 | 688 1
690 | 689 1
691 | 690 1
692 | 691 1
693 | 692 1
694 | 693 1
695 | 694 1
696 | 695 1
697 | 696 1
698 | 697 1
699 | 698 1
700 | 699 1
701 | 700 1
702 | 701 1
703 | 702 1
704 | 703 1
705 | 704 1
706 | 705 1
707 | 706 1
708 | 707 1
709 | 708 1
710 | 709 1
711 | 710 1
712 | 711 1
713 | 712 1
714 | 713 1
715 | 714 1
716 | 715 1
717 | 716 1
718 | 717 1
719 | 718 1
720 | 719 1
721 | 720 1
722 | 721 1
723 | 722 1
724 | 723 1
725 | 724 1
726 | 725 1
727 | 726 1
728 | 727 1
729 | 728 1
730 | 729 1
731 | 730 1
732 | 731 1
733 | 732 1
734 | 733 1
735 | 734 1
736 | 735 1
737 | 736 1
738 | 737 1
739 | 738 1
740 | 739 1
741 | 740 1
742 | 741 1
743 | 742 1
744 | 743 1
745 | 744 1
746 | 745 1
747 | 746 1
748 | 747 1
749 | 748 1
750 | 749 1
751 | 750 1
752 | 751 1
753 | 752 1
754 | 753 1
755 | 754 1
756 | 755 1
757 | 756 1
758 | 757 1
759 | 758 1
760 | 759 1
761 | 760 1
762 | 761 1
763 | 762 1
764 | 763 1
765 | 764 1
766 | 765 1
767 | 766 1
768 | 767 1
769 | 768 1
770 | 769 1
771 | 770 1
772 | 771 1
773 | 772 1
774 | 773 1
775 | 774 1
776 | 775 1
777 | 776 1
778 | 777 1
779 | 778 1
780 | 779 1
781 | 780 1
782 | 781 1
783 | 782 1
784 | 783 1
785 | 784 1
786 | 785 1
787 | 786 1
788 | 787 1
789 | 788 1
790 | 789 1
791 | 790 1
792 | 791 1
793 | 792 1
794 | 793 1
795 | 794 1
796 | 795 1
797 | 796 1
798 | 797 1
799 | 798 1
800 | 799 1
801 | 800 1
802 | 801 1
803 | 802 1
804 | 803 1
805 | 804 1
806 | 805 1
807 | 806 1
808 | 807 1
809 | 808 1
810 | 809 1
811 | 810 1
812 | 811 1
813 | 812 1
814 | 813 1
815 | 814 1
816 | 815 1
817 | 816 1
818 | 817 1
819 | 818 1
820 | 819 1
821 | 820 1
822 | 821 1
823 | 822 1
824 | 823 1
825 | 824 1
826 | 825 1
827 | 826 1
828 | 827 1
829 | 828 1
830 | 829 1
831 | 830 1
832 | 831 1
833 | 832 1
834 | 833 1
835 | 834 1
836 | 835 1
837 | 836 1
838 | 837 1
839 | 838 1
840 | 839 1
841 | 840 1
842 | 841 9
843 | 842 9
844 | 843 9
845 | 844 9
846 | 845 9
847 | 846 9
848 | 847 9
849 | 848 9
850 | 849 9
851 | 850 9
852 | 851 9
853 | 852 10
854 | 853 10
855 | 854 10
856 | 855 10
857 | 856 10
858 | 857 10
859 | 858 10
860 | 859 10
861 | 860 10
862 | 861 10
863 | 862 10
864 | 863 10
865 | 864 10
866 | 865 10
867 | 866 10
868 | 867 10
869 | 868 10
870 | 869 10
871 | 870 10
872 | 871 10
873 | 872 10
874 | 873 10
875 | 874 10
876 | 875 10
877 | 876 10
878 | 877 10
879 | 878 10
880 | 879 10
881 | 880 10
882 | 881 10
883 | 882 5
884 | 883 5
885 | 884 5
886 | 885 5
887 | 886 5
888 | 887 5
889 | 888 5
890 | 889 5
891 | 890 5
892 | 891 5
893 | 892 5
894 | 893 5
895 | 894 5
896 | 895 5
897 | 896 5
898 | 897 5
899 | 898 5
900 | 899 5
901 | 900 5
902 | 901 5
903 | 902 5
904 | 903 5
905 | 904 5
906 | 905 5
907 | 906 5
908 | 907 5
909 | 908 5
910 | 909 5
911 | 910 5
912 | 911 5
913 | 912 5
914 | 913 5
915 | 914 5
916 | 915 5
917 | 916 5
918 | 917 5
919 | 918 5
920 | 919 5
921 | 920 5
922 | 921 5
923 | 922 5
924 | 923 5
925 | 924 5
926 | 925 5
927 | 926 5
928 | 927 5
929 | 928 5
930 | 929 5
931 | 930 5
932 | 931 5
933 | 932 5
934 | 933 5
935 | 934 5
936 | 935 5
937 | 936 5
938 | 937 5
939 | 938 5
940 | 939 5
941 | 940 5
942 | 941 5
943 | 942 5
944 | 943 5
945 | 944 5
946 | 945 5
947 | 946 5
948 | 947 5
949 | 948 5
950 | 949 5
951 | 950 5
952 | 951 5
953 | 952 5
954 | 953 5
955 | 954 5
956 | 955 5
957 | 956 5
958 | 957 5
959 | 958 5
960 | 959 5
961 | 960 5
962 | 961 5
963 | 962 5
964 | 963 5
965 | 964 5
966 | 965 5
967 | 966 5
968 | 967 5
969 | 968 5
970 | 969 5
971 | 970 5
972 | 971 5
973 | 972 5
974 | 973 5
975 | 974 5
976 | 975 5
977 | 976 5
978 | 977 5
979 | 978 5
980 | 979 5
981 | 980 5
982 | 981 5
983 | 982 5
984 | 983 5
985 | 984 5
986 | 985 5
987 | 986 5
988 | 987 5
989 | 988 5
990 | 989 5
991 | 990 5
992 | 991 5
993 | 992 5
994 | 993 5
995 | 994 5
996 | 995 5
997 | 996 5
998 | 997 5
999 | 998 5
1000 | 999 5
1001 | 1000 5
1002 | 1001 5
1003 | 1002 5
1004 | 1003 5
1005 | 1004 3
1006 | 1005 3
1007 | 1006 3
1008 | 1007 3
1009 | 1008 3
1010 | 1009 3
1011 | 1010 3
1012 | 1011 3
1013 | 1012 3
1014 | 1013 3
1015 | 1014 3
1016 | 1015 3
1017 | 1016 3
1018 | 1017 3
1019 | 1018 3
1020 | 1019 3
1021 | 1020 3
1022 | 1021 5
1023 | 1022 5
1024 | 1023 5
1025 | 1024 5
1026 | 1025 5
1027 | 1026 5
1028 | 1027 5
1029 | 1028 5
1030 | 1029 5
1031 | 1030 5
1032 | 1031 5
1033 | 1032 5
1034 | 1033 5
1035 | 1034 5
1036 | 1035 5
1037 | 1036 5
1038 | 1037 5
1039 | 1038 5
1040 | 1039 5
1041 | 1040 5
1042 | 1041 5
1043 | 1042 5
1044 | 1043 5
1045 | 1044 5
1046 | 1045 5
1047 | 1046 5
1048 | 1047 5
1049 | 1048 5
1050 | 1049 5
1051 | 1050 5
1052 | 1051 5
1053 | 1052 5
1054 | 1053 5
1055 | 1054 5
1056 | 1055 5
1057 | 1056 5
1058 | 1057 5
1059 | 1058 5
1060 | 1059 5
1061 | 1060 5
1062 | 1061 5
1063 | 1062 5
1064 | 1063 5
1065 | 1064 5
1066 | 1065 5
1067 | 1066 5
1068 | 1067 5
1069 | 1068 5
1070 | 1069 5
1071 | 1070 5
1072 | 1071 5
1073 | 1072 5
1074 | 1073 5
1075 | 1074 5
1076 | 1075 5
1077 | 1076 5
1078 | 1077 5
1079 | 1078 5
1080 | 1079 5
1081 | 1080 5
1082 | 1081 5
1083 | 1082 5
1084 | 1083 5
1085 | 1084 5
1086 | 1085 5
1087 | 1086 5
1088 | 1087 5
1089 | 1088 5
1090 | 1089 5
1091 | 1090 5
1092 | 1091 5
1093 | 1092 5
1094 | 1093 5
1095 | 1094 5
1096 | 1095 5
1097 | 1096 5
1098 | 1097 5
1099 | 1098 5
1100 | 1099 5
1101 | 1100 5
1102 | 1101 5
1103 | 1102 5
1104 | 1103 5
1105 | 1104 5
1106 | 1105 5
1107 | 1106 5
1108 | 1107 5
1109 | 1108 5
1110 | 1109 5
1111 | 1110 5
1112 | 1111 5
1113 | 1112 5
1114 | 1113 5
1115 | 1114 5
1116 | 1115 5
1117 | 1116 5
1118 | 1117 5
1119 | 1118 5
1120 | 1119 5
1121 | 1120 5
1122 | 1121 5
1123 | 1122 5
1124 | 1123 5
1125 | 1124 5
1126 | 1125 5
1127 | 1126 5
1128 | 1127 5
1129 | 1128 5
1130 | 1129 5
1131 | 1130 5
1132 | 1131 5
1133 | 1132 5
1134 | 1133 5
1135 | 1134 5
1136 | 1135 5
1137 | 1136 5
1138 | 1137 5
1139 | 1138 5
1140 | 1139 5
1141 | 1140 5
1142 | 1141 5
1143 | 1142 5
1144 | 1143 5
1145 | 1144 5
1146 | 1145 5
1147 | 1146 5
1148 | 1147 5
1149 | 1148 5
1150 | 1149 5
1151 | 1150 5
1152 | 1151 5
1153 | 1152 5
1154 | 1153 5
1155 | 1154 5
1156 | 1155 5
1157 | 1156 5
1158 | 1157 5
1159 | 1158 5
1160 | 1159 5
1161 | 1160 5
1162 | 1161 5
1163 | 1162 5
1164 | 1163 5
1165 | 1164 5
1166 | 1165 5
1167 | 1166 5
1168 | 1167 5
1169 | 1168 5
1170 | 1169 5
1171 | 1170 5
1172 | 1171 5
1173 | 1172 5
1174 | 1173 5
1175 | 1174 5
1176 | 1175 5
1177 | 1176 5
1178 | 1177 5
1179 | 1178 5
1180 | 1179 5
1181 | 1180 5
1182 | 1181 5
1183 | 1182 5
1184 | 1183 5
1185 | 1184 5
1186 | 1185 5
1187 | 1186 5
1188 | 1187 5
1189 | 1188 5
1190 | 1189 5
1191 | 1190 5
1192 | 1191 5
1193 | 1192 5
1194 | 1193 5
1195 | 1194 5
1196 | 1195 5
1197 | 1196 5
1198 | 1197 5
1199 | 1198 5
1200 | 1199 5
1201 | 1200 5
1202 | 1201 5
1203 | 1202 5
1204 | 1203 5
1205 | 1204 5
1206 | 1205 5
1207 | 1206 5
1208 | 1207 11
1209 | 1208 11
1210 | 1209 11
1211 | 1210 11
1212 | 1211 11
1213 | 1212 11
1214 | 1213 11
1215 | 1214 11
1216 | 1215 11
1217 | 1216 11
1218 | 1217 11
1219 | 1218 11
1220 | 1219 11
1221 | 1220 11
1222 | 1221 11
1223 | 1222 11
1224 | 1223 11
1225 | 1224 11
1226 | 1225 11
1227 | 1226 11
1228 | 1227 11
1229 | 1228 11
1230 | 1229 11
1231 | 1230 11
1232 | 1231 11
1233 | 1232 11
1234 | 1233 11
1235 | 1234 11
1236 | 1235 11
1237 | 1236 11
1238 | 1237 11
1239 | 1238 11
1240 | 1239 11
1241 | 1240 11
1242 | 1241 11
1243 | 1242 11
1244 | 1243 11
1245 | 1244 11
1246 | 1245 11
1247 | 1246 11
1248 | 1247 11
1249 | 1248 11
1250 | 1249 11
1251 | 1250 11
1252 | 1251 11
1253 | 1252 11
1254 | 1253 11
1255 | 1254 11
1256 | 1255 11
1257 | 1256 11
1258 | 1257 11
1259 | 1258 11
1260 | 1259 11
1261 | 1260 11
1262 | 1261 11
1263 | 1262 11
1264 | 1263 11
1265 | 1264 11
1266 | 1265 11
1267 | 1266 11
1268 | 1267 11
1269 | 1268 11
1270 | 1269 11
1271 | 1270 11
1272 | 1271 11
1273 | 1272 11
1274 | 1273 11
1275 | 1274 11
1276 | 1275 11
1277 | 1276 11
1278 | 1277 11
1279 | 1278 11
1280 | 1279 11
1281 | 1280 11
1282 | 1281 11
1283 | 1282 11
1284 | 1283 11
1285 | 1284 11
1286 | 1285 11
1287 | 1286 11
1288 | 1287 11
1289 | 1288 11
1290 | 1289 11
1291 | 1290 11
1292 | 1291 11
1293 | 1292 11
1294 | 1293 11
1295 | 1294 11
1296 | 1295 11
1297 | 1296 11
1298 | 1297 11
1299 | 1298 11
1300 | 1299 11
1301 | 1300 11
1302 | 1301 11
1303 | 1302 11
1304 | 1303 11
1305 | 1304 11
1306 | 1305 11
1307 | 1306 11
1308 | 1307 11
1309 | 1308 11
1310 | 1309 11
1311 | 1310 11
1312 | 1311 11
1313 | 1312 11
1314 | 1313 11
1315 | 1314 11
1316 | 1315 11
1317 | 1316 11
1318 | 1317 11
1319 | 1318 11
1320 | 1319 11
1321 | 1320 11
1322 | 1321 11
1323 | 1322 11
1324 | 1323 11
1325 | 1324 11
1326 | 1325 11
1327 | 1326 11
1328 | 1327 11
1329 | 1328 11
1330 | 1329 11
1331 | 1330 11
1332 | 1331 11
1333 | 1332 11
1334 | 1333 11
1335 | 1334 11
1336 | 1335 11
1337 | 1336 11
1338 | 1337 11
1339 | 1338 11
1340 | 1339 11
1341 | 1340 11
1342 | 1341 11
1343 | 1342 11
1344 | 1343 11
1345 | 1344 11
1346 | 1345 11
1347 | 1346 11
1348 | 1347 11
1349 | 1348 11
1350 | 1349 11
1351 | 1350 11
1352 | 1351 11
1353 | 1352 11
1354 | 1353 11
1355 | 1354 11
1356 | 1355 11
1357 | 1356 11
1358 | 1357 11
1359 | 1358 11
1360 | 1359 11
1361 | 1360 11
1362 | 1361 11
1363 | 1362 11
1364 | 1363 11
1365 | 1364 11
1366 | 1365 11
1367 | 1366 11
1368 | 1367 11
1369 | 1368 11
1370 | 1369 11
1371 | 1370 11
1372 | 1371 11
1373 | 1372 11
1374 | 1373 11
1375 | 1374 11
1376 | 1375 11
1377 | 1376 11
1378 | 1377 11
1379 | 1378 11
1380 | 1379 11
1381 | 1380 11
1382 | 1381 11
1383 | 1382 11
1384 | 1383 11
1385 | 1384 11
1386 | 1385 11
1387 | 1386 4
1388 | 1387 4
1389 | 1388 4
1390 | 1389 4
1391 | 1390 4
1392 | 1391 4
1393 | 1392 4
1394 | 1393 4
1395 | 1394 4
1396 | 1395 4
1397 | 1396 3
1398 | 1397 3
1399 | 1398 3
1400 | 1399 3
1401 | 1400 3
1402 | 1401 3
1403 | 1402 3
1404 | 1403 3
1405 | 1404 3
1406 | 1405 3
1407 | 1406 3
1408 | 1407 3
1409 | 1408 3
1410 | 1409 3
1411 | 1410 3
1412 | 1411 3
1413 | 1412 3
1414 | 1413 3
1415 | 1414 3
1416 | 1415 3
1417 | 1416 3
1418 | 1417 3
1419 | 1418 3
1420 | 1419 3
1421 | 1420 3
1422 | 1421 3
1423 | 1422 3
1424 | 1423 3
1425 | 1424 3
1426 | 1425 3
1427 | 1426 3
1428 | 1427 3
1429 | 1428 3
1430 | 1429 3
1431 | 1430 3
1432 | 1431 3
1433 | 1432 3
1434 | 1433 3
1435 | 1434 3
1436 | 1435 3
1437 | 1436 3
1438 | 1437 3
1439 | 1438 3
1440 | 1439 3
1441 | 1440 3
1442 | 1441 3
1443 | 1442 3
1444 | 1443 3
1445 | 1444 3
1446 | 1445 3
1447 | 1446 3
1448 | 1447 3
1449 | 1448 3
1450 | 1449 3
1451 | 1450 3
1452 | 1451 3
1453 | 1452 3
1454 | 1453 3
1455 | 1454 3
1456 | 1455 3
1457 | 1456 3
1458 | 1457 3
1459 | 1458 3
1460 | 1459 3
1461 | 1460 3
1462 | 1461 3
1463 | 1462 3
1464 | 1463 3
1465 | 1464 3
1466 | 1465 3
1467 | 1466 3
1468 | 1467 3
1469 | 1468 3
1470 | 1469 3
1471 | 1470 3
1472 | 1471 3
1473 | 1472 3
1474 | 1473 3
1475 | 1474 3
1476 | 1475 3
1477 | 1476 3
1478 | 1477 3
1479 | 1478 3
1480 | 1479 3
1481 | 1480 3
1482 | 1481 3
1483 | 1482 0
1484 | 1483 0
1485 | 1484 0
1486 | 1485 0
1487 | 1486 0
1488 | 1487 0
1489 | 1488 0
1490 | 1489 0
1491 | 1490 0
1492 | 1491 0
1493 | 1492 0
1494 | 1493 0
1495 | 1494 0
1496 | 1495 0
1497 | 1496 0
1498 | 1497 0
1499 | 1498 0
1500 | 1499 0
1501 | 1500 0
1502 | 1501 0
1503 | 1502 0
1504 | 1503 0
1505 | 1504 0
1506 | 1505 0
1507 | 1506 0
1508 | 1507 0
1509 | 1508 0
1510 | 1509 0
1511 | 1510 0
1512 | 1511 0
1513 | 1512 0
1514 | 1513 0
1515 | 1514 0
1516 | 1515 0
1517 | 1516 0
1518 | 1517 0
1519 | 1518 0
1520 | 1519 0
1521 | 1520 0
1522 | 1521 0
1523 | 1522 0
1524 | 1523 0
1525 | 1524 0
1526 | 1525 0
1527 | 1526 0
1528 | 1527 0
1529 | 1528 0
1530 | 1529 0
1531 | 1530 0
1532 | 1531 0
1533 | 1532 0
1534 | 1533 0
1535 | 1534 0
1536 | 1535 0
1537 | 1536 0
1538 | 1537 0
1539 | 1538 0
1540 | 1539 0
1541 | 1540 0
1542 | 1541 0
1543 | 1542 0
1544 | 1543 0
1545 | 1544 0
1546 | 1545 0
1547 | 1546 0
1548 | 1547 0
1549 | 1548 0
1550 | 1549 0
1551 | 1550 0
1552 | 1551 7
1553 | 1552 7
1554 | 1553 7
1555 | 1554 7
1556 | 1555 7
1557 | 1556 7
1558 | 1557 7
1559 | 1558 7
1560 | 1559 7
1561 | 1560 7
1562 | 1561 7
1563 | 1562 7
1564 | 1563 7
1565 | 1564 7
1566 | 1565 7
1567 | 1566 7
1568 | 1567 7
1569 | 1568 7
1570 | 1569 7
1571 | 1570 7
1572 | 1571 7
1573 | 1572 7
1574 | 1573 7
1575 | 1574 7
1576 | 1575 7
1577 | 1576 7
1578 | 1577 7
1579 | 1578 7
1580 | 1579 7
1581 | 1580 7
1582 | 1581 7
1583 | 1582 7
1584 | 1583 7
1585 | 1584 7
1586 | 1585 7
1587 | 1586 7
1588 | 1587 7
1589 | 1588 7
1590 | 1589 7
1591 | 1590 7
1592 | 1591 7
1593 | 1592 7
1594 | 1593 7
1595 | 1594 7
1596 | 1595 7
1597 | 1596 7
1598 | 1597 7
1599 | 1598 7
1600 | 1599 7
1601 | 1600 7
1602 | 1601 7
1603 | 1602 7
1604 | 1603 7
1605 | 1604 8
1606 | 1605 8
1607 | 1606 8
1608 | 1607 8
1609 | 1608 8
1610 | 1609 8
1611 | 1610 8
1612 | 1611 8
1613 | 1612 8
1614 | 1613 8
1615 | 1614 8
1616 | 1615 8
1617 | 1616 8
1618 | 1617 8
1619 | 1618 8
1620 | 1619 8
1621 | 1620 8
1622 | 1621 8
1623 | 1622 8
1624 | 1623 8
1625 | 1624 8
1626 | 1625 8
1627 | 1626 8
1628 | 1627 8
1629 | 1628 8
1630 | 1629 8
1631 | 1630 8
1632 | 1631 8
1633 | 1632 8
1634 | 1633 8
1635 | 1634 8
1636 | 1635 8
1637 | 1636 8
1638 | 1637 8
1639 | 1638 8
1640 | 1639 8
1641 | 1640 8
1642 | 1641 8
1643 | 1642 8
1644 | 1643 8
1645 | 1644 8
1646 | 1645 8
1647 | 1646 8
1648 | 1647 8
1649 | 1648 8
1650 | 1649 8
1651 | 1650 8
1652 | 1651 8
1653 | 1652 8
1654 | 1653 8
1655 | 1654 8
1656 | 1655 8
1657 | 1656 8
1658 | 1657 8
1659 | 1658 8
1660 | 1659 8
1661 | 1660 8
1662 | 1661 8
1663 | 1662 8
1664 | 1663 8
1665 | 1664 8
1666 | 1665 8
1667 | 1666 8
1668 | 1667 8
1669 | 1668 8
1670 | 1669 8
1671 | 1670 8
1672 | 1671 8
1673 | 1672 8
1674 | 1673 8
1675 | 1674 8
1676 | 1675 8
1677 | 1676 8
1678 | 1677 8
1679 | 1678 8
1680 | 1679 8
1681 | 1680 8
1682 | 1681 8
1683 | 1682 8
1684 | 1683 8
1685 | 1684 8
1686 | 1685 8
1687 | 1686 8
1688 | 1687 2
1689 | 1688 2
1690 | 1689 2
1691 | 1690 2
1692 | 1691 2
1693 | 1692 2
1694 | 1693 2
1695 | 1694 2
1696 | 1695 2
1697 | 1696 2
1698 | 1697 2
1699 | 1698 2
1700 | 1699 2
1701 | 1700 2
1702 | 1701 2
1703 | 1702 2
1704 | 1703 2
1705 | 1704 2
1706 | 1705 2
1707 | 1706 2
1708 | 1707 2
1709 | 1708 2
1710 | 1709 2
1711 | 1710 2
1712 | 1711 2
1713 | 1712 2
1714 | 1713 2
1715 | 1714 2
1716 | 1715 2
1717 | 1716 2
1718 | 1717 2
1719 | 1718 2
1720 | 1719 2
1721 | 1720 2
1722 | 1721 2
1723 | 1722 2
1724 | 1723 2
1725 | 1724 2
1726 | 1725 2
1727 | 1726 2
1728 | 1727 2
1729 | 1728 2
1730 | 1729 2
1731 | 1730 2
1732 | 1731 2
1733 | 1732 2
1734 | 1733 2
1735 | 1734 2
1736 | 1735 2
1737 | 1736 2
1738 | 1737 2
1739 | 1738 2
1740 | 1739 2
1741 | 1740 2
1742 | 1741 2
1743 | 1742 2
1744 | 1743 2
1745 | 1744 2
1746 | 1745 2
1747 | 1746 2
1748 | 1747 2
1749 | 1748 2
1750 | 1749 2
1751 | 1750 2
1752 | 1751 2
1753 | 1752 2
1754 | 1753 2
1755 | 1754 2
1756 | 1755 2
1757 | 1756 2
1758 | 1757 2
1759 | 1758 2
1760 | 1759 2
1761 | 1760 2
1762 | 1761 2
1763 | 1762 2
1764 | 1763 2
1765 | 1764 2
1766 | 1765 2
1767 | 1766 2
1768 | 1767 2
1769 | 1768 2
1770 | 1769 2
1771 | 1770 2
1772 | 1771 2
1773 | 1772 2
1774 | 1773 2
1775 | 1774 2
1776 | 1775 2
1777 | 1776 2
1778 | 1777 2
1779 | 1778 2
1780 | 1779 2
1781 | 1780 2
1782 | 1781 2
1783 | 1782 2
1784 | 1783 2
1785 | 1784 2
1786 | 1785 2
1787 | 1786 2
1788 | 1787 2
1789 | 1788 2
1790 | 1789 2
1791 | 1790 2
1792 | 1791 2
1793 | 1792 2
1794 | 1793 2
1795 | 1794 2
1796 | 1795 2
1797 | 1796 2
1798 | 1797 2
1799 | 1798 2
1800 | 1799 2
1801 | 1800 2
1802 | 1801 2
1803 | 1802 2
1804 | 1803 2
1805 | 1804 2
1806 | 1805 2
1807 | 1806 2
1808 | 1807 2
1809 | 1808 2
1810 | 1809 2
1811 | 1810 2
1812 | 1811 2
1813 | 1812 2
1814 | 1813 2
1815 | 1814 2
1816 | 1815 2
1817 | 1816 2
1818 | 1817 2
1819 | 1818 2
1820 | 1819 2
1821 | 1820 2
1822 | 1821 2
1823 | 1822 2
1824 | 1823 2
1825 | 1824 2
1826 | 1825 2
1827 | 1826 2
1828 | 1827 2
1829 | 1828 2
1830 | 1829 2
1831 | 1830 2
1832 | 1831 2
1833 | 1832 2
1834 | 1833 2
1835 | 1834 2
1836 | 1835 2
1837 | 1836 2
1838 | 1837 2
1839 | 1838 2
1840 | 1839 2
1841 | 1840 2
1842 | 1841 2
1843 | 1842 2
1844 | 1843 2
1845 | 1844 2
1846 | 1845 2
1847 | 1846 2
1848 | 1847 2
1849 | 1848 2
1850 | 1849 2
1851 | 1850 2
1852 | 1851 2
1853 | 1852 2
1854 | 1853 2
1855 | 1854 2
1856 | 1855 2
1857 | 1856 2
1858 | 1857 2
1859 | 1858 2
1860 | 1859 2
1861 | 1860 2
1862 | 1861 2
1863 | 1862 2
1864 | 1863 2
1865 | 1864 2
1866 | 1865 2
1867 | 1866 2
1868 | 1867 2
1869 | 1868 2
1870 | 1869 2
1871 | 1870 2
1872 | 1871 2
1873 | 1872 2
1874 | 1873 2
1875 | 1874 2
1876 | 1875 2
1877 | 1876 2
1878 | 1877 2
1879 | 1878 2
1880 | 1879 2
1881 | 1880 2
1882 | 1881 2
1883 | 1882 2
1884 | 1883 2
1885 | 1884 2
1886 | 1885 2
1887 | 1886 2
1888 | 1887 2
1889 | 1888 2
1890 | 1889 2
1891 | 1890 14
1892 | 1891 14
1893 | 1892 14
1894 | 1893 14
1895 | 1894 14
1896 | 1895 14
1897 | 1896 14
1898 | 1897 14
1899 | 1898 14
1900 | 1899 14
1901 | 1900 14
1902 | 1901 14
1903 | 1902 14
1904 | 1903 14
1905 | 1904 14
1906 | 1905 14
1907 | 1906 14
1908 | 1907 14
1909 | 1908 14
1910 | 1909 14
1911 | 1910 14
1912 | 1911 14
1913 | 1912 14
1914 | 1913 14
1915 | 1914 14
1916 | 1915 14
1917 | 1916 14
1918 | 1917 14
1919 | 1918 14
1920 | 1919 14
1921 | 1920 14
1922 | 1921 14
1923 | 1922 14
1924 | 1923 14
1925 | 1924 14
1926 | 1925 14
1927 | 1926 14
1928 | 1927 14
1929 | 1928 14
1930 | 1929 14
1931 | 1930 14
1932 | 1931 14
1933 | 1932 14
1934 | 1933 14
1935 | 1934 14
1936 | 1935 14
1937 | 1936 14
1938 | 1937 14
1939 | 1938 14
1940 | 1939 14
1941 | 1940 14
1942 | 1941 14
1943 | 1942 14
1944 | 1943 14
1945 | 1944 14
1946 | 1945 14
1947 | 1946 14
1948 | 1947 14
1949 | 1948 14
1950 | 1949 14
1951 | 1950 14
1952 | 1951 14
1953 | 1952 14
1954 | 1953 14
1955 | 1954 14
1956 | 1955 14
1957 | 1956 14
1958 | 1957 14
1959 | 1958 14
1960 | 1959 14
1961 | 1960 14
1962 | 1961 14
1963 | 1962 14
1964 | 1963 14
1965 | 1964 14
1966 | 1965 14
1967 | 1966 14
1968 | 1967 6
1969 | 1968 6
1970 | 1969 6
1971 | 1970 6
1972 | 1971 6
1973 | 1972 6
1974 | 1973 6
1975 | 1974 6
1976 | 1975 6
1977 | 1976 6
1978 | 1977 6
1979 | 1978 6
1980 | 1979 6
1981 | 1980 6
1982 | 1981 6
1983 | 1982 6
1984 | 1983 6
1985 | 1984 6
1986 | 1985 6
1987 | 1986 6
1988 | 1987 6
1989 | 1988 6
1990 | 1989 6
1991 | 1990 6
1992 | 1991 6
1993 | 1992 6
1994 | 1993 6
1995 | 1994 6
1996 | 1995 6
1997 | 1996 6
1998 | 1997 6
1999 | 1998 6
2000 | 1999 6
2001 | 2000 6
2002 | 2001 6
2003 | 2002 6
2004 | 2003 6
2005 | 2004 6
2006 | 2005 6
2007 | 2006 6
2008 | 2007 6
2009 | 2008 6
2010 | 2009 6
2011 | 2010 6
2012 | 2011 6
2013 | 2012 6
2014 | 2013 6
2015 | 2014 6
2016 | 2015 6
2017 | 2016 6
2018 | 2017 6
2019 | 2018 6
2020 | 2019 6
2021 | 2020 6
2022 | 2021 6
2023 | 2022 6
2024 | 2023 6
2025 | 2024 6
2026 | 2025 6
2027 | 2026 6
2028 | 2027 6
2029 | 2028 6
2030 | 2029 6
2031 | 2030 6
2032 | 2031 6
2033 | 2032 6
2034 | 2033 6
2035 | 2034 6
2036 | 2035 6
2037 | 2036 6
2038 | 2037 6
2039 | 2038 6
2040 | 2039 6
2041 | 2040 6
2042 | 2041 6
2043 | 2042 6
2044 | 2043 6
2045 | 2044 6
2046 | 2045 6
2047 | 2046 6
2048 | 2047 6
2049 | 2048 6
2050 | 2049 6
2051 | 2050 6
2052 | 2051 6
2053 | 2052 6
2054 | 2053 6
2055 | 2054 6
2056 | 2055 6
2057 | 2056 6
2058 | 2057 6
2059 | 2058 6
2060 | 2059 6
2061 | 2060 6
2062 | 2061 6
2063 | 2062 6
2064 | 2063 6
2065 | 2064 6
2066 | 2065 6
2067 | 2066 6
2068 | 2067 6
2069 | 2068 6
2070 | 2069 6
2071 | 2070 6
2072 | 2071 6
2073 | 2072 6
2074 | 2073 6
2075 | 2074 6
2076 | 2075 6
2077 | 2076 6
2078 | 2077 6
2079 | 2078 10
2080 | 2079 10
2081 | 2080 10
2082 | 2081 10
2083 | 2082 10
2084 | 2083 10
2085 | 2084 10
2086 | 2085 10
2087 | 2086 10
2088 | 2087 10
2089 | 2088 10
2090 | 2089 10
2091 | 2090 10
2092 | 2091 10
2093 | 2092 10
2094 | 2093 10
2095 | 2094 10
2096 | 2095 10
2097 | 2096 10
2098 | 2097 10
2099 | 2098 10
2100 | 2099 10
2101 | 2100 10
2102 | 2101 10
2103 | 2102 10
2104 | 2103 10
2105 | 2104 10
2106 | 2105 10
2107 | 2106 10
2108 | 2107 10
2109 | 2108 10
2110 | 2109 10
2111 | 2110 10
2112 | 2111 10
2113 | 2112 10
2114 | 2113 10
2115 | 2114 10
2116 | 2115 10
2117 | 2116 10
2118 | 2117 10
2119 | 2118 10
2120 | 2119 10
2121 | 2120 10
2122 | 2121 10
2123 | 2122 10
2124 | 2123 10
2125 | 2124 10
2126 | 2125 10
2127 | 2126 10
2128 | 2127 10
2129 | 2128 10
2130 | 2129 10
2131 | 2130 10
2132 | 2131 10
2133 | 2132 10
2134 | 2133 10
2135 | 2134 10
2136 | 2135 10
2137 | 2136 10
2138 | 2137 10
2139 | 2138 10
2140 | 2139 10
2141 | 2140 10
2142 | 2141 10
2143 | 2142 10
2144 | 2143 10
2145 | 2144 10
2146 | 2145 10
2147 | 2146 10
2148 | 2147 10
2149 | 2148 10
2150 | 2149 10
2151 | 2150 10
2152 | 2151 10
2153 | 2152 10
2154 | 2153 10
2155 | 2154 10
2156 | 2155 10
2157 | 2156 10
2158 | 2157 10
2159 | 2158 10
2160 | 2159 10
2161 | 2160 10
2162 | 2161 10
2163 | 2162 10
2164 | 2163 10
2165 | 2164 10
2166 | 2165 10
2167 | 2166 10
2168 | 2167 10
2169 | 2168 10
2170 | 2169 10
2171 | 2170 10
2172 | 2171 10
2173 | 2172 10
2174 | 2173 10
2175 | 2174 10
2176 | 2175 10
2177 | 2176 10
2178 | 2177 10
2179 | 2178 10
2180 | 2179 10
2181 | 2180 10
2182 | 2181 10
2183 | 2182 10
2184 | 2183 10
2185 | 2184 10
2186 | 2185 10
2187 | 2186 10
2188 | 2187 10
2189 | 2188 10
2190 | 2189 10
2191 | 2190 10
2192 | 2191 10
2193 | 2192 10
2194 | 2193 10
2195 | 2194 10
2196 | 2195 10
2197 | 2196 10
2198 | 2197 10
2199 | 2198 10
2200 | 2199 10
2201 | 2200 10
2202 | 2201 10
2203 | 2202 10
2204 | 2203 10
2205 | 2204 10
2206 | 2205 10
2207 | 2206 10
2208 | 2207 10
2209 | 2208 10
2210 | 2209 10
2211 | 2210 10
2212 | 2211 10
2213 | 2212 10
2214 | 2213 10
2215 | 2214 10
2216 | 2215 10
2217 | 2216 10
2218 | 2217 10
2219 | 2218 10
2220 | 2219 10
2221 | 2220 10
2222 | 2221 10
2223 | 2222 10
2224 | 2223 10
2225 | 2224 10
2226 | 2225 10
2227 | 2226 10
2228 | 2227 10
2229 | 2228 10
2230 | 2229 10
2231 | 2230 10
2232 | 2231 10
2233 | 2232 10
2234 | 2233 10
2235 | 2234 10
2236 | 2235 10
2237 | 2236 10
2238 | 2237 10
2239 | 2238 10
2240 | 2239 10
2241 | 2240 10
2242 | 2241 10
2243 | 2242 10
2244 | 2243 10
2245 | 2244 10
2246 | 2245 10
2247 | 2246 10
2248 | 2247 10
2249 | 2248 10
2250 | 2249 10
2251 | 2250 10
2252 | 2251 10
2253 | 2252 10
2254 | 2253 10
2255 | 2254 10
2256 | 2255 10
2257 | 2256 10
2258 | 2257 10
2259 | 2258 10
2260 | 2259 10
2261 | 2260 10
2262 | 2261 10
2263 | 2262 10
2264 | 2263 10
2265 | 2264 10
2266 | 2265 10
2267 | 2266 10
2268 | 2267 10
2269 | 2268 10
2270 | 2269 10
2271 | 2270 10
2272 | 2271 10
2273 | 2272 10
2274 | 2273 10
2275 | 2274 10
2276 | 2275 10
2277 | 2276 10
2278 | 2277 10
2279 | 2278 10
2280 | 2279 10
2281 | 2280 10
2282 | 2281 10
2283 | 2282 10
2284 | 2283 10
2285 | 2284 10
2286 | 2285 10
2287 | 2286 10
2288 | 2287 10
2289 | 2288 10
2290 | 2289 10
2291 | 2290 10
2292 | 2291 10
2293 | 2292 10
2294 | 2293 10
2295 | 2294 10
2296 | 2295 10
2297 | 2296 10
2298 | 2297 10
2299 | 2298 10
2300 | 2299 10
2301 | 2300 10
2302 | 2301 10
2303 | 2302 10
2304 | 2303 10
2305 | 2304 10
2306 | 2305 10
2307 | 2306 10
2308 | 2307 10
2309 | 2308 10
2310 | 2309 10
2311 | 2310 10
2312 | 2311 10
2313 | 2312 10
2314 | 2313 10
2315 | 2314 10
2316 | 2315 10
2317 | 2316 10
2318 | 2317 15
2319 | 2318 15
2320 | 2319 15
2321 | 2320 15
2322 | 2321 15
2323 | 2322 15
2324 | 2323 15
2325 | 2324 15
2326 | 2325 15
2327 | 2326 15
2328 | 2327 15
2329 | 2328 15
2330 | 2329 15
2331 | 2330 15
2332 | 2331 15
2333 | 2332 15
2334 | 2333 15
2335 | 2334 15
2336 | 2335 15
2337 | 2336 15
2338 | 2337 15
2339 | 2338 15
2340 | 2339 15
2341 | 2340 15
2342 | 2341 15
2343 | 2342 15
2344 | 2343 15
2345 | 2344 15
2346 | 2345 15
2347 | 2346 15
2348 | 2347 15
2349 | 2348 15
2350 | 2349 15
2351 | 2350 15
2352 | 2351 15
2353 | 2352 15
2354 | 2353 15
2355 | 2354 15
2356 | 2355 15
2357 | 2356 15
2358 | 2357 15
2359 | 2358 15
2360 | 2359 15
2361 | 2360 15
2362 | 2361 15
2363 | 2362 15
2364 | 2363 15
2365 | 2364 15
2366 | 2365 15
2367 | 2366 15
2368 | 2367 15
2369 | 2368 15
2370 | 2369 15
2371 | 2370 15
2372 | 2371 15
2373 | 2372 15
2374 | 2373 15
2375 | 2374 15
2376 | 2375 15
2377 | 2376 15
2378 | 2377 15
2379 | 2378 15
2380 | 2379 15
2381 | 2380 15
2382 | 2381 15
2383 | 2382 15
2384 | 2383 15
2385 | 2384 15
2386 | 2385 15
2387 | 2386 15
2388 | 2387 15
2389 | 2388 15
2390 | 2389 15
2391 | 2390 8
2392 | 2391 8
2393 | 2392 8
2394 | 2393 8
2395 | 2394 8
2396 | 2395 8
2397 | 2396 8
2398 | 2397 8
2399 | 2398 8
2400 | 2399 8
2401 | 2400 8
2402 | 2401 8
2403 | 2402 8
2404 | 2403 8
2405 | 2404 8
2406 |
--------------------------------------------------------------------------------
/examples/alias.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import numpy as np
3 |
4 | from ge.alias import alias_sample, create_alias_table
5 |
6 |
7 | def gen_prob_dist(N):
8 |     p = np.random.randint(0, 100, N)  # N random integer weights
9 |     return p/np.sum(p)  # normalized into a probability distribution
10 |
11 |
12 | def simulate(N=100, k=10000):
13 |
14 |     truth = gen_prob_dist(N)
15 |
16 |     area_ratio = truth
17 |     accept, alias = create_alias_table(area_ratio)  # O(N) table construction
18 |
19 |     ans = np.zeros(N)
20 |     for _ in range(k):
21 |         i = alias_sample(accept, alias)  # O(1) draw from the discrete distribution
22 |         ans[i] += 1
23 |     return ans/np.sum(ans), truth
24 |
25 |
26 | if __name__ == "__main__":
27 |     alias_result, truth = simulate()
28 |     plt.bar(list(range(len(alias_result))), alias_result, label='alias_result')
29 |     plt.bar(list(range(len(truth))), truth, label='truth')
30 |     plt.legend()
31 |     plt.show()
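32 |
33 | # Why the alias method: create_alias_table does its O(N) preprocessing once,
34 | # after which alias_sample returns an index in O(1) (one uniform slot choice
35 | # plus one accept/alias test). The LINE and node2vec implementations in this
36 | # repo build the same kind of tables for edge and walk-transition sampling.
37 |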
--------------------------------------------------------------------------------
/examples/deepwalk_wiki.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 |
4 | from ge.classify import read_node_label, Classifier
5 | from ge import DeepWalk
6 | from sklearn.linear_model import LogisticRegression
7 |
8 | import matplotlib.pyplot as plt
9 | import networkx as nx
10 | from sklearn.manifold import TSNE
11 |
12 |
13 | def evaluate_embeddings(embeddings):
14 |     X, Y = read_node_label('../data/wiki/wiki_labels.txt')
15 |     tr_frac = 0.8
16 |     print("Training classifier using {:.2f}% of nodes...".format(
17 |         tr_frac * 100))
18 |     clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
19 |     clf.split_train_evaluate(X, Y, tr_frac)
20 |
21 |
22 | def plot_embeddings(embeddings,):
23 | X, Y = read_node_label('../data/wiki/wiki_labels.txt')
24 |
25 | emb_list = []
26 | for k in X:
27 | emb_list.append(embeddings[k])
28 | emb_list = np.array(emb_list)
29 |
30 | model = TSNE(n_components=2)
31 | node_pos = model.fit_transform(emb_list)
32 |
33 | color_idx = {}
34 | for i in range(len(X)):
35 | color_idx.setdefault(Y[i][0], [])
36 | color_idx[Y[i][0]].append(i)
37 |
38 | for c, idx in color_idx.items():
39 | plt.scatter(node_pos[idx, 0], node_pos[idx, 1], label=c)
40 | plt.legend()
41 | plt.show()
42 |
43 |
44 | if __name__ == "__main__":
45 | G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt',
46 | create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
47 |
48 | model = DeepWalk(G, walk_length=10, num_walks=80, workers=1)
49 | model.train(window_size=5, iter=3)
50 | embeddings = model.get_embeddings()
51 |
52 | evaluate_embeddings(embeddings)
53 | plot_embeddings(embeddings)
54 |
--------------------------------------------------------------------------------
/examples/line_wiki.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 |
4 | from ge.classify import read_node_label, Classifier
5 | from ge import LINE
6 | from sklearn.linear_model import LogisticRegression
7 |
8 | import matplotlib.pyplot as plt
9 | import networkx as nx
10 | from sklearn.manifold import TSNE
11 |
12 |
13 | def evaluate_embeddings(embeddings):
14 | X, Y = read_node_label('../data/wiki/wiki_labels.txt')
15 | tr_frac = 0.8
16 | print("Training classifier using {:.2f}% nodes...".format(
17 | tr_frac * 100))
18 | clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
19 | clf.split_train_evaluate(X, Y, tr_frac)
20 |
21 |
22 | def plot_embeddings(embeddings,):
23 | X, Y = read_node_label('../data/wiki/wiki_labels.txt')
24 |
25 | emb_list = []
26 | for k in X:
27 | emb_list.append(embeddings[k])
28 | emb_list = np.array(emb_list)
29 |
30 | model = TSNE(n_components=2)
31 | node_pos = model.fit_transform(emb_list)
32 |
33 | color_idx = {}
34 | for i in range(len(X)):
35 | color_idx.setdefault(Y[i][0], [])
36 | color_idx[Y[i][0]].append(i)
37 |
38 | for c, idx in color_idx.items():
39 | plt.scatter(node_pos[idx, 0], node_pos[idx, 1], label=c)
40 | plt.legend()
41 | plt.show()
42 |
43 |
44 | if __name__ == "__main__":
45 | G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt',
46 | create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
47 |
48 | model = LINE(G, embedding_size=128, order='second')
49 | model.train(batch_size=1024, epochs=50, verbose=2)
50 | embeddings = model.get_embeddings()
51 |
52 | evaluate_embeddings(embeddings)
53 | plot_embeddings(embeddings)
54 |
--------------------------------------------------------------------------------
/examples/node2vec_flight.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from ge.classify import read_node_label, Classifier
4 | from ge import Node2Vec
5 | from sklearn.linear_model import LogisticRegression
6 |
7 | import matplotlib.pyplot as plt
8 | import networkx as nx
9 | from sklearn.manifold import TSNE
10 |
11 |
12 | def evaluate_embeddings(embeddings):
13 |     X, Y = read_node_label('../data/flight/labels-brazil-airports.txt', skip_head=True)
14 |     tr_frac = 0.8
15 |     print("Training classifier using {:.2f}% nodes...".format(
16 |         tr_frac * 100))
17 |     clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
18 |     clf.split_train_evaluate(X, Y, tr_frac)
19 |
20 |
21 | def plot_embeddings(embeddings,):
22 |     X, Y = read_node_label('../data/flight/labels-brazil-airports.txt', skip_head=True)
23 |
24 |     emb_list = []
25 |     for k in X:
26 |         emb_list.append(embeddings[k])
27 |     emb_list = np.array(emb_list)
28 |
29 |     model = TSNE(n_components=2)
30 |     node_pos = model.fit_transform(emb_list)
31 |
32 |     color_idx = {}
33 |     for i in range(len(X)):
34 |         color_idx.setdefault(Y[i][0], [])
35 |         color_idx[Y[i][0]].append(i)
36 |
37 |     for c, idx in color_idx.items():
38 |         plt.scatter(node_pos[idx, 0], node_pos[idx, 1], label=c)
39 |     plt.legend()
40 |     plt.show()
41 |
42 |
43 | if __name__ == "__main__":
44 |     G = nx.read_edgelist('../data/flight/brazil-airports.edgelist', create_using=nx.DiGraph(), nodetype=None,
45 |                          data=[('weight', int)])
46 |
47 |     model = Node2Vec(G, 10, 80, workers=1, p=0.25, q=2, use_rejection_sampling=0)
48 |     model.train()
49 |     embeddings = model.get_embeddings()
50 |
51 |     evaluate_embeddings(embeddings)
52 |     plot_embeddings(embeddings)
53 |
--------------------------------------------------------------------------------
/examples/node2vec_wiki.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 |
4 | from ge.classify import read_node_label, Classifier
5 | from ge import Node2Vec
6 | from sklearn.linear_model import LogisticRegression
7 |
8 | import matplotlib.pyplot as plt
9 | import networkx as nx
10 | from sklearn.manifold import TSNE
11 |
12 |
13 | def evaluate_embeddings(embeddings):
14 | X, Y = read_node_label('../data/wiki/wiki_labels.txt')
15 | tr_frac = 0.8
16 | print("Training classifier using {:.2f}% nodes...".format(
17 | tr_frac * 100))
18 | clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
19 | clf.split_train_evaluate(X, Y, tr_frac)
20 |
21 |
22 | def plot_embeddings(embeddings,):
23 | X, Y = read_node_label('../data/wiki/wiki_labels.txt')
24 |
25 | emb_list = []
26 | for k in X:
27 | emb_list.append(embeddings[k])
28 | emb_list = np.array(emb_list)
29 |
30 | model = TSNE(n_components=2)
31 | node_pos = model.fit_transform(emb_list)
32 |
33 | color_idx = {}
34 | for i in range(len(X)):
35 | color_idx.setdefault(Y[i][0], [])
36 | color_idx[Y[i][0]].append(i)
37 |
38 | for c, idx in color_idx.items():
39 | plt.scatter(node_pos[idx, 0], node_pos[idx, 1], label=c)
40 | plt.legend()
41 | plt.show()
42 |
43 |
44 | if __name__ == "__main__":
45 |     G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt',
46 |                          create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
47 |     model = Node2Vec(G, walk_length=10, num_walks=80,
48 |                      p=0.25, q=4, workers=1, use_rejection_sampling=0)
49 |     model.train(window_size=5, iter=3)
50 |     embeddings = model.get_embeddings()
51 |
52 | evaluate_embeddings(embeddings)
53 | plot_embeddings(embeddings)
54 |
--------------------------------------------------------------------------------
/examples/sdne_wiki.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 |
4 | from ge.classify import read_node_label, Classifier
5 | from ge import SDNE
6 | from sklearn.linear_model import LogisticRegression
7 |
8 | import matplotlib.pyplot as plt
9 | import networkx as nx
10 | from sklearn.manifold import TSNE
11 |
12 |
13 | def evaluate_embeddings(embeddings):
14 | X, Y = read_node_label('../data/wiki/wiki_labels.txt')
15 | tr_frac = 0.8
16 | print("Training classifier using {:.2f}% nodes...".format(
17 | tr_frac * 100))
18 | clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
19 | clf.split_train_evaluate(X, Y, tr_frac)
20 |
21 |
22 | def plot_embeddings(embeddings,):
23 | X, Y = read_node_label('../data/wiki/wiki_labels.txt')
24 |
25 | emb_list = []
26 | for k in X:
27 | emb_list.append(embeddings[k])
28 | emb_list = np.array(emb_list)
29 |
30 | model = TSNE(n_components=2)
31 | node_pos = model.fit_transform(emb_list)
32 |
33 | color_idx = {}
34 | for i in range(len(X)):
35 | color_idx.setdefault(Y[i][0], [])
36 | color_idx[Y[i][0]].append(i)
37 |
38 | for c, idx in color_idx.items():
39 | plt.scatter(node_pos[idx, 0], node_pos[idx, 1],
40 | label=c) # c=node_colors)
41 | plt.legend()
42 | plt.show()
43 |
44 |
45 | if __name__ == "__main__":
46 | G = nx.read_edgelist('../data/wiki/Wiki_edgelist.txt',
47 | create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
48 |
49 | model = SDNE(G, hidden_size=[256, 128],)
50 | model.train(batch_size=3000, epochs=40, verbose=2)
51 | embeddings = model.get_embeddings()
52 |
53 | evaluate_embeddings(embeddings)
54 | plot_embeddings(embeddings)
55 |
--------------------------------------------------------------------------------
/examples/struc2vec_flight.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from ge.classify import read_node_label, Classifier
4 | from ge import Struc2Vec
5 | from sklearn.linear_model import LogisticRegression
6 |
7 | import matplotlib.pyplot as plt
8 | import networkx as nx
9 | from sklearn.manifold import TSNE
10 |
11 |
12 | def evaluate_embeddings(embeddings):
13 |     X, Y = read_node_label('../data/flight/labels-brazil-airports.txt', skip_head=True)
14 |     tr_frac = 0.8
15 |     print("Training classifier using {:.2f}% nodes...".format(
16 |         tr_frac * 100))
17 |     clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
18 |     clf.split_train_evaluate(X, Y, tr_frac)
19 |
20 |
21 | def plot_embeddings(embeddings,):
22 |     X, Y = read_node_label('../data/flight/labels-brazil-airports.txt', skip_head=True)
23 |
24 |     emb_list = []
25 |     for k in X:
26 |         emb_list.append(embeddings[k])
27 |     emb_list = np.array(emb_list)
28 |
29 |     model = TSNE(n_components=2)
30 |     node_pos = model.fit_transform(emb_list)
31 |
32 |     color_idx = {}
33 |     for i in range(len(X)):
34 |         color_idx.setdefault(Y[i][0], [])
35 |         color_idx[Y[i][0]].append(i)
36 |
37 |     for c, idx in color_idx.items():
38 |         plt.scatter(node_pos[idx, 0], node_pos[idx, 1], label=c)
39 |     plt.legend()
40 |     plt.show()
41 |
42 |
43 | if __name__ == "__main__":
44 |     G = nx.read_edgelist('../data/flight/brazil-airports.edgelist', create_using=nx.DiGraph(), nodetype=None,
45 |                          data=[('weight', int)])
46 |
47 |     model = Struc2Vec(G, 10, 80, workers=4, verbose=40, )
48 |     model.train()
49 |     embeddings = model.get_embeddings()
50 |
51 |     evaluate_embeddings(embeddings)
52 |     plot_embeddings(embeddings)
--------------------------------------------------------------------------------
/ge/__init__.py:
--------------------------------------------------------------------------------
1 | from .models import *
--------------------------------------------------------------------------------
/ge/alias.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 |
4 | def create_alias_table(area_ratio):
5 |     """Build an alias table (Vose's method) for O(1) discrete sampling.
6 |
7 |     :param area_ratio: list of probabilities with sum(area_ratio) == 1
8 |     :return: accept, alias tables consumed by alias_sample
9 |     """
10 | l = len(area_ratio)
11 | accept, alias = [0] * l, [0] * l
12 | small, large = [], []
13 | area_ratio_ = np.array(area_ratio) * l
14 | for i, prob in enumerate(area_ratio_):
15 | if prob < 1.0:
16 | small.append(i)
17 | else:
18 | large.append(i)
19 |
20 | while small and large:
21 | small_idx, large_idx = small.pop(), large.pop()
22 | accept[small_idx] = area_ratio_[small_idx]
23 | alias[small_idx] = large_idx
24 | area_ratio_[large_idx] = area_ratio_[large_idx] - \
25 | (1 - area_ratio_[small_idx])
26 | if area_ratio_[large_idx] < 1.0:
27 | small.append(large_idx)
28 | else:
29 | large.append(large_idx)
30 |
31 | while large:
32 | large_idx = large.pop()
33 | accept[large_idx] = 1
34 | while small:
35 | small_idx = small.pop()
36 | accept[small_idx] = 1
37 |
38 | return accept, alias
39 |
40 |
41 | def alias_sample(accept, alias):
42 |     """Draw one sample index from the alias table in O(1).
43 |
44 |     :param accept: acceptance probabilities from create_alias_table
45 |     :param alias: alias indices from create_alias_table
46 |     :return: sample index
47 |     """
48 | N = len(accept)
49 | i = int(np.random.random() * N)
50 | r = np.random.random()
51 | if r < accept[i]:
52 | return i
53 | else:
54 | return alias[i]
55 |
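56 | # Minimal usage sketch (illustrative only, not part of the library API): build a
57 | # table for a three-outcome distribution and check that empirical frequencies
58 | # converge to it.
59 | #
60 | #     accept, alias = create_alias_table([0.5, 0.3, 0.2])
61 | #     counts = [0, 0, 0]
62 | #     for _ in range(100000):
63 | #         counts[alias_sample(accept, alias)] += 1
64 | #     # counts / 100000 is expected to approach [0.5, 0.3, 0.2]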
--------------------------------------------------------------------------------
/ge/classify.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 |
3 | import numpy
4 | from sklearn.metrics import f1_score, accuracy_score
5 | from sklearn.multiclass import OneVsRestClassifier
6 | from sklearn.preprocessing import MultiLabelBinarizer
7 |
8 |
9 | class TopKRanker(OneVsRestClassifier):
10 | def predict(self, X, top_k_list):
11 | probs = numpy.asarray(super(TopKRanker, self).predict_proba(X))
12 | all_labels = []
13 | for i, k in enumerate(top_k_list):
14 | probs_ = probs[i, :]
15 | labels = self.classes_[probs_.argsort()[-k:]].tolist()
16 | probs_[:] = 0
17 | probs_[labels] = 1
18 | all_labels.append(probs_)
19 | return numpy.asarray(all_labels)
20 |
21 |
22 | class Classifier(object):
23 |
24 | def __init__(self, embeddings, clf):
25 | self.embeddings = embeddings
26 | self.clf = TopKRanker(clf)
27 | self.binarizer = MultiLabelBinarizer(sparse_output=True)
28 |
29 | def train(self, X, Y, Y_all):
30 | self.binarizer.fit(Y_all)
31 | X_train = [self.embeddings[x] for x in X]
32 | Y = self.binarizer.transform(Y)
33 | self.clf.fit(X_train, Y)
34 |
35 | def evaluate(self, X, Y):
36 | top_k_list = [len(l) for l in Y]
37 | Y_ = self.predict(X, top_k_list)
38 | Y = self.binarizer.transform(Y)
39 | averages = ["micro", "macro", "samples", "weighted"]
40 | results = {}
41 | for average in averages:
42 | results[average] = f1_score(Y, Y_, average=average)
43 | results['acc'] = accuracy_score(Y, Y_)
44 | print('-------------------')
45 | print(results)
46 | return results
47 |
48 | def predict(self, X, top_k_list):
49 | X_ = numpy.asarray([self.embeddings[x] for x in X])
50 | Y = self.clf.predict(X_, top_k_list=top_k_list)
51 | return Y
52 |
53 |     def split_train_evaluate(self, X, Y, train_percent, seed=0):
54 |         state = numpy.random.get_state()
55 |
56 |         training_size = int(train_percent * len(X))
57 | numpy.random.seed(seed)
58 | shuffle_indices = numpy.random.permutation(numpy.arange(len(X)))
59 | X_train = [X[shuffle_indices[i]] for i in range(training_size)]
60 | Y_train = [Y[shuffle_indices[i]] for i in range(training_size)]
61 | X_test = [X[shuffle_indices[i]] for i in range(training_size, len(X))]
62 | Y_test = [Y[shuffle_indices[i]] for i in range(training_size, len(X))]
63 |
64 | self.train(X_train, Y_train, Y)
65 | numpy.random.set_state(state)
66 | return self.evaluate(X_test, Y_test)
67 |
68 |
69 | def read_node_label(filename, skip_head=False):
70 | fin = open(filename, 'r')
71 | X = []
72 | Y = []
73 | while 1:
74 | if skip_head:
75 | fin.readline()
76 | l = fin.readline()
77 | if l == '':
78 | break
79 | vec = l.strip().split(' ')
80 | X.append(vec[0])
81 | Y.append(vec[1:])
82 | fin.close()
83 | return X, Y
84 |
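85 | # Usage sketch (assumes an `embeddings` dict mapping node id -> vector, as
86 | # returned by any model in ge.models, and a labels file readable by
87 | # read_node_label):
88 | #
89 | #     X, Y = read_node_label('../data/wiki/wiki_labels.txt')
90 | #     clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
91 | #     clf.split_train_evaluate(X, Y, 0.8)  # train on 80% of nodes, report F1/accuracy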
--------------------------------------------------------------------------------
/ge/models/__init__.py:
--------------------------------------------------------------------------------
1 | from .deepwalk import DeepWalk
2 | from .node2vec import Node2Vec
3 | from .line import LINE
4 | from .sdne import SDNE
5 | from .struc2vec import Struc2Vec
6 |
7 |
8 | __all__ = ["DeepWalk", "Node2Vec", "LINE", "SDNE", "Struc2Vec"]
9 |
--------------------------------------------------------------------------------
/ge/models/deepwalk.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | """
4 |
5 |
6 |
7 | Author:
8 |
9 | Weichen Shen,weichenswc@163.com
10 |
11 |
12 |
13 | Reference:
14 |
15 | [1] Perozzi B, Al-Rfou R, Skiena S. Deepwalk: Online learning of social representations[C]//Proceedings of the 20th ACM SIGKDD international conference on Knowledge discovery and data mining. ACM, 2014: 701-710.(http://www.perozzi.net/publications/14_kdd_deepwalk.pdf)
16 |
17 |
18 |
19 | """
20 | from gensim.models import Word2Vec
21 |
22 | from ..walker import RandomWalker
23 |
24 |
25 | class DeepWalk:
26 | def __init__(self, graph, walk_length, num_walks, workers=1):
27 |
28 | self.graph = graph
29 | self.w2v_model = None
30 | self._embeddings = {}
31 |
32 | self.walker = RandomWalker(
33 | graph, p=1, q=1, )
34 | self.sentences = self.walker.simulate_walks(
35 | num_walks=num_walks, walk_length=walk_length, workers=workers, verbose=1)
36 |
37 | def train(self, embed_size=128, window_size=5, workers=3, iter=5, **kwargs):
38 |
39 | kwargs["sentences"] = self.sentences
40 | kwargs["min_count"] = kwargs.get("min_count", 0)
41 | kwargs["vector_size"] = embed_size
42 |         kwargs["sg"] = 1  # skip-gram
43 |         kwargs["hs"] = 1  # DeepWalk uses hierarchical softmax
44 | kwargs["workers"] = workers
45 | kwargs["window"] = window_size
46 | kwargs["epochs"] = iter
47 |
48 | print("Learning embedding vectors...")
49 | model = Word2Vec(**kwargs)
50 | print("Learning embedding vectors done!")
51 |
52 | self.w2v_model = model
53 | return model
54 |
55 | def get_embeddings(self, ):
56 | if self.w2v_model is None:
57 |             print("model not trained")
58 | return {}
59 |
60 | self._embeddings = {}
61 | for word in self.graph.nodes():
62 | self._embeddings[word] = self.w2v_model.wv[word]
63 |
64 | return self._embeddings
65 |
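66 | # Note: the Word2Vec kwargs above target gensim >= 4.0 (`vector_size` and
67 | # `epochs` replaced gensim 3.x's `size` and `iter`), matching the
68 | # 'gensim>=4.0.0' pin in setup.py.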
--------------------------------------------------------------------------------
/ge/models/line.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | """
4 |
5 |
6 |
7 | Author:
8 |
9 | Weichen Shen,weichenswc@163.com
10 |
11 |
12 |
13 | Reference:
14 |
15 | [1] Tang J, Qu M, Wang M, et al. Line: Large-scale information network embedding[C]//Proceedings of the 24th International Conference on World Wide Web. International World Wide Web Conferences Steering Committee, 2015: 1067-1077.(https://arxiv.org/pdf/1503.03578.pdf)
16 |
17 |
18 |
19 | """
20 | import math
21 | import random
22 |
23 | import numpy as np
24 | from deepctr.layers.utils import reduce_sum
25 | from tensorflow.python.keras import backend as K
26 | from tensorflow.python.keras.layers import Embedding, Input, Lambda
27 | from tensorflow.python.keras.models import Model
28 |
29 | from ..alias import create_alias_table, alias_sample
30 | from ..utils import preprocess_nxgraph
31 |
32 |
33 | def line_loss(y_true, y_pred):
34 | return -K.mean(K.log(K.sigmoid(y_true * y_pred)))
35 |
36 |
37 | def create_model(numNodes, embedding_size, order='second'):
38 | v_i = Input(shape=(1,))
39 | v_j = Input(shape=(1,))
40 |
41 | first_emb = Embedding(numNodes, embedding_size, name='first_emb')
42 | second_emb = Embedding(numNodes, embedding_size, name='second_emb')
43 | context_emb = Embedding(numNodes, embedding_size, name='context_emb')
44 |
45 | v_i_emb = first_emb(v_i)
46 | v_j_emb = first_emb(v_j)
47 |
48 | v_i_emb_second = second_emb(v_i)
49 | v_j_context_emb = context_emb(v_j)
50 |
51 | first = Lambda(lambda x: reduce_sum(
52 | x[0] * x[1], axis=-1, keep_dims=False), name='first_order')([v_i_emb, v_j_emb])
53 | second = Lambda(lambda x: reduce_sum(
54 | x[0] * x[1], axis=-1, keep_dims=False), name='second_order')([v_i_emb_second, v_j_context_emb])
55 |
56 | if order == 'first':
57 | output_list = [first]
58 | elif order == 'second':
59 | output_list = [second]
60 | else:
61 | output_list = [first, second]
62 |
63 | model = Model(inputs=[v_i, v_j], outputs=output_list)
64 |
65 | return model, {'first': first_emb, 'second': second_emb}
66 |
67 |
68 | class LINE:
69 | def __init__(self, graph, embedding_size=8, negative_ratio=5, order='second', ):
70 | """
71 |
72 | :param graph:
73 | :param embedding_size:
74 | :param negative_ratio:
75 | :param order: 'first','second','all'
76 | """
77 | if order not in ['first', 'second', 'all']:
78 |             raise ValueError("order must be 'first', 'second', or 'all'")
79 |
80 | self.graph = graph
81 | self.idx2node, self.node2idx = preprocess_nxgraph(graph)
82 | self.use_alias = True
83 |
84 | self.rep_size = embedding_size
85 | self.order = order
86 |
87 | self._embeddings = {}
88 | self.negative_ratio = negative_ratio
89 |
90 |
91 | self.node_size = graph.number_of_nodes()
92 | self.edge_size = graph.number_of_edges()
93 | self.samples_per_epoch = self.edge_size * (1 + negative_ratio)
94 |
95 | self._gen_sampling_table()
96 | self.reset_model()
97 |
98 | def reset_training_config(self, batch_size, times):
99 | self.batch_size = batch_size
100 | self.steps_per_epoch = (
101 | (self.samples_per_epoch - 1) // self.batch_size + 1) * times
102 |
103 | def reset_model(self, opt='adam'):
104 |
105 | self.model, self.embedding_dict = create_model(
106 | self.node_size, self.rep_size, self.order)
107 | self.model.compile(opt, line_loss)
108 | self.batch_it = self.batch_iter(self.node2idx)
109 |
110 | def _gen_sampling_table(self):
111 |
112 | # create sampling table for vertex
113 | power = 0.75
114 | numNodes = self.node_size
115 | node_degree = np.zeros(numNodes) # out degree
116 | node2idx = self.node2idx
117 |
118 | for edge in self.graph.edges():
119 | node_degree[node2idx[edge[0]]
120 | ] += self.graph[edge[0]][edge[1]].get('weight', 1.0)
121 |
122 | total_sum = sum([math.pow(node_degree[i], power)
123 | for i in range(numNodes)])
124 | norm_prob = [float(math.pow(node_degree[j], power)) /
125 | total_sum for j in range(numNodes)]
126 |
127 | self.node_accept, self.node_alias = create_alias_table(norm_prob)
128 |
129 | # create sampling table for edge
130 | numEdges = self.graph.number_of_edges()
131 | total_sum = sum([self.graph[edge[0]][edge[1]].get('weight', 1.0)
132 | for edge in self.graph.edges()])
133 | norm_prob = [self.graph[edge[0]][edge[1]].get('weight', 1.0) *
134 | numEdges / total_sum for edge in self.graph.edges()]
135 |
136 | self.edge_accept, self.edge_alias = create_alias_table(norm_prob)
137 |
138 | def batch_iter(self, node2idx):
139 |
140 | edges = [(node2idx[x[0]], node2idx[x[1]]) for x in self.graph.edges()]
141 |
142 | data_size = self.graph.number_of_edges()
143 | shuffle_indices = np.random.permutation(np.arange(data_size))
144 |         # mod == 0: positive (edge) samples; mod > 0: negative samples
145 | mod = 0
146 | mod_size = 1 + self.negative_ratio
147 | h = []
148 | t = []
149 | sign = 0
150 | count = 0
151 | start_index = 0
152 | end_index = min(start_index + self.batch_size, data_size)
153 | while True:
154 | if mod == 0:
155 |
156 | h = []
157 | t = []
158 | for i in range(start_index, end_index):
159 | if random.random() >= self.edge_accept[shuffle_indices[i]]:
160 | shuffle_indices[i] = self.edge_alias[shuffle_indices[i]]
161 | cur_h = edges[shuffle_indices[i]][0]
162 | cur_t = edges[shuffle_indices[i]][1]
163 | h.append(cur_h)
164 | t.append(cur_t)
165 | sign = np.ones(len(h))
166 | else:
167 | sign = np.ones(len(h)) * -1
168 | t = []
169 | for i in range(len(h)):
170 | t.append(alias_sample(
171 | self.node_accept, self.node_alias))
172 |
173 | if self.order == 'all':
174 | yield ([np.array(h), np.array(t)], [sign, sign])
175 | else:
176 | yield ([np.array(h), np.array(t)], [sign])
177 | mod += 1
178 | mod %= mod_size
179 | if mod == 0:
180 | start_index = end_index
181 | end_index = min(start_index + self.batch_size, data_size)
182 |
183 | if start_index >= data_size:
184 | count += 1
185 | mod = 0
186 | h = []
187 | shuffle_indices = np.random.permutation(np.arange(data_size))
188 | start_index = 0
189 | end_index = min(start_index + self.batch_size, data_size)
190 |
191 | def get_embeddings(self, ):
192 | self._embeddings = {}
193 | if self.order == 'first':
194 | embeddings = self.embedding_dict['first'].get_weights()[0]
195 | elif self.order == 'second':
196 | embeddings = self.embedding_dict['second'].get_weights()[0]
197 | else:
198 | embeddings = np.hstack((self.embedding_dict['first'].get_weights()[
199 | 0], self.embedding_dict['second'].get_weights()[0]))
200 | idx2node = self.idx2node
201 | for i, embedding in enumerate(embeddings):
202 | self._embeddings[idx2node[i]] = embedding
203 |
204 | return self._embeddings
205 |
206 | def train(self, batch_size=1024, epochs=1, initial_epoch=0, verbose=1, times=1):
207 | self.reset_training_config(batch_size, times)
208 | hist = self.model.fit_generator(self.batch_it, epochs=epochs, initial_epoch=initial_epoch,
209 | steps_per_epoch=self.steps_per_epoch,
210 | verbose=verbose)
211 |
212 | return hist
213 |
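214 | # Note on the objective (sketch): the generator yields sign labels y_true in
215 | # {+1, -1} (observed edges vs. negatively sampled node pairs) and the model
216 | # outputs the inner product of the two node embeddings as y_pred, so
217 | # line_loss = -mean(log(sigmoid(y_true * y_pred))) is the negative-sampling
218 | # objective from the LINE paper, applied to first- and/or second-order
219 | # proximity depending on `order`.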
--------------------------------------------------------------------------------
/ge/models/node2vec.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | """
4 |
5 |
6 |
7 | Author:
8 |
9 | Weichen Shen,weichenswc@163.com
10 |
11 |
12 |
13 | Reference:
14 |
15 | [1] Grover A, Leskovec J. node2vec: Scalable feature learning for networks[C]//Proceedings of the 22nd ACM SIGKDD international conference on Knowledge discovery and data mining. ACM, 2016: 855-864.(https://www.kdd.org/kdd2016/papers/files/rfp0218-groverA.pdf)
16 |
17 |
18 |
19 | """
20 |
21 | from gensim.models import Word2Vec
22 |
23 | from ..walker import RandomWalker
24 |
25 |
26 | class Node2Vec:
27 |
28 | def __init__(self, graph, walk_length, num_walks, p=1.0, q=1.0, workers=1, use_rejection_sampling=False):
29 |         self.graph = graph
30 |         self.w2v_model = None
31 |         self._embeddings = {}
32 | self.walker = RandomWalker(
33 | graph, p=p, q=q, use_rejection_sampling=use_rejection_sampling)
34 |
35 | print("Preprocess transition probs...")
36 | self.walker.preprocess_transition_probs()
37 |
38 | self.sentences = self.walker.simulate_walks(
39 | num_walks=num_walks, walk_length=walk_length, workers=workers, verbose=1)
40 |
41 | def train(self, embed_size=128, window_size=5, workers=3, iter=5, **kwargs):
42 | kwargs["sentences"] = self.sentences
43 | kwargs["min_count"] = kwargs.get("min_count", 0)
44 | kwargs["vector_size"] = embed_size
45 |         kwargs["sg"] = 1  # skip-gram
46 |         kwargs["hs"] = 0  # node2vec does not use hierarchical softmax
47 | kwargs["workers"] = workers
48 | kwargs["window"] = window_size
49 | kwargs["epochs"] = iter
50 |
51 | print("Learning embedding vectors...")
52 | model = Word2Vec(**kwargs)
53 | print("Learning embedding vectors done!")
54 |
55 | self.w2v_model = model
56 |
57 | return model
58 |
59 | def get_embeddings(self, ):
60 | if self.w2v_model is None:
61 |             print("model not trained")
62 | return {}
63 |
64 | self._embeddings = {}
65 | for word in self.graph.nodes():
66 | self._embeddings[word] = self.w2v_model.wv[word]
67 |
68 | return self._embeddings
69 |
--------------------------------------------------------------------------------
/ge/models/sdne.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | """
4 |
5 |
6 |
7 | Author:
8 |
9 | Weichen Shen,weichenswc@163.com
10 |
11 |
12 |
13 | Reference:
14 |
15 | [1] Wang D, Cui P, Zhu W. Structural deep network embedding[C]//Proceedings of the 22nd ACM SIGKDD international conference on Knowledge discovery and data mining. ACM, 2016: 1225-1234.(https://www.kdd.org/kdd2016/papers/files/rfp0191-wangAemb.pdf)
16 |
17 |
18 |
19 | """
20 | import time
21 |
22 | import numpy as np
23 | import scipy.sparse as sp
24 | import tensorflow as tf
25 | from tensorflow.python.keras import backend as K
26 | from tensorflow.python.keras.callbacks import History
27 | from tensorflow.python.keras.layers import Dense, Input
28 | from tensorflow.python.keras.models import Model
29 | from tensorflow.python.keras.regularizers import l1_l2
30 |
31 | from ..utils import preprocess_nxgraph
32 |
33 |
34 | def l_2nd(beta):
35 | def loss_2nd(y_true, y_pred):
36 | b_ = np.ones_like(y_true)
37 | b_[y_true != 0] = beta
38 | x = K.square((y_true - y_pred) * b_)
39 | t = K.sum(x, axis=-1, )
40 | return K.mean(t)
41 |
42 | return loss_2nd
43 |
44 |
45 | def l_1st(alpha):
46 | def loss_1st(y_true, y_pred):
47 | L = y_true
48 | Y = y_pred
49 |         batch_size = tf.cast(K.shape(L)[0], dtype=tf.float32)  # tf.to_float was removed in TF 2.x
50 | return alpha * 2 * tf.linalg.trace(tf.matmul(tf.matmul(Y, L, transpose_a=True), Y)) / batch_size
51 |
52 | return loss_1st
53 |
54 |
55 | def create_model(node_size, hidden_size=[256, 128], l1=1e-5, l2=1e-4):
56 | A = Input(shape=(node_size,))
57 | L = Input(shape=(None,))
58 | fc = A
59 | for i in range(len(hidden_size)):
60 | if i == len(hidden_size) - 1:
61 | fc = Dense(hidden_size[i], activation='relu',
62 | kernel_regularizer=l1_l2(l1, l2), name='1st')(fc)
63 | else:
64 | fc = Dense(hidden_size[i], activation='relu',
65 | kernel_regularizer=l1_l2(l1, l2))(fc)
66 | Y = fc
67 | for i in reversed(range(len(hidden_size) - 1)):
68 | fc = Dense(hidden_size[i], activation='relu',
69 | kernel_regularizer=l1_l2(l1, l2))(fc)
70 |
71 | A_ = Dense(node_size, 'relu', name='2nd')(fc)
72 | model = Model(inputs=[A, L], outputs=[A_, Y])
73 | emb = Model(inputs=A, outputs=Y)
74 | return model, emb
75 |
76 |
77 | class SDNE(object):
78 | def __init__(self, graph, hidden_size=[32, 16], alpha=1e-6, beta=5., nu1=1e-5, nu2=1e-4, ):
79 |
80 | self.graph = graph
81 | # self.g.remove_edges_from(self.g.selfloop_edges())
82 | self.idx2node, self.node2idx = preprocess_nxgraph(self.graph)
83 |
84 | self.node_size = self.graph.number_of_nodes()
85 | self.hidden_size = hidden_size
86 | self.alpha = alpha
87 | self.beta = beta
88 | self.nu1 = nu1
89 | self.nu2 = nu2
90 |
91 | self.A, self.L = _create_A_L(self.graph, self.node2idx) # Adj Matrix,L Matrix
92 | self.reset_model()
93 | self.inputs = [self.A, self.L]
94 | self._embeddings = {}
95 |
96 | def reset_model(self, opt='adam'):
97 |
98 | self.model, self.emb_model = create_model(self.node_size, hidden_size=self.hidden_size, l1=self.nu1,
99 | l2=self.nu2)
100 | self.model.compile(opt, [l_2nd(self.beta), l_1st(self.alpha)])
101 | self.get_embeddings()
102 |
103 | def train(self, batch_size=1024, epochs=1, initial_epoch=0, verbose=1):
104 | if batch_size >= self.node_size:
105 | if batch_size > self.node_size:
106 | print('batch_size({0}) > node_size({1}),set batch_size = {1}'.format(
107 | batch_size, self.node_size))
108 | batch_size = self.node_size
109 | return self.model.fit([self.A.todense(), self.L.todense()], [self.A.todense(), self.L.todense()],
110 | batch_size=batch_size, epochs=epochs, initial_epoch=initial_epoch, verbose=verbose,
111 | shuffle=False, )
112 | else:
113 | steps_per_epoch = (self.node_size - 1) // batch_size + 1
114 | hist = History()
115 | hist.on_train_begin()
116 | logs = {}
117 | for epoch in range(initial_epoch, epochs):
118 | start_time = time.time()
119 | losses = np.zeros(3)
120 | for i in range(steps_per_epoch):
121 | index = np.arange(
122 | i * batch_size, min((i + 1) * batch_size, self.node_size))
123 | A_train = self.A[index, :].todense()
124 | L_mat_train = self.L[index][:, index].todense()
125 | inp = [A_train, L_mat_train]
126 | batch_losses = self.model.train_on_batch(inp, inp)
127 | losses += batch_losses
128 | losses = losses / steps_per_epoch
129 |
130 | logs['loss'] = losses[0]
131 | logs['2nd_loss'] = losses[1]
132 | logs['1st_loss'] = losses[2]
133 | epoch_time = int(time.time() - start_time)
134 | hist.on_epoch_end(epoch, logs)
135 | if verbose > 0:
136 | print('Epoch {0}/{1}'.format(epoch + 1, epochs))
137 | print('{0}s - loss: {1: .4f} - 2nd_loss: {2: .4f} - 1st_loss: {3: .4f}'.format(
138 | epoch_time, losses[0], losses[1], losses[2]))
139 | return hist
140 |
141 | def evaluate(self, ):
142 | return self.model.evaluate(x=self.inputs, y=self.inputs, batch_size=self.node_size)
143 |
144 | def get_embeddings(self):
145 | self._embeddings = {}
146 | embeddings = self.emb_model.predict(self.A.todense(), batch_size=self.node_size)
147 | look_back = self.idx2node
148 | for i, embedding in enumerate(embeddings):
149 | self._embeddings[look_back[i]] = embedding
150 |
151 | return self._embeddings
152 |
153 |
154 | def _create_A_L(graph, node2idx):
155 | node_size = graph.number_of_nodes()
156 | A_data = []
157 | A_row_index = []
158 | A_col_index = []
159 |
160 | for edge in graph.edges():
161 | v1, v2 = edge
162 | edge_weight = graph[v1][v2].get('weight', 1)
163 |
164 | A_data.append(edge_weight)
165 | A_row_index.append(node2idx[v1])
166 | A_col_index.append(node2idx[v2])
167 |
168 | A = sp.csr_matrix((A_data, (A_row_index, A_col_index)), shape=(node_size, node_size))
169 | A_ = sp.csr_matrix((A_data + A_data, (A_row_index + A_col_index, A_col_index + A_row_index)),
170 | shape=(node_size, node_size))
171 |
172 | D = sp.diags(A_.sum(axis=1).flatten().tolist()[0])
173 | L = D - A_
174 | return A, L
175 |
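176 | # Note on the losses (sketch): l_2nd reconstructs each adjacency row through the
177 | # autoencoder, weighting non-zero entries by beta (second-order proximity);
178 | # l_1st is alpha * 2 * tr(Y^T L Y) / batch_size, where L = D - A is the graph
179 | # Laplacian built in _create_A_L, which pulls the embeddings of connected nodes
180 | # together (first-order proximity), as in the SDNE paper.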
--------------------------------------------------------------------------------
/ge/models/struc2vec.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | """
4 |
5 |
6 |
7 | Author:
8 |
9 | Weichen Shen,weichenswc@163.com
10 |
11 |
12 |
13 | Reference:
14 |
15 | [1] Ribeiro L F R, Saverese P H P, Figueiredo D R. struc2vec: Learning node representations from structural identity[C]//Proceedings of the 23rd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining. ACM, 2017: 385-394.(https://arxiv.org/pdf/1704.03165.pdf)
16 |
17 |
18 |
19 | """
20 |
21 | import math
22 | import os
23 | import shutil
24 | from collections import ChainMap, deque
25 |
26 | import numpy as np
27 | import pandas as pd
28 | from fastdtw import fastdtw
29 | from gensim.models import Word2Vec
30 | from joblib import Parallel, delayed
31 |
32 | from ..alias import create_alias_table
33 | from ..utils import partition_dict, preprocess_nxgraph
34 | from ..walker import BiasedWalker
35 |
36 |
37 | class Struc2Vec():
38 | def __init__(self, graph, walk_length=10, num_walks=100, workers=1, verbose=0, stay_prob=0.3, opt1_reduce_len=True,
39 | opt2_reduce_sim_calc=True, opt3_num_layers=None, temp_path='./temp_struc2vec/', reuse=False):
40 | self.graph = graph
41 | self.idx2node, self.node2idx = preprocess_nxgraph(graph)
42 | self.idx = list(range(len(self.idx2node)))
43 |
44 | self.opt1_reduce_len = opt1_reduce_len
45 | self.opt2_reduce_sim_calc = opt2_reduce_sim_calc
46 | self.opt3_num_layers = opt3_num_layers
47 |
48 |         self.reuse = reuse
49 | self.temp_path = temp_path
50 |
51 | if not os.path.exists(self.temp_path):
52 | os.mkdir(self.temp_path)
53 | if not reuse:
54 | shutil.rmtree(self.temp_path)
55 | os.mkdir(self.temp_path)
56 |
57 | self.create_context_graph(self.opt3_num_layers, workers, verbose)
58 | self.prepare_biased_walk()
59 | self.walker = BiasedWalker(self.idx2node, self.temp_path)
60 | self.sentences = self.walker.simulate_walks(
61 | num_walks, walk_length, stay_prob, workers, verbose)
62 |         self.w2v_model = None
63 |         self._embeddings = {}
64 |
65 | def create_context_graph(self, max_num_layers, workers=1, verbose=0, ):
66 |
67 | pair_distances = self._compute_structural_distance(
68 | max_num_layers, workers, verbose, )
69 | layers_adj, layers_distances = self._get_layer_rep(pair_distances)
70 | pd.to_pickle(layers_adj, self.temp_path + 'layers_adj.pkl')
71 |
72 | layers_accept, layers_alias = self._get_transition_probs(
73 | layers_adj, layers_distances)
74 | pd.to_pickle(layers_alias, self.temp_path + 'layers_alias.pkl')
75 | pd.to_pickle(layers_accept, self.temp_path + 'layers_accept.pkl')
76 |
77 | def prepare_biased_walk(self, ):
78 |
79 | sum_weights = {}
80 | sum_edges = {}
81 | average_weight = {}
82 | gamma = {}
83 | layer = 0
84 | while (os.path.exists(self.temp_path + 'norm_weights_distance-layer-' + str(layer) + '.pkl')):
85 | probs = pd.read_pickle(
86 | self.temp_path + 'norm_weights_distance-layer-' + str(layer) + '.pkl')
87 | for v, list_weights in probs.items():
88 | sum_weights.setdefault(layer, 0)
89 | sum_edges.setdefault(layer, 0)
90 | sum_weights[layer] += sum(list_weights)
91 | sum_edges[layer] += len(list_weights)
92 |
93 | average_weight[layer] = sum_weights[layer] / sum_edges[layer]
94 |
95 | gamma.setdefault(layer, {})
96 |
97 | for v, list_weights in probs.items():
98 | num_neighbours = 0
99 | for w in list_weights:
100 | if (w > average_weight[layer]):
101 | num_neighbours += 1
102 | gamma[layer][v] = num_neighbours
103 |
104 | layer += 1
105 |
106 | pd.to_pickle(average_weight, self.temp_path + 'average_weight')
107 | pd.to_pickle(gamma, self.temp_path + 'gamma.pkl')
108 |
109 | def train(self, embed_size=128, window_size=5, workers=3, iter=5):
110 |
111 | # pd.read_pickle(self.temp_path+'walks.pkl')
112 | sentences = self.sentences
113 |
114 | print("Learning representation...")
115 | model = Word2Vec(sentences, vector_size=embed_size, window=window_size, min_count=0, hs=1, sg=1,
116 | workers=workers,
117 | epochs=iter)
118 | print("Learning representation done!")
119 | self.w2v_model = model
120 |
121 | return model
122 |
123 | def get_embeddings(self, ):
124 | if self.w2v_model is None:
125 |             print("model not trained")
126 | return {}
127 |
128 | self._embeddings = {}
129 | for word in self.graph.nodes():
130 | self._embeddings[word] = self.w2v_model.wv[word]
131 |
132 | return self._embeddings
133 |
134 | def _compute_ordered_degreelist(self, max_num_layers):
135 |
136 | degreeList = {}
137 | vertices = self.idx # self.g.nodes()
138 | for v in vertices:
139 | degreeList[v] = self._get_order_degreelist_node(v, max_num_layers)
140 | return degreeList
141 |
142 | def _get_order_degreelist_node(self, root, max_num_layers=None):
143 | if max_num_layers is None:
144 | max_num_layers = float('inf')
145 |
146 | ordered_degree_sequence_dict = {}
147 | visited = [False] * len(self.graph.nodes())
148 | queue = deque()
149 | level = 0
150 | queue.append(root)
151 | visited[root] = True
152 |
153 | while (len(queue) > 0 and level <= max_num_layers):
154 |
155 | count = len(queue)
156 | if self.opt1_reduce_len:
157 | degree_list = {}
158 | else:
159 | degree_list = []
160 | while (count > 0):
161 |
162 | top = queue.popleft()
163 | node = self.idx2node[top]
164 | degree = len(self.graph[node])
165 |
166 | if self.opt1_reduce_len:
167 | degree_list[degree] = degree_list.get(degree, 0) + 1
168 | else:
169 | degree_list.append(degree)
170 |
171 | for nei in self.graph[node]:
172 | nei_idx = self.node2idx[nei]
173 | if not visited[nei_idx]:
174 | visited[nei_idx] = True
175 | queue.append(nei_idx)
176 | count -= 1
177 |             if self.opt1_reduce_len:
178 |                 ordered_degree_list = [(degree, freq)
179 |                                        for degree, freq in degree_list.items()]
180 |                 ordered_degree_list.sort(key=lambda x: x[0])
181 |             else:
182 |                 ordered_degree_list = sorted(degree_list)
183 |             ordered_degree_sequence_dict[level] = ordered_degree_list
184 | level += 1
185 |
186 | return ordered_degree_sequence_dict
187 |
188 | def _compute_structural_distance(self, max_num_layers, workers=1, verbose=0, ):
189 |
190 | if os.path.exists(self.temp_path + 'structural_dist.pkl'):
191 | structural_dist = pd.read_pickle(
192 | self.temp_path + 'structural_dist.pkl')
193 | else:
194 | if self.opt1_reduce_len:
195 | dist_func = cost_max
196 | else:
197 | dist_func = cost
198 |
199 | if os.path.exists(self.temp_path + 'degreelist.pkl'):
200 | degreeList = pd.read_pickle(self.temp_path + 'degreelist.pkl')
201 | else:
202 | degreeList = self._compute_ordered_degreelist(max_num_layers)
203 | pd.to_pickle(degreeList, self.temp_path + 'degreelist.pkl')
204 |
205 | if self.opt2_reduce_sim_calc:
206 | degrees = self._create_vectors()
207 | degreeListsSelected = {}
208 | vertices = {}
209 | n_nodes = len(self.idx)
210 | for v in self.idx: # c:list of vertex
211 | nbs = get_vertices(
212 | v, len(self.graph[self.idx2node[v]]), degrees, n_nodes)
213 | vertices[v] = nbs # store nbs
214 | degreeListsSelected[v] = degreeList[v] # store dist
215 | for n in nbs:
216 | # store dist of nbs
217 | degreeListsSelected[n] = degreeList[n]
218 | else:
219 | vertices = {}
220 | for v in degreeList:
221 | vertices[v] = [vd for vd in degreeList.keys() if vd > v]
222 |
223 | results = Parallel(n_jobs=workers, verbose=verbose, )(
224 | delayed(compute_dtw_dist)(part_list, degreeList, dist_func) for part_list in
225 | partition_dict(vertices, workers))
226 | dtw_dist = dict(ChainMap(*results))
227 |
228 | structural_dist = convert_dtw_struc_dist(dtw_dist)
229 | pd.to_pickle(structural_dist, self.temp_path +
230 | 'structural_dist.pkl')
231 |
232 | return structural_dist
233 |
234 | def _create_vectors(self):
235 |         degrees = {}  # store, per degree value, the list of vertices with that degree
236 |         degrees_sorted = set()  # store the distinct degree values
237 | G = self.graph
238 | for v in self.idx:
239 | degree = len(G[self.idx2node[v]])
240 | degrees_sorted.add(degree)
241 | if (degree not in degrees):
242 | degrees[degree] = {}
243 | degrees[degree]['vertices'] = []
244 | degrees[degree]['vertices'].append(v)
245 | degrees_sorted = np.array(list(degrees_sorted), dtype='int')
246 | degrees_sorted = np.sort(degrees_sorted)
247 |
248 | l = len(degrees_sorted)
249 | for index, degree in enumerate(degrees_sorted):
250 | if (index > 0):
251 | degrees[degree]['before'] = degrees_sorted[index - 1]
252 | if (index < (l - 1)):
253 | degrees[degree]['after'] = degrees_sorted[index + 1]
254 |
255 | return degrees
256 |
257 | def _get_layer_rep(self, pair_distances):
258 | layer_distances = {}
259 | layer_adj = {}
260 | for v_pair, layer_dist in pair_distances.items():
261 | for layer, distance in layer_dist.items():
262 | vx = v_pair[0]
263 | vy = v_pair[1]
264 |
265 | layer_distances.setdefault(layer, {})
266 | layer_distances[layer][vx, vy] = distance
267 |
268 | layer_adj.setdefault(layer, {})
269 | layer_adj[layer].setdefault(vx, [])
270 | layer_adj[layer].setdefault(vy, [])
271 | layer_adj[layer][vx].append(vy)
272 | layer_adj[layer][vy].append(vx)
273 |
274 | return layer_adj, layer_distances
275 |
276 | def _get_transition_probs(self, layers_adj, layers_distances):
277 | layers_alias = {}
278 | layers_accept = {}
279 |
280 | for layer in layers_adj:
281 |
282 | neighbors = layers_adj[layer]
283 | layer_distances = layers_distances[layer]
284 | node_alias_dict = {}
285 | node_accept_dict = {}
286 | norm_weights = {}
287 |
288 | for v, neighbors in neighbors.items():
289 | e_list = []
290 | sum_w = 0.0
291 |
292 | for n in neighbors:
293 | if (v, n) in layer_distances:
294 | wd = layer_distances[v, n]
295 | else:
296 | wd = layer_distances[n, v]
297 | w = np.exp(-float(wd))
298 | e_list.append(w)
299 | sum_w += w
300 |
301 | e_list = [x / sum_w for x in e_list]
302 | norm_weights[v] = e_list
303 | accept, alias = create_alias_table(e_list)
304 | node_alias_dict[v] = alias
305 | node_accept_dict[v] = accept
306 |
307 | pd.to_pickle(
308 | norm_weights, self.temp_path + 'norm_weights_distance-layer-' + str(layer) + '.pkl')
309 |
310 | layers_alias[layer] = node_alias_dict
311 | layers_accept[layer] = node_accept_dict
312 |
313 | return layers_accept, layers_alias
314 |
315 |
316 | def cost(a, b):
317 | ep = 0.5
318 | m = max(a, b) + ep
319 | mi = min(a, b) + ep
320 | return ((m / mi) - 1)
321 |
322 |
323 | def cost_min(a, b):
324 | ep = 0.5
325 | m = max(a[0], b[0]) + ep
326 | mi = min(a[0], b[0]) + ep
327 | return ((m / mi) - 1) * min(a[1], b[1])
328 |
329 |
330 | def cost_max(a, b):
331 | ep = 0.5
332 | m = max(a[0], b[0]) + ep
333 | mi = min(a[0], b[0]) + ep
334 | return ((m / mi) - 1) * max(a[1], b[1])
335 |
336 |
337 | def convert_dtw_struc_dist(distances, startLayer=1):
338 | """
339 |
340 | :param distances: dict of dict
341 | :param startLayer:
342 | :return:
343 | """
344 | for vertices, layers in distances.items():
345 | keys_layers = sorted(layers.keys())
346 | startLayer = min(len(keys_layers), startLayer)
347 | for layer in range(0, startLayer):
348 | keys_layers.pop(0)
349 |
350 | for layer in keys_layers:
351 | layers[layer] += layers[layer - 1]
352 | return distances
353 |
354 |
355 | def get_vertices(v, degree_v, degrees, n_nodes):
356 | a_vertices_selected = 2 * math.log(n_nodes, 2)
357 | vertices = []
358 | try:
359 | c_v = 0
360 |
361 | for v2 in degrees[degree_v]['vertices']:
362 | if (v != v2):
363 | vertices.append(v2) # same degree
364 | c_v += 1
365 | if (c_v > a_vertices_selected):
366 | raise StopIteration
367 |
368 | if ('before' not in degrees[degree_v]):
369 | degree_b = -1
370 | else:
371 | degree_b = degrees[degree_v]['before']
372 | if ('after' not in degrees[degree_v]):
373 | degree_a = -1
374 | else:
375 | degree_a = degrees[degree_v]['after']
376 | if (degree_b == -1 and degree_a == -1):
377 | raise StopIteration # not anymore v
378 | degree_now = verifyDegrees(degrees, degree_v, degree_a, degree_b)
379 | # nearest valid degree
380 | while True:
381 | for v2 in degrees[degree_now]['vertices']:
382 | if (v != v2):
383 | vertices.append(v2)
384 | c_v += 1
385 | if (c_v > a_vertices_selected):
386 | raise StopIteration
387 |
388 | if (degree_now == degree_b):
389 | if ('before' not in degrees[degree_b]):
390 | degree_b = -1
391 | else:
392 | degree_b = degrees[degree_b]['before']
393 | else:
394 | if ('after' not in degrees[degree_a]):
395 | degree_a = -1
396 | else:
397 | degree_a = degrees[degree_a]['after']
398 |
399 | if (degree_b == -1 and degree_a == -1):
400 | raise StopIteration
401 |
402 | degree_now = verifyDegrees(degrees, degree_v, degree_a, degree_b)
403 |
404 | except StopIteration:
405 | return list(vertices)
406 |
407 | return list(vertices)
408 |
409 |
410 | def verifyDegrees(degrees, degree_v_root, degree_a, degree_b):
411 | if (degree_b == -1):
412 | degree_now = degree_a
413 | elif (degree_a == -1):
414 | degree_now = degree_b
415 | elif (abs(degree_b - degree_v_root) < abs(degree_a - degree_v_root)):
416 | degree_now = degree_b
417 | else:
418 | degree_now = degree_a
419 |
420 | return degree_now
421 |
422 |
423 | def compute_dtw_dist(part_list, degreeList, dist_func):
424 | dtw_dist = {}
425 | for v1, nbs in part_list:
426 |         lists_v1 = degreeList[v1]  # lists_v1: ordered degree list of v1
427 |         for v2 in nbs:
428 |             lists_v2 = degreeList[v2]  # lists_v2: ordered degree list of v2
429 | max_layer = min(len(lists_v1), len(lists_v2)) # valid layer
430 | dtw_dist[v1, v2] = {}
431 | for layer in range(0, max_layer):
432 | dist, path = fastdtw(
433 | lists_v1[layer], lists_v2[layer], radius=1, dist=dist_func)
434 | dtw_dist[v1, v2][layer] = dist
435 | return dtw_dist
436 |
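437 | # Pipeline summary (for orientation, not executable): 1) per-node ordered degree
438 | # sequences via BFS (_compute_ordered_degreelist); 2) layer-wise DTW distances
439 | # between those sequences (compute_dtw_dist, using cost/cost_max); 3) a
440 | # multilayer context graph whose edge weights exp(-distance) are turned into
441 | # alias tables (_get_transition_probs); 4) biased random walks over that graph
442 | # (BiasedWalker) fed to Word2Vec in train().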
--------------------------------------------------------------------------------
/ge/utils.py:
--------------------------------------------------------------------------------
1 | def preprocess_nxgraph(graph):
2 | node2idx = {}
3 | idx2node = []
4 | node_size = 0
5 | for node in graph.nodes():
6 | node2idx[node] = node_size
7 | idx2node.append(node)
8 | node_size += 1
9 | return idx2node, node2idx
10 |
11 |
12 | def partition_dict(vertices, workers):
13 | batch_size = (len(vertices) - 1) // workers + 1
14 | part_list = []
15 | part = []
16 | count = 0
17 | for v1, nbs in vertices.items():
18 | part.append((v1, nbs))
19 | count += 1
20 | if count % batch_size == 0:
21 | part_list.append(part)
22 | part = []
23 | if len(part) > 0:
24 | part_list.append(part)
25 | return part_list
26 |
27 |
28 | def partition_list(vertices, workers):
29 | batch_size = (len(vertices) - 1) // workers + 1
30 | part_list = []
31 | part = []
32 | count = 0
33 | for v1, nbs in enumerate(vertices):
34 | part.append((v1, nbs))
35 | count += 1
36 | if count % batch_size == 0:
37 | part_list.append(part)
38 | part = []
39 | if len(part) > 0:
40 | part_list.append(part)
41 | return part_list
42 |
43 |
44 | def partition_num(num, workers):
45 | if num % workers == 0:
46 | return [num // workers] * workers
47 | else:
48 | return [num // workers] * workers + [num % workers]
49 |
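50 | # Worked example (sketch): partition_num splits `num` tasks across `workers`,
51 | # e.g. partition_num(10, 3) == [3, 3, 3, 1], so Parallel runs three workers
52 | # with 3 walks each plus a remainder batch of 1 (see RandomWalker.simulate_walks).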
--------------------------------------------------------------------------------
/ge/walker.py:
--------------------------------------------------------------------------------
1 | import itertools
2 | import math
3 | import random
4 |
5 | import pandas as pd
6 | from joblib import Parallel, delayed
7 |
8 | from .alias import alias_sample, create_alias_table
9 | from .utils import partition_num
10 |
11 |
12 | class RandomWalker:
13 | def __init__(self, G, p=1, q=1, use_rejection_sampling=False):
14 | """
15 |         :param G: networkx graph
16 |         :param p: return parameter, controls the likelihood of immediately revisiting a node in the walk.
17 |         :param q: in-out parameter, allows the search to differentiate between "inward" and "outward" nodes.
18 |         :param use_rejection_sampling: whether to use the rejection sampling strategy in node2vec.
19 | """
20 | self.G = G
21 | self.p = p
22 | self.q = q
23 | self.use_rejection_sampling = use_rejection_sampling
24 |
25 | def deepwalk_walk(self, walk_length, start_node):
26 |
27 | walk = [start_node]
28 |
29 | while len(walk) < walk_length:
30 | cur = walk[-1]
31 | cur_nbrs = list(self.G.neighbors(cur))
32 | if len(cur_nbrs) > 0:
33 | walk.append(random.choice(cur_nbrs))
34 | else:
35 | break
36 | return walk
37 |
38 | def node2vec_walk(self, walk_length, start_node):
39 |
40 | G = self.G
41 | alias_nodes = self.alias_nodes
42 | alias_edges = self.alias_edges
43 |
44 | walk = [start_node]
45 |
46 | while len(walk) < walk_length:
47 | cur = walk[-1]
48 | cur_nbrs = list(G.neighbors(cur))
49 | if len(cur_nbrs) > 0:
50 | if len(walk) == 1:
51 | walk.append(
52 | cur_nbrs[alias_sample(alias_nodes[cur][0], alias_nodes[cur][1])])
53 | else:
54 | prev = walk[-2]
55 | edge = (prev, cur)
56 | next_node = cur_nbrs[alias_sample(alias_edges[edge][0],
57 | alias_edges[edge][1])]
58 | walk.append(next_node)
59 | else:
60 | break
61 |
62 | return walk
63 |
64 | def node2vec_walk2(self, walk_length, start_node):
65 | """
66 | Reference:
67 | KnightKing: A Fast Distributed Graph Random Walk Engine
68 | http://madsys.cs.tsinghua.edu.cn/publications/SOSP19-yang.pdf
69 | """
70 |
71 | def rejection_sample(inv_p, inv_q, nbrs_num):
72 | upper_bound = max(1.0, max(inv_p, inv_q))
73 | lower_bound = min(1.0, min(inv_p, inv_q))
74 | shatter = 0
75 | second_upper_bound = max(1.0, inv_q)
76 | if (inv_p > second_upper_bound):
77 | shatter = second_upper_bound / nbrs_num
78 | upper_bound = second_upper_bound + shatter
79 | return upper_bound, lower_bound, shatter
80 |
81 | G = self.G
82 | alias_nodes = self.alias_nodes
83 | inv_p = 1.0 / self.p
84 | inv_q = 1.0 / self.q
85 | walk = [start_node]
86 | while len(walk) < walk_length:
87 | cur = walk[-1]
88 | cur_nbrs = list(G.neighbors(cur))
89 | if len(cur_nbrs) > 0:
90 | if len(walk) == 1:
91 | walk.append(
92 | cur_nbrs[alias_sample(alias_nodes[cur][0], alias_nodes[cur][1])])
93 | else:
94 | upper_bound, lower_bound, shatter = rejection_sample(
95 | inv_p, inv_q, len(cur_nbrs))
96 | prev = walk[-2]
97 | prev_nbrs = set(G.neighbors(prev))
98 | while True:
99 | prob = random.random() * upper_bound
100 | if (prob + shatter >= upper_bound):
101 | next_node = prev
102 | break
103 | next_node = cur_nbrs[alias_sample(
104 | alias_nodes[cur][0], alias_nodes[cur][1])]
105 | if (prob < lower_bound):
106 | break
107 | if (prob < inv_p and next_node == prev):
108 | break
109 | _prob = 1.0 if next_node in prev_nbrs else inv_q
110 | if (prob < _prob):
111 | break
112 | walk.append(next_node)
113 | else:
114 | break
115 | return walk
116 |
117 | def simulate_walks(self, num_walks, walk_length, workers=1, verbose=0):
118 |
119 | G = self.G
120 |
121 | nodes = list(G.nodes())
122 |
123 | results = Parallel(n_jobs=workers, verbose=verbose, )(
124 | delayed(self._simulate_walks)(nodes, num, walk_length) for num in
125 | partition_num(num_walks, workers))
126 |
127 | walks = list(itertools.chain(*results))
128 |
129 | return walks
130 |
131 | def _simulate_walks(self, nodes, num_walks, walk_length, ):
132 | walks = []
133 | for _ in range(num_walks):
134 | random.shuffle(nodes)
135 | for v in nodes:
136 | if self.p == 1 and self.q == 1:
137 | walks.append(self.deepwalk_walk(
138 | walk_length=walk_length, start_node=v))
139 | elif self.use_rejection_sampling:
140 | walks.append(self.node2vec_walk2(
141 | walk_length=walk_length, start_node=v))
142 | else:
143 | walks.append(self.node2vec_walk(
144 | walk_length=walk_length, start_node=v))
145 | return walks
146 |
147 | def get_alias_edge(self, t, v):
148 |         """
149 |         Compute the unnormalized transition probabilities between node v and its neighbors, given the previously visited node t.
150 |         :param t: previously visited node
151 |         :param v: current node
152 |         :return: (accept, alias) tables over v's neighbors
153 |         """
154 | G = self.G
155 | p = self.p
156 | q = self.q
157 |
158 | unnormalized_probs = []
159 | for x in G.neighbors(v):
160 | weight = G[v][x].get('weight', 1.0) # w_vx
161 | if x == t: # d_tx == 0
162 | unnormalized_probs.append(weight / p)
163 | elif G.has_edge(x, t): # d_tx == 1
164 | unnormalized_probs.append(weight)
165 | else: # d_tx > 1
166 | unnormalized_probs.append(weight / q)
167 | norm_const = sum(unnormalized_probs)
168 | normalized_probs = [
169 | float(u_prob) / norm_const for u_prob in unnormalized_probs]
170 |
171 | return create_alias_table(normalized_probs)
172 |
173 | def preprocess_transition_probs(self):
174 | """
175 | Preprocessing of transition probabilities for guiding the random walks.
176 | """
177 | G = self.G
178 | alias_nodes = {}
179 | for node in G.nodes():
180 | unnormalized_probs = [G[node][nbr].get('weight', 1.0)
181 | for nbr in G.neighbors(node)]
182 | norm_const = sum(unnormalized_probs)
183 | normalized_probs = [
184 | float(u_prob) / norm_const for u_prob in unnormalized_probs]
185 | alias_nodes[node] = create_alias_table(normalized_probs)
186 |
187 | if not self.use_rejection_sampling:
188 | alias_edges = {}
189 |
190 | for edge in G.edges():
191 | alias_edges[edge] = self.get_alias_edge(edge[0], edge[1])
192 | if not G.is_directed():
193 | alias_edges[(edge[1], edge[0])] = self.get_alias_edge(edge[1], edge[0])
194 | self.alias_edges = alias_edges
195 |
196 | self.alias_nodes = alias_nodes
197 | return
198 |
199 |
200 | class BiasedWalker:
201 | def __init__(self, idx2node, temp_path):
202 |
203 | self.idx2node = idx2node
204 | self.idx = list(range(len(self.idx2node)))
205 | self.temp_path = temp_path
206 |
207 |
208 | def simulate_walks(self, num_walks, walk_length, stay_prob=0.3, workers=1, verbose=0):
209 |
210 | layers_adj = pd.read_pickle(self.temp_path + 'layers_adj.pkl')
211 | layers_alias = pd.read_pickle(self.temp_path + 'layers_alias.pkl')
212 | layers_accept = pd.read_pickle(self.temp_path + 'layers_accept.pkl')
213 | gamma = pd.read_pickle(self.temp_path + 'gamma.pkl')
214 |
215 | nodes = self.idx # list(self.g.nodes())
216 |
217 | results = Parallel(n_jobs=workers, verbose=verbose, )(
218 | delayed(self._simulate_walks)(nodes, num, walk_length, stay_prob, layers_adj, layers_accept, layers_alias,
219 | gamma) for num in
220 | partition_num(num_walks, workers))
221 |
222 | walks = list(itertools.chain(*results))
223 | return walks
224 |
225 | def _simulate_walks(self, nodes, num_walks, walk_length, stay_prob, layers_adj, layers_accept, layers_alias, gamma):
226 | walks = []
227 | for _ in range(num_walks):
228 | random.shuffle(nodes)
229 | for v in nodes:
230 | walks.append(self._exec_random_walk(layers_adj, layers_accept, layers_alias,
231 | v, walk_length, gamma, stay_prob))
232 | return walks
233 |
234 | def _exec_random_walk(self, graphs, layers_accept, layers_alias, v, walk_length, gamma, stay_prob=0.3):
235 | initialLayer = 0
236 | layer = initialLayer
237 |
238 | path = []
239 | path.append(self.idx2node[v])
240 |
241 | while len(path) < walk_length:
242 | r = random.random()
243 | if (r < stay_prob): # same layer
244 | v = chooseNeighbor(v, graphs, layers_alias,
245 | layers_accept, layer)
246 | path.append(self.idx2node[v])
247 | else: # different layer
248 | r = random.random()
249 | try:
250 | x = math.log(gamma[layer][v] + math.e)
251 | p_moveup = (x / (x + 1))
252 |             except KeyError:
253 | print(layer, v)
254 | raise ValueError()
255 |
256 | if (r > p_moveup):
257 | if (layer > initialLayer):
258 | layer = layer - 1
259 | else:
260 | if ((layer + 1) in graphs and v in graphs[layer + 1]):
261 | layer = layer + 1
262 |
263 | return path
264 |
265 |
266 | def chooseNeighbor(v, graphs, layers_alias, layers_accept, layer):
267 | v_list = graphs[layer][v]
268 |
269 | idx = alias_sample(layers_accept[layer][v], layers_alias[layer][v])
270 | v = v_list[idx]
271 |
272 | return v
273 |
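274 | # Note (sketch): get_alias_edge implements the node2vec search bias. For a walk
275 | # at v that arrived from t, a neighbor x of v gets weight w/p if x == t
276 | # (d_tx == 0), w if x is also a neighbor of t (d_tx == 1), and w/q otherwise
277 | # (d_tx > 1); the weights are normalized and stored as alias tables for O(1)
278 | # sampling. node2vec_walk2 instead rejection-samples these probabilities
279 | # (KnightKing-style), avoiding the per-edge alias tables entirely.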
--------------------------------------------------------------------------------
/pics/code.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shenweichen/GraphEmbedding/c8efad063c6a1162cb545385b3e18559b2e46df9/pics/code.png
--------------------------------------------------------------------------------
/pics/deepctrbot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shenweichen/GraphEmbedding/c8efad063c6a1162cb545385b3e18559b2e46df9/pics/deepctrbot.png
--------------------------------------------------------------------------------
/pics/edge_list.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shenweichen/GraphEmbedding/c8efad063c6a1162cb545385b3e18559b2e46df9/pics/edge_list.png
--------------------------------------------------------------------------------
/pics/weichennote.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shenweichen/GraphEmbedding/c8efad063c6a1162cb545385b3e18559b2e46df9/pics/weichennote.png
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 |
3 |
4 | with open("README.md", "r") as fh:
5 |
6 | long_description = fh.read()
7 |
8 |
9 | REQUIRED_PACKAGES = [
10 | # 'tensorflow>=1.4.0',
11 | 'gensim>=4.0.0',
12 | 'networkx',
13 | 'joblib',
14 | 'fastdtw',
15 | 'tqdm',
16 | 'numpy',
17 | 'scikit-learn',
18 | 'pandas',
19 | 'matplotlib',
20 | 'deepctr'
21 | ]
22 |
23 |
24 | setuptools.setup(
25 |
26 | name="ge",
27 |
28 | version="0.0.0",
29 |
30 | author="Weichen Shen",
31 |
32 | author_email="weichenswc@163.com",
33 |
34 | url="https://github.com/shenweichen/GraphEmbedding",
35 |
36 | packages=setuptools.find_packages(exclude=[]),
37 |
38 |     python_requires='>=3.5',
39 |
40 | install_requires=REQUIRED_PACKAGES,
41 |
42 | extras_require={
43 |
44 | "cpu": ['tensorflow>=1.4.0,!=1.7.*,!=1.8.*'],
45 |
46 | "gpu": ['tensorflow-gpu>=1.4.0,!=1.7.*,!=1.8.*'],
47 |
48 | },
49 |
50 | entry_points={
51 |
52 | },
53 | license="MIT license",
54 |
55 |
56 | )
57 |
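58 | # Usage note (editor's sketch): the extras above select the TensorFlow backend,
59 | # so from a checkout of this repository one of the following applies:
60 | #
61 | #     pip install -e .           # core dependencies only
62 | #     pip install -e ".[cpu]"   # additionally installs tensorflow
63 | #     pip install -e ".[gpu]"   # additionally installs tensorflow-gpu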
--------------------------------------------------------------------------------
/tests/Wiki_edgelist.txt:
--------------------------------------------------------------------------------
1 | 0 1
2 | 0 2
3 | 0 3
4 | 1 2
5 | 2 3
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shenweichen/GraphEmbedding/c8efad063c6a1162cb545385b3e18559b2e46df9/tests/__init__.py
--------------------------------------------------------------------------------
/tests/deepwalk_test.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 |
3 | from ge import DeepWalk
4 |
5 |
6 | def test_DeepWalk():
7 | G = nx.read_edgelist('./tests/Wiki_edgelist.txt',
8 | create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
9 |
10 |     model = DeepWalk(G, walk_length=3, num_walks=2, workers=1)
11 |     model.train(window_size=3, iter=1)
12 |     embeddings = model.get_embeddings()
13 |     assert len(embeddings) == G.number_of_nodes()
14 |
15 | if __name__ == "__main__":
16 | pass
17 |
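18 | # Editor's note (hedged): the repository's examples score embeddings with
19 | # ge.classify.Classifier; extending this smoke test into an evaluation would
20 | # look roughly like:
21 | #
22 | #     from sklearn.linear_model import LogisticRegression
23 | #     from ge.classify import read_node_label, Classifier
24 | #     X, Y = read_node_label('./data/wiki/wiki_labels.txt')
25 | #     clf = Classifier(embeddings=embeddings, clf=LogisticRegression())
26 | #     clf.split_train_evaluate(X, Y, 0.8)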
--------------------------------------------------------------------------------
/tests/line_test.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 |
3 | from ge import LINE
4 |
5 |
6 | def test_LINE():
7 | G = nx.read_edgelist('./tests/Wiki_edgelist.txt',
8 | create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
9 |
10 |     model = LINE(G, embedding_size=2, order='second')
11 |     model.train(batch_size=2, epochs=1, verbose=2)
12 |     embeddings = model.get_embeddings()
13 |     assert len(embeddings) == G.number_of_nodes()
14 |
15 | if __name__ == "__main__":
16 | pass
17 |
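18 | # Editor's note (hedged): order='second' trains on second-order proximity
19 | # alone; this implementation also accepts 'first' and 'all', where 'all'
20 | # concatenates the first- and second-order embeddings:
21 | #
22 | #     model = LINE(G, embedding_size=2, order='all')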
--------------------------------------------------------------------------------
/tests/node2vec_test.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 | import pytest
3 |
4 | from ge import Node2Vec
5 |
6 |
7 | # run the walk generation both with and without rejection sampling
8 | @pytest.mark.parametrize(
9 |     'use_rejection_sampling',
10 |     [True, False]
11 | )
12 | def test_Node2Vec(use_rejection_sampling):
13 | G = nx.read_edgelist('./tests/Wiki_edgelist.txt',
14 | create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
15 | model = Node2Vec(G, walk_length=10, num_walks=80,
16 | p=0.25, q=4, workers=1, use_rejection_sampling=use_rejection_sampling)
17 |     model.train(window_size=5, iter=3)
18 |     embeddings = model.get_embeddings()
19 |     assert len(embeddings) == G.number_of_nodes()
20 |
21 | if __name__ == "__main__":
22 | pass
23 |
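24 | # Editor's note: p is node2vec's return parameter and q its in-out parameter.
25 | # Here p=0.25 makes backtracking likely (unnormalized weight 1/p = 4) and q=4
26 | # keeps the walk near the previous node (weight 1/q = 0.25), i.e. BFS-like,
27 | # locally biased walks.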
--------------------------------------------------------------------------------
/tests/sdne_test.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 | import tensorflow as tf
3 |
4 | from ge import SDNE
5 |
6 |
7 | def test_SDNE():
8 |     if tuple(map(int, tf.__version__.split('.')[:2])) >= (1, 15):
9 |         return  # TODO: SDNE has not been ported to the TF >= 1.15 / 2.x APIs
10 | G = nx.read_edgelist('./tests/Wiki_edgelist.txt',
11 | create_using=nx.DiGraph(), nodetype=None, data=[('weight', int)])
12 |
13 |     model = SDNE(G, hidden_size=[8, 4])
14 |     model.train(batch_size=2, epochs=1, verbose=2)
15 |     embeddings = model.get_embeddings()
16 |     assert len(embeddings) == G.number_of_nodes()
17 |
18 | if __name__ == "__main__":
19 | pass
20 |
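21 | # Editor's note (hedged): hidden_size=[8, 4] sets the encoder layer widths, so
22 | # the final embedding dimension here is 4; the decoder mirrors the encoder to
23 | # reconstruct each node's adjacency row (second-order proximity), while a
24 | # Laplacian-eigenmaps-style term on the middle layer preserves first-order
25 | # proximity.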
--------------------------------------------------------------------------------
/tests/struct2vec_test.py:
--------------------------------------------------------------------------------
1 | import networkx as nx
2 |
3 | from ge import Struc2Vec
4 |
5 |
6 | def test_Struc2Vec():
7 | G = nx.read_edgelist('./tests/Wiki_edgelist.txt', create_using=nx.DiGraph(), nodetype=None,
8 | data=[('weight', int)])
9 |
10 |     model = Struc2Vec(G, walk_length=3, num_walks=1, workers=1, verbose=40)
11 |     model.train()
12 |     embeddings = model.get_embeddings()
13 |     assert len(embeddings) == G.number_of_nodes()
14 |
15 | if __name__ == "__main__":
16 | pass
17 |
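18 | # Editor's note (hedged): Struc2Vec caches intermediate structural-similarity
19 | # and layer files under a temp directory (its temp_path argument); passing
20 | # reuse=True is meant to skip recomputing them across runs.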
--------------------------------------------------------------------------------