├── data ├── book │ ├── kg_final.npy │ └── ratings_final.npy ├── music │ ├── kg_final.npy │ ├── ratings_final.npy │ └── item_index2entity_id.txt └── movie │ └── ratings_final.npy ├── utils ├── helper.py ├── parser.py ├── metrics.py ├── evaluate.py └── data_loader.py ├── README.md ├── main.py └── modules └── MCCLK.py /data/book/kg_final.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CCIIPLab/MCCLK/HEAD/data/book/kg_final.npy -------------------------------------------------------------------------------- /data/music/kg_final.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CCIIPLab/MCCLK/HEAD/data/music/kg_final.npy -------------------------------------------------------------------------------- /data/book/ratings_final.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CCIIPLab/MCCLK/HEAD/data/book/ratings_final.npy -------------------------------------------------------------------------------- /data/movie/ratings_final.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CCIIPLab/MCCLK/HEAD/data/movie/ratings_final.npy -------------------------------------------------------------------------------- /data/music/ratings_final.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CCIIPLab/MCCLK/HEAD/data/music/ratings_final.npy -------------------------------------------------------------------------------- /utils/helper.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Created on Aug 19, 2016 3 | @author: Xiang Wang (xiangwang@u.nus.edu) 4 | ''' 5 | __author__ = "xiangwang" 6 | import os 7 | import re 8 | 9 | def txt2list(file_src): 10 | orig_file = open(file_src, "r") 11 | lines = orig_file.readlines() 12 | return lines 13 | 14 | 15 | def ensureDir(dir_path): 16 | d = os.path.dirname(dir_path) 17 | if not os.path.exists(d): 18 | os.makedirs(d) 19 | 20 | 21 | def uni2str(unicode_str): 22 | return str(unicode_str.encode('ascii', 'ignore')).replace('\n', '').strip() 23 | 24 | 25 | def hasNumbers(inputString): 26 | return bool(re.search(r'\d', inputString)) 27 | 28 | def delMultiChar(inputString, chars): 29 | for ch in chars: 30 | inputString = inputString.replace(ch, '') 31 | return inputString 32 | 33 | def merge_two_dicts(x, y): 34 | z = x.copy() # start with x's keys and values 35 | z.update(y) # modifies z with y's keys and values & returns None 36 | return z 37 | 38 | def early_stopping(log_value, best_value, stopping_step, expected_order='acc', flag_step=100): 39 | # early stopping strategy: 40 | assert expected_order in ['acc', 'dec'] 41 | 42 | if (expected_order == 'acc' and log_value >= best_value) or (expected_order == 'dec' and log_value <= best_value): 43 | stopping_step = 0 44 | best_value = log_value 45 | else: 46 | stopping_step += 1 47 | 48 | if stopping_step >= flag_step: 49 | print("Early stopping is trigger at step: {} log:{}".format(flag_step, log_value)) 50 | should_stop = True 51 | else: 52 | should_stop = False 53 | return best_value, stopping_step, should_stop 54 | -------------------------------------------------------------------------------- /utils/parser.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | def 
parse_args(): 4 | parser = argparse.ArgumentParser(description="MCCLK") 5 | # ===== dataset ===== # 6 | parser.add_argument("--dataset", nargs="?", default="music", help="Choose a dataset:[last-fm,amazon-book,alibaba, book, music, movie]") 7 | parser.add_argument("--data_path", nargs="?", default="data/", help="Input data path.") 8 | 9 | # ===== train ===== # 10 | parser.add_argument('--epoch', type=int, default=400, help='number of epochs') 11 | parser.add_argument('--batch_size', type=int, default=4096, help='batch size') 12 | parser.add_argument('--test_batch_size', type=int, default=2048, help='test batch size') 13 | parser.add_argument('--dim', type=int, default=64, help='embedding size') 14 | parser.add_argument('--l2', type=float, default=1e-5, help='l2 regularization weight') 15 | parser.add_argument('--lr', type=float, default=3e-3, help='learning rate') # default = 1e-4 16 | parser.add_argument('--sim_regularity', type=float, default=1e-4, help='regularization weight for latent factor') 17 | parser.add_argument("--inverse_r", type=bool, default=True, help="consider inverse relation or not") 18 | parser.add_argument("--node_dropout", type=bool, default=True, help="consider node dropout or not") 19 | parser.add_argument("--node_dropout_rate", type=float, default=0.5, help="ratio of node dropout") 20 | parser.add_argument("--mess_dropout", type=bool, default=True, help="consider message dropout or not") 21 | parser.add_argument("--mess_dropout_rate", type=float, default=0.1, help="ratio of message dropout") 22 | parser.add_argument("--batch_test_flag", type=bool, default=True, help="score items in mini-batches at test time or not") 23 | parser.add_argument("--channel", type=int, default=64, help="hidden channels for model") 24 | parser.add_argument("--cuda", type=bool, default=True, help="use gpu or not") 25 | parser.add_argument("--gpu_id", type=int, default=0, help="gpu id") 26 | parser.add_argument('--Ks', nargs='?', default='[5, 10, 20, 50, 100]', help='K values for top-K evaluation') 27 | parser.add_argument('--test_flag', nargs='?', default='part', 28 | help='Specify the test type from {part, full}, indicating whether the inference is done in mini-batches') 29 | parser.add_argument("--ind", type=str, default='mi', help="Independence modeling: mi, distance, cosine") 30 | 31 | # ===== relation context ===== # 32 | parser.add_argument('--context_hops', type=int, default=2, help='number of context hops') 33 | 34 | # ===== save model ===== # 35 | parser.add_argument("--save", type=bool, default=False, help="save model or not") 36 | parser.add_argument("--out_dir", type=str, default="./weights/", help="output directory for model") 37 | 38 | return parser.parse_args() 39 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Multi-level Cross-view Contrastive Learning for Knowledge-aware Recommender System 2 | This is our PyTorch implementation for the paper: 3 | > Ding Zou, Wei Wei, Xian-Ling Mao, Ziyang Wang, Minghui Qiu, Feida Zhu, Xin Cao (2022). Multi-level Cross-view Contrastive Learning for Knowledge-aware Recommender System, [Paper in arXiv](https://arxiv.org/pdf/2204.08807.pdf). In SIGIR'22.
4 | 5 | 6 | ## Introduction 7 | Multi-level Cross-view Contrastive Learning for Knowledge-aware Recommender System (MCCLK) is a knowledge-aware recommendation solution based on GNN and Contrastive Learning, 8 | which proposes a multi-level cross-view contrastive framework to enhance representation learning from multi-faceted aspects. 9 | 10 | ## Requirements 11 | The code has been tested under Python 3.7.9. The required packages are as follows: 12 | - pytorch == 1.5.0 13 | - numpy == 1.15.4 14 | - scipy == 1.1.0 15 | - sklearn == 0.20.0 16 | - torch_scatter == 2.0.5 17 | - torch_sparse == 0.6.10 18 | - networkx == 2.5 19 | 20 | ## Usage 21 | The hyper-parameter search ranges and optimal settings are clearly stated in the code (see the parse_args function in utils/parser.py). 22 | * Train and Test 23 | 24 | ``` 25 | python main.py 26 | ``` 27 | 28 | ## Citation 29 | If you use our code and datasets in your research, please cite: 30 | ``` 31 | @inproceedings{mcclk2022, 32 | author = {Zou, Ding and Wei, Wei and 33 | Mao, Xian-Ling and 34 | Wang, Ziyang and 35 | Qiu, Minghui and 36 | Zhu, Feida and 37 | Cao, Xin}, 38 | title = {Multi-level Cross-view Contrastive Learning for Knowledge-aware Recommender System}, 39 | booktitle = {Proceedings of the 45th International {ACM} {SIGIR} Conference on 40 | Research and Development in Information Retrieval, {SIGIR} 2022, Madrid, 41 | Spain, July 11-15, 2022.}, 42 | year = {2022}, 43 | } 44 | ``` 45 | 46 | 47 | 48 | ## Dataset 49 | 50 | We provide three processed datasets: Book-Crossing, MovieLens-1M, and Last.FM. 51 | 52 | We follow the paper "[RippleNet: Propagating User Preferences on the Knowledge 53 | Graph for Recommender Systems](https://github.com/hwwang55/RippleNet)" to process the data. 54 | 55 | 56 | | | | Book-Crossing | MovieLens-1M | Last.FM | 57 | | :-------------------: | :------------ | ----------: | --------: | ---------: | 58 | | User-Item Interaction | #Users | 17,860 | 6,036 | 1,872 | 59 | | | #Items | 14,967 | 2,445 | 3,846 | 60 | | | #Interactions | 139,746 | 753,772 | 42,346| 61 | | Knowledge Graph | #Entities | 77,903 | 182,011| 9,366 | 62 | | | #Relations | 25 | 12| 60 | 63 | | | #Triplets | 151,500 | 1,241,996| 15,518 | 64 | 65 | 66 | ## Reference 67 | - We partially reuse the code of [KGIN](https://github.com/huangtinglin/Knowledge_Graph_based_Intent_Network). 68 | - All other baselines can be found on GitHub. 69 | -------------------------------------------------------------------------------- /utils/metrics.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from sklearn.metrics import roc_auc_score 3 | 4 | 5 | def recall(rank, ground_truth, N): 6 | return len(set(rank[:N]) & set(ground_truth)) / float(len(set(ground_truth))) 7 | 8 | 9 | def precision_at_k(r, k): 10 | """Score is precision @ k 11 | Relevance is binary (nonzero is relevant). 12 | Returns: 13 | Precision @ k 14 | Raises: 15 | ValueError: len(r) must be >= k 16 | """ 17 | assert k >= 1 18 | r = np.asarray(r)[:k] 19 | return np.mean(r) 20 | 21 | 22 | def average_precision(r,cut): 23 | """Score is average precision (area under PR curve) 24 | Relevance is binary (nonzero is relevant). 25 | Returns: 26 | Average precision 27 | """ 28 | r = np.asarray(r) 29 | out = [precision_at_k(r, k + 1) for k in range(cut) if r[k]] 30 | if not out: 31 | return 0.
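# `out` holds precision@(k+1) at every rank k < cut where r[k] is relevant; the return
# below averages these values, normalized by min(cut, total number of relevant items in r).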
32 | return np.sum(out)/float(min(cut, np.sum(r))) 33 | 34 | 35 | def mean_average_precision(rs): 36 | """Score is mean average precision 37 | Relevance is binary (nonzero is relevant). 38 | Returns: 39 | Mean average precision 40 | """ 41 | return np.mean([average_precision(r) for r in rs]) 42 | 43 | 44 | def dcg_at_k(r, k, method=1): 45 | """Score is discounted cumulative gain (dcg) 46 | Relevance is positive real values. Can use binary 47 | as the previous methods. 48 | Returns: 49 | Discounted cumulative gain 50 | """ 51 | r = np.asfarray(r)[:k] 52 | if r.size: 53 | if method == 0: 54 | return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1))) 55 | elif method == 1: 56 | return np.sum(r / np.log2(np.arange(2, r.size + 2))) 57 | else: 58 | raise ValueError('method must be 0 or 1.') 59 | return 0. 60 | 61 | 62 | def ndcg_at_k(r, k, ground_truth, method=1): 63 | """Score is normalized discounted cumulative gain (ndcg) 64 | Relevance is positive real values. Can use binary 65 | as the previous methods. 66 | Returns: 67 | Normalized discounted cumulative gain 68 | 69 | Low but correct defination 70 | """ 71 | GT = set(ground_truth) 72 | if len(GT) > k : 73 | sent_list = [1.0] * k 74 | else: 75 | sent_list = [1.0]*len(GT) + [0.0]*(k-len(GT)) 76 | dcg_max = dcg_at_k(sent_list, k, method) 77 | if not dcg_max: 78 | return 0. 79 | return dcg_at_k(r, k, method) / dcg_max 80 | 81 | 82 | def recall_at_k(r, k, all_pos_num): 83 | r = np.asfarray(r)[:k] 84 | return np.sum(r) / all_pos_num 85 | 86 | 87 | def hit_at_k(r, k): 88 | r = np.array(r)[:k] 89 | if np.sum(r) > 0: 90 | return 1. 91 | else: 92 | return 0. 93 | 94 | def F1(pre, rec): 95 | if pre + rec > 0: 96 | return (2.0 * pre * rec) / (pre + rec) 97 | else: 98 | return 0. 99 | 100 | def AUC(ground_truth, prediction): 101 | try: 102 | res = roc_auc_score(y_true=ground_truth, y_score=prediction) 103 | except Exception: 104 | res = 0. 105 | return res -------------------------------------------------------------------------------- /utils/evaluate.py: -------------------------------------------------------------------------------- 1 | from .metrics import * 2 | from .parser import parse_args 3 | 4 | import torch 5 | import numpy as np 6 | import multiprocessing 7 | import heapq 8 | from time import time 9 | 10 | cores = multiprocessing.cpu_count() // 2 11 | 12 | args = parse_args() 13 | Ks = eval(args.Ks) 14 | device = torch.device("cuda:" + str(args.gpu_id)) if args.cuda else torch.device("cpu") 15 | BATCH_SIZE = args.test_batch_size 16 | batch_test_flag = args.batch_test_flag 17 | 18 | def _get_topk_feed_data(user, items): 19 | res = list() 20 | for item in items: 21 | res.append([user, item, 0]) 22 | return np.array(res) 23 | 24 | def get_feed_dict_topk(train_entity_pairs): 25 | train_entity_pairs = torch.LongTensor(np.array([[cf[0], cf[1], cf[2]] for cf in train_entity_pairs], np.int32)) 26 | feed_dict = {} 27 | entity_pairs = train_entity_pairs.to(device) 28 | feed_dict['users'] = entity_pairs[:, 0] 29 | feed_dict['items'] = entity_pairs[:, 1] 30 | feed_dict['labels'] = entity_pairs[:, 2] 31 | return feed_dict 32 | 33 | def ranklist_by_heapq(user_pos_test, test_items, rating, Ks): 34 | item_score = {} 35 | for i in test_items: 36 | item_score[i] = rating[i] 37 | 38 | K_max = max(Ks) 39 | K_max_item_score = heapq.nlargest(K_max, item_score, key=item_score.get) 40 | 41 | r = [] 42 | for i in K_max_item_score: 43 | if i in user_pos_test: 44 | r.append(1) 45 | else: 46 | r.append(0) 47 | # auc = 0. 
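# `r` is the binary hit list over the top-K_max ranked candidates (1 if the item is in
# user_pos_test, 0 otherwise); the AUC below is computed over all test items via get_auc,
# which wraps sklearn's roc_auc_score.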
48 | auc = get_auc(item_score, user_pos_test) 49 | return r, auc 50 | 51 | def get_auc(item_score, user_pos_test): 52 | item_score = sorted(item_score.items(), key=lambda kv: kv[1]) 53 | item_score.reverse() 54 | item_sort = [x[0] for x in item_score] 55 | posterior = [x[1] for x in item_score] 56 | 57 | r = [] 58 | for i in item_sort: 59 | if i in user_pos_test: 60 | r.append(1) 61 | else: 62 | r.append(0) 63 | auc = AUC(ground_truth=r, prediction=posterior) 64 | return auc 65 | 66 | def ranklist_by_sorted(user_pos_test, test_items, rating, Ks): 67 | item_score = {} 68 | for i in test_items: 69 | item_score[i] = rating[i] 70 | 71 | K_max = max(Ks) 72 | K_max_item_score = heapq.nlargest(K_max, item_score, key=item_score.get) 73 | 74 | r = [] 75 | for i in K_max_item_score: 76 | if i in user_pos_test: 77 | r.append(1) 78 | else: 79 | r.append(0) 80 | auc = get_auc(item_score, user_pos_test) 81 | return r, auc 82 | 83 | def get_performance(user_pos_test, r, auc, Ks): 84 | precision, recall, ndcg, hit_ratio = [], [], [], [] 85 | 86 | for K in Ks: 87 | precision.append(precision_at_k(r, K)) 88 | recall.append(recall_at_k(r, K, len(user_pos_test))) 89 | ndcg.append(ndcg_at_k(r, K, user_pos_test)) 90 | hit_ratio.append(hit_at_k(r, K)) 91 | 92 | return {'recall': np.array(recall), 'precision': np.array(precision), 93 | 'ndcg': np.array(ndcg), 'hit_ratio': np.array(hit_ratio), 'auc': auc} 94 | 95 | 96 | def test_one_user(x): 97 | # user u's ratings for user u 98 | rating = x[0] 99 | # uid 100 | u = x[1] 101 | # user u's items in the training set 102 | try: 103 | training_items = train_user_set[u] 104 | except Exception: 105 | training_items = [] 106 | # user u's items in the test set 107 | user_pos_test = test_user_set[u] 108 | 109 | all_items = set(range(0, n_items)) 110 | 111 | test_items = list(all_items - set(training_items)) 112 | 113 | if args.test_flag == 'part': 114 | r, auc = ranklist_by_heapq(user_pos_test, test_items, rating, Ks) 115 | else: 116 | r, auc = ranklist_by_sorted(user_pos_test, test_items, rating, Ks) 117 | 118 | return get_performance(user_pos_test, r, auc, Ks) 119 | 120 | 121 | def test(model, user_dict, n_params): 122 | result = {'precision': np.zeros(len(Ks)), 123 | 'recall': np.zeros(len(Ks)), 124 | 'ndcg': np.zeros(len(Ks)), 125 | 'hit_ratio': np.zeros(len(Ks)), 126 | 'auc': 0.} 127 | 128 | global n_users, n_items 129 | n_items = n_params['n_items'] 130 | n_users = n_params['n_users'] 131 | 132 | global train_user_set, test_user_set 133 | train_user_set = user_dict['train_user_set'] 134 | test_user_set = user_dict['test_user_set'] 135 | 136 | pool = multiprocessing.Pool(cores) 137 | 138 | u_batch_size = BATCH_SIZE 139 | i_batch_size = BATCH_SIZE 140 | 141 | test_users = list(test_user_set.keys()) 142 | n_test_users = len(test_users) 143 | n_user_batchs = n_test_users // u_batch_size + 1 144 | count = 0 145 | entity_gcn_emb, user_gcn_emb = model.generate() 146 | 147 | for u_batch_id in range(n_user_batchs): 148 | start = u_batch_id * u_batch_size 149 | end = (u_batch_id + 1) * u_batch_size 150 | 151 | user_list_batch = test_users[start: end] 152 | user_batch = torch.LongTensor(np.array(user_list_batch)).to(device) 153 | u_g_embeddings = user_gcn_emb[user_batch] 154 | 155 | if batch_test_flag: 156 | # batch-item test 157 | n_item_batchs = n_items // i_batch_size + 1 158 | rate_batch = np.zeros(shape=(len(user_batch), n_items)) 159 | 160 | i_count = 0 161 | for i_batch_id in range(n_item_batchs): 162 | i_start = i_batch_id * i_batch_size 163 | i_end = min((i_batch_id + 1) 
* i_batch_size, n_items) 164 | 165 | item_batch = torch.LongTensor(np.array(range(i_start, i_end))).view(i_end - i_start).to(device) 166 | i_g_embeddings = entity_gcn_emb[item_batch] 167 | 168 | i_rate_batch = torch.matmul(u_g_embeddings, i_g_embeddings.t()).detach().cpu() 169 | 170 | rate_batch[:, i_start: i_end] = i_rate_batch 171 | i_count += i_rate_batch.shape[1] 172 | 173 | assert i_count == n_items 174 | else: 175 | # all-item test 176 | item_batch = torch.LongTensor(np.array(range(0, n_items))).view(n_items, -1).to(device) 177 | i_g_embddings = entity_gcn_emb[item_batch] 178 | rate_batch = model.rating(u_g_embeddings, i_g_embddings).detach().cpu() 179 | 180 | user_batch_rating_uid = zip(rate_batch, user_list_batch) 181 | batch_result = pool.map(test_one_user, user_batch_rating_uid) 182 | count += len(batch_result) 183 | 184 | for re in batch_result: 185 | result['precision'] += re['precision']/n_test_users 186 | result['recall'] += re['recall']/n_test_users 187 | result['ndcg'] += re['ndcg']/n_test_users 188 | result['hit_ratio'] += re['hit_ratio']/n_test_users 189 | result['auc'] += re['auc']/n_test_users 190 | assert count == n_test_users 191 | pool.close() 192 | return result 193 | -------------------------------------------------------------------------------- /utils/data_loader.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from tqdm import tqdm 3 | import networkx as nx 4 | import scipy.sparse as sp 5 | import os 6 | import random 7 | from time import time 8 | from collections import defaultdict 9 | import warnings 10 | warnings.filterwarnings('ignore') 11 | 12 | n_users = 0 13 | n_items = 0 14 | n_entities = 0 15 | n_relations = 0 16 | n_nodes = 0 17 | train_user_set = defaultdict(list) 18 | test_user_set = defaultdict(list) 19 | 20 | 21 | def read_cf(file_name): 22 | inter_mat = list() 23 | lines = open(file_name, "r").readlines() 24 | for l in lines: 25 | tmps = l.strip() 26 | inters = [int(i) for i in tmps.split(" ")] 27 | 28 | u_id, pos_ids = inters[0], inters[1:] 29 | pos_ids = list(set(pos_ids)) 30 | for i_id in pos_ids: 31 | inter_mat.append([u_id, i_id]) 32 | 33 | return np.array(inter_mat) 34 | 35 | def read_cf_new(): 36 | # reading rating file 37 | rating_file = 'data/' + args.dataset + '/ratings_final' 38 | if os.path.exists(rating_file + '.npy'): 39 | rating_np = np.load(rating_file + '.npy') 40 | else: 41 | rating_np = np.loadtxt(rating_file + '.txt', dtype=np.int64) 42 | np.save(rating_file + '.npy', rating_np) 43 | 44 | # rating_np_origin = rating_np 45 | # rating_np_label = rating_np.take([2], axis=1) 46 | # indix_click = np.where(rating_np_label == 1) 47 | # rating_np = rating_np.take(indix_click[0], axis=0) 48 | # rating_np = rating_np.take([0, 1], axis=1) 49 | 50 | test_ratio = 0.2 51 | n_ratings = rating_np.shape[0] 52 | eval_indices = np.random.choice(n_ratings, size=int(n_ratings * test_ratio), replace=False) 53 | left = set(range(n_ratings)) - set(eval_indices) 54 | # test_indices = np.random.choice(list(left), size=int(n_ratings * test_ratio), replace=False) 55 | train_indices = list(left) 56 | 57 | train_data = rating_np[train_indices] 58 | eval_data = rating_np[eval_indices] 59 | # test_data = rating_np[test_indices] 60 | 61 | train_rating = rating_np[train_indices] 62 | ui_adj = generate_ui_adj(rating_np, train_rating) 63 | return train_data, eval_data, ui_adj 64 | 65 | def generate_ui_adj(rating, train_rating): 66 | #ui_adj = sp.dok_matrix((n_user, n_item), dtype=np.float32) 67 | 
n_user, n_item = len(set(rating[:, 0])), len(set(rating[:, 1])) 68 | ui_adj_orign = sp.coo_matrix( 69 | (train_rating[:, 2], (train_rating[:, 0], train_rating[:, 1])), shape=(n_user, n_item)).todok() 70 | 71 | # ui_adj = sp.dok_matrix((n_user+n_item, n_user+n_item), dtype=np.float32) 72 | # ui_adj[:n_user, n_user:] = ui_adj_orign 73 | # ui_adj[n_user:, :n_user] = ui_adj_orign.T 74 | ui_adj = sp.bmat([[None, ui_adj_orign], 75 | [ui_adj_orign.T, None]], dtype=np.float32) 76 | ui_adj = ui_adj.todok() 77 | print('already create user-item adjacency matrix', ui_adj.shape) 78 | return ui_adj 79 | 80 | def remap_item(train_data, eval_data): 81 | global n_users, n_items 82 | n_users = max(max(train_data[:, 0]), max(eval_data[:, 0])) + 1 83 | n_items = max(max(train_data[:, 1]), max(eval_data[:, 1])) + 1 84 | 85 | eval_data_label = eval_data.take([2], axis=1) 86 | indix_click = np.where(eval_data_label == 1) 87 | eval_data = eval_data.take(indix_click[0], axis=0) 88 | 89 | eval_data = eval_data.take([0, 1], axis=1) 90 | train_data = train_data.take([0, 1], axis=1) 91 | for u_id, i_id in train_data: 92 | train_user_set[int(u_id)].append(int(i_id)) 93 | for u_id, i_id in eval_data: 94 | test_user_set[int(u_id)].append(int(i_id)) 95 | 96 | 97 | def read_triplets(file_name): 98 | global n_entities, n_relations, n_nodes 99 | 100 | can_triplets_np = np.loadtxt(file_name, dtype=np.int32) 101 | can_triplets_np = np.unique(can_triplets_np, axis=0) 102 | 103 | if args.inverse_r: 104 | # get triplets with inverse direction like 105 | inv_triplets_np = can_triplets_np.copy() 106 | inv_triplets_np[:, 0] = can_triplets_np[:, 2] 107 | inv_triplets_np[:, 2] = can_triplets_np[:, 0] 108 | inv_triplets_np[:, 1] = can_triplets_np[:, 1] + max(can_triplets_np[:, 1]) + 1 109 | # consider two additional relations --- 'interact' and 'be interacted' 110 | can_triplets_np[:, 1] = can_triplets_np[:, 1] + 1 111 | inv_triplets_np[:, 1] = inv_triplets_np[:, 1] + 1 112 | # get full version of knowledge graph 113 | triplets = np.concatenate((can_triplets_np, inv_triplets_np), axis=0) 114 | else: 115 | # consider two additional relations --- 'interact'. 116 | can_triplets_np[:, 1] = can_triplets_np[:, 1] + 1 117 | triplets = can_triplets_np.copy() 118 | 119 | n_entities = max(max(triplets[:, 0]), max(triplets[:, 2])) + 1 # including items + users 120 | n_nodes = n_entities + n_users 121 | n_relations = max(triplets[:, 1]) + 1 122 | 123 | return triplets 124 | 125 | 126 | def build_graph(train_data, triplets): 127 | ckg_graph = nx.MultiDiGraph() 128 | rd = defaultdict(list) 129 | train_data = train_data.take([0, 1], axis=1) 130 | print("Begin to load interaction triples ...") 131 | for u_id, i_id in tqdm(train_data, ascii=True): 132 | rd[0].append([u_id, i_id]) 133 | 134 | print("\nBegin to load knowledge graph triples ...") 135 | for h_id, r_id, t_id in tqdm(triplets, ascii=True): 136 | ckg_graph.add_edge(h_id, t_id, key=r_id) 137 | rd[r_id].append([h_id, t_id]) 138 | 139 | return ckg_graph, rd 140 | 141 | 142 | def build_sparse_relational_graph(relation_dict): 143 | def _bi_norm_lap(adj): 144 | # D^{-1/2}AD^{-1/2} 145 | rowsum = np.array(adj.sum(1)) 146 | 147 | d_inv_sqrt = np.power(rowsum, -0.5).flatten() 148 | d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0. 
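# Symmetric (bi-)normalization D^{-1/2} A D^{-1/2}: zero-degree rows yield inf in
# d_inv_sqrt and are zeroed out above, so isolated nodes contribute nothing.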
149 | d_mat_inv_sqrt = sp.diags(d_inv_sqrt) 150 | 151 | # bi_lap = adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt) 152 | bi_lap = d_mat_inv_sqrt.dot(adj).dot(d_mat_inv_sqrt) 153 | return bi_lap.tocoo() 154 | 155 | def _si_norm_lap(adj): 156 | # D^{-1}A 157 | rowsum = np.array(adj.sum(1)) 158 | 159 | d_inv = np.power(rowsum, -1).flatten() 160 | d_inv[np.isinf(d_inv)] = 0. 161 | d_mat_inv = sp.diags(d_inv) 162 | 163 | norm_adj = d_mat_inv.dot(adj) 164 | return norm_adj.tocoo() 165 | 166 | adj_mat_list = [] 167 | print("Begin to build sparse relation matrix ...") 168 | for r_id in tqdm(relation_dict.keys()): 169 | np_mat = np.array(relation_dict[r_id]) 170 | if r_id == 0: 171 | cf = np_mat.copy() 172 | cf[:, 1] = cf[:, 1] + n_users # [0, n_items) -> [n_users, n_users+n_items) 173 | vals = [1.] * len(cf) 174 | adj = sp.coo_matrix((vals, (cf[:, 0], cf[:, 1])), shape=(n_nodes, n_nodes)) 175 | else: 176 | vals = [1.] * len(np_mat) 177 | adj = sp.coo_matrix((vals, (np_mat[:, 0], np_mat[:, 1])), shape=(n_nodes, n_nodes)) 178 | adj_mat_list.append(adj) 179 | 180 | norm_mat_list = [_bi_norm_lap(mat) for mat in adj_mat_list] 181 | mean_mat_list = [_si_norm_lap(mat) for mat in adj_mat_list] 182 | # interaction: user->item, [n_users, n_entities] 183 | norm_mat_list[0] = norm_mat_list[0].tocsr()[:n_users, n_users:].tocoo() 184 | # norm_mat_list[0] = norm_mat_list[0].tocoo() 185 | mean_mat_list[0] = mean_mat_list[0].tocsr()[:n_users, n_users:].tocoo() 186 | 187 | return adj_mat_list, norm_mat_list, mean_mat_list 188 | 189 | def load_data(model_args): 190 | global args 191 | args = model_args 192 | directory = args.data_path + args.dataset + '/' 193 | 194 | print('reading train and test user-item set ...') 195 | # train_cf = read_cf(directory + 'train.txt') 196 | # test_cf = read_cf(directory + 'test.txt') 197 | train_cf, eval_cf, ui_adj = read_cf_new() 198 | remap_item(train_cf, eval_cf) 199 | 200 | print('combinating train_cf and kg data ...') 201 | triplets = read_triplets(directory + 'kg_final.txt') 202 | 203 | print('building the graph ...') 204 | graph, relation_dict = build_graph(train_cf, triplets) 205 | 206 | print('building the adj mat ...') 207 | adj_mat_list, norm_mat_list, mean_mat_list = build_sparse_relational_graph(relation_dict) 208 | 209 | n_params = { 210 | 'n_users': int(n_users), 211 | 'n_items': int(n_items), 212 | 'n_entities': int(n_entities), 213 | 'n_nodes': int(n_nodes), 214 | 'n_relations': int(n_relations) 215 | } 216 | user_dict = { 217 | 'train_user_set': train_user_set, 218 | 'test_user_set': test_user_set 219 | } 220 | return train_cf, eval_cf, user_dict, n_params, graph, \ 221 | [adj_mat_list, norm_mat_list, mean_mat_list] 222 | 223 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | 2 | # For any questions, don't hesitate to contact me: Ding Zou (m202173662@hust.edu.cn) 3 | 4 | import random 5 | import torch 6 | import numpy as np 7 | from sklearn.metrics import roc_auc_score, f1_score 8 | from time import time 9 | from prettytable import PrettyTable 10 | import logging 11 | from utils.parser import parse_args 12 | from utils.data_loader import load_data 13 | from modules.MCCLK import Recommender 14 | from utils.evaluate import test 15 | from utils.helper import early_stopping 16 | 17 | import logging 18 | n_users = 0 19 | n_items = 0 20 | n_entities = 0 21 | n_nodes = 0 22 | n_relations = 0 23 | 24 | 25 | def 
get_feed_dict(train_entity_pairs, start, end): 26 | train_entity_pairs = torch.LongTensor(np.array([[cf[0], cf[1], cf[2]] for cf in train_entity_pairs], np.int32)) 27 | feed_dict = {} 28 | entity_pairs = train_entity_pairs[start:end].to(device) 29 | feed_dict['users'] = entity_pairs[:, 0] 30 | feed_dict['items'] = entity_pairs[:, 1] 31 | feed_dict['labels'] = entity_pairs[:, 2] 32 | return feed_dict 33 | 34 | def get_feed_dict_topk(train_entity_pairs, start, end): 35 | train_entity_pairs = torch.LongTensor(np.array([[cf[0], cf[1], cf[2]] for cf in train_entity_pairs], np.int32)) 36 | feed_dict = {} 37 | entity_pairs = train_entity_pairs[start:end].to(device) 38 | feed_dict['users'] = entity_pairs[:, 0] 39 | feed_dict['items'] = entity_pairs[:, 1] 40 | feed_dict['labels'] = entity_pairs[:, 2] 41 | return feed_dict 42 | # def negative_sampling(user_item, train_user_set): 43 | # neg_items = [] 44 | # for user, _ in user_item.cpu().numpy(): 45 | # user = int(user) 46 | # while True: 47 | # neg_item = np.random.randint(low=0, high=n_items, size=1)[0] 48 | # if neg_item not in train_user_set[user]: 49 | # break 50 | # neg_items.append(neg_item) 51 | # return neg_items 52 | # 53 | # feed_dict = {} 54 | # entity_pairs = train_entity_pairs[start:end].to(device) 55 | # feed_dict['users'] = entity_pairs[:, 0] 56 | # feed_dict['pos_items'] = entity_pairs[:, 1] 57 | # feed_dict['neg_items'] = torch.LongTensor(negative_sampling(entity_pairs, 58 | # train_user_set)).to(device) 59 | 60 | def _show_recall_info(recall_zip): 61 | res = "" 62 | for i, j in recall_zip: 63 | res += "K@%d:%.4f "%(i,j) 64 | logging.info(res) 65 | 66 | def _get_topk_feed_data(user, items): 67 | res = list() 68 | for item in items: 69 | res.append([user, item, 0]) 70 | return np.array(res) 71 | 72 | def _get_user_record(data, is_train): 73 | user_history_dict = dict() 74 | for rating in data: 75 | user = rating[0] 76 | item = rating[1] 77 | label = rating[2] 78 | if is_train or label == 1: 79 | if user not in user_history_dict: 80 | user_history_dict[user] = set() 81 | user_history_dict[user].add(item) 82 | return user_history_dict 83 | 84 | def ctr_eval(model, data): 85 | auc_list = [] 86 | f1_list = [] 87 | model.eval() 88 | start = 0 89 | while start < data.shape[0]: 90 | 91 | batch = get_feed_dict(data, start, start + args.batch_size) 92 | labels = data[start:start + args.batch_size, 2] 93 | _, scores, _, _ = model(batch) 94 | scores = scores.detach().cpu().numpy() 95 | auc = roc_auc_score(y_true=labels, y_score=scores) 96 | predictions = [1 if i >= 0.5 else 0 for i in scores] 97 | f1 = f1_score(y_true=labels, y_pred=predictions) 98 | auc_list.append(auc) 99 | f1_list.append(f1) 100 | start += args.batch_size 101 | model.train() 102 | auc = float(np.mean(auc_list)) 103 | f1 = float(np.mean(f1_list)) 104 | return auc, f1 105 | 106 | def topk_eval(model, train_data, data): 107 | # logging.info('calculating recall ...') 108 | k_list = [5, 10, 20, 50, 100] 109 | recall_list = {k: [] for k in k_list} 110 | item_set = set(train_data[:, 1].tolist() + data[:, 1].tolist()) 111 | train_record = _get_user_record(train_data, True) 112 | test_record = _get_user_record(data, False) 113 | user_list = list(set(train_record.keys()) & set(test_record.keys())) 114 | user_num = 100 115 | if len(user_list) > user_num: 116 | np.random.seed() 117 | user_list = np.random.choice(user_list, size=user_num, replace=False) 118 | 119 | model.eval() 120 | for user in user_list: 121 | test_item_list = list(item_set-set(train_record[user])) 122 | 
item_score_map = dict() 123 | start = 0 124 | while start + args.batch_size <= len(test_item_list): 125 | items = test_item_list[start:start + args.batch_size] 126 | input_data = _get_topk_feed_data(user, items) 127 | batch = get_feed_dict_topk(input_data, start, start + args.batch_size) 128 | _, scores, _, _ = model(batch) 129 | for item, score in zip(items, scores): 130 | item_score_map[item] = score 131 | start += args.batch_size 132 | # padding the last incomplete mini-batch if exists 133 | if start < len(test_item_list): 134 | res_items = test_item_list[start:] + [test_item_list[-1]] * (args.batch_size - len(test_item_list) + start) 135 | input_data = _get_topk_feed_data(user, res_items) 136 | batch = get_feed_dict_topk(input_data, start, start + args.batch_size) 137 | _, scores, _, _ = model(batch) 138 | for item, score in zip(res_items, scores): 139 | item_score_map[item] = score 140 | item_score_pair_sorted = sorted(item_score_map.items(), key=lambda x: x[1], reverse=True) 141 | item_sorted = [i[0] for i in item_score_pair_sorted] 142 | for k in k_list: 143 | hit_num = len(set(item_sorted[:k]) & set(test_record[user])) 144 | recall_list[k].append(hit_num / len(set(test_record[user]))) 145 | model.train() 146 | 147 | recall = [np.mean(recall_list[k]) for k in k_list] 148 | return recall 149 | # _show_recall_info(zip(k_list, recall)) 150 | 151 | if __name__ == '__main__': 152 | """fix the random seed""" 153 | seed = 2020 154 | random.seed(seed) 155 | np.random.seed(seed) 156 | torch.manual_seed(seed) 157 | torch.cuda.manual_seed_all(seed) 158 | torch.backends.cudnn.deterministic = True 159 | torch.backends.cudnn.benchmark = False 160 | 161 | """read args""" 162 | global args, device 163 | args = parse_args() 164 | device = torch.device("cuda:"+str(args.gpu_id)) if args.cuda else torch.device("cpu") 165 | 166 | """build dataset""" 167 | train_cf, test_cf, user_dict, n_params, graph, mat_list = load_data(args) 168 | adj_mat_list, norm_mat_list, mean_mat_list = mat_list 169 | 170 | n_users = n_params['n_users'] 171 | n_items = n_params['n_items'] 172 | n_entities = n_params['n_entities'] 173 | n_relations = n_params['n_relations'] 174 | n_nodes = n_params['n_nodes'] 175 | 176 | """cf data""" 177 | # train_cf_pairs = torch.LongTensor(np.array([[cf[0], cf[1], cf[2]] for cf in train_cf], np.int32)) 178 | # eval_cf_pairs = torch.LongTensor(np.array([[cf[0], cf[1], cf[2]] for cf in eval_cf], np.int32)) 179 | # LongTensor 180 | # labels = torch.FloatTensor(np.array(cf[2] for cf in train_cf)) 181 | test_cf_pairs = torch.LongTensor(np.array([[cf[0], cf[1], cf[2]] for cf in test_cf], np.int32)) 182 | 183 | """define model""" 184 | model = Recommender(n_params, args, graph, mean_mat_list[0]).to(device) 185 | 186 | """define optimizer""" 187 | optimizer = torch.optim.Adam(model.parameters(), lr=args.lr) 188 | 189 | cur_best_pre_0 = 0 190 | stopping_step = 0 191 | should_stop = False 192 | 193 | print("start training ...") 194 | for epoch in range(args.epoch): 195 | """training CF""" 196 | # shuffle training data 197 | index = np.arange(len(train_cf)) 198 | np.random.shuffle(index) 199 | train_cf = train_cf[index] 200 | 201 | """training""" 202 | loss, s, cor_loss = 0, 0, 0 203 | train_s_t = time() 204 | while s + args.batch_size <= len(train_cf): 205 | batch = get_feed_dict(train_cf, s, s + args.batch_size) 206 | batch_loss, _, _, _ = model(batch) 207 | # batch_loss = batch_loss 208 | optimizer.zero_grad() 209 | batch_loss.backward() 210 | optimizer.step() 211 | 212 | loss += batch_loss 213 | # 
cor_loss += batch_cor 214 | s += args.batch_size 215 | 216 | train_e_t = time() 217 | # tsne_plot(model.all_embed, epoch) 218 | # if epoch % 10 == 9 or epoch == 1: 219 | if 1: 220 | """testing""" 221 | test_s_t = time() 222 | # ret = test(model, user_dict, n_params) 223 | test_auc, test_f1 = ctr_eval(model, test_cf_pairs) 224 | 225 | test_e_t = time() 226 | # ctr_info = 'epoch %.2d test auc: %.4f f1: %.4f' 227 | # logging.info(ctr_info, epoch, test_auc, test_f1) 228 | train_res = PrettyTable() 229 | # train_res.field_names = ["Epoch", "training time", "tesing time", "Loss", "recall", "ndcg", "precision", 230 | # "hit_ratio", "auc"] 231 | # train_res.add_row( 232 | # [epoch, train_e_t - train_s_t, test_e_t - test_s_t, loss.item(), ret['recall'], ret['ndcg'], 233 | # ret['precision'], ret['hit_ratio'], ret['auc']] 234 | # ) 235 | train_res.field_names = ["Epoch", "training time", "tesing time", "Loss", "test auc", "test f1"] 236 | train_res.add_row( 237 | [epoch, train_e_t - train_s_t, test_e_t - test_s_t, loss.item(), test_auc, test_f1] 238 | ) 239 | # train_res.field_names = ["Recall@5", "Recall@10", "Recall@20", "Recall@50", "Recall@100"] 240 | # train_res.add_row([recall[0], recall[1], recall[2], recall[3], recall[4]]) 241 | print(train_res) 242 | print('early stopping at %d, test_auc:%.4f' % (epoch-30, cur_best_pre_0)) 243 | -------------------------------------------------------------------------------- /modules/MCCLK.py: -------------------------------------------------------------------------------- 1 | 2 | import random 3 | import numpy as np 4 | import torch 5 | import torch.nn as nn 6 | import torch.nn.functional as F 7 | from torch_scatter import scatter_mean, scatter_softmax, scatter_sum 8 | 9 | class Aggregator(nn.Module): 10 | def __init__(self, n_users): 11 | super(Aggregator, self).__init__() 12 | self.n_users = n_users 13 | 14 | def forward(self, entity_emb, user_emb, 15 | edge_index, edge_type, interact_mat, 16 | weight): 17 | 18 | n_entities = entity_emb.shape[0] 19 | 20 | """KG aggregate""" 21 | head, tail = edge_index 22 | edge_relation_emb = weight[edge_type - 1] # exclude interact, remap [1, n_relations) to [0, n_relations-1) 23 | neigh_relation_emb = entity_emb[tail] * edge_relation_emb # [-1, channel] 24 | 25 | # ------------calculate attention weights --------------- 26 | neigh_relation_emb_weight = self.calculate_sim_hrt(entity_emb[head], entity_emb[tail], weight[edge_type - 1]) 27 | neigh_relation_emb_weight = neigh_relation_emb_weight.expand(neigh_relation_emb.shape[0], 28 | neigh_relation_emb.shape[1]) 29 | # neigh_relation_emb_tmp = torch.matmul(neigh_relation_emb_weight, neigh_relation_emb) 30 | neigh_relation_emb_weight = scatter_softmax(neigh_relation_emb_weight, index=head, dim=0) 31 | neigh_relation_emb = torch.mul(neigh_relation_emb_weight, neigh_relation_emb) 32 | entity_agg = scatter_sum(src=neigh_relation_emb, index=head, dim_size=n_entities, dim=0) 33 | 34 | user_agg = torch.sparse.mm(interact_mat, entity_emb) 35 | # user_agg = user_agg + user_emb * user_agg 36 | score = torch.mm(user_emb, weight.t()) 37 | score = torch.softmax(score, dim=-1) 38 | user_agg = user_agg + (torch.mm(score, weight)) * user_agg 39 | 40 | return entity_agg, user_agg 41 | 42 | def calculate_sim_hrt(self, entity_emb_head, entity_emb_tail, relation_emb): 43 | 44 | tail_relation_emb = entity_emb_tail * relation_emb 45 | tail_relation_emb = tail_relation_emb.norm(dim=1, p=2, keepdim=True) 46 | head_relation_emb = entity_emb_head * relation_emb 47 | head_relation_emb = 
head_relation_emb.norm(dim=1, p=2, keepdim=True) 48 | att_weights = torch.matmul(head_relation_emb.unsqueeze(dim=1), tail_relation_emb.unsqueeze(dim=2)).squeeze(dim=-1) 49 | att_weights = att_weights ** 2 50 | return att_weights 51 | 52 | class GraphConv(nn.Module): 53 | """ 54 | Graph Convolutional Network 55 | """ 56 | def __init__(self, channel, n_hops, n_users, 57 | n_relations, interact_mat, 58 | ind, node_dropout_rate=0.5, mess_dropout_rate=0.1): 59 | super(GraphConv, self).__init__() 60 | 61 | self.convs = nn.ModuleList() 62 | self.interact_mat = interact_mat 63 | self.n_relations = n_relations 64 | self.n_users = n_users 65 | self.node_dropout_rate = node_dropout_rate 66 | self.mess_dropout_rate = mess_dropout_rate 67 | self.ind = ind 68 | self.topk = 10 69 | self.lambda_coeff = 0.5 70 | self.temperature = 0.2 71 | self.device = torch.device("cuda:" + str(0)) 72 | initializer = nn.init.xavier_uniform_ 73 | weight = initializer(torch.empty(n_relations - 1, channel)) 74 | self.weight = nn.Parameter(weight) # [n_relations - 1, in_channel] 75 | 76 | for i in range(n_hops): 77 | self.convs.append(Aggregator(n_users=n_users)) 78 | 79 | self.dropout = nn.Dropout(p=mess_dropout_rate) # mess dropout 80 | 81 | def _edge_sampling(self, edge_index, edge_type, rate=0.5): 82 | # edge_index: [2, -1] 83 | # edge_type: [-1] 84 | n_edges = edge_index.shape[1] 85 | random_indices = np.random.choice(n_edges, size=int(n_edges * rate), replace=False) 86 | return edge_index[:, random_indices], edge_type[random_indices] 87 | 88 | def _sparse_dropout(self, x, rate=0.5): 89 | noise_shape = x._nnz() 90 | 91 | random_tensor = rate 92 | random_tensor += torch.rand(noise_shape).to(x.device) 93 | dropout_mask = torch.floor(random_tensor).type(torch.bool) 94 | i = x._indices() 95 | v = x._values() 96 | 97 | i = i[:, dropout_mask] 98 | v = v[dropout_mask] 99 | 100 | out = torch.sparse.FloatTensor(i, v, x.shape).to(x.device) 101 | return out * (1. 
/ (1 - rate)) 102 | 103 | def _convert_sp_mat_to_sp_tensor(self, X): 104 | coo = X.tocoo() 105 | i = torch.LongTensor([coo.row, coo.col]) 106 | v = torch.from_numpy(coo.data).float() 107 | return torch.sparse.FloatTensor(i, v, coo.shape) 108 | 109 | def forward(self, user_emb, entity_emb, edge_index, edge_type, 110 | interact_mat, mess_dropout=True, node_dropout=False): 111 | 112 | """node dropout""" 113 | if node_dropout: 114 | edge_index, edge_type = self._edge_sampling(edge_index, edge_type, self.node_dropout_rate) 115 | interact_mat = self._sparse_dropout(interact_mat, self.node_dropout_rate) 116 | # ----------------build item-item graph------------------- 117 | origin_item_adj = self.build_adj(entity_emb, self.topk) 118 | 119 | entity_res_emb = entity_emb # [n_entity, channel] 120 | user_res_emb = user_emb # [n_users, channel] 121 | for i in range(len(self.convs)): 122 | entity_emb, user_emb = self.convs[i](entity_emb, user_emb, 123 | edge_index, edge_type, interact_mat, 124 | self.weight) 125 | """message dropout""" 126 | if mess_dropout: 127 | entity_emb = self.dropout(entity_emb) 128 | user_emb = self.dropout(user_emb) 129 | entity_emb = F.normalize(entity_emb) 130 | user_emb = F.normalize(user_emb) 131 | """result emb""" 132 | entity_res_emb = torch.add(entity_res_emb, entity_emb) 133 | user_res_emb = torch.add(user_res_emb, user_emb) 134 | 135 | # update item-item graph 136 | item_adj = (1 - self.lambda_coeff) * self.build_adj(entity_res_emb, 137 | self.topk) + self.lambda_coeff * origin_item_adj 138 | 139 | return entity_res_emb, user_res_emb, item_adj 140 | 141 | def build_adj(self, context, topk): 142 | # construct similarity adj matrix 143 | n_entity = context.shape[0] 144 | context_norm = context.div(torch.norm(context, p=2, dim=-1, keepdim=True)).cpu() 145 | sim = torch.mm(context_norm, context_norm.transpose(1, 0)) 146 | knn_val, knn_ind = torch.topk(sim, topk, dim=-1) 147 | # adj_matrix = (torch.zeros_like(sim)).scatter_(-1, knn_ind, knn_val) 148 | knn_val, knn_ind = knn_val.to(self.device), knn_ind.to(self.device) 149 | 150 | y = knn_ind.reshape(-1) 151 | x = torch.arange(0, n_entity).unsqueeze(dim=-1).to(self.device) 152 | x = x.expand(n_entity, topk).reshape(-1) 153 | indice = torch.cat((x.unsqueeze(dim=0), y.unsqueeze(dim=0)), dim=0) 154 | value = knn_val.reshape(-1) 155 | adj_sparsity = torch.sparse.FloatTensor(indice.data, value.data, torch.Size([n_entity, n_entity])).to(self.device) 156 | 157 | # normalized laplacian adj 158 | rowsum = torch.sparse.sum(adj_sparsity, dim=1) 159 | d_inv_sqrt = torch.pow(rowsum, -0.5) 160 | d_mat_inv_sqrt_value = d_inv_sqrt._values() 161 | x = torch.arange(0, n_entity).unsqueeze(dim=0).to(self.device) 162 | x = x.expand(2, n_entity) 163 | d_mat_inv_sqrt_indice = x 164 | d_mat_inv_sqrt = torch.sparse.FloatTensor(d_mat_inv_sqrt_indice, d_mat_inv_sqrt_value, torch.Size([n_entity, n_entity])) 165 | L_norm = torch.sparse.mm(torch.sparse.mm(d_mat_inv_sqrt, adj_sparsity), d_mat_inv_sqrt) 166 | return L_norm 167 | 168 | 169 | 170 | class Recommender(nn.Module): 171 | def __init__(self, data_config, args_config, graph, adj_mat): 172 | super(Recommender, self).__init__() 173 | 174 | self.n_users = data_config['n_users'] 175 | self.n_items = data_config['n_items'] 176 | self.n_relations = data_config['n_relations'] 177 | self.n_entities = data_config['n_entities'] # include items 178 | self.n_nodes = data_config['n_nodes'] # n_users + n_entities 179 | 180 | self.decay = args_config.l2 181 | self.sim_decay = args_config.sim_regularity 182 | 
self.emb_size = args_config.dim 183 | self.context_hops = args_config.context_hops 184 | self.node_dropout = args_config.node_dropout 185 | self.node_dropout_rate = args_config.node_dropout_rate 186 | self.mess_dropout = args_config.mess_dropout 187 | self.mess_dropout_rate = args_config.mess_dropout_rate 188 | self.ind = args_config.ind 189 | self.device = torch.device("cuda:" + str(args_config.gpu_id)) if args_config.cuda \ 190 | else torch.device("cpu") 191 | 192 | self.adj_mat = adj_mat 193 | self.graph = graph 194 | self.edge_index, self.edge_type = self._get_edges(graph) 195 | self._init_weight() 196 | self.all_embed = nn.Parameter(self.all_embed) 197 | self.gcn = self._init_model() 198 | self.lightgcn_layer = 2 199 | self.n_item_layer = 1 200 | self.alpha = 0.2 201 | self.fc1 = nn.Sequential( 202 | nn.Linear(self.emb_size, self.emb_size, bias=True), 203 | nn.ReLU(), 204 | nn.Linear(self.emb_size, self.emb_size, bias=True), 205 | ) 206 | self.fc2 = nn.Sequential( 207 | nn.Linear(self.emb_size, self.emb_size, bias=True), 208 | nn.ReLU(), 209 | nn.Linear(self.emb_size, self.emb_size, bias=True), 210 | ) 211 | self.fc3 = nn.Sequential( 212 | nn.Linear(self.emb_size, self.emb_size, bias=True), 213 | nn.ReLU(), 214 | nn.Linear(self.emb_size, self.emb_size, bias=True), 215 | ) 216 | 217 | def _init_weight(self): 218 | initializer = nn.init.xavier_uniform_ 219 | self.all_embed = initializer(torch.empty(self.n_nodes, self.emb_size)) 220 | self.interact_mat = self._convert_sp_mat_to_sp_tensor(self.adj_mat).to(self.device) 221 | 222 | def _init_model(self): 223 | return GraphConv(channel=self.emb_size, 224 | n_hops=self.context_hops, 225 | n_users=self.n_users, 226 | n_relations=self.n_relations, 227 | interact_mat=self.interact_mat, 228 | ind=self.ind, 229 | node_dropout_rate=self.node_dropout_rate, 230 | mess_dropout_rate=self.mess_dropout_rate) 231 | 232 | def _convert_sp_mat_to_sp_tensor(self, X): 233 | coo = X.tocoo() 234 | i = torch.LongTensor([coo.row, coo.col]) 235 | v = torch.from_numpy(coo.data).float() 236 | return torch.sparse.FloatTensor(i, v, coo.shape) 237 | 238 | def _get_indices(self, X): 239 | coo = X.tocoo() 240 | return torch.LongTensor([coo.row, coo.col]).t() # [-1, 2] 241 | 242 | def _get_edges(self, graph): 243 | graph_tensor = torch.tensor(list(graph.edges)) # [-1, 3] 244 | index = graph_tensor[:, :-1] # [-1, 2] 245 | type = graph_tensor[:, -1] # [-1, 1] 246 | return index.t().long().to(self.device), type.long().to(self.device) 247 | 248 | def forward( 249 | self, 250 | batch=None, 251 | ): 252 | user = batch['users'] 253 | item = batch['items'] 254 | labels = batch['labels'] 255 | user_emb = self.all_embed[:self.n_users, :] 256 | item_emb = self.all_embed[self.n_users:, :] 257 | entity_gcn_emb, user_gcn_emb, item_adj = self.gcn(user_emb, 258 | item_emb, 259 | self.edge_index, 260 | self.edge_type, 261 | self.interact_mat, 262 | mess_dropout=self.mess_dropout, 263 | node_dropout=self.node_dropout) 264 | u_e = user_gcn_emb[user] 265 | i_e = entity_gcn_emb[item] 266 | i_h = item_emb 267 | for i in range(self.n_item_layer): 268 | i_h = torch.sparse.mm(item_adj, i_h) 269 | i_h = F.normalize(i_h, p=2, dim=1) 270 | i_e_1 = i_h[item] 271 | 272 | interact_mat_new = self.interact_mat 273 | indice_old = interact_mat_new._indices() 274 | value_old = interact_mat_new._values() 275 | x = indice_old[0, :] 276 | y = indice_old[1, :] 277 | x_A = x 278 | y_A = y + self.n_users 279 | x_A_T = y + self.n_users 280 | y_A_T = x 281 | x_new = torch.cat((x_A, x_A_T), dim=-1) 282 | y_new = 
torch.cat((y_A, y_A_T), dim=-1) 283 | indice_new = torch.cat((x_new.unsqueeze(dim=0), y_new.unsqueeze(dim=0)), dim=0) 284 | value_new = torch.cat((value_old, value_old), dim=-1) 285 | interact_graph = torch.sparse.FloatTensor(indice_new, value_new, torch.Size([self.n_users + self.n_entities, self.n_users + self.n_entities])) 286 | user_lightgcn_emb, item_lightgcn_emb = self.light_gcn(user_emb, item_emb, interact_graph) 287 | u_e_2 = user_lightgcn_emb[user] 288 | i_e_2 = item_lightgcn_emb[item] 289 | 290 | # # loss_contrast = 0 291 | # loss_contrast = self.alpha * self.calculate_loss(i_e_1, i_e_2) 292 | # # i_e_1 = i_e_1 + i_e_2 293 | # loss_contrast = loss_contrast + ((1-self.alpha)/2)*self.calculate_loss_1(i_e_2, i_e) 294 | # loss_contrast = loss_contrast + ((1-self.alpha)/2)*self.calculate_loss_2(u_e_2, u_e) 295 | # 296 | # u_e = torch.cat((u_e, u_e), dim=-1) 297 | # i_e = torch.cat((i_e, i_e_1), dim=-1) 298 | # i_e_1 = i_e_1 + i_e_2 299 | item_1 = item_emb[item] 300 | user_1 = user_emb[user] 301 | loss_contrast = self.calculate_loss(i_e_1, i_e_2) 302 | loss_contrast = loss_contrast + self.calculate_loss_1(item_1, i_e_2) 303 | loss_contrast = loss_contrast + self.calculate_loss_2(user_1, u_e_2) 304 | 305 | u_e = torch.cat((u_e, u_e_2, u_e_2), dim=-1) 306 | i_e = torch.cat((i_e, i_e_1, i_e_2), dim=-1) 307 | 308 | return self.create_bpr_loss(u_e, i_e, labels, loss_contrast) 309 | 310 | def sim(self, z1: torch.Tensor, z2: torch.Tensor): 311 | z1 = F.normalize(z1) 312 | z2 = F.normalize(z2) 313 | return torch.mm(z1, z2.t()) 314 | 315 | def calculate_loss(self, A_embedding, B_embedding): 316 | # first calculate the sim rec 317 | tau = 0.6 # default = 0.8 318 | f = lambda x: torch.exp(x / tau) 319 | A_embedding = self.fc1(A_embedding) 320 | B_embedding = self.fc1(B_embedding) 321 | refl_sim = f(self.sim(A_embedding, A_embedding)) 322 | between_sim = f(self.sim(A_embedding, B_embedding)) 323 | 324 | loss_1 = -torch.log( 325 | between_sim.diag() 326 | / (refl_sim.sum(1) + between_sim.sum(1) - refl_sim.diag())) 327 | # refl_sim_1 = f(self.sim(B_embedding, B_embedding)) 328 | # between_sim_1 = f(self.sim(B_embedding, A_embedding)) 329 | # loss_2 = -torch.log( 330 | # between_sim_1.diag() 331 | # / (refl_sim_1.sum(1) + between_sim_1.sum(1) - refl_sim_1.diag())) 332 | # ret = (loss_1 + loss_2) * 0.5 333 | ret = loss_1 334 | ret = ret.mean() 335 | return ret 336 | 337 | def calculate_loss_1(self, A_embedding, B_embedding): 338 | # first calculate the sim rec 339 | tau = 0.6 # default = 0.8 340 | f = lambda x: torch.exp(x / tau) 341 | A_embedding = self.fc2(A_embedding) 342 | B_embedding = self.fc2(B_embedding) 343 | refl_sim = f(self.sim(A_embedding, A_embedding)) 344 | between_sim = f(self.sim(A_embedding, B_embedding)) 345 | 346 | loss_1 = -torch.log( 347 | between_sim.diag() 348 | / (refl_sim.sum(1) + between_sim.sum(1) - refl_sim.diag())) 349 | refl_sim_1 = f(self.sim(B_embedding, B_embedding)) 350 | between_sim_1 = f(self.sim(B_embedding, A_embedding)) 351 | loss_2 = -torch.log( 352 | between_sim_1.diag() 353 | / (refl_sim_1.sum(1) + between_sim_1.sum(1) - refl_sim_1.diag())) 354 | ret = (loss_1 + loss_2) * 0.5 355 | ret = ret.mean() 356 | return ret 357 | 358 | def calculate_loss_2(self, A_embedding, B_embedding): 359 | # first calculate the sim rec 360 | tau = 0.6 # default = 0.8 361 | f = lambda x: torch.exp(x / tau) 362 | A_embedding = self.fc3(A_embedding) 363 | B_embedding = self.fc3(B_embedding) 364 | refl_sim = f(self.sim(A_embedding, A_embedding)) 365 | between_sim = 
f(self.sim(A_embedding, B_embedding)) 366 | 367 | loss_1 = -torch.log( 368 | between_sim.diag() 369 | / (refl_sim.sum(1) + between_sim.sum(1) - refl_sim.diag())) 370 | refl_sim_1 = f(self.sim(B_embedding, B_embedding)) 371 | between_sim_1 = f(self.sim(B_embedding, A_embedding)) 372 | loss_2 = -torch.log( 373 | between_sim_1.diag() 374 | / (refl_sim_1.sum(1) + between_sim_1.sum(1) - refl_sim_1.diag())) 375 | ret = (loss_1 + loss_2) * 0.5 376 | ret = ret.mean() 377 | return ret 378 | 379 | def light_gcn(self, user_embedding, item_embedding, adj): 380 | ego_embeddings = torch.cat((user_embedding, item_embedding), dim=0) 381 | all_embeddings = [ego_embeddings] 382 | for i in range(self.lightgcn_layer): 383 | side_embeddings = torch.sparse.mm(adj, ego_embeddings) 384 | ego_embeddings = side_embeddings 385 | all_embeddings += [ego_embeddings] 386 | all_embeddings = torch.stack(all_embeddings, dim=1) 387 | all_embeddings = all_embeddings.mean(dim=1, keepdim=False) 388 | u_g_embeddings, i_g_embeddings = torch.split(all_embeddings, [self.n_users, self.n_entities], dim=0) 389 | return u_g_embeddings, i_g_embeddings 390 | 391 | def create_bpr_loss(self, users, items, labels, loss_contrast): 392 | batch_size = users.shape[0] 393 | scores = (items * users).sum(dim=1) 394 | scores = torch.sigmoid(scores) 395 | criteria = nn.BCELoss() 396 | bce_loss = criteria(scores, labels.float()) 397 | # cul regularizer 398 | regularizer = (torch.norm(users) ** 2 399 | + torch.norm(items) ** 2) / 2 400 | emb_loss = self.decay * regularizer / batch_size 401 | # cor_loss = self.sim_decay * cor 402 | return bce_loss + emb_loss + 0.001*loss_contrast, scores, bce_loss, emb_loss 403 | 404 | # def generate(self): 405 | # user_emb = self.all_embed[:self.n_users, :] 406 | # item_emb = self.all_embed[self.n_users:, :] 407 | # entity_gcn_emb, user_gcn_emb, item_adj = self.gcn(user_emb, 408 | # item_emb, 409 | # self.edge_index, 410 | # self.edge_type, 411 | # self.interact_mat, 412 | # mess_dropout=self.mess_dropout, 413 | # node_dropout=self.node_dropout) 414 | # 415 | # interact_mat_new = torch.sparse.mm(self.interact_mat, item_adj) 416 | # indice_old = interact_mat_new._indices() 417 | # value_old = interact_mat_new._values() 418 | # x = indice_old[0, :] 419 | # y = indice_old[1, :] 420 | # x_A = x 421 | # y_A = y + self.n_users 422 | # x_A_T = y + self.n_users 423 | # y_A_T = x 424 | # x_new = torch.cat((x_A, x_A_T), dim=-1) 425 | # y_new = torch.cat((y_A, y_A_T), dim=-1) 426 | # indice_new = torch.cat((x_new.unsqueeze(dim=0), y_new.unsqueeze(dim=0)), dim=0) 427 | # value_new = torch.cat((value_old, value_old), dim=-1) 428 | # interact_graph = torch.sparse.FloatTensor(indice_new, value_new, torch.Size( 429 | # [self.n_users + self.n_entities, self.n_users + self.n_entities])) 430 | # user_lightgcn_emb, item_lightgcn_emb = self.light_gcn(user_emb, item_emb, interact_graph) 431 | # u_e = torch.cat((user_gcn_emb, user_lightgcn_emb), dim=-1) 432 | # i_e = torch.cat((entity_gcn_emb, item_lightgcn_emb), dim=-1) 433 | # return i_e, u_e -------------------------------------------------------------------------------- /data/music/item_index2entity_id.txt: -------------------------------------------------------------------------------- 1 | 2 0 2 | 3 1 3 | 4 2 4 | 6 3 5 | 9 4 6 | 11 5 7 | 13 6 8 | 15 7 9 | 18 8 10 | 23 9 11 | 24 10 12 | 26 11 13 | 28 12 14 | 30 13 15 | 31 14 16 | 34 15 17 | 40 16 18 | 41 17 19 | 48 18 20 | 50 19 21 | 52 20 22 | 57 21 23 | 58 22 24 | 62 23 25 | 64 24 26 | 82 25 27 | 85 26 28 | 90 27 29 | 91 28 30 | 92 
[Continuation of the two-column item-to-entity mapping file (per the repo tree, data/music/item_index2entity_id.txt): roughly 3,800 further whitespace-separated integer pairs, one per file line, each mapping a raw item ID to a consecutive knowledge-graph entity index, e.g. "93 30", "94 31", "95 32", ..., "18740 3844", and finally "18741 3845" at file line 3847.]
--------------------------------------------------------------------------------
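Since the file above is just whitespace-separated `raw_item_id entity_index` pairs, the sketch below shows one way such a mapping could be parsed into a Python dict. This is an illustrative assumption only: the file path and column order are inferred from the repo tree and the visible data, and the repository's own preprocessing (e.g. in utils/data_loader.py) may read it differently.

```python
def load_item2entity(path="data/music/item_index2entity_id.txt"):
    """Parse a two-column mapping file into {raw_item_id: entity_index}.

    Assumes each non-empty line holds two whitespace-separated integers,
    matching the format visible in the dump above.
    """
    mapping = {}
    with open(path, "r") as f:
        for line in f:
            parts = line.split()
            if len(parts) != 2:
                continue  # skip blank or malformed lines
            item_id, entity_id = int(parts[0]), int(parts[1])
            mapping[item_id] = entity_id
    return mapping


if __name__ == "__main__":
    item2entity = load_item2entity()
    print(f"{len(item2entity)} items mapped to KG entities")
```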