├── README.md
├── code
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── attack.cpython-36.pyc
│   │   └── know_map.cpython-36.pyc
│   ├── attack.py
│   ├── dataProcess
│   │   ├── __pycache__
│   │   │   ├── dataEnhance.cpython-36.pyc
│   │   │   └── train_w2v.cpython-36.pyc
│   │   ├── dataEnhance.py
│   │   ├── save_w2v_model
│   │   │   └── w2v_model.md
│   │   ├── stopword
│   │   └── train_w2v.py
│   ├── data_process.py
│   ├── merge_subresult.py
│   ├── test_model.py
│   ├── train_model.py
│   └── utils
│       ├── Logger.py
│       └── __pycache__
│           ├── Logger.cpython-36.pyc
│           ├── Logger.cpython-38.pyc
│           └── tool.cpython-36.pyc
├── datasets
│   ├── stage1_test_data.csv
│   ├── stage1_train_data.csv
│   ├── test_data.csv
│   ├── tmp_data
│   │   └── label_id2cat.pkl
│   └── train_data.csv
├── logs
│   └── train.log
├── requirements.txt
├── result
│   └── result.csv
├── test.sh
└── train.sh

/README.md:
--------------------------------------------------------------------------------
1 | # xf-knowledgeClassify2021-top2
2 | Runner-up solution for the 2021 iFLYTEK Test Question Label Prediction Challenge
3 | 
4 | 
5 | ## Environment
6 | Environment: python3.6 + pytorch1.7 \
7 | With pytorch1.7 in place, install the remaining packages: "pip install -r requirements.txt"
8 | 
9 | 
10 | ## How to run:
11 | 
12 | 1. Model training: sh train.sh
13 | 2. Model prediction: sh test.sh
14 | 
15 | ## Loading trained models (optional)
16 | If you don't want to train the models yourself, you can load my trained checkpoints. \
17 | Put the "save_model" folder from Baidu Netdisk under the project root; \
18 | (link: https://pan.baidu.com/s/1nr6nsB5Qsm32MrMbaQUdww password: objk)
19 | 
20 | 
21 | ## Two notes:
22 | 
23 | 1. This solution uses only the officially provided dataset, without any external data;
24 | 2. The pretrained model is roberta_chinese_wwm_ext, which can be downloaded from huggingface.
--------------------------------------------------------------------------------
/code/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/__init__.py
--------------------------------------------------------------------------------
/code/__pycache__/attack.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/__pycache__/attack.cpython-36.pyc
--------------------------------------------------------------------------------
/code/__pycache__/know_map.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/__pycache__/know_map.cpython-36.pyc
--------------------------------------------------------------------------------
/code/attack.py:
--------------------------------------------------------------------------------
1 | 
2 | import torch
3 | 
4 | class FGM():
5 |     def __init__(self, model):
6 |         self.model = model
7 |         self.backup = {}
8 | 
9 |     def attack(self, epsilon=1., emb_name1='bert.embeddings.word_embeddings.weight', emb_name2='bert.embeddings.position_embeddings.weight', emb_name3='bert.embeddings.token_type_embeddings.weight'):
10 |         # replace the emb_name arguments with the embedding parameter names of your own model
11 |         for name, param in self.model.named_parameters():
12 |             if param.requires_grad and emb_name1 in name:
13 |                 self.backup[name] = param.data.clone()
14 |                 norm = torch.norm(param.grad)
15 |                 if norm != 0:
16 |                     r_at = epsilon * param.grad / norm
17 |                     param.data.add_(r_at)
18 |             if param.requires_grad and emb_name2 in name:
19 |                 self.backup[name] = param.data.clone()
20 |                 norm = torch.norm(param.grad)
21 |                 if norm != 0:
22 |                     r_at = epsilon * param.grad / norm
23 |                     param.data.add_(r_at)
24 |             if param.requires_grad and emb_name3 in name:
25 |                 self.backup[name] = param.data.clone()
26 |                 norm = torch.norm(param.grad)
27 |                 if norm != 0:
28 |                     r_at = epsilon * param.grad / norm
29 |                     param.data.add_(r_at)
30 | 
31 |     def restore(self, emb_name1='bert.embeddings.word_embeddings.weight', emb_name2='bert.embeddings.position_embeddings.weight', emb_name3='bert.embeddings.token_type_embeddings.weight'):
32 |         # replace the emb_name arguments with the embedding parameter names of your own model
33 |         for name, param in self.model.named_parameters():
34 |             if param.requires_grad and emb_name1 in name:
35 |                 assert name in self.backup
36 |                 param.data = self.backup[name]
37 |             if param.requires_grad and emb_name2 in name:
38 |                 assert name in self.backup
39 |                 param.data = self.backup[name]
40 |             if param.requires_grad and emb_name3 in name:
41 |                 assert name in self.backup
42 |                 param.data = self.backup[name]
43 |         self.backup = {}
44 | 
45 | 
46 | class PGD():
47 |     def __init__(self, model, k=5):
48 |         self.model = model
49 |         self.emb_backup = {}
50 |         self.grad_backup = {}
51 |         self.k = k
52 | 
53 |     def attack(self, epsilon=1., alpha=0.33, emb_name='word_embedding.', is_first_attack=False):
54 |         # replace emb_name with the embedding parameter name of your own model
55 |         for name, param in self.model.named_parameters():
56 |             if param.requires_grad and emb_name in name:
57 |                 if is_first_attack:
58 |                     self.emb_backup[name] = param.data.clone()
59 |                 norm = torch.norm(param.grad)
60 |                 if norm != 0 and not torch.isnan(norm):
61 |                     r_at = alpha * param.grad / norm
62 |                     param.data.add_(r_at)
63 |                     param.data = self.project(name, param.data, epsilon)
64 | 
65 |     def restore(self, emb_name='word_embedding.'):
66 |         # replace emb_name with the embedding parameter name of your own model
67 |         for name, param in self.model.named_parameters():
68 |             if param.requires_grad and emb_name in name:
69 |                 assert name in self.emb_backup
70 |                 param.data = self.emb_backup[name]
71 |         self.emb_backup = {}
72 | 
73 |     def project(self, param_name, param_data, epsilon):
74 |         r = param_data - self.emb_backup[param_name]
75 |         if torch.norm(r) > epsilon:
76 |             r = epsilon * r / torch.norm(r)
77 |         return param_data + r
78 | 
79 |     def backup_grad(self):
80 |         for name, param in self.model.named_parameters():
81 |             if param.requires_grad:
82 |                 self.grad_backup[name] = param.grad
83 | 
84 |     def restore_grad(self):
85 |         for name, param in self.model.named_parameters():
86 |             if param.requires_grad:
87 |                 param.grad = self.grad_backup[name]
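For context: FGM above is the fast gradient method — after the normal backward pass it adds an L2-normalized perturbation r_at = epsilon * grad / ||grad|| to the embedding weights in place, a second forward/backward accumulates the adversarial gradients on top of the clean ones, and restore() puts the original weights back. A minimal sketch of the intended call pattern, mirroring the loop in code/train_model.py but simplified (no AMP GradScaler here; `loader`, `criterion`, and `optimizer` are placeholders):

    fgm = FGM(model)
    for inputs, labels in loader:
        loss = criterion(model(**inputs)[0], labels)
        loss.backward()        # gradients on the clean batch
        fgm.attack()           # perturb the embedding weights in place
        loss_adv = criterion(model(**inputs)[0], labels)
        loss_adv.backward()    # accumulate gradients on the perturbed batch
        fgm.restore()          # restore the original embedding weights
        optimizer.step()
        optimizer.zero_grad()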
--------------------------------------------------------------------------------
/code/dataProcess/__pycache__/dataEnhance.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/dataProcess/__pycache__/dataEnhance.cpython-36.pyc
--------------------------------------------------------------------------------
/code/dataProcess/__pycache__/train_w2v.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/dataProcess/__pycache__/train_w2v.cpython-36.pyc
--------------------------------------------------------------------------------
/code/dataProcess/dataEnhance.py:
--------------------------------------------------------------------------------
1 | 
2 | import random
3 | import pandas as pd
4 | import jieba
5 | import pickle
6 | import os
7 | from copy import copy
8 | import translators as ts
9 | from gensim.models.word2vec import Word2Vec
10 | 
11 | from dataProcess.train_w2v import w2v_main, load_stopwords
12 | 
13 | import io, sys
14 | # change the default encoding of standard output
15 | # sys.stdout=io.TextIOWrapper(sys.stdout.buffer,encoding='utf8')
16 | 
17 | def translator_constructor(api):
18 |     if api == 'google':
19 |         return ts.google
20 |     elif api == 'bing':
21 |         return ts.bing
22 |     elif api == 'baidu':
23 |         return ts.baidu
24 |     elif api == 'sogou':
25 |         return ts.sogou
26 |     elif api == 'youdao':
27 |         return ts.youdao
28 |     elif api == 'tencent':
29 |         return ts.tencent
30 |     elif api == 'alibaba':
31 |         return ts.alibaba
32 |     else:
33 |         raise NotImplementedError(f'{api} translator is not implemented!')
34 | 
35 | 
36 | class SampleExpand():
37 |     def __init__(self) -> None:
38 |         super(SampleExpand, self).__init__()
39 |         self.w2v_model_path = './code/dataProcess/save_w2v_model/w2v_model.md'
40 |         self.train_data = pd.read_csv("./datasets/train_data.csv")
41 |         self.train_data['Analysis'] = self.train_data['Analysis'].fillna('')
42 |         self.senlen_threshold = 16
43 |         self.sample_maxlen = 512
44 |         self.random_name = ['random_add', 'random_del', 'random_replace', 'random_swap', 'random_punctuation']
45 |         self.change_ratio = 0.15
46 |         self.stopwords = load_stopwords()
47 |         if not os.path.exists(self.w2v_model_path):
48 |             w2v_main(self.train_data)
49 |         self.model = Word2Vec.load(self.w2v_model_path)
50 |         self.API = 'google'
51 |         self.PUNCTUATIONS = [',','。','?','!',':',';']
52 |         self.PUNC_RATIO = 0.3
53 | 
54 |     def expand_sample(self, sentence):
55 |         """Main augmentation entry: random add/delete/replace/swap and synonym-style substitution."""
56 |         if self.is_senlen_enough(sentence):
57 |             rand_name = random.choice(self.random_name)
58 |             if rand_name == 'random_add':
59 |                 return self.random_add(sentence)
60 |             elif rand_name == 'random_del':
61 |                 return self.random_del(sentence)
62 |             elif rand_name == 'random_replace':
63 |                 return self.random_replace(sentence)
64 |             elif rand_name == 'random_swap':
65 |                 return self.random_swap(sentence)
66 |             elif rand_name == 'translate':  # unreachable unless 'translate' is added to self.random_name (back-translation needs network access)
67 |                 return self.translate(sentence)
68 |             elif rand_name == 'random_punctuation':
69 |                 return self.insert_punctuation_marks(sentence)
70 |         else:
71 |             return sentence
72 | 
73 |     def truncate_sample(self, sample):
74 |         """Truncate a sample (unimplemented stub; not called in this repo)."""
75 | 
76 |     def is_senlen_enough(self, sentence):
77 |         """Whether the sentence is long enough (very short sentences are not augmented).
78 |         """
79 |         return True if len(sentence) > self.senlen_threshold else False
80 | 
81 |     def random_add(self, sentence):
82 |         """Randomly insert words into the sentence; each inserted word is a nearest neighbour (by word2vec similarity) of a word already in the sentence.
83 |         """
84 |         words = jieba.lcut(sentence)
85 |         new_words = words.copy()
86 |         add_num = int(len(words)*self.change_ratio)
87 |         for _ in range(add_num):
88 |             counter = 0
89 |             while True:
90 |                 random_word = new_words[random.randint(0, len(new_words)-1)]
91 |                 if random_word in self.model.wv.vocab:
92 |                     random_synonym = self.model.similar_by_word(random_word)[0][0]
93 |                     random_idx = random.randint(0, len(new_words)-1)
94 |                     new_words.insert(random_idx, random_synonym)
95 |                     break
96 |                 counter += 1
97 |                 if counter >= 10:
98 |                     break
99 |         return ''.join(new_words)
100 | 
101 |     def random_del(self, sentence):
102 |         """Randomly delete a word from the sentence (when a word occurs several times, all its occurrences may be deleted together).
103 |         """
104 |         words = jieba.lcut(sentence)
105 |         # obviously, if there's only one word, don't delete it
106 |         if len(words) == 1:
107 |             return sentence
108 |         # randomly delete words with probability p
109 |         new_words = []
110 |         for word in words:
111 |             r = random.uniform(0, 1)
112 |             if r > self.change_ratio:
113 |                 new_words.append(word)
114 |         # if you end up deleting all words, just return a random word
115 |         if len(new_words) == 0:
116 |             rand_int = random.randint(0, len(words)-1)
117 |             return words[rand_int]
118 |         return ''.join(new_words)
119 | 
120 |     def random_replace(self, sentence):
121 |         """Randomly pick words in the sentence and replace them; replacements are found by word-vector similarity (when a word occurs several times, all its occurrences are replaced together).
122 |         """
123 |         words = jieba.lcut(sentence)
124 |         new_words = words.copy()
125 |         random_word_list = [word for word in words if word not in self.stopwords and word in self.model.wv.vocab]
126 |         random.shuffle(random_word_list)
127 |         replace_num = int(len(random_word_list)*self.change_ratio)
128 |         num_replaced = 0
129 |         for random_word in random_word_list:
130 |             similar_words = self.model.similar_by_word(random_word)[:2]
131 |             # similar_words = [w[0] for w in similar_words if w[1] >= 0.9]
132 |             if len(similar_words) >= 1:
133 |                 sim_word = random.choice(similar_words)[0]
134 |                 new_words = [sim_word if word == random_word else word for word in new_words]
135 |                 # print("replaced", random_word, "with", sim_word)
136 |                 num_replaced += 1
137 |             if num_replaced >= replace_num:  # only replace up to n words
138 |                 break
139 |         return ''.join(new_words)
140 | 
141 |     def random_swap(self, sentence):
142 |         words = jieba.lcut(sentence)
143 |         new_words = words.copy()
144 |         swap_num = int(len(words)*self.change_ratio)
145 |         for _ in range(swap_num):
146 |             random_idx_1 = random.randint(0, len(new_words)-1)
147 |             random_idx_2 = random_idx_1
148 |             counter = 0
149 |             while random_idx_2 == random_idx_1:
150 |                 random_idx_2 = random.randint(0, len(new_words)-1)
151 |                 counter += 1
152 |                 if counter > 3:
153 |                     break
154 |             new_words[random_idx_1], new_words[random_idx_2] = new_words[random_idx_2], new_words[random_idx_1]
155 |         return ''.join(new_words)
156 | 
157 |     def translate(self, text):
158 |         r = random.uniform(0, 1)
159 |         if r > 0.5:
160 |             trans_lang = 'en'
161 |         else:
162 |             trans_lang = random.choice(['fr', 'de', 'ja', 'es'])
163 |         try:
164 |             trans_text = translator_constructor(self.API)(text, 'zh', trans_lang)
165 |             return translator_constructor(self.API)(trans_text, trans_lang, 'zh')
166 |         except:
167 |             return text
168 | 
169 |     # Insert punctuation marks into a given sentence with the given ratio PUNC_RATIO
170 |     def insert_punctuation_marks(self, sentence):
171 |         words = jieba.lcut(sentence)
172 |         new_line = []
173 |         q = random.randint(1, int(self.PUNC_RATIO * len(words) + 1))
174 |         qs = random.sample(range(0, len(words)), q)
175 | 
176 |         for j, word in enumerate(words):
177 |             if j in qs:
178 |                 new_line.append(self.PUNCTUATIONS[random.randint(0, len(self.PUNCTUATIONS)-1)])
179 |                 new_line.append(word)
180 |             else:
181 |                 new_line.append(word)
182 |         return ''.join(new_line)
183 | 
184 | 
185 | def eda_data(data, repeat_n=5):
186 |     """Data augmentation via the random add/delete/replace operations above.
187 |     """
188 |     EDA = SampleExpand()
189 |     result = data.copy(deep=True)
190 |     for n in range(repeat_n):
191 |         new_data = data.copy(deep=True)
192 |         new_data['Content'] = new_data['Content'].apply(lambda x: EDA.expand_sample(x))
193 |         new_data['Analysis'] = new_data['Analysis'].apply(lambda x: EDA.expand_sample(x))
194 |         new_data['options'] = new_data['options'].apply(lambda x: EDA.expand_sample(x))
195 |         result = pd.concat([result, new_data])
196 |     return result
197 | 
198 | 
199 | def translate(text):
200 |     API = 'google'
201 |     trans_text = translator_constructor(API)(text, 'zh', 'es')
202 |     return translator_constructor(API)(trans_text, 'es', 'zh')
203 | 
204 | 
205 | if __name__ == '__main__':
206 |     text = "本题考核电子商务法的基本制度。电子商务法的基本制度包括电子商务合同法律制度、电子签名和电子认证法律制度、电子支付法律制度。"
207 |     print(translate(text))
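A minimal usage sketch for eda_data, mirroring code/train_model.py where it is called once per epoch when CFG['is_enhance'] is set (the frame just needs Content / Analysis / options columns; the CSV path is the project's own):

    import pandas as pd
    from dataProcess.dataEnhance import eda_data

    train = pd.read_csv('./datasets/train_data.csv')
    train['Analysis'] = train['Analysis'].fillna('')
    augmented = eda_data(train, repeat_n=5)  # original rows plus 5 randomly perturbed copies of each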
--------------------------------------------------------------------------------
/code/dataProcess/save_w2v_model/w2v_model.md:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/dataProcess/save_w2v_model/w2v_model.md -------------------------------------------------------------------------------- /code/dataProcess/stopword: -------------------------------------------------------------------------------- 1 | ——— 2 | 》), 3 | )÷(1- 4 | ”, 5 | )、 6 | =( 7 | : 8 | → 9 | ℃ 10 | & 11 | * 12 | 一一 13 | ~~~~ 14 | ’ 15 | . 16 | 『 17 | .一 18 | ./ 19 | -- 20 | 』 21 | =″ 22 | 【 23 | [*] 24 | }> 25 | [⑤]] 26 | [①D] 27 | c] 28 | ng昉 29 | * 30 | // 31 | [ 32 | ] 33 | [②e] 34 | [②g] 35 | ={ 36 | } 37 | ,也 38 | ‘ 39 | A 40 | [①⑥] 41 | [②B] 42 | [①a] 43 | [④a] 44 | [①③] 45 | [③h] 46 | ③] 47 | 1. 48 | -- 49 | [②b] 50 | ’‘ 51 | ××× 52 | [①⑧] 53 | 0:2 54 | =[ 55 | [⑤b] 56 | [②c] 57 | [④b] 58 | [②③] 59 | [③a] 60 | [④c] 61 | [①⑤] 62 | [①⑦] 63 | [①g] 64 | ∈[ 65 | [①⑨] 66 | [①④] 67 | [①c] 68 | [②f] 69 | [②⑧] 70 | [②①] 71 | [①C] 72 | [③c] 73 | [③g] 74 | [②⑤] 75 | [②②] 76 | 一. 77 | [①h] 78 | .数 79 | [] 80 | [①B] 81 | 数/ 82 | [①i] 83 | [③e] 84 | [①①] 85 | [④d] 86 | [④e] 87 | [③b] 88 | [⑤a] 89 | [①A] 90 | [②⑧] 91 | [②⑦] 92 | [①d] 93 | [②j] 94 | 〕〔 95 | ][ 96 | :// 97 | ′∈ 98 | [②④ 99 | [⑤e] 100 | 12% 101 | b] 102 | ... 103 | ................... 104 | …………………………………………………③ 105 | ZXFITL 106 | [③F] 107 | 」 108 | [①o] 109 | ]∧′=[ 110 | ∪φ∈ 111 | ′| 112 | {- 113 | ②c 114 | } 115 | [③①] 116 | R.L. 117 | [①E] 118 | Ψ 119 | -[*]- 120 | ↑ 121 | .日 122 | [②d] 123 | [② 124 | [②⑦] 125 | [②②] 126 | [③e] 127 | [①i] 128 | [①B] 129 | [①h] 130 | [①d] 131 | [①g] 132 | [①②] 133 | [②a] 134 | f] 135 | [⑩] 136 | a] 137 | [①e] 138 | [②h] 139 | [②⑥] 140 | [③d] 141 | [②⑩] 142 | e] 143 | 〉 144 | 】 145 | 元/吨 146 | [②⑩] 147 | 2.3% 148 | 5:0 149 | [①] 150 | :: 151 | [②] 152 | [③] 153 | [④] 154 | [⑤] 155 | [⑥] 156 | [⑦] 157 | [⑧] 158 | [⑨] 159 | …… 160 | —— 161 | ? 162 | 、 163 | 。 164 | “ 165 | ” 166 | 《 167 | 》 168 | ! 169 | , 170 | : 171 | ; 172 | ? 173 | . 174 | , 175 | . 176 | ' 177 | ? 178 | · 179 | ——— 180 | ── 181 | ? 182 | — 183 | < 184 | > 185 | ( 186 | ) 187 | 〔 188 | 〕 189 | [ 190 | ] 191 | ( 192 | ) 193 | - 194 | + 195 | ~ 196 | × 197 | / 198 | / 199 | ① 200 | ② 201 | ③ 202 | ④ 203 | ⑤ 204 | ⑥ 205 | ⑦ 206 | ⑧ 207 | ⑨ 208 | ⑩ 209 | Ⅲ 210 | В 211 | " 212 | ; 213 | # 214 | @ 215 | γ 216 | μ 217 | φ 218 | φ. 219 | × 220 | Δ 221 | ■ 222 | ▲ 223 | sub 224 | exp 225 | sup 226 | sub 227 | Lex 228 | # 229 | % 230 | & 231 | ' 232 | + 233 | +ξ 234 | ++ 235 | - 236 | -β 237 | < 238 | <± 239 | <Δ 240 | <λ 241 | <φ 242 | << 243 | = 244 | = 245 | =☆ 246 | =- 247 | > 248 | >λ 249 | _ 250 | ~± 251 | ~+ 252 | [⑤f] 253 | [⑤d] 254 | [②i] 255 | ≈ 256 | [②G] 257 | [①f] 258 | LI 259 | ㈧ 260 | [- 261 | ...... 
262 | 〉 263 | [③⑩] 264 | 第二 265 | 一番 266 | 一直 267 | 一个 268 | 一些 269 | 许多 270 | 种 271 | 有的是 272 | 也就是说 273 | 末##末 274 | 啊 275 | 阿 276 | 哎 277 | 哎呀 278 | 哎哟 279 | 唉 280 | 俺 281 | 俺们 282 | 按 283 | 按照 284 | 吧 285 | 吧哒 286 | 把 287 | 罢了 288 | 被 289 | 本 290 | 本着 291 | 比 292 | 比方 293 | 比如 294 | 鄙人 295 | 彼 296 | 彼此 297 | 边 298 | 别 299 | 别的 300 | 别说 301 | 并 302 | 并且 303 | 不比 304 | 不成 305 | 不单 306 | 不但 307 | 不独 308 | 不管 309 | 不光 310 | 不过 311 | 不仅 312 | 不拘 313 | 不论 314 | 不怕 315 | 不然 316 | 不如 317 | 不特 318 | 不惟 319 | 不问 320 | 不只 321 | 朝 322 | 朝着 323 | 趁 324 | 趁着 325 | 乘 326 | 冲 327 | 除 328 | 除此之外 329 | 除非 330 | 除了 331 | 此 332 | 此间 333 | 此外 334 | 从 335 | 从而 336 | 打 337 | 待 338 | 但 339 | 但是 340 | 当 341 | 当着 342 | 到 343 | 得 344 | 的 345 | 的话 346 | 等 347 | 等等 348 | 地 349 | 第 350 | 叮咚 351 | 对 352 | 对于 353 | 多 354 | 多少 355 | 而 356 | 而况 357 | 而且 358 | 而是 359 | 而外 360 | 而言 361 | 而已 362 | 尔后 363 | 反过来 364 | 反过来说 365 | 反之 366 | 非但 367 | 非徒 368 | 否则 369 | 嘎 370 | 嘎登 371 | 该 372 | 赶 373 | 个 374 | 各 375 | 各个 376 | 各位 377 | 各种 378 | 各自 379 | 给 380 | 根据 381 | 跟 382 | 故 383 | 故此 384 | 固然 385 | 关于 386 | 管 387 | 归 388 | 果然 389 | 果真 390 | 过 391 | 哈 392 | 哈哈 393 | 呵 394 | 和 395 | 何 396 | 何处 397 | 何况 398 | 何时 399 | 嘿 400 | 哼 401 | 哼唷 402 | 呼哧 403 | 乎 404 | 哗 405 | 还是 406 | 还有 407 | 换句话说 408 | 换言之 409 | 或 410 | 或是 411 | 或者 412 | 极了 413 | 及 414 | 及其 415 | 及至 416 | 即 417 | 即便 418 | 即或 419 | 即令 420 | 即若 421 | 即使 422 | 几 423 | 几时 424 | 己 425 | 既 426 | 既然 427 | 既是 428 | 继而 429 | 加之 430 | 假如 431 | 假若 432 | 假使 433 | 鉴于 434 | 将 435 | 较 436 | 较之 437 | 叫 438 | 接着 439 | 结果 440 | 借 441 | 紧接着 442 | 进而 443 | 尽 444 | 尽管 445 | 经 446 | 经过 447 | 就 448 | 就是 449 | 就是说 450 | 据 451 | 具体地说 452 | 具体说来 453 | 开始 454 | 开外 455 | 靠 456 | 咳 457 | 可 458 | 可见 459 | 可是 460 | 可以 461 | 况且 462 | 啦 463 | 来 464 | 来着 465 | 离 466 | 例如 467 | 哩 468 | 连 469 | 连同 470 | 两者 471 | 了 472 | 临 473 | 另 474 | 另外 475 | 另一方面 476 | 论 477 | 嘛 478 | 吗 479 | 慢说 480 | 漫说 481 | 冒 482 | 么 483 | 每 484 | 每当 485 | 们 486 | 莫若 487 | 某 488 | 某个 489 | 某些 490 | 拿 491 | 哪 492 | 哪边 493 | 哪儿 494 | 哪个 495 | 哪里 496 | 哪年 497 | 哪怕 498 | 哪天 499 | 哪些 500 | 哪样 501 | 那 502 | 那边 503 | 那儿 504 | 那个 505 | 那会儿 506 | 那里 507 | 那么 508 | 那么些 509 | 那么样 510 | 那时 511 | 那些 512 | 那样 513 | 乃 514 | 乃至 515 | 呢 516 | 能 517 | 你 518 | 你们 519 | 您 520 | 宁 521 | 宁可 522 | 宁肯 523 | 宁愿 524 | 哦 525 | 呕 526 | 啪达 527 | 旁人 528 | 呸 529 | 凭 530 | 凭借 531 | 其 532 | 其次 533 | 其二 534 | 其他 535 | 其它 536 | 其一 537 | 其余 538 | 其中 539 | 起 540 | 起见 541 | 起见 542 | 岂但 543 | 恰恰相反 544 | 前后 545 | 前者 546 | 且 547 | 然而 548 | 然后 549 | 然则 550 | 让 551 | 人家 552 | 任 553 | 任何 554 | 任凭 555 | 如 556 | 如此 557 | 如果 558 | 如何 559 | 如其 560 | 如若 561 | 如上所述 562 | 若 563 | 若非 564 | 若是 565 | 啥 566 | 上下 567 | 尚且 568 | 设若 569 | 设使 570 | 甚而 571 | 甚么 572 | 甚至 573 | 省得 574 | 时候 575 | 什么 576 | 什么样 577 | 使得 578 | 是 579 | 是的 580 | 首先 581 | 谁 582 | 谁知 583 | 顺 584 | 顺着 585 | 似的 586 | 虽 587 | 虽然 588 | 虽说 589 | 虽则 590 | 随 591 | 随着 592 | 所 593 | 所以 594 | 他 595 | 他们 596 | 他人 597 | 它 598 | 它们 599 | 她 600 | 她们 601 | 倘 602 | 倘或 603 | 倘然 604 | 倘若 605 | 倘使 606 | 腾 607 | 替 608 | 通过 609 | 同 610 | 同时 611 | 哇 612 | 万一 613 | 往 614 | 望 615 | 为 616 | 为何 617 | 为了 618 | 为什么 619 | 为着 620 | 喂 621 | 嗡嗡 622 | 我 623 | 我们 624 | 呜 625 | 呜呼 626 | 乌乎 627 | 无论 628 | 无宁 629 | 毋宁 630 | 嘻 631 | 吓 632 | 相对而言 633 | 像 634 | 向 635 | 向着 636 | 嘘 637 | 呀 638 | 焉 639 | 沿 640 | 沿着 641 | 要 642 | 要不 643 | 要不然 644 | 要不是 645 | 要么 646 | 要是 647 | 也 648 | 也罢 649 | 也好 650 | 一 651 | 一般 652 | 一旦 653 | 一方面 654 | 一来 655 | 一切 656 | 一样 657 | 一则 658 | 依 659 | 依照 660 | 矣 661 | 以 662 | 以便 663 | 以及 664 | 以免 665 | 以至 666 | 以至于 667 | 以致 668 | 
抑或 669 | 因 670 | 因此 671 | 因而 672 | 因为 673 | 哟 674 | 用 675 | 由 676 | 由此可见 677 | 由于 678 | 有 679 | 有的 680 | 有关 681 | 有些 682 | 又 683 | 于 684 | 于是 685 | 于是乎 686 | 与 687 | 与此同时 688 | 与否 689 | 与其 690 | 越是 691 | 云云 692 | 哉 693 | 再说 694 | 再者 695 | 在 696 | 在下 697 | 咱 698 | 咱们 699 | 则 700 | 怎 701 | 怎么 702 | 怎么办 703 | 怎么样 704 | 怎样 705 | 咋 706 | 照 707 | 照着 708 | 者 709 | 这 710 | 这边 711 | 这儿 712 | 这个 713 | 这会儿 714 | 这就是说 715 | 这里 716 | 这么 717 | 这么点儿 718 | 这么些 719 | 这么样 720 | 这时 721 | 这些 722 | 这样 723 | 正如 724 | 吱 725 | 之 726 | 之类 727 | 之所以 728 | 之一 729 | 只是 730 | 只限 731 | 只要 732 | 只有 733 | 至 734 | 至于 735 | 诸位 736 | 着 737 | 着呢 738 | 自 739 | 自从 740 | 自个儿 741 | 自各儿 742 | 自己 743 | 自家 744 | 自身 745 | 综上所述 746 | 总的来看 747 | 总的来说 748 | 总的说来 749 | 总而言之 750 | 总之 751 | 纵 752 | 纵令 753 | 纵然 754 | 纵使 755 | 遵照 756 | 作为 757 | 兮 758 | 呃 759 | 呗 760 | 咚 761 | 咦 762 | 喏 763 | 啐 764 | 喔唷 765 | 嗬 766 | 嗯 767 | 嗳 768 | -------------------------------------------------------------------------------- /code/dataProcess/train_w2v.py: -------------------------------------------------------------------------------- 1 | # -*- encoding: UTF-8 -*- 2 | 3 | import codecs 4 | import sys 5 | import os 6 | import re 7 | import jieba.posseg as pseg 8 | # import jieba 9 | from gensim.models.word2vec import Word2Vec 10 | sys.path.append('..') 11 | 12 | # sys.stdout=io.TextIOWrapper(sys.stdout.buffer,encoding='utf8') 13 | 14 | 15 | def load_stopwords(): 16 | stopwords = codecs.open('./code/dataProcess/stopword','r',encoding='utf8').readlines() #对问题进行分词 17 | stopwords = [w.strip() for w in stopwords] 18 | stopwords = {sw: 1 for sw in stopwords} 19 | return stopwords 20 | 21 | 22 | def tokenization(text, stopwords): 23 | """ 24 | 分词 25 | :param text: 26 | :return: 27 | """ 28 | # jieba.load_userdict('../file/user_dict') #添加用户词典 29 | result = [] 30 | words = pseg.cut(text) 31 | # words = jieba.lcut(text) 32 | for word, flag in words: 33 | if word not in stopwords and word != ' ': 34 | result.append(word) 35 | return result 36 | 37 | 38 | def word2vec_train(corpus): 39 | """ 40 | word2vec模型训练 41 | :param corpus: 42 | :return: 43 | """ 44 | model = Word2Vec(min_count=1) 45 | model.build_vocab(corpus) 46 | model.train(corpus, total_examples = model.corpus_count, epochs = 100) 47 | if not os.path.exists('./dataProcess/save_w2v_model'): 48 | os.makedirs('./dataProcess/save_w2v_model') 49 | model.save('./dataProcess/save_w2v_model/w2v_model.md') 50 | 51 | 52 | def w2v_main(data): 53 | w2v_train_data = data['Content'].values.tolist() + data['Analysis'].values.tolist() + data['options'].values.tolist() 54 | w2v_train_data = list(set(w2v_train_data)) 55 | w2v_train_data = [re.split('[。?!]', train) for train in w2v_train_data] 56 | w2v_train_data = [sentence for sample in w2v_train_data for sentence in sample] 57 | w2v_train_data = [sample.replace('\r','').replace('\n','').replace('\u3000','') for sample in w2v_train_data if sample != ''] 58 | stopwords = load_stopwords() 59 | train_corpus = [tokenization(x, stopwords) for x in w2v_train_data] 60 | word2vec_train(train_corpus) 61 | 62 | -------------------------------------------------------------------------------- /code/data_process.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author : Stefan 3 | # Time : 2021-09-23 4 | # Desc : 数据预处理操作,包括清洗脏数据,数据切分,构造伪标签数据等 5 | import pandas as pd 6 | import copy 7 | import Levenshtein 8 | import io, sys 9 | #改变标准输出的默认编码 10 | sys.stdout=io.TextIOWrapper(sys.stdout.buffer,encoding='utf8') 11 | 12 | 
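# clean_data() below resolves label conflicts by dropping every row under a
# contradictory key rather than trying to repair it. A minimal sketch of the
# first rule, assuming a hypothetical frame df with the same columns:
#
#     dup = df.groupby(['TestQuestionID', 'k_Level', 'KnowledgeID'])['q_Level'].unique()
#     bad_keys = dup[dup.apply(len) > 1].index  # same question + knowledge point labelled with more than one q_Level
#
# Every row under a key in bad_keys is dropped; the second rule applies the same
# idea to (TestQuestionID, k_Level) keys that map to more than one KnowledgeID.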
13 | def clean_data(): 14 | """清洗脏数据 15 | """ 16 | train_df = pd.read_csv('./datasets/train_data.csv') 17 | 18 | # 清洗同一TestQuestionID同一k_Level同一KnowledgeID条件下存在不同q_Level的数据 19 | df_dup = train_df.groupby(['TestQuestionID', 'k_Level', 'KnowledgeID'])['q_Level'].unique().to_frame().reset_index() 20 | df_dup['len'] = df_dup['q_Level'].apply(lambda x: len(x)) 21 | df_dirty = df_dup.loc[df_dup['len'] > 1] 22 | for qid, klevel, knowid in df_dirty[['TestQuestionID', 'k_Level', 'KnowledgeID']].values: 23 | train_df = train_df.drop(train_df[(train_df['TestQuestionID']==qid) & (train_df['k_Level']==klevel) & (train_df['KnowledgeID']==knowid)].index) 24 | 25 | # 清洗同一TestQuestionID同一k_Level条件下存在不同KnowledgeID的数据 26 | df_dup1 = train_df.groupby(['TestQuestionID', 'k_Level'])['KnowledgeID'].unique().to_frame().reset_index() 27 | df_dup1['len'] = df_dup1['KnowledgeID'].apply(lambda x: len(x)) 28 | df_dirty = df_dup1.loc[df_dup1['len'] > 1] 29 | for qid, klevel in df_dirty[['TestQuestionID', 'k_Level']].values: 30 | train_df = train_df.drop(train_df[(train_df['TestQuestionID']==qid) & (train_df['k_Level']==klevel)].index) 31 | 32 | train_df.to_csv('./datasets/tmp_data/new_train_data.csv', index=False) 33 | 34 | 35 | def split_kdata(): 36 | """将清洗后的数据按知识点类别进行划分,方便对各类别进行数据探索 37 | """ 38 | train_df = pd.read_csv('./datasets/tmp_data/new_train_data.csv') 39 | train_df = train_df[['type', 'Content', 'q_Level', 'Analysis', 'options', 'TestQuestionID', 'k_Level', 'KnowledgeID']] 40 | train_df['Analysis'] = train_df['Analysis'].fillna('') 41 | train_k1 = train_df[train_df['k_Level']=='CD'] 42 | train_k2 = train_df[train_df['k_Level']=='CR'] 43 | train_k3 = train_df[train_df['k_Level']=='JE'] 44 | train_k4 = train_df[train_df['k_Level']=='TS'] 45 | train_k5 = train_df[train_df['k_Level']=='KL'] 46 | 47 | train_k1.to_csv('./datasets/tmp_data/k1_train_data.csv', index=False) 48 | train_k2.to_csv('./datasets/tmp_data/k2_train_data.csv', index=False) 49 | train_k3.to_csv('./datasets/tmp_data/k3_train_data.csv', index=False) 50 | train_k4.to_csv('./datasets/tmp_data/k4_train_data.csv', index=False) 51 | train_k5.to_csv('./datasets/tmp_data/k5_train_data.csv', index=False) 52 | 53 | 54 | def get_stage1_diff_pseudo_label(edit_ratio=0.1): 55 | """根据编辑距离获取相似数据并以对应的标签作为伪标签数据参与训练 56 | """ 57 | train_stage1 = pd.read_csv('./datasets/stage1_train_data.csv') 58 | test_stage2 = pd.read_csv('./datasets/test_data.csv') 59 | train_stage1 = train_stage1.drop_duplicates(subset='TestQuestionID', keep='first') 60 | test_stage2 = test_stage2.drop_duplicates(subset='TestQuestionID', keep='first') 61 | stage1_value = train_stage1[['Content', 'Analysis', 'TestQuestionID']].values 62 | stage1_dict = {'##'.join([str(value[0]), str(value[1])]): value[2] for value in stage1_value} 63 | stage2_value = test_stage2[['Content', 'Analysis', 'TestQuestionID']].values 64 | stage2_dict = {'##'.join([str(value[0]), str(value[1])]): value[2] for value in stage2_value} 65 | remain_ids = [] 66 | for stage2_content, stage2_qid in stage2_dict.items(): 67 | if stage2_content in stage1_dict: 68 | remain_ids.append(stage1_dict[stage2_content]) 69 | for stage2_content, stage2_qid in stage2_dict.items(): 70 | for stage1_content, stage1_qid in stage1_dict.items(): 71 | edit = Levenshtein.distance(stage2_content, stage1_content) / max(len(stage1_content), len(stage2_content)) 72 | if edit <= edit_ratio and stage1_qid not in remain_ids: 73 | remain_ids.append(stage1_qid) 74 | sub_train_stage1 = train_stage1[train_stage1.TestQuestionID.isin(remain_ids)] 75 | # 
sub_test_stage2 = test_stage2[train_stage1.TestQuestionID.isin(remain_ids)] 76 | sub_train_stage1.to_csv('./datasets/tmp_data/sub_train_stage1_data.csv', index=False) 77 | 78 | 79 | def main(): 80 | clean_data() 81 | split_kdata() 82 | get_stage1_diff_pseudo_label(edit_ratio=0.1) 83 | 84 | 85 | if __name__ == '__main__': 86 | main() 87 | -------------------------------------------------------------------------------- /code/merge_subresult.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author : Stefan 3 | # Time : 2021-09-23 4 | # Desc : 将多个子模型的结果进行合并 5 | 6 | import pickle 7 | import pandas as pd 8 | # import know_map 9 | 10 | result_dict = {} 11 | for k in ['k1', 'k2', 'k3', 'k4', 'k5', 'q']: 12 | id_result = pickle.load(open(f'./result/sub_result/{k}_result.pkl', 'rb')) 13 | # id_result = pickle.load(open(f'./result/sub_result/tmp4test_v2_baseline_{k}_result.pkl', 'rb')) 14 | result_dict.update(id_result) 15 | 16 | test_df = pd.read_csv('./datasets/test_data.csv') 17 | test_df['KnowledgeID'] = test_df[['TestQuestionID', 'k_Level']].apply(lambda x: result_dict[str(x[0])+x[1]], axis=1) 18 | # test_df = know_map.main(test_df) # 这部分代码由队友完成,所以无法开源,敬请谅解。 19 | test_df['q_Level'] = test_df[['TestQuestionID', 'q_Level']].apply(lambda x: 20 | int(x[1]) if x[1]==-1 else int(result_dict[str(int(x[0]))]), axis=1) 21 | 22 | test_df[['index', 'TestQuestionID', 'KnowledgeID', 'q_Level']].to_csv('./result/result.csv',index=None) 23 | -------------------------------------------------------------------------------- /code/test_model.py: -------------------------------------------------------------------------------- 1 | 2 | # -*- coding: utf-8 -*- 3 | # Author : Stefan 4 | # Time : 2021-09-23 5 | # Desc : 模型预测 6 | 7 | import pandas as pd 8 | import numpy as np 9 | import os 10 | import torch 11 | import torch.nn as nn 12 | from torch.utils.data import Dataset, DataLoader 13 | from tqdm import tqdm 14 | import pickle 15 | from sklearn.model_selection import * 16 | from transformers import * 17 | 18 | CFG = { #训练的参数配置 19 | 'lr': 3e-5, #学习率 20 | 'model': './save_model/pretrain_model_parameter/chinese_roberta_wwm_ext_pytorch', # 网络不好用这个 21 | # 'model': 'hfl/chinese-roberta-wwm-ext', # 也可以直接从huggingface下载 22 | 'max_len': 512, #文本截断的最大长度 23 | 'valid_bs': 16, 24 | 'num_workers': 0, 25 | 'device': 0, 26 | 'model_index': 'k1', 27 | 'sub_label': False, 28 | 'split_science_law': 0, 29 | 'is_choose_layer': False, 30 | 'layers': [-4, -3, -2, -1], # [0, -1], # 31 | 'pooling': 'max', 32 | } 33 | 34 | def model_predict(model_index, sub_label=False): 35 | CFG['model_index'] = model_index 36 | CFG['sub_label'] = sub_label 37 | 38 | tokenizer = BertTokenizer.from_pretrained(CFG['model']) 39 | torch.cuda.set_device(CFG['device']) 40 | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') 41 | 42 | k_level_map = {'k1': 'CD', 'k2': 'CR', 'k3': 'JE', 'k4':'TS', 'k5':'KL'} 43 | test_df = pd.read_csv('./datasets/test_data.csv') 44 | if CFG['model_index'].startswith('k'): 45 | test_df = test_df[test_df['k_Level']==k_level_map[CFG['model_index']]] 46 | else: 47 | test_df = test_df.drop_duplicates(subset='TestQuestionID', keep='first').reset_index() 48 | if CFG['split_science_law'] == 1: 49 | test_df = test_df[test_df['TestQuestionID'] <= 4872] 50 | elif CFG['split_science_law'] == 2: 51 | test_df = test_df[test_df['TestQuestionID'] > 4872] 52 | 53 | test_df['Analysis'] = test_df['Analysis'].fillna('') 54 | 55 | label_id2cate = 
pickle.load(open('./datasets/tmp_data/label_id2cat.pkl', 'rb')) 56 | num_labels = len(label_id2cate['{}_label_id2cate'.format(CFG['model_index'])]) 57 | 58 | 59 | class MyDataset(Dataset): 60 | def __init__(self, dataframe): 61 | self.df = dataframe 62 | 63 | def __len__(self): 64 | return len(self.df) 65 | 66 | def __getitem__(self, idx): 67 | type = self.df.type.values[idx] 68 | content = self.df.Content.values[idx] 69 | analysis = self.df.Analysis.values[idx] 70 | options = self.df.options.values[idx].replace('\r', '').replace('\n', '') 71 | question = (type + '[SEP]' + content + '[SEP]' + analysis).replace('\r', '').replace('\n', '') 72 | return question, options 73 | 74 | def collate_fn(data): 75 | input_ids, attention_mask, token_type_ids = [], [], [] 76 | for x in data: 77 | text = tokenizer(x[0], text_pair=x[1], padding='max_length', truncation=True, max_length=CFG['max_len'], return_tensors='pt') 78 | input_ids.append(text['input_ids'].squeeze().tolist()) 79 | attention_mask.append(text['attention_mask'].squeeze().tolist()) 80 | token_type_ids.append(text['token_type_ids'].squeeze().tolist()) 81 | input_ids = torch.tensor(input_ids) 82 | attention_mask = torch.tensor(attention_mask) 83 | token_type_ids = torch.tensor(token_type_ids) 84 | return input_ids, attention_mask, token_type_ids 85 | 86 | class SelfNet(nn.Module): 87 | def __init__(self, model_name, num_labels): 88 | super(SelfNet,self).__init__() 89 | self.autoModel=BertModel.from_pretrained(model_name) 90 | self.classifier=nn.Linear(768, num_labels) 91 | self.dropout = nn.Dropout(0.1) 92 | 93 | def forward(self,input_ids, attention_mask, token_type_ids): 94 | outputs=self.autoModel(input_ids, attention_mask, token_type_ids, output_hidden_states=True) 95 | encoded_layers = outputs['hidden_states'] 96 | hidden_state = [] 97 | for l in CFG['layers']: 98 | hidden_state.append(encoded_layers[l][:, 0].unsqueeze(1)) 99 | hidden_state = torch.cat(hidden_state, dim=1) 100 | 101 | if CFG['pooling'] == 'max': 102 | hidden_state, _ = torch.max(hidden_state, dim=1) 103 | elif CFG['pooling'] == 'mean': 104 | hidden_state = torch.mean(hidden_state, dim=1) 105 | else: 106 | hidden_state = hidden_state.view(hidden_state.size(0), -1) 107 | 108 | hidden_state = self.dropout(hidden_state) 109 | output = self.classifier(hidden_state) 110 | return output 111 | 112 | 113 | test_df['label'] = 0 114 | test_set = MyDataset(test_df) 115 | test_loader = DataLoader(test_set, batch_size=CFG['valid_bs'], collate_fn=collate_fn, shuffle=False, num_workers=CFG['num_workers']) 116 | 117 | if CFG['is_choose_layer']: 118 | model = SelfNet(CFG['model'], num_labels=num_labels).to(device) 119 | else: 120 | model = BertForSequenceClassification.from_pretrained(CFG['model'],num_labels=num_labels).cuda() # 模型 121 | 122 | y_pred,predictions=[],[] 123 | y_all = np.zeros((len(test_df),num_labels)) 124 | 125 | idx_model = [] 126 | model_path = f"./save_model/{CFG['model_index']}_learning/" 127 | model_files = os.listdir(model_path) 128 | model_files = [file for file in model_files if file.startswith('5fold')] 129 | for fold in range(5): 130 | sub_model_files = [file for file in model_files if int(file.split('_')[1])==fold] 131 | idx_model.append(model_path + sorted(sub_model_files)[-1]) 132 | 133 | for m in idx_model: 134 | model.load_state_dict(torch.load(m, map_location='cuda:{}'.format(CFG['device']))) 135 | y_pred = [] 136 | with torch.no_grad(): 137 | tk = tqdm(test_loader, total=len(test_loader), position=0, leave=True) 138 | for idx, (input_ids, attention_mask, 
token_type_ids) in enumerate(tk): 139 | input_ids, attention_mask, token_type_ids = input_ids.to(device), attention_mask.to( 140 | device), token_type_ids.to(device) 141 | 142 | if CFG['is_choose_layer']: 143 | output = model(input_ids, attention_mask, token_type_ids) 144 | else: 145 | output = model(input_ids, attention_mask, token_type_ids).logits 146 | y_pred.extend(output.cpu().numpy()) 147 | 148 | y_all = y_all+np.array(y_pred) 149 | 150 | test_df['pred'] = y_all.argmax(1) 151 | 152 | SPLIT = '' 153 | if CFG['model_index'].startswith('k'): 154 | test_df['KnowledgeID'] = test_df['pred'].map(label_id2cate['{}_label_id2cate'.format(CFG['model_index'])]) 155 | id_result = {} 156 | for TestQuestionID, k_Level, KnowledgeID in test_df[['TestQuestionID', 'k_Level', 'KnowledgeID']].values: 157 | id_result[str(TestQuestionID)+k_Level] = KnowledgeID 158 | else: 159 | if CFG['split_science_law'] == 1: 160 | SPLIT = '_science' 161 | elif CFG['split_science_law'] == 2: 162 | SPLIT = '_law' 163 | test_df['q_Level'] = test_df['pred'].map(label_id2cate['{}{}_label_id2cate'.format(CFG['model_index'], SPLIT)]) 164 | id_result = {} 165 | for TestQuestionID, q_Level in test_df[['TestQuestionID', 'q_Level']].values: 166 | id_result[str(TestQuestionID)] = q_Level 167 | 168 | pickle.dump(id_result, open('./result/sub_result/{}_result.pkl'.format(CFG['model_index']), 'wb')) 169 | 170 | 171 | def main(): 172 | for model_index in ['k1', 'k2', 'k3', 'k4', 'k5', 'q']: 173 | sub_label = True if model_index == 'q' else False 174 | model_predict(model_index, sub_label) 175 | 176 | 177 | if __name__ == '__main__': 178 | main() -------------------------------------------------------------------------------- /code/train_model.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author : Stefan 3 | # Time : 2021-09-23 4 | # Desc : 模型训练 5 | 6 | import pandas as pd 7 | import numpy as np 8 | import torch 9 | import torch.nn as nn 10 | import torch.nn.functional as F 11 | from torch.utils.data import Dataset, DataLoader 12 | from torch.cuda.amp import autocast, GradScaler 13 | from tqdm import tqdm 14 | import random 15 | import os 16 | import time 17 | import pickle 18 | from sklearn.metrics import f1_score 19 | from sklearn.model_selection import * 20 | from transformers import * 21 | from torch.autograd import Variable 22 | from attack import FGM, PGD 23 | import io, sys 24 | sys.path.append('..') 25 | #改变标准输出的默认编码 26 | sys.stdout=io.TextIOWrapper(sys.stdout.buffer,encoding='utf8') 27 | from dataProcess.dataEnhance import eda_data 28 | from utils.Logger import initlog 29 | 30 | 31 | CFG = { #训练的参数配置 32 | 'fold_num': 5, # 五折交叉验证 33 | 'seed': 2, 34 | 'lr': 3e-5, #学习率 35 | 'model': './save_model/pretrain_model_parameter/chinese_roberta_wwm_ext_pytorch', # 网络不好用这个 36 | # 'model': 'hfl/chinese-roberta-wwm-ext', # 也可以直接从huggingface下载 37 | 'max_len': 512, #文本截断的最大长度 38 | 'epochs': 8, 39 | 'train_bs': 10, #batch_size,可根据自己的显存调整 40 | 'valid_bs': 16, 41 | 'num_workers': 0, 42 | 'accum_iter': 1, #梯度累积,相当于将batch_size*2 43 | 'weight_decay': 2e-4, #权重衰减,防止过拟合 44 | 'device': 0, 45 | 'model_index': 'q', 46 | 'is_enhance': True, 47 | 'sub_label': True, 48 | 'add_stage1_train': True, 49 | 'attack_mode': 'fgm', 50 | 'split_science_law': 0, # 是否把数据划分为文理科;0表示不做划分,1表示取理科数据,2表示取文科数据 51 | 'is_choose_layer': False, 52 | 'layers': [0, -1], # [-4, -3, -2, -1], 53 | 'pooling': 'mean', 54 | 'eda_repeat_n': 5, 55 | } 56 | 57 | log_path = 'train.log' 58 | logger = initlog(logfile= 
"./logs/" + log_path) 59 | logger.info('pid:{}'.format(os.getpid())) 60 | 61 | def seed_everything(seed): 62 | random.seed(seed) 63 | os.environ['PYTHONHASHSEED'] = str(seed) 64 | np.random.seed(seed) 65 | torch.manual_seed(seed) 66 | torch.cuda.manual_seed(seed) 67 | torch.cuda.manual_seed_all(seed) 68 | torch.backends.cudnn.deterministic = True 69 | torch.backends.cudnn.benchmark = False 70 | 71 | seed_everything(CFG['seed']) #固定随机种子 72 | 73 | torch.cuda.set_device(CFG['device']) 74 | device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') 75 | 76 | def model_train(model_index, is_enhance=False, sub_label=False, add_stage1_train=False): 77 | CFG['model_index'] = model_index 78 | CFG['is_enhance'] = is_enhance 79 | CFG['sub_label'] = sub_label 80 | CFG['add_stage1_train'] = add_stage1_train 81 | 82 | if os.path.exists('./datasets/tmp_data/label_id2cat.pkl'): 83 | exist_label_id2cat = pickle.load(open('./datasets/tmp_data/label_id2cat.pkl', 'rb')) 84 | else: 85 | exist_label_id2cat = {} 86 | 87 | if CFG['model_index'].startswith('k'): 88 | train_df = pd.read_csv("./datasets/tmp_data/{}_train_data.csv".format(CFG['model_index'])) 89 | train_df = train_df.sample(frac=1) 90 | print('len of train data:{}'.format(len(train_df))) 91 | train_df = train_df[['type', 'Content', 'q_Level', 'Analysis', 'options', 'TestQuestionID', 'k_Level', 'KnowledgeID']] 92 | train_df['Analysis'] = train_df['Analysis'].fillna('') 93 | num_labels = len(train_df['KnowledgeID'].unique()) 94 | 95 | if '{}_label_id2cate'.format(CFG['model_index']) in exist_label_id2cat: 96 | label_id2cate = exist_label_id2cat['{}_label_id2cate'.format(CFG['model_index'])] 97 | else: 98 | label_id2cate = dict(enumerate(train_df['KnowledgeID'].unique())) 99 | exist_label_id2cat['{}_label_id2cate'.format(CFG['model_index'])] = label_id2cate 100 | pickle.dump(exist_label_id2cat, open('./datasets/tmp_data/label_id2cat.pkl', 'wb')) 101 | 102 | label_cate2id = {value: key for key, value in label_id2cate.items()} 103 | train_df['label'] = train_df['KnowledgeID'].map(label_cate2id) 104 | 105 | 106 | elif CFG['model_index'].startswith('q'): 107 | train_df = pd.read_csv("./datasets/tmp_data/new_train_data.csv") 108 | 109 | # tmp for program 110 | if CFG['split_science_law'] == 1: 111 | train_df = train_df[train_df['TestQuestionID'] <= 4854] 112 | SPLIT = '_science' 113 | elif CFG['split_science_law'] == 2: 114 | train_df = train_df[train_df['TestQuestionID'] > 4854] 115 | CFG['add_stage1_train'] = False 116 | SPLIT = '_law' 117 | else: 118 | SPLIT = '' 119 | 120 | train_df = train_df.drop_duplicates(subset='TestQuestionID', keep='first').reset_index() 121 | if CFG['sub_label']: 122 | train_df = train_df[train_df['q_Level'].isin([1,2,3,4,5])].reset_index() 123 | 124 | 125 | print('len of train data:{}'.format(len(train_df))) 126 | train_df = train_df[['type', 'Content', 'q_Level', 'Analysis', 'options', 'TestQuestionID', 'k_Level', 'KnowledgeID']] 127 | train_df['Analysis'] = train_df['Analysis'].fillna('') 128 | num_labels = len(train_df['q_Level'].unique()) 129 | 130 | if '{}{}_label_id2cate'.format(CFG['model_index'], SPLIT) in exist_label_id2cat: 131 | label_id2cate = exist_label_id2cat['{}{}_label_id2cate'.format(CFG['model_index'], SPLIT)] 132 | else: 133 | label_id2cate = dict(enumerate(train_df['q_Level'].unique())) 134 | exist_label_id2cat['{}{}_label_id2cate'.format(CFG['model_index'], SPLIT)] = label_id2cate 135 | pickle.dump(exist_label_id2cat, open(f'./datasets/{SUB}label_id2cat.pkl', 'wb')) 136 | label_cate2id = {value: 
key for key, value in label_id2cate.items()} 137 | train_df['label'] = train_df['q_Level'].map(label_cate2id) 138 | 139 | # add stage1 train data 140 | if CFG['add_stage1_train']: 141 | stage1_train_df = pd.read_csv("./datasets/tmp_data/sub_train_stage1_data.csv") 142 | stage1_train_df = stage1_train_df.drop_duplicates(subset='TestQuestionID', keep='first').reset_index() 143 | stage1_train_df = stage1_train_df[['type', 'Content', 'q_Level', 'Analysis', 'options', 'TestQuestionID', 'k_Level', 'KnowledgeID']] 144 | stage1_train_df['Analysis'] = stage1_train_df['Analysis'].fillna('') 145 | stage1_train_df['label'] = stage1_train_df['q_Level'].map(label_cate2id) 146 | 147 | 148 | class MyDataset(Dataset): 149 | def __init__(self, dataframe): 150 | self.df = dataframe 151 | 152 | def __len__(self): 153 | return len(self.df) 154 | 155 | def __getitem__(self, idx): 156 | 157 | label = self.df.label.values[idx] 158 | type = self.df.type.values[idx].replace('\r', '').replace('\n', '') 159 | content = self.df.Content.values[idx].replace('\r', '').replace('\n', '') 160 | analysis = self.df.Analysis.values[idx].replace('\r', '').replace('\n', '') 161 | options = self.df.options.values[idx].replace('\r', '').replace('\n', '') 162 | question = (type + '[SEP]' + content + '[SEP]' + analysis) 163 | return question, options, label 164 | 165 | def collate_fn(data): 166 | input_ids, attention_mask, token_type_ids, label = [], [], [], [] 167 | for x in data: 168 | text = tokenizer(x[0], text_pair=x[1], padding='max_length', truncation=True, max_length=CFG['max_len'], return_tensors='pt') 169 | input_ids.append(text['input_ids'].squeeze().tolist()) 170 | attention_mask.append(text['attention_mask'].squeeze().tolist()) 171 | token_type_ids.append(text['token_type_ids'].squeeze().tolist()) 172 | label.append(x[-1]) 173 | input_ids = torch.tensor(input_ids) 174 | attention_mask = torch.tensor(attention_mask) 175 | token_type_ids = torch.tensor(token_type_ids) 176 | label = torch.tensor(label) 177 | return input_ids, attention_mask, token_type_ids, label 178 | 179 | class AverageMeter: # 为了tqdm实时显示loss和acc 180 | def __init__(self): 181 | self.reset() 182 | 183 | def reset(self): 184 | self.val = 0 185 | self.avg = 0 186 | self.sum = 0 187 | self.count = 0 188 | 189 | def update(self, val, n=1): 190 | self.val = val 191 | self.sum += val * n 192 | self.count += n 193 | self.avg = self.sum / self.count 194 | 195 | 196 | class SelfNet(nn.Module): 197 | def __init__(self, model_name, num_labels): 198 | super(SelfNet,self).__init__() 199 | self.autoModel=BertModel.from_pretrained(model_name) 200 | self.classifier=nn.Linear(768, num_labels) 201 | self.dropout = nn.Dropout(0.1) 202 | 203 | def forward(self,input_ids, attention_mask, token_type_ids): 204 | outputs=self.autoModel(input_ids, attention_mask, token_type_ids, output_hidden_states=True) 205 | encoded_layers = outputs['hidden_states'] 206 | hidden_state = [] 207 | for l in CFG['layers']: 208 | hidden_state.append(encoded_layers[l][:, 0].unsqueeze(1)) 209 | hidden_state = torch.cat(hidden_state, dim=1) 210 | 211 | if CFG['pooling'] == 'max': 212 | hidden_state, _ = torch.max(hidden_state, dim=1) 213 | elif CFG['pooling'] == 'mean': 214 | hidden_state = torch.mean(hidden_state, dim=1) 215 | else: 216 | hidden_state = hidden_state.view(hidden_state.size(0), -1) 217 | 218 | hidden_state = self.dropout(hidden_state) 219 | output = self.classifier(hidden_state) 220 | return output 221 | 222 | 223 | def train_model(model, fgm,pgd,train_loader): # 训练一个epoch 224 | 
model.train() 225 | 226 | losses = AverageMeter() 227 | accs = AverageMeter() 228 | 229 | optimizer.zero_grad() 230 | 231 | tk = tqdm(train_loader, total=len(train_loader), position=0, leave=True) 232 | 233 | for step, (input_ids, attention_mask, token_type_ids, label) in enumerate(tk): 234 | input_ids, attention_mask, token_type_ids, y = input_ids.to(device), attention_mask.to( 235 | device), token_type_ids.to(device), label.to(device).long() 236 | 237 | with autocast(): # 使用半精度训练 238 | 239 | if CFG['is_choose_layer']: 240 | output = model(input_ids, attention_mask, token_type_ids) 241 | loss = criterion(output, y) / CFG['accum_iter'] 242 | else: 243 | output = model(input_ids, attention_mask, token_type_ids)[0] 244 | loss = criterion(output, y) / CFG['accum_iter'] 245 | 246 | scaler.scale(loss).backward() 247 | 248 | if CFG['attack_mode'] == 'fgm': 249 | fgm.attack() # 在embedding上添加对抗扰动 250 | else: 251 | pgd.attack() 252 | 253 | if CFG['is_choose_layer']: 254 | output2 = model(input_ids, attention_mask, token_type_ids) 255 | loss2 = criterion(output2, y) / CFG['accum_iter'] 256 | else: 257 | output2 = model(input_ids, attention_mask, token_type_ids)[0] 258 | loss2 = criterion(output2, y)/ CFG['accum_iter'] 259 | 260 | scaler.scale(loss2).backward() # 反向传播,并在正常的grad基础上,累加对抗训练的梯度 261 | if CFG['attack_mode'] == 'fgm': 262 | fgm.restore() # 恢复 embedding 参数 263 | else: 264 | pgd.restore() 265 | 266 | if ((step + 1) % CFG['accum_iter'] == 0) or ((step + 1) == len(train_loader)): # 梯度累加 267 | scaler.step(optimizer) 268 | scaler.update() 269 | optimizer.zero_grad() 270 | scheduler.step() 271 | 272 | acc = (output.argmax(1) == y).sum().item() / y.size(0) 273 | losses.update(loss.item() * CFG['accum_iter'], y.size(0)) 274 | accs.update(acc, y.size(0)) 275 | tk.set_postfix(loss=losses.avg, acc=accs.avg) 276 | 277 | return losses.avg, accs.avg 278 | 279 | 280 | def test_model(model, val_loader): # 验证 281 | model.eval() 282 | 283 | losses = AverageMeter() 284 | accs = AverageMeter() 285 | y_truth, y_pred = [], [] 286 | 287 | with torch.no_grad(): 288 | tk = tqdm(val_loader, total=len(val_loader), position=0, leave=True) 289 | for idx, (input_ids, attention_mask, token_type_ids, label) in enumerate(tk): 290 | input_ids, attention_mask, token_type_ids, y = input_ids.to(device), attention_mask.to( 291 | device), token_type_ids.to(device), label.to(device).long() 292 | 293 | if CFG['is_choose_layer']: 294 | output = model(input_ids, attention_mask, token_type_ids) 295 | loss = criterion(output, y) / CFG['accum_iter'] 296 | else: 297 | output = model(input_ids, attention_mask, token_type_ids).logits 298 | 299 | y_truth.extend(y.cpu().numpy()) 300 | y_pred.extend(output.argmax(1).cpu().numpy()) 301 | loss = criterion(output, y) 302 | acc = (output.argmax(1) == y).sum().item() / y.size(0) 303 | losses.update(loss.item(), y.size(0)) 304 | accs.update(acc, y.size(0)) 305 | 306 | tk.set_postfix(loss=losses.avg, acc=accs.avg) 307 | 308 | micro_f1 = f1_score(y_truth, y_pred, average="micro") 309 | print('evaluate micro_f1:{}'.format(round(micro_f1, 4))) 310 | return losses.avg, accs.avg, micro_f1 311 | 312 | 313 | tokenizer = BertTokenizer.from_pretrained(CFG['model']) 314 | folds = StratifiedKFold(n_splits=CFG['fold_num'], shuffle=True, random_state=CFG['seed'])\ 315 | .split(np.arange(train_df.shape[0]), train_df.label.values) #五折交叉验证 316 | 317 | for fold, (trn_idx, val_idx) in enumerate(folds): 318 | train = train_df.loc[trn_idx] 319 | val = train_df.loc[val_idx] 320 | 321 | train_set = MyDataset(train) 322 | 
val_set = MyDataset(val) 323 | 324 | train_loader = DataLoader(train_set, batch_size=CFG['train_bs'], collate_fn=collate_fn, shuffle=True, 325 | num_workers=CFG['num_workers']) 326 | val_loader = DataLoader(val_set, batch_size=CFG['valid_bs'], collate_fn=collate_fn, shuffle=False, 327 | num_workers=CFG['num_workers']) 328 | 329 | if CFG['is_choose_layer']: 330 | model = SelfNet(CFG['model'], num_labels=num_labels).to(device) 331 | else: 332 | model = BertForSequenceClassification.from_pretrained(CFG['model'],num_labels=num_labels).to(device) # 模型 333 | 334 | scaler = GradScaler() 335 | optimizer = AdamW(model.parameters(), lr=CFG['lr'], weight_decay=CFG['weight_decay']) # AdamW优化器 336 | criterion = nn.CrossEntropyLoss() 337 | scheduler = get_cosine_schedule_with_warmup(optimizer, len(train_loader) // CFG['accum_iter'], 338 | CFG['epochs'] * len(train_loader) // CFG['accum_iter']) 339 | # get_cosine_schedule_with_warmup策略,学习率先warmup一个epoch,然后cos式下降 340 | fgm = FGM(model) 341 | pgd = PGD(model) 342 | save_path = './save_model/{}_learning/'.format(CFG['model_index']) 343 | 344 | if not os.path.exists(save_path): 345 | os.makedirs(save_path) 346 | best_f1 = 0.0 347 | for epoch in range(CFG['epochs']): 348 | if CFG['is_enhance']: 349 | train_eda = eda_data(train, repeat_n=CFG['eda_repeat_n']) 350 | if CFG['add_stage1_train']: 351 | train_eda = pd.concat([train_eda, stage1_train_df]) 352 | train_set = MyDataset(train_eda) 353 | elif CFG['add_stage1_train']: 354 | train_addstage1 = pd.concat([train, stage1_train_df]) 355 | train_set = MyDataset(train_addstage1) 356 | 357 | 358 | val_loader = DataLoader(val_set, batch_size=CFG['valid_bs'], collate_fn=collate_fn, shuffle=False, 359 | num_workers=CFG['num_workers']) 360 | train_loader = DataLoader(train_set, batch_size=CFG['train_bs'], collate_fn=collate_fn, shuffle=True, 361 | num_workers=CFG['num_workers']) 362 | time.sleep(0.2) 363 | 364 | train_loss, train_acc = train_model(model, fgm, pgd, train_loader) 365 | val_loss, val_acc, F1 = test_model(model, val_loader) 366 | logger.info(f'fold:{fold}, epoch:{epoch}, train_loss:{train_loss:.4f}, train_acc:{train_acc:.4f}, val_loss:{val_loss:.4f}, val_f1:{F1:.4f}') 367 | if F1 > best_f1 and epoch >= 3: 368 | best_f1 = F1 369 | save_model_path = save_path + '5fold_{}_{}_fgm_{}.pt'.format(fold,epoch,round(F1,3)) 370 | if CFG['split_science_law'] > 0: 371 | prefix = 'science' if CFG['split_science_law'] == 1 else 'law' 372 | save_model_path = save_path + '{}_5fold_{}_{}_fgm_{}.pt'.format(prefix,fold,epoch,round(F1,3)) 373 | torch.save(model.state_dict(), save_model_path) 374 | 375 | 376 | def main(): 377 | for model_index in ['k1', 'k2', 'k3', 'k4', 'k5', 'q']: 378 | sub_label = True if model_index == 'q' else False 379 | is_enhance = True if model_index in ['k5', 'q'] else False 380 | add_stage1_train = True if model_index == 'q' else False 381 | model_train(model_index, is_enhance, sub_label, add_stage1_train) 382 | 383 | 384 | if __name__ == '__main__': 385 | main() -------------------------------------------------------------------------------- /code/utils/Logger.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | def initlog(logfile): 4 | logger = logging.getLogger() # 实例化一个logger对象 5 | logger.setLevel(logging.INFO) # 设置初始显示级别 6 | if (len(logger.handlers) == 0): # 只创建一个句柄,当句柄存在时不创建,防止同个进程多次调用initlog()时相同日志信息重复写入; 7 | # 创建一个文件句柄 8 | file_handle = logging.FileHandler(logfile, encoding="UTF-8") 9 | # 创建一个流句柄 10 | stream_handle = 
logging.StreamHandler() 11 | # 创建一个输出格式 12 | fmt = logging.Formatter('[%(asctime)s] %(levelname)s [%(funcName)s: %(filename)s, %(lineno)d] %(message)s', 13 | datefmt="%a, %d %b %Y %H:%M:%S") 14 | file_handle.setFormatter(fmt) # 文件句柄设置格式 15 | stream_handle.setFormatter(fmt) # 流句柄设置格式 16 | logger.addHandler(file_handle) # logger对象绑定文件句柄 17 | logger.addHandler(stream_handle) # logger对象绑定流句柄 18 | return logger -------------------------------------------------------------------------------- /code/utils/__pycache__/Logger.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/utils/__pycache__/Logger.cpython-36.pyc -------------------------------------------------------------------------------- /code/utils/__pycache__/Logger.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/utils/__pycache__/Logger.cpython-38.pyc -------------------------------------------------------------------------------- /code/utils/__pycache__/tool.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/code/utils/__pycache__/tool.cpython-36.pyc -------------------------------------------------------------------------------- /datasets/tmp_data/label_id2cat.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/sfeng-m/xf-knowledgeClassify2021-top2/6d8674ecb985f3620dbe11776cbd6a2115ed06dc/datasets/tmp_data/label_id2cat.pkl -------------------------------------------------------------------------------- /logs/train.log: -------------------------------------------------------------------------------- 1 | [Fri, 24 Sep 2021 08:57:58] INFO [: train_model.py, 59] pid:41232 2 | [Fri, 24 Sep 2021 08:58:09] INFO [acquire: filelock.py, 274] Lock 140365409771416 acquired on /root/.cache/huggingface/transformers/ebc33cec9cd4890c20bd3b688fbf8e907167e0e2f209b801b3159123cd4630e4.d863eb12d1b0d00e5d41e9eb0d41914e4993c03e6de69e67bc10c79818f5fd4d.lock 3 | [Fri, 24 Sep 2021 08:58:12] INFO [release: filelock.py, 318] Lock 140365409771416 released on /root/.cache/huggingface/transformers/ebc33cec9cd4890c20bd3b688fbf8e907167e0e2f209b801b3159123cd4630e4.d863eb12d1b0d00e5d41e9eb0d41914e4993c03e6de69e67bc10c79818f5fd4d.lock 4 | [Fri, 24 Sep 2021 09:06:51] INFO [: train_model.py, 59] pid:42165 5 | [Fri, 24 Sep 2021 09:07:01] INFO [acquire: filelock.py, 274] Lock 139685739964176 acquired on /root/.cache/huggingface/transformers/ebc33cec9cd4890c20bd3b688fbf8e907167e0e2f209b801b3159123cd4630e4.d863eb12d1b0d00e5d41e9eb0d41914e4993c03e6de69e67bc10c79818f5fd4d.lock 6 | [Fri, 24 Sep 2021 09:08:46] INFO [release: filelock.py, 318] Lock 139685739964176 released on /root/.cache/huggingface/transformers/ebc33cec9cd4890c20bd3b688fbf8e907167e0e2f209b801b3159123cd4630e4.d863eb12d1b0d00e5d41e9eb0d41914e4993c03e6de69e67bc10c79818f5fd4d.lock 7 | [Fri, 24 Sep 2021 09:09:02] INFO [: train_model.py, 59] pid:42505 8 | [Fri, 24 Sep 2021 09:11:02] INFO [: train_model.py, 59] pid:43019 9 | [Fri, 24 Sep 2021 09:12:19] INFO [model_train: train_model.py, 366] fold:0, epoch:0, train_loss:1.4406, train_acc:0.3084, val_loss:1.2862, val_f1:0.4351 
10 | [Fri, 24 Sep 2021 09:13:30] INFO [model_train: train_model.py, 366] fold:0, epoch:1, train_loss:1.1786, train_acc:0.4448, val_loss:1.2133, val_f1:0.4481 11 | [Fri, 24 Sep 2021 09:14:40] INFO [model_train: train_model.py, 366] fold:0, epoch:2, train_loss:1.0676, train_acc:0.5260, val_loss:1.2042, val_f1:0.4675 12 | [Fri, 24 Sep 2021 09:15:50] INFO [model_train: train_model.py, 366] fold:0, epoch:3, train_loss:0.9287, train_acc:0.6429, val_loss:1.0872, val_f1:0.5260 13 | [Fri, 24 Sep 2021 09:17:05] INFO [model_train: train_model.py, 366] fold:0, epoch:4, train_loss:0.7120, train_acc:0.7549, val_loss:1.1389, val_f1:0.5260 14 | [Fri, 24 Sep 2021 09:18:15] INFO [model_train: train_model.py, 366] fold:0, epoch:5, train_loss:0.5486, train_acc:0.8312, val_loss:1.2047, val_f1:0.5065 15 | [Fri, 24 Sep 2021 10:06:04] INFO [: train_model.py, 59] pid:52700 16 | [Fri, 24 Sep 2021 10:07:14] INFO [: train_model.py, 59] pid:53083 17 | [Fri, 24 Sep 2021 10:07:22] INFO [load: utils.py, 431] loading Word2Vec object from ./code/dataProcess/save_w2v_model/w2v_model.md 18 | [Fri, 24 Sep 2021 10:07:23] INFO [_load_specials: utils.py, 465] loading wv recursively from ./code/dataProcess/save_w2v_model/w2v_model.md.wv.* with mmap=None 19 | [Fri, 24 Sep 2021 10:07:23] INFO [_load_specials: utils.py, 503] setting ignored attribute vectors_norm to None 20 | [Fri, 24 Sep 2021 10:07:23] INFO [_load_specials: utils.py, 465] loading vocabulary recursively from ./code/dataProcess/save_w2v_model/w2v_model.md.vocabulary.* with mmap=None 21 | [Fri, 24 Sep 2021 10:07:23] INFO [_load_specials: utils.py, 465] loading trainables recursively from ./code/dataProcess/save_w2v_model/w2v_model.md.trainables.* with mmap=None 22 | [Fri, 24 Sep 2021 10:07:23] INFO [_load_specials: utils.py, 503] setting ignored attribute cum_table to None 23 | [Fri, 24 Sep 2021 10:07:23] INFO [load: utils.py, 437] loaded ./code/dataProcess/save_w2v_model/w2v_model.md 24 | [Fri, 24 Sep 2021 10:07:23] DEBUG [initialize: __init__.py, 113] Building prefix dict from the default dictionary ... 25 | [Fri, 24 Sep 2021 10:07:23] DEBUG [initialize: __init__.py, 133] Loading model from cache /tmp/jieba.cache 26 | [Fri, 24 Sep 2021 10:07:24] DEBUG [initialize: __init__.py, 165] Loading model cost 1.334 seconds. 27 | [Fri, 24 Sep 2021 10:07:24] DEBUG [initialize: __init__.py, 166] Prefix dict has been built successfully. 
28 | [Fri, 24 Sep 2021 10:07:24] INFO [init_sims: keyedvectors.py, 1353] precomputing L2-norms of word weight vectors
29 | [Fri, 24 Sep 2021 10:10:31] INFO [<module>: train_model.py, 59] pid:22729
30 | [Fri, 24 Sep 2021 10:10:41] INFO [load: utils.py, 431] loading Word2Vec object from ./code/dataProcess/save_w2v_model/w2v_model.md
31 | [Fri, 24 Sep 2021 10:10:42] INFO [_load_specials: utils.py, 465] loading wv recursively from ./code/dataProcess/save_w2v_model/w2v_model.md.wv.* with mmap=None
32 | [Fri, 24 Sep 2021 10:10:42] INFO [_load_specials: utils.py, 503] setting ignored attribute vectors_norm to None
33 | [Fri, 24 Sep 2021 10:10:42] INFO [_load_specials: utils.py, 465] loading vocabulary recursively from ./code/dataProcess/save_w2v_model/w2v_model.md.vocabulary.* with mmap=None
34 | [Fri, 24 Sep 2021 10:10:42] INFO [_load_specials: utils.py, 465] loading trainables recursively from ./code/dataProcess/save_w2v_model/w2v_model.md.trainables.* with mmap=None
35 | [Fri, 24 Sep 2021 10:10:42] INFO [_load_specials: utils.py, 503] setting ignored attribute cum_table to None
36 | [Fri, 24 Sep 2021 10:10:42] INFO [load: utils.py, 437] loaded ./code/dataProcess/save_w2v_model/w2v_model.md
37 | [Fri, 24 Sep 2021 10:10:42] DEBUG [initialize: __init__.py, 113] Building prefix dict from the default dictionary ...
38 | [Fri, 24 Sep 2021 10:10:42] DEBUG [initialize: __init__.py, 133] Loading model from cache /tmp/jieba.cache
39 | [Fri, 24 Sep 2021 10:10:43] DEBUG [initialize: __init__.py, 165] Loading model cost 1.029 seconds.
40 | [Fri, 24 Sep 2021 10:10:43] DEBUG [initialize: __init__.py, 166] Prefix dict has been built successfully.
41 | [Fri, 24 Sep 2021 10:10:43] INFO [init_sims: keyedvectors.py, 1353] precomputing L2-norms of word weight vectors
42 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | transformers==4.4.2
2 | matplotlib==3.1.1
3 | numpy==1.17.0
4 | jieba==0.42.1
5 | gensim==3.8.3
6 | pandas==1.1.2
7 | tqdm==4.46.0
8 | python_Levenshtein==0.12.2
9 | scikit_learn==0.24.2
10 | translators==4.9.5
11 | 
--------------------------------------------------------------------------------
/result/result.csv:
--------------------------------------------------------------------------------
1 | index,TestQuestionID,KnowledgeID,q_Level 2 | 0,4839,16,3 3 | 1,5122,142,2 4 | 2,4241,4,1 5 | 3,5362,36,1 6 | 4,5160,16,1 7 | 5,3930,27,1 8 | 6,782,24,2 9 | 7,2151,24,2 10 | 8,2952,36,3 11 | 9,5181,36,1 12 | 10,4437,19,1 13 | 11,4554,20,1 14 | 12,379,49,1 15 | 13,5389,28,1 16 | 14,5927,525,-1 17 | 15,1660,36,3 18 | 16,4440,223,1 19 | 17,5363,424,1 20 | 18,1770,4,1 21 | 19,4896,464,2 22 | 20,4240,127,1 23 | 21,1833,120,1 24 | 22,3839,51,-1 25 | 23,4725,36,3 26 | 24,3785,17,3 27 | 25,5298,416,3 28 | 26,2667,5,1 29 | 27,4829,315,-1 30 | 28,1565,135,-1 31 | 29,1225,24,1 32 | 30,5184,434,1 33 | 31,3447,49,3 34 | 32,2128,16,1 35 | 33,527,11,1 36 | 34,2887,137,2 37 | 35,1598,9,1 38 | 36,3924,8,1 39 | 37,1090,9,1 40 | 38,782,25,2 41 | 39,5050,429,2 42 | 40,5160,452,1 43 | 41,2980,89,1 44 | 42,1772,9,2 45 | 43,3186,4,3 46 | 44,813,247,1 47 | 45,1348,49,1 48 | 46,5627,540,-1 49 | 47,4923,36,1 50 | 48,3838,16,1 51 | 49,4887,16,1 52 | 50,2501,36,-1 53 | 51,2690,6,3 54 | 52,241,48,1 55 | 53,4924,434,2 56 | 54,2365,25,3 57 | 55,49,9,1 58 | 56,4807,230,1 59 | 57,2964,17,-1 60 | 58,1646,25,2 61 | 59,4839,19,3 62 | 60,2884,45,2 63 | 61,2144,310,1 64 | 62,4028,14,2 65 | 63,1624,172,1 66 | 
64,2501,12,-1 67 | 65,5117,456,2 68 | 66,5070,419,2 69 | 67,5372,16,1 70 | 68,4867,9,1 71 | 69,3013,17,-1 72 | 70,1374,24,1 73 | 71,2207,27,1 74 | 72,962,4,3 75 | 73,377,16,1 76 | 74,519,12,1 77 | 75,4391,9,1 78 | 76,5653,541,-1 79 | 77,2535,12,-1 80 | 78,3082,16,-1 81 | 79,3112,24,2 82 | 80,4735,9,1 83 | 81,2465,25,3 84 | 82,1677,203,1 85 | 83,2432,9,1 86 | 84,5179,28,1 87 | 85,4379,9,1 88 | 86,773,114,1 89 | 87,2229,5,1 90 | 88,1296,279,1 91 | 89,5349,36,3 92 | 90,5035,476,1 93 | 91,4399,49,1 94 | 92,5550,524,-1 95 | 93,5032,142,1 96 | 94,3751,17,2 97 | 95,3126,36,1 98 | 96,702,24,1 99 | 97,4344,338,1 100 | 98,1625,227,3 101 | 99,1122,16,1 102 | 100,3620,20,2 103 | 101,1952,49,1 104 | 102,2733,8,1 105 | 103,5340,36,2 106 | 104,4475,24,2 107 | 105,2317,260,1 108 | 106,5281,36,2 109 | 107,5220,17,3 110 | 108,5279,419,2 111 | 109,3986,123,3 112 | 110,3101,209,3 113 | 111,5070,36,2 114 | 112,4266,294,1 115 | 113,1041,115,1 116 | 114,2687,8,1 117 | 115,306,8,2 118 | 116,1755,9,3 119 | 117,3096,123,3 120 | 118,2952,11,3 121 | 119,5374,28,1 122 | 120,2296,8,1 123 | 121,5203,36,3 124 | 122,3917,335,1 125 | 123,4361,9,1 126 | 124,2531,66,-1 127 | 125,2957,279,2 128 | 126,5315,450,1 129 | 127,4600,9,1 130 | 128,5533,524,-1 131 | 129,1079,229,1 132 | 130,5298,17,3 133 | 131,5227,422,1 134 | 132,5134,28,1 135 | 133,2974,325,3 136 | 134,1660,11,3 137 | 135,5418,499,2 138 | 136,1144,174,1 139 | 137,3627,11,1 140 | 138,5099,433,1 141 | 139,5127,425,1 142 | 140,1451,36,1 143 | 141,1487,165,1 144 | 142,3013,157,-1 145 | 143,5392,417,2 146 | 144,371,8,1 147 | 145,5024,424,1 148 | 146,3392,16,-1 149 | 147,5711,543,-1 150 | 148,3463,9,1 151 | 149,5116,28,1 152 | 150,4903,429,1 153 | 151,5525,528,-1 154 | 152,2462,9,1 155 | 153,335,36,1 156 | 154,2554,19,2 157 | 155,3702,5,5 158 | 156,3836,15,1 159 | 157,5397,503,-1 160 | 158,3101,13,3 161 | 159,1803,17,3 162 | 160,3082,12,-1 163 | 161,5299,487,1 164 | 162,311,84,1 165 | 163,2540,20,3 166 | 164,5091,419,3 167 | 165,5296,421,2 168 | 166,1378,48,1 169 | 167,2740,198,1 170 | 168,1880,13,-1 171 | 169,5670,541,-1 172 | 170,4949,489,1 173 | 171,1823,48,1 174 | 172,1621,48,1 175 | 173,2387,4,1 176 | 174,3412,17,2 177 | 175,1492,3,1 178 | 176,2880,9,1 179 | 177,3368,233,2 180 | 178,5571,525,-1 181 | 179,5122,422,2 182 | 180,1604,4,1 183 | 181,5230,425,1 184 | 182,5443,542,-1 185 | 183,917,24,3 186 | 184,5320,16,1 187 | 185,3533,148,1 188 | 186,5403,496,-1 189 | 187,3388,36,1 190 | 188,1070,84,1 191 | 189,5228,424,2 192 | 190,5476,511,-1 193 | 191,1391,24,2 194 | 192,1660,12,3 195 | 193,2207,130,1 196 | 194,5439,508,-1 197 | 195,2321,14,2 198 | 196,2359,20,2 199 | 197,2747,82,2 200 | 198,5565,532,-1 201 | 199,5335,28,1 202 | 200,209,5,1 203 | 201,5288,417,2 204 | 202,1767,108,1 205 | 203,1095,19,1 206 | 204,5528,524,-1 207 | 205,312,4,1 208 | 206,1983,16,1 209 | 207,5409,496,-1 210 | 208,2359,17,2 211 | 209,2501,93,-1 212 | 210,1942,378,3 213 | 211,1541,185,1 214 | 212,5655,515,-1 215 | 213,2365,106,3 216 | 214,3837,13,1 217 | 215,3879,234,1 218 | 216,359,19,1 219 | 217,5058,450,1 220 | 218,4033,12,1 221 | 219,1383,13,-1 222 | 220,353,16,-1 223 | 221,4936,16,1 224 | 222,1679,11,-1 225 | 223,3202,4,1 226 | 224,1441,36,-1 227 | 225,4485,15,1 228 | 226,5451,509,-1 229 | 227,3036,93,-1 230 | 228,4199,16,1 231 | 229,1399,9,2 232 | 230,3351,132,1 233 | 231,3468,30,1 234 | 232,2083,4,1 235 | 233,5077,16,1 236 | 234,278,369,1 237 | 235,2207,20,1 238 | 236,2358,8,1 239 | 237,3129,11,3 240 | 238,5165,419,3 241 | 239,4313,4,1 242 | 240,454,156,1 243 | 241,4683,52,1 244 | 
242,3781,409,3 245 | 243,4875,28,1 246 | 244,2650,24,1 247 | 245,2034,105,2 248 | 246,1041,8,1 249 | 247,5607,525,-1 250 | 248,2439,5,1 251 | 249,2309,20,1 252 | 250,1231,20,1 253 | 251,4279,19,1 254 | 252,1872,11,1 255 | 253,5527,525,-1 256 | 254,4860,78,-1 257 | 255,3797,127,3 258 | 256,2974,49,3 259 | 257,1893,49,1 260 | 258,4967,420,1 261 | 259,1470,24,3 262 | 260,5272,476,1 263 | 261,519,16,1 264 | 262,1619,25,1 265 | 263,1699,151,1 266 | 264,920,9,1 267 | 265,599,26,1 268 | 266,3576,16,1 269 | 267,761,116,1 270 | 268,1740,16,1 271 | 269,4323,30,1 272 | 270,1366,4,1 273 | 271,1797,342,5 274 | 272,5252,422,2 275 | 273,719,8,1 276 | 274,4728,82,2 277 | 275,5450,507,-1 278 | 276,1392,16,1 279 | 277,2358,120,1 280 | 278,5088,441,1 281 | 279,1736,18,3 282 | 280,5117,16,2 283 | 281,1618,121,1 284 | 282,4346,24,2 285 | 283,4594,17,1 286 | 284,4747,24,1 287 | 285,2918,24,3 288 | 286,4813,9,1 289 | 287,4747,25,1 290 | 288,719,9,1 291 | 289,5356,28,1 292 | 290,2767,15,1 293 | 291,344,25,1 294 | 292,1231,27,1 295 | 293,5349,425,3 296 | 294,5099,16,1 297 | 295,5139,429,3 298 | 296,3021,262,1 299 | 297,28,5,3 300 | 298,1983,11,1 301 | 299,5227,433,1 302 | 300,5175,481,2 303 | 301,5026,489,3 304 | 302,2721,19,3 305 | 303,3562,24,1 306 | 304,232,5,3 307 | 305,3139,9,2 308 | 306,5602,525,-1 309 | 307,5160,414,1 310 | 308,4573,9,1 311 | 309,4894,17,2 312 | 310,2108,8,1 313 | 311,3636,164,1 314 | 312,5450,542,-1 315 | 313,2379,9,1 316 | 314,5711,85,-1 317 | 315,5902,543,-1 318 | 316,5413,496,-1 319 | 317,5387,425,1 320 | 318,5224,490,3 321 | 319,198,8,1 322 | 320,4981,28,1 323 | 321,5024,28,1 324 | 322,4992,16,1 325 | 323,1790,24,1 326 | 324,2545,25,2 327 | 325,5049,28,1 328 | 326,4811,17,3 329 | 327,824,15,1 330 | 328,5116,16,1 331 | 329,3515,31,3 332 | 330,2492,24,3 333 | 331,5144,434,3 334 | 332,4322,30,-1 335 | 333,5090,433,1 336 | 334,4954,16,1 337 | 335,487,20,2 338 | 336,2429,25,2 339 | 337,1142,142,1 340 | 338,311,5,1 341 | 339,2084,9,1 342 | 340,5335,428,1 343 | 341,1378,49,1 344 | 342,4983,142,2 345 | 343,4423,19,1 346 | 344,4698,9,1 347 | 345,3864,20,2 348 | 346,5034,36,2 349 | 347,39,16,-1 350 | 348,374,49,1 351 | 349,1857,25,2 352 | 350,4556,177,1 353 | 351,2093,215,2 354 | 352,4872,25,2 355 | 353,4907,28,3 356 | 354,473,16,1 357 | 355,2702,25,2 358 | 356,4628,48,1 359 | 357,5035,16,1 360 | 358,837,5,1 361 | 359,1677,8,1 362 | 360,1005,8,1 363 | 361,1694,48,1 364 | 362,4554,14,1 365 | 363,2387,323,1 366 | 364,3636,9,1 367 | 365,3609,17,1 368 | 366,2151,25,2 369 | 367,3805,15,-1 370 | 368,4914,414,2 371 | 369,5187,36,2 372 | 370,1524,49,1 373 | 371,1438,8,1 374 | 372,1896,4,1 375 | 373,5450,509,-1 376 | 374,4273,321,3 377 | 375,5297,420,1 378 | 376,2421,25,3 379 | 377,742,49,1 380 | 378,3234,16,1 381 | 379,5435,499,-1 382 | 380,5082,28,1 383 | 381,4956,470,1 384 | 382,308,11,1 385 | 383,2902,392,1 386 | 384,4207,48,3 387 | 385,1389,8,2 388 | 386,742,48,1 389 | 387,662,8,1 390 | 388,4727,8,2 391 | 389,5400,499,-1 392 | 390,1309,249,1 393 | 391,140,8,1 394 | 392,5124,428,1 395 | 393,436,8,1 396 | 394,4955,28,1 397 | 395,4878,442,1 398 | 396,4628,49,1 399 | 397,3838,85,1 400 | 398,2123,25,2 401 | 399,2785,4,1 402 | 400,5288,16,2 403 | 401,5410,497,-1 404 | 402,1672,4,3 405 | 403,5389,419,1 406 | 404,2067,5,1 407 | 405,1070,5,1 408 | 406,5474,512,-1 409 | 407,979,174,1 410 | 408,826,9,1 411 | 409,5110,28,1 412 | 410,4372,9,1 413 | 411,1998,11,1 414 | 412,1389,103,2 415 | 413,2767,17,1 416 | 414,5165,427,3 417 | 415,5198,16,2 418 | 416,2296,9,1 419 | 417,5593,525,-1 420 | 418,2869,9,1 421 | 
419,5363,459,1 422 | 420,2321,20,2 423 | 421,1501,17,-1 424 | 422,1036,232,1 425 | 423,4971,28,1 426 | 424,4967,418,1 427 | 425,3855,19,3 428 | 426,4496,8,3 429 | 427,5112,428,1 430 | 428,5410,496,-1 431 | 429,311,4,1 432 | 430,5176,28,1 433 | 431,5062,448,4 434 | 432,3812,5,3 435 | 433,5151,16,2 436 | 434,3148,65,5 437 | 435,5256,28,1 438 | 436,3217,102,1 439 | 437,5199,28,1 440 | 438,5126,28,1 441 | 439,5476,512,-1 442 | 440,3839,25,-1 443 | 441,4275,48,1 444 | 442,4025,49,3 445 | 443,4728,24,2 446 | 444,4437,13,1 447 | 445,5024,414,1 448 | 446,2745,171,1 449 | 447,3276,12,3 450 | 448,541,27,2 451 | 449,3620,17,2 452 | 450,5902,542,-1 453 | 451,5039,424,4 454 | 452,4747,17,1 455 | 453,1968,9,1 456 | 454,2329,25,1 457 | 455,1772,8,2 458 | 456,5132,36,2 459 | 457,5228,414,2 460 | 458,1414,24,2 461 | 459,4138,11,1 462 | 460,3490,4,1 463 | 461,4967,16,1 464 | 462,2164,11,-1 465 | 463,3852,36,1 466 | 464,5383,36,3 467 | 465,5300,433,2 468 | 466,5538,524,-1 469 | 467,862,19,1 470 | 468,353,36,-1 471 | 469,5293,17,3 472 | 470,4420,16,-1 473 | 471,3841,12,1 474 | 472,1489,51,-1 475 | 473,3070,16,3 476 | 474,1968,33,1 477 | 475,3388,26,1 478 | 476,3799,12,1 479 | 477,5383,472,3 480 | 478,2162,9,1 481 | 479,1625,49,3 482 | 480,5435,496,-1 483 | 481,1944,8,1 484 | 482,1350,25,2 485 | 483,3953,51,1 486 | 484,5205,16,1 487 | 485,5186,16,1 488 | 486,3523,2,2 489 | 487,4906,28,1 490 | 488,1645,13,2 491 | 489,4067,25,3 492 | 490,129,74,3 493 | 491,2329,17,1 494 | 492,2387,5,1 495 | 493,5513,524,-1 496 | 494,2465,24,3 497 | 495,1602,9,1 498 | 496,4834,19,1 499 | 497,3336,249,1 500 | 498,5136,16,2 501 | 499,4439,13,3 502 | 500,4005,233,1 503 | 501,1264,294,1 504 | 502,5359,433,1 505 | 503,5258,16,3 506 | 504,5300,422,2 507 | 505,4482,13,5 508 | 506,5091,429,3 509 | 507,5169,463,2 510 | 508,5085,16,1 511 | 509,1599,9,2 512 | 510,1505,121,1 513 | 511,349,4,1 514 | 512,5609,525,-1 515 | 513,1749,71,1 516 | 514,704,202,3 517 | 515,5385,16,2 518 | 516,5204,471,1 519 | 517,742,145,1 520 | 518,3323,25,1 521 | 519,2740,49,1 522 | 520,5163,422,1 523 | 521,5252,36,2 524 | 522,3412,104,2 525 | 523,335,16,1 526 | 524,2554,20,2 527 | 525,344,13,1 528 | 526,2964,51,-1 529 | 527,793,4,1 530 | 528,5279,36,2 531 | 529,4839,20,3 532 | 530,1669,76,1 533 | 531,1189,75,1 534 | 532,2701,47,1 535 | 533,5374,424,1 536 | 534,4971,142,1 537 | 535,4220,115,1 538 | 536,4680,8,3 539 | 537,2309,27,1 540 | 538,1122,124,1 541 | 539,3721,188,2 542 | 540,361,19,2 543 | 541,3234,14,1 544 | 542,5083,461,2 545 | 543,1128,25,2 546 | 544,1338,16,1 547 | 545,3812,4,3 548 | 546,1872,17,1 549 | 547,2329,121,1 550 | 548,4874,36,2 551 | 549,5546,524,-1 552 | 550,4000,5,3 553 | 551,4681,4,2 554 | 552,2846,274,1 555 | 553,3234,19,1 556 | 554,4874,416,2 557 | 555,5616,512,-1 558 | 556,341,16,1 559 | 557,1614,333,1 560 | 558,2492,96,3 561 | 559,1602,8,1 562 | 560,3841,36,1 563 | 561,1777,5,1 564 | 562,1310,20,2 565 | 563,3021,9,1 566 | 564,703,122,2 567 | 565,3390,9,2 568 | 566,2099,211,2 569 | 567,1074,20,1 570 | 568,2570,48,3 571 | 569,2772,51,1 572 | 570,5132,416,2 573 | 571,5533,530,-1 574 | 572,1090,8,1 575 | 573,3070,19,3 576 | 574,4917,428,1 577 | 575,2128,27,1 578 | 576,3577,15,-1 579 | 577,1471,20,1 580 | 578,826,8,1 581 | 579,5676,542,-1 582 | 580,1566,182,1 583 | 581,5592,534,-1 584 | 582,5593,533,-1 585 | 583,2510,25,2 586 | 584,2540,13,3 587 | 585,5911,545,-1 588 | 586,1561,116,1 589 | 587,3903,16,1 590 | 588,2340,8,1 591 | 589,5594,525,-1 592 | 590,350,13,3 593 | 591,5473,512,-1 594 | 592,2464,9,1 595 | 593,5917,524,-1 596 | 
594,5928,525,-1 597 | 595,4982,36,2 598 | 596,2650,25,1 599 | 597,2964,25,-1 600 | 598,542,25,3 601 | 599,2128,20,1 602 | 600,2744,197,3 603 | 601,4894,419,2 604 | 602,4987,422,2 605 | 603,4339,8,1 606 | 604,5189,486,1 607 | 605,5316,479,3 608 | 606,4959,36,2 609 | 607,5193,441,2 610 | 608,1717,13,3 611 | 609,1740,78,1 612 | 610,491,12,-1 613 | 611,4210,369,1 614 | 612,3209,8,1 615 | 613,1142,20,1 616 | 614,884,161,1 617 | 615,3958,48,3 618 | 616,2556,8,1 619 | 617,5157,17,3 620 | 618,1168,24,2 621 | 619,826,204,1 622 | 620,5602,535,-1 623 | 621,4906,142,1 624 | 622,5494,524,-1 625 | 623,656,24,1 626 | 624,1078,166,1 627 | 625,883,8,1 628 | 626,5281,434,2 629 | 627,5177,457,2 630 | 628,4924,36,2 631 | 629,2030,110,1 632 | 630,1880,24,-1 633 | 631,1207,5,3 634 | 632,4976,36,2 635 | 633,928,17,3 636 | 634,5144,16,3 637 | 635,5236,36,1 638 | 636,4516,5,1 639 | 637,1523,48,1 640 | 638,1388,12,-1 641 | 639,3522,48,3 642 | 640,4887,416,1 643 | 641,3595,9,3 644 | 642,5070,484,2 645 | 643,1435,159,1 646 | 644,2173,9,1 647 | 645,1031,121,-1 648 | 646,1944,8,1 649 | 647,1483,48,3 650 | 648,3130,25,3 651 | 649,928,104,3 652 | 650,2351,24,1 653 | 651,1332,49,3 654 | 652,4980,16,1 655 | 653,1519,13,-1 656 | 654,5112,426,1 657 | 655,5123,424,1 658 | 656,5438,541,-1 659 | 657,5249,16,1 660 | 658,3138,48,3 661 | 659,5002,16,1 662 | 660,2114,24,2 663 | 661,5157,429,3 664 | 662,1005,9,1 665 | 663,5059,417,3 666 | 664,1505,13,1 667 | 665,5104,422,2 668 | 666,5597,535,-1 669 | 667,2637,8,1 670 | 668,3819,48,3 671 | 669,3217,4,1 672 | 670,2916,8,3 673 | 671,2842,49,1 674 | 672,2393,8,1 675 | 673,3476,49,1 676 | 674,2607,20,3 677 | 675,3082,78,-1 678 | 676,813,9,1 679 | 677,1348,48,1 680 | 678,5063,434,1 681 | 679,1374,170,1 682 | 680,5184,16,1 683 | 681,210,77,1 684 | 682,1893,48,1 685 | 683,5220,474,3 686 | 684,3900,9,1 687 | 685,2143,331,1 688 | 686,1356,8,1 689 | 687,2330,5,1 690 | 688,519,179,1 691 | 689,5102,429,1 692 | 690,4361,187,1 693 | 691,1624,9,1 694 | 692,5282,36,2 695 | 693,5189,142,1 696 | 694,1348,127,1 697 | 695,3800,338,1 698 | 696,5283,429,2 699 | 697,826,8,1 700 | 698,1539,8,1 701 | 699,3886,9,1 702 | 700,3219,247,1 703 | 701,1258,171,1 704 | 702,5163,16,1 705 | 703,3390,8,2 706 | 704,339,49,3 707 | 705,5187,16,2 708 | 706,4894,429,2 709 | 707,1624,9,1 710 | 708,3875,36,-1 711 | 709,5655,542,-1 712 | 710,235,103,1 713 | 711,3410,9,3 714 | 712,5588,524,-1 715 | 713,1561,8,1 716 | 714,2980,25,1 717 | 715,4575,193,3 718 | 716,635,36,1 719 | 717,16,4,1 720 | 718,1956,101,1 721 | 719,2462,8,1 722 | 720,5406,496,-1 723 | 721,1983,15,1 724 | 722,4327,12,-1 725 | 723,3037,48,1 726 | 724,1492,5,1 727 | 725,500,49,1 728 | 726,5213,17,1 729 | 727,3545,9,1 730 | 728,4785,25,2 731 | 729,902,14,1 732 | 730,2955,36,1 733 | 731,5476,513,-1 734 | 732,1599,385,2 735 | 733,5118,16,2 736 | 734,4907,424,3 737 | 735,5293,16,3 738 | 736,454,48,1 739 | 737,2003,279,2 740 | 738,5067,434,2 741 | 739,4066,1,2 742 | 740,3946,9,1 743 | 741,3575,16,1 744 | 742,3922,25,2 745 | 743,3504,17,2 746 | 744,3770,16,1 747 | 745,5333,429,2 748 | 746,5458,512,-1 749 | 747,5116,479,1 750 | 748,2162,8,1 751 | 749,4987,433,2 752 | 750,5612,525,-1 753 | 751,3095,8,1 754 | 752,827,9,1 755 | 753,2766,49,1 756 | 754,4947,429,2 757 | 755,4534,25,2 758 | 756,3384,13,-1 759 | 757,1944,331,1 760 | 758,1117,24,2 761 | 759,3246,142,1 762 | 760,1968,8,1 763 | 761,5112,16,1 764 | 762,5001,28,1 765 | 763,5624,538,-1 766 | 764,704,49,3 767 | 765,1740,92,1 768 | 766,3695,71,1 769 | 767,3800,49,1 770 | 768,3943,149,1 771 | 769,2164,17,-1 772 | 
770,4735,8,1 773 | 771,2254,48,1 774 | 772,2767,25,1 775 | 773,3150,12,1 776 | 774,350,24,3 777 | 775,3562,94,1 778 | 776,3246,36,1 779 | 777,5039,28,4 780 | 778,1502,5,1 781 | 779,5191,447,1 782 | 780,4279,20,1 783 | 781,3562,25,1 784 | 782,1903,20,2 785 | 783,5067,416,2 786 | 784,5387,419,1 787 | 785,2556,173,1 788 | 786,4947,481,2 789 | 787,5494,523,-1 790 | 788,5067,36,2 791 | 789,2006,254,3 792 | 790,2739,13,1 793 | 791,2495,226,1 794 | 792,4556,8,1 795 | 793,5077,434,1 796 | 794,688,125,1 797 | 795,5144,416,3 798 | 796,3667,157,1 799 | 797,5073,16,2 800 | 798,5264,28,1 801 | 799,3374,48,1 802 | 800,5480,513,-1 803 | 801,4946,441,2 804 | 802,4021,5,3 805 | 803,5565,525,-1 806 | 804,2905,225,1 807 | 805,4317,36,3 808 | 806,5081,16,1 809 | 807,1195,24,2 810 | 808,5124,28,1 811 | 809,4312,212,1 812 | 810,5403,502,-1 813 | 811,5067,16,2 814 | 812,5268,458,1 815 | 813,3621,370,1 816 | 814,1338,25,1 817 | 815,2551,4,1 818 | 816,3799,13,1 819 | 817,4420,12,-1 820 | 818,5249,432,1 821 | 819,761,9,1 822 | 820,5122,36,2 823 | 821,3013,13,-1 824 | 822,3630,208,1 825 | 823,1070,5,1 826 | 824,3838,36,1 827 | 825,3504,71,2 828 | 826,4903,16,1 829 | 827,5159,429,1 830 | 828,5181,459,1 831 | 829,4611,295,1 832 | 830,1714,49,3 833 | 831,5387,16,1 834 | 832,3143,51,1 835 | 833,1388,36,-1 836 | 834,3993,14,5 837 | 835,5427,496,-1 838 | 836,4896,16,2 839 | 837,631,217,1 840 | 838,5337,16,4 841 | 839,760,17,2 842 | 840,2009,9,1 843 | 841,4583,27,1 844 | 842,2317,8,1 845 | 843,716,51,1 846 | 844,1483,80,3 847 | 845,4907,16,3 848 | 846,4284,49,1 849 | 847,1740,36,1 850 | 848,3070,132,3 851 | 849,893,20,1 852 | 850,3179,152,1 853 | 851,5147,486,1 854 | 852,1252,8,1 855 | 853,1685,25,2 856 | 854,431,12,-1 857 | 855,4882,494,1 858 | 856,4983,475,2 859 | 857,4150,25,2 860 | 858,473,19,1 861 | 859,712,8,2 862 | 860,984,12,-1 863 | 861,919,24,2 864 | 862,3864,17,2 865 | 863,5935,546,-1 866 | 864,1562,125,3 867 | 865,28,4,3 868 | 866,1677,203,1 869 | 867,3296,49,1 870 | 868,4821,14,2 871 | 869,3315,123,3 872 | 870,4044,20,1 873 | 871,5349,16,3 874 | 872,4653,8,1 875 | 873,5303,16,1 876 | 874,5082,417,1 877 | 875,979,27,1 878 | 876,3289,295,1 879 | 877,1660,13,3 880 | 878,3740,176,-1 881 | 879,2869,385,1 882 | 880,3953,36,1 883 | 881,3418,8,1 884 | 882,1875,49,1 885 | 883,2558,327,2 886 | 884,5288,425,2 887 | 885,4440,5,1 888 | 886,5372,434,1 889 | 887,5032,28,1 890 | 888,4139,357,1 891 | 889,4725,11,3 892 | 890,2902,8,1 893 | 891,224,17,-1 894 | 892,3186,344,3 895 | 893,5035,434,1 896 | 894,5178,424,1 897 | 895,5435,503,-1 898 | 896,5008,425,3 899 | 897,4303,48,1 900 | 898,2637,8,1 901 | 899,1158,52,1 902 | 900,1094,49,1 903 | 901,4041,105,3 904 | 902,4867,8,1 905 | 903,577,93,1 906 | 904,1618,17,1 907 | 905,1387,25,1 908 | 906,2323,48,1 909 | 907,3721,25,2 910 | 908,541,133,2 911 | 909,4941,414,2 912 | 910,3514,157,1 913 | 911,1875,48,1 914 | 912,3070,20,3 915 | 913,1668,9,5 916 | 914,5063,28,1 917 | 915,1487,9,1 918 | 916,2029,9,1 919 | 917,2907,48,3 920 | 918,4424,20,2 921 | 919,5179,424,1 922 | 920,5346,416,1 923 | 921,1387,176,1 924 | 922,5176,425,1 925 | 923,1018,49,3 926 | 924,5006,414,2 927 | 925,5549,349,-1 928 | 926,4573,8,1 929 | 927,1880,17,-1 930 | 928,1837,5,1 931 | 929,5053,36,2 932 | 930,1850,205,1 933 | 931,3823,159,1 934 | 932,2501,92,-1 935 | 933,5175,429,2 936 | 934,1453,49,3 937 | 935,5184,416,1 938 | 936,3179,8,1 939 | 937,688,49,1 940 | 938,2739,20,1 941 | 939,1074,16,1 942 | 940,1070,4,1 943 | 941,3200,8,1 944 | 942,5215,422,3 945 | 943,308,15,1 946 | 944,1219,113,2 947 | 945,3148,36,5 948 
| 946,1207,4,3 949 | 947,5186,420,1 950 | 948,147,13,-1 951 | 949,4323,36,1 952 | 950,2923,14,1 953 | 951,3533,16,1 954 | 952,5276,422,3 955 | 953,837,4,1 956 | 954,1383,14,-1 957 | 955,4242,17,1 958 | 956,4611,48,1 959 | 957,5126,16,1 960 | 958,4914,16,2 961 | 959,1198,12,1 962 | 960,5409,499,-1 963 | 961,4266,8,1 964 | 962,4879,428,1 965 | 963,5073,437,2 966 | 964,4424,16,2 967 | 965,436,177,1 968 | 966,5473,513,-1 969 | 967,2916,103,3 970 | 968,1528,67,1 971 | 969,3285,15,1 972 | 970,1723,298,1 973 | 971,832,103,1 974 | 972,3057,308,1 975 | 973,4745,4,1 976 | 974,2646,66,2 977 | 975,4860,36,-1 978 | 976,5359,142,1 979 | 977,5059,492,3 980 | 978,3667,12,1 981 | 979,603,78,1 982 | 980,4138,17,1 983 | 981,5088,417,1 984 | 982,21,16,2 985 | 983,3000,135,3 986 | 984,4903,419,1 987 | 985,3428,290,1 988 | 986,4936,419,1 989 | 987,4006,24,3 990 | 988,277,36,-1 991 | 989,527,16,1 992 | 990,1562,49,3 993 | 991,1655,13,1 994 | 992,1144,16,1 995 | 993,2650,105,1 996 | 994,491,92,-1 997 | 995,5157,488,3 998 | 996,862,17,1 999 | 997,719,9,1 1000 | 998,2083,205,1 1001 | 999,49,59,1 1002 | 1000,1561,8,1 1003 | 1001,2869,8,1 1004 | 1002,1767,108,1 1005 | 1003,5917,545,-1 1006 | 1004,5117,425,2 1007 | 1005,2316,36,1 1008 | 1006,2359,293,2 1009 | 1007,2866,245,2 1010 | 1008,5305,472,3 1011 | 1009,5296,425,2 1012 | 1010,5280,429,3 1013 | 1011,712,9,2 1014 | 1012,2077,24,2 1015 | 1013,1426,24,2 1016 | 1014,4983,36,2 1017 | 1015,1750,2,2 1018 | 1016,2168,25,1 1019 | 1017,5224,28,3 1020 | 1018,5062,16,4 1021 | 1019,1388,78,-1 1022 | 1020,3627,12,1 1023 | 1021,1896,314,1 1024 | 1022,1038,30,1 1025 | 1023,4399,297,1 1026 | 1024,4829,36,-1 1027 | 1025,4946,434,2 1028 | 1026,1679,15,-1 1029 | 1027,5374,436,1 1030 | 1028,4175,178,3 1031 | 1029,4468,19,2 1032 | 1030,2099,25,2 1033 | 1031,2372,8,1 1034 | 1032,5546,525,-1 1035 | 1033,4807,13,1 1036 | 1034,944,248,2 1037 | 1035,2619,194,1 1038 | 1036,4594,403,1 1039 | 1037,2488,48,1 1040 | 1038,2767,16,1 1041 | 1039,1539,172,1 1042 | 1040,4992,425,1 1043 | 1041,541,20,2 1044 | 1042,3412,19,2 1045 | 1043,3759,239,1 1046 | 1044,1718,25,2 1047 | 1045,3730,127,1 1048 | 1046,2716,24,3 1049 | 1047,1798,2,2 1050 | 1048,1761,300,3 1051 | 1049,1699,8,1 1052 | 1050,4240,48,1 1053 | 1051,4448,16,1 1054 | 1052,1755,139,3 1055 | 1053,4550,5,1 1056 | 1054,3986,8,3 1057 | 1055,1374,25,1 1058 | 1056,5400,497,-1 1059 | 1057,3463,217,1 1060 | 1058,487,16,2 1061 | 1059,3830,4,1 1062 | 1060,4957,416,1 1063 | 1061,882,9,2 1064 | 1062,5577,524,-1 1065 | 1063,4322,12,-1 1066 | 1064,4694,5,1 1067 | 1065,4916,422,1 1068 | 1066,760,240,2 1069 | 1067,614,122,3 1070 | 1068,3993,44,5 1071 | 1069,1296,48,1 1072 | 1070,5250,28,1 1073 | 1071,5128,430,1 1074 | 1072,2095,271,-1 1075 | 1073,4322,16,-1 1076 | 1074,4970,422,1 1077 | 1075,4033,16,1 1078 | 1076,5104,433,2 1079 | 1077,3350,24,2 1080 | 1078,1142,277,1 1081 | 1079,2716,25,3 1082 | 1080,4439,12,3 1083 | 1081,703,24,2 1084 | 1082,4867,356,1 1085 | 1083,2408,17,4 1086 | 1084,2591,48,3 1087 | 1085,311,5,1 1088 | 1086,787,9,1 1089 | 1087,5045,417,1 1090 | 1088,5283,419,2 1091 | 1089,3988,64,2 1092 | 1090,3886,8,1 1093 | 1091,1547,9,1 1094 | 1092,960,11,3 1095 | 1093,5487,524,-1 1096 | 1094,5565,524,-1 1097 | 1095,3751,259,2 1098 | 1096,1281,48,3 1099 | 1097,5627,513,-1 1100 | 1098,4765,17,1 1101 | 1099,2365,24,3 1102 | 1100,1281,145,3 1103 | 1101,4752,131,1 1104 | 1102,1403,36,1 1105 | 1103,2733,119,1 1106 | 1104,2108,9,1 1107 | 1105,3246,20,1 1108 | 1106,3303,20,1 1109 | 1107,964,9,1 1110 | 1108,5134,428,1 1111 | 1109,3504,20,2 1112 | 
1110,5123,28,1 1113 | 1111,773,49,1 1114 | 1112,434,9,1 1115 | 1113,5298,434,3 1116 | 1114,1767,8,1 1117 | 1115,2212,49,1 1118 | 1116,839,17,1 1119 | 1117,5163,425,1 1120 | 1118,2010,4,1 1121 | 1119,1668,8,5 1122 | 1120,4323,13,1 1123 | 1121,5596,535,-1 1124 | 1122,4872,96,2 1125 | 1123,3679,8,1 1126 | 1124,5254,437,1 1127 | 1125,4564,142,3 1128 | 1126,884,8,1 1129 | 1127,2607,13,3 1130 | 1128,3778,48,3 1131 | 1129,1565,13,-1 1132 | 1130,2083,205,1 1133 | 1131,2136,8,3 1134 | 1132,4711,110,1 1135 | 1133,4565,78,-1 1136 | 1134,962,5,3 1137 | 1135,4039,17,1 1138 | 1136,902,27,1 1139 | 1137,1880,12,-1 1140 | 1138,603,175,1 1141 | 1139,5084,36,2 1142 | 1140,1417,17,-1 1143 | 1141,1387,16,1 1144 | 1142,5438,506,-1 1145 | 1143,5050,16,2 1146 | 1144,2360,17,1 1147 | 1145,928,13,3 1148 | 1146,4980,416,1 1149 | 1147,3900,8,1 1150 | 1148,1620,302,1 1151 | 1149,1746,78,1 1152 | 1150,2411,48,3 1153 | 1151,1723,9,1 1154 | 1152,3930,185,1 1155 | 1153,839,16,1 1156 | 1154,5269,424,1 1157 | 1155,4410,278,1 1158 | 1156,4874,16,2 1159 | 1157,4661,5,1 1160 | 1158,3836,12,1 1161 | 1159,5133,483,4 1162 | 1160,3993,20,5 1163 | 1161,2079,25,2 1164 | 1162,4821,27,2 1165 | 1163,410,8,5 1166 | 1164,5458,531,-1 1167 | 1165,3274,51,1 1168 | 1166,5292,425,1 1169 | 1167,5090,422,1 1170 | 1168,1021,57,1 1171 | 1169,3202,102,1 1172 | 1170,4944,425,1 1173 | 1171,4615,24,2 1174 | 1172,2923,394,1 1175 | 1173,718,4,1 1176 | 1174,1210,11,1 1177 | 1175,3299,25,1 1178 | 1176,5110,464,1 1179 | 1177,743,82,3 1180 | 1178,3731,9,1 1181 | 1179,1974,24,1 1182 | 1180,2144,9,1 1183 | 1181,1128,24,2 1184 | 1182,5374,142,1 1185 | 1183,830,23,3 1186 | 1184,3593,19,2 1187 | 1185,3770,87,1 1188 | 1186,3482,15,1 1189 | 1187,3836,17,1 1190 | 1188,4139,9,1 1191 | 1189,1645,19,2 1192 | 1190,5053,419,2 1193 | 1191,979,17,1 1194 | 1192,4916,36,1 1195 | 1193,1501,62,-1 1196 | 1194,1258,24,1 1197 | 1195,5223,16,2 1198 | 1196,5500,523,-1 1199 | 1197,3531,16,3 1200 | 1198,5423,499,-1 1201 | 1199,2918,45,3 1202 | 1200,4423,142,1 1203 | 1201,5295,36,2 1204 | 1202,1952,48,1 1205 | 1203,5147,422,1 1206 | 1204,3641,12,1 1207 | 1205,5198,420,2 1208 | 1206,4026,8,1 1209 | 1207,2463,293,1 1210 | 1208,4339,9,1 1211 | 1209,4885,419,3 1212 | 1210,359,16,1 1213 | 1211,5053,431,2 1214 | 1212,5177,429,2 1215 | 1213,5242,469,2 1216 | 1214,5139,17,3 1217 | 1215,2923,27,1 1218 | 1216,3482,17,1 1219 | 1217,1095,20,1 1220 | 1218,5062,422,4 1221 | 1219,5102,419,1 1222 | 1220,813,8,1 1223 | 1221,4401,48,1 1224 | 1222,5252,16,2 1225 | 1223,5252,432,2 1226 | 1224,5439,503,-1 1227 | 1225,5480,511,-1 1228 | 1226,3239,25,2 1229 | 1227,339,48,3 1230 | 1228,4811,25,3 1231 | 1229,1704,168,2 1232 | 1230,1524,184,1 1233 | 1231,5356,425,1 1234 | 1232,3327,78,1 1235 | 1233,1528,36,1 1236 | 1234,4226,8,1 1237 | 1235,1561,116,1 1238 | 1236,4556,9,1 1239 | 1237,3641,16,1 1240 | 1238,4982,414,2 1241 | 1239,4982,16,2 1242 | 1240,5157,419,3 1243 | 1241,3285,12,1 1244 | 1242,5133,28,4 1245 | 1243,4114,157,-1 1246 | 1244,1264,9,1 1247 | 1245,1804,48,3 1248 | 1246,5377,428,1 1249 | 1247,2584,17,2 1250 | 1248,5385,416,2 1251 | 1249,4956,422,1 1252 | 1250,3419,24,1 1253 | 1251,827,60,1 1254 | 1252,2955,16,1 1255 | 1253,5935,525,-1 1256 | 1254,5072,16,1 1257 | 1255,4399,48,1 1258 | 1256,4906,425,1 1259 | 1257,1325,19,2 1260 | 1258,2923,13,1 1261 | 1259,2095,8,-1 1262 | 1260,5283,36,2 1263 | 1261,5300,462,2 1264 | 1262,4373,49,3 1265 | 1263,4319,240,2 1266 | 1264,21,35,2 1267 | 1265,5290,414,2 1268 | 1266,826,9,1 1269 | 1267,21,17,2 1270 | 1268,4829,24,-1 1271 | 1269,2047,16,1 1272 | 
1270,853,251,2 1273 | 1271,5034,443,2 1274 | 1272,224,15,-1 1275 | 1273,2313,9,1 1276 | 1274,2494,4,1 1277 | 1275,5380,422,2 1278 | 1276,1074,275,1 1279 | 1277,1309,9,1 1280 | 1278,5198,28,2 1281 | 1279,2321,27,2 1282 | 1280,2974,48,3 1283 | 1281,5297,428,1 1284 | 1282,5181,424,1 1285 | 1283,1036,8,1 1286 | 1284,2774,107,1 1287 | 1285,5501,524,-1 1288 | 1286,1685,95,2 1289 | 1287,1289,8,1 1290 | 1288,5236,453,1 1291 | 1289,5655,541,-1 1292 | 1290,2584,72,2 1293 | 1291,5213,416,1 1294 | 1292,4892,433,2 1295 | 1293,716,16,1 1296 | 1294,3384,17,-1 1297 | 1295,893,27,1 1298 | 1296,4872,24,2 1299 | 1297,960,67,3 1300 | 1298,3037,297,1 1301 | 1299,1036,9,1 1302 | 1300,5373,414,2 1303 | 1301,614,25,3 1304 | 1302,2005,49,3 1305 | 1303,3239,211,2 1306 | 1304,4319,20,2 1307 | 1305,1210,16,1 1308 | 1306,400,4,3 1309 | 1307,1790,308,1 1310 | 1308,1694,269,1 1311 | 1309,3677,8,2 1312 | 1310,5026,28,3 1313 | 1311,5269,414,1 1314 | 1312,5272,416,1 1315 | 1313,1244,9,2 1316 | 1314,5084,440,2 1317 | 1315,5305,17,3 1318 | 1316,3312,13,2 1319 | 1317,3491,17,1 1320 | 1318,4319,13,2 1321 | 1319,5165,429,3 1322 | 1320,4550,4,1 1323 | 1321,4067,45,3 1324 | 1322,2554,17,2 1325 | 1323,4361,8,1 1326 | 1324,4860,92,-1 1327 | 1325,1383,24,-1 1328 | 1326,4028,16,2 1329 | 1327,1033,20,1 1330 | 1328,1281,49,3 1331 | 1329,4401,49,1 1332 | 1330,3266,25,3 1333 | 1331,2351,25,1 1334 | 1332,1157,48,1 1335 | 1333,599,36,1 1336 | 1334,4878,420,1 1337 | 1335,5091,427,3 1338 | 1336,2607,19,3 1339 | 1337,3351,20,1 1340 | 1338,2501,16,-1 1341 | 1339,5215,16,3 1342 | 1340,4611,49,1 1343 | 1341,5677,518,-1 1344 | 1342,1778,24,2 1345 | 1343,5134,420,1 1346 | 1344,5918,524,-1 1347 | 1345,2952,67,3 1348 | 1346,2407,12,-1 1349 | 1347,1117,25,2 1350 | 1348,1388,77,-1 1351 | 1349,1758,207,1 1352 | 1350,2003,24,2 1353 | 1351,5575,525,-1 1354 | 1352,4546,49,5 1355 | 1353,4067,24,3 1356 | 1354,277,157,-1 1357 | 1355,2421,23,3 1358 | 1356,3021,8,1 1359 | 1357,3086,36,1 1360 | 1358,5276,433,3 1361 | 1359,3839,380,-1 1362 | 1360,5019,426,1 1363 | 1361,4954,422,1 1364 | 1362,3900,262,1 1365 | 1363,5024,28,1 1366 | 1364,2180,13,2 1367 | 1365,1541,20,1 1368 | 1366,3654,320,1 1369 | 1367,5037,16,1 1370 | 1368,3062,20,2 1371 | 1369,3150,16,1 1372 | 1370,2558,25,2 1373 | 1371,2637,9,1 1374 | 1372,4482,242,5 1375 | 1373,4446,16,1 1376 | 1374,1574,194,1 1377 | 1375,4123,49,3 1378 | 1376,1962,9,1 1379 | 1377,527,36,1 1380 | 1378,4896,425,2 1381 | 1379,4896,36,2 1382 | 1380,4930,451,2 1383 | 1381,5059,425,3 1384 | 1382,4653,9,1 1385 | 1383,4919,16,1 1386 | 1384,4930,28,2 1387 | 1385,1441,16,-1 1388 | 1386,2701,48,1 1389 | 1387,5058,425,1 1390 | 1388,4745,280,1 1391 | 1389,3391,48,1 1392 | 1390,1746,36,1 1393 | 1391,4785,24,2 1394 | 1392,5178,28,1 1395 | 1393,369,16,1 1396 | 1394,956,93,3 1397 | 1395,5340,475,2 1398 | 1396,278,4,1 1399 | 1397,4750,17,3 1400 | 1398,5080,433,1 1401 | 1399,4189,175,1 1402 | 1400,1038,17,1 1403 | 1401,2429,24,2 1404 | 1402,5917,525,-1 1405 | 1403,1703,8,1 1406 | 1404,3641,17,1 1407 | 1405,826,9,1 1408 | 1406,1717,36,3 1409 | 1407,2907,49,3 1410 | 1408,5156,17,3 1411 | 1409,5059,17,3 1412 | 1410,4278,14,2 1413 | 1411,4903,440,1 1414 | 1412,2702,113,2 1415 | 1413,1519,276,-1 1416 | 1414,1595,9,1 1417 | 1415,4189,78,1 1418 | 1416,953,16,-1 1419 | 1417,3490,5,1 1420 | 1418,1736,13,3 1421 | 1419,5002,433,1 1422 | 1420,5809,542,-1 1423 | 1421,306,385,2 1424 | 1422,1451,67,1 1425 | 1423,5198,420,2 1426 | 1424,4028,19,2 1427 | 1425,699,14,1 1428 | 1426,1021,9,1 1429 | 1427,919,122,2 1430 | 1428,274,95,2 1431 | 1429,1750,8,2 1432 | 
1430,3215,9,3 1433 | 1431,2207,16,1 1434 | 1432,1618,25,1 1435 | 1433,5507,526,-1 1436 | 1434,5377,485,1 1437 | 1435,288,140,1 1438 | 1436,3086,13,1 1439 | 1437,3209,9,1 1440 | 1438,4220,8,1 1441 | 1439,5156,422,3 1442 | 1440,4575,5,3 1443 | 1441,2445,8,1 1444 | 1442,210,12,1 1445 | 1443,3062,71,2 1446 | 1444,839,20,1 1447 | 1445,3836,11,1 1448 | 1446,4982,452,2 1449 | 1447,4583,13,1 1450 | 1448,5333,419,2 1451 | 1449,1598,8,1 1452 | 1450,4210,4,1 1453 | 1451,862,98,1 1454 | 1452,960,36,3 1455 | 1453,4970,16,1 1456 | 1454,5259,36,1 1457 | 1455,3852,16,1 1458 | 1456,4954,28,1 1459 | 1457,5315,16,1 1460 | 1458,1723,8,1 1461 | 1459,5063,416,1 1462 | 1460,4039,12,1 1463 | 1461,5506,524,-1 1464 | 1462,3482,12,1 1465 | 1463,3924,9,1 1466 | 1464,3350,169,2 1467 | 1465,335,85,1 1468 | 1466,4167,48,1 1469 | 1467,3392,36,-1 1470 | 1468,5494,525,-1 1471 | 1469,3274,97,1 1472 | 1470,473,36,1 1473 | 1471,5577,525,-1 1474 | 1472,2391,48,3 1475 | 1473,1624,172,1 1476 | 1474,5191,16,1 1477 | 1475,1404,13,1 1478 | 1476,1078,48,1 1479 | 1477,2973,71,3 1480 | 1478,5180,17,2 1481 | 1479,5072,433,1 1482 | 1480,1453,90,3 1483 | 1481,710,25,1 1484 | 1482,2490,24,1 1485 | 1483,3078,8,1 1486 | 1484,4957,438,1 1487 | 1485,4110,17,1 1488 | 1486,5072,422,1 1489 | 1487,3126,86,1 1490 | 1488,5339,433,2 1491 | 1489,5382,463,2 1492 | 1490,1296,49,1 1493 | 1491,688,48,1 1494 | 1492,3285,17,1 1495 | 1493,1033,19,1 1496 | 1494,3491,51,1 1497 | 1495,5151,481,2 1498 | 1496,1655,147,1 1499 | 1497,1666,5,3 1500 | 1498,2421,24,3 1501 | 1499,3577,11,-1 1502 | 1500,629,36,-1 1503 | 1501,2438,8,1 1504 | 1502,1242,8,1 1505 | 1503,235,9,1 1506 | 1504,5299,16,1 1507 | 1505,1801,13,-1 1508 | 1506,2286,24,3 1509 | 1507,4319,17,2 1510 | 1508,5439,541,-1 1511 | 1509,3679,9,1 1512 | 1510,21,19,2 1513 | 1511,5180,433,2 1514 | 1512,2372,9,1 1515 | 1513,5500,524,-1 1516 | 1514,1031,51,-1 1517 | 1515,4919,438,1 1518 | 1516,3577,12,-1 1519 | 1517,4440,4,1 1520 | 1518,1129,12,1 1521 | 1519,4175,8,3 1522 | 1520,1095,13,1 1523 | 1521,2936,362,2 1524 | 1522,473,20,1 1525 | 1523,1911,17,1 1526 | 1524,400,5,3 1527 | 1525,5527,524,-1 1528 | 1526,3148,30,5 1529 | 1527,5487,523,-1 1530 | 1528,2123,24,2 1531 | 1529,2936,25,2 1532 | 1530,5711,542,-1 1533 | 1531,1522,24,2 1534 | 1532,1258,25,1 1535 | 1533,1620,4,1 1536 | 1534,5249,419,1 1537 | 1535,4520,280,1 1538 | 1536,1786,134,1 1539 | 1537,4811,24,3 1540 | 1538,2180,12,2 1541 | 1539,5090,28,1 1542 | 1540,1983,17,1 1543 | 1541,1169,36,1 1544 | 1542,5193,416,2 1545 | 1543,2772,16,1 1546 | 1544,5205,459,1 1547 | 1545,3531,14,3 1548 | 1546,4284,48,1 1549 | 1547,3447,48,3 1550 | 1548,5052,433,2 1551 | 1549,1885,117,1 1552 | 1550,5148,449,2 1553 | 1551,882,8,2 1554 | 1552,4564,20,3 1555 | 1553,5330,483,4 1556 | 1554,3112,154,2 1557 | 1555,1595,33,1 1558 | 1556,1776,90,1 1559 | 1557,1944,8,1 1560 | 1558,5389,425,1 1561 | 1559,2372,103,1 1562 | 1560,5033,425,1 1563 | 1561,1117,248,2 1564 | 1562,3363,205,1 1565 | 1563,1403,16,1 1566 | 1564,1684,25,2 1567 | 1565,1777,205,1 1568 | 1566,5577,533,-1 1569 | 1567,4453,20,1 1570 | 1568,2476,300,1 1571 | 1569,3164,23,3 1572 | 1570,1679,12,-1 1573 | 1571,935,4,1 1574 | 1572,3223,24,-1 1575 | 1573,3491,16,1 1576 | 1574,3695,16,1 1577 | 1575,1994,199,1 1578 | 1576,1375,57,1 1579 | 1577,2006,48,3 1580 | 1578,704,48,3 1581 | 1579,2307,1,2 1582 | 1580,1505,51,1 1583 | 1581,917,181,3 1584 | 1582,1288,48,3 1585 | 1583,5052,422,2 1586 | 1584,3875,300,-1 1587 | 1585,5362,485,1 1588 | 1586,1968,9,1 1589 | 1587,5293,480,3 1590 | 1588,1642,233,1 1591 | 1589,3782,24,2 1592 | 
1590,1699,9,1 1593 | 1591,4184,8,1 1594 | 1592,2549,49,1 1595 | 1593,1318,369,1 1596 | 1594,5024,16,1 1597 | 1595,3899,175,-1 1598 | 1596,2988,49,3 1599 | 1597,1689,105,2 1600 | 1598,3497,323,1 1601 | 1599,4949,425,1 1602 | 1600,2003,25,2 1603 | 1601,3751,20,2 1604 | 1602,5935,524,-1 1605 | 1603,5070,16,2 1606 | 1604,249,19,2 1607 | 1605,4949,28,1 1608 | 1606,2846,25,1 1609 | 1607,5335,442,1 1610 | 1608,1850,9,1 1611 | 1609,2410,120,1 1612 | 1610,782,94,2 1613 | 1611,3089,8,1 1614 | 1612,5026,441,3 1615 | 1613,3186,5,3 1616 | 1614,5346,36,1 1617 | 1615,5929,525,-1 1618 | 1616,1714,48,3 1619 | 1617,5124,420,1 1620 | 1618,5538,525,-1 1621 | 1619,2721,17,3 1622 | 1620,5546,531,-1 1623 | 1621,2079,268,2 1624 | 1622,5134,16,1 1625 | 1623,839,27,1 1626 | 1624,1960,24,2 1627 | 1625,1598,334,1 1628 | 1626,1471,277,1 1629 | 1627,1528,11,1 1630 | 1628,3178,2,2 1631 | 1629,4924,16,2 1632 | 1630,5215,433,3 1633 | 1631,5072,445,1 1634 | 1632,2687,9,1 1635 | 1633,5911,525,-1 1636 | 1634,2548,184,1 1637 | 1635,3770,36,1 1638 | 1636,3013,30,-1 1639 | 1637,2297,48,1 1640 | 1638,3514,12,1 1641 | 1639,2774,8,1 1642 | 1640,5528,530,-1 1643 | 1641,5320,417,1 1644 | 1642,4247,14,3 1645 | 1643,5132,434,2 1646 | 1644,1971,13,2 1647 | 1645,4516,387,1 1648 | 1646,359,20,1 1649 | 1647,1580,12,1 1650 | 1648,1389,9,2 1651 | 1649,5144,443,3 1652 | 1650,5053,425,2 1653 | 1651,2464,278,1 1654 | 1652,3276,67,3 1655 | 1653,4970,470,1 1656 | 1654,1145,243,1 1657 | 1655,3062,14,2 1658 | 1656,4207,49,3 1659 | 1657,1005,33,1 1660 | 1658,5081,425,1 1661 | 1659,5315,425,1 1662 | 1660,519,36,1 1663 | 1661,5249,28,1 1664 | 1662,5533,525,-1 1665 | 1663,824,36,1 1666 | 1664,5723,543,-1 1667 | 1665,4301,9,1 1668 | 1666,2744,14,3 1669 | 1667,2064,15,1 1670 | 1668,882,33,2 1671 | 1669,1309,8,1 1672 | 1670,3462,8,1 1673 | 1671,4944,28,1 1674 | 1672,656,16,1 1675 | 1673,2329,51,1 1676 | 1674,5295,425,2 1677 | 1675,2747,24,2 1678 | 1676,3740,51,-1 1679 | 1677,3667,30,1 1680 | 1678,5073,36,2 1681 | 1679,4338,17,2 1682 | 1680,1660,112,3 1683 | 1681,953,86,-1 1684 | 1682,4978,425,1 1685 | 1683,3323,176,1 1686 | 1684,3333,281,1 1687 | 1685,28,43,3 1688 | 1686,4993,16,3 1689 | 1687,4807,27,1 1690 | 1688,2570,49,3 1691 | 1689,4247,19,3 1692 | 1690,1484,25,1 1693 | 1691,4583,20,1 1694 | 1692,4167,295,1 1695 | 1693,5246,442,1 1696 | 1694,3597,4,3 1697 | 1695,4453,26,1 1698 | 1696,5223,419,2 1699 | 1697,1541,13,1 1700 | 1698,5246,16,1 1701 | 1699,4681,251,2 1702 | 1700,3468,12,1 1703 | 1701,1642,25,1 1704 | 1702,3234,20,1 1705 | 1703,4323,12,1 1706 | 1704,1033,17,1 1707 | 1705,3131,8,1 1708 | 1706,527,12,1 1709 | 1707,2448,366,3 1710 | 1708,5071,458,2 1711 | 1709,5360,487,1 1712 | 1710,2108,164,1 1713 | 1711,1366,5,1 1714 | 1712,4094,409,1 1715 | 1713,4564,27,3 1716 | 1714,431,11,-1 1717 | 1715,5099,28,1 1718 | 1716,350,105,3 1719 | 1717,1310,16,2 1720 | 1718,1145,78,1 1721 | 1719,2759,8,1 1722 | 1720,1998,12,1 1723 | 1721,1749,16,1 1724 | 1722,147,68,-1 1725 | 1723,4122,24,3 1726 | 1724,1033,104,1 1727 | 1725,3312,19,2 1728 | 1726,3922,24,2 1729 | 1727,2095,2,-1 1730 | 1728,674,9,2 1731 | 1729,4424,19,2 1732 | 1730,2317,9,1 1733 | 1731,4971,424,1 1734 | 1732,1580,36,1 1735 | 1733,4978,419,1 1736 | 1734,5566,525,-1 1737 | 1735,991,4,1 1738 | 1736,1875,198,1 1739 | 1737,2584,16,2 1740 | 1738,5382,429,2 1741 | 1739,198,9,1 1742 | 1740,5372,28,1 1743 | 1741,3839,16,-1 1744 | 1742,541,13,2 1745 | 1743,4784,4,1 1746 | 1744,5480,512,-1 1747 | 1745,4059,14,3 1748 | 1746,4957,28,1 1749 | 1747,2480,8,1 1750 | 1748,1483,49,3 1751 | 1749,5282,424,2 1752 | 
1750,5147,142,1 1753 | 1751,5276,16,3 1754 | 1752,1471,17,1 1755 | 1753,752,215,1 1756 | 1754,2092,49,3 1757 | 1755,4226,334,1 1758 | 1756,989,5,1 1759 | 1757,4955,16,1 1760 | 1758,2393,375,1 1761 | 1759,5021,477,2 1762 | 1760,767,25,3 1763 | 1761,5389,432,1 1764 | 1762,3667,17,1 1765 | 1763,5389,16,1 1766 | 1764,2006,49,3 1767 | 1765,4025,206,3 1768 | 1766,4301,8,1 1769 | 1767,3075,16,1 1770 | 1768,1903,17,2 1771 | 1769,2008,48,3 1772 | 1770,5198,437,2 1773 | 1771,760,20,2 1774 | 1772,5258,427,3 1775 | 1773,4956,16,1 1776 | 1774,3746,12,-1 1777 | 1775,4992,28,1 1778 | 1776,1038,157,1 1779 | 1777,1338,24,1 1780 | 1778,4338,20,2 1781 | 1779,4980,434,1 1782 | 1780,4524,108,1 1783 | 1781,5205,424,1 1784 | 1782,2980,51,1 1785 | 1783,2307,66,2 1786 | 1784,2418,325,1 1787 | 1785,5292,417,1 1788 | 1786,3089,168,1 1789 | 1787,2667,4,1 1790 | 1788,5154,463,2 1791 | 1789,902,230,1 1792 | 1790,1522,25,2 1793 | 1791,1453,48,3 1794 | 1792,1090,271,1 1795 | 1793,5584,524,-1 1796 | 1794,3096,8,3 1797 | 1795,5002,422,1 1798 | 1796,1734,136,3 1799 | 1797,5434,497,-1 1800 | 1798,2957,25,2 1801 | 1799,2465,135,3 1802 | 1800,1344,309,3 1803 | 1801,1655,216,1 1804 | 1802,3812,353,3 1805 | 1803,5062,36,4 1806 | 1804,4698,182,1 1807 | 1805,3903,12,1 1808 | 1806,369,18,1 1809 | 1807,1646,308,2 1810 | 1808,930,9,1 1811 | 1809,4062,12,1 1812 | 1810,3620,19,2 1813 | 1811,5049,425,1 1814 | 1812,767,105,3 1815 | 1813,2143,8,1 1816 | 1814,353,155,-1 1817 | 1815,1095,17,1 1818 | 1816,2488,49,1 1819 | 1817,4397,9,1 1820 | 1818,5072,28,1 1821 | 1819,5337,425,4 1822 | 1820,5723,542,-1 1823 | 1821,1278,296,1 1824 | 1822,4346,45,2 1825 | 1823,2690,5,3 1826 | 1824,4924,416,2 1827 | 1825,3131,9,1 1828 | 1826,3150,17,1 1829 | 1827,2090,359,1 1830 | 1828,5176,478,1 1831 | 1829,1619,24,1 1832 | 1830,5033,16,1 1833 | 1831,2099,24,2 1834 | 1832,1621,49,1 1835 | 1833,1438,75,1 1836 | 1834,4026,9,1 1837 | 1835,2958,344,3 1838 | 1836,2464,8,1 1839 | 1837,454,49,1 1840 | 1838,5154,416,2 1841 | 1839,2307,226,2 1842 | 1840,2772,36,1 1843 | 1841,4033,11,1 1844 | 1842,1984,347,1 1845 | 1843,5383,16,3 1846 | 1844,1008,5,3 1847 | 1845,2607,17,3 1848 | 1846,5392,477,2 1849 | 1847,2360,11,1 1850 | 1848,3379,16,1 1851 | 1849,3855,17,3 1852 | 1850,4327,11,-1 1853 | 1851,3075,17,1 1854 | 1852,1129,77,1 1855 | 1853,4227,378,1 1856 | 1854,2208,25,2 1857 | 1855,1811,9,1 1858 | 1856,2159,9,1 1859 | 1857,303,8,1 1860 | 1858,4978,454,1 1861 | 1859,4896,417,2 1862 | 1860,4875,142,1 1863 | 1861,3523,66,2 1864 | 1862,5670,542,-1 1865 | 1863,5154,36,2 1866 | 1864,5178,16,1 1867 | 1865,1231,17,1 1868 | 1866,5021,16,2 1869 | 1867,3930,17,1 1870 | 1868,1755,8,3 1871 | 1869,3899,13,-1 1872 | 1870,23,9,1 1873 | 1871,4594,12,1 1874 | 1872,3943,5,1 1875 | 1873,5122,433,2 1876 | 1874,3198,16,-1 1877 | 1875,5283,142,2 1878 | 1876,4062,67,1 1879 | 1877,4084,8,1 1880 | 1878,5385,434,2 1881 | 1879,3848,122,3 1882 | 1880,1562,48,3 1883 | 1881,1461,287,2 1884 | 1882,4175,9,3 1885 | 1883,577,16,1 1886 | 1884,5451,542,-1 1887 | 1885,4317,13,3 1888 | 1886,1152,144,2 1889 | 1887,2766,373,1 1890 | 1888,5295,16,2 1891 | 1889,4420,11,-1 1892 | 1890,2606,8,1 1893 | 1891,1087,192,2 1894 | 1892,4967,28,1 1895 | 1893,3695,19,1 1896 | 1894,4955,487,1 1897 | 1895,902,20,1 1898 | 1896,4600,283,1 1899 | 1897,2321,16,2 1900 | 1898,5156,433,3 1901 | 1899,1776,48,1 1902 | 1900,5033,419,1 1903 | 1901,5001,433,1 1904 | 1902,537,25,2 1905 | 1903,2297,49,1 1906 | 1904,2490,25,1 1907 | 1905,2123,68,2 1908 | 1906,2475,331,2 1909 | 1907,4546,235,5 1910 | 1908,4765,93,1 1911 | 1909,4085,8,1 1912 
| 1910,924,121,2 1913 | 1911,4785,188,2 1914 | 1912,1392,51,1 1915 | 1913,3620,98,2 1916 | 1914,5931,524,-1 1917 | 1915,5116,428,1 1918 | 1916,2005,48,3 1919 | 1917,2463,17,1 1920 | 1918,3837,12,1 1921 | 1919,3129,36,3 1922 | 1920,5290,36,2 1923 | 1921,5258,17,3 1924 | 1922,4954,433,1 1925 | 1923,2584,19,2 1926 | 1924,3667,16,1 1927 | 1925,3013,12,-1 1928 | 1926,5280,36,3 1929 | 1927,3545,57,1 1930 | 1928,1414,189,2 1931 | 1929,2438,376,1 1932 | 1930,2884,25,2 1933 | 1931,3057,25,1 1934 | 1932,5151,36,2 1935 | 1933,5198,437,2 1936 | 1934,1655,36,1 1937 | 1935,3213,9,1 1938 | 1936,2494,288,1 1939 | 1937,1761,12,3 1940 | 1938,4923,142,1 1941 | 1939,5026,425,3 1942 | 1940,5224,16,3 1943 | 1941,4189,36,1 1944 | 1942,3823,9,1 1945 | 1943,3740,36,-1 1946 | 1944,5189,422,1 1947 | 1945,1124,24,2 1948 | 1946,4373,200,3 1949 | 1947,5193,463,2 1950 | 1948,5147,433,1 1951 | 1949,4987,36,2 1952 | 1950,4433,9,3 1953 | 1951,984,17,-1 1954 | 1952,1489,36,-1 1955 | 1953,5528,525,-1 1956 | 1954,1758,48,1 1957 | 1955,1718,327,2 1958 | 1956,5406,499,-1 1959 | 1957,1515,25,1 1960 | 1958,4959,469,2 1961 | 1959,1761,36,3 1962 | 1960,4468,17,2 1963 | 1961,5313,418,1 1964 | 1962,1074,19,1 1965 | 1963,5186,28,1 1966 | 1964,5316,420,3 1967 | 1965,5083,142,2 1968 | 1966,5314,439,2 1969 | 1967,4229,27,1 1970 | 1968,5305,434,3 1971 | 1969,4874,474,2 1972 | 1970,1278,48,1 1973 | 1971,3511,16,1 1974 | 1972,1106,116,1 1975 | 1973,1831,57,1 1976 | 1974,3148,12,5 1977 | 1975,884,9,1 1978 | 1976,3217,5,1 1979 | 1977,2973,16,3 1980 | 1978,5268,142,1 1981 | 1979,4653,60,1 1982 | 1980,2090,4,1 1983 | 1981,1471,13,1 1984 | 1982,5082,464,1 1985 | 1983,3595,115,3 1986 | 1984,638,219,1 1987 | 1985,3126,85,1 1988 | 1986,5039,414,4 1989 | 1987,2373,197,1 1990 | 1988,1242,273,1 1991 | 1989,5099,451,1 1992 | 1990,4529,5,1 1993 | 1991,537,24,2 1994 | 1992,5282,142,2 1995 | 1993,2379,347,1 1996 | 1994,4284,206,1 1997 | 1995,491,13,-1 1998 | 1996,527,155,1 1999 | 1997,635,148,1 2000 | 1998,5411,496,-1 2001 | 1999,5377,28,1 2002 | 2000,5298,16,3 2003 | 2001,3323,36,1 2004 | 2002,4025,48,3 2005 | 2003,3864,16,2 2006 | 2004,5272,28,1 2007 | 2005,5187,424,2 2008 | 2006,5175,419,2 2009 | 2007,1853,48,1 2010 | 2008,312,149,1 2011 | 2009,4468,20,2 2012 | 2010,1801,36,-1 2013 | 2011,4980,28,1 2014 | 2012,5550,531,-1 2015 | 2013,5062,433,4 2016 | 2014,2373,16,1 2017 | 2015,4433,8,3 2018 | 2016,1666,167,3 2019 | 2017,303,103,1 2020 | 2018,4745,5,1 2021 | 2019,5001,453,1 2022 | 2020,2721,20,3 2023 | 2021,1417,243,-1 2024 | 2022,761,8,1 2025 | 2023,826,204,1 2026 | 2024,894,4,1 2027 | 2025,984,15,-1 2028 | 2026,2445,9,1 2029 | 2027,5091,17,3 2030 | 2028,5245,36,2 2031 | 2029,5360,28,1 2032 | 2030,4929,16,1 2033 | 2031,1404,12,1 2034 | 2032,97,5,1 2035 | 2033,5406,497,-1 2036 | 2034,4328,75,1 2037 | 2035,3609,27,1 2038 | 2036,5176,16,1 2039 | 2037,991,5,1 2040 | 2038,362,48,3 2041 | 2039,3531,20,3 2042 | 2040,5409,498,-1 2043 | 2041,1470,25,3 2044 | 2042,5088,425,1 2045 | 2043,2329,13,1 2046 | 2044,5514,525,-1 2047 | 2045,5063,474,1 2048 | 2046,1952,295,1 2049 | 2047,1804,74,3 2050 | 2048,3208,25,1 2051 | 2049,953,85,-1 2052 | 2050,4400,268,2 2053 | 2051,3294,24,2 2054 | 2052,2692,24,2 2055 | 2053,5177,36,2 2056 | 2054,491,93,-1 2057 | 2055,3047,376,1 2058 | 2056,5187,414,2 2059 | 2057,3351,14,1 2060 | 2058,2619,5,1 2061 | 2059,1837,4,1 2062 | 2060,5232,16,2 2063 | 2061,5320,36,1 2064 | 2062,5230,439,1 2065 | 2063,2182,24,2 2066 | 2064,3333,8,1 2067 | 2065,2893,81,3 2068 | 2066,1591,186,2 2069 | 2067,1038,12,1 2070 | 2068,5501,526,-1 2071 | 
2069,4845,8,1 2072 | 2070,5932,525,-1 2073 | 2071,2151,245,2 2074 | 2072,4344,48,1 2075 | 2073,1094,48,1 2076 | 2074,2182,25,2 2077 | 2075,5597,525,-1 2078 | 2076,920,187,1 2079 | 2077,5426,498,-1 2080 | 2078,4044,17,1 2081 | 2079,830,24,3 2082 | 2080,767,24,3 2083 | 2081,760,13,2 2084 | 2082,5154,16,2 2085 | 2083,5543,531,-1 2086 | 2084,1378,166,1 2087 | 2085,2868,9,1 2088 | 2086,4516,4,1 2089 | 2087,703,25,2 2090 | 2088,1523,201,1 2091 | 2089,4453,17,1 2092 | 2090,1223,8,1 2093 | 2091,5288,36,2 2094 | 2092,1106,8,1 2095 | 2093,3062,19,2 2096 | 2094,5620,513,-1 2097 | 2095,5362,16,1 2098 | 2096,4006,25,3 2099 | 2097,547,36,1 2100 | 2098,311,4,1 2101 | 2099,5359,415,1 2102 | 2100,5061,36,2 2103 | 2101,3943,4,1 2104 | 2102,4887,28,1 2105 | 2103,3468,17,1 2106 | 2104,5380,36,2 2107 | 2105,4420,36,-1 2108 | 2106,1489,25,-1 2109 | 2107,5256,467,1 2110 | 2108,2077,25,2 2111 | 2109,4303,339,1 2112 | 2110,3759,8,1 2113 | 2111,3886,392,1 2114 | 2112,4242,27,1 2115 | 2113,956,12,3 2116 | 2114,4081,24,3 2117 | 2115,752,5,1 2118 | 2116,2880,8,1 2119 | 2117,5576,524,-1 2120 | 2118,5245,16,2 2121 | 2119,1168,169,2 2122 | 2120,1146,266,1 2123 | 2121,5069,36,2 2124 | 2122,3208,24,1 2125 | 2123,2475,8,2 2126 | 2124,3150,15,1 2127 | 2125,3388,27,1 2128 | 2126,5380,465,2 2129 | 2127,5264,16,1 2130 | 2128,235,8,1 2131 | 2129,5245,495,2 2132 | 2130,4727,247,2 2133 | 2131,1230,48,1 2134 | 2132,4916,453,1 2135 | 2133,4821,13,2 2136 | 2134,1911,20,1 2137 | 2135,5085,416,1 2138 | 2136,3988,2,2 2139 | 2137,2969,203,1 2140 | 2138,5366,16,1 2141 | 2139,4929,428,1 2142 | 2140,5578,542,-1 2143 | 2141,1321,274,3 2144 | 2142,1031,25,-1 2145 | 2143,862,13,1 2146 | 2144,4892,16,2 2147 | 2145,2029,282,1 2148 | 2146,1465,25,1 2149 | 2147,5232,429,2 2150 | 2148,5592,542,-1 2151 | 2149,349,3,1 2152 | 2150,5281,16,2 2153 | 2151,2646,273,2 2154 | 2152,3953,16,1 2155 | 2153,3654,8,1 2156 | 2154,2411,289,3 2157 | 2155,3143,16,1 2158 | 2156,4317,93,3 2159 | 2157,3577,16,-1 2160 | 2158,5276,36,3 2161 | 2159,3645,5,1 2162 | 2160,2309,185,1 2163 | 2161,4904,16,1 2164 | 2162,3645,244,1 2165 | 2163,4482,19,5 2166 | 2164,1803,25,3 2167 | 2165,1853,110,1 2168 | 2166,1803,13,3 2169 | 2167,5359,422,1 2170 | 2168,2535,16,-1 2171 | 2169,3511,36,1 2172 | 2170,4807,20,1 2173 | 2171,818,48,1 2174 | 2172,962,190,3 2175 | 2173,5921,545,-1 2176 | 2174,1008,4,3 2177 | 2175,1087,5,2 2178 | 2176,2958,4,3 2179 | 2177,1541,17,1 2180 | 2178,5290,16,2 2181 | 2179,2286,25,3 2182 | 2180,4150,105,2 2183 | 2181,3514,30,1 2184 | 2182,1717,24,3 2185 | 2183,3198,17,-1 2186 | 2184,4039,16,1 2187 | 2185,4680,9,3 2188 | 2186,994,61,2 2189 | 2187,5099,422,1 2190 | 2188,4039,30,1 2191 | 2189,2041,24,3 2192 | 2190,1727,27,1 2193 | 2191,1028,9,1 2194 | 2192,3265,19,2 2195 | 2193,4317,92,3 2196 | 2194,5024,16,1 2197 | 2195,5513,523,-1 2198 | 2196,3323,16,1 2199 | 2197,2561,188,2 2200 | 2198,4944,432,1 2201 | 2199,1617,8,1 2202 | 2200,5931,525,-1 2203 | 2201,3609,13,1 2204 | 2202,4981,425,1 2205 | 2203,1484,312,1 2206 | 2204,2510,61,2 2207 | 2205,2011,48,1 2208 | 2206,5133,425,4 2209 | 2207,2545,16,2 2210 | 2208,4086,4,3 2211 | 2209,4439,17,3 2212 | 2210,1217,5,3 2213 | 2211,1942,49,3 2214 | 2212,5506,525,-1 2215 | 2213,2359,27,2 2216 | 2214,5245,419,2 2217 | 2215,5473,518,-1 2218 | 2216,5359,28,1 2219 | 2217,5254,420,1 2220 | 2218,4278,104,2 2221 | 2219,4044,16,1 2222 | 2220,2047,36,1 2223 | 2221,5180,422,2 2224 | 2222,3379,11,1 2225 | 2223,793,244,1 2226 | 2224,4322,157,-1 2227 | 2225,5303,424,1 2228 | 2226,1144,17,1 2229 | 2227,1944,9,1 2230 | 2228,306,9,2 2231 | 
2229,2463,27,1 2232 | 2230,5077,28,1 2233 | 2231,2980,16,1 2234 | 2232,3497,5,1 2235 | 2233,5203,448,3 2236 | 2234,4482,20,5 2237 | 2235,5521,524,-1 2238 | 2236,4042,16,1 2239 | 2237,2476,78,1 2240 | 2238,5118,414,2 2241 | 2239,4834,16,1 2242 | 2240,4081,25,3 2243 | 2241,603,13,1 2244 | 2242,2297,81,1 2245 | 2243,2408,16,4 2246 | 2244,5279,484,2 2247 | 2245,4904,419,1 2248 | 2246,1574,5,1 2249 | 2247,3452,9,1 2250 | 2248,1987,11,1 2251 | 2249,5558,524,-1 2252 | 2250,2687,108,1 2253 | 2251,2047,87,1 2254 | 2252,2213,24,2 2255 | 2253,2964,121,-1 2256 | 2254,5179,414,1 2257 | 2255,5339,36,2 2258 | 2256,631,9,1 2259 | 2257,5684,518,-1 2260 | 2258,2535,11,-1 2261 | 2259,4930,433,2 2262 | 2260,2905,5,1 2263 | 2261,3805,12,-1 2264 | 2262,2738,9,1 2265 | 2263,862,98,1 2266 | 2264,2717,5,1 2267 | 2265,3412,20,2 2268 | 2266,3583,25,4 2269 | 2267,3384,15,-1 2270 | 2268,5230,28,1 2271 | 2269,5037,28,1 2272 | 2270,4266,9,1 2273 | 2271,2351,362,1 2274 | 2272,4944,419,1 2275 | 2273,1857,169,2 2276 | 2274,4992,492,1 2277 | 2275,5178,468,1 2278 | 2276,1704,9,2 2279 | 2277,1244,7,2 2280 | 2278,1885,9,1 2281 | 2279,312,5,1 2282 | 2280,5259,16,1 2283 | 2281,2444,162,1 2284 | 2282,5313,28,1 2285 | 2283,1231,272,1 2286 | 2284,4661,314,1 2287 | 2285,4936,431,1 2288 | 2286,5021,425,2 2289 | 2287,4454,249,1 2290 | 2288,4955,425,1 2291 | 2289,5297,16,1 2292 | 2290,5320,494,1 2293 | 2291,487,27,2 2294 | 2292,3681,225,1 2295 | 2293,4694,4,1 2296 | 2294,3198,24,-1 2297 | 2295,5069,416,2 2298 | 2296,2724,25,2 2299 | 2297,198,120,1 2300 | 2298,249,17,2 2301 | 2299,3274,17,1 2302 | 2300,5366,28,1 2303 | 2301,1225,25,1 2304 | 2302,5281,416,2 2305 | 2303,5144,17,3 2306 | 2304,5136,414,2 2307 | 2305,793,5,1 2308 | 2306,1318,4,1 2309 | 2307,4139,8,1 2310 | 2308,3129,12,3 2311 | 2309,1561,116,1 2312 | 2310,674,60,2 2313 | 2311,5653,542,-1 2314 | 2312,3265,138,2 2315 | 2313,5575,524,-1 2316 | 2314,2388,8,1 2317 | 2315,3078,9,1 2318 | 2316,3121,13,4 2319 | 2317,5215,17,3 2320 | 2318,4183,11,3 2321 | 2319,3447,201,3 2322 | 2320,5525,525,-1 2323 | 2321,3504,19,2 2324 | 2322,3126,87,1 2325 | 2323,5118,459,2 2326 | 2324,1388,13,-1 2327 | 2325,3778,368,3 2328 | 2326,5232,419,2 2329 | 2327,2561,25,2 2330 | 2328,1561,9,1 2331 | 2329,1717,25,3 2332 | 2330,2407,13,-1 2333 | 2331,1980,193,3 2334 | 2332,5268,422,1 2335 | 2333,4485,12,1 2336 | 2334,5090,423,1 2337 | 2335,3988,8,2 2338 | 2336,235,103,1 2339 | 2337,3202,5,1 2340 | 2338,5594,534,-1 2341 | 2339,1417,13,-1 2342 | 2340,5411,499,-1 2343 | 2341,4475,25,2 2344 | 2342,5001,142,1 2345 | 2343,5593,524,-1 2346 | 2344,5006,424,2 2347 | 2345,2407,30,-1 2348 | 2346,3807,8,1 2349 | 2347,4882,28,1 2350 | 2348,1356,9,1 2351 | 2349,5088,28,1 2352 | 2350,2404,16,1 2353 | 2351,4327,15,-1 2354 | 2352,2143,9,1 2355 | 2353,1197,9,1 2356 | 2354,4968,16,2 2357 | 2355,656,17,1 2358 | 2356,3848,25,3 2359 | 2357,5282,414,2 2360 | 2358,1561,116,1 2361 | 2359,3078,151,1 2362 | 2360,5605,536,-1 2363 | 2361,4887,438,1 2364 | 2362,2180,332,2 2365 | 2363,1591,9,2 2366 | 2364,4028,71,2 2367 | 2365,1764,8,3 2368 | 2366,3627,365,1 2369 | 2367,1223,229,1 2370 | 2368,948,48,1 2371 | 2369,4615,36,2 2372 | 2370,3807,168,1 2373 | 2371,893,16,1 2374 | 2372,3852,220,1 2375 | 2373,2360,15,1 2376 | 2374,1746,12,1 2377 | 2375,1831,9,1 2378 | 2376,1515,24,1 2379 | 2377,377,14,1 2380 | 2378,5156,465,3 2381 | 2379,1614,49,1 2382 | 2380,1790,25,1 2383 | 2381,232,4,3 2384 | 2382,500,114,1 2385 | 2383,3178,8,2 2386 | 2384,4313,5,1 2387 | 2385,1987,13,1 2388 | 2386,5071,433,2 2389 | 2387,892,5,1 2390 | 2388,1253,113,2 2391 | 
2389,1942,48,3 2392 | 2390,3778,49,3 2393 | 2391,3595,8,3 2394 | 2392,2404,36,1 2395 | 2393,3418,9,1 2396 | 2394,973,24,3 2397 | 2395,1565,24,-1 2398 | 2396,2774,9,1 2399 | 2397,3782,25,2 2400 | 2398,5227,415,1 2401 | 2399,1586,9,1 2402 | 2400,4114,12,-1 2403 | 2401,5204,422,1 2404 | 2402,2740,48,1 2405 | 2403,4298,369,1 2406 | 2404,4683,4,1 2407 | 2405,2213,169,2 2408 | 2406,4929,28,1 2409 | 2407,1146,48,1 2410 | 2408,5280,16,3 2411 | 2409,4573,367,1 2412 | 2410,1944,9,1 2413 | 2411,5549,524,-1 2414 | 2412,752,4,1 2415 | 2413,1702,314,1 2416 | 2414,832,8,1 2417 | 2415,1321,25,3 2418 | 2416,341,19,1 2419 | 2417,1684,144,2 2420 | 2418,1536,159,1 2421 | 2419,3418,282,1 2422 | 2420,1070,84,1 2423 | 2421,5295,421,2 2424 | 2422,1621,76,1 2425 | 2423,603,12,1 2426 | 2424,3993,13,5 2427 | 2425,2008,49,3 2428 | 2426,1764,115,3 2429 | 2427,3374,49,1 2430 | 2428,1602,204,1 2431 | 2429,5006,435,2 2432 | 2430,1619,274,1 2433 | 2431,5362,428,1 2434 | 2432,3737,9,1 2435 | 2433,1021,8,1 2436 | 2434,5159,36,1 2437 | 2435,353,12,-1 2438 | 2436,147,24,-1 2439 | 2437,917,25,3 2440 | 2438,249,83,2 2441 | 2439,3082,157,-1 2442 | 2440,2488,338,1 2443 | 2441,5126,442,1 2444 | 2442,1968,33,1 2445 | 2443,209,84,1 2446 | 2444,1169,62,1 2447 | 2445,3838,87,1 2448 | 2446,1857,24,2 2449 | 2447,4167,49,1 2450 | 2448,5008,16,3 2451 | 2449,1850,4,1 2452 | 2450,2407,17,-1 2453 | 2451,5385,36,2 2454 | 2452,4879,28,1 2455 | 2453,1359,49,1 2456 | 2454,5518,523,-1 2457 | 2455,1980,4,3 2458 | 2456,5035,28,1 2459 | 2457,1727,20,1 2460 | 2458,2475,9,2 2461 | 2459,2606,159,1 2462 | 2460,1960,25,2 2463 | 2461,4401,378,1 2464 | 2462,4924,443,2 2465 | 2463,350,17,3 2466 | 2464,4929,426,1 2467 | 2465,5148,36,2 2468 | 2466,4086,5,3 2469 | 2467,4946,463,2 2470 | 2468,4279,14,1 2471 | 2469,924,36,2 2472 | 2470,1880,121,-1 2473 | 2471,5252,425,2 2474 | 2472,2554,104,2 2475 | 2473,5487,525,-1 2476 | 2474,5102,16,1 2477 | 2475,930,8,1 2478 | 2476,5226,428,1 2479 | 2477,1230,143,1 2480 | 2478,3057,24,1 2481 | 2479,3655,93,-1 2482 | 2480,2607,104,3 2483 | 2481,5063,16,1 2484 | 2482,5002,470,1 2485 | 2483,787,8,1 2486 | 2484,2717,4,1 2487 | 2485,4725,67,3 2488 | 2486,3983,48,1 2489 | 2487,487,17,2 2490 | 2488,1528,12,1 2491 | 2489,4448,12,1 2492 | 2490,5418,505,2 2493 | 2491,4885,429,3 2494 | 2492,349,5,1 2495 | 2493,5446,542,-1 2496 | 2494,2980,36,1 2497 | 2495,920,8,1 2498 | 2496,5165,16,3 2499 | 2497,5304,36,2 2500 | 2498,4917,28,1 2501 | 2499,5084,433,2 2502 | 2500,4227,48,1 2503 | 2501,5330,425,4 2504 | 2502,2393,9,1 2505 | 2503,1528,16,1 2506 | 2504,2182,279,2 2507 | 2505,5297,437,1 2508 | 2506,4298,236,1 2509 | 2507,2030,48,1 2510 | 2508,3740,25,-1 2511 | 2509,1801,12,-1 2512 | 2510,519,11,1 2513 | 2511,2128,14,1 2514 | 2512,5314,419,2 2515 | 2513,5236,433,1 2516 | 2514,1474,152,1 2517 | 2515,4220,9,1 2518 | 2516,3075,27,1 2519 | 2517,249,20,2 2520 | 2518,4128,5,1 2521 | 2519,2258,25,2 2522 | 2520,4874,434,2 2523 | 2521,1321,24,3 2524 | 2522,2766,48,1 2525 | 2523,635,147,1 2526 | 2524,2293,81,3 2527 | 2525,4485,11,1 2528 | 2526,5339,422,2 2529 | 2527,2958,5,3 2530 | 2528,1207,286,3 2531 | 2529,2180,11,2 2532 | 2530,4894,427,2 2533 | 2531,674,8,2 2534 | 2532,1944,331,1 2535 | 2533,2034,25,2 2536 | 2534,4813,8,1 2537 | 2535,4042,86,1 2538 | 2536,5181,414,1 2539 | 2537,5130,461,1 2540 | 2538,3917,9,1 2541 | 2539,537,82,2 2542 | 2540,1350,24,2 2543 | 2541,1004,24,3 2544 | 2542,4946,36,2 2545 | 2543,2410,8,1 2546 | 2544,5588,525,-1 2547 | 2545,5204,36,1 2548 | 2546,5134,426,1 2549 | 2547,1391,25,2 2550 | 2548,1366,313,1 2551 | 
2549,473,124,1 2552 | 2550,4496,115,3 2553 | 2551,5228,36,2 2554 | 2552,3823,8,1 2555 | 2553,2083,4,1 2556 | 2554,1144,20,1 2557 | 2555,2789,193,3 2558 | 2556,5299,28,1 2559 | 2557,235,8,1 2560 | 2558,5103,416,3 2561 | 2559,1446,25,2 2562 | 2560,4970,28,1 2563 | 2561,2558,24,2 2564 | 2562,5139,419,3 2565 | 2563,1031,13,-1 2566 | 2564,4903,17,1 2567 | 2565,3946,178,1 2568 | 2566,1618,13,1 2569 | 2567,5502,525,-1 2570 | 2568,5045,425,1 2571 | 2569,5136,424,2 2572 | 2570,1095,18,1 2573 | 2571,1244,8,2 2574 | 2572,2418,48,1 2575 | 2573,5602,524,-1 2576 | 2574,5330,28,4 2577 | 2575,1198,112,1 2578 | 2576,919,25,2 2579 | 2577,2744,19,3 2580 | 2578,4021,4,3 2581 | 2579,1872,12,1 2582 | 2580,5400,496,-1 2583 | 2581,5259,427,1 2584 | 2582,3826,25,2 2585 | 2583,1122,19,1 2586 | 2584,2229,4,1 2587 | 2585,956,13,3 2588 | 2586,5090,467,1 2589 | 2587,3576,121,1 2590 | 2588,4968,414,2 2591 | 2589,5204,142,1 2592 | 2590,2789,4,3 2593 | 2591,4878,16,1 2594 | 2592,944,24,2 2595 | 2593,1332,201,3 2596 | 2594,4583,272,1 2597 | 2595,431,16,-1 2598 | 2596,571,47,1 2599 | 2597,2093,5,2 2600 | 2598,1669,49,1 2601 | 2599,3875,78,-1 2602 | 2600,1197,8,1 2603 | 2601,368,143,3 2604 | 2602,1278,49,1 2605 | 2603,4423,36,1 2606 | 2604,70,4,1 2607 | 2605,1971,132,2 2608 | 2606,2973,17,3 2609 | 2607,1561,8,1 2610 | 2608,2328,9,1 2611 | 2609,2164,12,-1 2612 | 2610,1519,78,-1 2613 | 2611,5382,424,2 2614 | 2612,3312,17,2 2615 | 2613,5080,16,1 2616 | 2614,948,49,1 2617 | 2615,5532,530,-1 2618 | 2616,4338,16,2 2619 | 2617,5305,16,3 2620 | 2618,2260,20,3 2621 | 2619,5269,16,1 2622 | 2620,773,48,1 2623 | 2621,2309,13,1 2624 | 2622,1911,19,1 2625 | 2623,210,13,1 2626 | 2624,5037,428,1 2627 | 2625,1288,136,3 2628 | 2626,2173,8,1 2629 | 2627,5675,542,-1 2630 | 2628,4423,71,1 2631 | 2629,4914,36,2 2632 | 2630,5184,476,1 2633 | 2631,5019,16,1 2634 | 2632,3737,8,1 2635 | 2633,1492,4,1 2636 | 2634,4368,11,1 2637 | 2635,2952,12,3 2638 | 2636,5911,524,-1 2639 | 2637,5071,36,2 2640 | 2638,4437,197,1 2641 | 2639,5558,531,-1 2642 | 2640,4956,433,1 2643 | 2641,4042,36,1 2644 | 2642,3363,4,1 2645 | 2643,4885,427,3 2646 | 2644,129,49,3 2647 | 2645,3426,4,1 2648 | 2646,1872,15,1 2649 | 2647,5316,28,3 2650 | 2648,3468,16,1 2651 | 2649,403,96,3 2652 | 2650,5032,424,1 2653 | 2651,1461,4,2 2654 | 2652,4917,420,1 2655 | 2653,2916,9,3 2656 | 2654,4184,331,1 2657 | 2655,3930,13,1 2658 | 2656,5346,476,1 2659 | 2657,3695,17,1 2660 | 2658,5596,524,-1 2661 | 2659,1399,8,2 2662 | 2660,2213,25,2 2663 | 2661,3047,8,1 2664 | 2662,4122,25,3 2665 | 2663,5696,541,-1 2666 | 2664,973,25,3 2667 | 2665,2432,8,1 2668 | 2666,3899,78,-1 2669 | 2667,5373,423,2 2670 | 2668,1617,232,1 2671 | 2669,3462,284,1 2672 | 2670,2969,8,1 2673 | 2671,4322,17,-1 2674 | 2672,1580,11,1 2675 | 2673,1987,17,1 2676 | 2674,5080,28,1 2677 | 2675,3695,20,1 2678 | 2676,4138,16,1 2679 | 2677,4752,369,1 2680 | 2678,4845,291,1 2681 | 2679,4041,21,3 2682 | 2680,4971,436,1 2683 | 2681,5363,16,1 2684 | 2682,3879,8,1 2685 | 2683,2476,13,1 2686 | 2684,2540,19,3 2687 | 2685,39,25,-1 2688 | 2686,4229,17,1 2689 | 2687,1676,48,1 2690 | 2688,1169,25,1 2691 | 2689,3903,11,1 2692 | 2690,3265,20,2 2693 | 2691,5080,422,1 2694 | 2692,5411,505,-1 2695 | 2693,1801,175,-1 2696 | 2694,3903,17,1 2697 | 2695,4919,434,1 2698 | 2696,5136,475,2 2699 | 2697,2551,194,1 2700 | 2698,3655,36,-1 2701 | 2699,2984,253,3 2702 | 2700,2918,25,3 2703 | 2701,4947,36,2 2704 | 2702,5032,436,1 2705 | 2703,1106,9,1 2706 | 2704,147,25,-1 2707 | 2705,1685,24,2 2708 | 2706,277,11,-1 2709 | 2707,3336,9,1 2710 | 2708,1604,369,1 2711 | 
2709,1561,116,1 2712 | 2710,3522,386,3 2713 | 2711,4680,375,3 2714 | 2712,4273,4,3 2715 | 2713,3101,11,3 2716 | 2714,2114,25,2 2717 | 2715,2893,49,3 2718 | 2716,1169,51,1 2719 | 2717,824,16,1 2720 | 2718,4681,5,2 2721 | 2719,3265,13,2 2722 | 2720,5264,432,1 2723 | 2721,2001,9,1 2724 | 2722,5553,531,-1 2725 | 2723,2084,8,1 2726 | 2724,436,9,1 2727 | 2725,2316,147,1 2728 | 2726,5520,528,-1 2729 | 2727,4639,253,3 2730 | 2728,3121,98,4 2731 | 2729,781,19,1 2732 | 2730,5077,438,1 2733 | 2731,4410,8,1 2734 | 2732,3351,16,1 2735 | 2733,5034,416,2 2736 | 2734,2084,348,1 2737 | 2735,2439,223,1 2738 | 2736,5130,28,1 2739 | 2737,4044,19,1 2740 | 2738,2868,237,1 2741 | 2739,4885,16,3 2742 | 2740,3468,157,1 2743 | 2741,5340,414,2 2744 | 2742,5067,466,2 2745 | 2743,2228,229,1 2746 | 2744,2759,291,1 2747 | 2745,5074,425,2 2748 | 2746,547,13,1 2749 | 2747,2316,16,1 2750 | 2748,3246,27,1 2751 | 2749,1223,9,1 2752 | 2750,5045,16,1 2753 | 2751,5403,499,-1 2754 | 2752,4084,9,1 2755 | 2753,5127,464,1 2756 | 2754,2083,5,1 2757 | 2755,1823,156,1 2758 | 2756,3641,157,1 2759 | 2757,5339,142,2 2760 | 2758,1645,98,2 2761 | 2759,1624,8,1 2762 | 2760,2733,9,1 2763 | 2761,5139,427,3 2764 | 2762,3202,5,1 2765 | 2763,5259,543,1 2766 | 2764,4241,5,1 2767 | 2765,3575,11,1 2768 | 2766,2260,19,3 2769 | 2767,4128,286,1 2770 | 2768,3523,1,2 2771 | 2769,3515,5,3 2772 | 2770,4993,433,3 2773 | 2771,3852,148,1 2774 | 2772,1974,25,1 2775 | 2773,1451,12,1 2776 | 2774,5156,16,3 2777 | 2775,2424,8,1 2778 | 2776,3841,11,1 2779 | 2777,5264,419,1 2780 | 2778,5296,16,2 2781 | 2779,5304,16,2 2782 | 2780,5653,518,-1 2783 | 2781,5711,543,-1 2784 | 2782,361,83,2 2785 | 2783,2404,85,1 2786 | 2784,5147,36,1 2787 | 2785,5061,16,2 2788 | 2786,3624,5,1 2789 | 2787,5670,521,-1 2790 | 2788,1405,24,2 2791 | 2789,5085,17,1 2792 | 2790,4041,13,3 2793 | 2791,4930,422,2 2794 | 2792,4475,315,2 2795 | 2793,3770,86,1 2796 | 2794,1749,14,1 2797 | 2795,2738,187,1 2798 | 2796,541,174,2 2799 | 2797,1614,48,1 2800 | 2798,4006,279,3 2801 | 2799,4565,77,-1 2802 | 2800,3819,49,3 2803 | 2801,5809,511,-1 2804 | 2802,70,5,1 2805 | 2803,1736,19,3 2806 | 2804,3223,15,-1 2807 | 2805,830,25,3 2808 | 2806,2228,8,1 2809 | 2807,614,24,3 2810 | 2808,2090,5,1 2811 | 2809,3799,243,1 2812 | 2810,3609,20,1 2813 | 2811,2535,15,-1 2814 | 2812,23,38,1 2815 | 2813,2410,9,1 2816 | 2814,1679,16,-1 2817 | 2815,2887,25,2 2818 | 2816,3593,20,2 2819 | 2817,5609,524,-1 2820 | 2818,2549,129,1 2821 | 2819,235,103,1 2822 | 2820,5037,420,1 2823 | 2821,3036,157,-1 2824 | 2822,2330,4,1 2825 | 2823,4457,8,1 2826 | 2824,4725,16,3 2827 | 2825,4875,461,1 2828 | 2826,147,17,-1 2829 | 2827,3410,8,3 2830 | 2828,5372,416,1 2831 | 2829,1465,62,1 2832 | 2830,5521,525,-1 2833 | 2831,5349,417,3 2834 | 2832,4615,25,2 2835 | 2833,2164,15,-1 2836 | 2834,3626,87,1 2837 | 2835,3621,13,1 2838 | 2836,3531,71,3 2839 | 2837,5242,16,2 2840 | 2838,4882,16,1 2841 | 2839,491,36,-1 2842 | 2840,5017,16,1 2843 | 2841,1684,24,2 2844 | 2842,5074,466,2 2845 | 2843,3143,36,1 2846 | 2844,5017,419,1 2847 | 2845,1217,4,3 2848 | 2846,1451,11,1 2849 | 2847,5133,16,4 2850 | 2848,2474,165,1 2851 | 2849,1253,24,2 2852 | 2850,5227,28,1 2853 | 2851,699,18,1 2854 | 2852,5474,515,-1 2855 | 2853,1489,176,-1 2856 | 2854,5123,436,1 2857 | 2855,2001,64,1 2858 | 2856,3412,16,2 2859 | 2857,1595,8,1 2860 | 2858,719,8,1 2861 | 2859,1157,49,1 2862 | 2860,2984,19,3 2863 | 2861,515,78,1 2864 | 2862,5074,36,2 2865 | 2863,2411,49,3 2866 | 2864,5597,524,-1 2867 | 2865,1971,20,2 2868 | 2866,3531,19,3 2869 | 2867,487,130,2 2870 | 2868,4550,342,1 2871 | 
2869,5091,16,3 2872 | 2870,5596,525,-1 2873 | 2871,3922,95,2 2874 | 2872,5627,512,-1 2875 | 2873,2173,260,1 2876 | 2874,3384,11,-1 2877 | 2875,4312,36,1 2878 | 2876,1018,48,3 2879 | 2877,5520,525,-1 2880 | 2878,2955,85,1 2881 | 2879,5130,424,1 2882 | 2880,5502,524,-1 2883 | 2881,5126,428,1 2884 | 2882,4917,16,1 2885 | 2883,3864,83,2 2886 | 2884,5236,422,1 2887 | 2885,1561,9,1 2888 | 2886,5313,428,1 2889 | 2887,5337,489,4 2890 | 2888,5928,524,-1 2891 | 2889,5366,425,1 2892 | 2890,1405,25,2 2893 | 2891,1822,49,1 2894 | 2892,4993,440,3 2895 | 2893,5033,28,1 2896 | 2894,3627,16,1 2897 | 2895,2476,36,1 2898 | 2896,3626,86,1 2899 | 2897,5314,425,2 2900 | 2898,4529,4,1 2901 | 2899,5232,460,2 2902 | 2900,3138,49,3 2903 | 2901,3733,9,1 2904 | 2902,3426,369,1 2905 | 2903,4829,25,-1 2906 | 2904,2463,20,1 2907 | 2905,5103,16,3 2908 | 2906,5116,420,1 2909 | 2907,4957,16,1 2910 | 2908,1338,17,1 2911 | 2909,3609,174,1 2912 | 2910,1772,161,2 2913 | 2911,3836,16,1 2914 | 2912,4247,13,3 2915 | 2913,3805,16,-1 2916 | 2914,5417,499,-1 2917 | 2915,3130,279,3 2918 | 2916,638,8,1 2919 | 2917,371,9,1 2920 | 2918,1758,49,1 2921 | 2919,699,19,1 2922 | 2920,4936,36,1 2923 | 2921,1586,8,1 2924 | 2922,603,36,1 2925 | 2923,3299,17,1 2926 | 2924,2627,305,3 2927 | 2925,3875,13,-1 2928 | 2926,1777,4,1 2929 | 2927,3452,310,1 2930 | 2928,4275,127,1 2931 | 2929,880,49,1 2932 | 2930,4658,170,3 2933 | 2931,5122,449,2 2934 | 2932,2064,16,1 2935 | 2933,5002,36,1 2936 | 2934,1501,25,-1 2937 | 2935,3759,9,1 2938 | 2936,1004,135,3 2939 | 2937,249,16,2 2940 | 2938,3826,24,2 2941 | 2939,5180,462,2 2942 | 2940,5258,419,3 2943 | 2941,4811,13,3 2944 | 2942,2556,9,1 2945 | 2943,3511,20,1 2946 | 2944,5316,428,3 2947 | 2945,2692,25,2 2948 | 2946,5254,28,1 2949 | 2947,4930,16,2 2950 | 2948,5124,16,1 2951 | 2949,5339,449,2 2952 | 2950,3266,24,3 2953 | 2951,2955,87,1 2954 | 2952,3179,9,1 2955 | 2953,5304,419,2 2956 | 2954,2095,9,-1 2957 | 2955,1677,9,1 2958 | 2956,5073,428,2 2959 | 2957,368,48,3 2960 | 2958,1778,25,2 2961 | 2959,3497,4,1 2962 | 2960,1803,24,3 2963 | 2961,434,37,1 2964 | 2962,1485,24,-1 2965 | 2963,1519,12,-1 2966 | 2964,2738,8,1 2967 | 2965,2286,268,3 2968 | 2966,4313,3,1 2969 | 2967,5303,28,1 2970 | 2968,4424,17,2 2971 | 2969,2789,5,3 2972 | 2970,4980,480,1 2973 | 2971,5148,422,2 2974 | 2972,1210,17,1 2975 | 2973,4278,142,2 2976 | 2974,1219,24,2 2977 | 2975,2902,9,1 2978 | 2976,3164,24,3 2979 | 2977,2463,13,1 2980 | 2978,3086,93,1 2981 | 2979,2495,8,1 2982 | 2980,3327,16,1 2983 | 2981,629,11,-1 2984 | 2982,1998,16,1 2985 | 2983,3731,407,1 2986 | 2984,1124,25,2 2987 | 2985,306,2,2 2988 | 2986,2128,199,1 2989 | 2987,5250,491,1 2990 | 2988,4879,16,1 2991 | 2989,2531,385,-1 2992 | 2990,500,48,1 2993 | 2991,5081,28,1 2994 | 2992,3986,9,3 2995 | 2993,3360,190,3 2996 | 2994,3575,17,1 2997 | 2995,4959,16,2 2998 | 2996,1968,8,1 2999 | 2997,5199,16,1 3000 | 2998,5110,16,1 3001 | 2999,1702,4,1 3002 | 3000,1944,331,1 3003 | 3001,1727,16,1 3004 | 3002,4042,87,1 3005 | 3003,4368,17,1 3006 | 3004,1158,5,1 3007 | 3005,4506,106,1 3008 | 3006,4247,132,3 3009 | 3007,5203,422,3 3010 | 3008,3452,8,1 3011 | 3009,3327,36,1 3012 | 3010,3246,199,1 3013 | 3011,1403,176,1 3014 | 3012,1561,8,1 3015 | 3013,5383,429,3 3016 | 3014,5290,424,2 3017 | 3015,224,11,-1 3018 | 3016,3504,16,2 3019 | 3017,5446,543,-1 3020 | 3018,3202,102,1 3021 | 3019,5276,473,3 3022 | 3020,1655,148,1 3023 | 3021,5416,503,-1 3024 | 3022,4081,106,3 3025 | 3023,818,90,1 3026 | 3024,4882,425,1 3027 | 3025,4005,8,1 3028 | 3026,2260,98,3 3029 | 3027,2047,86,1 3030 | 3028,2316,216,1 
3031 | 3029,3223,13,-1 3032 | 3030,341,98,1 3033 | 3031,379,48,1 3034 | 3032,1561,9,1 3035 | 3033,5045,450,1 3036 | 3034,2408,83,4 3037 | 3035,1094,238,1 3038 | 3036,5199,414,1 3039 | 3037,5058,417,1 3040 | 3038,5392,16,2 3041 | 3039,5230,419,1 3042 | 3040,5224,419,3 3043 | 3041,374,48,1 3044 | 3042,1893,110,1 3045 | 3043,2030,49,1 3046 | 3044,1971,14,2 3047 | 3045,359,17,1 3048 | 3046,1767,9,1 3049 | 3047,5711,542,-1 3050 | 3048,1734,48,3 3051 | 3049,3681,5,1 3052 | 3050,1451,13,1 3053 | 3051,4582,298,1 3054 | 3052,5296,419,2 3055 | 3053,3624,302,1 3056 | 3054,5380,433,2 3057 | 3055,3368,25,2 3058 | 3056,224,16,-1 3059 | 3057,4183,12,3 3060 | 3058,2296,204,1 3061 | 3059,3993,19,5 3062 | 3060,3621,14,1 3063 | 3061,4971,414,1 3064 | 3062,209,5,1 3065 | 3063,3289,48,1 3066 | 3064,5527,530,-1 3067 | 3065,5154,434,2 3068 | 3066,1145,17,1 3069 | 3067,1604,221,1 3070 | 3068,4588,8,2 3071 | 3069,4941,424,2 3072 | 3070,2408,20,4 3073 | 3071,4189,13,1 3074 | 3072,4878,28,1 3075 | 3073,1833,8,1 3076 | 3074,5184,36,1 3077 | 3075,379,125,1 3078 | 3076,3037,49,1 3079 | 3077,4784,257,1 3080 | 3078,5373,468,2 3081 | 3079,5929,524,-1 3082 | 3080,5605,524,-1 3083 | 3081,5423,496,-1 3084 | 3082,4062,16,1 3085 | 3083,4183,13,3 3086 | 3084,5186,418,1 3087 | 3085,1004,25,3 3088 | 3086,5133,417,4 3089 | 3087,4000,6,3 3090 | 3088,781,17,1 3091 | 3089,4229,16,1 3092 | 3090,5169,16,2 3093 | 3091,4968,28,2 3094 | 3092,4000,4,3 3095 | 3093,4807,17,1 3096 | 3094,1023,8,1 3097 | 3095,5269,17,1 3098 | 3096,235,8,1 3099 | 3097,1546,82,3 3100 | 3098,5127,419,1 3101 | 3099,5373,424,2 3102 | 3100,656,25,1 3103 | 3101,1798,66,2 3104 | 3102,2439,4,1 3105 | 3103,4085,9,1 3106 | 3104,5346,434,1 3107 | 3105,5330,417,4 3108 | 3106,4639,36,3 3109 | 3107,5335,420,1 3110 | 3108,5213,16,1 3111 | 3109,924,13,2 3112 | 3110,2001,8,1 3113 | 3111,4033,15,1 3114 | 3112,4279,71,1 3115 | 3113,1960,113,2 3116 | 3114,1968,8,1 3117 | 3115,5417,502,-1 3118 | 3116,2328,195,1 3119 | 3117,4625,48,1 3120 | 3118,1392,17,1 3121 | 3119,5438,542,-1 3122 | 3120,2323,49,1 3123 | 3121,3799,78,1 3124 | 3122,5180,16,2 3125 | 3123,5021,417,2 3126 | 3124,210,36,1 3127 | 3125,2373,17,1 3128 | 3126,3983,312,1 3129 | 3127,2360,16,1 3130 | 3128,4448,17,1 3131 | 3129,5001,422,1 3132 | 3130,1392,176,1 3133 | 3131,5356,16,1 3134 | 3132,5006,16,2 3135 | 3133,1994,27,1 3136 | 3134,5366,417,1 3137 | 3135,4278,19,2 3138 | 3136,892,4,1 3139 | 3137,5518,525,-1 3140 | 3138,16,31,1 3141 | 3139,4845,9,1 3142 | 3140,5259,542,1 3143 | 3141,5377,420,1 3144 | 3142,1586,111,1 3145 | 3143,3086,374,1 3146 | 3144,1944,9,1 3147 | 3145,410,123,5 3148 | 3146,5226,16,1 3149 | 3147,4698,8,1 3150 | 3148,994,25,2 3151 | 3149,3826,188,2 3152 | 3150,1971,19,2 3153 | 3151,4344,49,1 3154 | 3152,2208,24,2 3155 | 3153,3593,72,2 3156 | 3154,2724,24,2 3157 | 3155,1750,9,2 3158 | 3156,4860,13,-1 3159 | 3157,2772,25,1 3160 | 3158,5584,525,-1 3161 | 3159,3533,36,1 3162 | 3160,1359,48,1 3163 | 3161,2011,49,1 3164 | 3162,3681,4,1 3165 | 3163,5104,463,2 3166 | 3164,1672,311,3 3167 | 3165,1997,353,3 3168 | 3166,4875,424,1 3169 | 3167,2637,204,1 3170 | 3168,3819,127,3 3171 | 3169,3641,11,1 3172 | 3170,1911,13,1 3173 | 3171,3511,19,1 3174 | 3172,3491,25,1 3175 | 3173,1028,8,1 3176 | 3174,4892,36,2 3177 | 3175,5024,414,1 3178 | 3176,826,204,1 3179 | 3177,2785,5,1 3180 | 3178,5193,36,2 3181 | 3179,4439,332,3 3182 | 3180,3946,8,1 3183 | 3181,5380,16,2 3184 | 3182,1524,48,1 3185 | 3183,3855,104,3 3186 | 3184,3751,16,2 3187 | 3185,5697,541,-1 3188 | 3186,5932,524,-1 3189 | 3187,4811,122,3 3190 | 3188,883,9,1 
3191 | 3189,1152,25,2 3192 | 3190,3139,8,2 3193 | 3191,2258,24,2 3194 | 3192,1676,49,1 3195 | 3193,2984,14,3 3196 | 3194,278,131,1 3197 | 3195,5249,425,1 3198 | 3196,274,24,2 3199 | 3197,4752,4,1 3200 | 3198,4639,19,3 3201 | 3199,3838,86,1 3202 | 3200,5123,414,1 3203 | 3201,4242,16,1 3204 | 3202,1903,27,2 3205 | 3203,4955,417,1 3206 | 3204,4829,13,-1 3207 | 3205,4747,263,1 3208 | 3206,5434,496,-1 3209 | 3207,2880,233,1 3210 | 3208,5366,447,1 3211 | 3209,3830,5,1 3212 | 3210,2785,267,1 3213 | 3211,2208,113,2 3214 | 3212,991,52,1 3215 | 3213,1797,5,5 3216 | 3214,515,13,1 3217 | 3215,964,219,1 3218 | 3216,1740,12,1 3219 | 3217,5300,423,2 3220 | 3218,2340,152,1 3221 | 3219,830,25,3 3222 | 3220,4885,17,3 3223 | 3221,4457,9,1 3224 | 3222,4904,430,1 3225 | 3223,3303,14,1 3226 | 3224,1974,268,1 3227 | 3225,3737,356,1 3228 | 3226,3630,9,1 3229 | 3227,4397,8,1 3230 | 3228,1464,311,1 3231 | 3229,5488,523,-1 3232 | 3230,3797,49,3 3233 | 3231,5050,424,2 3234 | 3232,3476,297,1 3235 | 3233,1404,78,1 3236 | 3234,1703,38,1 3237 | 3235,4303,49,1 3238 | 3236,3875,12,-1 3239 | 3237,5088,450,1 3240 | 3238,2229,5,1 3241 | 3239,3178,9,2 3242 | 3240,4565,13,-1 3243 | 3241,5592,541,-1 3244 | 3242,1144,27,1 3245 | 3243,1962,117,1 3246 | 3244,1129,36,1 3247 | 3245,5128,16,1 3248 | 3246,4084,116,1 3249 | 3247,3316,9,3 3250 | 3248,781,242,1 3251 | 3249,4400,24,2 3252 | 3250,719,237,1 3253 | 3251,4949,417,1 3254 | 3252,5272,434,1 3255 | 3253,4981,439,1 3256 | 3254,2474,8,1 3257 | 3255,3294,25,2 3258 | 3256,3134,49,3 3259 | 3257,5558,525,-1 3260 | 3258,2114,94,2 3261 | 3259,5382,36,2 3262 | 3260,2535,36,-1 3263 | 3261,2545,89,2 3264 | 3262,4298,4,1 3265 | 3263,3126,16,1 3266 | 3264,1146,49,1 3267 | 3265,1561,8,1 3268 | 3266,5280,422,3 3269 | 3267,5117,417,2 3270 | 3268,1383,12,-1 3271 | 3269,2744,16,3 3272 | 3270,1956,48,1 3273 | 3271,3303,197,1 3274 | 3272,4565,12,-1 3275 | 3273,377,19,1 3276 | 3274,2619,4,1 3277 | 3275,3096,9,3 3278 | 3276,4959,429,2 3279 | 3277,4735,239,1 3280 | 3278,4240,49,1 3281 | 3279,3351,19,1 3282 | 3280,4834,275,1 3283 | 3281,5297,28,1 3284 | 3282,862,20,1 3285 | 3283,361,20,2 3286 | 3284,70,73,1 3287 | 3285,3360,5,3 3288 | 3286,5349,492,3 3289 | 3287,2646,2,2 3290 | 3288,1618,51,1 3291 | 3289,1169,16,1 3292 | 3290,4968,424,2 3293 | 3291,3899,36,-1 3294 | 3292,2010,302,1 3295 | 3293,827,8,1 3296 | 3294,1485,25,-1 3297 | 3295,5397,496,-1 3298 | 3296,1702,5,1 3299 | 3297,5008,417,3 3300 | 3298,1028,226,1 3301 | 3299,4987,445,2 3302 | 3300,2772,89,1 3303 | 3301,2955,86,1 3304 | 3302,4594,13,1 3305 | 3303,3627,17,1 3306 | 3304,1689,24,2 3307 | 3305,5653,541,-1 3308 | 3306,3958,361,3 3309 | 3307,5049,417,1 3310 | 3308,2952,16,3 3311 | 3309,4066,298,2 3312 | 3310,2490,137,1 3313 | 3311,3626,85,1 3314 | 3312,4373,48,3 3315 | 3313,3164,25,3 3316 | 3314,3276,16,3 3317 | 3315,1761,78,3 3318 | 3316,1387,51,1 3319 | 3317,2391,49,3 3320 | 3318,4625,269,1 3321 | 3319,629,16,-1 3322 | 3320,5578,541,-1 3323 | 3321,787,116,1 3324 | 3322,2207,17,1 3325 | 3323,1547,8,1 3326 | 3324,3239,24,2 3327 | 3325,5676,515,-1 3328 | 3326,5616,513,-1 3329 | 3327,4765,15,1 3330 | 3328,3988,9,2 3331 | 3329,2229,84,1 3332 | 3330,5373,36,2 3333 | 3331,2973,20,3 3334 | 3332,5061,416,2 3335 | 3333,209,4,1 3336 | 3334,1994,14,1 3337 | 3335,4907,414,3 3338 | 3336,2005,238,3 3339 | 3337,2765,201,1 3340 | 3338,3303,19,1 3341 | 3339,4372,119,1 3342 | 3340,5360,425,1 3343 | 3341,1344,142,3 3344 | 3342,5163,28,1 3345 | 3343,2079,24,2 3346 | 3344,5576,533,-1 3347 | 3345,1591,8,2 3348 | 3346,3148,16,5 3349 | 3347,662,9,1 3350 | 
3348,5337,417,4 3351 | 3349,4506,24,1 3352 | 3350,5071,422,2 3353 | 3351,5169,416,2 3354 | 3352,1764,9,3 3355 | 3353,1694,49,1 3356 | 3354,4410,9,1 3357 | 3355,2313,229,1 3358 | 3356,5052,36,2 3359 | 3357,359,98,1 3360 | 3358,5203,433,3 3361 | 3359,5300,28,2 3362 | 3360,3285,16,1 3363 | 3361,515,77,1 3364 | 3362,894,5,1 3365 | 3363,4615,13,2 3366 | 3364,635,16,1 3367 | 3365,5198,28,2 3368 | 3366,303,9,1 3369 | 3367,4114,17,-1 3370 | 3368,3463,8,1 3371 | 3369,702,169,1 3372 | 3370,3785,122,3 3373 | 3371,1079,8,1 3374 | 3372,5045,28,1 3375 | 3373,716,176,1 3376 | 3374,5525,524,-1 3377 | 3375,3223,12,-1 3378 | 3376,4839,18,3 3379 | 3377,4554,16,1 3380 | 3378,1018,200,3 3381 | 3379,3070,14,3 3382 | 3380,5034,16,2 3383 | 3381,4368,332,1 3384 | 3382,1956,49,1 3385 | 3383,4658,24,3 3386 | 3384,3593,17,2 3387 | 3385,4044,71,1 3388 | 3386,1023,116,1 3389 | 3387,5538,530,-1 3390 | 3388,710,51,1 3391 | 3389,2307,2,2 3392 | 3390,2868,8,1 3393 | 3391,2077,189,2 3394 | 3392,3848,24,3 3395 | 3393,1749,20,1 3396 | 3394,1822,48,1 3397 | 3395,4728,25,2 3398 | 3396,1997,4,3 3399 | 3397,3888,348,1 3400 | 3398,3476,48,1 3401 | 3399,5226,28,1 3402 | 3400,1426,94,2 3403 | 3401,4391,8,1 3404 | 3402,3178,162,2 3405 | 3403,2495,9,1 3406 | 3404,4437,20,1 3407 | 3405,3392,78,-1 3408 | 3406,3533,32,1 3409 | 3407,5413,500,-1 3410 | 3408,3043,49,1 3411 | 3409,5205,28,1 3412 | 3410,5084,422,2 3413 | 3411,3841,13,1 3414 | 3412,1776,49,1 3415 | 3413,1157,266,1 3416 | 3414,5261,16,4 3417 | 3415,5228,16,2 3418 | 3416,1310,124,2 3419 | 3417,5383,419,3 3420 | 3418,3215,290,3 3421 | 3419,4241,359,1 3422 | 3420,311,84,1 3423 | 3421,1853,49,1 3424 | 3422,5566,534,-1 3425 | 3423,4954,473,1 3426 | 3424,3733,8,1 3427 | 3425,5189,36,1 3428 | 3426,374,101,1 3429 | 3427,4094,4,1 3430 | 3428,3001,49,3 3431 | 3429,5377,16,1 3432 | 3430,3150,11,1 3433 | 3431,818,49,1 3434 | 3432,4346,25,2 3435 | 3433,3208,245,1 3436 | 3434,4929,420,1 3437 | 3435,5104,36,2 3438 | 3436,2330,213,1 3439 | 3437,5553,524,-1 3440 | 3438,3677,186,2 3441 | 3439,3482,16,1 3442 | 3440,4041,12,3 3443 | 3441,2359,13,2 3444 | 3442,547,12,1 3445 | 3443,2159,8,1 3446 | 3444,3000,24,3 3447 | 3445,4765,12,1 3448 | 3446,4150,24,2 3449 | 3447,3630,8,1 3450 | 3448,3299,176,1 3451 | 3449,1145,13,1 3452 | 3450,3533,147,1 3453 | 3451,1704,8,2 3454 | 3452,699,20,1 3455 | 3453,893,17,1 3456 | 3454,656,15,1 3457 | 3455,3855,13,3 3458 | 3456,1546,24,3 3459 | 3457,4981,419,1 3460 | 3458,97,5,1 3461 | 3459,341,14,1 3462 | 3460,4420,145,-1 3463 | 3461,4062,36,1 3464 | 3462,4919,28,1 3465 | 3463,2474,9,1 3466 | 3464,1306,48,3 3467 | 3465,5675,518,-1 3468 | 3466,1574,4,1 3469 | 3467,5346,16,1 3470 | 3468,4041,17,3 3471 | 3469,4424,83,2 3472 | 3470,4594,11,1 3473 | 3471,2650,16,1 3474 | 3472,5019,428,1 3475 | 3473,5250,16,1 3476 | 3474,1546,25,3 3477 | 3475,1833,118,1 3478 | 3476,2159,187,1 3479 | 3477,893,256,1 3480 | 3478,5624,513,-1 3481 | 3479,403,25,3 3482 | 3480,2923,20,1 3483 | 3481,5520,524,-1 3484 | 3482,1122,36,1 3485 | 3483,5070,425,2 3486 | 3484,5282,475,2 3487 | 3485,853,4,2 3488 | 3486,973,170,3 3489 | 3487,4887,434,1 3490 | 3488,3134,48,3 3491 | 3489,5014,36,3 3492 | 3490,1414,25,2 3493 | 3491,3782,181,2 3494 | 3492,4199,17,1 3495 | 3493,5927,546,-1 3496 | 3494,1446,95,2 3497 | 3495,4110,147,1 3498 | 3496,5298,443,3 3499 | 3497,4978,16,1 3500 | 3498,4319,19,2 3501 | 3499,5566,524,-1 3502 | 3500,2548,48,1 3503 | 3501,5333,16,2 3504 | 3502,2692,23,2 3505 | 3503,5230,16,1 3506 | 3504,5333,36,2 3507 | 3505,3410,284,3 3508 | 3506,5151,429,2 3509 | 3507,2064,36,1 3510 | 
3508,5014,419,3 3511 | 3509,4860,12,-1 3512 | 3510,3217,5,1 3513 | 3511,862,13,1 3514 | 3512,3121,19,4 3515 | 3513,5006,36,2 3516 | 3514,5374,414,1 3517 | 3515,3636,8,1 3518 | 3516,4765,16,1 3519 | 3517,4583,17,1 3520 | 3518,4813,140,1 3521 | 3519,4946,416,2 3522 | 3520,5039,468,4 3523 | 3521,5049,16,1 3524 | 3522,2759,9,1 3525 | 3523,1547,250,1 3526 | 3524,1987,15,1 3527 | 3525,1831,8,1 3528 | 3526,3575,15,1 3529 | 3527,1580,16,1 3530 | 3528,953,17,-1 3531 | 3529,5246,28,1 3532 | 3530,515,36,1 3533 | 3531,1198,11,1 3534 | 3532,984,11,-1 3535 | 3533,4750,89,3 3536 | 3534,3746,11,-1 3537 | 3535,2391,289,3 3538 | 3536,3391,49,1 3539 | 3537,2445,22,1 3540 | 3538,3086,12,1 3541 | 3539,1031,17,-1 3542 | 3540,5198,16,2 3543 | 3541,1746,13,1 3544 | 3542,4439,11,3 3545 | 3543,4982,419,2 3546 | 3544,3121,14,4 3547 | 3545,4453,27,1 3548 | 3546,1565,17,-1 3549 | 3547,4312,24,1 3550 | 3548,979,20,1 3551 | 3549,710,36,1 3552 | 3550,353,11,-1 3553 | 3551,599,27,1 3554 | 3552,3062,13,2 3555 | 3553,97,84,1 3556 | 3554,4520,5,1 3557 | 3555,5110,425,1 3558 | 3556,3276,11,3 3559 | 3557,1189,8,1 3560 | 3558,1124,63,2 3561 | 3559,1502,4,1 3562 | 3560,3864,19,2 3563 | 3561,1403,51,1 3564 | 3562,577,12,1 3565 | 3563,5124,442,1 3566 | 3564,638,9,1 3567 | 3565,3583,24,4 3568 | 3566,129,48,3 3569 | 3567,1736,20,3 3570 | 3568,5014,16,3 3571 | 3569,4468,104,2 3572 | 3570,2561,24,2 3573 | 3571,5159,419,1 3574 | 3572,4875,414,1 3575 | 3573,3576,25,1 3576 | 3574,5165,17,3 3577 | 3575,5081,477,1 3578 | 3576,3392,354,-1 3579 | 3577,4110,16,1 3580 | 3578,4628,235,1 3581 | 3579,3130,24,3 3582 | 3580,2815,8,1 3583 | 3581,362,49,3 3584 | 3582,1198,17,1 3585 | 3583,5061,466,2 3586 | 3584,2990,68,2 3587 | 3585,5292,28,1 3588 | 3586,1087,4,2 3589 | 3587,5314,142,2 3590 | 3588,210,78,1 3591 | 3589,5416,499,-1 3592 | 3590,1195,113,2 3593 | 3591,2866,24,2 3594 | 3592,1465,36,1 3595 | 3593,5181,142,1 3596 | 3594,1485,17,-1 3597 | 3595,1998,155,1 3598 | 3596,5293,434,3 3599 | 3597,4529,3,1 3600 | 3598,5264,425,1 3601 | 3599,1417,12,-1 3602 | 3600,4468,13,2 3603 | 3601,5191,28,1 3604 | 3602,5136,36,2 3605 | 3603,1465,16,1 3606 | 3604,2476,12,1 3607 | 3605,3075,20,1 3608 | 3606,140,9,1 3609 | 3607,2765,48,1 3610 | 3608,5261,425,4 3611 | 3609,2136,249,3 3612 | 3610,4916,16,1 3613 | 3611,3730,49,1 3614 | 3612,1676,227,1 3615 | 3613,4949,16,1 3616 | 3614,1128,79,2 3617 | 3615,4453,16,1 3618 | 3616,1461,5,2 3619 | 3617,23,8,1 3620 | 3618,3213,8,1 3621 | 3619,5236,16,1 3622 | 3620,3002,24,3 3623 | 3621,1387,36,1 3624 | 3622,3428,9,1 3625 | 3623,4059,20,3 3626 | 3624,3770,85,1 3627 | 3625,4242,20,1 3628 | 3626,4729,49,3 3629 | 3627,3294,181,2 3630 | 3628,1446,24,2 3631 | 3629,1677,8,1 3632 | 3630,5931,546,-1 3633 | 3631,5501,525,-1 3634 | 3632,277,16,-1 3635 | 3633,5148,142,2 3636 | 3634,5082,425,1 3637 | 3635,3583,214,4 3638 | 3636,4446,85,1 3639 | 3637,3138,325,3 3640 | 3638,5112,28,1 3641 | 3639,4967,428,1 3642 | 3640,542,45,3 3643 | 3641,5292,450,1 3644 | 3642,3523,139,2 3645 | 3643,16,5,1 3646 | 3644,4482,133,5 3647 | 3645,5584,533,-1 3648 | 3646,1158,4,1 3649 | 3647,2624,116,1 3650 | 3648,2254,361,1 3651 | 3649,4904,28,1 3652 | 3650,4229,394,1 3653 | 3651,1668,298,5 3654 | 3652,1885,8,1 3655 | 3653,2136,9,3 3656 | 3654,1536,8,1 3657 | 3655,839,174,1 3658 | 3656,5418,496,2 3659 | 3657,1464,4,1 3660 | 3658,5159,16,1 3661 | 3659,1288,49,3 3662 | 3660,3797,48,3 3663 | 3661,2624,9,1 3664 | 3662,5677,541,-1 3665 | 3663,3621,30,1 3666 | 3664,710,16,1 3667 | 3665,2424,9,1 3668 | 3666,5250,429,1 3669 | 3667,2957,24,2 3670 | 3668,4327,16,-1 3671 
| 3669,3805,11,-1 3672 | 3670,5034,434,2 3673 | 3671,2667,43,1 3674 | 3672,5157,16,3 3675 | 3673,5082,16,1 3676 | 3674,1872,16,1 3677 | 3675,5256,16,1 3678 | 3676,4026,152,1 3679 | 3677,1350,109,2 3680 | 3678,2724,362,2 3681 | 3679,5427,497,-1 3682 | 3680,2064,147,1 3683 | 3681,3336,8,1 3684 | 3682,2570,150,3 3685 | 3683,5927,524,-1 3686 | 3684,1210,29,1 3687 | 3685,4457,265,1 3688 | 3686,5073,420,2 3689 | 3687,3852,147,1 3690 | 3688,2093,4,2 3691 | 3689,3514,16,1 3692 | 3690,1325,17,2 3693 | 3691,4879,420,1 3694 | 3692,5550,525,-1 3695 | 3693,5102,444,1 3696 | 3694,862,19,1 3697 | 3695,2313,8,1 3698 | 3696,3577,17,-1 3699 | 3697,4417,199,2 3700 | 3698,97,4,1 3701 | 3699,1356,57,1 3702 | 3700,4976,422,2 3703 | 3701,1620,5,1 3704 | 3702,5532,524,-1 3705 | 3703,3419,25,1 3706 | 3704,5303,414,1 3707 | 3705,4625,49,1 3708 | 3706,5024,424,1 3709 | 3707,4808,24,1 3710 | 3708,4808,25,1 3711 | 3709,1566,8,1 3712 | 3710,2408,19,4 3713 | 3711,2988,48,3 3714 | 3712,4882,417,1 3715 | 3713,5126,420,1 3716 | 3714,1230,49,1 3717 | 3715,4033,36,1 3718 | 3716,5261,28,4 3719 | 3717,1464,5,1 3720 | 3718,3785,24,3 3721 | 3719,3142,340,1 3722 | 3720,1599,2,2 3723 | 3721,1505,36,1 3724 | 3722,5191,425,1 3725 | 3723,5008,17,3 3726 | 3724,3219,8,1 3727 | 3725,434,8,1 3728 | 3726,2637,204,1 3729 | 3727,1519,36,-1 3730 | 3728,4892,422,2 3731 | 3729,274,25,2 3732 | 3730,5232,36,2 3733 | 3731,5160,424,1 3734 | 3732,4066,66,2 3735 | 3733,2429,366,2 3736 | 3734,2744,20,3 3737 | 3735,4906,419,1 3738 | 3736,3686,398,1 3739 | 3737,4588,9,2 3740 | 3738,1325,20,2 3741 | 3739,5227,16,1 3742 | 3740,1580,67,1 3743 | 3741,2591,49,3 3744 | 3742,235,103,1 3745 | 3743,1405,214,2 3746 | 3744,5033,432,1 3747 | 3745,1734,49,3 3748 | 3746,2545,17,2 3749 | 3747,371,103,1 3750 | 3748,4123,48,3 3751 | 3749,5084,16,2 3752 | 3750,3655,12,-1 3753 | 3751,1421,8,1 3754 | 3752,2041,105,3 3755 | 3753,1426,25,2 3756 | 3754,4183,67,3 3757 | 3755,984,16,-1 3758 | 3756,1736,14,3 3759 | 3757,3514,17,1 3760 | 3758,3837,175,1 3761 | 3759,4582,9,1 3762 | 3760,1599,8,2 3763 | 3761,1522,135,2 3764 | 3762,4821,20,2 3765 | 3763,3576,24,1 3766 | 3764,3095,9,1 3767 | 3765,3001,285,3 3768 | 3766,577,36,1 3769 | 3767,1672,5,3 3770 | 3768,3953,69,1 3771 | 3769,2011,166,1 3772 | 3770,5242,433,2 3773 | 3771,3515,4,3 3774 | 3772,4983,424,2 3775 | 3773,3834,8,1 3776 | 3774,5077,416,1 3777 | 3775,781,13,1 3778 | 3776,1332,48,3 3779 | 3777,4524,8,1 3780 | 3778,3781,5,3 3781 | 3779,4417,142,2 3782 | 3780,1441,25,-1 3783 | 3781,1306,49,3 3784 | 3782,3327,175,1 3785 | 3783,2990,25,2 3786 | 3784,4328,8,1 3787 | 3785,571,49,1 3788 | 3786,3312,20,2 3789 | 3787,1983,12,1 3790 | 3788,1289,9,1 3791 | 3789,5014,484,3 3792 | 3790,837,73,1 3793 | 3791,2606,9,1 3794 | 3792,4839,17,3 3795 | 3793,5616,518,-1 3796 | 3794,1904,25,1 3797 | 3795,4391,195,1 3798 | 3796,4546,48,5 3799 | 3797,288,9,1 3800 | 3798,1338,15,1 3801 | 3799,1797,4,5 3802 | 3800,4987,16,2 3803 | 3801,97,4,1 3804 | 3802,3142,48,1 3805 | 3803,1718,24,2 3806 | 3804,1505,25,1 3807 | 3805,5118,424,2 3808 | 3806,1565,25,-1 3809 | 3807,1041,9,1 3810 | 3808,629,12,-1 3811 | 3809,4433,60,3 3812 | 3810,3620,16,2 3813 | 3811,4448,8,1 3814 | 3812,5594,524,-1 3815 | 3813,4496,9,3 3816 | 3814,3799,36,1 3817 | 3815,4582,8,1 3818 | 3816,3129,374,3 3819 | 3817,335,87,1 3820 | 3818,5205,414,1 3821 | 3819,3047,9,1 3822 | 3820,902,13,1 3823 | 3821,2716,135,3 3824 | 3822,3333,9,1 3825 | 3823,5246,420,1 3826 | 3824,4454,9,1 3827 | 3825,3312,34,2 3828 | 3826,3626,13,1 3829 | 3827,5578,536,-1 3830 | 3828,400,167,3 3831 | 3829,5313,420,1 
3832 | 3830,5426,543,-1 3833 | 3831,3002,25,3 3834 | 3832,2067,192,1 3835 | 3833,3198,25,-1 3836 | 3834,4917,442,1 3837 | 3835,2492,25,3 3838 | 3836,5356,419,1 3839 | 3837,5514,524,-1 3840 | 3838,1770,213,1 3841 | 3839,1746,77,1 3842 | 3840,235,8,1 3843 | 3841,5513,525,-1 3844 | 3842,5139,16,3 3845 | 3843,235,9,1 3846 | 3844,3511,124,1 3847 | 3845,5080,415,1 3848 | 3846,4279,16,1 3849 | 3847,1417,93,-1 3850 | 3848,5103,17,3 3851 | 3849,1624,8,1 3852 | 3850,4914,452,2 3853 | 3851,5697,518,-1 3854 | 3852,662,37,1 3855 | 3853,308,12,1 3856 | 3854,5488,525,-1 3857 | 3855,3917,8,1 3858 | 3856,5127,28,1 3859 | 3857,5316,16,3 3860 | 3858,5620,538,-1 3861 | 3859,3888,9,1 3862 | 3860,4683,5,1 3863 | 3861,4317,12,3 3864 | 3862,2162,250,1 3865 | 3863,5169,28,2 3866 | 3864,5288,456,2 3867 | 3865,3390,265,2 3868 | 3866,5128,28,1 3869 | 3867,2168,24,1 3870 | 3868,3746,16,-1 3871 | 3869,5335,16,1 3872 | 3870,5295,419,2 3873 | 3871,3276,36,3 3874 | 3872,5071,142,2 3875 | 3873,1968,9,1 3876 | 3874,4906,454,1 3877 | 3875,4750,25,3 3878 | 3876,3043,48,1 3879 | 3877,1896,5,1 3880 | 3878,2973,19,3 3881 | 3879,3223,17,-1 3882 | 3880,5337,28,4 3883 | 3881,1968,33,1 3884 | 3882,4892,445,2 3885 | 3883,5177,419,2 3886 | 3884,1904,170,1 3887 | 3885,232,6,3 3888 | 3886,960,12,3 3889 | 3887,209,4,1 3890 | 3888,5177,16,2 3891 | 3889,5058,28,1 3892 | 3890,5085,480,1 3893 | 3891,2739,19,1 3894 | 3892,2340,9,1 3895 | 3893,718,236,1 3896 | 3894,2009,8,1 3897 | 3895,3209,204,1 3898 | 3896,4123,368,3 3899 | 3897,4506,25,1 3900 | 3898,5918,545,-1 3901 | 3899,3679,260,1 3902 | 3900,3368,24,2 3903 | 3901,5261,417,4 3904 | 3902,2164,16,-1 3905 | 3903,5008,483,3 3906 | 3904,3626,17,1 3907 | 3905,3363,5,1 3908 | 3906,547,175,1 3909 | 3907,2709,159,1 3910 | 3908,2721,13,3 3911 | 3909,1617,9,1 3912 | 3910,5272,16,1 3913 | 3911,5549,525,-1 3914 | 3912,3575,12,1 3915 | 3913,4417,27,2 3916 | 3914,4956,28,1 3917 | 3915,1689,25,2 3918 | 3916,4947,16,2 3919 | 3917,5313,16,1 3920 | 3918,5085,434,1 3921 | 3919,5130,16,1 3922 | 3920,3731,8,1 3923 | 3921,1811,187,1 3924 | 3922,5420,499,-1 3925 | 3923,5203,16,3 3926 | 3924,702,25,1 3927 | 3925,4278,20,2 3928 | 3926,3733,119,1 3929 | 3927,1539,9,1 3930 | 3928,5392,28,2 3931 | 3929,3522,49,3 3932 | 3930,1911,71,1 3933 | 3931,5362,420,1 3934 | 3932,3746,17,-1 3935 | 3933,5261,483,4 3936 | 3934,5653,542,-1 3937 | 3935,4600,8,1 3938 | 3936,935,5,1 3939 | 3937,21,20,2 3940 | 3938,1438,120,1 3941 | 3939,2788,48,1 3942 | 3940,726,194,2 3943 | 3941,5385,444,2 3944 | 3942,5014,425,3 3945 | 3943,4711,48,1 3946 | 3944,5050,446,2 3947 | 3945,2212,207,1 3948 | 3946,1487,8,1 3949 | 3947,4565,36,-1 3950 | 3948,3121,20,4 3951 | 3949,4694,313,1 3952 | 3950,1252,159,1 3953 | 3951,830,24,3 3954 | 3952,3805,17,-1 3955 | 3953,4534,24,2 3956 | 3954,4976,433,2 3957 | 3955,5711,85,-1 3958 | 3956,4941,142,2 3959 | 3957,1129,13,1 3960 | 3958,1441,89,-1 3961 | 3959,718,369,1 3962 | 3960,4042,85,1 3963 | 3961,2788,340,1 3964 | 3962,1465,51,1 3965 | 3963,4454,8,1 3966 | 3964,5268,433,1 3967 | 3965,5024,436,1 3968 | 3966,3841,332,1 3969 | 3967,4919,416,1 3970 | 3968,5417,496,-1 3971 | 3969,3802,49,1 3972 | 3970,5024,436,1 3973 | 3971,3837,36,1 3974 | 3972,1962,8,1 3975 | 3973,4878,428,1 3976 | 3974,1421,320,1 3977 | 3975,4588,2,2 3978 | 3976,5069,444,2 3979 | 3977,2893,48,3 3980 | 3978,928,20,3 3981 | 3979,710,176,1 3982 | 3980,3198,317,-1 3983 | 3981,3426,221,1 3984 | 3982,3374,127,1 3985 | 3983,3101,12,3 3986 | 3984,1984,8,1 3987 | 3985,726,5,2 3988 | 3986,5921,525,-1 3989 | 3987,2540,17,3 3990 | 3988,5488,524,-1 3991 | 
3989,5696,518,-1 3992 | 3990,5532,525,-1 3993 | 3991,2551,5,1 3994 | 3992,4368,13,1 3995 | 3993,3839,17,-1 3996 | 3994,5303,459,1 3997 | 3995,956,17,3 3998 | 3996,5304,460,2 3999 | 3997,1786,9,1 4000 | 3998,5198,428,2 4001 | 3999,4338,83,2 4002 | 4000,5213,474,1 4003 | 4001,344,89,1 4004 | 4002,2988,386,3 4005 | 4003,2360,12,1 4006 | 4004,4227,49,1 4007 | 4005,5242,28,2 4008 | 4006,1474,9,1 4009 | 4007,1645,20,2 4010 | 4008,4834,20,1 4011 | 4009,944,25,2 4012 | 4010,3379,15,1 4013 | 4011,4941,36,2 4014 | 4012,5258,429,3 4015 | 4013,5283,460,2 4016 | 4014,3702,4,5 4017 | 4015,1770,5,1 4018 | 4016,4485,17,1 4019 | 4017,2984,13,3 4020 | 4018,1804,49,3 4021 | 4019,2034,24,2 4022 | 4020,2887,24,2 4023 | 4021,2990,24,2 4024 | 4022,1375,9,1 4025 | 4023,3899,12,-1 4026 | 4024,1079,9,1 4027 | 4025,2690,4,3 4028 | 4026,5246,428,1 4029 | 4027,5198,428,2 4030 | 4028,97,84,1 4031 | 4029,4564,17,3 4032 | 4030,1714,269,3 4033 | 4031,1904,24,1 4034 | 4032,3036,36,-1 4035 | 4033,288,8,1 4036 | 4034,2480,134,1 4037 | 4035,5179,16,1 4038 | 4036,5443,509,-1 4039 | 4037,3953,25,1 4040 | 4038,4110,15,1 4041 | 4039,4808,245,1 4042 | 4040,1344,14,3 4043 | 4041,3217,102,1 4044 | 4042,3296,296,1 4045 | 4043,5159,491,1 4046 | 4044,994,24,2 4047 | 4045,3200,229,1 4048 | 4046,3624,4,1 4049 | 4047,4711,49,1 4050 | 4048,1310,17,2 4051 | 4049,3316,159,3 4052 | 4050,1231,13,1 4053 | 4051,1023,9,1 4054 | 4052,5588,533,-1 4055 | 4053,1359,127,1 4056 | 4054,2258,169,2 4057 | 4055,4086,321,3 4058 | 4056,3043,339,1 4059 | 4057,2047,85,1 4060 | 4058,5571,532,-1 4061 | 4059,5333,457,2 4062 | 4060,5189,433,1 4063 | 4061,431,15,-1 4064 | 4062,824,148,1 4065 | 4063,2444,9,1 4066 | 4064,5220,416,3 4067 | 4065,4059,44,3 4068 | 4066,4379,271,1 4069 | 4067,4226,9,1 4070 | 4068,1798,1,2 4071 | 4069,2549,48,1 4072 | 4070,4564,309,3 4073 | 4071,4661,4,1 4074 | 4072,3112,25,2 4075 | 4073,4993,36,3 4076 | 4074,4021,190,3 4077 | 4075,3134,305,3 4078 | 4076,2701,49,1 4079 | 4077,547,78,1 4080 | 4078,350,25,3 4081 | 4079,781,20,1 4082 | 4080,377,20,1 4083 | 4081,5446,509,-1 4084 | 4082,2309,17,1 4085 | 4083,5193,434,2 4086 | 4084,3462,9,1 4087 | 4085,3202,4,1 4088 | 4086,2868,9,1 4089 | 4087,1997,5,3 4090 | 4088,5118,36,2 4091 | 4089,5250,419,1 4092 | 4090,3781,4,3 4093 | 4091,953,87,-1 4094 | 4092,4423,20,1 4095 | 4093,3036,16,-1 4096 | 4094,39,17,-1 4097 | 4095,5058,16,1 4098 | 4096,4328,120,1 4099 | 4097,3095,377,1 4100 | 4098,3213,398,1 4101 | 4099,1642,24,1 4102 | 4100,308,36,1 4103 | 4101,5279,142,2 4104 | 4102,4554,293,1 4105 | 4103,862,20,1 4106 | 4104,4615,301,2 4107 | 4105,924,51,2 4108 | 4106,1038,16,1 4109 | 4107,5220,16,3 4110 | 4108,410,9,5 4111 | 4109,5320,425,1 4112 | 4110,2009,123,1 4113 | 4111,5575,533,-1 4114 | 4112,5254,16,1 4115 | 4113,5224,425,3 4116 | 4114,2404,87,1 4117 | 4115,2316,148,1 4118 | 4116,1325,16,2 4119 | 4117,2646,1,2 4120 | 4118,5032,414,1 4121 | 4119,960,13,3 4122 | 4120,5675,541,-1 4123 | 4121,4138,12,1 4124 | 4122,3075,256,1 4125 | 4123,5684,541,-1 4126 | 4124,2905,4,1 4127 | 4125,3686,8,1 4128 | 4126,1383,77,-1 4129 | 4127,277,12,-1 4130 | 4128,2868,8,1 4131 | 4129,5049,487,1 4132 | 4130,1219,25,2 4133 | 4131,3299,51,1 4134 | 4132,930,204,1 4135 | 4133,4229,20,1 4136 | 4134,1344,27,3 4137 | 4135,344,36,1 4138 | 4136,4978,36,1 4139 | 4137,1253,25,2 4140 | 4138,4312,16,1 4141 | 4139,5226,420,1 4142 | 4140,2293,49,3 4143 | 4141,5663,542,-1 4144 | 4142,2964,16,-1 4145 | 4143,3139,234,2 4146 | 4144,5019,28,1 4147 | 4145,140,7,1 4148 | 4146,3234,197,1 4149 | 4147,4059,19,3 4150 | 4148,251,79,2 4151 | 
4149,2180,36,2 4152 | 4150,4639,13,3 4153 | 4151,5019,420,1 4154 | 4152,4923,433,1 4155 | 4153,5186,428,1 4156 | 4154,5451,507,-1 4157 | 4155,4729,325,3 4158 | 4156,4588,407,2 4159 | 4157,2328,8,1 4160 | 4158,3621,12,1 4161 | 4159,5102,28,1 4162 | 4160,4658,25,3 4163 | 4161,2907,200,3 4164 | 4162,989,267,1 4165 | 4163,760,19,2 4166 | 4164,4976,465,2 4167 | 4165,5416,496,-1 4168 | 4166,1391,63,2 4169 | 4167,5423,497,-1 4170 | 4168,5521,528,-1 4171 | 4169,241,74,1 4172 | 4170,2650,17,1 4173 | 4171,4199,12,1 4174 | 4172,5663,541,-1 4175 | 4173,3645,4,1 4176 | 4174,2373,19,1 4177 | 4175,4207,378,3 4178 | 4176,2510,24,2 4179 | 4177,5128,429,1 4180 | 4178,4379,8,1 4181 | 4179,3597,5,3 4182 | 4180,2373,20,1 4183 | 4181,5256,433,1 4184 | 4182,3490,270,1 4185 | 4183,3143,25,1 4186 | 4184,3089,9,1 4187 | 4185,3296,48,1 4188 | 4186,3384,12,-1 4189 | 4187,826,8,1 4190 | 4188,5112,420,1 4191 | 4189,3593,16,2 4192 | 4190,5220,434,3 4193 | 4191,2092,48,3 4194 | 4192,5223,429,2 4195 | 4193,5387,36,1 4196 | 4194,2969,9,1 4197 | 4195,5215,462,3 4198 | 4196,4301,281,1 4199 | 4197,3491,69,1 4200 | 4198,577,179,1 4201 | 4199,2168,274,1 4202 | 4200,1625,48,3 4203 | 4201,4747,16,1 4204 | 4202,880,129,1 4205 | 4203,1561,9,1 4206 | 4204,1677,9,1 4207 | 4205,5612,524,-1 4208 | 4206,5163,432,1 4209 | 4207,1210,12,1 4210 | 4208,5663,515,-1 4211 | 4209,5315,28,1 4212 | 4210,5279,425,2 4213 | 4211,5676,543,-1 4214 | 4212,341,20,1 4215 | 4213,5179,458,1 4216 | 4214,3419,233,1 4217 | 4215,3677,9,2 4218 | 4216,4784,5,1 4219 | 4217,344,51,1 4220 | 4218,3001,48,3 4221 | 4219,3274,25,1 4222 | 4220,377,18,1 4223 | 4221,629,155,-1 4224 | 4222,1375,8,1 4225 | 4223,4941,471,2 4226 | 4224,1195,25,2 4227 | 4225,5199,424,1 4228 | 4226,4968,452,2 4229 | 4227,1778,274,2 4230 | 4228,5130,414,1 4231 | 4229,4417,20,2 4232 | 4230,5696,542,-1 4233 | 4231,5059,16,3 4234 | 4232,5677,542,-1 4235 | 4233,2064,148,1 4236 | 4234,1489,16,-1 4237 | 4235,1523,49,1 4238 | 4236,39,56,-1 4239 | 4237,3785,13,3 4240 | 4238,1566,9,1 4241 | 4239,1727,230,1 4242 | 4240,5213,434,1 4243 | 4241,4923,471,1 4244 | 4242,1903,13,2 4245 | 4243,3888,8,1 4246 | 4244,4312,25,1 4247 | 4245,5697,542,-1 4248 | 4246,2448,25,3 4249 | 4247,1749,19,1 4250 | 4248,5397,499,-1 4251 | 4249,5223,457,2 4252 | 4250,2815,9,1 4253 | 4251,635,15,1 4254 | 4252,2229,84,1 4255 | 4253,2358,118,1 4256 | 4254,5132,463,2 4257 | 4255,1435,9,1 4258 | 4256,3289,49,1 4259 | 4257,2745,25,1 4260 | 4258,880,48,1 4261 | 4259,2548,49,1 4262 | 4260,4114,30,-1 4263 | 4261,3316,8,3 4264 | 4262,825,193,3 4265 | 4263,2293,48,3 4266 | 4264,1325,124,2 4267 | 4265,5039,16,4 4268 | 4266,4894,16,2 4269 | 4267,5420,500,-1 4270 | 4268,224,12,-1 4271 | 4269,4936,425,1 4272 | 4270,3379,36,1 4273 | 4271,4520,4,1 4274 | 4272,2092,200,3 4275 | 4273,2637,9,1 4276 | 4274,5169,434,2 4277 | 4275,5117,28,2 4278 | 4276,4904,429,1 4279 | 4277,3428,8,1 4280 | 4278,4729,48,3 4281 | 4279,3834,205,1 4282 | 4280,2444,8,1 4283 | 4281,4993,422,3 4284 | 4282,4639,20,3 4285 | 4283,4368,12,1 4286 | 4284,5204,433,1 4287 | 4285,948,202,1 4288 | 4286,5491,523,-1 4289 | 4287,1501,51,-1 4290 | 4288,5458,513,-1 4291 | 4289,251,25,2 4292 | 4290,4727,9,2 4293 | 4291,2884,24,2 4294 | 4292,4983,414,2 4295 | 4293,1823,49,1 4296 | 4294,3131,385,1 4297 | 4295,2404,86,1 4298 | 4296,4199,11,1 4299 | 4297,3299,16,1 4300 | 4298,4372,8,1 4301 | 4299,1198,16,1 4302 | 4300,241,49,1 4303 | 4301,2747,25,2 4304 | 4302,5315,417,1 4305 | 4303,4437,14,1 4306 | 4304,928,19,3 4307 | 4305,235,9,1 4308 | 4306,2228,9,1 4309 | 4307,1225,274,1 4310 | 
4308,4992,417,1 4311 | 4309,2424,208,1 4312 | 4310,1903,293,2 4313 | 4311,3002,96,3 4314 | 4312,5104,16,2 4315 | 4313,5299,425,1 4316 | 4314,1646,24,2 4317 | 4315,403,24,3 4318 | 4316,1536,9,1 4319 | 4317,4534,154,2 4320 | 4318,3830,326,1 4321 | 4319,743,25,3 4322 | 4320,1471,27,1 4323 | 4321,4210,369,1 4324 | 4322,2260,17,3 4325 | 4323,5340,424,2 4326 | 4324,699,16,1 4327 | 4325,5571,524,-1 4328 | 4326,5506,526,-1 4329 | 4327,5223,36,2 4330 | 4328,4959,422,2 4331 | 4329,5074,417,2 4332 | 4330,825,4,3 4333 | 4331,1624,9,1 4334 | 4332,4066,2,2 4335 | 4333,4907,452,3 4336 | 4334,5187,452,2 4337 | 4335,3303,16,1 4338 | 4336,3266,279,3 4339 | 4337,3837,11,1 4340 | 4338,3101,17,3 4341 | 4339,1515,214,1 4342 | 4340,5723,509,-1 4343 | 4341,5502,523,-1 4344 | 4342,3482,11,1 4345 | 4343,5304,429,2 4346 | 4344,1561,9,1 4347 | 4345,2866,25,2 4348 | 4346,5932,546,-1 4349 | 4347,5228,475,2 4350 | 4348,5293,422,3 4351 | 4349,1798,115,2 4352 | 4350,5929,546,-1 4353 | 4351,2144,8,1 4354 | 4352,5918,525,-1 4355 | 4353,3576,17,1 4356 | 4354,4273,5,3 4357 | 4355,5281,480,2 4358 | 4356,2554,16,2 4359 | 4357,2480,9,1 4360 | 4358,4247,20,3 4361 | 4359,4397,204,1 4362 | 4360,1485,95,-1 4363 | 4361,5356,478,1 4364 | 4362,5500,525,-1 4365 | 4363,743,24,3 4366 | 4364,369,20,1 4367 | 4365,1306,254,3 4368 | 4366,824,147,1 4369 | 4367,712,186,2 4370 | 4368,2767,24,1 4371 | 4369,2432,173,1 4372 | 4370,209,84,1 4373 | 4371,3983,49,1 4374 | 4372,883,33,1 4375 | 4373,5607,533,-1 4376 | 4374,5103,480,3 4377 | 4375,1264,8,1 4378 | 4376,1318,131,1 4379 | 4377,1541,27,1 4380 | 4378,5296,36,2 4381 | 4379,2721,104,3 4382 | 4380,1624,172,1 4383 | 4381,1502,326,1 4384 | 4382,2531,1,-1 4385 | 4383,4110,148,1 4386 | 4384,5160,36,1 4387 | 4385,1008,270,3 4388 | 4386,1404,276,1 4389 | 4387,1404,36,1 4390 | 4388,5128,419,1 4391 | 4389,2321,272,2 4392 | 4390,2448,24,3 4393 | 4391,5226,442,1 4394 | 4392,5427,499,-1 4395 | 4393,2540,34,3 4396 | 4394,4976,16,2 4397 | 4395,5175,36,2 4398 | 4396,4446,86,1 4399 | 4397,2846,24,1 4400 | 4398,3834,9,1 4401 | 4399,825,5,3 4402 | 4400,4947,419,2 4403 | 4401,5360,16,1 4404 | 4402,3360,4,3 4405 | 4403,3219,9,1 4406 | 4404,5127,16,1 4407 | 4405,3217,4,1 4408 | 4406,235,9,1 4409 | 4407,2717,387,1 4410 | 4408,5299,417,1 4411 | 4409,5069,16,2 4412 | 4410,2407,157,-1 4413 | 4411,5392,425,2 4414 | 4412,2545,24,2 4415 | 4413,5624,522,-1 4416 | 4414,4879,426,1 4417 | 4415,2739,71,1 4418 | 4416,3802,48,1 4419 | 4417,3800,48,1 4420 | 4418,5268,28,1 4421 | 4419,2388,9,1 4422 | 4420,3785,25,3 4423 | 4421,716,36,1 4424 | 4422,5363,414,1 4425 | 4423,5017,17,1 4426 | 4424,2627,49,3 4427 | 4425,4199,15,1 4428 | 4426,5434,499,-1 4429 | 4427,3215,8,3 4430 | 4428,515,12,1 4431 | 4429,5191,417,1 4432 | 4430,1392,25,1 4433 | 4431,5132,16,2 4434 | 4432,335,86,1 4435 | 4433,5199,436,1 4436 | 4434,4128,4,1 4437 | 4435,2212,48,1 4438 | 4436,4327,17,-1 4439 | 4437,1078,49,1 4440 | 4438,5928,546,-1 4441 | 4439,5330,16,4 4442 | 4440,3751,19,2 4443 | 4441,2229,4,1 4444 | 4442,4750,19,3 4445 | 4443,4183,36,3 4446 | 4444,3730,48,1 4447 | 4445,716,25,1 4448 | 4446,1811,8,1 4449 | 4447,3388,16,1 4450 | 4448,1197,38,1 4451 | 4449,251,24,2 4452 | 4450,3903,15,1 4453 | 4451,1786,8,1 4454 | 4452,1703,9,1 4455 | 4453,5902,530,-1 4456 | 4454,862,17,1 4457 | 4455,5372,474,1 4458 | 4456,5605,525,-1 4459 | 4457,3545,8,1 4460 | 4458,3142,49,1 4461 | 4459,1129,78,1 4462 | 4460,4417,14,2 4463 | 4461,3315,9,3 4464 | 4462,3655,92,-1 4465 | 4463,1435,8,1 4466 | 4464,4916,433,1 4467 | 4465,1152,24,2 4468 | 4466,2438,9,1 4469 | 4467,1399,33,2 4470 
| 4468,4554,27,1 4471 | 4469,1168,25,2 4472 | 4470,892,205,1 4473 | 4471,3036,12,-1 4474 | 4472,5474,513,-1 4475 | 4473,3686,9,1 4476 | 4474,368,49,3 4477 | 4475,1624,8,1 4478 | 4476,2388,22,1 4479 | 4477,830,23,3 4480 | 4478,4400,25,2 4481 | 4479,5254,428,1 4482 | 4480,5175,16,2 4483 | 4481,361,16,2 4484 | 4482,3807,9,1 4485 | 4483,5242,422,2 4486 | 4484,2842,48,1 4487 | 4485,5050,36,2 4488 | 4486,5290,459,2 4489 | 4487,4957,434,1 4490 | 4488,5514,527,-1 4491 | 4489,3265,14,2 4492 | 4490,4085,111,1 4493 | 4491,4184,9,1 4494 | 4492,5921,524,-1 4495 | 4493,3327,12,1 4496 | 4494,1145,12,1 4497 | 4495,1998,36,1 4498 | 4496,1242,9,1 4499 | 4497,3392,12,-1 4500 | 4498,1767,8,1 4501 | 4499,2029,8,1 4502 | 4500,4028,20,2 4503 | 4501,1801,78,-1 4504 | 4502,1803,135,3 4505 | 4503,362,47,3 4506 | 4504,4944,16,1 4507 | 4505,5420,496,-1 4508 | 4506,5363,28,1 4509 | 4507,2627,48,3 4510 | 4508,5360,417,1 4511 | 4509,2745,24,1 4512 | 4510,5607,524,-1 4513 | 4511,719,237,1 4514 | 4512,3200,9,1 4515 | 4513,3388,20,1 4516 | 4514,5245,425,2 4517 | 4515,3721,24,2 4518 | 4516,2739,17,1 4519 | 4517,4094,5,1 4520 | 4518,5052,142,2 4521 | 4519,5382,16,2 4522 | 4520,5083,414,2 4523 | 4521,3702,261,5 4524 | 4522,1474,8,1 4525 | 4523,2083,5,1 4526 | 4524,832,9,1 4527 | 4525,5081,417,1 4528 | 4526,3391,338,1 4529 | 4527,4005,9,1 4530 | 4528,4338,19,2 4531 | 4529,956,358,3 4532 | 4530,2531,2,-1 4533 | 4531,1980,5,3 4534 | 4532,3143,121,1 4535 | 4533,4448,226,1 4536 | 4534,1750,162,2 4537 | 4535,3129,13,3 4538 | 4536,1421,9,1 4539 | 4537,1122,20,1 4540 | 4538,5280,440,3 4541 | 4539,339,150,3 4542 | 4540,5017,429,1 4543 | 4541,1669,48,1 4544 | 4542,1994,20,1 4545 | 4543,1217,288,3 4546 | 4544,599,13,1 4547 | 4545,3958,49,3 4548 | 4546,1441,24,-1 4549 | 4547,5083,36,2 4550 | 4548,5518,524,-1 4551 | 4549,5037,442,1 4552 | 4550,5069,434,2 4553 | 4551,1484,24,1 4554 | 4552,5553,525,-1 4555 | 4553,5074,16,2 4556 | 4554,2842,384,1 4557 | 4555,2260,13,3 4558 | 4556,3597,190,3 4559 | 4557,4039,263,1 4560 | 4558,3379,12,1 4561 | 4559,5340,16,2 4562 | 4560,5026,417,3 4563 | 4561,4138,15,1 4564 | 4562,1470,106,3 4565 | 4563,3655,16,-1 4566 | 4564,4914,424,2 4567 | 4565,924,25,2 4568 | 4566,3000,25,3 4569 | 4567,1252,9,1 4570 | 4568,2591,312,3 4571 | 4569,5017,491,1 4572 | 4570,1403,25,1 4573 | 4571,4834,17,1 4574 | 4572,3285,11,1 4575 | 4573,989,4,1 4576 | 4574,1767,9,1 4577 | 4575,853,5,2 4578 | 4576,5314,28,2 4579 | 4577,431,17,-1 4580 | 4578,5612,536,-1 4581 | 4579,2815,162,1 4582 | 4580,1142,133,1 4583 | 4581,3930,20,1 4584 | 4582,3082,36,-1 4585 | 4583,5387,439,1 4586 | 4584,2984,20,3 4587 | 4585,49,8,1 4588 | 4586,3315,8,3 4589 | 4587,3323,51,1 4590 | 4588,726,4,2 4591 | 4589,4821,199,2 4592 | 4590,3746,155,-1 4593 | 4591,1070,4,1 4594 | 4592,5176,419,1 4595 | 4593,2624,8,1 4596 | 4594,3274,121,1 4597 | 4595,5083,424,2 4598 | 4596,5609,536,-1 4599 | 4597,964,8,1 4600 | 4598,1142,27,1 4601 | 4599,4725,12,3 4602 | 4600,3654,9,1 4603 | 4601,2584,20,2 4604 | 4602,369,19,1 4605 | 4603,1485,16,-1 4606 | 4604,5684,542,-1 4607 | 4605,4485,16,1 4608 | 4606,631,8,1 4609 | 4607,308,16,1 4610 | 4608,5021,28,2 4611 | 4609,5178,414,1 4612 | 4610,2709,9,1 4613 | 4611,2379,8,1 4614 | 4612,1679,17,-1 4615 | 4613,4059,16,3 4616 | 4614,1289,229,1 4617 | 4615,5148,433,2 4618 | 4616,5809,541,-1 4619 | 4617,5576,525,-1 4620 | 4618,2709,8,1 4621 | 4619,935,261,1 4622 | 4620,4323,307,1 4623 | 4621,2702,24,2 4624 | 4622,2788,49,1 4625 | 4623,2010,5,1 4626 | 4624,1837,287,1 4627 | 4625,2067,4,1 4628 | 4626,571,48,1 4629 | 4627,5103,434,3 4630 | 
4628,4122,63,3 4631 | 4629,2494,5,1 4632 | 4630,5035,416,1 4633 | 4631,1994,13,1 4634 | 4632,1189,120,1 4635 | 4633,4750,16,3 4636 | 4634,3879,9,1 4637 | 4635,5413,499,-1 4638 | 4636,369,14,1 4639 | 4637,5052,438,2 4640 | 4638,5110,419,1 4641 | 4639,1984,9,1 4642 | 4640,1761,13,3 4643 | 4641,4446,87,1 4644 | 4642,1666,4,3 4645 | 4643,2462,186,1 4646 | 4644,1987,148,1 4647 | 4645,2936,24,2 4648 | 4646,5123,16,1 4649 | 4647,4970,433,1 4650 | 4648,3350,25,2 4651 | 4649,4575,4,3 4652 | 4650,39,51,-1 4653 | 4651,4981,16,1 4654 | 4652,3924,59,1 4655 | 4653,2041,25,3 4656 | 4654,2765,49,1 4657 | 4655,4524,9,1 4658 | 4656,2418,49,1 4659 | 4657,979,13,1 4660 | 4658,4189,12,1 4661 | 4659,5620,540,-1 4662 | 4660,5912,523,-1 4663 | 4661,5305,416,3 4664 | 4662,2254,49,1 4665 | 4663,1717,317,3 4666 | 4664,1033,16,1 4667 | 4665,5269,452,1 4668 | 4666,894,257,1 4669 | 4667,1310,19,2 4670 | 4668,5426,542,-1 4671 | 4669,3802,384,1 4672 | 4670,2008,285,3 4673 | 4671,5292,16,1 4674 | 4672,4242,26,1 4675 | 4673,5410,499,-1 4676 | 4674,1645,17,2 4677 | 4675,1727,17,1 4678 | 4676,2868,237,1 4679 | 4677,5256,422,1 4680 | 4678,2323,238,1 4681 | 4679,1344,20,3 4682 | 4680,4446,36,1 4683 | 4681,3855,20,3 4684 | 4682,1074,17,1 4685 | 4683,5053,16,2 4686 | 4684,599,20,1 4687 | 4685,3740,16,-1 4688 | 4686,4339,260,1 4689 | 4687,5443,543,-1 4690 | 4688,1822,198,1 4691 | 4689,5151,419,2 4692 | 4690,4114,16,-1 4693 | 4691,4923,422,1 4694 | 4692,361,17,2 4695 | 4693,4275,49,1 4696 | 4694,4062,11,1 4697 | 4695,1501,16,-1 4698 | 4696,5061,434,2 4699 | 4697,542,24,3 4700 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | 2 | python3 code/test_model.py 3 | python3 code/merge_subresult.py 4 | -------------------------------------------------------------------------------- /train.sh: -------------------------------------------------------------------------------- 1 | 2 | python3 code/data_process.py 3 | python3 code/train_model.py 4 | 5 | --------------------------------------------------------------------------------
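Usage note (a sketch, not verified behavior): the two entry scripts above chain the pipeline stages in a fixed order — data preparation then training, and inference then result merging. The per-step comments below are assumptions inferred only from the module and directory names in this repository.

# train.sh — end-to-end training
python3 code/data_process.py     # assumed: clean/augment the raw CSVs under datasets/ and build model inputs
python3 code/train_model.py      # assumed: train the classifier and save checkpoints for later inference

# test.sh — inference and submission
python3 code/test_model.py       # assumed: score the test set with the trained model(s)
python3 code/merge_subresult.py  # assumed: merge the partial prediction files into the final result CSV

Run training before inference, since test_model.py presumably loads the checkpoints that train_model.py produces.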