├── .gitattributes ├── Distillation ├── Dataset │ └── univ.zip ├── DecisionNode.py ├── RandomForest.py ├── SoftTree.py ├── Tree.py ├── __pycache__ │ ├── DecisionNode.cpython-37.pyc │ ├── RandomForest.cpython-37.pyc │ ├── SoftTree.cpython-37.pyc │ ├── Tree.cpython-37.pyc │ ├── train_teacher.cpython-37.pyc │ └── utils.cpython-37.pyc ├── bdt_test.py ├── dt_test.py ├── models │ ├── GRU.py │ ├── __init__.py │ └── __pycache__ │ │ ├── GRU.cpython-37.pyc │ │ └── __init__.cpython-37.pyc ├── result_acc │ ├── univ_rf_acc.txt │ ├── univ_rf_sdt.txt │ └── univ_rf_teacher.txt ├── rule_tree │ ├── univ_rf_kk0_round0.txt │ ├── univ_rf_kk0_round1.txt │ ├── univ_rf_kk0_round2.txt │ ├── univ_rf_kk0_round3.txt │ └── univ_rf_kk0_round4.txt ├── train_teacher.py └── utils.py ├── P4 └── flowcontrol.p4 ├── readme.md └── rule2entry ├── code ├── entry2dataplane.py └── rule2entry.py └── output ├── rule_tree └── univ │ ├── univ_rf_kk0_round0.txt │ ├── univ_rf_kk0_round1.txt │ ├── univ_rf_kk0_round2.txt │ ├── univ_rf_kk0_round3.txt │ └── univ_rf_kk0_round4.txt └── ternary_entry └── univ ├── univ_rf_kk0_round0.txt ├── univ_rf_kk0_round1.txt ├── univ_rf_kk0_round2.txt ├── univ_rf_kk0_round3.txt └── univ_rf_kk0_round4.txt /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /Distillation/Dataset/univ.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/Dataset/univ.zip -------------------------------------------------------------------------------- /Distillation/DecisionNode.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | 4 | 5 | class DecisionNode: 6 | 7 | def __init__(self, feature=-1, threshold=None, label=None, label_dict=None, true_branch=None, 8 | false_branch=None): 9 | self.feature = feature 10 | self.threshold = threshold 11 | # 'if label is not None,then it's a leaf node' 12 | self.label_dict = label_dict 13 | self.label = label 14 | self.true_branch = true_branch 15 | self.false_branch = false_branch 16 | 17 | def set_branch(self, true_branch, false_branch): 18 | 19 | self.true_branch = true_branch 20 | self.false_branch = false_branch 21 | 22 | def find_path(self, data_vec, path, feature_attr, attr_map): 23 | if self.label is not None: 24 | return path 25 | else: 26 | if feature_attr[self.feature] == "c": 27 | path.append([self.feature+1, self.threshold, data_vec[self.feature]]) 28 | if data_vec[self.feature] < self.threshold: 29 | path[-1].append(-1) 30 | return self.false_branch.find_path(data_vec, path, feature_attr, attr_map) 31 | else: 32 | path[-1].append(1) 33 | return self.true_branch.find_path(data_vec, path, feature_attr, attr_map) 34 | else: 35 | path.append([self.feature+1, attr_map[str(self.feature) + "_" + str(int(self.threshold))], 36 | attr_map[str(self.feature) + "_" + str(int(data_vec[self.feature]))]]) 37 | if data_vec[self.feature] != self.threshold: 38 | path[-1].append(-1) 39 | return self.false_branch.find_path(data_vec, path, feature_attr, attr_map) 40 | else: 41 | path[-1].append(1) 42 | return self.true_branch.find_path(data_vec, path, feature_attr, attr_map) 43 | def show_tree(self,path, feature_attr, attr_map,model_path): 44 | if self.label is not None: 45 | path.append(' then 
'+str(self.label)+'\n') 46 | #print(path) 47 | with open(model_path, 'a') as f: 48 | message =''.join(path) 49 | # print(message) 50 | f.write(message) 51 | path.pop() 52 | return path 53 | else: 54 | if feature_attr[self.feature] == "c": 55 | path.append(' if feature_'+ str(self.feature)+'<'+str(self.threshold)) 56 | #左递归 57 | self.false_branch.show_tree(path, feature_attr, attr_map,model_path) 58 | path.pop() 59 | 60 | path.append(' if feature_' + str(self.feature) + '>=' + str(self.threshold)) 61 | # 右递归 62 | self.true_branch.show_tree(path, feature_attr, attr_map,model_path) 63 | path.pop() 64 | else: 65 | path.append(' if feature_' + str(self.feature) + '!=' + str(self.threshold)) 66 | # 左递归 67 | self.false_branch.show_tree(path, feature_attr, attr_map,model_path) 68 | path.pop() 69 | 70 | path.append(' if feature_' + str(self.feature) + '=' + str(self.threshold)) 71 | # 右递归 72 | self.true_branch.show_tree(path, feature_attr, attr_map,model_path) 73 | path.pop() 74 | 75 | -------------------------------------------------------------------------------- /Distillation/RandomForest.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | 4 | from utils import * 5 | from Tree import * 6 | import numpy as np 7 | from multiprocessing import Pool 8 | import math 9 | 10 | 11 | class RandomForest: 12 | 13 | def __init__(self, n_estimators=100, min_sample_leaf=5, n_features="sqrt", q_method="fix", 14 | q_value=2, q_min=None, q_max=None, q_mean=2, n_jobs=4, 15 | silent=False, softmax=False, softmax_factor=1, softmax_method="fix", cutoff=None): 16 | self.n_estimators = n_estimators 17 | self.min_sample_leaf = min_sample_leaf 18 | self.n_features = n_features 19 | self.q_method = q_method 20 | self.q_value = q_value 21 | self.q_min = q_min 22 | self.q_max = q_max 23 | self.q_mean = q_mean 24 | self.q_list = None 25 | self.n_jobs = n_jobs 26 | self.silent = silent 27 | self.tree_list = None 28 | self.features_attr = None 29 | self.softmax_factor = softmax_factor 30 | self.softmax = softmax 31 | self.softmax_method = softmax_method 32 | self.cutoff = cutoff 33 | 34 | def fit(self, X, y, features_attr=None): 35 | # train with the features X and labels y 36 | 37 | if features_attr is None: 38 | features_attr = [] 39 | for feature in np.transpose(X): 40 | if isinstance(feature[0], float): 41 | features_attr.append('c') 42 | continue 43 | if isinstance(feature[0], str): 44 | features_attr.append('d') 45 | continue 46 | unique_feature = np.unique(feature) 47 | if len(unique_feature) > 0.1 * len(feature): 48 | features_attr.append('c') 49 | else: 50 | features_attr.append('d') 51 | self.features_attr = features_attr 52 | 53 | self.tree_list = np.array([]) 54 | 55 | self.__generate_q_list() 56 | self.__generate_ind_factor_list() 57 | 58 | pool = Pool(processes=self.n_jobs) 59 | jobs_set = [] 60 | for i in range(self.n_estimators): 61 | sample_index, unsample_index = bootstrap(X.shape[0]) 62 | new_X, new_y = X[sample_index], y[sample_index] 63 | tree = TreeClassifier(q_value=self.q_list[i], n_features=self.n_features, 64 | min_sample_leaf=self.min_sample_leaf, 65 | softmax=self.softmax, softmax_factor=self.softmax_factor_list[i], 66 | cutoff=self.cutoff) 67 | jobs_set.append(pool.apply_async(self.train_one_tree, 68 | (i, tree, new_X, new_y, self.features_attr, ))) 69 | pool.close() 70 | pool.join() 71 | 72 | for job in jobs_set: 73 | self.tree_list = np.append(self.tree_list, job.get()) 74 | 75 | @staticmethod 76 | def train_one_tree(id, tree, 
X_train, y_train, features_attr=None): 77 | 78 | tree.fit(X_train, y_train, features_attr) 79 | 80 | return tree 81 | 82 | def predict(self, X): 83 | # predict with feature(s) X 84 | tree_pred_res = [] 85 | for tree in self.tree_list: 86 | tree_pred_res.extend([tree.predict(X)]) 87 | tree_pred_res = np.array(tree_pred_res).T 88 | 89 | if np.ndim(tree_pred_res) == 1: 90 | return voting(cal_label_dic(tree_pred_res)) 91 | else: 92 | return np.array([voting(cal_label_dic(res)) for res in tree_pred_res]) 93 | 94 | def __generate_q_list(self): 95 | # for every tree, generate one q of Tsallis entropy 96 | if self.q_method == "exp": 97 | #self.q_list = np.random.exponential(self.q_mean-1, self.n_estimators)+1 98 | self.q_list = self.exponential_rand_list(self.q_mean, 1, self.n_estimators) 99 | elif self.q_method == "uniform": 100 | self.q_list = [np.random.uniform(self.q_min, self.q_max) for i in range(self.n_estimators)] 101 | elif self.q_method == "fix": 102 | self.q_list = [self.q_value for i in range(self.n_estimators)] 103 | else: 104 | self.q_list = [2 for i in range(self.n_estimators)] 105 | 106 | def __generate_ind_factor_list(self): 107 | if self.softmax_method == "exp": 108 | self.softmax_factor_list = np.random.exponential(self.softmax_factor, self.n_estimators) 109 | elif self.softmax_method == "fix": 110 | self.softmax_factor_list = [self.softmax_factor for i in range(self.n_estimators)] 111 | elif self.softmax_method == "rec_exp": 112 | mean_value = 1 / self.softmax_factor 113 | self.softmax_factor_list = 1 / np.random.exponential(mean_value, self.n_estimators) 114 | 115 | def exponential_rand(self, lam, lower_bound): 116 | if lam <= 0: 117 | return -1 118 | U = random.uniform(0.0, 1.0) 119 | return lower_bound + (-1.0 / lam) * math.log(U) 120 | 121 | def exponential_rand_list(self, lam, lower_bound, num): 122 | expo_list = [] 123 | for i in range(num): 124 | expo_tmp = self.exponential_rand(lam, lower_bound) 125 | expo_list.append(expo_tmp) 126 | return expo_list 127 | -------------------------------------------------------------------------------- /Distillation/SoftTree.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | 4 | from __future__ import print_function 5 | from utils import * 6 | from DecisionNode import * 7 | import numpy as np 8 | 9 | 10 | class SoftTreeClassifier: 11 | 12 | def __init__(self, n_features=None, min_sample_leaf=5): 13 | self.root = None 14 | self.min_sample_leaf = min_sample_leaf 15 | self.n_features = n_features 16 | self.features_attr = None 17 | 18 | def fit(self, X, y, features_attr=None): 19 | 20 | # feature_atrr: an array, its size is same as the number of features, 21 | # 'd' 'is discrete', 'c' is continuous 22 | 23 | np.random.seed() 24 | self.features_attr = features_attr 25 | 26 | if self.n_features is None or self.n_features == "all": 27 | self.n_features = X.shape[1] 28 | elif self.n_features == "sqrt": 29 | self.n_features = int(np.sqrt(X.shape[1])) 30 | elif self.n_features == "half": 31 | self.n_features = int(0.5 * X.shape[1]) 32 | elif self.n_features == "sqrt(nlogn)": 33 | self.n_features = int(np.sqrt(X.shape[1]*np.log(X.shape[1]))) 34 | 35 | X, y = np.array(X), np.array(y) 36 | self.root = self.__build_tree(X, y) 37 | 38 | def predict(self, X): 39 | 40 | if np.ndim(X) == 1: 41 | return self.__predict_rec(X, self.root) 42 | else: 43 | result = [] 44 | for sample in X: #可以改成并行 45 | result.append(self.__predict_rec(sample, self.root)) 46 | return np.array(result) 47 | 48 | def 
__predict_rec(self, X, node): 49 | if node.label is not None: 50 | return node.label 51 | else: 52 | feat_value = X[node.feature] 53 | feat_attr = self.features_attr[node.feature] 54 | threshold = node.threshold 55 | 56 | if feat_value is None or feat_value is np.nan: 57 | choice = np.random.randint(1, 3) 58 | if choice == 1: 59 | return self.__predict_rec(X, node.true_branch) 60 | else: 61 | return self.__predict_rec(X, node.false_branch) 62 | else: 63 | if feat_attr == 'd': 64 | if feat_value == threshold: 65 | return self.__predict_rec(X, node.true_branch) 66 | else: 67 | return self.__predict_rec(X, node.false_branch) 68 | elif feat_attr == 'c': 69 | if feat_value >= threshold: 70 | return self.__predict_rec(X, node.true_branch) 71 | else: 72 | return self.__predict_rec(X, node.false_branch) 73 | 74 | def __split(self, dataset, split_feature, threshold): 75 | 76 | true_index = [] 77 | false_index = [] 78 | 79 | if self.features_attr[split_feature] == 'd': 80 | for i in range(len(dataset)): 81 | if dataset[i][split_feature] == threshold: 82 | true_index.append(i) 83 | else: 84 | false_index.append(i) 85 | elif self.features_attr[split_feature] == 'c': 86 | for i in range(len(dataset)): 87 | if dataset[i][split_feature] >= threshold: 88 | true_index.append(i) 89 | else: 90 | false_index.append(i) 91 | 92 | return true_index, false_index 93 | 94 | def __split_pair(self, X, y, candidate_features): 95 | 96 | current_gini = soft_gini(y) 97 | 98 | ret = [] 99 | 100 | for feat in candidate_features: 101 | col = X[:, feat] 102 | unique_col = np.unique(col) 103 | attr = self.features_attr[feat] 104 | 105 | threshold_list = [] 106 | if attr == 'd' or unique_col.shape == 1: 107 | threshold_list = unique_col 108 | elif attr == 'c': 109 | threshold_list = [(unique_col[i]+unique_col[i+1]) / 2 for i in range(len(unique_col)-1)] 110 | 111 | for t in threshold_list: 112 | true_index, false_index = self.__split(X, feat, t) 113 | p = float(len(true_index)) / len(X) 114 | next_gini = p * soft_gini(y[true_index]) + (1-p) * soft_gini(y[false_index]) 115 | gain = current_gini - next_gini 116 | ret.append([gain, feat, t]) 117 | ret = np.array(ret) 118 | return ret[np.argsort(-ret[:, 0])] 119 | 120 | def __build_tree(self, X, y): 121 | 122 | y_dict = soft_label_dic(y) 123 | 124 | if np.sum((y > (1.0 / y.shape[1])).astype(int), axis=0).max() == y.shape[0]: 125 | return DecisionNode(label_dict=y_dict, label=soft_voting(y_dict)) 126 | 127 | # if len(y_dict) == 1: 128 | # d = y_dict 129 | # l = soft_voting(y_dict) 130 | # return DecisionNode(label_dict=d, label=l) 131 | 132 | candidate_features = [] 133 | for i in range(X.shape[1]): 134 | if len(np.unique(X[:, i])) > 1: 135 | candidate_features.append(i) 136 | 137 | if candidate_features == []: 138 | d = y_dict 139 | l = soft_voting(y_dict) 140 | return DecisionNode(label_dict=d, label=l) 141 | 142 | candidate_features = np.random.choice(candidate_features, 143 | min(self.n_features, len(candidate_features)), replace=False) 144 | 145 | split_pair = self.__split_pair(X, y, candidate_features) 146 | 147 | split_feature, threshold = int(split_pair[0][1]), split_pair[0][2] 148 | 149 | true_index, false_index = self.__split(X, split_feature, threshold) 150 | 151 | if len(true_index) == 0 or len(false_index) == 0: 152 | d = y_dict 153 | l = voting(y_dict) 154 | return DecisionNode(label_dict=d, label=l) 155 | 156 | if len(true_index) <= self.min_sample_leaf: 157 | y_true_dict = soft_label_dic(y[true_index]) 158 | d = y_true_dict 159 | l = soft_voting(y_true_dict) 160 | 
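# NOTE: a branch holding <= min_sample_leaf samples is closed off as a leaf: d keeps the
# per-class soft-label mass from soft_label_dic, and soft_voting picks the class with the
# largest mass as the leaf's prediction.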
true_branch = DecisionNode(label_dict=d, label=l) 161 | else: 162 | true_branch = self.__build_tree(X[true_index], y[true_index]) 163 | 164 | if len(false_index) <= self.min_sample_leaf: 165 | y_false_dict = soft_label_dic(y[false_index]) 166 | d = y_false_dict 167 | l = soft_voting(y_false_dict) 168 | false_branch = DecisionNode(label_dict=d, label=l) 169 | else: 170 | false_branch = self.__build_tree(X[false_index], y[false_index]) 171 | 172 | return DecisionNode(feature=split_feature, threshold=threshold, label_dict=y_dict, 173 | true_branch=true_branch, false_branch=false_branch) 174 | 175 | 176 | def find_path(self, data, attr_map): 177 | 178 | path = [] 179 | for d in data: 180 | p = [] 181 | path.append(self.root.find_path(d, p, self.features_attr, attr_map)) 182 | return path 183 | 184 | def show_tree(self,model_path): 185 | path=[] 186 | attr_map=[] 187 | self.root.show_tree(path, self.features_attr, attr_map,model_path) 188 | -------------------------------------------------------------------------------- /Distillation/Tree.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | 4 | from __future__ import print_function 5 | from utils import * 6 | from DecisionNode import * 7 | import numpy as np 8 | 9 | 10 | class TreeClassifier: 11 | 12 | def __init__(self, n_features=None, min_sample_leaf=5): 13 | self.root = None 14 | self.min_sample_leaf = min_sample_leaf 15 | self.n_features = n_features 16 | self.features_attr = None 17 | 18 | def fit(self, X, y, features_attr=None): 19 | 20 | # feature_atrr: an array, its size is same as the number of features, 21 | # 'd' 'is discrete', 'c' is continuous 22 | 23 | np.random.seed() 24 | self.features_attr = features_attr 25 | 26 | if self.n_features is None or self.n_features == "all": 27 | self.n_features = X.shape[1] 28 | elif self.n_features == "sqrt": 29 | self.n_features = int(np.sqrt(X.shape[1])) 30 | elif self.n_features == "half": 31 | self.n_features = int(0.5 * X.shape[1]) 32 | elif self.n_features == "sqrt(nlogn)": 33 | self.n_features = int(np.sqrt(X.shape[1]*np.log(X.shape[1]))) 34 | 35 | dataset = np.concatenate((np.array(X), np.array([y]).T), axis=1) 36 | self.root = self.__build_ind_tree_rec(dataset.astype(np.double)) 37 | 38 | def predict(self, X): 39 | 40 | if np.ndim(X) == 1: 41 | return self.__predict_rec(X, self.root) 42 | else: 43 | result = [] 44 | for sample in X: 45 | result.append(self.__predict_rec(sample, self.root)) 46 | return np.array(result) 47 | 48 | def __predict_rec(self, X, node): 49 | if node.label is not None: 50 | return node.label 51 | else: 52 | feat_value = X[node.feature] 53 | feat_attr = self.features_attr[node.feature] 54 | threshold = node.threshold 55 | 56 | if feat_value is None or feat_value is np.nan: 57 | choice = np.random.randint(1, 3) 58 | if choice == 1: 59 | return self.__predict_rec(X, node.true_branch) 60 | else: 61 | return self.__predict_rec(X, node.false_branch) 62 | else: 63 | if feat_attr == 'd': 64 | if feat_value == threshold: 65 | return self.__predict_rec(X, node.true_branch) 66 | else: 67 | return self.__predict_rec(X, node.false_branch) 68 | elif feat_attr == 'c': 69 | if feat_value >= threshold: 70 | return self.__predict_rec(X, node.true_branch) 71 | else: 72 | return self.__predict_rec(X, node.false_branch) 73 | 74 | def __split(self, dataset, split_feature, threshold): 75 | 76 | true_index = [] 77 | false_index = [] 78 | 79 | if self.features_attr[split_feature] == 'd': 80 | for i in range(len(dataset)): 81 | 
if dataset[i][split_feature] == threshold: 82 | true_index.append(i) 83 | else: 84 | false_index.append(i) 85 | elif self.features_attr[split_feature] == 'c': 86 | for i in range(len(dataset)): 87 | if dataset[i][split_feature] >= threshold: 88 | true_index.append(i) 89 | else: 90 | false_index.append(i) 91 | 92 | return true_index, false_index 93 | 94 | def __split_pair(self, dataset, candidate_features): 95 | 96 | current_gini = cal_gini(dataset[:, -1]) 97 | 98 | ret = [] 99 | 100 | for feat in candidate_features: 101 | col = dataset[:, feat] 102 | unique_col = np.unique(col) 103 | attr = self.features_attr[feat] 104 | 105 | threshold_list = [] 106 | if attr == 'd' or unique_col.shape == 1: 107 | threshold_list = unique_col 108 | elif attr == 'c': 109 | threshold_list = [(unique_col[i]+unique_col[i+1]) / 2 for i in range(len(unique_col)-1)] 110 | 111 | for t in threshold_list: 112 | true_index, false_index = self.__split(dataset, feat, t) 113 | p = float(len(true_index)) / len(dataset) 114 | next_gini = p * cal_gini(dataset[true_index, -1]) + \ 115 | (1-p) * cal_gini(dataset[false_index, -1]) 116 | gain = current_gini - next_gini 117 | ret.append([gain, feat, t]) 118 | ret = np.array(ret) 119 | return ret[np.argsort(-ret[:, 0])] 120 | 121 | def __build_ind_tree_rec(self, dataset): 122 | 123 | y_dict = cal_label_dic(dataset[:, -1]) 124 | 125 | if len(y_dict) == 1: 126 | d = y_dict 127 | l = voting(y_dict) 128 | return DecisionNode(label_dict=d, label=l) 129 | 130 | candidate_features = [] 131 | for i in range(dataset.shape[1]-1): 132 | if len(np.unique(dataset[:, i])) > 1: 133 | candidate_features.append(i) 134 | if candidate_features == []: 135 | d = y_dict 136 | l = voting(y_dict) 137 | return DecisionNode(label_dict=d, label=l) 138 | 139 | candidate_features = np.random.choice(candidate_features, 140 | min(self.n_features, len(candidate_features)), replace=False) 141 | 142 | split_pair = self.__split_pair(dataset, candidate_features) 143 | 144 | split_feature, threshold = int(split_pair[0][1]), split_pair[0][2] 145 | 146 | true_index, false_index = self.__split(dataset, split_feature, threshold) 147 | 148 | if len(true_index) == 0 or len(false_index) == 0: 149 | d = y_dict 150 | l = voting(y_dict) 151 | return DecisionNode(label_dict=d, label=l) 152 | 153 | if len(true_index) <= self.min_sample_leaf: 154 | y_true_dict = cal_label_dic(dataset[true_index, -1]) 155 | d = y_true_dict 156 | l = voting(y_true_dict) 157 | true_branch = DecisionNode(label_dict=d, label=l) 158 | else: 159 | true_branch = self.__build_ind_tree_rec(dataset[true_index]) 160 | 161 | if len(false_index) <= self.min_sample_leaf: 162 | y_false_dict = cal_label_dic(dataset[false_index, -1]) 163 | d = y_false_dict 164 | l = voting(y_false_dict) 165 | false_branch = DecisionNode(label_dict=d, label=l) 166 | else: 167 | false_branch = self.__build_ind_tree_rec(dataset[false_index]) 168 | 169 | return DecisionNode(feature=split_feature, threshold=threshold, label_dict=y_dict, 170 | true_branch=true_branch, false_branch=false_branch) 171 | def show_tree(self,model_path): 172 | path=[] 173 | attr_map=[] 174 | self.root.show_tree(path, self.features_attr, attr_map,model_path) 175 | 176 | 177 | -------------------------------------------------------------------------------- /Distillation/__pycache__/DecisionNode.cpython-37.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/__pycache__/DecisionNode.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/__pycache__/RandomForest.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/__pycache__/RandomForest.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/__pycache__/SoftTree.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/__pycache__/SoftTree.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/__pycache__/Tree.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/__pycache__/Tree.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/__pycache__/train_teacher.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/__pycache__/train_teacher.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/__pycache__/utils.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/__pycache__/utils.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/bdt_test.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | 4 | 5 | import warnings 6 | warnings.filterwarnings('ignore') 7 | import imp 8 | from Tree import * 9 | from utils import * 10 | from RandomForest import * 11 | import torch 12 | from sklearn.model_selection import KFold 13 | from sklearn.ensemble import RandomForestClassifier 14 | from sklearn.ensemble import GradientBoostingClassifier 15 | from sklearn.model_selection import train_test_split 16 | from sklearn.tree import DecisionTreeClassifier 17 | import argparse 18 | import time 19 | from train_teacher import * 20 | from SoftTree import * 21 | from models import * 22 | import time 23 | from sklearn.metrics import classification_report 24 | import torch.nn.functional as F 25 | 26 | 27 | def softmax(x): 28 | """ 29 | 对输入x的每一行计算softmax。 30 | 31 | 该函数对于输入是向量(将向量视为单独的行)或者矩阵(M x N)均适用。 32 | 33 | 代码利用softmax函数的性质: softmax(x) = softmax(x + c) 34 | 35 | 参数: 36 | x -- 一个N维向量,或者M x N维numpy矩阵. 
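(English: computes softmax over each row of x; works for a vector, treated as a single row, or an M x N matrix, and relies on the shift identity softmax(x) = softmax(x + c), subtracting the row maximum so the exponentials cannot overflow.)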
37 | 38 | Returns: 39 | x -- x, transformed inside the function 40 | """ 41 | orig_shape = x.shape 42 | 43 | # compute softmax separately depending on whether the input is a matrix or a vector 44 | if len(x.shape) > 1: 45 | # matrix 46 | tmp = np.max(x, axis=1) # row-wise maxima, used to rescale each row and avoid overflow; shape is (x.shape[0],) 47 | x -= tmp.reshape((x.shape[0], 1)) # rescale the elements using the shift property 48 | x = np.exp(x) # exponentiate all values 49 | tmp = np.sum(x, axis=1) # row-wise sums 50 | x /= tmp.reshape((x.shape[0], 1)) # softmax 51 | else: 52 | # vector 53 | tmp = np.max(x) # maximum value 54 | x -= tmp # rescale the data by the maximum 55 | x = np.exp(x) # exponentiate all elements 56 | tmp = np.sum(x) # sum of the elements 57 | x /= tmp # softmax 58 | return x 59 | 60 | 61 | def produce_soft_labels(data, round_num, fold_num, k=1, model='rf'): 62 | 63 | soft_label = np.zeros([data.shape[0], len(np.unique(data[:, -1]))]) 64 | 65 | for i in range(round_num): 66 | kf = KFold(n_splits=fold_num) 67 | for train_index, test_index in kf.split(X=data[:, :-1], y=data[:, -1], groups=data[:, -1]): 68 | train_set, test_set = data[train_index], data[test_index] 69 | train_X, train_Y = train_set[:, :-1], train_set[:, -1].astype(int) 70 | test_X = test_set[:, :-1] 71 | if model == 'rf': 72 | clf = RandomForestClassifier(300, min_samples_leaf=5, criterion="gini") 73 | clf.fit(train_X, train_Y) 74 | 75 | pred_prob = clf.predict_proba(test_X) 76 | soft_label[test_index] += pred_prob 77 | 78 | soft_label /= round_num 79 | 80 | hard_label = np.zeros([data.shape[0], len(np.unique(data[:, -1]))]) 81 | for i in range(np.shape(data)[0]): 82 | hard_label[i][int(data[i, -1])] = 1 83 | 84 | soft_label = (soft_label + hard_label*k) / (k+1) 85 | 86 | return soft_label 87 | 88 | def NN_produce_soft_kf(data, round_num, fold_num, k=1, T=1, model='mlp', data_name='iot'): 89 | #input_size = data.shape[1] - 1 90 | output_size = len(np.unique(data[:, -1])) 91 | soft_label = np.zeros([data.shape[0], output_size]) 92 | 93 | 94 | for i in range(round_num): 95 | kf = KFold(n_splits=fold_num) 96 | for train_index, test_index in kf.split(X=data[:, :-1], y=data[:, -1], groups=data[:, -1]): 97 | train_set, test_set = data[train_index], data[test_index] 98 | train_X, train_Y = train_set[:, :-1], train_set[:, -1].astype(int) 99 | test_X = test_set[:, :-1] 100 | NN, best_acc = Train_Teacher(train_X, train_Y, model, data_name) 101 | 102 | test_X = torch.tensor(test_X, dtype=torch.float32) 103 | 104 | temp = NN(test_X[0:1000].cuda()) 105 | pred_prob = temp.detach().cpu().numpy() 106 | for i in range(1000, test_X.shape[0], 1000): 107 | temp = NN(test_X[i:i + 1000].cuda()) 108 | pred_prob = np.append(pred_prob, temp.detach().cpu().numpy(), axis=0) 109 | 110 | soft_label[test_index] += pred_prob 111 | 112 | soft_label /= round_num 113 | 114 | hard_label = np.zeros([data.shape[0], len(np.unique(data[:, -1]))]) 115 | for i in range(np.shape(data)[0]): 116 | hard_label[i][int(data[i, -1])] = 1 117 | 118 | soft_label = softmax(soft_label / T) 119 | soft_label = (soft_label + hard_label*k) / (k+1) 120 | 121 | return soft_label 122 | 123 | 124 | 125 | if __name__ == "__main__": 126 | parser = argparse.ArgumentParser(description='PyTorch SDT Training') 127 | parser.add_argument( 128 | '--teacher', default='rf', choices=['rf', 'gru'], type=str, help='teacher model selection') 129 | parser.add_argument( 130 | '--cuda', default=1, choices=[0, 1, 2, 3], type=int, help='cuda selection') 131 | parser.add_argument( 132 | '--K', default=1, type=int, help='the proportion of hard label') 133 | parser.add_argument( 134 | '--T', default=1, type=int, help='the temperature of soft label') 135 | 136 | args = parser.parse_args() 137 | # torch.cuda.set_device(args.cuda) 138 | 139 |
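# NOTE: K below weights the one-hot hard labels when they are mixed with the teacher's
# out-of-fold probabilities in produce_soft_labels / NN_produce_soft_kf,
#     soft = (soft + hard * K) / (K + 1),
# and T is the distillation temperature applied as softmax(logits / T) on the NN path.
# Worked example with K=1: teacher probs [0.8, 0.2] and hard label [1, 0] mix to [0.9, 0.1].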
ROUND_NUM = 5 140 | TEST_SIZE = 0.3 141 | #MAX_T=[1,2,3,4,5,6] 142 | MAX_K = 1 143 | 144 | for data_name in ['univ']: 145 | data_train, feature_attr = load_data(data_name) 146 | data_eval, feature_attr=load_data('univ_test') 147 | print('training data:') 148 | print(data_name, data_train.shape) 149 | 150 | #kk=args.K 151 | T=1 152 | output = [] 153 | teacher_output = [] 154 | sdt_output = [] 155 | for kk in range(MAX_K): 156 | print("--------------", kk, "--------------") 157 | acc_sdt, acc_dt, acc_Teacher, sdt_time, dt_time, sdt_test_time, NN_time = [], [], [], [], [], [], [] 158 | teacher_report, sdt_report = [], [] 159 | for i in range(ROUND_NUM): 160 | print("ROUND:", str(i)) 161 | 162 | 163 | begin_time = time.time() 164 | if args.teacher == 'rf': 165 | soft_label = produce_soft_labels(data_train, round_num=1, fold_num=2, k=kk, model='rf') 166 | else: 167 | soft_label = NN_produce_soft_kf(data_train, round_num=1, fold_num=2, k=kk, T=T, model=args.teacher, 168 | data_name=data_name) 169 | end_time = time.time() 170 | print('produce soft label needs {:}s'.format(end_time - begin_time)) 171 | 172 | if args.teacher == 'rf': 173 | # random forest 174 | clf = RandomForestClassifier(n_estimators=300, min_samples_leaf=5, criterion="gini") 175 | clf.fit(data_train[:, :-1], data_train[:, -1].astype(int)) 176 | pred = clf.predict(data_eval[:, :-1]) 177 | acc_Teacher.append(accuracy(pred, data_eval[:, -1].astype(int))) 178 | else: # NN 179 | begin_time = time.time() 180 | NN, best_acc = Train_Teacher(data_train[:, :-1], data_train[:, -1].astype(int), args.teacher, 181 | data_name) 182 | end_time = time.time() 183 | t1 = end_time - begin_time 184 | NN_time.append(t1) 185 | print('training {:} needs {:}s'.format(args.teacher, t1)) 186 | test_X = data_eval[:, :-1] 187 | test_X = torch.tensor(test_X, dtype=torch.float32) 188 | test_Y = data_eval[:, -1].astype(int) 189 | test_Y = torch.tensor(test_Y) 190 | 191 | test_datasets = Data.TensorDataset(test_X, test_Y) 192 | test_loader = Data.DataLoader(dataset=test_datasets, batch_size=128, shuffle=False, num_workers=2) 193 | correct = 0 194 | pred = np.array([]) 195 | for batch_idx, (data_X, target) in enumerate(test_loader): 196 | data_X = data_X.cuda() 197 | test_output = NN(data_X) 198 | pred_y = torch.max(test_output, 1)[1].cpu().data.numpy() 199 | pred = np.concatenate([pred, pred_y], axis=0) 200 | correct += (pred_y == np.array(target.view(-1).data)).sum() 201 | acc = float(correct) / len(test_loader.dataset) 202 | print(args.teacher + '| test accuracy: %.4f' % acc) 203 | train_acc = 0.0 204 | acc_Teacher.append(acc) 205 | teacher_round = get_c_avg(classification_report(data_eval[:, -1], pred, digits=4, output_dict=True)) 206 | teacher_report.append(teacher_round) 207 | 208 | # soft decision tree 209 | begin_time = time.time() 210 | clf = SoftTreeClassifier(n_features="all", min_sample_leaf=5) 211 | clf.fit(data_train[:, :-1], soft_label, feature_attr) 212 | end_time = time.time() 213 | t = end_time - begin_time 214 | sdt_time.append(t) 215 | print('training sdt needs {:}s'.format(t)) 216 | 217 | begin_time = time.time() 218 | pred = clf.predict(data_eval[:, :-1]) 219 | acc = accuracy(pred, data_eval[:, -1]) 220 | end_time = time.time() 221 | t = end_time - begin_time 222 | sdt_test_time.append(t) 223 | print('sdt testing needs {:}s'.format(t)) 224 | print(" soft decision tree:", acc) 225 | acc_sdt.append(acc) 226 | 227 | 228 | 229 | sdt_round = get_c_avg(classification_report(data_eval[:, -1], pred, digits=4, output_dict=True)) 230 | 
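# NOTE: get_c_avg (utils.py) flattens the sklearn classification_report into six numbers:
# the macro-average [precision, recall, f1-score] followed by the weighted-average triple.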
sdt_report.append(sdt_round) 231 | 232 | model_path = './rule_tree/{}_{}_kk{}_round{}.txt'.format(data_name, args.teacher, str(kk),str(i)) 233 | clf.show_tree(model_path) 234 | print("teacher_report", teacher_round) 235 | print("sdt_report", sdt_round) 236 | 237 | begin_time = time.time() 238 | clf = TreeClassifier(n_features="all", min_sample_leaf=5) 239 | clf.fit(data_train[:, :-1], data_train[:, -1], feature_attr) 240 | 241 | end_time = time.time() 242 | t1 = end_time - begin_time 243 | dt_time.append(t1) 244 | print('training BDT needs {:}s'.format(t1)) 245 | 246 | pred = clf.predict(data_eval[:, :-1]) 247 | acc = accuracy(pred, data_eval[:, -1].astype(int)) 248 | print('BDT accuracy {:}%'.format(acc)) 249 | acc_dt.append(acc) 250 | 251 | if data_name == "mnist" and i == 2: 252 | break 253 | print(" {:} Classifier:".format(args.teacher), np.mean(acc_Teacher)) 254 | print(" binary decision tree:", np.mean(acc_dt)) 255 | print(" soft decision tree mean acc:", np.mean(acc_sdt)) 256 | print(" soft decision tree mean time cost:", np.mean(sdt_time)) 257 | print(' BDT mean time cost ', np.mean(dt_time)) 258 | print(' NN mean time cost ', np.mean(NN_time)) 259 | 260 | output.append( 261 | [kk, np.mean(acc_Teacher), np.mean(acc_dt), np.mean(acc_sdt), np.mean(sdt_time), 262 | np.mean(dt_time), np.mean(NN_time)]) 263 | teacher_output.append(np.mean(teacher_report, axis=0)) 264 | sdt_output.append(np.mean(sdt_report, axis=0)) 265 | 266 | np.savetxt("./result_acc/" + data_name + "_" + args.teacher + "_acc.txt", np.array(output)) 267 | np.savetxt("./result_acc/" + data_name + "_" + args.teacher + "_teacher.txt", np.array(teacher_output)) 268 | np.savetxt("./result_acc/" + data_name + "_" + args.teacher + "_sdt.txt", np.array(sdt_output)) 269 | -------------------------------------------------------------------------------- /Distillation/dt_test.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | # @author: Dongyutao 4 | 5 | 6 | import warnings 7 | 8 | warnings.filterwarnings('ignore') 9 | from Tree import * 10 | from utils import * 11 | from RandomForest import * 12 | from SoftTree import * 13 | import time 14 | import numpy as np 15 | from sklearn.metrics import classification_report 16 | 17 | 18 | if __name__ == "__main__": 19 | 20 | ROUND_NUM = 5 21 | TEST_SIZE = 0.3 22 | MAX_K = 1 23 | 24 | for data_name in ['univ_C']: # 25 | data_train, feature_attr = load_data(data_name) 26 | 27 | data_eval, feature_attr = load_data('univ_test_C') 28 | print('training data:') 29 | 30 | print('\n testing data:') 31 | print(data_eval.shape) 32 | 33 | output = [] 34 | for kk in range(MAX_K): 35 | print("--------------", kk, "--------------") 36 | acc_hdt, hdt_time, hdt_test_time = [], [], [] 37 | hdt_report = [] 38 | for i in range(ROUND_NUM): 39 | print("ROUND:", str(i)) 40 | 41 | # hand decision tree 42 | begin_time = time.time() 43 | clf = TreeClassifier(n_features="all", min_sample_leaf=5) 44 | clf.fit(data_train[:, :-1], data_train[:, -1], feature_attr) 45 | 46 | end_time = time.time() 47 | t1 = end_time - begin_time 48 | hdt_time.append(t1) 49 | print('training dt needs {:}s'.format(t1)) 50 | 51 | begin_time = time.time() 52 | pred = clf.predict(data_eval[:, :-1]) 53 | acc = accuracy(pred, data_eval[:, -1].astype(int)) 54 | end_time = time.time() 55 | t1 = end_time - begin_time 56 | hdt_test_time.append((t1)) 57 | print('dt accuracy {:}%'.format(acc)) 58 | acc_hdt.append(acc) 59 | 60 | model_path = 
'./rule_tree/{}_round{}_DT.txt'.format(data_name, str(i)) 61 | clf.show_tree(model_path) 62 | 63 | hdt_round = get_c_avg(classification_report(data_eval[:, -1], pred, digits=4, output_dict=True)) 64 | hdt_report.append(hdt_round) 65 | 66 | print('acc_hdt', acc_hdt) 67 | output.append([kk, np.mean(acc_hdt), np.mean(hdt_time), np.mean(hdt_test_time)]) 68 | 69 | np.savetxt("./result_acc/" + data_name + "_DT_Acc.txt", np.array(output)) 70 | np.savetxt("./result_acc/" + data_name + "_DT_report.txt", np.mean(hdt_report, axis=0)) 71 | -------------------------------------------------------------------------------- /Distillation/models/GRU.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | class GRU(nn.Module): 5 | def __init__(self,INPUT_SIZE, output_size): 6 | super(GRU, self).__init__() 7 | # self.LN1=nn.LayerNorm(INPUT_SIZE) 8 | #self.WINDOW_SIZE=WINDOW_SIZE 9 | self.INPUT_SIZE=INPUT_SIZE 10 | self.gru = nn.GRU(input_size=self.INPUT_SIZE, 11 | hidden_size=256, 12 | num_layers=2, 13 | batch_first=True, 14 | # dropout=0.5 15 | ) 16 | 17 | self.out = nn.Sequential(nn.Linear(256, output_size), nn.Softmax()) 18 | 19 | def forward(self, x): 20 | x = x[:, np.newaxis, :] 21 | r_out, self.hidden = self.gru(x, None) # x(batch,time_step,input_size) 22 | # choose r_out at the last time step 23 | out = self.out(r_out[:, -1, :]) 24 | return out -------------------------------------------------------------------------------- /Distillation/models/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from .GRU import * 3 | -------------------------------------------------------------------------------- /Distillation/models/__pycache__/GRU.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/models/__pycache__/GRU.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/models/__pycache__/__init__.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/xgr19/Mousika/76c9db9b4200a7c591bd1e9476ba5ab44a467512/Distillation/models/__pycache__/__init__.cpython-37.pyc -------------------------------------------------------------------------------- /Distillation/result_acc/univ_rf_acc.txt: -------------------------------------------------------------------------------- 1 | 0.000000000000000000e+00 9.382377514669663077e-01 8.875743170100284241e-01 9.149716535852583377e-01 9.398486614227294922e+01 1.634311057567596492e+02 nan 2 | -------------------------------------------------------------------------------- /Distillation/result_acc/univ_rf_sdt.txt: -------------------------------------------------------------------------------- 1 | 7.867671077746412056e-01 8.962951893628094968e-01 8.274530676129312479e-01 9.367135329054965265e-01 9.149716535852583377e-01 9.217991014081887702e-01 2 | -------------------------------------------------------------------------------- /Distillation/result_acc/univ_rf_teacher.txt: -------------------------------------------------------------------------------- 1 | 8.330224837220192580e-01 9.092606574866722857e-01 8.652982825393358279e-01 9.477821747880795789e-01 9.382377514669663077e-01 9.414024832586120883e-01 2 | 
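Note: the three result_acc files above are written by bdt_test.py. Each row of univ_rf_acc.txt follows output.append: [K, teacher accuracy, BDT accuracy, SDT accuracy, mean SDT / BDT / NN training time] (the trailing nan is np.mean of the empty NN_time list on the rf teacher path), while univ_rf_teacher.txt and univ_rf_sdt.txt each hold the six get_c_avg values per row.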
-------------------------------------------------------------------------------- /Distillation/train_teacher.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | from models import * 4 | import torch 5 | import torch.utils.data as Data 6 | import numpy as np 7 | from sklearn.model_selection import train_test_split 8 | from utils import * 9 | # torch.manual_seed(1) # reproducible 10 | import torch.nn.functional as F 11 | 12 | 13 | 14 | def select_model(T,input_size,output_size): 15 | return GRU(input_size, output_size) 16 | 17 | def Train_Teacher(X,y,model_name,data_name): 18 | input_size = X.shape[1] 19 | output_size = len(np.unique(y)) 20 | 21 | model = select_model(model_name, input_size, output_size) 22 | model = model.cuda() 23 | BATCH_SIZE=256 24 | X_train, X_test, y_train, y_test = train_test_split(X, y, shuffle=True, random_state=20, test_size=0.2) 25 | X_train = torch.tensor(X_train, dtype=torch.float32) 26 | y_train = torch.tensor(y_train) 27 | 28 | X_test = torch.tensor(X_test, dtype=torch.float32) 29 | y_test = torch.tensor(y_test) 30 | 31 | train_datasets = Data.TensorDataset(X_train, y_train) 32 | test_datasets = Data.TensorDataset(X_test, y_test) 33 | 34 | train_loader = Data.DataLoader(dataset=train_datasets, batch_size=BATCH_SIZE, shuffle=True, num_workers=2) 35 | test_loader = Data.DataLoader(dataset=test_datasets, batch_size=BATCH_SIZE, shuffle=True, num_workers=2) 36 | # Hyper Parameters 37 | EPOCH = 10 # train the training data n times, to save time, we just train 1 epoch 38 | LR = 0.001 # learning rate 39 | best_testing_acc = 0.0 40 | best_epoch=0 41 | 42 | train_acc=0.0 43 | 44 | 45 | model_path = "../params/Teacher_"+data_name+"_model_"+model_name+".pkl" 46 | optimizer = torch.optim.Adam(model.parameters(), lr=LR) # optimize all model parameters 47 | loss_func = nn.CrossEntropyLoss() # the target label is not one-hotted 48 | # training and testing 49 | 50 | for epoch in range(EPOCH): 51 | for step, (b_x, b_y) in enumerate(train_loader): # gives batch data, normalize x when iterate train_loader 52 | b_x=b_x.cuda() 53 | b_y=b_y.cuda() 54 | output = model(b_x) # model output 55 | output= F.softmax(output,dim=1) 56 | loss = loss_func(output, b_y) # cross entropy loss 57 | optimizer.zero_grad() # clear gradients for this training step 58 | loss.backward() # backpropagation, compute gradients 59 | optimizer.step() # apply gradients 60 | 61 | train_pred_y = torch.max(output, 1)[1].cpu().data.numpy() 62 | train_acc += 100.0 * float((train_pred_y == np.array(b_y.cpu().view(-1).data)).sum())/ len(b_y) 63 | 64 | if step % 100 == 0: 65 | correct = 0. 
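# NOTE: every 100 steps the loop below measures test-set accuracy and checkpoints the best
# weights. One caveat: nn.CrossEntropyLoss already applies log-softmax to its input, and the
# GRU head (models/GRU.py) already ends in nn.Softmax, so the explicit F.softmax above
# normalizes the outputs yet again before the loss; training still runs, but the gradients
# are flattened. A minimal fix, assuming the model is changed to return raw logits, is:
#     loss = loss_func(model(b_x), b_y)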
66 | for batch_idx, (data, target) in enumerate(test_loader): 67 | data=data.cuda() 68 | test_output = model(data) 69 | pred_y = torch.max(test_output, 1)[1].cpu().data.numpy() 70 | 71 | correct += (pred_y==np.array(target.view(-1).data)).sum() 72 | 73 | accuracy = 100.0 * float(correct) / len(test_loader.dataset) 74 | 75 | #print('Epoch: ', epoch, '| train loss: %.4f' % loss.cpu().data.numpy(),'| train acc: %.4f' % float(train_acc/100), '| test accuracy: %.2f' % accuracy) 76 | train_acc=0.0 77 | 78 | if accuracy > best_testing_acc: 79 | best_epoch=epoch 80 | best_testing_acc = accuracy 81 | torch.save(model.state_dict(), model_path) 82 | #print('epoch| {:} best training acc {:}'.format(epoch,best_testing_acc)) 83 | ''' 84 | correct = 0 85 | for batch_idx, (data, target) in enumerate(eval_loader): 86 | data = data.cuda() 87 | test_output = model(data) 88 | pred_y = torch.max(test_output, 1)[1].cpu().data.numpy() 89 | 90 | correct += (pred_y == np.array(target.view(-1).data)).sum() 91 | 92 | acc = float(correct) / len(eval_loader.dataset) 93 | 94 | print(model_name + '| test accuracy: %.4f' % acc) 95 | #train_acc = 0.0 96 | ''' 97 | 98 | print('epoch| {:} best training acc {:}'.format(best_epoch, best_testing_acc)) 99 | model.load_state_dict(torch.load(model_path)) 100 | return model, best_testing_acc 101 | 102 | #BATCH_SIZE = 50 103 | # Load data 104 | #load=LoadData(BATCH_SIZE) 105 | #train_loader,test_loader=load.get_univ_data() 106 | 107 | #cnn = CNN() 108 | #mlp=MLP(128,2) 109 | #mlp=mlp.cuda() 110 | #print(cnn) # net architecture 111 | #Train_Teacher(train_loader,test_loader,mlp) 112 | ''' 113 | data_name='iot' 114 | model_name='lstm' 115 | data_train, feature_attr = load_data('iot_bin') 116 | 117 | data_eval, feature_attr=load_data('iot_test_bin') 118 | 119 | 120 | test_X = data_eval[:, :-1] 121 | test_X = torch.tensor(test_X, dtype=torch.float32) 122 | test_Y=data_eval[:, -1].astype(int) 123 | test_Y = torch.tensor(test_Y) 124 | 125 | test_datasets = Data.TensorDataset(test_X, test_Y) 126 | eval_loader = Data.DataLoader(dataset=test_datasets, batch_size=128, shuffle=True, num_workers=2) 127 | NN,best_acc=Train_Teacher(data_train[:, :-1], data_train[:, -1].astype(int),model_name,data_name) 128 | ''' -------------------------------------------------------------------------------- /Distillation/utils.py: -------------------------------------------------------------------------------- 1 | # encoding: utf-8 2 | 3 | 4 | import random 5 | import numpy as np 6 | import pandas as pd 7 | import time 8 | #import Cython 9 | import copy 10 | from sklearn.model_selection import train_test_split 11 | 12 | def cal_label_dic(label_col): 13 | # return a dic, key is label, value is count of label 14 | dic = {} 15 | for label in label_col: 16 | if label not in dic: 17 | dic[label] = 0 18 | dic[label] += 1 19 | return dic 20 | 21 | 22 | def split_train_test(df, train_percent=0.8,bin=True): 23 | """ 24 | @description : divide train set and test set according to flow 25 | @param : df(dtype=np.int16), tran set percent 26 | @Returns : training sets and test sets that contain binary features 27 | """ 28 | drop_cols = ["srcPort", "dstPort", "protocol", 'srcIP', 'dstIP', 29 | "ip_ihl", "ip_tos", "ip_flags", "ip_ttl", "tcp_dataofs", "tcp_flag", "tcp_window", 30 | "udp_len", 31 | "length", 32 | 'srcAddr1', 'srcAddr2', 'srcAddr3', 'srcAddr4', 'dstAddr1', 'dstAddr2', 'dstAddr3', 33 | 'dstAddr4'] 34 | for col_names in ['srcAddr{}'.format(i) for i in range(1, 5)]: 35 | df[col_names] = df[col_names].astype('str') 36 
| for col_names in ['dstAddr{}'.format(i) for i in range(1, 5)]: 37 | df[col_names] = df[col_names].astype('str') 38 | df['srcIP'] = df['srcAddr1'].str.cat([df['srcAddr2'], df['srcAddr3'], df['srcAddr4']], sep='.') 39 | df['dstIP'] = df['dstAddr1'].str.cat([df['dstAddr2'], df['dstAddr3'], df['dstAddr4']], sep='.') 40 | group = df.groupby(["srcIP", "srcPort", "dstIP", "dstPort", "protocol"]) 41 | 42 | # ngroups: the number of groups 43 | total_index = np.arange(group.ngroups) 44 | print('total flow number', len(total_index)) 45 | np.random.seed(1234) 46 | np.random.shuffle(total_index) 47 | split_index = int(len(total_index) * train_percent) 48 | # ngroup(): Number each group from 0 to the number of groups - 1. 49 | df_train = df[group.ngroup().isin(total_index[: split_index])] 50 | df_test = df[group.ngroup().isin(total_index[split_index:])] 51 | df_train.reset_index(drop=True, inplace=True) 52 | df_test.reset_index(drop=True, inplace=True) 53 | if bin: 54 | df_train.drop(drop_cols, axis=1, inplace=True) 55 | df_test.drop(drop_cols, axis=1, inplace=True) 56 | 57 | 58 | return df_train, df_test 59 | 60 | 61 | 62 | 63 | 64 | def cal_tsallis_entropy(label_column, q): 65 | # Tsallis entropy as a impurity criterion 66 | total = len(label_column) 67 | label_dic = cal_label_dic(label_column) 68 | tsa = 0 69 | 70 | if q != 1: 71 | tmp = 0 72 | for k in label_dic: 73 | p = float(label_dic[k]) / total 74 | tmp += p ** q 75 | tsa = (tmp - 1) / (1 - q) 76 | else: 77 | for k in label_dic: 78 | p = float(label_dic[k]) / total 79 | tsa += -p*np.log(p) 80 | 81 | return tsa 82 | 83 | def cal_entropy_from_histogram(h): 84 | 85 | l_num, r_num, l_gini, r_gini = 0, 0, 1, 1 86 | for c, n in h['L'].items(): 87 | l_num += n 88 | for c, n in h['R'].items(): 89 | r_num += n 90 | 91 | if l_num == 0: 92 | l_gini = 0 93 | else: 94 | for c, n in h['L'].items(): 95 | l_gini -= (n/l_num)**2 96 | 97 | if r_num == 0: 98 | r_gini = 0 99 | else: 100 | for c, n in h['R'].items(): 101 | r_gini -= (n / r_num) ** 2 102 | 103 | return (r_num/(r_num+l_num)) * r_gini + (l_num/(r_num+l_num)) * l_gini 104 | 105 | def cal_entropy_from_histogram2(h): 106 | 107 | l_list = np.array(list(h['L'].items())) 108 | l_num = np.sum(l_list) 109 | l_gini = 0 110 | l_gini = 1-np.sum(np.square(l_list / l_num)) 111 | 112 | r_list = np.array(list(h['R'].items())) 113 | r_num = np.sum(r_list) 114 | r_gini = 0 115 | r_gini = 1-np.sum(np.square(r_list / r_num)) 116 | 117 | return (r_num/(r_num+l_num)) * r_gini + (l_num/(r_num+l_num)) * l_gini 118 | 119 | 120 | def voting(label_dic, voting_rule="random", priority=None, random_seed=None): 121 | # return majority label, counts is a dic,key is label,value is counts of label 122 | np.random.seed(random_seed) 123 | winner_key = list(label_dic.keys())[0] 124 | for key in label_dic: 125 | if label_dic[key] > label_dic[winner_key]: 126 | winner_key = key 127 | elif label_dic[key] == label_dic[winner_key]: 128 | if voting_rule == "random": 129 | winner_key = np.random.choice([key, winner_key], 1)[0] # return a list with len 1 130 | elif voting_rule == "prior": 131 | if priority[winner_key] < priority[key]: 132 | winner_key = key 133 | elif priority[winner_key] == priority[key]: 134 | winner_key = np.random.choice([key, winner_key], 1)[0] # return a list with len 1 135 | 136 | return winner_key 137 | 138 | 139 | def bootstrap(n_samples, random_seed = None): 140 | # generate indices of samples for training a tree 141 | 142 | if random_seed is not None: 143 | np.random.seed(random_seed) 144 | sample_indices = 
np.random.randint(0, n_samples, n_samples) 145 | # sample_indices = np.unique(sample_indices) 146 | # all_sample = np.array([i for i in range(n_samples)]) 147 | # unsample_indices = np.delete(all_sample, sample_indices) 148 | unsample_indices = None 149 | return sample_indices, unsample_indices 150 | 151 | 152 | def accuracy(pred, true_value): 153 | 154 | true_num = 0 155 | for i in range(len(pred)): 156 | if pred[i] == true_value[i]: 157 | true_num += 1 158 | 159 | return float(true_num) / len(pred) 160 | 161 | 162 | def k_statistic(labels, r1, r2): 163 | 164 | dict1 = {} 165 | dict2 = {} 166 | for i in labels: 167 | dict1[i] = [] 168 | dict2[i] = [] 169 | 170 | for i in range(len(r1)): 171 | dict1[r1[i]].append(i) 172 | for i in range(len(r2)): 173 | dict2[r2[i]].append(i) 174 | 175 | c_table = {} 176 | for i in labels: 177 | t = {} 178 | for j in labels: 179 | t[j] = 0 180 | c_table[i] = t 181 | for k1 in dict1: 182 | for v1 in dict1[k1]: 183 | for k2 in dict2: 184 | if v1 in dict2[k2]: 185 | c_table[k1][k2] += 1 186 | continue 187 | # print(c_table) 188 | 189 | theta1 = 0 190 | for i in labels: 191 | theta1 += c_table[i][i] 192 | theta1 = theta1/len(r1) 193 | theta2 = 0 194 | for i in labels: 195 | factor1 = 0 196 | factor2 = 0 197 | for j in labels: 198 | factor1 += c_table[i][j] 199 | factor2 += c_table[j][i] 200 | theta2 += (factor1*factor2) / (len(r1)*len(r1)) 201 | return (theta1 - theta2) / (1 - theta2) 202 | 203 | 204 | def load_data(data_name): 205 | seed=5 206 | feature_number=112 207 | 208 | features_attr = [] 209 | if data_name=='univ': 210 | inputName = "./Dataset/univ/univ1-1.csv" 211 | df = pd.read_csv(inputName) 212 | 213 | iot_feature_names = ["srcPort", "dstPort", "protocol", 214 | "ip_ihl", "ip_tos", "ip_flags", "ip_ttl", "tcp_dataofs", "tcp_flag", "tcp_window", 215 | "udp_len", 216 | "length", 217 | 'srcAddr1', 'srcAddr2', 'srcAddr3', 'srcAddr4', 'dstAddr1', 'dstAddr2', 'dstAddr3', 218 | 'dstAddr4'] 219 | df.drop(columns=iot_feature_names, inplace=True) 220 | data = df.values 221 | from imblearn.under_sampling import RandomUnderSampler 222 | 223 | rus = RandomUnderSampler(random_state=seed) 224 | X, y = rus.fit_resample(data[:, :-1], data[:, -1]) 225 | data = np.column_stack((X, y)) 226 | data = np.random.permutation(data) 227 | for i in range(data.shape[1]-1): 228 | features_attr.append('d') 229 | #data, drop_data = train_test_split(data, shuffle=True, random_state=20, test_size=0.5) 230 | if data_name=='univ_C': 231 | inputName = "./Dataset/univ/univ1-1.csv" 232 | 233 | df = pd.read_csv(inputName) 234 | iot_feature_names = ["srcPort", "dstPort","protocol", "ip_ihl", "ip_tos", "ip_ttl", "tcp_dataofs", 235 | "tcp_window", "udp_len", "length", 'flowSize'] 236 | df = df[iot_feature_names] 237 | data = df.values 238 | from imblearn.under_sampling import RandomUnderSampler 239 | 240 | rus = RandomUnderSampler(random_state=seed) 241 | X, y = rus.fit_resample(data[:, :-1], data[:, -1]) 242 | data = np.column_stack((X, y)) 243 | data = np.random.permutation(data) 244 | 245 | for i in range(data.shape[1] - 1): 246 | features_attr.append('c') 247 | 248 | if data_name=='univ_test': 249 | inputName = "./Dataset/univ/univ1-2.csv" 250 | df = pd.read_csv(inputName) 251 | 252 | iot_feature_names = ["srcPort", "dstPort", "protocol", 253 | "ip_ihl", "ip_tos", "ip_flags", "ip_ttl", "tcp_dataofs", "tcp_flag", "tcp_window", 254 | "udp_len", 255 | "length", 256 | 'srcAddr1', 'srcAddr2', 'srcAddr3', 'srcAddr4', 'dstAddr1', 'dstAddr2', 'dstAddr3', 257 | 'dstAddr4'] 258 | 
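# NOTE: as in the 'univ' branch above, the raw numeric columns are dropped so that only the
# binarized one-bit features remain -- the same 112 bits that flowcontrol.p4 packs into
# meta.bin_feature -- and features_attr marks every remaining column as 'd' (discrete).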
df.drop(columns=iot_feature_names, inplace=True) 259 | data = df.values 260 | for i in range(data.shape[1] - 1): 261 | features_attr.append('d') 262 | if data_name=='univ_test_C': 263 | inputName = "./Dataset/univ/univ1-2.csv" 264 | 265 | df = pd.read_csv(inputName) 266 | iot_feature_names = ["srcPort", "dstPort", "protocol", "ip_ihl", "ip_tos", "ip_ttl", "tcp_dataofs", 267 | "tcp_window", "udp_len", "length", 'flowSize'] 268 | df = df[iot_feature_names] 269 | data = df.values 270 | for i in range(data.shape[1] - 1): 271 | features_attr.append('c') 272 | 273 | for i in range(data.shape[1]): 274 | if isinstance(data[0, i], str): 275 | col = data[:, i] 276 | new_col = [] 277 | for k in range(len(col)): 278 | if col[k] is np.nan: 279 | data[k, i] = -1 280 | else: 281 | new_col.append(col[k]) 282 | unique_val = np.unique(new_col) 283 | for num in range(len(unique_val)): 284 | for k in range(data.shape[0]): 285 | if data[k, i] == unique_val[num]: 286 | data[k, i] = num 287 | label = dict(zip(np.unique(data[:, -1]), list(range(len(np.unique(data[:, -1])))))) 288 | for i in range(data.shape[0]): 289 | data[i][-1] = label[data[i][-1]] 290 | 291 | for i in range(data.shape[0]): 292 | for j in range(data.shape[1]): 293 | if np.isnan(data[i][j]): 294 | data[i][j] = -1.0 295 | 296 | 297 | return data, features_attr 298 | 299 | def get_thres(flowSize, elePercent): 300 | # param flowSize is DataFrame 301 | np_flowSize = np.array(flowSize) 302 | quantile = 1 - elePercent 303 | thres = np.quantile(np_flowSize, quantile) 304 | return thres 305 | 306 | 307 | class SplitPair(list): 308 | 309 | def __hash__(self): 310 | return hash(self[0]) 311 | 312 | def __eq__(self, other): 313 | return self[0] == other[0] and self[1] == other[1] 314 | 315 | 316 | def addtodict2(thedict, key_a, key_b, val): 317 | 318 | if key_a in thedict: 319 | thedict[key_a].update({key_b: val}) 320 | else: 321 | thedict.update({key_a:{key_b: val}}) 322 | 323 | 324 | def max_min_normalization(arr): 325 | min_ = np.min(arr) 326 | max_ = np.max(arr) 327 | if max_ - min_ == 0: 328 | return np.zeros(np.shape(arr)) 329 | return (arr - min_) / (max_-min_) 330 | 331 | 332 | # histogram = {'L': {0: 1313, 1: 4242}, 'R': {0: 1331, 1: 1212}} 333 | # 334 | # l_list = np.array(list(histogram['L'].items())) 335 | # l_num = np.sum(l_list) 336 | # start_time = time.clock() 337 | # for i in range(10707060): 338 | # y = 5 - cal_entropy_from_histogram(histogram) 339 | # end_time = time.clock() 340 | # 341 | # print(end_time - start_time) 342 | 343 | def add_noise(data, percent): 344 | 345 | noise_index = np.random.choice(len(data), int(percent * len(data)), replace=False) 346 | 347 | ret_data = copy.deepcopy(data) 348 | labels = np.unique(data[:, -1]) 349 | 350 | for i in noise_index: 351 | ret_data[i][-1] = np.random.choice(labels[labels!=data[i][-1]], 1)[0] 352 | 353 | return ret_data 354 | 355 | 356 | def soft_gini(label): 357 | 358 | assert np.ndim(label) == 2 359 | 360 | label = np.array(label) 361 | sum = 0 362 | for i in range(label.shape[1]): 363 | sum += (np.sum(label[:, i]) / label.shape[0]) ** 2 364 | return 1-sum 365 | 366 | 367 | def soft_label_dic(label): 368 | 369 | assert np.ndim(label) == 2 370 | label = np.array(label) 371 | 372 | label_dict = {} 373 | for i in range(label.shape[1]): 374 | label_dict[i] = np.sum(label[:, i]) 375 | 376 | return label_dict 377 | 378 | 379 | def soft_voting(label_dic): 380 | 381 | winner_key = list(label_dic.keys())[0] 382 | for key in label_dic: 383 | if label_dic[key] > label_dic[winner_key]: 384 | 
winner_key = key 385 | elif label_dic[key] == label_dic[winner_key]: 386 | winner_key = np.random.choice([key, winner_key], 1)[0] 387 | 388 | return winner_key 389 | 390 | 391 | def cal_gini(label_column): 392 | total = len(label_column) 393 | label_dic = cal_label_dic(label_column) 394 | sum = 0 395 | for k1 in label_dic: 396 | 397 | sum += (float(label_dic[k1])/total) ** 2 398 | 399 | return 1 - sum 400 | 401 | def tree_node_num_our(node): 402 | node_num = 1 403 | if node.label is not None: 404 | return node_num 405 | else: 406 | node_num += tree_node_num_our(node.true_branch) 407 | node_num += tree_node_num_our(node.false_branch) 408 | return node_num 409 | 410 | 411 | def get_c_avg(c): 412 | """ 413 | @description : from classification_report(ans, pred, digits=4, output_dict=True) 414 | @param : classification_report 415 | @Returns : macro avg:[precision, recall, f1-score], weighted avg:[precision, recall, f1-score] 416 | """ 417 | m_avg_values = list(c['macro avg'].values())[:-1] 418 | w_avg_values = list(c['weighted avg'].values())[:-1] 419 | print('macro', m_avg_values) 420 | m_avg_values.extend(w_avg_values) 421 | return m_avg_values 422 | 423 | 424 | # def print_tree(node, parent_id, node_id): 425 | # if node.label is not None: 426 | # print(parent_id, node_id, node.label) 427 | # return 428 | # else: 429 | # print(parent_id, node_id, node.feature, node.threshold) 430 | # node_id+=1 431 | # print_tree(node.true_branch, node_id, node_id) 432 | # node_id += 1 433 | # print_tree(node.false_branch, node_id, node_id) 434 | -------------------------------------------------------------------------------- /P4/flowcontrol.p4: -------------------------------------------------------------------------------- 1 | /* -*- P4_16 -*- */ 2 | 3 | /* 4 | * Copyright (c) pcl, Inc. 
-------------------------------------------------------------------------------- /P4/flowcontrol.p4: --------------------------------------------------------------------------------
1 | /* -*- P4_16 -*- */
2 | 
3 | /*
4 |  * Copyright (c) pcl, Inc.
5 |  *
6 |  *
7 |  * Author: Guanglin Duan
8 |  */
9 | 
10 | 
11 | 
12 | 
13 | 
14 | #include <core.p4>
15 | #if __TARGET_TOFINO__ == 2
16 | #include <t2na.p4>
17 | #else
18 | #include <tna.p4>
19 | #endif
20 | #include "common/headers.p4"
21 | #include "common/util.p4"
22 | /* MACROS */
23 | 
24 | #define CPU_PORT 320
25 | #define THRESHOLD_NUMBER 100
26 | #define FLAG_NUM 6
27 | #if __TARGET_TOFINO__ == 1
28 | typedef bit<3> mirror_type_t;
29 | #else
30 | typedef bit<4> mirror_type_t;
31 | #endif
32 | const mirror_type_t MIRROR_TYPE_I2E = 1;
33 | const mirror_type_t MIRROR_TYPE_E2E = 2;
34 | /*************************************************************************
35 |  *********************** H E A D E R S ***********************************
36 |  *************************************************************************/
37 | 
38 | typedef bit<9> egressSpec_t;
39 | typedef bit<48> macAddr_t;
40 | typedef bit<32> ip4Addr_t;
41 | typedef bit<9> port_num_t;
42 | 
43 | header ethernet_t {
44 |     macAddr_t dstAddr;
45 |     macAddr_t srcAddr;
46 |     bit<16> etherType;
47 | }
48 | 
49 | header wireless_8021q_t {
50 |     bit<16> q_other;
51 |     bit<16> q_type;
52 | }
53 | 
54 | header ipv4_t {
55 |     bit<4> version;
56 |     bit<4> ihl;
57 |     bit<8> diffserv;
58 |     bit<16> totalLen;
59 |     bit<16> identification;
60 |     bit<3> flags;
61 |     bit<13> fragOffset;
62 |     bit<8> ttl;
63 |     bit<8> protocol;
64 |     bit<16> hdrChecksum;
65 |     ip4Addr_t srcAddr;
66 |     ip4Addr_t dstAddr;
67 | }
68 | 
69 | header tcp_t {
70 |     bit<16> srcPort;
71 |     bit<16> dstPort;
72 |     bit<32> seqNo;
73 |     bit<32> ackNo;
74 |     bit<4> dataOffset;
75 |     bit<4> res;
76 |     bit<8> flags;
77 |     bit<16> window;
78 |     bit<16> checksum;
79 |     bit<16> urgentPtr;
80 | }
81 | 
82 | header udp_t {
83 |     bit<16> srcPort;
84 |     bit<16> dstPort;
85 |     bit<16> length_;
86 |     bit<16> checksum;
87 | }
88 | 
89 | 
90 | struct my_ingress_metadata_t {
91 |     bit<4> tcp_dataOffset;
92 |     bit<16> tcp_window;
93 |     bit<16> udp_length;
94 |     bit<16> srcport;
95 |     bit<16> dstport;
96 |     bit<112> bin_feature; // total binary feature
97 | }
98 | 
99 | struct my_ingress_headers_t {
100 |     // my change
101 |     ethernet_t ethernet;
102 |     wireless_8021q_t wireless_8021q;
103 |     ipv4_t ipv4;
104 |     tcp_t tcp;
105 |     udp_t udp;
106 | }
107 | 
108 | /*********************** H E A D E R S ************************/
109 | 
110 | struct my_egress_headers_t {
111 | }
112 | 
113 | /******** G L O B A L   E G R E S S   M E T A D A T A *********/
114 | 
115 | struct my_egress_metadata_t {
116 | }
117 | 
118 | 
119 | const bit<16> TYPE_IPV4 = 0x800;
120 | const bit<16> TYPE_8021q = 0x8100;
121 | const bit<8> PROTO_TCP = 6;
122 | const bit<8> PROTO_UDP = 17;
123 | 
124 | 
125 | /*************************************************************************
126 |  *********************** P A R S E R ***********************************
127 |  *************************************************************************/
128 | parser IngressParser(packet_in pkt,
129 |     out my_ingress_headers_t hdr,
130 |     out my_ingress_metadata_t meta,
131 |     out ingress_intrinsic_metadata_t ig_intr_md)
132 | {
133 | 
134 |     //TofinoIngressParser() tofino_parser;
135 |     state start {
136 |         pkt.extract(ig_intr_md);
137 |         transition parse_port_metadata;
138 |     }
139 | 
140 |     state parse_port_metadata {
141 |         pkt.advance(PORT_METADATA_SIZE);
142 |         transition parse_ethernet;
143 |     }
144 |     //
145 |     state parse_ethernet {
146 |         pkt.extract(hdr.ethernet);
147 |         transition select(hdr.ethernet.etherType) {
148 |             TYPE_IPV4 : parse_ipv4;
149 |             TYPE_8021q : parse_8021q;
150 |             // default: accept;
151 |         }
152 |     }
153 | 
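    // Parse-graph summary (comment added for readability; not in the original source):
    //   ethernet -> ipv4 -> {tcp | udp} -> accept
    //   ethernet -> 802.1Q -> ipv4 -> {tcp | udp} -> accept
    // parse_tcp/parse_udp mirror the transport fields into metadata so that one
    // feature vector can be assembled regardless of the transport protocol.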
154 |     state parse_8021q {
155 |         pkt.extract(hdr.wireless_8021q);
156 |         transition select(hdr.wireless_8021q.q_type) {
157 |             TYPE_IPV4 : parse_ipv4;
158 |             // default: accept;
159 |         }
160 |     }
161 | 
162 |     state parse_ipv4 {
163 | 
164 |         pkt.extract(hdr.ipv4);
165 |         transition select(hdr.ipv4.protocol) {
166 |             PROTO_TCP : parse_tcp;
167 |             PROTO_UDP : parse_udp;
168 |             // default: accept;
169 |         }
170 |     }
171 | 
172 |     state parse_tcp {
173 |         pkt.extract(hdr.tcp);
174 |         meta.tcp_dataOffset = hdr.tcp.dataOffset;
175 |         meta.tcp_window = hdr.tcp.window;
176 |         meta.udp_length = 0x0;
177 |         meta.srcport = hdr.tcp.srcPort;
178 |         meta.dstport = hdr.tcp.dstPort;
179 |         transition accept;
180 |     }
181 | 
182 |     state parse_udp {
183 |         pkt.extract(hdr.udp);
184 |         meta.tcp_dataOffset = 0x0;
185 |         meta.tcp_window = 0x0;
186 |         meta.udp_length = hdr.udp.length_;
187 |         meta.srcport = hdr.udp.srcPort;
188 |         meta.dstport = hdr.udp.dstPort;
189 |         transition accept;
190 |     }
191 | }
192 | 
193 | 
194 | control Ingress(
195 |     /* User */
196 |     inout my_ingress_headers_t hdr,
197 |     inout my_ingress_metadata_t meta,
198 |     /* Intrinsic */
199 |     in ingress_intrinsic_metadata_t ig_intr_md,
200 |     in ingress_intrinsic_metadata_from_parser_t ig_prsr_md,
201 |     inout ingress_intrinsic_metadata_for_deparser_t ig_dprsr_md,
202 |     inout ingress_intrinsic_metadata_for_tm_t ig_tm_md
203 | )
204 | {
205 | 
206 | 
207 |     action ac_parse_ip_feature() {
208 |         meta.bin_feature[71:68] = hdr.ipv4.ihl;
209 |         meta.bin_feature[67:60] = hdr.ipv4.diffserv;
210 |         meta.bin_feature[59:52] = hdr.ipv4.ttl;
211 |         meta.bin_feature[15:0] = hdr.ipv4.totalLen;
212 |         meta.bin_feature[79:72] = hdr.ipv4.protocol;
213 |     }
214 |     action ac_parse_tcp_feature() {
215 |         meta.bin_feature[51:48] = meta.tcp_dataOffset;
216 |         meta.bin_feature[47:32] = meta.tcp_window;
217 |     }
218 |     action ac_parse_udp_feature() {
219 |         meta.bin_feature[31:16] = meta.udp_length;
220 |     }
221 |     action ac_parse_port_feature() {
222 |         meta.bin_feature[111:96] = meta.srcport;
223 |         meta.bin_feature[95:80] = meta.dstport;
224 |     }
225 |     action ac_parse_bin_feature() {
226 |         ac_parse_ip_feature();
227 |         ac_parse_tcp_feature();
228 |         ac_parse_udp_feature();
229 |         ac_parse_port_feature();
230 |     }
231 | 
232 |     @pragma stage 0
233 |     table parse_bin_feature{
234 |         actions = {
235 |             ac_parse_bin_feature;
236 |         }
237 |         default_action = ac_parse_bin_feature;
238 |     }
239 | 
240 |     // action: decide forward port
241 |     action ac_packet_forward(macAddr_t dstAddr, PortId_t port){
242 |         // ig_tm_md.ucast_egress_port = port;
243 |         ig_tm_md.ucast_egress_port = 0; // egress is currently pinned to port 0; the port argument is unused
244 |         hdr.ethernet.dstAddr = dstAddr;
245 |     }
246 |     action default_forward() {
247 |         ig_tm_md.ucast_egress_port = 0;
248 |         hdr.ethernet.dstAddr = 0x000000020204;
249 |     }
250 |     @pragma stage 1
251 |     table tb_packet_cls {
252 |         key = {
253 |             meta.bin_feature: ternary;
254 |         }
255 |         actions = {
256 |             ac_packet_forward;
257 |             default_forward;
258 |         }
259 |         default_action = default_forward();
260 |         size = 1000;
261 |     }
262 | 
263 | 
264 | 
265 |     apply {
266 |         // stage 0 concat binary feature
267 |         parse_bin_feature.apply();
268 | 
269 |         // stage 1 classification
270 |         tb_packet_cls.apply();
271 | 
272 |         ig_tm_md.bypass_egress = 1w1;
273 |     }
274 | 
275 | }
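/*
 * bin_feature bit layout assembled by ac_parse_bin_feature above (comment
 * added for readability; ranges taken directly from the actions):
 *   [111:96] srcPort        [95:80] dstPort        [79:72] ipv4.protocol
 *   [71:68]  ipv4.ihl       [67:60] ipv4.diffserv  [59:52] ipv4.ttl
 *   [51:48]  tcp.dataOffset [47:32] tcp.window     [31:16] udp.length
 *   [15:0]   ipv4.totalLen
 * tb_packet_cls matches this 112-bit key with ternary mask/value entries
 * produced offline by rule2entry/code/rule2entry.py.
 */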
276 | 
277 | 
278 | control IngressDeparser(packet_out pkt,
279 |     /* User */
280 |     inout my_ingress_headers_t hdr,
281 |     in my_ingress_metadata_t meta,
282 |     /* Intrinsic */
283 |     in ingress_intrinsic_metadata_for_deparser_t ig_dprsr_md)
284 | {
285 |     // Resubmit() resubmit;
286 |     apply {
287 |         // resubmit with resubmit_data
288 |         // if (ig_dprsr_md.resubmit_type == 2) {
289 |         //     resubmit.emit(meta.resubmit_data);
290 |         // }
291 |         pkt.emit(hdr);
292 |     }
293 | }
294 | 
295 | 
296 | 
297 | /*************************************************************************
298 |  *********************** S W I T C H *******************************
299 |  *************************************************************************/
300 | 
301 | /************ F I N A L   P A C K A G E ******************************/
302 | Pipeline(
303 |     IngressParser(),
304 |     Ingress(),
305 |     IngressDeparser(),
306 |     EmptyEgressParser(),
307 |     EmptyEgress(),
308 |     EmptyEgressDeparser()
309 | ) pipe;
310 | 
311 | Switch(pipe) main;
312 | 
313 | 
314 | 
-------------------------------------------------------------------------------- /readme.md: --------------------------------------------------------------------------------
1 | # Mousika Series of In-Network Intelligence in INFOCOM22 and ToN23
2 | **More information about us** [https://xgr19.github.io](https://xgr19.github.io)
3 | 
4 | This repository consists of two branches: the ["main"](https://github.com/xgr19/Mousika/tree/main) branch provides the demo code for generating a distilled BDT from an RF and installing it on switches, and the ["Mousikav2"](https://github.com/xgr19/Mousika/tree/Mousikav2) branch provides the complete distillation code of the extended ToN version.
5 | 
6 | We hope that our other in-network intelligence work, [Soter](https://github.com/xgr19/Soter), is also helpful to you.
7 | 
8 | ## Code Architecture
9 | 
10 | ```
11 | -- Distillation
12 |   -- code
13 |     -- train_teacher.py(train the teacher model)
14 |     -- bdt_test.py(an example of distilling the teacher model into a BDT)
15 |     -- dt_test.py(train the DT model)
16 |     -- SoftTree.py(the class of soft DT, which supports tree distillation)
17 |     -- Tree.py(the original DT)
18 |     -- utils.py(includes the load-data function)
19 |   -- Dataset/(univ.zip is the flow-size prediction dataset; both
20 |      the continuous-feature 'univ_C' and binary-feature 'univ' versions are stored here)
21 |   -- models/(teacher model)
22 |   -- result_acc/
23 |   -- rule_tree/(stores the rules.txt produced by bdt_test.py)
24 | 
25 | -- rule2entry
26 |   -- code
27 |     -- rule2entry.py(converts DT rules to ternary entries of the P4 table)
28 |     -- entry2dataplane.py(loads ternary entries and sends them to the P4 data plane)
29 |   -- output
30 |     -- rule_tree/(copy Distillation/rule_tree/*.txt here)
31 |     -- ternary_entry/(P4 table entries generated from rules.txt)
32 | 
33 | -- P4
34 |   -- flowcontrol.p4(implementation of the P4 data plane)
35 | ```
36 | 
37 | ## Run Decision Trees
38 | #### (dyt20@mails.tsinghua.edu.cn)
39 | Train the BDT model; this script outputs the results of sdt (BDT with distillation) and bdt (BDT without distillation):
40 | ```
41 | python bdt_test.py
42 | 
43 | ```
44 | Train the DT model; this script outputs the results of the DT:
45 | 
46 | ```
47 | python dt_test.py
48 | 
49 | ```
50 | ## Run P4 Program
51 | #### (imbaplayer@163.com)
52 | Compile the P4 code (flowcontrol.p4) under the P4 build path of a Barefoot Tofino switch:
53 | 
54 | ```
55 | cd $SDE/pkgsrc/p4-build
56 | ./configure --prefix=$SDE_INSTALL --with-tofino --with-bf-runtime P4_NAME=flowcontrol P4_PATH= P4_VERSION=p4-16 P4C=p4c --enable-thrift
57 | make
58 | make install
59 | ```
60 | 
61 | Run the compiled P4 program:
62 | 
63 | ```
64 | cd $SDE
65 | ./run_switchd.sh -p flowcontrol
66 | ```
67 | 
68 | Convert the rules of the BDT/distilled BDT to ternary entries of the P4 table:
69 | 
70 | ```
71 | cd rule2entry/code
72 | python rule2entry.py
73 | ```
74 | 
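Each line of a generated `ternary_entry` file is `mask value class` in decimal, and an entry matches a packet when `bin_feature & mask == value`. As a hedged illustration with a hypothetical two-condition rule (feature_i is encoded at bit `111 - i` of the 112-bit key):

```
if feature_1=0 if feature_3!=0 then 0
->  mask  = (1<<110) | (1<<108)
    value = (1<<108)            # "feature_3 != 0" forces bit 108 to 1
    class = 0
```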
75 | Load the ternary entries and send them to the P4 data plane:
76 | 
77 | ```
78 | mv rule2entry/code/entry2dataplane.py /root/static_entry/
79 | cd $SDE
80 | ./run_p4_tests.sh -t /root/static_entry/
81 | ```
82 | 
-------------------------------------------------------------------------------- /rule2entry/code/entry2dataplane.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- encoding: utf-8 -*-
3 | '''
4 | @File    : entry2dataplane.py
5 | @Time    : 2021-01-08 16:46:41
6 | @Author  : Guanglin Duan
7 | @Version : 1.0
8 | load ternary entry and send to P4 dataplane
9 | '''
10 | import logging
11 | import random
12 | 
13 | from ptf import config
14 | import ptf.testutils as testutils
15 | from bfruntime_client_base_tests import BfRuntimeTest
16 | import bfrt_grpc.bfruntime_pb2 as bfruntime_pb2
17 | import bfrt_grpc.client as gc
18 | 
19 | ##### ******************* #####
20 | 
21 | logger = logging.getLogger('Test')
22 | if not len(logger.handlers):
23 |     logger.addHandler(logging.StreamHandler())
24 | 
25 | swports = []
26 | for device, port, ifname in config["interfaces"]:
27 |     swports.append(port)
28 | swports.sort()
29 | 
30 | if swports == []:
31 |     swports = list(range(9))
32 | 
33 | # from cls to mac
34 | def cls2mac(tmp_cls):
35 |     return "00:00:00:02:02:0{}".format(tmp_cls)
36 | 
37 | # from mac str to int
38 | def mac2int(mac_str):
39 |     return int(mac_str.replace(':', ''), 16)
40 | 
41 | data_type = ['iot-attack-retest', 'univ', 'iscx']
42 | data_type_index = 2
43 | file_name_list = ['univ_gru.txt', 'univ_lstm.txt', 'univ_rf.txt']
44 | file_name_index = 0
45 | total_entry_number = 1
46 | 
47 | class DecisionTreeTest(BfRuntimeTest):
48 |     def setUp(self):
49 |         client_id = 0
50 |         p4_name = "flowcontrol"
51 |         BfRuntimeTest.setUp(self, client_id, p4_name)
52 |     def get_entry(self):
53 |         """
54 |         @description :load ternary entry from file
55 |         @param       :
56 |         @Returns     :list [[mask, value, cls]...]
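        @example     :(hedged sketch, added) a file line "12 8 1" is parsed
                      into the ints [12, 8, 1], i.e. mask 0b1100, value 0b1000, class 1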
57 | """ 58 | ternary_list = [] 59 | 60 | dec_file_path = "'../output/ternary_entry/{}/{}".format(data_type[data_type_index], file_name_list[file_name_index]) 61 | with open(dec_file_path, 'r') as f: 62 | for line in f: 63 | ternary_list.append([int(x) for x in line.strip().split()]) 64 | return ternary_list 65 | 66 | def my_add_table_entry(self): 67 | p4_name = "flowcontrol" 68 | 69 | # Get bfrt_info and set it as part of the test 70 | bfrt_info = self.interface.bfrt_info_get(p4_name) 71 | 72 | target = gc.Target(device_id=0, pipe_id=0xffff) 73 | 74 | # get table entry 75 | ternary_list = self.get_entry() 76 | 77 | # table tb_packet_cls 78 | logger.info("Insert tb_packet_cls table entry") 79 | tb_packet_cls = bfrt_info.table_get("Ingress.tb_packet_cls") 80 | entry_count = 0 81 | for tmp_mask, tmp_value, tmp_cls in ternary_list: 82 | print(tmp_mask, tmp_value, tmp_cls) 83 | tb_packet_cls.entry_add( 84 | target, 85 | [tb_packet_cls.make_key( 86 | #('key', value, mask) 87 | [gc.KeyTuple('meta.bin_feature', int(tmp_value), int(tmp_mask)), 88 | gc.KeyTuple('$MATCH_PRIORITY', 1)] 89 | # [gc.KeyTuple('meta.bin_feature', int(tmp_mask), int(tmp_value))] 90 | )], 91 | [tb_packet_cls.make_data( 92 | [gc.DataTuple('dstAddr', mac2int(cls2mac(tmp_cls))), 93 | gc.DataTuple('port', 0)], 94 | 'Ingress.ac_packet_forward')] 95 | 96 | ) 97 | entry_count += 1 98 | 99 | logger.info("add entry ok") 100 | logger.info(file_name_list[file_name_index]) 101 | logger.info(entry_count) 102 | 103 | 104 | def runTest(self): 105 | self.my_add_table_entry() -------------------------------------------------------------------------------- /rule2entry/code/rule2entry.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- encoding: utf-8 -*- 3 | ''' 4 | @File : rule2entry.py 5 | @Time : 2021/04/27 16:09:47 6 | @Author : Guanglin Duan 7 | @Version : 1.0 8 | convert if-form rule of DT to ternary entry of P4 table 9 | ''' 10 | import re 11 | import os 12 | 13 | def set_bit_val(byte, index, val): 14 | """ 15 | set one bit of byte 16 | 17 | :param byte: original byte 18 | :param index: position 19 | :param val: target value, 0 or 1 20 | :returns: modified value 21 | """ 22 | # 112bits from left to right 23 | total_bit = 111 24 | index = total_bit - index 25 | if val: 26 | return byte | (1 << index) 27 | else: 28 | return byte & ~(1 << index) 29 | 30 | class Rule2Binary(): 31 | 32 | def get_ternary(self, feature_list, tmp_cls): 33 | """ 34 | @description :get ternary entry, key & mask = value 35 | @param :feature_list 36 | @Returns :list [mask, value, cls] 37 | """ 38 | tmp_mask = 0 39 | tmp_value = 0 40 | for feature_index, equal_str, feature_value in feature_list: 41 | tmp_mask = set_bit_val(tmp_mask, int(feature_index), 1) 42 | if "!" in equal_str: 43 | tmp_value = set_bit_val(tmp_value, int(feature_index), int(feature_value)^1) 44 | else: 45 | tmp_value = set_bit_val(tmp_value, int(feature_index), int(feature_value)) 46 | return [tmp_mask, tmp_value, tmp_cls] 47 | 48 | 49 | 50 | 51 | def parse_file(self, file_path): 52 | """ 53 | @description :load rules and get mask and value of every line 54 | @param :file path 55 | @Returns :list [[mask, value, cls]...] 
56 | """ 57 | 58 | 59 | ternary_list = [] 60 | with open(file_path, 'r', encoding='utf-8') as f: 61 | for line in f: 62 | pattern = re.compile(r'feature_(\d*)(\D*=)(\d)') 63 | matchObj = pattern.findall(line) 64 | tmp_cls = int(line.split()[-1]) 65 | ternary_list.append(self.get_ternary(matchObj, tmp_cls)) 66 | return ternary_list 67 | 68 | def write_ternary_file(self, ternary_list, dec_file_path): 69 | """ 70 | @description :output mask,value,cls to file 71 | @param :ternary_list 72 | @Returns :output files in decimal formats 73 | """ 74 | with open(dec_file_path, 'w', encoding='utf-8') as f: 75 | for data in ternary_list: 76 | f.write("{} {} {}\n".format(*data)) 77 | f.close() 78 | 79 | 80 | def load_ternary(self, dec_file_path): 81 | """ 82 | @description :load ternary entry 83 | @param : 84 | @Returns :list [[mask, value, cls]...] 85 | """ 86 | ternary_list = [] 87 | with open(dec_file_path, 'r', encoding='utf-8') as f: 88 | for line in f: 89 | ternary_list.append([int(x) for x in line.strip().split()]) 90 | return ternary_list 91 | 92 | 93 | 94 | def file_name_walk(file_dir): 95 | file_list = [] 96 | for root, dirs, files in os.walk(file_dir): 97 | for file in files: 98 | if os.path.splitext(file)[1] == ".txt": 99 | file_list.append("{}/{}".format(root, file)) 100 | return file_list 101 | 102 | 103 | def rule2entry(): 104 | root_path = '../output/rule_tree/univ' 105 | save_root = '../output/ternary_entry/univ' 106 | file_list = file_name_walk(root_path) 107 | for input_file in file_list: 108 | file_name = input_file.split('/')[-1].split('.')[0] 109 | print(file_name) 110 | save_file = '{}/{}.txt'.format(save_root, file_name) 111 | rule2binaly = Rule2Binary() 112 | ternary_list = rule2binaly.parse_file(input_file) 113 | rule2binaly.write_ternary_file(ternary_list, save_file) 114 | 115 | def main(): 116 | rule2entry() 117 | 118 | if __name__ == "__main__": 119 | main() -------------------------------------------------------------------------------- /rule2entry/output/ternary_entry/univ/univ_rf_kk0_round3.txt: -------------------------------------------------------------------------------- 1 | 2636713248474718074059157000947200 2636713248474717443344068574707712 1 2 | 2636713248474718074059294439900672 2636713248474717443343656257847296 1 3 | 2636713325845970533916753434510856 2636713325845969903183385627459592 0 4 | 2636713325845970533916753434510984 2636713325845969898679786000089224 1 5 | 2636713325845970533916753434510984 2636713325845969898679786000089096 0 6 | 2636713325845970529413153807140360 2636713248474717443343518818893832 1 7 | 2961231802133144800860051236455944 2636713248474717443343527408828416 0 8 | 2961231802133144800860051236455944 2636713248474717443343518818893824 1 9 | 2962816365383430087611913525528072 2962816365383429456878545718476800 0 10 | 2962816365383430087611913525528072 2961548714783201227477049015271424 1 11 | 2961548714783201858210416822322696 2961231802133144170126674839470080 0 12 | 2638139355399974832153570417051136 2637980899074945672762607708143616 1 13 | 2640991569250488348306937999263232 2640674656600430660240788202455040 1 14 | 2640991569250488348306937999263232 2640991569250487717591162378256384 1 15 | 2640674656600431290956563823461888 2638139355399974201437794796044288 0 16 | 2640516200275402762281376735561216 2639248549675173902164104411348992 1 17 | 2640753884762945555294157367412224 2636792476637231707698704548888576 0 18 | 2640753884762945555294157367412224 2636713248474717443361111004938240 0 19 | 2640674656600431290956563823461888 
2636871704799745972036298092838912 1 20 | 2636713248474718074058607245133312 2636713248474717443343553178632192 1 21 | 2646854994875311096662075052656128 2596148506638666557831891497517056 1 22 | 2646854994875311096662075052656128 2596148429267414102495624316321792 0 23 | 2646854917504058641325807871460864 2596148429267414102495899194228736 1 24 | 2646854917504058641325532993553920 2596148584009919013168158678712320 0 25 | 2646854762761553730652998631163392 2596148429267414102497823339577344 1 26 | 2646854762761553730650799607907840 2596148738752423923840693041102848 1 27 | 2646854453276543909305730883126784 2596148429267414102530808688410624 1 28 | 2646854453276543909270563690907136 2606289634069239937707615121833984 1 29 | 2646854458112247187729080389731840 2606289638904943216166114640789504 0 30 | 2971373011770673914515534945191424 2606289634069239937709796965220352 1 31 | 2971373011770673919019134572561936 2606289634069239942211197569335296 0 32 | 2971373011770673919019134572561936 2606289634069239937707597941964800 1 33 | 2971373011770673914515534945191440 2606289634069239937707597941964816 1 34 | 2971373011770673914513335921935872 2930808187727666664490753962541056 1 35 | 2646854458112247187730179901359616 2606289634069239937708697453592576 1 36 | 2596148430476340552825307557529088 2596148429267414102565993060499456 1 37 | 2596148739961350374170376282310144 2596148739961349743525690959986688 1 38 | 2596148749632756931087409679959552 2596148440147746479097655632855040 0 39 | 2596148749632756931087409679959552 2596148430476339922180622235205632 1 40 | 2758407706096628106102036857095680 2758407706096627475598089023127552 0 41 | 2760943007297084564905030263506688 2758407706096627475035139069706240 1 42 | 2761656060759712943943372159059712 2760943007297083933838132476116992 0 43 | 2761656060759712943943372159059712 2761576832597198048538880827719680 1 44 | 2761022235459598829242623807457024 2761022235459598198175726020067328 1 45 | 2763478308497541023708023669917440 2763478308497540392641125882528000 0 46 | 2763478308497541023708023669917440 2758407706096627475035139069706496 0 47 | 2758407706096628105539224342627840 2758407706096627466028077253918720 1 48 | 2763478308497541023426686132160000 2758407706096627466027939814965248 1 49 | 2764270590122683666802621571663424 2763478308497540383633926627786752 1 50 | 2764349818285197931140215115613760 2763715992985083176646707259637760 1 51 | 2764349818285197931140215115613760 2763636764822568912309113715687424 1 52 | 2764112133797655138127434483762752 2764112133797654498334674979389440 1 53 | 2763478308497541023426686132160064 2763478308497540383633926627786816 1 54 | 2758407706096628105820699319338496 2758407706096627466309414791675904 1 55 | 2758407708514479736325195463394816 2596148431685265742429669619138560 0 56 | 2758407708514479738576995277080064 2596148429267414103200411269726208 0 57 | 2758407708514479738576995277080064 2596148429267414105452211083411456 1 58 | 2758566163630582450170827980670464 2596148429267414102918936293015552 1 59 | 2758566163630582450170827980670465 2596306885592442631594123380916225 1 60 | 4056640378264289357303452062975617 2596306885592442631594123380916352 1 61 | 4056640378264289357303589501929153 2596306885592442631594260819869760 0 62 | 4076922787867941027727536753215169 2596306885592442631594260819869696 1 63 | 4076922787867941027727536753215169 2616589295196094302018208071155712 0 64 | 4056640378264289357303589501929089 2596306885592442631594123380916224 1 65 | 4056640378264289357303452062975489 
3894381100226149538726747463221248 1 66 | 2758407707305553921495640892769792 2596148430476339922533565467721728 1 67 | 2758407706096628101881011718063616 2596148429267414107422535920386048 1 68 | 2758407706096628097377412090693120 2596148429267414102637461316304896 1 69 | 2758407707305553988205485251102336 2596148430476339922250990979383424 1 70 | 2758407707305553988205485251102336 2596148430476339922250990979383296 0 71 | 2758407707305553988205485251102208 2596148429267414102636361804677120 0 72 | 2758407706096628168590856076396037 2596148429267414102636636682584064 0 73 | 2758407706096628168590856076396037 2596148429267414102636636682584065 0 74 | 2758448558117924588391727561180676 2596189281288710520185708353683460 1 75 | 2758448558117924588391727561180676 2596188043348671234805433454559236 0 76 | 2758447320177885303011452662056452 2596148429267414102636636682584068 0 77 | 2758407706096628170842655890081284 2596148429267414104888436496269316 1 78 | 2759041531396742283291329550091792 2596782254567528289394704194207744 1 79 | 2761576832597198742094322956502544 2596148429267414174693955842605056 1 80 | 2761576832597198742094322956502544 2598683730467870633496949249015808 0 81 | 2758407706096628168590581198489104 2596148429267414174693955842605072 0 82 | 3427885679342161749197334029272576 669477973245533941036560020406272 1 83 | 3752404233000588484987689304589824 344959419587107214253403999830016 0 84 | 3757474835401501403156626070832640 20440865928680488033197932675072 1 85 | 3757474855953240336605322040837632 20440867137606307084877153959936 1 86 | 3757474855953240336605322040837632 20440865928680487470247979253760 1 87 | 3757477330624393087751242664379904 20440885271493601304314774552576 1 88 | 3757477330624393087751242664379904 20443361151572172064864572801024 1 89 | 3757474835401501402593676117411328 25511468329593405076234792075264 1 90 | 3427885681760013397438189679937024 20440865928680496477447233994752 1 91 | 3427885681760013397438189679937024 20440868346532135706705583407104 0 92 | 3427885679342161758208931330524672 20440865928680496481845280505856 0 93 | 2819413545975116547151450853082624 40723275532332157894195230539776 1 94 | 2819413545975116547151588292036096 40723430274837068566867031883776 0 95 | 2819413545975116547151588292036096 40723430274837068566729592930304 1 96 | 2819413391232611636478916490692352 158456325028817046300727968000 1 97 | 2819730303882668693829290666493841 475368975085874396674903769089 0 98 | 2819730303882668693829290666493841 475368975085874396674903769217 0 99 | 2819731541822707979209565565618001 475368975085874396674903769152 1 100 | 2819731541822707979209565565618001 476606915125159776949802893376 1 101 | 2820997954482896923230787369699153 475368975085874396674903769088 0 102 | 2820997954482897067345975445555025 1743019575314247913359682830336 1 103 | 2820998733031124899167163956332369 1743024411017382256688305799168 0 104 | 2820998820073783911420464535177041 1743096946566559134438788169728 0 105 | 2820998820073783911420464535177041 1743019575314103798171606974464 0 106 | 2820998742702531456084197353981777 1743029246720660715205004623872 1 107 | 2820998728195421620708647257507665 1743174317819014470705969364992 0 108 | 2820998573452916710036112895117137 1743638545333746488309056536576 0 109 | 2820997954482896923230787369699088 475368975085874396674903769104 0 110 | 2820997954482896923230787369699156 1743019575314103798171606974544 1 111 | 2820998031854149378567054550894421 1743019575314103798171606974480 0 112 | 2820998031854149378567054550894421 
1743019575314103798171606974481 1 113 | 2820998031854149378567054550894428 1743096946566559134438788169752 0 114 | 2820998186596654289239588913284956 1743251689071469806973150560272 1 115 | 2820998186596654289239588913284956 1743096946566559134438788169744 0 116 | 2820997954482896923230787369699092 1743019575314103798171606974484 1 117 | 2819730303882668696081090480178944 158456325028817046300727967744 0 118 | 2819730303882668696081090480178960 158456325028819298100541653008 0 119 | 2819730303882668696644040433600272 158456325028819861050495074304 0 120 | 2819740207402982979686239626594064 168359845343102340299734646784 0 121 | 2819740207402982979686239626594064 158456325028819298100541652992 0 122 | 2789306844219696099428922824852992 172875850606125073000226539700224 1 123 | 3438343951536549552995234866005504 821912957922978526566607300329472 1 124 | 3438343951536549552995234866005504 172875850606125073000295259176960 1 125 | 2789306844219696099428854105376256 162734645804299237788252914057216 1 126 | 2779165639417870264216880479733248 162417733154242180437878738255872 1 127 | 2778848726767813206866540663670272 182700142757893850861860349280256 1 128 | 2778848726767813206866557843539456 182700142757893850861843169411072 1 129 | 2778848726767813206866557843539456 182700142757893850861825989541888 1 130 | 3083124524798397442677274610501120 486976095530982997345128658501632 0 131 | 3083124524798397442677274610501120 486976094322057177730499483795456 0 132 | 3083124523589471623062645435794944 486976055636430950062365893197824 1 133 | 3083124484903845395394511845197312 486936441555173817893569121222656 1 134 | 3083084870822588263226264829036032 162417887896747091110962856460288 0 135 | 3083084870822588263226264829036032 162417887896747091110413100646400 1 136 | 2758566162421656630273628612593152 162417733154242185504428319047680 0 137 | 2799130981628959971121523115165184 162417733154242181000828691677184 1 138 | 2799130981628959971121523115165184 202982552361545521848723194249216 1 139 | 2758566162421656625770028985222656 162417733154242181000832986644480 0 140 | 2596307040334948244545625126864449 360428707677995009 1 141 | 2596307040334948244545625126864449 154742505271101242040385537 0 142 | 2596306962963695789209357945669201 77371252815764974859190353 1 143 | 2596306962963695789209357945669201 77371252815764974859190337 0 144 | 2596306962963695789209357945669185 360428707677995073 1 145 | 2596306885592443333873095059441153 288371117935034369 1 146 | 2596306885592443336131491942893441 288371113640067073 1 147 | 2596306885592443336131491942893441 288371113640067201 1 148 | 2596308280692839171413559553820545 1240357891212985044935114881 1 149 | 2596308280692839171413559553820545 1395100396123657579297505409 1 150 | 2596308125950334260741025191430017 1237940039573755786585702529 1 151 | 2596308123532482621511766842017665 1237940039573755786585702401 1 152 | 2596308123532482621511766842017537 288375511686578177 1 153 | 2596306885592443336127093896382209 290622913453752321 1 154 | 2596306885592443333875294082696961 288371113640067329 1 155 | 2596306885592443333875294082696961 288373312663322881 1 156 | 2758566162421656697266872092985089 288373312663322625 1 157 | 2758883075071713754617246268786497 162259276829213651764890673610753 0 158 | 2758883075071713754617246268786497 162259276829213651764890673610817 1 159 | 2758883075071713754617246268786433 162576189479270709115264849412097 1 160 | 2840330941447669171890303382062721 162893179500580221799707183153152 0 161 | 
2840330941447669171890303382062721 162893102129327766463440001957888 1 162 | 2840330864076416716554036200867457 244022740543934448159229007101952 1 163 | 2759201225661810034858247195723393 162893102129327766463440001958016 1 164 | 2759201225661810034858247195723265 162893102129327767026389955379200 0 165 | 2759201225661810034295297242301953 162894340069367051843714901082112 1 166 | 2840329630972080709073726093657665 162259276829213651767089696866304 1 167 | 2840329630972080709073726093657793 162259276829213651762691650355200 0 168 | 2840329630972080709073726093657793 162259276829213651762691650355328 1 169 | 2840329630972080709069328047146689 162259281664916930221208349179904 0 170 | 2840329630972080709069328047146689 162259281664916930221208349180032 1 171 | 2840349433177005996695347173262915 243388915243820333458618094452736 0 172 | 2840349433177005996695347173262915 243388915243820333458618094452738 0 173 | 2840349433177005996704143266285249 243388915243820333467276748521600 1 174 | 2840349510548258452040410447480513 243388915243820333467276748521472 1 175 | 2840349510548258452040410447480513 243388992615072788803543929716736 0 176 | 2840354384937163138225242862782017 243393867003977474979580251996160 0 177 | 2840354384937163138225242862782017 243388915243820333458480655499264 1 178 | 2840349433177005996695347173262945 243408722284448899543016480440352 0 179 | 2840349433177005996695347173262945 243408722284448899543016480440320 1 180 | 2840349433177005996695347173262913 243408722284448899542879041486848 0 181 | 2759244592248811250273327180154433 162298929596097011599896890835008 0 182 | 2759244592248811250273327180154433 162259315514839879431100118859840 1 183 | 2759204978167554118104530408179265 162259315514839879430825240952896 1 184 | 2759204978167554118104264120206913 162259276829213651762700240289856 1 185 | 2759204997510367231938330915505733 162259276829213651762691650355264 1 186 | 2759205152252872142610865277896261 162259296172026765596758445654080 1 187 | 2759205152252872142610865277896261 162259450914531676269292808044608 0 188 | 2759204978167554118104264120206917 162259276829213651762691650355268 1 189 | 2759207420197709739655188436747969 162264228589370793283791246852160 0 190 | 2759207420197709739655738192561857 162264233425074071742857701490752 0 191 | 2759207420197709739655738192561857 162264233425074071742307945676864 1 192 | 2759207415362006461196671737923265 162264228589370793283791246852288 1 193 | 2759207415362006461196671737923137 162266704469449364044341045100608 0 194 | 2758566162421656634214273991575297 288371113640067328 1 195 | 2839695800836263315913361531602817 288371113640067200 1 196 | 2839695800836263315913361531602849 288373312663322752 1 197 | 2839695800836263315913361531602851 288373312663322786 1 198 | 2839695878207515771249628712798115 77371252743709579844518048 1 199 | 2839695916893141998917899742349219 288373450102276256 1 200 | 2839695921728845277376416441173923 4835703566831829362147488 1 201 | 2839695921728845277376416441173923 288373312663322784 1 202 | 2839695916893141998917762303395747 38685626516041446253920416 1 203 | 2859978519924924807680178484414337 288372213151694976 1 204 | 2859978597296177263016445665609601 309485010109717281876476032 1 205 | 2859979836445142368011349739440001 1624796301850433823956795520 1 206 | 2859979836445142368011349739440001 1626005227670048453131501696 1 207 | 2859979835236216548396720564733825 386856262565053549057671296 1 208 | 2859978210439914986335109759633281 20282409603651958796160402980992 0 209 | 
2839695800836263315910062996719489 81129638414606970066902645211264 1 210 | 2758587207402324557737091070429061 288371663395880964 1 211 | 2758597110922638841905190170265477 11141460353856794137487998980 1 212 | 2758597343036396207914000303785861 1315311292029088214066135044 0 213 | 2758597343036396207914000303785861 1315311292029088205476200452 1 214 | 2758597343036396207913991713851269 1237940039573751938295005188 1 215 | 2758597265665143752577724532656005 1392682544484424472657395716 1 216 | 2758587207402324558862990977271685 1237940039574877838201847812 1 217 | 2758593397102520984673649938139013 7427640236000688497162715136 1 218 | 2758593397102520984673649938139013 7427640236000653312790626304 1 219 | 2758593479309476718442045539092373 4951760157429892762992377856 0 220 | 2758593479309476718442045539092373 5029131409885229030173573120 1 221 | 2758593401938224263105778357897109 4956595860708351279691202560 1 222 | 2758633011183778116816058431047573 4951760157429901559085400064 1 223 | 2758633013601629756045316780459925 44565841414562070355857375232 1 224 | 2758633129658508439049717552252821 44645630518656635881387982848 1 225 | 2758633284401013349722251914643349 44839058649794976549340971008 1 226 | 2758633303743826463556318709942165 44684316144884304014978580480 1 227 | 2758633303743826463556318709942165 44703658957998138081773879296 1 228 | 2758633095808585489840100660479893 44568259266201299614206787584 1 229 | 2758633250551090400512635022870421 44727837474390430665268002816 1 230 | 2758633269893903514346701818169237 44573094969479758130905612288 1 231 | 2758633269893903514346701818169237 44592437782593592197700911104 0 232 | 2758593397102520984638465566050197 4951760157429892762992377872 1 233 | 2758591656249340739572453989156741 116056878971376064167673856 1 234 | 2758591656249340739574653012412293 38685626516039796986478592 1 235 | 2758591812200771469861816549508997 193428131426714530372124672 1 236 | 2758591836379287862154400043632525 58028439629876062805032968 1 237 | 2758591836379287862154400043632525 62864142908334579503857672 0 238 | 2758601735063898866738082537801613 9961548753912918261998026752 1 239 | 2758601735063898866738082537801613 58028439629876062805032960 0 240 | 2758591831543584583695883344807813 38685626516041996009734144 1 241 | 2758631273957375330587337308506005 42312403974885883533852688 0 242 | 2758631273957375330587337308506005 39894552335656625184440336 0 243 | 2758631271539523691358078959093653 39894552335656625184440320 1 244 | 2758641175059837974400312511825805 39653975809467825421956415496 1 245 | 2758641179895541252858829210650509 49562331827029326137848233992 0 246 | 2758641179895541252858829210650509 49557496123750867621149409288 1 247 | 2758631271539523691358113318832013 39653975809467825456316153864 1 248 | 2758631271539523691358078959093645 39653975809467825421956415488 1 249 | 2758591578878088284245120339937157 288371663395880960 1 250 | 2758591578878088284245120339937157 288371800834834432 0 251 | 2758591578878088284244982900983685 288380459488903168 1 252 | 2758601443712776339610252410357653 10522490334214104000038436880 1 253 | 2758641057794033471787845275355029 50136571591346272796810412032 1 254 | 2758642472237242420906178704836501 51396272295384727391970525184 1 255 | 2758642472237242420906178704836501 51551014800295399926332915712 1 256 | 2758642317494737510233644342445973 50158332256099347117071400960 1 257 | 2758641079554698224853369443321749 50155914404460117858721988608 1 258 | 2758641396293262963886213216339861 
50446056601167628860651470848 1 259 | 2758641401128966242344729915164565 50141407294624742308625514496 1 260 | 2758641401128966242344729915164565 50136571591346283791926689792 1 261 | 2758641086808253142541144491558805 50146242997903200825324339200 1 262 | 2758641057794033471790044298610581 50136571591346281592903434240 1 263 | 2758641057794033471779049182332821 10522490334214104000038436864 1 264 | 2758601443712776339619048503379845 618970019931070596938465280 1 265 | 2758601443712776339619048503379845 618970019931061800845443072 0 266 | 2758903036854847240379724709496705 154742505271101791796199424 0 267 | 2759536862154961355080473061099393 633825300114475130005785411584 1 268 | 2759536862154961359584072688469889 364932857061179392 1 269 | 2759536862154961359584072688469897 360429257433808896 0 270 | 2760804512755189588985569391675273 360429257433808904 1 271 | 2760804532098002702819636186974089 1267650600228589830754137014280 0 272 | 2760804533306928522434265361680265 1267669943041703664820932313096 0 273 | 2760804533306928522434265361680265 1267671151967523279450107019272 0 274 | 2758902882112342329707190347106177 316912650057417779631609610240 1 275 | 2758590921222442341820390449350537 19807040628854456061781868552 1 276 | 2758592159162481627200665348474761 19807040628854456061781868544 1 277 | 2758592780550352909120061147449257 21666368539421755732479967264 1 278 | 2758592780550352909120061147449257 21663950687782526474130554912 1 279 | 2758592778132501269890802798036905 21663950687782526474130554880 1 280 | 2758592778132501269890802798036873 21044980668139836336680992768 1 281 | 2758590930893848898737423846999937 24768472192552894194776014848 1 282 | 2758600834414163181797215226038145 24758800785995994753564409856 1 283 | 2758600911785415637133482407233409 24758800785995977161378365440 1 284 | 2758603387665494207894032205481857 24836172038451313428559560704 1 285 | 2758603387665494207894032205481857 27312052117022073978357809152 0 286 | 2758600834414163181779623039993729 34662321100279019360571359232 1 287 | 2758585969462285200299290852853633 19807040628854456130501345280 1 288 | 2768751937900600021856719776778113 4951760157429892213236563968 1 289 | 2768753175840639307236994675902337 6194535899993731004834512896 1 290 | 2768753175840639307236994675902337 4956595860708350729935388672 0 291 | 2768751933064896743398203077953409 4951760157429892247596302336 1 292 | 2768752167596505748640901535631233 288371388517974016 0 293 | 2768752188148244682089597505636225 97922991677156351669174272 0 294 | 2768752188148244682089597505636225 96714065857541722494468096 0 295 | 2768752186939318862474968330930049 77371252743707655699169280 1 296 | 2768752090225253293304634354435969 2417851927600646867386368 1 297 | 2768752087807401654075376005023617 288375786564485120 1 298 | 2768752706777421296761115408074625 618970019931061251089629184 1 299 | 2768752706777421296761115408074625 288371113640067072 0 300 | 2768752087807401654070703080605569 154742505199043648002457600 1 301 | 2768746981304739601877069121718209 39614081257420539910412042304 1 302 | 2768756923510680112587401905309633 39614081257420539910412042240 1 303 | 2768777194778823410689437738207169 59614550017124964976751017984 1 304 | 2768777194778823410689437738207169 59614550017124965011110756352 1 305 | 2768777194778823410689678256375745 59459807512214292717266534400 0 306 | 2768782147747906371825407027578817 64411567669355813541985124352 1 307 | 2768782147747906371825407027578817 59459807512214292442388627456 0 308 | 
2768782147747906371825407027578817 59461016438033907071563333632 1 309 | 2768782147747906371825407027578817 64412776595175428171159830528 1 310 | 2768777040036318500016869016078273 39652766883648208044002639872 1 311 | 2768758470935729219312745529214913 39962251893469553112727420928 1 312 | 2768758470935729219312779888953281 41200191932754933387626545152 0 313 | 2768758470935729219312779888953281 41200191932754933421986283520 1 314 | 2768756884825053884991836082145217 49517601571703582109605036032 1 315 | 2768756884825053884991836082145217 49517601571703652478349213696 1 316 | 2768761836585211026442566934464449 49517601571703584308628291584 1 317 | 2768761935717128234842159260370881 54471779580484334666574200832 1 318 | 2768781742757756800926557646358465 74276402357411189806610776064 1 319 | 2768781747593460079385074345183169 54474197432123563924923613184 1 320 | 2768781747593460079385074345183169 54469361728845105408224788480 1 321 | 2768761933299276595612900910958529 54546732981300441675405983744 1 322 | 2768761855928024140276633729763265 54488704541958939475020087296 1 323 | 2768707988611353752208210288183233 288670180802822144 1 324 | 2768730271532060889053158472419265 19809458480493983837538222080 1 325 | 2768730271532060889053433350326209 2417851927899439152234496 1 326 | 2768769904956131135056331277338561 39616499109060068545161854976 1 327 | 2768769943641757362724464867936193 2417851927899748389879808 0 328 | 2768769943641757362724464867936193 41103478155567881980477440 1 329 | 2768730290874874002887534505363393 21760665041733815185178624 1 330 | 2768730271532060889053467710064577 2417851927899714030141440 0 331 | 2768750078572689455139755881662401 42092379187630830984745713664 0 332 | 2768750078572689455139755881662401 42092379187630828785722458112 1 333 | 2768750078572689455137556858406849 2478297930498659988950482944 1 334 | 2768707986193502112978951938770881 288670180802822208 1 335 | 2768777319298182831576750512670593 40239095906161602260897890304 1 336 | 2768782271058339973097850109167489 45189647137483508731319681024 0 337 | 2768782271058339973097850109167489 40237886980341987631723184128 1 338 | 2768777318089257011962121337964417 40240304831981216890072596480 0 339 | 2768777318089257011962121337964417 626223574849048093300621312 1 340 | 2768780113125751960984773258643329 3418842218158841486871887872 0 341 | 2768780113125751960984773258643329 3109357208337496418147106816 1 342 | 2768779803640742139639704533862273 633477129766735868348858368 1 343 | 2768777327760663568879154735613825 623805723209818834951208960 1 344 | 2768737701590148240564066216576897 20430846351775903233337196544 1 345 | 2768722884995303043673299064194945 5570730177072885815895392256 1 346 | 2768722887413154682902557413607297 621387871570593974648307712 1 347 | 2768722887413154682902557413607297 618970019931364716298895360 1 348 | 2768717933235145902152199467698049 657655646159032849889492992 1 349 | 2768717894549519674484065877100417 618970019931360318252384256 1 350 | 2768719441974568781205286332401537 12074751086599586652646014976 0 351 | 2768719444392420420434544681813889 12072333234960357394296602624 0 352 | 2768719444392420420434544681813889 12069915383321128135947190272 0 353 | 2768719437138865502746769633576833 11760430373499783067222409216 1 354 | 2768738934694484247486099294783361 30329530962780487190709272576 1 355 | 2768738934694484247486099294783361 10522490334214402792323284992 1 356 | 2768717889713816396021426009671553 10522490334214402517445378048 1 357 | 2768715413833737825260601333516161 
3094850098502683818004054016 1 358 | 2768725317354052108302800526509953 10522490334214965467398799360 0 359 | 2768725317354052108302800526509953 618970019931923268205805568 0 360 | 2768712940371510893729309884680065 289233130756243456 0 361 | 2768713269199333828908445404759937 21760665042296455900954624 1 362 | 2768713269199333828908445404759937 2417851928462389105655808 1 363 | 2768713249856520715074378609461121 311902861749807457830436864 0 364 | 2768712318983639611809914085705601 4951760157430754230352740352 1 365 | 2768759401808610322594802239014849 42131064813858479327127011392 0 366 | 2768759401808610322594802239014849 39655184735287718777328762944 0 367 | 2768757235413541573179321165547457 39962251893469834587704131648 1 368 | 2768757854383561215869458615109569 39652766883648489518979350592 0 369 | 2768757854383561215869458615109569 40271736903291179656428912704 1 370 | 2768756938017789947980544187828161 39623752663977738418786402368 1 371 | 2768756938017789947980544187828161 39628588367256196935485227072 0 372 | 2768756933182086669522027489003457 39614081257420821385388752960 1 373 | 2768756884825053884936860500756417 49517601571703863584581746752 1 374 | 2768752861519926207450967078602689 39923566267242166454113533952 1 375 | 2768753016262431118123501440993217 39614081257420821385388752896 0 376 | 2768753025933837675040534838642625 39768823762331493919751143424 0 377 | 2768753028351689314269793188054977 39780913020527640211498205184 1 378 | 2768753028351689314269793188054977 39778495168888410953148792832 1 379 | 2768752552034916386105898353821633 44565841414562342484985249792 0 380 | 2768747642587162931096819872040897 40272945829110794285603618816 0 381 | 2768747642587162931096819872040897 40234260202883126152013021184 0 382 | 2768747603901536703428686281443265 40233051277063511522838315008 1 383 | 2768747602692610883814057106737089 40235469128702740781187727360 0 384 | 2768747600274759244584798757324673 288652588616777728 0 385 | 2768747605110462523043315456149377 618970019931342726066339840 1 386 | 2768747605110462523043315456149377 623805723209801242765164544 0 387 | 2771242668423938928229790779443137 10141204801826123583087265710080 1 388 | 2771242668423938928229790779443137 12676506002282582386080672120832 0 389 | 2768707367223482469426797373032385 10141204801826123583087265710144 1 390 | 2596308278274987466875505242801669 154742505208050847257198592 0 391 | 2596308278274987466875505242801669 154742505208050847257198596 0 392 | 2596308278274987466875505242801697 1392682544493431122156322816 1 393 | 2596308278274987466875505242801697 1392682544493431122156322848 0 394 | 2601377643944786918715846331205121 5070603609839034598928882335744 0 395 | 3250414751261640372282158372357633 649037108525779570559254110666752 0 396 | 3250414751261640372282192732096001 1208926116992976429252608 1 397 | 3251048576561754486982941083698689 633826509040231693690421116928 1 398 | 3251048576561754486982941083698689 1208926116992942069514240 0 399 | 2596307041543874001109859518383617 297378312894808064 1 400 | 2598683730467871192506252998477376 2535301200456747596319511543872 1 401 | 2598683730467871192506252998477376 288793326105133120 0 402 | 2598693672673811703216723221022307 288230376151711843 1 403 | 2598693672673811703216723221022311 38685626515898509742309479 0 404 | 2598693672673811703216723221022311 38685626515898509742309475 1 405 | 2598693633988185475548589630424675 288230376151711842 1 406 | 2598701061628421187830239025170027 9903520314571272575344705642 1 407 | 
2598701061628421187830239025170027 12379400393142033125142954090 1 408 | 2598698585748342617069689226921579 14855280471712793674941202538 0 409 | 2598693643659592032465623028074091 9903520314571272575344705643 0 410 | 2598693643659592032465623028074091 9913191721128189608742355051 1 411 | 2598693633988185475548589630424682 9903520314571272575344705634 0 412 | 2598683730467871192506390437431282 288230376151712082 1 413 | 2598683749810684306340457232730098 288230376151711826 0 414 | 2598683749810684306340457232730106 19342813402064442947010650 0 415 | 2598683749810684306340457232730106 19342813402064442947010642 1 416 | 2598683807839123647842657618626298 77371252743566643332907082 1 417 | 2598683809048049467457286793332474 77371252743566643332907074 0 418 | 2598683809048049467457286793332474 78580178563181272507613250 1 419 | 2598683807839123647842657618626291 288230376151711811 1 420 | 2598683807839123647842657618626291 288230376151711810 1 421 | 2598683730467871192506390437431010 288230376151711938 1 422 | 2598683730467871192506390437430850 2535301200456747033369558122562 1 423 | 2596148429267414733720989217064550 288247968337756256 1 424 | 2596153390698978432159122211210854 9671406845147409549361248 1 425 | 2596153390698978432159122211210862 288230376151711840 0 426 | 2596153390698978432159122211210862 288230376151711848 1 427 | 2596155856907650446002638611809894 7427640236000512025546457184 1 428 | 2596156166392660267347707336590950 4951760157429751475748208736 1 429 | 2596156166392660267347707336590950 5261245167251096544472989792 0 430 | 2596148429267414733703397031020134 288230376151711808 1 431 | 2596782254567528848404145382622951 288230376151711813 1 432 | 2596782254567528848404145382622951 288230376151711812 1 433 | 2596782254567528848404145382622950 288230376151711940 1 434 | 2596782255776454668018774557328999 288230376151711845 0 435 | 2596782255776454668018774557328999 288230376151711844 0 436 | 2596782410518959578691308919719783 1208926107845005326418020 1 437 | 2596782410518959578691308919719783 155951431018517539688808548 1 438 | 2596782255776454668018774557329255 1208926107845005326418021 0 439 | 2596782255776454668018774557329254 1208926107845005326418276 1 440 | 2596782254567528848404145382622790 633825300114402931124503314500 1 441 | 2606448090394269099842361853216580 288230517885632832 1 442 | 2606448245136774010515445971421132 288230513590665536 0 443 | 2606448245136774010515445971421132 154742505198903047953056064 1 444 | 2606448090394269099842911609030604 288230513590665544 1 445 | 2606448090394269099842911609030596 288231063346479424 1 446 | 2606448090394269099842361853216708 288230513590665664 1 447 | 2606448090394269099842357558249284 288230513590665540 1 448 | 2606448090394269099842357558249408 290482313404350912 1 449 | 2606448245136774010514891920639953 290482313404350784 0 450 | 2606448245136774010514891920639953 290482313404350785 0 451 | 2606448245136774010514891920639952 154742505201154847766741312 0 452 | 2606448168974447374793253914150864 1208926110096942579056976 0 453 | 2606448168974447374793253914150864 78580178565433209760252240 1 454 | 2606448091603194919456986732955600 290482313404350800 0 455 | 2611518692795182015196544557385536 288230513590665280 1 456 | 2611518692795182015198760760510272 5070602400913205838716606611520 0 457 | 2611518692795182015198760760510272 5070602400913205838699426742336 0 458 | 2611519002280191836543812305422145 5070602400913205836500403486785 0 459 | 2611519621250211479233949754984385 5070911885923027181569128267841 1 460 | 
2611519621250211479233949754984385 5071530855942669871706577829953 0 461 | 2611519002280191836543812305422273 5070911885923027181569128267969 0 462 | 2611518692795182015198743580641097 5070602400913205836500403486784 0 463 | 2611518692795182015198760760510281 5070602400913205836500403486792 0 464 | 2611518692795182015198795120248649 5070602400913205836517583355976 0 465 | 2611518692795182015198795120248649 5070602400913205836551943094344 1 466 | 2606537222077097644970900237321792 158456325028816905700678565952 1 467 | 2606537222077097644970900237321792 158456325028816906250434379840 0 468 | 2606537222077097644970350481507904 237684487543081243294222516288 1 469 | 2606457993914583380632756937557568 168359845343099947899871559744 1 470 | 2606289634069240568924991383406272 10141204801826123443311850029248 1 471 | 2606289634069240568924991383406272 10141204801826123443311850029120 0 472 | 2606289634069240568924991383406144 10141204801826123442762094215232 1 473 | 2606289634069240568924441627592256 10141204801826123451558187237440 1 474 | 2606289634069240568915645534570048 10141204801826123442487216308288 1 475 | 2601535944318384780160861734962752 5070602400913205840761011044352 0 476 | 2601535983004011007828995325560384 5070641086539433511093624897536 0 477 | 2601535983004011007828995325560384 5070602400913205842960034299904 1 478 | 2601535944318384780158662711707200 5070602400913277898355048972288 1 479 | 2601537182258423993481343572903488 288234774198222848 1 480 | 2601537182258423993483542596159040 1237940039573615049097347072 0 481 | 2601537182258423993483542596159040 1237940039573617248120602624 1 482 | 2596466579857511077001291026663360 289356310418293120 0 483 | 2596466618543137304669424617260992 1237940039574736585317417344 1 484 | 2596466618543137304669424617260992 1276625665802404718908014976 1 485 | 2596465343126397611235645302245352 1208926108970939592999168 0 486 | 2596465343126397611235645302245352 289356310418292992 0 487 | 2596465341917471791621016127539176 289356310418293000 0 488 | 2596465341917471791621016127539184 289356310418293024 0 489 | 2596465344335323430850274476951536 289356310418293040 0 490 | 2596465344335323430850291656820720 2417851928585568767705392 0 491 | 2596465344335323430850291656820720 2417851928585585947574576 0 492 | 2596465341917471791621016127539136 288230410511450368 0 493 | 2596465341917471791621016127539144 288230410511450504 0 494 | 2596465341917471791621016127539144 288230410511450496 1 495 | 2596465341917471790530300592785216 288230410511450112 1 496 | 2596465496659976701202834955175780 288265594883538944 0 497 | 2596465496659976701202834955175781 288265594883538948 0 498 | 2596465496659976701202834955175781 288265594883538949 0 499 | 2596465496659976701202834955175776 154742505198938129245929472 0 500 | 2596465341917471790531400104413024 288266694395166752 0 501 | 2596465341917471790531400104413168 288265594883539104 0 502 | 2596465341917471790531400104413168 288265594883539120 0 503 | 2596465341917471790531400104413152 288265594883538976 0 504 | 2616747751521123460919063471982532 20282409603651958654323402998144 1 505 | 2616748370491143103609200921544644 288230376151712128 0 506 | 2616748370491143103609200921544660 618970019930920513601274256 1 507 | 2616748370491143103609200921544660 618970019930920513601274240 0 508 | 2596465341917471790495116220696516 288230376151712000 0 509 | 2596465341917471790495133400565573 288230376151712004 1 510 | 2596465516002789815001734558254917 154742505198902910514102533 1 511 | 
2596465516002789815001734558254917 288230376151712005 0 512 | 2596465361260284904329200195864389 19342813402064442947010821 1 513 | 2596465341917471790495133400565716 288230393331581188 0 514 | 2596465341917471790495133400565716 288230393331581316 1 515 | 2596465341917471790495133400565590 288230393331581204 1 516 | 2596470293677628932016232997062487 288230393331581207 1 517 | 2596472769557707502776782795310935 7427640236000512042726326551 0 518 | 2596472769557707502776782795310935 4951760157429751492928078103 0 519 | 2596465341917471790495133400565591 288230393331581206 0 520 | 2597099167217585905231048944387920 288265560523800592 0 521 | 2597099167217585905231048944387920 633825300114402966308875403280 1 522 | 2616748989461162746334522744244048 1237940039573610651050835984 0 523 | 2616748989461162746334522744244048 1237940039573610651051884560 1 524 | 2616748989461162746404891487373136 288230376151711760 0 525 | 2616748989461162746404891487373136 288300744895889424 1 526 | 2616747751521123460954247844071248 20282409603651958654323402997776 1 527 | 2596475245437786073574698809034576 9903520314571274774367961088 0 528 | 2596475245437786073574698809034576 288232575174967296 1 529 | 2596465341917471790532499616040784 288267759547056128 0 530 | 2596465341917471939151287319267152 436849163854938112 0 531 | 2596465341917471939151287319267152 432345564227567616 0 532 | 2596465341917471934647687691896656 432380748599656448 1 533 | 2596465341917471934612503319807824 288230376151711744 0 534 | 2597178395380118616273099419551296 316912650075792324824037064704 1 535 | 2597178395380118616290777504941632 316912650057345580750327513088 1 536 | 2597178395380118616290777504941632 316912650057345598342513557504 1 537 | 2597178395380118616273185318897216 316912650057345580819046989824 1 538 | 2597178395380118616273116599420480 316912650057345580767507382272 1 539 | 3895252610013807076732018536482368 1299024952583878367484491505598464 1 540 | 3895252610013807076732018536482368 1299024952583878367414122761420800 0 541 | 3976382248428413758990757495047760 950737950171460351867423293440 1 542 | 3976382248428413758990757495047760 950737950171460281498679115776 0 543 | 3976382248428413758920388750870096 950737950171460281498679115792 1 544 | 3976382248428413758920388750870080 82080376364778141977287684259840 1 545 | 3895252610013807077224599745726016 950737950171460844448632537088 1 546 | 2596544570079986126885910051030864 396140812571609918348166430720 1 547 | 2596544570079986126885910051030864 396140812571609918382526169088 0 548 | 2596544570079986126885875691292496 396140812571609918348166430976 1 549 | 2596544584587095962279017973810768 396140812571609935936057507840 1 550 | 2596544584587095962279017973810768 396150483978166852969455157248 1 551 | 2596544577333541044591242925573712 396145648274888394452756332544 0 552 | 2596544577333541044591242925573968 396148066126527623711105745152 1 553 | 2596544577333541044591242925573968 396148066126527623711105744896 0 554 | 2596544570079986126903467877336656 396140812571609918343871463424 0 555 | 2597812220680214356287368099530320 396140812571609918343871463440 1 556 | 2597812231560546732819034966853200 1663791412799839319844869636112 1 557 | 2597812231560546732819034966853200 1663792621725658934474044342288 0 558 | 2597812230351620913204414382081616 1663801084206396236878267285520 0 559 | 2597812230351620913204414382081616 1663801084206396236886857220112 1 560 | 2597817172440371497808471990994512 1668743172956980840940171165712 1 561 | 
2597817172440371497808489170863696 1663791412799839319840574668816 1 562 | 2597817191783184611642555966162512 1663810755612953153924549836816 1 563 | 2597817191783184611642555966162512 1663791412799839319857754538000 1 564 | 2637109544029794378406300261287488 396295555076592648472271781888 0 565 | 2637109544029794378406300261287488 396140812571681975937909391360 1 566 | 2637109389287289467734040776803904 40960960019875022824107289870336 1 567 | 2637109389287289476741240031544896 40960960019875022823832411963392 0 568 | 2637109389287289476741240031544896 40960960019875031831031666704384 1 569 | 623805723839918617772688908 618970019948970268281536520 1 570 | 623805723839918617772688908 623805723227428784980361224 1 571 | 20283028573672231895043441428236 306282329855230216 1 572 | 20283029782598051509672616134412 1208926125896959029936136 0 573 | 20283029782598051509672616134412 306282329855229960 1 574 | 20283028573672231895043441427980 306291125948252168 0 575 | 20283028573672231886797104219660 20282409603651976706826862329864 0 576 | 20283028573672231886797104219660 20282409603651976706277106515976 1 577 | 618970020561471645945955852 306280130831974408 0 578 | 618970020561471645945955852 306288926924996616 1 579 | 618970020561462867032802828 306280680587788296 0 580 | 618970020561462867032802828 306280697767657480 1 581 | 154742505829442497986692620 154742505216952665194364928 0 582 | 157160357468671756336104972 306280130831974400 0 583 | 157160357468671756336104972 2417851945509389181386752 0 584 | 918840332368479748 306280405709881344 1 585 | 1208926738456627990496772 306351324209872896 0 586 | 1208926738456627990496772 306350774454059008 0 587 | 162259278038140101849855268226564 306352990657183744 1 588 | 162259278038140101849855268226564 162259276829213669744568667471872 0 589 | 1208926738458277257938436 306350791633928192 1 590 | 1208926738456061054813700 1208926125965403628765184 0 591 | 918841431880107524 306351873965686784 0 592 | 927776888001136260 306280130831974404 1 593 | 927776888001136260 315287330086715396 0 594 | 918769688746395268 306280130831974532 1 595 | 649037107316854372406644409632320 306280268270927872 1 596 | 649037107316854372406644409632322 306350637015105538 1 597 | 649354019966911434261167968618050 306351186770919424 0 598 | 649354019966911434261167968618050 310854786398289920 0 599 | 649354329451921251102637066028610 309485010127695705739886592 0 600 | 649354950839792533022032865003074 618970019949040774464667648 1 601 | 649354950839792533023132376630850 306350637015105536 0 602 | 649354989525418760691265967228482 38685626534019870117330944 0 603 | 649354990734344580305895141934658 1208926125966365701439488 1 604 | 649988816034458695006643493537346 633825300114421052484878336000 1 605 | 649988893405711150342910674732610 306351736526733312 1 606 | 649988893405711150342910674732610 77371252761688003707928576 0 607 | 649354486612277801004429777831490 2417851945579895364517888 0 608 | 649354563983530256340696959026754 234531609311588696908103680 0 609 | 649354563983530256340696959026754 157160356856252429726908416 0 610 | 649354019966911429757568341247554 316912650057363701011190906880 1 611 | 649354019966911429757568341247554 316912650057363701560946720768 0 612 | 1947111321950561279468904042726976 649037107316853759846580312080384 1 613 | 1947745147250675394169652394329664 1947111321950560666979204394385408 1 614 | 1947745147250675394169652394329664 1947745147250674781679952745988096 0 615 | 649037107316854372336279960421952 649037107316853759846584607047680 0 
616 | 5149849906241214547627956241984 5070602400913223885980205842432 0 617 | 5149849906241214547627956241984 5070621743726337720047001141248 1 618 | 5149830563428100713561160943168 306279993393020928 0 619 | 79305533767638443841529316928 77371252761616277754085376 1 620 | 79305533767638443841529316928 306280010572890112 0 621 | 5149830563428100713543981073984 5149830563427488223573749792768 1 622 | 167409107392641464105121991362112 79228162514570617586936971264 1 623 | 167409107392641464105121991362112 162338504991727934009164947259392 1 624 | 918769688746395200 306279993393020992 1 625 | 918769551307441664 306244809020932096 1 626 | 918744537417909776 306245092488773632 1 627 | 1298074214633707825877728435897872 306253888581795840 1 628 | 1300609515834164284680721842308624 2535301200456765057431744020480 1 629 | 1300609515834164284680721842308624 306254438337609728 0 630 | 1298074219469411104335695378908688 4835703584712422460489728 0 631 | 1298235151674518203771432265057808 158456325028834929092849565696 0 632 | 1298235151674518203771432265057808 160932205107405689642647814144 1 633 | 1298235151674518203771432265057808 2475880078877014455559913472 0 634 | 1299502802274746433172928968263184 306253905761665024 1 635 | 1299502802274746433172928968263184 1267650600228535655402464870400 0 636 | 1298074214633707825879360523470352 1298074214633707213388711687356416 0 637 | 1298074214633707825879360523470352 1298074214633707213386512664100864 1 638 | 1298074214633707825939283907184272 1298074214633707213378816082706432 1 639 | 1298074214633707825948080000206480 1298074214633707213449184826884096 1 640 | 1303144817034620743554066813027984 1303144817034620131063967732727808 0 641 | 1303144817034620743554066813027984 1298074214633707213457980919906304 0 642 | 1298074214633707834876114417747600 1298074214633707222386565093261312 0 643 | 1298074214633707834876131597616784 1298074214633707213379383018389504 0 644 | 1298074214633707834878330620872336 1298074214633707213381564861775872 1 645 | 1298074214633707834878330620872336 1298074214633707213379365838520320 0 646 | 1298074214633707825868365407192720 1298074214633707213378816082706560 0 647 | 4543264702978132235221093928928912 649037107316853759812504041553920 0 648 | 4543264702978132235221093928928912 649037107316853759812572761030656 0 649 | 4543264702978132235221025209452176 649037107316853759812504041554048 0 650 | 4543264702978132239724624836822544 306246192000401408 0 651 | 4868100169286616023858155033200144 324835466308484094883321824149504 0 652 | 5111489084530436068945522048632336 567907468902247082620314663780352 0 653 | 5115292036331120757150012158248464 409451143873718407433226763108352 0 654 | 5115292036331120757150012158248464 408183493273490178031730059902976 0 655 | 5114024385730892527748515455043088 405648192073033719228736653492224 0 656 | 4949229807701222705553944038344208 324518553658427037532947648348160 0 657 | 4874359194125242906528045005276688 237684487543103762572259622912 0 658 | 4874993019425357021228793356879376 792281625142954125727067275264 0 659 | 4874993019425357021228793356879376 158456325028839424978715672576 1 660 | 4874279965962728642190451461326352 310749791627771904 0 661 | 4874121509637700113515264373425680 5070602400913228355778440593408 0 662 | 4869050907236787195909277560604176 1267650600228540151288330977280 0 663 | 3894227595661278781672442793297424 2596148429267414120511440165011456 1 664 | 3894227595661278781742811537475088 2596148429267414120529032351055872 1 665 | 3894227595661278781742811537475088 
2596148429267414120599401095233536 0 666 | 3904368800463104616866824232896144 2606289634069239955723551229607936 0 667 | 4553405907779958074936735901419152 2596148429267414120511577603964928 0 668 | 4715982097259228495679237843322512 3245502449234324631428263820918784 1 669 | 4715982097259228495679237843322512 3245502449234324631428813576732672 0 670 | 4736264506862880166102635338794640 3407761726063537994819841831206912 0 671 | 4741335109263793083708622151616144 3428044135667189665243789082492928 0 672 | 4741335109263793083708622151616144 3433114738068102582849775895314432 0 673 | 4553722820430015132287110077220496 3245185536584267574077889645117440 0 674 | 3904368802880956260599682209678992 2596148429267414125015177231335424 0 675 | 3904368802880956260599682209678992 2596148431685265764244435580747776 1 676 | 3894227595661278781654850607253136 2596148429267414120511577603965056 0 677 | 1298079166393864967389465003689488 4951760157447767291596898304 1 678 | 919860541720102416 306245092488773648 1 679 | 919860541720102808 307370992395616408 1 680 | 919860541720102808 307370992395616664 0 681 | 919860541720102552 307370992395616400 0 682 | 1237940040205240816619226768 1237940039592751267294740496 1 683 | 1237940040205240816619226768 307370992395616272 0 684 | 918875379301615104 306244817610866688 1 685 | 918875379301615120 306385555099222032 1 686 | 39614081258051044176073590320 39614081257438554351871197184 1 687 | 39614081258051044176073590320 39614081257438554351871197216 0 688 | 1298113828714964958187795272173072 1298074214633707213518179181527040 1 689 | 1298113828714964958187812452042256 1298074214633707213520378204782592 1 690 | 1298113828714964967195011706783248 1298074214633707222527594639392768 0 691 | 1298113828714964967195011706783248 1298074214633707213520395384651776 1 692 | 1310790334717247252200563280971280 1310750720635989507541942306603008 1 693 | 1310790334717247252200563280971280 1308215419435533048738948900192256 0 694 | 1957292140833644246963950635189776 1947111321950560667093356035178496 0 695 | 1957292140833644246963950635189776 1298074214633707213527043994025984 0 696 | 1308255033516790793397638594037264 1298074214633707213526975274549248 0 697 | 1298113906086217413513067337090576 306385555099222016 0 698 | 3894262335353631227787111594722832 2596148506638666575987070445027328 1 699 | 3894579248003688285137485770524176 2596465419288723633346240713850880 0 700 | 3894579248003688285137485770524176 2596148506638666575995866538049536 1 701 | 3894262335353631227778315501700624 77371252761721822280417280 1 702 | 39614081258050976904000833024 306317411148103680 1 703 | 39614081258050985700093855232 306324008217870336 1 704 | 39923566267872330768818636288 309485010127660280849629184 0 705 | 39923566267872330768818636288 306315212124848128 1 706 | 39691452510506310972158772736 306315220714782720 0 707 | 324558245110937233094128179348992 77371252761651487895977984 1 708 | 324558245110937233094128179348992 324518631029679488434643916554240 0 709 | 81169252495864732681480509064704 81129638414606988022000541237248 1 710 | 81169252495864732681617948018176 306326211536093184 1 711 | 770771179020021527095824491742720 689601926524157100740555518771200 1 712 | 770771179020021527095824491742720 40564819207303647174243477618688 1 713 | 121734071703168073529512450590208 306326348975046656 0 714 | 39614081258059992890758661632 306324012512837632 1 715 | 39614081258059992890758661632 315331211767578624 1 716 | 39614081258050983492480665088 306315216419815424 1 717 | 
39618916961329433208791500288 4835703584773660104196096 0 718 | 39618916961329433213086467584 306315147700338688 1 719 | 39618916961329433213086467592 306315143405371392 1 720 | 81169257331568015632601718982152 81129638414606992514532037885960 1 721 | 81169257331568015632601718982152 310818743032741896 0 722 | 81169257331568024639800973723144 306315143405371400 1 723 | 81486169981625081990175149524488 81446551064664045361306586316808 0 724 | 81486169981625081990175149524488 81129638414606988010932410515464 1 725 | 118847079475607281605512529416 79228162514579659936204062728 1 726 | 118847079475607281605512529416 315322342660112392 0 727 | 81170645178408928732017732879872 81130876354646273401102914289664 1 728 | 898805282325616892699590323602944 82397289014835217424523741626368 1 729 | 898805282325616892699590323602944 244656565844048580816101751914496 1 730 | 777110824703706870155906815886848 81129638414606988023027038420992 1 731 | 3373259253971120684421154980496896 121694457621910328870921540993024 0 732 | 3373259253971120684421154980496896 2717842886889324143136169705603072 1 733 | 917820041329040333722040871683584 101412048018258658444775266451456 0 734 | 917820041329040333722040871683584 81129638414606988020828015165440 1 735 | 897537631725388663298102210332160 243388915243820351412414615388160 0 736 | 897537631725388663298102210332160 243388915243820351412406025453568 1 737 | 735912180196289414605064938456576 730166745731460441587140056317952 1 738 | 735991408358803678942658482406912 730800571031574556287888407920640 0 739 | 735991408358803678942658482406912 730879799194088820625481951870976 1 740 | 88142723479664190440249600509440 87467891415748135028311531192320 1 741 | 88142723479664190440318319986176 86200240815519905626883547463680 1 742 | 88142723479664190440318319986176 86200240815519905626814827986944 0 743 | 1384949287513142868171445699085824 86834066115634020327631899066368 0 744 | 1385028515675657132509039243036160 1384987508911855191797849525321728 1 745 | 1385028515675657132509039243036160 1384908280749340927460255981371392 1 746 | 86875072879435961038821616780800 86834066115634020327563179589632 1 747 | 81169407238369643351742833755648 81129793157111898693362377555968 0 748 | 1399842789383280372107345794172416 316912650057363693005371867136 1 749 | 1399842789383280372107345794172416 316912650057368196604999237632 1 750 | 1399842789383280367603763346671104 306342648375934976 1 751 | 1399842789383280367603763346671104 306342631196065792 0 752 | 1400238930195851689292005944329728 1298153442796221477812866002190336 0 753 | 1400238930195851689292005944329728 1298153442796221477813140880097280 0 754 | 1400238930195851689291731066422784 1298074214633707213475272458240000 1 755 | 1400159702033337424954141817439744 1298708039933821328176020809842688 0 756 | 1440724521240640765802036320011776 1298708039933821328176025104809984 0 757 | 1440724521240640765802036320011776 1339272859141124669023919607382016 0 758 | 1399525876733223310253526609823232 1298074214633707213475392717324288 0 759 | 1399525876733223310253526609823232 1298074214633707213475255278370816 1 760 | 101451662099516403120747908695552 306325039010021376 1 761 | 101451662099516403105354745906720 20282409603651976748986261307392 1 762 | 2697600091366930217370602910516768 2616430838871065791014234425917472 0 763 | 2707741296168756052582576536159776 30423614405477811960959886950432 1 764 | 2707741296168756052582576536159776 20282409603651976748986261307424 0 765 | 101530890262030667442956879791616 
20361637766166241088778828513280 1 766 | 263790167091244030834534890079744 182620914595379604480365428736000 1 767 | 263790167091244030834534890079744 20361637766166241088787418447872 0 768 | 101847802912087724793322465658368 20599322253709034101559460364288 1 769 | 101847802912087724793872221472256 20282409603651976751735040376832 0 770 | 101847802912087724793872221472256 20282409603651976751185284562944 1 771 | 39614081258050983969222034944 306316792672813056 1 772 | 39614081258050983969222034944 306316242916999168 1 773 | 39614081258059980860555265568 39614081257438483940177346592 0 774 | 39614081258059980860555265576 39614081257438484077616300064 1 775 | 39614081258059980860555265576 39614081257438484077616300072 0 776 | 39643095477731294773262681640 39633424070561325206227386408 0 777 | 39643095477731294773262681640 39643095477118242239625035816 0 778 | 39633424071174377739865032232 39633424070561888156180807720 0 779 | 39633424071173814789911610920 39633424070561325206227386400 1 780 | 39633424071173814789911610912 39614081257447491139432087584 0 781 | 5110216482170968579510674392608 39614081257438483940177346560 1 782 | 5110216482170968579510674392740 5110216482170356089926990168068 1 783 | 5110216482170968579510674392740 5110216482170356089926990168196 1 784 | 5110216482170968580610186020388 5110216482170356089926990168064 1 785 | 5110216482170968580610186020388 5110216482170356091026501795840 0 786 | 918945464577951232 306385512149549056 1 787 | 921201662438147584 308496574474878976 1 788 | 1298074214633707828334286520452608 306244774661193728 0 789 | 1298074214633707828335403211949568 1298074214633707213378515434995712 0 790 | 1298074214633707828335403211949568 1298074214633707213377415923367936 0 791 | 1298074214633707828334303700321792 1298074214633707213377398743498752 0 792 | 1267650600229148386543699756544 1267650600228535685853782999040 0 793 | 1267650600229148386543699756544 306284357079793664 1 794 | 918985046996551168 306249172707704832 1 795 | 990791918023607808 360287970189640192 1 796 | 990793017535235712 378303468210750080 1 797 | 990795353997444736 378303605649703424 1 798 | 10220432964341090344921167038080 79228162514642643398216909312 0 799 | 10220432964341090344921167038080 378305804672958976 1 800 | 10141204801826826007327623087744 10141204801826213517778298601984 0 801 | 990793154974189184 378303468210749952 1 802 | 990793017535235584 378302368699122176 1 803 | 972918257002481408 288371113640067840 1 804 | 79228162515237255850546431781 79228162514552708707184017921 1 805 | 79228162515237255850546431781 79228162514552708707184017953 0 806 | 79228162515237255850546431749 79228162514552708707184017925 1 807 | 79228162515237255850546431745 79228162514552708707184017920 1 808 | 79228162515237255850546431744 288371113640067584 1 809 | 972918257002481152 288230376151712256 1 810 | 39624961590409455569192616960 9671406881176206568325120 1 811 | 39624961590409455569192616960 39623752664013345003340300288 0 812 | 10880333277286909859595264 324259310609629184 1 813 | 39624961590409455706631570432 39614081257456427969942650880 0 814 | 39624961590414049534931895300 324259241890152448 1 815 | 185275068919940453357639197262852 324259173170675712 0 816 | 185275068919940453357639197262852 20282409603651994683120421961728 1 817 | 170063261717201700539678758798340 2535301200456783062166577086464 0 818 | 170063261717201700539678758798340 7605903601369700668153389907968 0 819 | 162457358115832324130698539566084 158456325028852934360258576384 1 820 | 
162298901790803795455511451665412 162259276829213687650751180963840 1 821 | 1379401934334932549567533616727044 342273571680157696 1 822 | 1381937235535389008370527023137796 81129638414607023969360685301760 0 823 | 1706455789193815735153683043714052 83664939615063482772354091712512 0 824 | 1706455789193815735153683043714052 408183493273490209555510112288768 0 825 | 1298589208570382925222118787384324 475368975085928299132943859712 1 826 | 1298589208570382925222118787384324 158456325028870948758768058368 1 827 | 1298113839595297339196557523682308 1298074214633707249406195762462720 0 828 | 39624961590414049466212418564 324261372193931264 1 829 | 39624961590414047267189163012 324259173170675716 1 830 | 39624961590414047267189163008 324276765356720128 1 831 | 39624961590414029675003118592 324329541914853376 1 832 | 39627379442125246158646281216 328762772798046208 1 833 | 39627379442125386930494374912 400820401195712512 1 834 | 649076734696295578953242535527424 649037107316853854386678877126656 1 835 | 649076734696295579023611279705088 400890735580151808 1 836 | 651691264059266302164198230066176 2614529362971123960953786335232 1 837 | 651691264059266302164198230066176 2535301200456859623360242384896 1 838 | 651612035896752037826604686115840 400820366835974144 1 839 | 39627379442125386896134636544 400961104324329472 1 840 | 39627379442053188564608353280 2417851967992031147458560 1 841 | 39770032688763774020110779392 39614081257457026104268161024 0 842 | 2596188199300102668111810578613256 2596148429267414157137503926091776 0 843 | 2596822024600216782812558930215944 342872255761481728 1 844 | 2596980480925245311487746018116616 633825300114457573004113084416 1 845 | 2596980480925245311487746018116616 792281625142986248191200985088 1 846 | 39789375501967680629209302024 414929849799409664 0 847 | 123318810247697281479783223395336 81446570407477267810079775653888 1 848 | 123318810247697281479783223395336 316931992870586114290770509824 1 849 | 62471581436742270207941469537288 40881751200173926962185273081856 1 850 | 62550809599256534545535013487624 61243388966339861723726068318208 1 851 | 62550809599256534545535013487624 61164160803825597386132524367872 0 852 | 1624352625787258936099715679240 19342813528763916594708480 1 853 | 1307439975730201585725539877896 19342813533267516222078976 1 854 | 1307439975730197082125912507400 1267669943041758165413297913856 1 855 | 39770032688781788968376075272 342872255761481736 1 856 | 3001836391373136005659061473248256 405648192073033734462702184562688 0 857 | 3001836391373136005659061473248256 324518553658427052766913179418624 1 858 | 2961271572165832664811166970676224 40564819207303666831651661414400 0 859 | 2961271572165832664811166970676224 325983757158842368 0 860 | 2596188199300102597180116447527936 2596148429267414140249005323452416 0 861 | 39770032688782914868282917888 324857857251999744 1 862 | 39770032688763774569866593280 324857307496185856 1 863 | 20272477070019034690449572864 20116525638711723824653533184 0 864 | 20272477070019034690449572864 19807040628890378755928752128 1 865 | 19962992060211200420606903296 328797957170135040 1 866 | 19962992060229214819116385280 342308756052246528 1 867 | 334679721452312791209948762604544 334659758460252886289487188983808 1 868 | 984984479369394474177757506962436 11408855402054388907827871612928 1 869 | 1005900714273160259302453109851140 10775030101940274207079520010240 1 870 | 1168159991102373622694031120139268 193316716534805308022604781584384 1 871 | 1168159991102373622694031120139268 31057439705591944631026771296256 
1 872 | 985618304669508588878505858565124 10141204801826159506331168407552 1 873 | 983716828769166244776260803757060 10141204801826159506331168407556 0 874 | 983716828769166244776260803757056 659178312118679613072643209560064 1 875 | 10161167793886064567530230383616 324294357542764544 1 876 | 1957272489744446425267016109655040 649037107316853778001956828086272 1 877 | 1958540140344674654668512812860416 1947111321950560685134580910391296 1 878 | 1958540140344674654668512812860420 1948378972550788914536077613596676 0 879 | 1958619368507188919006106356810756 1948458200713303178873671157547008 0 880 | 1958619368507188919006106356810756 1948378972550788914536077613596672 1 881 | 3295911523585456673247534694532101 324435644786933764 0 882 | 3382270220726004801224497600398341 86200240815519923737420604899328 0 883 | 3382270220726004801224497600398341 5070602400913242041631599755264 1 884 | 3303675883511854578331702001665029 5229058725941770716818687655936 0 885 | 3323958293115506248755649252951045 28046769530049899943759345352704 1 886 | 3323958293115506248755649252951045 7764359926398229519812094066688 1 887 | 3300982125986369590853521507353605 324435644786933760 1 888 | 3295911523585456673247534694532097 40564819207303665283539289505792 1 889 | 3260417306779066250005627004781569 2596148429267414138700892951543808 0 890 | 3260417306779066250005627004781569 2601219031668327056306879764365312 0 891 | 659198275110739518134392027350017 324435644786933761 0 892 | 10161167793886064568079986197504 324435095031119872 1 893 | 2596326848584502567651671921394692 2596148429267414165581203471597572 1 894 | 2596326848584502567651671921394692 2596148429267414165581203471597568 0 895 | 2596326848584502567651671921394688 2596306885592442694256390559498240 1 896 | 2596168392259474038976484833494016 2596148429267414147566804962115584 1 897 | 2596168392259474020962086324012032 2596148429267414147566822141984768 1 898 | 2597436042859702250363565847348224 333301556797505536 1 899 | 2597438518739780821124390523503616 1270126480307133463878176866304 1 900 | 2597438518739780821133186616525824 1270126480307133463603298959360 0 901 | 2597438518739780821133186616525824 1270126480307133472399391981568 1 902 | 2597438528411187378041149043246080 1267660271635119620086898360320 1 903 | 2597438533246890656499665742071040 1267655435931841161570199535872 1 904 | 2597438533246890656499674332005632 1267655435931841161570199535616 0 905 | 2597438533246890656504072378516736 1267655435931841165976835981312 1 906 | 2597438533246890656504072378516736 1267655435931841161578789470208 0 907 | 2597438533246890656499665742070784 1267650600228562703053500710912 1 908 | 233322684177013483067409412 232113757780938101587181568 0 909 | 233322684177013483067409412 232113757780375151633760256 1 910 | 233322684176450533113988100 232113757780375151633760260 1 911 | 233322684176450533113988096 232113757762360753124278272 0 912 | 233322684158440532651017216 232113757690303159086350336 0 913 | 233322684176454931160499200 232113757690307557132861440 0 914 | 233322684176454931160499201 232113757708321955642343425 1 915 | 233322684176454965520237569 232113757708321955642343424 0 916 | 233322684176454965520237569 232113757708321990002081792 0 917 | 852292703729073076062651392 773712524877657029354717184 1 918 | 40565671500007074425669704221696 773712524877661427401228288 1 919 | 40565671500007074425669704221696 773712524877662526912856064 0 920 | 40565671500007074424570192593920 773712524882165027028598784 0 921 | 40644899662521334821514062595201 
40565592919828219072271857221632 0 922 | 40644899662521334821514062595201 40565592919828218509321903800320 1 923 | 40644899662521334258564109173889 40644821082342482846915447750656 1 924 | 40565671500007069920970565223553 40565592919828218509321903800321 1 925 | 40565671500007069920970565223552 40565592919828218509321903800448 1 926 | 324519405951130455851834036716544 324518708400931961750047925731328 1 927 | 334660610752956291063807662359552 154742505234966891905155072 1 928 | 334661848692995576444082561483776 10142597484370355559140429922304 1 929 | 334661848692995576444082561483776 10141359544331070178865530798080 1 930 | 20302382267118446952708157998089 1208926166391834841972736 0 931 | 20302382267118446952708157998089 1208926161888235214602240 1 932 | 20302382267118442449245969581065 1208926161890571676811264 1 933 | 20302382267118442449245969581065 1208926161890434237857792 0 934 | 60867201474421783295010168374281 1208926161888441373032456 0 935 | 60867201474421783295010168374281 1208926161888303934078984 1 936 | 60867201474421783294872729420809 1208926161888235214602248 0 937 | 63402502674878242097801711322121 40564820416229502736129717174280 1 938 | 63402502674878242097801711322121 40564820416229502736134012141576 1 939 | 63402502674878242097797416354825 43100121616685961539123123585032 0 940 | 20302382267118443010477936083971 1208926161888312524013568 0 941 | 30443587068944278222451561726979 10141206010751997100835905470464 1 942 | 33295800919457794375819143939075 2535302409382620691855686238208 1 943 | 33295800919457794375819143939091 1208926161888862279827472 1 944 | 38366403320370711981805956760595 5070603609839079494849092648960 1 945 | 38366403320370711981805956760595 1208926161888862279827456 0 946 | 30760499719001335572825737528323 316913858983219239236455628800 1 947 | 20302382267118443009928180270083 1208926162451262477434880 0 948 | 20302382267118442446978226848771 1208926161888312524013570 1 949 | 27908285868487818855958446081025 1208926161888243804536832 0 950 | 27908285868487818855958446081025 5070603609839079494230617358336 0 951 | 22837683467574901249971633259521 2535302409382620691237210947584 1 952 | 20619294917175499803614464967809 1208926161892598901374976 1 953 | 20619294917175499803614464967817 1208926161892873779281920 1 954 | 20619294917175499803614464967817 1208926161892873779281928 0 955 | 20619294917175499803339587060865 1208926161888200854863872 0 956 | 20619294917175499798941540549761 1208926161888218034733056 1 957 | 20777751242204028474111448581249 158457533954690563937698578432 1 958 | 101907389656810710169900453725313 1208926161888750610677760 0 959 | 101907389656810710169900453725313 81129639623532843584539615821824 0 960 | 20619294917175499798374604866705 316913858983219238575030665232 1 961 | 20619294917175499800573628122261 316913858983219240774053920768 0 962 | 20619294917175499800573628122261 316913858983219238575030665216 1 963 | 20619294917175499798374604866709 316913858983219238575030665220 0 964 | 27908285868487818856997828166801 1208926161889300366491664 0 965 | 27908285868487818856997828166801 2535302409382620692293772902416 1 966 | 25372984668031360054004421756049 5070603609839079495287179313168 1 967 | 20302382267118442448017608934545 1208926161889317546360848 1 968 | 30443587068944277660115788629137 1208926161889304661458944 1 969 | 30443587068944277660115788629137 10141206010751997101278287101952 0 970 | 20302382267118442448142162986129 1208926161889442100412416 0 971 | 20302382267118442448004724032657 1208926161889300366491648 0 972 
| 20302382267118442446900917437569 1208926161888200854864000 1 973 | 20302382267118442447966069326869 1208926161889300366491653 1 974 | 20302382267118442447966069326869 1208926161889300366491649 0 975 | 1318376596900825349580590151631889 1298074215842633069020824937168897 0 976 | 1318376596900825349580590151631897 1208926161888200854863873 1 977 | 1318376596900825349580663166075929 1208926161888273869307913 0 978 | 1318376596900825349580663166075929 1208926161888205149831177 1 979 | 1318376596900825349580594446599193 1208926161888200854863881 0 980 | 344820935925545169230022578275345 324518554867352888671356875440145 0 981 | 344820935925545169234420624786449 1208926161892598901374993 1 982 | 344820935925545169234420624786449 1208926161888200854863889 1 983 | 20302382267118442464458743743488 10880332718822826438557696 1 984 | 20302382267118442464458743743488 10880332718805234252513280 1 985 | 61025650545895394282194574771216 60847232437732812389305253101584 0 986 | 61025650545895394282194574771216 20282413230429471541410750529552 1 987 | 20460831338592053434300072199184 20282413230429471541427930398736 1 988 | 1319485783922470132618029502039056 20282413230429471541406455562256 1 989 | 1319485783922470132618029502039060 20916238530543586242154807164948 1 990 | 1319485783922470132635621688083476 20916238530543586259746993209360 1 991 | 1319485783922470132635621688083476 20916238530543586242154807164944 0 992 | 1318851958622356017917281150436368 1318356627864136378674030537867280 1 993 | 20777743988649110784657068131344 20599325880486528891780631363600 1 994 | 20460831338592053434278597362704 20282410812577832312148106149904 0 995 | 20540057083254678542613791900688 20520095300120625324928738000912 1 996 | 20540057083254678683351280256016 20440867137606361128072682405904 1 997 | 20540057083254678683351280256016 20440867137606360987335194050576 1 998 | 27997407879909809318606116226192 27898217934261491763327518375936 0 999 | 27997407879909809318606116226192 27898217934261491763327518376064 1 1000 | 27997407879909809318674835702800 27888314413947208721128325382144 1 1001 | 28155902890564565661995514201104 27888314413947208721197044858880 1 1002 | 28155902890564565661995514201104 27888353099573436389330635456512 1 1003 | 28155864204938337993861923603472 28046770738975737396384132759552 1 1004 | 38287165486449890163567636775952 32958916814860126327115138203648 0 1005 | 38287165486449890163567636775952 22817712013034291115141512560640 1 1006 | 28145960684624054951594011132944 22976168338062819790328600461312 0 1007 | 22919377838761179431004268397585 22896940175548555452735056510977 0 1008 | 185188558188288825864781471679505 22906843695862838494934249504768 0 1009 | 185505470838345883215155647480849 22896940175548555452735056510976 1 1010 | 185505470838345883215155647480849 23213852825605612803109232312320 1 1011 | 185178654667974542822582278685721 185156217004761918844313066799104 1 1012 | 185178654667974542822719717639193 185156217004761918844450505752584 0 1013 | 185178654667974542822719717639193 185156217004761918844313066799112 0 1014 | 22919377838761179431004268397585 22896940175548555452769416249344 1 1015 | 22919416524387407099137858995217 22896978861174783120903006846977 1 1016 | 347437970182814133882293879571473 347415493833975282235925436825601 1 1017 | 347437970182814142889493134312465 22896940175548555452769416249345 0 1018 | 347437970182814142889493134312465 22896940175548564459968670990337 1 1019 | 22919377838761179430969908659216 22899416055627126213284854759424 1 1020 | 
109037914611687943634602384426128 25353013213490749918134918971520 1 1021 | 110305565211916173040531493880978 25353013213490749922532965482496 1 1022 | 110305565211916173040531493880978 26620663813718979324029668687872 0 1023 | 109037914611687943641233813931154 25353013213490749918134918971392 0 1024 | 109037914611687943641233813931154 25353013213490749920333942226944 1 1025 | 119179119413513778846610369807506 35494218015316585130142904352768 1 1026 | 119179119413513778846610369807506 25353013213490749918169278709760 0 1027 | 109037914611687943634602384426130 25353013213490749918134918971394 0 1028 | 443697673071940505629783570252816 30423615614403667524121731792896 0 1029 | 443697673071940505629783570252816 30423615614403667524138911662080 0 1030 | 443697982556950326974835115164688 20282410812577832312148106149888 1 1031 | 443697982556950326974835115164688 20282720297587653657216830930944 0 1032 | 433556468270114670488161508918288 20282410812577832312182465888256 0 1033 | 433556468270114670488161508918290 20282410812577832382551210065920 1 1034 | 433556468270114670488161508918290 20282410812577832382551210065922 0 1035 | 433556468270114670417758405002256 344800964471004559095304126726144 1 1036 | 104046540373289290366209115554833 101412049227184514007937111293953 0 1037 | 104046540373289290366209115554833 101491277389698778345530655244289 0 1038 | 2700115741478188840293863736214549 101412049227184514007937111293956 1 1039 | 2700115741478188840294417786995733 101412049227184514008486867107840 0 1040 | 2700115741478188840294417786995733 101412049227184514007937111293952 0 1041 | 2701066479428360012344990558586005 101728961877241571358315582062720 1 1042 | 2701066479428360012344990558586005 101728961877241571358315582062592 0 1043 | 2701066479428360012344990558585877 102362787177355686059063933665280 1 1044 | 2700432654128245897644242206983189 101412049227184514007941406261248 1 1045 | 2700115741478188840293863736214545 2697560478494598328273185275904000 1 1046 | 24953437843584295119095858176 19846935180955640732831449088 0 1047 | 24953437843584295119095858176 24798695338097161832427945984 1 1048 | 20001677686442774019499361280 19808249554727972599240851456 0 1049 | 40567451038813560700300567774277 1208926143944171089559616 1 1050 | 202905956030541192934171261011525 162259278038139511839348727218176 0 1051 | 202905956030541192934171261011525 1208926148447770716930048 1 1052 | 45717281602240747150779086800453 1208926148451069251813376 0 1053 | 45717281602240747150847806277189 5070603609839066057124784111616 0 1054 | 45717281602240747150847806277189 5070603609839066057056064634880 0 1055 | 40646679201327829544792273978949 1208926148448870228557824 0 1056 | 40646679201327829541493739095621 1208926148447770716930560 1 1057 | 40646679201327829542180933862469 1208926143944308528513024 0 1058 | 40646679201327829542180933862469 1208926143944171089559552 1 1059 | 40646679201327829542047789876293 1208926143944720845373440 0 1060 | 40646679201327829542047789876293 1208926143944725140340736 0 1061 | 40646679201337048409965326238789 79229371449631653835848024064 0 1062 | 40646679201337048409965326238789 79229371449631653801488285696 0 1063 | 40646679201337048409930966500421 79229371440408281764633509888 0 1064 | 40567451038813560735484939863077 1208926143944171089559585 0 1065 | 40567451038813560735484939863077 1208926143979355461648417 0 1066 | 40567451038813560700300567774245 1208926143944171089559553 1 1067 | 40567451038813560700300567774212 1208926143873802345381888 0 1068 | 
40567451038813560629931823597060 1208926143873802345382404 1 1069 | 121855545778448771000907916641796 158457533954672548989433282564 1 1070 | 121855545778448771000907916641796 1208926143873802345381892 1 1071 | 121697089453420242325789548217860 81129639623532825569591350525956 1 1072 | 283956366282633605717367558505989 81129639623532825569660070002692 0 1073 | 283956366282633605719566581761541 81129639623532825569660070002693 0 1074 | 283956366282633605719566581761541 81129639623532825571859093258245 1 1075 | 283956366282633605717367558505988 243388916452746188961238080290820 0 1076 | 527345281526453650804665854460944 365083374074656211504852868530176 1 1077 | 527345281526453650804665854460944 40564820416229484721696847953920 0 1078 | 202826727868026924021509833884688 40564820416229484721696847953936 1 1079 | 202826727868026924022059589698560 202824097245442848113274858242048 0 1080 | 205362029068483391832527128757248 202824097245442857121298746703872 0 1081 | 205362029068483391832527128757248 205359398445899315924292153114624 1 1082 | 202826727868026933029533722346496 202824097245442857121023868796928 0 1083 | 202826727868026933029258844439552 202824097245442848113824614055936 0 1084 | 10143836633336054994010946667520 10143681890830549846325769273344 1 1085 | 10144146118345876339079671448576 2477089004714634352143630336 0 1086 | 10144146118345876339079671448576 2786574014535979420868411392 1 1087 | 5117382995330008206320600128 5107711588196067436304269312 0 1088 | 2540418583451788811199727010880 5107711588196067470664007680 0 1089 | 2540418583451788811199727010880 2540408912044654870464070418432 0 1090 | 1272767983223559409668664067136 1272758311816425468933007474752 1 1091 | 1272767983223559409668664067136 5107711588196067436304269376 0 1092 | 5117382995330008171960861696 155951431054546336707772416 1 1093 | 165622838192990671991735296 165622837611463370105421824 1 1094 | 40564984830141533838575084241952 40564984830140956814864235364384 1 1095 | 41278038292769912876916979795104 41278038292769335853214720852000 0 1096 | 41278038292769912876951339533472 41198810130255071515621176901664 1 1097 | 41278038292769912877020059010208 41198810130255071515655536640032 0 1098 | 41278038292769912877020059010208 41198810130255071515724256116768 0 1099 | 41198810130255648539323435844768 40564984830140956814872825298976 0 1100 | 40564984830141533838575084242080 40564984830140956814872825299104 1 1101 | 40566222770180819221040416687136 1403562876901347244631916576 0 1102 | 40566222770180819221040416687136 165622837615966969732792352 1 1103 | 40564984830141533840765517562912 165622837615969168756047904 1 1104 | 165622838192990671991735328 165622837615966969732792320 1 1105 | 633983669397403514444177212416 633983669396809039293362208768 1 1106 | 633983669397403514444177212416 633983669396827053691871690752 0 1107 | 633983669397385500045667730432 158369282694338545010606080 0 1108 | 158369283270799297316127744 155951431055109286661193728 0 1109 | 918787444141196288 882758647120134144 0 1110 | 918787444141196288 882758372242227200 1 1111 | 918787169263289344 864743973732745216 1 1112 | 900772770753807488 864726381546700928 0 1113 | 900773320509621376 864726931302514688 0 1114 | 2417852540002578859033728 2417852503955639896113152 0 1115 | 157160357450675113221424256 154742505775398915909091328 0 1116 | 157160357450675113221424256 864726381546700800 0 1117 | 2475880079471515728366011392 864691197174611968 0 1118 | 2475880079471515728366011394 2475880079435451746972860416 0 1119 | 2475880079471515728366011394 
2475880079435451746972860418 0 1120 | 900755178567762944 900755178565664768 0 1121 | 19807040629466839576953750528 900719994193575936 0 1122 | 19807040629466839576953750528 19807040629466804392579563520 1 1123 | 5071221370933443001719948576256 5070602400913782297115267957248 1 1124 | 5071221370933443001719948576256 5070602400913782297115267956736 0 1125 | 5071221370933443001719948575744 5071221370933424987252717518848 0 1126 | 5070602400913872369193716810752 5070602400913800311530957307904 0 1127 | 5070602400913872369193716810880 5070602400913800311513777438848 0 1128 | 5070612072320429295023207482496 5070612072320357228547175088128 1 1129 | 5070612072320429295023207482496 5070602400913800311513777438720 0 1130 | 5070602400913872377989809833088 5070602400913800320309870460928 0 1131 | 5070602400913872404360909030400 5070602400913872369107815366656 0 1132 | 5070602400913872404360909030400 5070602400913872404292187455488 1 1133 | 5070602400913782332368361620480 864726312827224064 1 1134 | 5149830563428046669961905570816 864691128455135232 0 1135 | 5149830563428046669961905570816 79228162515129028721999085568 1 1136 | 730206359812717555661278267048960 730206359812717267430902113239040 1 1137 | 730206359812717555661278267048960 730206359812717267430897818271744 0 1138 | 1054724913471144318473227011621892 81169252495863813864585777119236 1 1139 | 1054724913471144318474326523249668 81169252495863813864585777119232 0 1140 | 1054724913471144318474326523249684 81169252495863813865685288747008 0 1141 | 1054724913471144318474326523249684 81169252495863813865685288747024 1 1142 | 1054724913471144318473227011621888 81169252495863849893382796083200 1 1143 | 1054734821827161843945145884476416 405697709674604823689940990689280 0 1144 | 1054734831498568400862179282125824 405697724181714659065491087163392 1 1145 | 1054734831498568400862196461995008 405697714510308102148474869383168 1 1146 | 1054734831498568400862196461995008 405697714510308102148457689513984 0 1147 | 1055051729641515622837003361453312 406004718804347597998115973497088 1 1148 | 1055051734477218901295520060278016 406004718804347597998115973496832 1 1149 | 1055051734477218901295520060278016 406004723640050876456632672321536 0 1150 | 1055051729641515622977740849808384 405687806154290540647741797695488 0 1151 | 1055051768327141850645874440406528 405687806154290540788479286050816 1 1152 | 1055052087483558228907976562837056 405687854511323325373646274297856 1 1153 | 1055052088692484048522605737543232 405687844839916768456612876648448 1 1154 | 1055052088692484048522605737543232 405687846048842588071242051354624 1 1155 | 1055052077812151671990943165187648 405688154324926589801681601429504 1 1156 | 1055051768327141850645874440406592 405687844839916768456612876648512 1 1157 | 1055051729641515622977740849808896 405687806154290540788479286051328 1 1158 | 81189059536492668328893898294784 39614081257132168796771975680 1 1159 | 81189078879305782162960693593856 39614081257132177592864998144 1 1160 | 81189078879305782163098132547328 39633424070246011659660296960 0 1161 | 81189078879305782163098132547328 39633424070246011797099250432 1 1162 | 81189059536492668328893898295040 39614081257132177592864997888 0 1163 | 81189059536492668320097805272592 39614081257132309534260331008 1 1164 | 81189059536492668337689991317008 39614081257132327126446375440 1 1165 | 81189678506512311027827440879248 39614081257132309534260331024 1 1166 | 81189678506512311027827440879248 40233051276774999671709893136 1 1167 | 81189059536492668337689991317136 39614081257132309534260331152 1 
1168 | 81189059536492812435285881128448 39614081257132309534260330496 1 1169 | 81268287699007077054354401789440 118842243771540762315880136704 1 1170 | 81268287699007077054491840743040 118842243771541043790856847360 1 1171 | 81269525639046362434766739867264 118842243771541043928295800832 1 1172 | 81269525639046362434766739867264 120080183810826424203194925056 1 1173 | 81268287699007077054354401789568 118842243771541043790856847488 1 1174 | 81268287699007103794477189301888 39614081257276424722336186368 0 1175 | 81270154280472589344872889058944 41480662722770982317290684416 0 1176 | 81270154280472589344872889058944 41480662722770419367337263104 0 1177 | 81270154280472588781922935637632 41470991316213502333939613696 0 1178 | 81270144609066031864889537988224 40233051276928122059040489472 0 1179 | 81268926011839860389050178340480 39614081257285502290335105024 0 1180 | 81269274182475909402252493719168 39672109696627004490721001472 0 1181 | 81269283853882466319285891368584 39633424070399336357130403848 0 1182 | 81269283853882466319285891368584 39633424070399336357130403840 1 1183 | 81269283853882466319285891368576 39643095476956253390528053248 0 1184 | 81269235496849681734118903121536 39942909080220681425855184896 0 1185 | 81268906669026746556082894669440 39614081257285431921590927360 0 1186 | 81268906669026746556289053099648 39614081257285433021102555136 0 1187 | 81268906669026746556289053099648 39614081257285433089822031872 0 1188 | 81268906669026746556220333622912 39614081257285433158541508608 0 1189 | 81268287699007131097549930235520 39614081257330467917864632320 1 1190 | 81268294952562048785324978472576 39616499108969978651190755328 0 1191 | 81268294952562048785324978472576 39614081257330749392841342976 1 1192 | 81268331220336637224200219657856 39618916960609207909540167680 1 1193 | 86338933621249554852177265034880 39657602586836876043130765312 1 1194 | 86339088363754465524711627425408 5110414746004665154564305977344 0 1195 | 86339088363754465524711627425424 5110260003499754482029943586832 0 1196 | 86341564243833036285261425673872 5112735883578325242579741835264 0 1197 | 86341564243833036285261425673872 5110260003499754482029943586816 0 1198 | 81268331220336637246190452213376 39657602586836893635316809728 1 1199 | 81268331220336637228598266168960 39657602586836880441177276416 0 1200 | 81268287699007130816633299273361 39614081257294439129435602944 0 1201 | 81268287699007130816702018750097 39614081257294439189565145088 0 1202 | 81268287699007130816702018750097 39614081257294439120845668352 0 1203 | 81268287699007130816624709338769 39614081257294439120845668368 1 1204 | 81268287699007130816624709338753 39614081257294439670601482240 0 1205 | 81268287699007130816624709338753 39614081257294439670601482241 0 1206 | 81268287699007130816624709338753 39614081257294439120845668353 1 1207 | 121833106906310417620773927651012 39614081257276424722336186564 1 1208 | 121833109324162056850032277063364 40604435706412256501875188170948 0 1209 | 121833109324162056850032277063364 40604433288560617272616838758596 0 1210 | 81268287699007076772879425078980 39614081257276424722336186500 1 1211 | 81268287699007076772879425079172 39614081257276424722336186496 0 1212 | 81268290116858752030934793455492 39616499108951682777704563072 0 1213 | 81268290116858752030934793455492 39616499108915653980685599104 0 1214 | 81268290116858716072506518669220 39614081257276424722336186784 1 1215 | 81268599601868537417575243450276 39614081257276424722336186752 0 1216 | 81268599601868537417575243450276 39923566267097769791060967808 1 1217 
| 81268290116858716072506518669188 39614081257276495091080364416 0 1218 | 81189059536492669445997712115584 39614081257133294696678817920 0 1219 | 81189059536492669516366456293249 39614081257133365065422995840 1 1220 | 81189059536492669516366456293249 39614081257133365065422995841 0 1221 | 81189059536492669516366456293252 39614081257133294696678818176 0 1222 | 81189369021502490861435181074308 39614081257133294696678818180 0 1223 | 81189369021502490862534692702085 39923566266954639765403599236 1 1224 | 81189369021502490862534692702085 39923566266954640864915227012 0 1225 | 81189369021502490861435181074309 39923566266954639765403599237 0 1226 | 81198963056806970502595414591104 49517601571416336895871811584 0 1227 | 81198963056806970502595414591108 39614081257133294696678817792 0 1228 | 81198963056806970502595414591108 39614081257133294696678817796 1 1229 | 81189059536492687460396221597312 39614081257151309095188299776 0 1230 | 81189369021502508805464946378240 39614081257132168796771975168 0 1231 | 81189412542832014932115235800576 39923566266953513865496756224 1 1232 | 81189412542832014932115235800576 39962251893181181999087353856 0 1233 | 81189373857205787263981645202944 39928401970231972382195580928 0 1234 | 81189059536492687460396221597184 39614081257150183195281457152 1 1235 | 245983637566162490514669221970944 162318697951099061785510656606208 0 1236 | 245983637566162490514669221970944 59421121885698393932646318080 1 1237 | 93868041418853569124411654538240 61897001964305042541975175168 1 1238 | 93868041418853569133207747560448 59421121885734281992176926720 1 1239 | 93868041418853569133207747560448 59421121885734290788269948928 1 1240 | 93865565538774998363861856289792 10200625923711569493965802569728 1 1241 | 83725598676988448532163129771016 60659061924983633470057086976 0 1242 | 83733026317224160813812524516360 59421121885698253195157962752 0 1243 | 83733026317224160813812524516360 64372882042839774294754459648 1 1244 | 83728074557067019292712928019464 61897001964269013744956211200 0 1245 | 83724360736949163151888230646792 59421121885698253195157962760 0 1246 | 88794963137862044588340536148996 7665324723255074662175377195012 1 1247 | 88799914898019186109440132645956 7665324723255074662175377195072 1 1248 | 88799914898019186109440132645956 7665324723255074662175377195008 1 1249 | 88799914898019186109440132645892 7670276483412216183274973691904 0 1250 | 88794963137862044588340536148992 2594722322342157056188564373504 1 1251 | 49517601571704039506444289041 35184372088833 1 1252 | 1318406162390669215044773747885073 1318356644789097511040451675684864 1 1253 | 1318406201076295442712941698221073 1318356664131910624874552830722048 0 1254 | 1318406201076295442712941698221073 1318356664131910624874518470983680 0 1255 | 1318406201076295442712907338482705 1318356625446284397206384880386048 0 1256 | 1318406162390669224051973002626065 1318356624237358577591755705679872 1 1257 | 1318406162390669224052110441579537 1318356643580171691425822500978688 1 1258 | 1318406471875679045397179166360593 1318356953065181512771028664713216 1 1259 | 1318723539268241013420087704552465 1318673556230228748776334115733504 1 1260 | 1323794141669153931026083107308561 1323427245981084609031955342688256 1 1261 | 1323873369831668195363715305964561 1318356643580171691426007184572416 1 1262 | 1323873369831668195363715305964561 1318435871742685955763600728522752 1 1263 | 1323794141669153931026121762014225 1318356643580171691426002889605120 1 1264 | 1323794141669153931026117467046929 1318356643580171691425968529866752 1 1265 | 
1318723539268241013420096294487057 1318356643580171691425959939932160 1 1266 | 1318406626618183956069713528751121 1318356798322676602098494302322688 0 1267 | 1318723055697913167568426412016657 1318673536887415643949329136222208 0 1268 | 1318723055697913167568426412016657 1318356624237358586598954960420864 1 1269 | 1318406143047856110218052236215313 1318356624237358586598963550355456 1 1270 | 1318406143047856110218043646280721 1318356624237358586599092399374336 0 1271 | 1318425948879558847680476163867665 1298094021674335473252206840381440 0 1272 | 1318425948879558847680476163867665 1298074214633706907167808454393856 1 1273 | 1625180691615666571629737665365017 2537777080535029598865015701512 0 1274 | 1625180691615666571629741960332313 2535301200456458838319512420360 1 1275 | 1625180691615666571629827859678233 2535301200456458838401116798984 1 1276 | 1625180691615666571629827859678233 2535301200456458838332397322248 1 1277 | 1625180691615666571630034018108441 2535301200456458838590095360008 0 1278 | 1625180691615666571630034018108441 2535301200456458838315217453064 1 1279 | 1625178215735588000869256586593305 2535301200456458838246497976328 0 1280 | 1625178215735588000869256586593305 2535301200456458838177778499592 1 1281 | 1625178215735588000869050428163097 2535301200456458838177778499584 1 1282 | 1625178215735588000869050428163089 35184372088832 1 1283 | 1622642914535131578094854040716305 40567581018423296 1 1284 | 1623276739835245692795662521861137 4538809769263104 1 1285 | 1624861303085530979547537695835153 1267650600228233940345127174144 0 1286 | 1624861303085530979547537695835153 4538848423968768 1 1287 | 1623593652485302750146040992629777 4538844129001472 1 1288 | 1623672880647817014483630241612817 396140812571326226811848753152 1 1289 | 1626208181848273473286623648023569 2852213850513520692211711213568 1 1290 | 1626208181848273473286623648023569 316912650057061889218304802816 1 1291 | 1623276739835245692795628162122769 633825300114119239558120865792 1 1292 | 1622642914535131578094879810520081 4538792589393920 1 1293 | 1622642914535131578094862630650897 4538783999459328 1 1294 | 1634051794115701994468511217353745 1267684450151178646297967067136 1 1295 | 1634051794115701994468511217353745 1267665107338064812231171768320 0 1296 | 1634051774772888880634444422054929 1267660271634786353714472943616 1 1297 | 1634051769937185602175927723230225 9671406556952217769738240 1 1298 | 1632784119336957372774431020024849 10141214473232392164191395381248 1 1299 | 4867829679388031533857259101619249 649038964226912381671908761927680 1 1300 | 4867909217035555619539921370350769 649118501874436467354571030659104 0 1301 | 4867909217035555619539921370350769 649118192389426646009502305878048 0 1302 | 4867908907550545798194852645569713 649118192389426646009502305878176 1 1303 | 4867908907550545798194852645569585 649038964226912381671908761927712 1 1304 | 4867829679388031533857259101619217 3245187393494326195937156926537728 1 1305 | 2272315114106358061960892879209493 649671551586987210992382214406148 0 1306 | 2272315133449171175794959674508309 649671609615426552494582600302596 0 1307 | 2272315133449171175794959674508309 649671590272613438660515805003780 1 1308 | 2272315075420731834292759288611861 649037726286873096291633862803460 1 1309 | 2317316671728833978045892252402709 694673147895089354745515178196992 1 1310 | 2317316671728833978045892252402837 654108328687786013897620675624960 0 1311 | 2317316671728833978045892252402837 654108328687786013897620675625088 1 1312 | 2276751852521530637197997749830677 
649037726286873096291633862803456 0
1313 | 2276750614581491351817722850706577 5071221370932560331308634472576 1
1314 | 2317315433788794692665617353278609 45636040578235901179203137044480 1
[... rows 1315-1634 of this machine-generated entry file: one three-column row per line, two large integers followed by a 0/1 label ...]
1635 | 1408 1024 1
1636 | 1152 1152 1
1637 | 
--------------------------------------------------------------------------------
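
Note on reading these files: the interpretation below is an assumption inferred from the `rule2entry` naming and standard P4 ternary-match semantics, not something the files themselves state. Each row appears to be a `<match value> <mask> <class label>` triple, where a lookup key matches an entry when it agrees with the match value on every bit set in the mask, and the first matching row wins, as in a priority-ordered TCAM table. A minimal Python sketch under those assumptions (all names here are hypothetical, not part of the repo):

# Hypothetical reader/emulator for the ternary-entry files above.
# ASSUMPTIONS (not confirmed by the repo): each row is
#   <match value> <mask> <class label>
# and a key matches an entry when (key & mask) == (value & mask),
# with earlier rows taking priority, as in a P4 ternary table.

def load_entries(path):
    """Parse one 'value mask label' triple per line."""
    entries = []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) != 3:
                continue  # skip blank or malformed rows
            value, mask, label = (int(p) for p in parts)
            entries.append((value, mask, label))
    return entries

def classify(key, entries):
    """Return the label of the first matching entry, else None."""
    for value, mask, label in entries:
        if (key & mask) == (value & mask):
            return label
    return None

# Worked example with the last two rows shown above:
#   1408 1024 1  -> mask 1024 checks only bit 10, so any key with
#                   bit 10 set matches and yields label 1
#   1152 1152 1  -> mask 1152 = 1024 + 128 checks bits 10 and 7
entries = [(1408, 1024, 1), (1152, 1152, 1)]
assert classify(1 << 10, entries) == 1  # bit 10 set -> first rule fires
assert classify(0, entries) is None     # no masked bits agree -> no match

First-match priority is assumed because TCAM entries derived from decision-tree paths are normally installed in path order, so more specific rules must precede broader ones.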