├── .gitignore
├── README.md
├── criteria.py
├── demo.py
├── models
│   ├── __init__.py
│   ├── br.py
│   ├── cc.py
│   ├── cft.py
│   ├── clems.py
│   ├── mdsw.py
│   └── pcc.py
└── scene
    ├── data.x
    └── data.y
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | *.swp
3 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Cost-Sensitive Multi-Label Classification
2 | 
3 | Python implementation of our paper [Cost-Sensitive Label Embedding for Multi-Label Classification](https://arxiv.org/abs/1603.09048) and related algorithms, including:
4 | 
5 | - Cost-Sensitive Label Embedding with Multidimensional Scaling (CLEMS)
6 | - Condensed Filter Tree (CFT)
7 | - Probabilistic Classifier Chains (PCC)
8 | - Classifier Chains (CC)
9 | - Binary Relevance (BR)
10 | 
11 | If you find our paper or implementation useful in your research, please consider citing our paper for CLEMS and the
12 | references below for the other algorithms.
13 | 
14 |     @article{Huang2017clems,
15 |       author  = {Kuan-Hao Huang and
16 |                  Hsuan-Tien Lin},
17 |       title   = {Cost-sensitive label embedding for multi-label classification},
18 |       journal = {Machine Learning},
19 |       volume  = {106},
20 |       number  = {9-10},
21 |       pages   = {1725--1746},
22 |       year    = {2017},
23 |     }
24 | 
25 | ### Prerequisites
26 | - Python 2.7.12
27 | - NumPy 1.13.3
28 | - scikit-learn 0.17
29 | 
30 | ### Usage
31 | 
32 |     $ python demo.py
33 | 
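The models can also be called directly from Python. Below is a minimal sketch (CLEMS with the F1-score cost and scikit-learn's RandomForestRegressor, mirroring demo.py; the data paths follow the layout above and the forest parameters are only illustrative):

    import numpy as np
    from sklearn.ensemble import RandomForestRegressor
    from models.clems import CLEMS

    # load the scene dataset shipped with this repository
    x_data = np.loadtxt('scene/data.x', dtype=float)
    y_data = np.loadtxt('scene/data.y', dtype=int)

    # random half/half split, as in demo.py
    idxs = np.random.permutation(x_data.shape[0])
    n = x_data.shape[0] / 2

    # CLEMS('f1', ...) embeds label vectors with cost-weighted MDS and
    # trains one regressor per embedded dimension
    model = CLEMS('f1', RandomForestRegressor, {"n_estimators": 100, "n_jobs": 4})
    model.fit(x_data[idxs[:n]], y_data[idxs[:n]])
    p_data = model.predict(x_data[idxs[n:]])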
34 | ### Dataset
35 | 
36 | - scene (downloaded from [Mulan](http://mulan.sourceforge.net/datasets-mlc.html))
37 | 
38 | ### Evaluation Criteria
39 | 
40 | - Hamming loss
41 | - Rank loss
42 | - F1 score
43 | - Accuracy score
44 | 
45 | ### Result
46 | 
47 |     ============================================================
48 |     algorithm    hamming_loss    rank_loss    f1_score    accuracy_score
49 |     ============================================================
50 |     BR           0.0907          1.1844       0.5742      0.5627
51 |     CC           0.0880          1.1424       0.5947      0.5851
52 |     PCC          0.0900          0.6898       0.7460      0.6909
53 |     CFT          0.0867          0.9460       0.6478      0.6267
54 |     CLEMS        0.0825          0.6553       0.7690      0.7600
55 |     ============================================================
56 | 
57 | ### Reference
58 | 
59 | - Grigorios Tsoumakas and Ioannis Katakis.
60 |   Multi-Label Classification: An Overview.
61 |   International Journal of Data Warehousing and Mining, 2007.
62 | 
63 | - Jesse Read, Bernhard Pfahringer, Geoff Holmes, and Eibe Frank.
64 |   Classifier Chains for Multi-Label Classification.
65 |   Machine Learning, 2011.
66 | 
67 | - Krzysztof Dembczynski, Weiwei Cheng, and Eyke Hullermeier.
68 |   Bayes Optimal Multilabel Classification via Probabilistic Classifier Chains.
69 |   ICML, 2012.
70 | 
71 | - Chun-Liang Li and Hsuan-Tien Lin.
72 |   Condensed Filter Tree for Cost-Sensitive Multi-Label Classification.
73 |   ICML, 2014.
74 | 
75 | - Kuan-Hao Huang and Hsuan-Tien Lin.
76 |   Cost-Sensitive Label Embedding for Multi-Label Classification.
77 |   Machine Learning, 2017.
78 | 
79 | 
80 | 
81 | 
82 | 
83 | 
84 | 
85 | 
86 | 
87 | 
88 | 
89 | ### Author
90 | 
91 | Kuan-Hao Huang / [@ej0cl6](http://ej0cl6.github.io/)
92 | 
--------------------------------------------------------------------------------
/criteria.py:
--------------------------------------------------------------------------------
1 | def hamming_loss(y_test, p_test):  # per-instance fraction of mispredicted labels
2 |     return 1.0 - (p_test==y_test).mean(axis=1)
3 | 
4 | def rank_loss(y_test, p_test):  # mis-ordered (relevant, irrelevant) label pairs; ties count 0.5
5 |     revloss = 1.0 * ((y_test==1) & (p_test==0)).sum(axis=1) * ((y_test==0) & (p_test==1)).sum(axis=1)
6 |     eq0loss = 0.5 * ((y_test==1) & (p_test==0)).sum(axis=1) * ((y_test==0) & (p_test==0)).sum(axis=1)
7 |     eq1loss = 0.5 * ((y_test==1) & (p_test==1)).sum(axis=1) * ((y_test==0) & (p_test==1)).sum(axis=1)
8 |     return (revloss + eq0loss + eq1loss)
9 | 
10 | def f1_score(y_test, p_test):  # per-instance F1 between predicted and true label sets
11 |     v1 = 2.0*(p_test*y_test).sum(axis=1)
12 |     v2 = p_test.sum(axis=1) + y_test.sum(axis=1)
13 |     v1[v2<=0] = 1.0
14 |     v1[y_test.sum(axis=1)<=0] = 1.0
15 |     v1[v2>0] /= v2[v2>0]
16 |     return v1
17 | 
18 | def accuracy_score(y_test, p_test):  # per-instance Jaccard similarity (intersection over union)
19 |     v1 = 1.0 * ((p_test==1) & (y_test==1)).sum(axis=1)
20 |     v2 = 1.0 * ((p_test==1) | (y_test==1)).sum(axis=1)
21 |     v1[v2<=0] = 1.0
22 |     v1[v2>0] /= v2[v2>0]
23 |     return v1
24 | 
--------------------------------------------------------------------------------
/demo.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from criteria import hamming_loss, rank_loss, f1_score, accuracy_score
3 | from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
4 | from models.br import BR
5 | from models.cc import CC
6 | from models.pcc import PCC
7 | from models.cft import CFT
8 | from models.clems import CLEMS
9 | 
10 | # set random seed
11 | np.random.seed(1)
12 | 
13 | # load data
14 | x_data = np.loadtxt('scene/data.x', dtype=float)
15 | y_data = np.loadtxt('scene/data.y', dtype=int)
16 | 
17 | # split data for training and testing
18 | idxs = np.arange(x_data.shape[0])
19 | np.random.shuffle(idxs)
20 | x_train = x_data[idxs[:x_data.shape[0]/2]]
21 | y_train = y_data[idxs[:y_data.shape[0]/2]]
22 | x_test = x_data[idxs[x_data.shape[0]/2:]]
23 | y_test = y_data[idxs[x_data.shape[0]/2:]]
24 | 
25 | # algorithms (there is no inference rule for pcc_acc, using pcc_f1 instead)
26 | params = {"n_estimators":500, "max_depth": 10, "max_features": "sqrt", "n_jobs": 10}
27 | alg_br = BR(RandomForestClassifier, params)
28 | alg_cc = CC(RandomForestClassifier, params)
29 | alg_pcc_ham = PCC('ham', RandomForestClassifier, params)
30 | alg_pcc_rank = PCC('rank', RandomForestClassifier, params)
31 | alg_pcc_f1 = PCC('f1', RandomForestClassifier, params)
32 | alg_cft_ham = CFT('ham', RandomForestClassifier, params)
33 | alg_cft_rank = CFT('rank', RandomForestClassifier, params)
34 | alg_cft_f1 = CFT('f1', RandomForestClassifier, params)
35 | alg_cft_acc = CFT('acc', RandomForestClassifier, params)
36 | alg_clems_ham = CLEMS('ham', RandomForestRegressor, params)
37 | alg_clems_rank = CLEMS('rank', RandomForestRegressor, params)
38 | alg_clems_f1 = CLEMS('f1', RandomForestRegressor, params)
39 | alg_clems_acc = CLEMS('acc', RandomForestRegressor, params)
40 | 
41 | print 'training BR ...'
42 | alg_br.fit(x_train, y_train)
43 | p_br = alg_br.predict(x_test)
44 | 
45 | print 'training CC ...'
46 | alg_cc.fit(x_train, y_train)
47 | p_cc = alg_cc.predict(x_test)
48 | 
49 | print 'training PCC_ham ...'
50 | alg_pcc_ham.fit(x_train, y_train) 51 | p_pcc_ham = alg_pcc_ham.predict(x_test) 52 | 53 | print 'training PCC_rank ...' 54 | alg_pcc_rank.fit(x_train, y_train) 55 | p_pcc_rank = alg_pcc_rank.predict(x_test) 56 | 57 | print 'training PCC_f1 ...' 58 | alg_pcc_f1.fit(x_train, y_train) 59 | p_pcc_f1 = alg_pcc_f1.predict(x_test) 60 | 61 | print 'training CFT_ham ...' 62 | alg_cft_ham.fit(x_train, y_train) 63 | p_cft_ham = alg_cft_ham.predict(x_test) 64 | 65 | print 'training CFT_rank ...' 66 | alg_cft_rank.fit(x_train, y_train) 67 | p_cft_rank = alg_cft_rank.predict(x_test) 68 | 69 | print 'training CFT_f1 ...' 70 | alg_cft_f1.fit(x_train, y_train) 71 | p_cft_f1 = alg_cft_f1.predict(x_test) 72 | 73 | print 'training CFT_acc ...' 74 | alg_cft_acc.fit(x_train, y_train) 75 | p_cft_acc = alg_cft_acc.predict(x_test) 76 | 77 | print 'training CLEMS_ham ...' 78 | alg_clems_ham.fit(x_train, y_train) 79 | p_clems_ham = alg_clems_ham.predict(x_test) 80 | 81 | print 'training CLEMS_rank ...' 82 | alg_clems_rank.fit(x_train, y_train) 83 | p_clems_rank = alg_clems_rank.predict(x_test) 84 | 85 | print 'training CLEMS_f1 ...' 86 | alg_clems_f1.fit(x_train, y_train) 87 | p_clems_f1 = alg_clems_f1.predict(x_test) 88 | 89 | print 'training CLEMS_acc ...' 90 | alg_clems_acc.fit(x_train, y_train) 91 | p_clems_acc = alg_clems_acc.predict(x_test) 92 | 93 | ham_br, rank_br, f1_br, acc_br = hamming_loss(y_test, p_br).mean(), rank_loss(y_test, p_br).mean(), f1_score(y_test, p_br).mean(), accuracy_score(y_test, p_br).mean() 94 | ham_cc, rank_cc, f1_cc, acc_cc = hamming_loss(y_test, p_cc).mean(), rank_loss(y_test, p_cc).mean(), f1_score(y_test, p_cc).mean(), accuracy_score(y_test, p_cc).mean() 95 | ham_pcc, rank_pcc, f1_pcc, acc_pcc = hamming_loss(y_test, p_pcc_ham).mean(), rank_loss(y_test, p_pcc_rank).mean(), f1_score(y_test, p_pcc_f1).mean(), accuracy_score(y_test, p_pcc_f1).mean() 96 | ham_cft, rank_cft, f1_cft, acc_cft = hamming_loss(y_test, p_cft_ham).mean(), rank_loss(y_test, p_cft_rank).mean(), f1_score(y_test, p_cft_f1).mean(), accuracy_score(y_test, p_cft_acc).mean() 97 | ham_clems, rank_clems, f1_clems, acc_clems = hamming_loss(y_test, p_clems_ham).mean(), rank_loss(y_test, p_clems_rank).mean(), f1_score(y_test, p_clems_f1).mean(), accuracy_score(y_test, p_clems_acc).mean() 98 | 99 | show_title = 'algorithm hamming_loss rank_loss f1_score accuracy_score' 100 | show_bar = '============================================================' 101 | show_br = ' BR {:.4f} {:.4f} {:.4f} {:.4f}'.format(ham_br, rank_br, f1_br, acc_br) 102 | show_cc = ' CC {:.4f} {:.4f} {:.4f} {:.4f}'.format(ham_cc, rank_cc, f1_cc, acc_cc) 103 | show_pcc = ' PCC {:.4f} {:.4f} {:.4f} {:.4f}'.format(ham_pcc, rank_pcc, f1_pcc, acc_pcc) 104 | show_cft = ' CFT {:.4f} {:.4f} {:.4f} {:.4f}'.format(ham_cft, rank_cft, f1_cft, acc_cft) 105 | show_clems = ' CLEMS {:.4f} {:.4f} {:.4f} {:.4f}'.format(ham_clems, rank_clems, f1_clems, acc_clems) 106 | 107 | print '' 108 | print show_bar 109 | print show_title 110 | print show_bar 111 | print show_br 112 | print show_cc 113 | print show_pcc 114 | print show_cft 115 | print show_clems 116 | print show_bar 117 | -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ej0cl6/csmlc/369c2bbe49c2b097438ae82df895dc5555b5ffed/models/__init__.py -------------------------------------------------------------------------------- /models/br.py: 
-------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | class BR: 4 | def __init__(self, base_learner, params={}): 5 | self.base_learner = base_learner 6 | self.params = params 7 | 8 | def fit(self, x_train, y_train): 9 | self.K = y_train.shape[1] 10 | self.clfs = [self.base_learner(**self.params) for i in xrange(self.K)] 11 | for i in xrange(self.K): 12 | self.clfs[i].fit(x_train, y_train[:, i]) 13 | 14 | def predict(self, x_test): 15 | p_test = np.zeros((x_test.shape[0], self.K), dtype=int) 16 | for i in xrange(self.K): 17 | p_test[:, i] = self.clfs[i].predict(x_test) 18 | return p_test 19 | -------------------------------------------------------------------------------- /models/cc.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | class CC: 4 | def __init__(self, base_learner, params={}): 5 | self.base_learner = base_learner 6 | self.params = params 7 | 8 | def fit(self, x_train, y_train): 9 | self.K = y_train.shape[1] 10 | self.clfs = [self.base_learner(**self.params) for i in xrange(self.K)] 11 | for i in xrange(self.K): 12 | self.clfs[i].fit(np.concatenate((x_train, y_train[:, :i]), axis=1), y_train[:, i]) 13 | 14 | def predict(self, x_test): 15 | p_test = np.zeros((x_test.shape[0], self.K), dtype=int) 16 | for i in xrange(self.K): 17 | p_test[:, i] = self.clfs[i].predict(np.concatenate((x_test, p_test[:, :i]), axis=1)) 18 | return p_test 19 | -------------------------------------------------------------------------------- /models/cft.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from criteria import hamming_loss, rank_loss, f1_score, accuracy_score 3 | 4 | class CFT: 5 | def __init__(self, cost, base_learner, params={}, n_round=8): 6 | self.cost = cost 7 | self.base_learner = base_learner 8 | self.params = params 9 | self.n_round = n_round 10 | if self.cost == 'ham': 11 | self.func = hamming_loss 12 | elif self.cost == 'rank': 13 | self.func = rank_loss 14 | elif self.cost == 'f1': 15 | self.func = f1_score 16 | elif self.cost == 'acc': 17 | self.func = accuracy_score 18 | 19 | def fit(self, x_train, y_train): 20 | self.K = y_train.shape[1] 21 | 22 | x_train_new = x_train 23 | p_train_new = y_train 24 | y_train_new = y_train 25 | w_train_new = self.cal_weight(y_train, y_train) 26 | for rd in xrange(self.n_round): 27 | self.clfs = [self.base_learner(**self.params) for i in xrange(self.K)] 28 | for i in xrange(self.K): 29 | self.clfs[i].fit(np.concatenate((x_train_new, p_train_new[:, :i]), axis=1), y_train_new[:, i], w_train_new[:, i]) 30 | p_train = self.predict(x_train) 31 | w_train = self.cal_weight(y_train, p_train) 32 | 33 | x_train_new = np.concatenate((x_train_new, x_train), axis=0) 34 | p_train_new = np.concatenate((p_train_new, p_train), axis=0) 35 | y_train_new = np.concatenate((y_train_new, y_train), axis=0) 36 | w_train_new = np.concatenate((w_train_new, w_train), axis=0) 37 | 38 | def predict(self, x_test): 39 | p_test = np.zeros((x_test.shape[0], self.K), dtype=int) 40 | for i in xrange(self.K): 41 | p_test[:, i] = self.clfs[i].predict(np.concatenate((x_test, p_test[:, :i]), axis=1)) 42 | return p_test 43 | 44 | def cal_weight(self, y_train, p_train): 45 | score0 = np.zeros(y_train.shape) 46 | score1 = np.zeros(y_train.shape) 47 | 48 | for i in xrange(y_train.shape[1]): 49 | t_train = p_train.copy() 50 | t_train[:, i] = 0 51 | score0[:, i] = self.func(y_train, t_train) 52 | t_train[:, i] 
= 1 53 | score1[:, i] = self.func(y_train, t_train) 54 | 55 | w_train = np.abs(score0 - score1) 56 | w_train /= w_train.sum() 57 | w_train *= w_train.shape[0]*w_train.shape[1] 58 | 59 | return w_train 60 | -------------------------------------------------------------------------------- /models/clems.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from criteria import hamming_loss, rank_loss, f1_score, accuracy_score 3 | from sklearn.neighbors import NearestNeighbors 4 | from mdsw import MDSW 5 | 6 | class CLEMS: 7 | def __init__(self, cost, base_learner, params={}): 8 | self.cost = cost 9 | self.base_learner = base_learner 10 | self.params = params 11 | if self.cost == 'ham': 12 | self.dis = hamming_loss 13 | elif self.cost == 'rank': 14 | self.dis = rank_loss 15 | elif self.cost == 'f1': 16 | self.dis = lambda x1, x2: 1.0 - f1_score(x1, x2) 17 | elif self.cost == 'acc': 18 | self.dis = lambda x1, x2: 1.0 - accuracy_score(x1, x2) 19 | 20 | def fit(self, x_train, y_train): 21 | self.K = y_train.shape[1] 22 | self.z_dim = self.K 23 | 24 | # get unique label vectors 25 | bb = np.ascontiguousarray(y_train).view(np.dtype((np.void, y_train.dtype.itemsize * y_train.shape[1]))) 26 | _, idx = np.unique(bb, return_index=True) 27 | self.y_train_uq = y_train[idx] 28 | num_uq = self.y_train_uq.shape[0] 29 | 30 | self.nn_y_uq = NearestNeighbors(n_neighbors=1) 31 | self.nn_y_uq.fit(self.y_train_uq) 32 | 33 | # calculate weight 34 | uq_weight = self.cal_count(y_train) 35 | 36 | # calculate delta matrix 37 | delta = np.zeros((2*num_uq, 2*num_uq)) 38 | for i in xrange(num_uq): 39 | for j in xrange(num_uq): 40 | delta[i, num_uq+j] = np.sqrt(self.dis(self.y_train_uq[None, i], self.y_train_uq[None, j])) 41 | delta[num_uq+j, i] = delta[i, num_uq+j] 42 | 43 | # calculate MDS embedding 44 | mds = MDSW(n_components=self.z_dim, n_uq=num_uq, uq_weight=uq_weight, max_iter=300, eps=1e-6, dissimilarity="precomputed", n_init=8, n_jobs=1) 45 | z_train_uq = mds.fit(delta).embedding_ 46 | 47 | self.nn_z_uq = NearestNeighbors(n_neighbors=1) 48 | self.nn_z_uq.fit(z_train_uq[num_uq:]) 49 | 50 | _dis, _idxs = self.nn_y_uq.kneighbors(y_train) 51 | z_train_uq[_idxs[:, 0]] 52 | z_train = z_train_uq[_idxs[:, 0]] 53 | 54 | # train regressor 55 | self.rgrs = [self.base_learner(**self.params) for i in xrange(self.z_dim)] 56 | for i in xrange(self.z_dim): 57 | self.rgrs[i].fit(x_train, z_train[:, i]) 58 | 59 | def predict(self, x_test): 60 | z_test = np.zeros((x_test.shape[0],self.z_dim)) 61 | for i in xrange(self.z_dim): 62 | z_test[:, i] = self.rgrs[i].predict(x_test) 63 | 64 | _dis, _idxs = self.nn_z_uq.kneighbors(z_test) 65 | p_test = self.y_train_uq[_idxs[:, 0]] 66 | return p_test 67 | 68 | def cal_count(self, y_train): 69 | _dis, _idxs = self.nn_y_uq.kneighbors(y_train) 70 | idxs = _idxs[:, 0] 71 | uq_, uq_weight = np.unique(idxs, return_counts=True) 72 | return uq_weight 73 | 74 | 75 | -------------------------------------------------------------------------------- /models/mdsw.py: -------------------------------------------------------------------------------- 1 | """ 2 | Multi-dimensional Scaling (MDS) 3 | """ 4 | 5 | # author: Nelle Varoquaux 6 | # Licence: BSD 7 | 8 | import numpy as np 9 | import sklearn 10 | import ipdb 11 | 12 | import warnings 13 | 14 | from sklearn.base import BaseEstimator 15 | from sklearn.metrics import euclidean_distances 16 | from sklearn.utils import check_random_state, check_array, check_symmetric 17 | from sklearn.externals.joblib 
import Parallel 18 | from sklearn.externals.joblib import delayed 19 | from sklearn.isotonic import IsotonicRegression 20 | 21 | 22 | def _smacof_single_w(similarities, n_uq, uq_weight, metric=True, n_components=2, init=None, 23 | max_iter=300, verbose=0, eps=1e-3, random_state=None): 24 | """ 25 | Computes multidimensional scaling using SMACOF algorithm 26 | 27 | Parameters 28 | ---------- 29 | similarities: symmetric ndarray, shape [n * n] 30 | similarities between the points 31 | 32 | metric: boolean, optional, default: True 33 | compute metric or nonmetric SMACOF algorithm 34 | 35 | n_components: int, optional, default: 2 36 | number of dimension in which to immerse the similarities 37 | overwritten if initial array is provided. 38 | 39 | init: {None or ndarray}, optional 40 | if None, randomly chooses the initial configuration 41 | if ndarray, initialize the SMACOF algorithm with this array 42 | 43 | max_iter: int, optional, default: 300 44 | Maximum number of iterations of the SMACOF algorithm for a single run 45 | 46 | verbose: int, optional, default: 0 47 | level of verbosity 48 | 49 | eps: float, optional, default: 1e-6 50 | relative tolerance w.r.t stress to declare converge 51 | 52 | random_state: integer or numpy.RandomState, optional 53 | The generator used to initialize the centers. If an integer is 54 | given, it fixes the seed. Defaults to the global numpy random 55 | number generator. 56 | 57 | Returns 58 | ------- 59 | X: ndarray (n_samples, n_components), float 60 | coordinates of the n_samples points in a n_components-space 61 | 62 | stress_: float 63 | The final value of the stress (sum of squared distance of the 64 | disparities and the distances for all constrained points) 65 | 66 | n_iter : int 67 | Number of iterations run. 
68 | 69 | """ 70 | similarities = check_symmetric(similarities, raise_exception=True) 71 | 72 | n_samples = similarities.shape[0] 73 | random_state = check_random_state(random_state) 74 | 75 | W = np.ones((n_samples, n_samples)) 76 | W[:n_uq, :n_uq] = 0.0 77 | W[n_uq:, n_uq:] = 0.0 78 | # W[np.arange(len(W)), np.arange(len(W))] = 0.0 79 | 80 | if uq_weight is not None: 81 | W[:n_uq, n_uq:] *= uq_weight.reshape((uq_weight.shape[0] ,-1)) 82 | W[n_uq:, :n_uq] *= uq_weight.reshape((-1, uq_weight.shape[0])) 83 | 84 | V = -W 85 | V[np.arange(len(V)), np.arange(len(V))] = W.sum(axis=1) 86 | e = np.ones((n_samples, 1)) 87 | 88 | Vp = np.linalg.inv(V + np.dot(e, e.T)/n_samples) - np.dot(e, e.T)/n_samples 89 | # Vp = np.linalg.pinv(V) 90 | 91 | 92 | sim_flat = ((1 - np.tri(n_samples)) * similarities).ravel() 93 | sim_flat_w = sim_flat[sim_flat != 0] 94 | if init is None: 95 | # Randomly choose initial configuration 96 | X = random_state.rand(n_samples * n_components) 97 | X = X.reshape((n_samples, n_components)) 98 | else: 99 | # overrides the parameter p 100 | n_components = init.shape[1] 101 | if n_samples != init.shape[0]: 102 | raise ValueError("init matrix should be of shape (%d, %d)" % 103 | (n_samples, n_components)) 104 | X = init 105 | 106 | old_stress = None 107 | ir = IsotonicRegression() 108 | for it in range(max_iter): 109 | # Compute distance and monotonic regression 110 | dis = euclidean_distances(X) 111 | 112 | if metric: 113 | disparities = similarities 114 | else: 115 | dis_flat = dis.ravel() 116 | # similarities with 0 are considered as missing values 117 | dis_flat_w = dis_flat[sim_flat != 0] 118 | 119 | # Compute the disparities using a monotonic regression 120 | disparities_flat = ir.fit_transform(sim_flat_w, dis_flat_w) 121 | disparities = dis_flat.copy() 122 | disparities[sim_flat != 0] = disparities_flat 123 | disparities = disparities.reshape((n_samples, n_samples)) 124 | disparities *= np.sqrt((n_samples * (n_samples - 1) / 2) / 125 | (disparities ** 2).sum()) 126 | 127 | # Compute stress 128 | # stress = ((dis.ravel() - disparities.ravel()) ** 2).sum() / 2 129 | _stress = (W.ravel()*((dis.ravel() - disparities.ravel()) ** 2)).sum() / 2 130 | 131 | # Update X using the Guttman transform 132 | # dis[dis == 0] = 1e-5 133 | # ratio = disparities / dis 134 | # B = - ratio 135 | # B[np.arange(len(B)), np.arange(len(B))] += ratio.sum(axis=1) 136 | # X = 1. / n_samples * np.dot(B, X) 137 | # print (1. 
/ n_samples * np.dot(B, X))[:5].T
138 | 
139 |         dis[dis == 0] = 1e-5
140 |         ratio = disparities / dis
141 |         _B = - W*ratio
142 |         _B[np.arange(len(_B)), np.arange(len(_B))] += (W*ratio).sum(axis=1)
143 | 
144 |         X = np.dot(Vp, np.dot(_B, X))
145 |         # print X[:5].T
146 | 
147 |         dis = np.sqrt((X ** 2).sum(axis=1)).sum()
148 | 
149 |         if verbose >= 2:
150 |             print('it: %d, stress %s' % (it, _stress))
151 |         if old_stress is not None:
152 |             if (old_stress - _stress / dis) < eps:
153 |                 if verbose:
154 |                     print('breaking at iteration %d with stress %s' % (it,
155 |                                                                        _stress))
156 |                 break
157 |         old_stress = _stress / dis
158 | 
159 |     return X, _stress, it + 1
160 | 
161 | 
162 | def smacof_w(similarities, n_uq, uq_weight, metric=True, n_components=2, init=None, n_init=8,
163 |              n_jobs=1, max_iter=300, verbose=0, eps=1e-3, random_state=None,
164 |              return_n_iter=False):
165 |     """
166 |     Computes multidimensional scaling using the SMACOF (Scaling by Majorizing a
167 |     Complicated Function) algorithm
168 | 
169 |     The SMACOF algorithm is a multidimensional scaling algorithm: it minimizes
170 |     an objective function, the *stress*, using a majorization technique. The
171 |     Stress Majorization, also known as the Guttman Transform, guarantees a
172 |     monotone convergence of Stress, and is more powerful than traditional
173 |     techniques such as gradient descent.
174 | 
175 |     The SMACOF algorithm for metric MDS can be summarized by the following steps:
176 | 
177 |     1. Set an initial start configuration, randomly or not.
178 |     2. Compute the stress
179 |     3. Compute the Guttman Transform
180 |     4. Iterate 2 and 3 until convergence.
181 | 
182 |     The nonmetric algorithm adds a monotonic regression step before computing
183 |     the stress.
184 | 
185 |     Parameters
186 |     ----------
187 |     similarities : symmetric ndarray, shape (n_samples, n_samples)
188 |         similarities between the points
189 | 
190 |     metric : boolean, optional, default: True
191 |         compute metric or nonmetric SMACOF algorithm
192 | 
193 |     n_components : int, optional, default: 2
194 |         number of dimensions in which to immerse the similarities
195 |         overridden if initial array is provided.
196 | 
197 |     init : {None or ndarray of shape (n_samples, n_components)}, optional
198 |         if None, randomly chooses the initial configuration
199 |         if ndarray, initialize the SMACOF algorithm with this array
200 | 
201 |     n_init : int, optional, default: 8
202 |         Number of times the smacof_w algorithm will be run with different
203 |         initialisations. The final results will be the best output of the
204 |         n_init consecutive runs in terms of stress.
205 | 
206 |     n_jobs : int, optional, default: 1
207 | 
208 |         The number of jobs to use for the computation. This works by breaking
209 |         down the pairwise matrix into n_jobs even slices and computing them in
210 |         parallel.
211 | 
212 |         If -1 all CPUs are used. If 1 is given, no parallel computing code is
213 |         used at all, which is useful for debugging. For n_jobs below -1,
214 |         (n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
215 |         are used.
216 | 
217 |     max_iter : int, optional, default: 300
218 |         Maximum number of iterations of the SMACOF algorithm for a single run
219 | 
220 |     verbose : int, optional, default: 0
221 |         level of verbosity
222 | 
223 |     eps : float, optional, default: 1e-3
224 |         relative tolerance w.r.t stress to declare convergence
225 | 
226 |     random_state : integer or numpy.RandomState, optional
227 |         The generator used to initialize the centers. If an integer is
228 |         given, it fixes the seed.
Defaults to the global numpy random 229 | number generator. 230 | 231 | return_n_iter : bool 232 | Whether or not to return the number of iterations. 233 | 234 | Returns 235 | ------- 236 | X : ndarray (n_samples,n_components) 237 | Coordinates of the n_samples points in a n_components-space 238 | 239 | stress : float 240 | The final value of the stress (sum of squared distance of the 241 | disparities and the distances for all constrained points) 242 | 243 | n_iter : int 244 | The number of iterations corresponding to the best stress. 245 | Returned only if `return_n_iter` is set to True. 246 | 247 | Notes 248 | ----- 249 | "Modern Multidimensional Scaling - Theory and Applications" Borg, I.; 250 | Groenen P. Springer Series in Statistics (1997) 251 | 252 | "Nonmetric multidimensional scaling: a numerical method" Kruskal, J. 253 | Psychometrika, 29 (1964) 254 | 255 | "Multidimensional scaling by optimizing goodness of fit to a nonmetric 256 | hypothesis" Kruskal, J. Psychometrika, 29, (1964) 257 | """ 258 | 259 | similarities = check_array(similarities) 260 | random_state = check_random_state(random_state) 261 | 262 | if hasattr(init, '__array__'): 263 | init = np.asarray(init).copy() 264 | if not n_init == 1: 265 | warnings.warn( 266 | 'Explicit initial positions passed: ' 267 | 'performing only one init of the MDS instead of %d' 268 | % n_init) 269 | n_init = 1 270 | 271 | best_pos, best_stress = None, None 272 | 273 | if n_jobs == 1: 274 | for it in range(n_init): 275 | pos, stress, n_iter_ = _smacof_single_w( 276 | similarities, n_uq, uq_weight, metric=metric, 277 | n_components=n_components, init=init, 278 | max_iter=max_iter, verbose=verbose, 279 | eps=eps, random_state=random_state) 280 | if best_stress is None or stress < best_stress: 281 | best_stress = stress 282 | best_pos = pos.copy() 283 | best_iter = n_iter_ 284 | else: 285 | seeds = random_state.randint(np.iinfo(np.int32).max, size=n_init) 286 | results = Parallel(n_jobs=n_jobs, verbose=max(verbose - 1, 0))( 287 | delayed(_smacof_single_w)( 288 | similarities, n_uq, uq_weight, metric=metric, n_components=n_components, 289 | init=init, max_iter=max_iter, verbose=verbose, eps=eps, 290 | random_state=seed) 291 | for seed in seeds) 292 | positions, stress, n_iters = zip(*results) 293 | best = np.argmin(stress) 294 | best_stress = stress[best] 295 | best_pos = positions[best] 296 | best_iter = n_iters[best] 297 | 298 | if return_n_iter: 299 | return best_pos, best_stress, best_iter 300 | else: 301 | return best_pos, best_stress 302 | 303 | 304 | class MDSW(BaseEstimator): 305 | """Multidimensional scaling 306 | 307 | Parameters 308 | ---------- 309 | metric : boolean, optional, default: True 310 | compute metric or nonmetric SMACOF (Scaling by Majorizing a 311 | Complicated Function) algorithm 312 | 313 | n_components : int, optional, default: 2 314 | number of dimension in which to immerse the similarities 315 | overridden if initial array is provided. 316 | 317 | n_init : int, optional, default: 4 318 | Number of time the smacof_p algorithm will be run with different 319 | initialisation. The final results will be the best output of the 320 | n_init consecutive runs in terms of stress. 
321 | 322 | max_iter : int, optional, default: 300 323 | Maximum number of iterations of the SMACOF algorithm for a single run 324 | 325 | verbose : int, optional, default: 0 326 | level of verbosity 327 | 328 | eps : float, optional, default: 1e-6 329 | relative tolerance w.r.t stress to declare converge 330 | 331 | n_jobs : int, optional, default: 1 332 | The number of jobs to use for the computation. This works by breaking 333 | down the pairwise matrix into n_jobs even slices and computing them in 334 | parallel. 335 | 336 | If -1 all CPUs are used. If 1 is given, no parallel computing code is 337 | used at all, which is useful for debugging. For n_jobs below -1, 338 | (n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one 339 | are used. 340 | 341 | random_state : integer or numpy.RandomState, optional 342 | The generator used to initialize the centers. If an integer is 343 | given, it fixes the seed. Defaults to the global numpy random 344 | number generator. 345 | 346 | dissimilarity : string 347 | Which dissimilarity measure to use. 348 | Supported are 'euclidean' and 'precomputed'. 349 | 350 | 351 | Attributes 352 | ---------- 353 | embedding_ : array-like, shape [n_components, n_samples] 354 | Stores the position of the dataset in the embedding space 355 | 356 | stress_ : float 357 | The final value of the stress (sum of squared distance of the 358 | disparities and the distances for all constrained points) 359 | 360 | 361 | References 362 | ---------- 363 | "Modern Multidimensional Scaling - Theory and Applications" Borg, I.; 364 | Groenen P. Springer Series in Statistics (1997) 365 | 366 | "Nonmetric multidimensional scaling: a numerical method" Kruskal, J. 367 | Psychometrika, 29 (1964) 368 | 369 | "Multidimensional scaling by optimizing goodness of fit to a nonmetric 370 | hypothesis" Kruskal, J. Psychometrika, 29, (1964) 371 | 372 | """ 373 | def __init__(self, n_components=2, n_uq=1, uq_weight=None, metric=True, n_init=4, 374 | max_iter=300, verbose=0, eps=1e-3, n_jobs=1, 375 | random_state=None, dissimilarity="euclidean"): 376 | self.n_components = n_components 377 | self.n_uq = n_uq 378 | self.uq_weight = uq_weight 379 | self.dissimilarity = dissimilarity 380 | self.metric = metric 381 | self.n_init = n_init 382 | self.max_iter = max_iter 383 | self.eps = eps 384 | self.verbose = verbose 385 | self.n_jobs = n_jobs 386 | self.random_state = random_state 387 | 388 | @property 389 | def _pairwise(self): 390 | return self.kernel == "precomputed" 391 | 392 | def fit(self, X, y=None, init=None): 393 | """ 394 | Computes the position of the points in the embedding space 395 | 396 | Parameters 397 | ---------- 398 | X : array, shape=[n_samples, n_features], or [n_samples, n_samples] \ 399 | if dissimilarity='precomputed' 400 | Input data. 401 | 402 | init : {None or ndarray, shape (n_samples,)}, optional 403 | If None, randomly chooses the initial configuration 404 | if ndarray, initialize the SMACOF algorithm with this array. 405 | """ 406 | self.fit_transform(X, init=init) 407 | return self 408 | 409 | def fit_transform(self, X, y=None, init=None): 410 | """ 411 | Fit the data from X, and returns the embedded coordinates 412 | 413 | Parameters 414 | ---------- 415 | X : array, shape=[n_samples, n_features], or [n_samples, n_samples] \ 416 | if dissimilarity='precomputed' 417 | Input data. 
418 | 
419 |         init : {None or ndarray, shape (n_samples,)}, optional
420 |             If None, randomly chooses the initial configuration
421 |             if ndarray, initialize the SMACOF algorithm with this array.
422 | 
423 |         """
424 |         X = check_array(X)
425 |         if X.shape[0] == X.shape[1] and self.dissimilarity != "precomputed":
426 |             warnings.warn("The MDS API has changed. ``fit`` now constructs a"
427 |                           " dissimilarity matrix from data. To use a custom "
428 |                           "dissimilarity matrix, set "
429 |                           "``dissimilarity=precomputed``.")
430 | 
431 |         if self.dissimilarity == "precomputed":
432 |             self.dissimilarity_matrix_ = X
433 |         elif self.dissimilarity == "euclidean":
434 |             self.dissimilarity_matrix_ = euclidean_distances(X)
435 |         else:
436 |             raise ValueError("Proximity must be 'precomputed' or 'euclidean'."
437 |                              " Got %s instead" % str(self.dissimilarity))
438 | 
439 |         self.embedding_, self.stress_, self.n_iter_ = smacof_w(
440 |             self.dissimilarity_matrix_, self.n_uq, self.uq_weight, metric=self.metric,
441 |             n_components=self.n_components, init=init, n_init=self.n_init,
442 |             n_jobs=self.n_jobs, max_iter=self.max_iter, verbose=self.verbose,
443 |             eps=self.eps, random_state=self.random_state,
444 |             return_n_iter=True)
445 | 
446 |         return self.embedding_
447 | 
--------------------------------------------------------------------------------
/models/pcc.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from criteria import rank_loss
3 | 
4 | class PCC:
5 |     def __init__(self, cost, base_learner, params={}, n_sample=100):
6 |         self.cost = cost
7 |         self.base_learner = base_learner
8 |         self.params = params
9 |         self.n_sample = n_sample
10 | 
11 |     def fit(self, x_train, y_train):
12 |         self.K = y_train.shape[1]
13 |         self.clfs = [self.base_learner(**self.params) for i in xrange(self.K)]
14 |         for i in xrange(self.K):
15 |             self.clfs[i].fit(np.concatenate((x_train, y_train[:, :i]), axis=1), y_train[:, i])
16 | 
17 |     def predict(self, x_test):
18 |         r_test = self.predict_prob(x_test)
19 |         p_test = np.zeros((x_test.shape[0], self.K), dtype=int)
20 |         for i in xrange(x_test.shape[0]):
21 |             p_test[i, :] = self.predict_one(x_test[i, :], r_test[i, :])
22 |         return p_test
23 | 
24 |     def predict_prob(self, x_test):
25 |         r_test = np.zeros((x_test.shape[0], self.K))
26 |         for i in xrange(self.K):
27 |             r_test[:, i] = 1.0 - self.clfs[i].predict_proba(np.concatenate((x_test, (r_test[:, :i]>0.5).astype(int)), axis=1))[:, 0]
28 |         return r_test
29 | 
30 |     def predict_one(self, x, pb):
31 |         if self.cost == 'ham':
32 |             return (pb>0.5).astype(int)
33 |         prob = np.repeat(pb, self.n_sample).reshape((pb.shape[0], self.n_sample)).T
34 |         y_sample = (np.random.random((self.n_sample, self.K)) < prob).astype(int)
35 |         if self.cost == 'rank':
36 |             thr = 0.5
37 |             pred = (pb>thr).astype(int)
38 |             p_sample = np.repeat(pred, self.n_sample).reshape((pred.shape[0], self.n_sample)).T
39 |             score = rank_loss(y_sample, p_sample).mean()
40 |             for p in pb:
41 |                 pred = (pb>p).astype(int)
42 |                 p_sample = np.repeat(pred, self.n_sample).reshape((pred.shape[0], self.n_sample)).T
43 |                 score_t = rank_loss(y_sample, p_sample).mean()
44 |                 if score_t < score:
45 |                     score = score_t
46 |                     thr = p
47 |             return (pb>thr).astype(int)
48 |         elif self.cost == 'f1':
49 |             s_idxs = y_sample.sum(axis=1)
50 |             P = np.zeros((self.K, self.K))
51 |             for i in xrange(self.K):
52 |                 P[i, :] = y_sample[s_idxs==(i+1), :].sum(axis=0)*1.0/self.n_sample
53 | 
54 |             W = 1.0 / (np.cumsum(np.ones((self.K, self.K)), axis=1) + np.cumsum(np.ones((self.K, self.K)), axis=0))
55 |             F = P*W
56 |             idxs = (-F).argsort(axis=1)
57 |             H = np.zeros((self.K,
self.K), dtype=int) 58 | for i in xrange(self.K): 59 | H[i, idxs[i, :i+1]] = 1 60 | scores = (F*H).sum(axis=1) 61 | pred = H[scores.argmax(), :] 62 | # if (s_idxs==0).mean() > 2*scores.max(): 63 | # pred = np.zeros((self.K, ), dtype=int) 64 | return pred 65 | 66 | -------------------------------------------------------------------------------- /scene/data.y: -------------------------------------------------------------------------------- 1 | 1 0 0 0 1 0 2 | 1 0 0 0 0 1 3 | 1 0 0 0 0 0 4 | 1 0 0 0 0 0 5 | 1 0 0 0 0 0 6 | 1 0 0 0 0 0 7 | 1 0 0 0 0 0 8 | 1 0 0 0 1 0 9 | 1 0 0 0 0 0 10 | 1 0 0 0 0 0 11 | 1 0 0 0 0 0 12 | 1 0 0 0 1 0 13 | 1 0 0 0 0 0 14 | 1 0 0 0 0 0 15 | 1 0 0 0 0 0 16 | 1 0 0 0 1 0 17 | 1 0 0 0 1 0 18 | 1 0 0 0 0 0 19 | 1 0 0 0 0 0 20 | 1 0 0 0 0 0 21 | 1 0 0 0 0 0 22 | 1 0 0 0 0 0 23 | 1 0 0 0 0 0 24 | 1 0 0 0 0 0 25 | 1 0 0 0 0 0 26 | 1 0 0 0 0 0 27 | 1 0 0 0 0 0 28 | 1 0 0 0 0 0 29 | 1 0 0 0 0 0 30 | 1 0 0 0 0 0 31 | 1 0 0 0 0 0 32 | 1 0 0 0 0 1 33 | 1 0 0 0 0 0 34 | 1 0 0 0 0 0 35 | 1 0 0 0 0 0 36 | 1 0 0 0 0 0 37 | 1 0 0 0 0 0 38 | 1 0 0 0 0 0 39 | 1 0 0 0 1 0 40 | 1 0 0 0 0 0 41 | 1 0 0 0 0 0 42 | 1 0 0 0 0 0 43 | 1 0 0 0 0 0 44 | 1 0 0 0 0 0 45 | 1 0 0 0 0 0 46 | 1 0 0 0 0 0 47 | 1 0 0 0 0 0 48 | 1 0 0 0 0 0 49 | 1 0 0 0 0 0 50 | 1 0 0 0 0 0 51 | 1 0 0 0 0 0 52 | 1 0 0 0 0 0 53 | 1 0 0 0 0 0 54 | 1 0 0 0 0 0 55 | 1 0 0 0 0 0 56 | 1 0 0 0 0 0 57 | 1 0 0 0 0 0 58 | 1 0 0 0 0 0 59 | 1 0 0 0 0 0 60 | 1 0 0 0 0 0 61 | 1 0 0 0 0 0 62 | 1 0 0 0 1 0 63 | 1 0 0 0 0 0 64 | 1 0 0 0 0 0 65 | 1 0 0 0 0 0 66 | 1 0 0 0 0 0 67 | 1 0 0 0 0 0 68 | 1 0 0 0 0 0 69 | 1 0 0 0 0 0 70 | 1 0 0 0 1 0 71 | 1 0 0 0 0 0 72 | 1 0 0 0 0 0 73 | 1 0 0 0 0 0 74 | 1 0 0 0 0 0 75 | 1 0 0 0 0 0 76 | 1 0 0 0 0 0 77 | 1 0 0 0 0 0 78 | 1 0 0 0 0 0 79 | 1 0 0 0 0 0 80 | 1 0 0 0 0 0 81 | 1 0 0 0 0 0 82 | 1 0 0 0 0 0 83 | 1 0 0 0 0 0 84 | 1 0 0 0 0 0 85 | 1 0 0 0 0 0 86 | 1 0 0 0 0 0 87 | 1 0 0 0 0 0 88 | 1 0 0 0 0 0 89 | 1 0 0 0 0 0 90 | 1 0 0 0 1 0 91 | 1 0 0 0 0 0 92 | 1 0 0 0 0 0 93 | 1 0 0 0 0 0 94 | 1 0 0 0 0 0 95 | 1 0 0 0 0 0 96 | 1 0 0 0 0 0 97 | 1 0 0 0 0 0 98 | 1 0 0 0 0 0 99 | 1 0 0 0 0 0 100 | 1 0 0 0 0 0 101 | 1 0 0 0 1 0 102 | 1 0 0 0 0 0 103 | 1 0 0 0 0 0 104 | 1 0 0 0 0 0 105 | 1 0 0 0 0 0 106 | 1 0 0 0 0 0 107 | 1 0 0 0 0 0 108 | 1 0 0 0 0 0 109 | 1 0 0 0 0 0 110 | 1 0 0 0 1 0 111 | 1 0 0 0 0 0 112 | 1 0 0 0 0 0 113 | 1 0 0 0 0 0 114 | 1 0 0 0 0 0 115 | 1 0 0 0 0 0 116 | 1 0 0 0 0 0 117 | 1 0 0 0 0 0 118 | 1 0 0 0 0 0 119 | 1 0 0 0 0 0 120 | 1 0 0 0 0 0 121 | 1 0 0 0 0 1 122 | 1 0 0 0 0 0 123 | 1 0 0 0 0 0 124 | 1 0 0 0 0 0 125 | 1 0 0 0 0 0 126 | 1 0 0 0 1 0 127 | 1 0 0 0 0 0 128 | 1 0 0 0 0 0 129 | 1 0 0 0 0 0 130 | 1 0 0 0 0 0 131 | 1 0 0 0 0 0 132 | 1 0 0 0 0 0 133 | 1 0 0 0 0 1 134 | 1 0 0 0 0 0 135 | 1 0 0 0 0 0 136 | 1 0 0 0 0 0 137 | 1 0 0 0 0 0 138 | 1 0 0 0 0 0 139 | 1 0 0 0 0 1 140 | 1 0 0 0 0 0 141 | 1 0 0 0 0 0 142 | 1 0 0 0 0 0 143 | 1 0 0 0 0 0 144 | 1 0 0 0 0 0 145 | 1 0 0 0 0 0 146 | 1 0 0 0 0 0 147 | 1 0 0 0 0 1 148 | 1 0 0 0 0 0 149 | 1 0 0 0 0 0 150 | 1 0 0 0 1 0 151 | 1 0 0 0 0 0 152 | 1 0 0 0 0 0 153 | 1 0 0 0 0 0 154 | 1 0 0 0 0 0 155 | 1 0 0 0 0 0 156 | 1 0 0 0 0 0 157 | 1 0 0 0 1 0 158 | 1 0 0 0 0 0 159 | 1 0 0 0 0 0 160 | 1 0 0 0 0 1 161 | 1 0 0 0 0 1 162 | 1 0 0 0 1 0 163 | 1 0 0 0 0 0 164 | 1 0 0 0 0 0 165 | 1 0 0 0 0 0 166 | 1 0 0 0 0 0 167 | 1 0 0 0 0 0 168 | 1 0 0 0 0 0 169 | 1 0 0 0 0 0 170 | 1 0 0 0 0 0 171 | 1 0 0 0 0 0 172 | 1 0 0 0 0 0 173 | 1 0 0 0 0 0 174 | 1 0 0 0 1 0 175 | 1 0 0 0 0 0 176 | 1 0 0 0 0 0 177 | 1 0 0 0 0 0 178 | 1 0 0 0 0 0 179 | 1 0 0 0 0 
0 180 | 1 0 0 0 0 0 181 | 1 0 0 0 0 0 182 | 1 0 0 0 0 0 183 | 1 0 0 0 0 0 184 | 1 0 0 0 0 0 185 | 1 0 0 0 0 0 186 | 1 0 0 0 0 0 187 | 1 0 0 0 0 0 188 | 1 0 0 0 0 0 189 | 1 0 0 0 0 0 190 | 1 0 0 0 0 0 191 | 1 0 0 0 0 0 192 | 1 0 0 0 0 1 193 | 1 0 0 0 0 1 194 | 1 0 0 0 1 0 195 | 1 0 0 0 0 0 196 | 1 0 0 0 0 0 197 | 1 0 0 0 0 1 198 | 1 0 0 0 1 0 199 | 1 0 0 0 0 0 200 | 1 0 0 0 0 0 201 | 1 0 0 0 0 1 202 | 1 0 0 0 0 0 203 | 1 0 0 0 0 0 204 | 1 0 0 0 1 0 205 | 1 0 0 0 0 0 206 | 1 0 0 0 0 0 207 | 1 0 0 0 0 0 208 | 1 0 0 0 0 0 209 | 1 0 0 0 0 0 210 | 1 0 0 0 0 0 211 | 1 0 0 0 0 0 212 | 1 0 0 0 0 0 213 | 1 0 0 0 0 0 214 | 1 0 0 0 0 0 215 | 1 0 0 0 0 0 216 | 1 0 0 0 0 0 217 | 1 0 0 0 0 0 218 | 1 0 0 0 1 0 219 | 1 0 0 0 0 0 220 | 1 0 0 0 1 0 221 | 1 0 0 0 0 0 222 | 1 0 0 0 0 0 223 | 1 0 0 0 0 0 224 | 1 0 0 0 0 0 225 | 1 0 0 0 0 0 226 | 1 0 0 0 0 0 227 | 1 0 0 0 0 0 228 | 0 1 0 0 0 0 229 | 0 1 0 0 0 0 230 | 0 1 0 0 0 0 231 | 0 1 0 0 0 0 232 | 0 1 0 0 0 0 233 | 0 1 0 0 0 0 234 | 0 1 0 0 0 0 235 | 0 1 0 0 0 0 236 | 0 1 0 0 0 0 237 | 0 1 0 0 0 0 238 | 0 1 0 0 0 0 239 | 0 1 0 0 0 0 240 | 0 1 0 0 0 0 241 | 0 1 0 0 0 0 242 | 0 1 0 0 0 0 243 | 0 1 0 0 0 0 244 | 0 1 0 0 0 0 245 | 0 1 0 0 0 0 246 | 0 1 0 0 0 0 247 | 0 1 0 0 0 0 248 | 0 1 0 0 0 0 249 | 0 1 0 0 0 0 250 | 0 1 0 0 0 0 251 | 0 1 0 0 0 0 252 | 0 1 0 0 0 0 253 | 0 1 0 0 0 0 254 | 0 1 0 0 0 0 255 | 0 1 0 0 0 0 256 | 0 1 0 0 0 0 257 | 0 1 0 0 0 0 258 | 0 1 0 0 0 0 259 | 0 1 0 0 0 0 260 | 0 1 0 0 0 0 261 | 0 1 0 0 0 0 262 | 0 1 0 0 0 0 263 | 0 1 0 0 0 0 264 | 0 1 0 0 0 0 265 | 0 1 0 0 0 0 266 | 0 1 0 0 0 0 267 | 0 1 0 0 0 0 268 | 0 1 0 0 0 0 269 | 0 1 0 0 0 0 270 | 0 1 0 0 0 0 271 | 0 1 0 0 0 0 272 | 0 1 0 0 0 0 273 | 0 1 0 0 0 0 274 | 0 1 0 0 0 0 275 | 0 1 0 0 0 0 276 | 0 1 0 0 0 0 277 | 0 1 0 0 0 0 278 | 0 1 0 0 0 0 279 | 0 1 0 0 0 0 280 | 0 1 0 0 0 0 281 | 0 1 0 0 0 0 282 | 0 1 0 0 0 0 283 | 0 1 0 0 0 0 284 | 0 1 0 0 0 0 285 | 0 1 0 0 0 0 286 | 0 1 0 0 0 0 287 | 0 1 0 0 0 0 288 | 0 1 0 0 0 0 289 | 0 1 0 0 0 0 290 | 0 1 0 0 0 0 291 | 0 1 0 0 0 0 292 | 0 1 0 0 0 0 293 | 0 1 0 0 0 0 294 | 0 1 0 0 0 0 295 | 0 1 0 0 0 0 296 | 0 1 0 0 0 0 297 | 0 1 0 0 0 0 298 | 0 1 0 0 0 0 299 | 0 1 0 0 0 0 300 | 0 1 0 0 0 0 301 | 0 1 0 0 0 0 302 | 0 1 0 0 0 0 303 | 0 1 0 0 0 0 304 | 0 1 0 0 0 0 305 | 0 1 0 0 0 0 306 | 0 1 0 0 0 0 307 | 0 1 0 0 0 0 308 | 0 1 0 0 0 0 309 | 0 1 0 0 0 0 310 | 0 1 0 0 0 0 311 | 0 1 0 0 0 0 312 | 0 1 0 0 0 0 313 | 0 1 0 0 0 0 314 | 0 1 0 0 0 0 315 | 0 1 0 0 0 0 316 | 0 1 0 0 0 0 317 | 0 1 0 0 0 0 318 | 0 1 0 0 0 0 319 | 0 1 0 0 0 0 320 | 0 1 0 0 0 0 321 | 0 1 0 0 0 0 322 | 0 1 0 0 0 0 323 | 0 1 0 0 0 0 324 | 0 1 0 0 0 0 325 | 0 1 0 0 0 0 326 | 0 1 0 0 0 0 327 | 0 1 0 0 0 0 328 | 0 1 0 0 0 0 329 | 0 1 0 0 0 0 330 | 0 1 0 0 0 0 331 | 0 1 0 0 0 0 332 | 0 1 0 0 0 0 333 | 0 1 0 0 0 0 334 | 0 1 0 0 0 0 335 | 0 1 0 0 0 0 336 | 0 1 0 0 0 0 337 | 0 1 0 0 0 0 338 | 0 1 0 0 0 0 339 | 0 1 0 0 0 0 340 | 0 1 0 0 0 0 341 | 0 1 0 0 0 0 342 | 0 1 0 0 0 0 343 | 0 1 0 0 0 0 344 | 0 1 0 0 0 0 345 | 0 1 0 0 0 0 346 | 0 1 0 0 0 0 347 | 0 1 0 0 0 0 348 | 0 1 0 0 0 0 349 | 0 1 0 0 0 0 350 | 0 1 0 0 0 0 351 | 0 1 0 0 0 0 352 | 0 1 0 0 0 0 353 | 0 1 0 0 0 0 354 | 0 1 0 0 0 0 355 | 0 1 0 0 0 0 356 | 0 1 0 0 0 0 357 | 0 1 0 0 0 0 358 | 0 1 0 0 0 0 359 | 0 1 0 0 0 0 360 | 0 1 0 0 0 0 361 | 0 1 0 0 0 0 362 | 0 1 0 0 0 0 363 | 0 1 0 0 0 0 364 | 0 1 0 0 0 0 365 | 0 1 0 0 0 0 366 | 0 1 0 0 0 0 367 | 0 1 0 0 0 0 368 | 0 1 0 0 0 0 369 | 0 1 0 0 0 0 370 | 0 1 0 0 0 0 371 | 0 1 0 0 0 0 372 | 0 1 0 0 0 0 373 | 0 1 0 0 0 0 374 | 0 1 0 0 0 0 375 | 0 1 0 0 0 0 376 | 0 1 0 0 0 0 377 | 
0 1 0 0 0 0 378 | 0 1 0 0 0 0 379 | 0 1 0 0 0 0 380 | 0 1 0 0 0 0 381 | 0 1 0 0 0 0 382 | 0 1 0 0 0 0 383 | 0 1 0 0 0 0 384 | 0 1 0 0 0 0 385 | 0 1 0 0 0 0 386 | 0 1 0 0 0 0 387 | 0 1 0 0 0 0 388 | 0 1 0 0 0 0 389 | 0 1 0 0 0 0 390 | 0 1 0 0 0 0 391 | 0 1 0 0 0 0 392 | 0 1 0 0 0 0 393 | 0 0 1 0 0 0 394 | 0 0 1 1 0 0 395 | 0 0 1 0 0 0 396 | 0 0 1 1 0 0 397 | 0 0 1 0 0 0 398 | 0 0 1 0 0 0 399 | 0 0 1 0 0 0 400 | 0 0 1 1 1 0 401 | 0 0 1 0 0 0 402 | 0 0 1 1 0 0 403 | 0 0 1 0 0 0 404 | 0 0 1 0 1 0 405 | 0 0 1 0 0 0 406 | 0 0 1 0 0 0 407 | 0 0 1 0 0 0 408 | 0 0 1 0 0 0 409 | 0 0 1 0 0 0 410 | 0 0 1 0 0 0 411 | 0 0 1 1 0 0 412 | 0 0 1 1 0 0 413 | 0 0 1 0 0 0 414 | 0 0 1 0 0 0 415 | 0 0 1 0 0 0 416 | 0 0 1 0 0 0 417 | 0 0 1 0 0 0 418 | 0 0 1 0 0 0 419 | 0 0 1 0 0 0 420 | 0 0 1 0 0 0 421 | 0 0 1 1 0 0 422 | 0 0 1 0 1 0 423 | 0 0 1 0 0 0 424 | 0 0 1 0 1 0 425 | 0 0 1 0 0 0 426 | 0 0 1 0 0 0 427 | 0 0 1 0 0 0 428 | 0 0 1 0 0 0 429 | 0 0 1 0 0 0 430 | 0 0 1 0 0 0 431 | 0 0 1 0 0 0 432 | 0 0 1 0 0 0 433 | 0 0 1 0 0 0 434 | 0 0 1 0 0 0 435 | 0 0 1 0 0 0 436 | 0 0 1 0 0 0 437 | 0 0 1 0 0 0 438 | 0 0 1 0 0 0 439 | 0 0 1 0 0 0 440 | 0 0 1 0 0 0 441 | 0 0 1 0 0 0 442 | 0 0 1 0 0 0 443 | 0 0 1 0 0 0 444 | 0 0 1 0 0 0 445 | 0 0 1 0 0 0 446 | 0 0 1 0 0 0 447 | 0 0 1 0 0 0 448 | 0 0 1 0 0 0 449 | 0 0 1 0 0 0 450 | 0 0 1 0 0 0 451 | 0 0 1 0 0 0 452 | 0 0 1 0 0 0 453 | 0 0 1 0 0 0 454 | 0 0 1 0 0 0 455 | 0 0 1 0 0 0 456 | 0 0 1 0 0 0 457 | 0 0 1 0 0 0 458 | 0 0 1 0 0 0 459 | 0 0 1 0 0 0 460 | 0 0 1 0 0 0 461 | 0 0 1 0 0 0 462 | 0 0 1 0 0 0 463 | 0 0 1 0 0 0 464 | 0 0 1 0 0 0 465 | 0 0 1 0 0 0 466 | 0 0 1 0 0 0 467 | 0 0 1 0 0 0 468 | 0 0 1 0 0 0 469 | 0 0 1 0 0 0 470 | 0 0 1 0 0 0 471 | 0 0 1 0 0 0 472 | 0 0 1 0 0 0 473 | 0 0 1 0 0 0 474 | 0 0 1 0 0 0 475 | 0 0 1 0 0 0 476 | 0 0 1 0 0 0 477 | 0 0 1 0 0 0 478 | 0 0 1 0 0 0 479 | 0 0 1 0 0 0 480 | 0 0 1 0 0 0 481 | 0 0 1 0 0 0 482 | 0 0 1 0 0 0 483 | 0 0 1 0 0 0 484 | 0 0 1 0 0 0 485 | 0 0 1 0 0 0 486 | 0 0 1 0 0 0 487 | 0 0 1 0 0 0 488 | 0 0 1 0 0 0 489 | 0 0 1 0 0 0 490 | 0 0 1 0 0 0 491 | 0 0 1 0 0 0 492 | 0 0 1 0 0 0 493 | 0 0 1 0 0 0 494 | 0 0 1 0 0 0 495 | 0 0 1 0 0 0 496 | 0 0 1 0 0 0 497 | 0 0 1 0 0 0 498 | 0 0 1 0 0 0 499 | 0 0 1 0 0 0 500 | 0 0 1 0 0 0 501 | 0 0 1 0 0 0 502 | 0 0 1 0 0 0 503 | 0 0 1 0 0 0 504 | 0 0 1 0 0 0 505 | 0 0 1 0 0 0 506 | 0 0 1 0 0 0 507 | 0 0 1 0 0 0 508 | 0 0 1 0 0 0 509 | 0 0 1 0 0 0 510 | 0 0 1 0 0 0 511 | 0 0 1 0 0 0 512 | 0 0 1 0 0 0 513 | 0 0 1 0 0 0 514 | 0 0 1 0 0 0 515 | 0 0 1 0 0 0 516 | 0 0 1 0 0 0 517 | 0 0 1 0 0 0 518 | 0 0 1 0 0 0 519 | 0 0 1 0 0 0 520 | 0 0 1 0 0 0 521 | 0 0 1 0 0 0 522 | 0 0 1 0 0 0 523 | 0 0 1 0 0 0 524 | 0 0 1 1 0 0 525 | 0 0 1 0 0 0 526 | 0 0 1 0 0 0 527 | 0 0 1 0 0 0 528 | 0 0 1 0 0 0 529 | 0 0 1 0 0 0 530 | 0 0 1 0 0 0 531 | 0 0 1 0 0 0 532 | 0 0 1 0 0 0 533 | 0 0 1 0 0 0 534 | 0 0 1 0 0 0 535 | 0 0 1 0 0 0 536 | 0 0 1 0 0 0 537 | 0 0 1 0 0 0 538 | 0 0 1 0 0 0 539 | 0 0 1 0 0 0 540 | 0 0 1 0 0 0 541 | 0 0 1 0 0 0 542 | 0 0 1 0 0 0 543 | 0 0 1 0 0 0 544 | 0 0 1 0 0 0 545 | 0 0 1 0 1 0 546 | 0 0 1 0 1 0 547 | 0 0 1 0 0 0 548 | 0 0 1 0 0 0 549 | 0 0 1 0 0 0 550 | 0 0 1 0 0 0 551 | 0 0 1 0 0 0 552 | 0 0 1 0 0 0 553 | 0 0 1 0 0 0 554 | 0 0 1 0 0 0 555 | 0 0 1 0 0 0 556 | 0 0 1 0 0 0 557 | 0 0 1 0 0 0 558 | 0 0 1 0 0 0 559 | 0 0 1 0 0 0 560 | 0 0 1 0 0 0 561 | 0 0 1 0 0 0 562 | 0 0 1 0 0 0 563 | 0 0 1 0 0 0 564 | 0 0 1 0 0 0 565 | 0 0 1 0 0 0 566 | 0 0 1 0 0 0 567 | 0 0 1 0 0 0 568 | 0 0 1 0 0 0 569 | 0 0 1 0 0 0 570 | 0 0 1 0 0 0 571 | 0 0 1 0 0 0 572 | 0 0 1 0 0 0 573 | 0 0 1 0 0 0 574 | 0 0 1 0 
0 0 575 | 0 0 1 0 0 0 576 | 0 0 1 0 0 0 577 | 0 0 1 0 0 0 578 | 0 0 1 0 0 0 579 | 0 0 1 0 0 0 580 | 0 0 1 0 0 0 581 | 0 0 1 0 0 0 582 | 0 0 1 0 0 0 583 | 0 0 1 0 0 0 584 | 0 0 1 0 0 0 585 | 0 0 1 0 0 0 586 | 0 0 1 0 0 0 587 | 0 0 1 0 0 0 588 | 0 0 1 0 0 0 589 | 0 0 1 0 0 0 590 | 0 0 0 1 1 0 591 | 0 0 0 1 0 0 592 | 0 0 0 1 1 0 593 | 0 0 0 1 0 0 594 | 0 0 0 1 0 0 595 | 0 0 0 1 0 0 596 | 0 0 0 1 0 0 597 | 0 0 0 1 0 0 598 | 0 0 0 1 0 0 599 | 0 0 0 1 0 0 600 | 0 0 0 1 0 0 601 | 0 0 0 1 0 0 602 | 0 0 0 1 0 0 603 | 0 0 0 1 0 0 604 | 0 0 0 1 0 0 605 | 0 0 0 1 0 0 606 | 0 0 0 1 0 0 607 | 0 0 0 1 0 0 608 | 0 0 0 0 1 0 609 | 0 0 0 1 0 0 610 | 0 0 0 1 0 0 611 | 0 0 0 1 0 0 612 | 0 0 0 1 0 0 613 | 0 0 0 1 1 0 614 | 0 0 0 1 0 0 615 | 0 0 0 1 0 0 616 | 0 0 0 1 0 0 617 | 0 0 0 1 0 0 618 | 0 0 0 1 0 0 619 | 0 0 0 1 0 0 620 | 0 0 0 1 0 0 621 | 0 0 0 1 0 0 622 | 0 0 0 1 0 0 623 | 0 0 0 1 0 0 624 | 0 0 0 1 0 0 625 | 0 0 0 1 0 0 626 | 0 0 0 1 0 0 627 | 0 0 0 1 0 0 628 | 0 0 0 1 0 0 629 | 0 0 0 1 0 0 630 | 0 0 0 1 0 0 631 | 0 0 0 1 0 0 632 | 0 0 0 1 0 0 633 | 0 0 0 1 0 0 634 | 0 0 0 1 0 0 635 | 0 0 0 1 0 0 636 | 0 0 0 1 0 0 637 | 0 0 0 1 0 0 638 | 0 0 0 1 0 0 639 | 0 0 0 1 0 0 640 | 0 0 0 1 0 0 641 | 0 0 0 1 0 0 642 | 0 0 0 1 0 0 643 | 0 0 0 1 0 0 644 | 0 0 0 1 1 0 645 | 0 0 0 1 0 0 646 | 0 0 0 1 0 0 647 | 0 0 0 1 0 0 648 | 0 0 0 1 1 0 649 | 0 0 0 1 0 0 650 | 0 0 0 1 0 0 651 | 0 0 0 1 0 0 652 | 0 0 0 1 0 0 653 | 0 0 0 1 0 0 654 | 0 0 0 1 0 0 655 | 0 0 0 1 0 0 656 | 0 0 0 1 0 0 657 | 0 0 0 1 0 0 658 | 0 0 0 1 0 0 659 | 0 0 0 1 1 0 660 | 0 0 0 1 0 0 661 | 0 0 0 1 0 0 662 | 0 0 0 1 1 0 663 | 0 0 0 1 0 0 664 | 0 0 0 1 1 0 665 | 0 0 0 1 0 0 666 | 0 0 0 1 0 0 667 | 0 0 0 1 0 0 668 | 0 0 0 1 0 0 669 | 0 0 0 1 0 0 670 | 0 0 0 1 0 0 671 | 0 0 0 1 0 0 672 | 0 0 0 1 0 0 673 | 0 0 0 1 0 0 674 | 0 0 0 1 0 0 675 | 0 0 0 1 0 0 676 | 0 0 0 1 0 0 677 | 0 0 0 1 0 0 678 | 0 0 0 1 0 0 679 | 0 0 0 1 0 0 680 | 0 0 0 1 0 0 681 | 0 0 0 1 0 0 682 | 0 0 0 1 0 0 683 | 0 0 0 1 1 0 684 | 0 0 0 1 0 0 685 | 0 0 0 1 0 0 686 | 0 0 0 1 0 0 687 | 0 0 0 1 0 0 688 | 0 0 0 1 0 0 689 | 0 0 0 1 0 0 690 | 0 0 0 1 0 0 691 | 0 0 0 1 0 0 692 | 0 0 0 1 0 0 693 | 0 0 0 1 0 0 694 | 0 0 0 1 0 0 695 | 0 0 0 1 0 0 696 | 0 0 0 1 0 0 697 | 0 0 0 1 0 0 698 | 0 0 0 1 0 0 699 | 0 0 0 1 0 0 700 | 0 0 0 1 1 0 701 | 0 0 0 1 0 0 702 | 0 0 0 1 0 0 703 | 0 0 0 1 1 0 704 | 0 0 0 1 0 0 705 | 0 0 0 1 0 0 706 | 0 0 0 1 0 0 707 | 0 0 0 1 0 0 708 | 0 0 0 1 0 0 709 | 0 0 0 1 0 0 710 | 0 0 0 1 0 0 711 | 0 0 0 1 1 0 712 | 0 0 0 1 0 0 713 | 0 0 0 1 0 0 714 | 0 0 0 1 0 0 715 | 0 0 0 1 0 0 716 | 0 0 0 1 0 0 717 | 0 0 0 1 0 0 718 | 0 0 0 1 0 0 719 | 0 0 0 1 0 0 720 | 0 0 0 1 0 0 721 | 0 0 0 1 0 0 722 | 0 0 0 1 1 0 723 | 0 0 0 1 0 0 724 | 0 0 0 1 0 0 725 | 0 0 0 1 0 0 726 | 0 0 0 1 0 0 727 | 0 0 0 1 0 0 728 | 0 0 0 1 0 0 729 | 0 0 0 1 0 0 730 | 0 0 0 1 0 0 731 | 0 0 0 1 0 0 732 | 0 0 0 1 0 0 733 | 0 0 0 1 0 0 734 | 0 0 0 1 0 0 735 | 0 0 0 1 0 0 736 | 0 0 0 1 0 0 737 | 0 0 0 1 0 0 738 | 0 0 0 1 0 0 739 | 0 0 0 1 0 0 740 | 0 0 0 1 0 0 741 | 0 0 0 1 0 0 742 | 0 0 0 1 0 0 743 | 0 0 0 1 0 0 744 | 0 0 0 1 0 0 745 | 0 0 0 1 1 0 746 | 0 0 0 1 0 0 747 | 0 0 0 1 0 0 748 | 0 0 0 1 0 0 749 | 0 0 0 1 0 0 750 | 0 0 0 1 0 0 751 | 0 0 0 1 0 0 752 | 0 0 0 1 0 0 753 | 0 0 0 1 1 0 754 | 0 0 0 1 0 0 755 | 0 0 0 1 0 0 756 | 0 0 0 1 0 0 757 | 0 0 0 1 0 0 758 | 0 0 0 1 0 0 759 | 0 0 0 1 0 0 760 | 0 0 0 1 0 0 761 | 0 0 0 1 0 0 762 | 0 0 0 1 0 0 763 | 0 0 0 1 0 0 764 | 0 0 0 1 0 0 765 | 0 0 0 1 0 0 766 | 0 0 0 1 0 0 767 | 0 0 0 0 1 0 768 | 0 0 0 0 1 0 769 | 0 0 0 0 1 0 770 | 0 0 0 0 1 0 771 | 0 0 0 0 1 0 772 
| 0 0 0 0 1 0 773 | 0 0 0 0 1 0 774 | 0 0 0 0 1 0 775 | 0 0 0 0 1 0 776 | 0 0 0 0 1 0 777 | 0 0 0 0 1 0 778 | 0 0 0 0 1 0 779 | 0 0 0 0 1 0 780 | 0 0 0 0 1 0 781 | 0 0 0 0 1 0 782 | 0 0 0 0 1 0 783 | 0 0 0 0 1 0 784 | 0 0 0 0 1 0 785 | 0 0 0 0 1 0 786 | 0 0 0 0 1 0 787 | 0 0 0 0 1 0 788 | 0 0 0 0 1 0 789 | 0 0 0 0 1 0 790 | 0 0 0 0 1 0 791 | 0 0 0 0 1 0 792 | 0 0 0 0 1 0 793 | 0 0 0 0 1 0 794 | 0 0 0 0 1 0 795 | 0 0 0 0 1 0 796 | 0 0 0 0 1 0 797 | 0 0 0 0 1 0 798 | 0 0 0 1 1 0 799 | 0 0 0 1 1 0 800 | 0 0 0 0 1 0 801 | 0 0 0 0 1 0 802 | 0 0 0 0 1 0 803 | 0 0 0 0 1 0 804 | 0 0 0 0 1 0 805 | 0 0 0 0 1 0 806 | 0 0 0 0 1 0 807 | 0 0 0 0 1 0 808 | 0 0 0 0 1 0 809 | 0 0 0 1 1 0 810 | 0 0 0 1 1 0 811 | 0 0 0 0 1 0 812 | 0 0 0 0 1 0 813 | 0 0 0 0 1 0 814 | 0 0 0 0 1 0 815 | 0 0 0 0 1 0 816 | 0 0 0 0 1 0 817 | 0 0 0 0 1 0 818 | 0 0 0 0 1 0 819 | 0 0 0 0 1 0 820 | 0 0 0 1 1 0 821 | 0 0 0 0 1 0 822 | 0 0 0 0 1 0 823 | 0 0 0 0 1 0 824 | 0 0 0 0 1 0 825 | 0 0 0 0 1 0 826 | 0 0 0 0 1 0 827 | 0 0 0 0 1 0 828 | 0 0 0 0 1 0 829 | 0 0 0 0 1 0 830 | 0 0 0 0 1 0 831 | 0 0 0 0 1 0 832 | 0 0 0 0 1 0 833 | 0 0 0 0 1 0 834 | 0 0 0 0 1 0 835 | 0 0 0 0 1 0 836 | 0 0 0 0 1 0 837 | 0 0 0 0 1 0 838 | 0 0 0 0 1 0 839 | 0 0 0 0 1 0 840 | 0 0 0 1 1 0 841 | 0 0 0 1 1 0 842 | 0 0 0 0 1 0 843 | 0 0 0 0 1 0 844 | 0 0 0 0 1 0 845 | 0 0 0 0 1 0 846 | 0 0 0 0 1 0 847 | 0 0 0 0 1 0 848 | 0 0 0 0 1 0 849 | 0 0 0 0 1 0 850 | 0 0 0 0 1 0 851 | 0 0 0 0 1 0 852 | 0 0 0 0 1 0 853 | 0 0 0 0 1 0 854 | 0 0 0 0 1 0 855 | 0 0 0 0 1 0 856 | 0 0 0 0 1 0 857 | 0 0 0 0 1 0 858 | 0 0 0 0 1 0 859 | 0 0 0 0 1 0 860 | 0 0 0 0 1 0 861 | 0 0 0 0 1 0 862 | 0 0 0 0 1 0 863 | 0 0 0 0 1 0 864 | 0 0 0 0 1 0 865 | 0 0 0 0 1 0 866 | 0 0 0 0 1 0 867 | 0 0 0 0 1 0 868 | 0 0 0 0 1 0 869 | 0 0 0 0 1 0 870 | 0 0 0 0 1 0 871 | 0 0 0 0 1 0 872 | 0 0 0 0 1 0 873 | 0 0 0 0 1 0 874 | 0 0 0 0 1 0 875 | 0 0 0 0 1 0 876 | 0 0 0 0 1 0 877 | 0 0 0 0 1 0 878 | 0 0 0 0 1 0 879 | 0 0 0 0 1 0 880 | 0 0 0 0 1 0 881 | 0 0 0 0 1 0 882 | 0 0 0 0 1 0 883 | 0 0 0 0 1 0 884 | 0 0 0 0 1 0 885 | 0 0 0 0 1 0 886 | 0 0 0 0 1 0 887 | 0 0 0 0 1 0 888 | 0 0 0 0 1 0 889 | 0 0 0 0 1 0 890 | 0 0 0 0 1 0 891 | 0 0 0 0 1 0 892 | 0 0 0 0 1 0 893 | 0 0 0 0 1 0 894 | 0 0 0 0 1 0 895 | 0 0 0 0 1 0 896 | 0 0 0 0 1 0 897 | 0 0 0 0 1 0 898 | 0 0 0 0 1 0 899 | 0 0 0 0 1 0 900 | 0 0 0 0 1 0 901 | 0 0 0 0 1 0 902 | 0 0 0 1 1 0 903 | 0 0 0 0 1 0 904 | 0 0 0 0 1 0 905 | 0 0 0 1 1 0 906 | 0 0 0 0 1 0 907 | 0 0 0 0 1 0 908 | 0 0 0 0 1 0 909 | 0 0 0 0 1 0 910 | 0 0 0 0 1 0 911 | 0 0 0 0 1 0 912 | 0 0 0 0 1 0 913 | 0 0 0 0 1 0 914 | 0 0 0 0 1 0 915 | 0 0 0 0 1 0 916 | 0 0 0 0 1 0 917 | 0 0 0 0 1 0 918 | 0 0 0 0 1 0 919 | 0 0 0 0 1 0 920 | 0 0 0 0 1 0 921 | 0 0 0 0 1 0 922 | 0 0 0 0 1 0 923 | 0 0 0 0 1 0 924 | 0 0 0 1 1 0 925 | 0 0 0 0 1 0 926 | 0 0 0 0 1 0 927 | 0 0 0 0 1 0 928 | 0 0 0 0 1 0 929 | 0 0 0 0 1 0 930 | 0 0 0 0 1 0 931 | 0 0 0 0 1 0 932 | 0 0 0 0 1 0 933 | 0 0 0 0 1 0 934 | 0 0 0 0 1 0 935 | 0 0 0 0 1 0 936 | 0 0 0 0 1 0 937 | 0 0 0 0 1 0 938 | 0 0 0 0 1 0 939 | 0 0 0 0 1 0 940 | 0 0 0 0 1 0 941 | 0 0 0 0 1 0 942 | 0 0 0 0 1 0 943 | 0 0 0 0 1 0 944 | 0 0 0 0 1 0 945 | 0 0 0 0 1 0 946 | 0 0 0 0 1 0 947 | 0 0 0 0 1 0 948 | 0 0 0 0 1 0 949 | 0 0 0 0 1 0 950 | 0 0 0 0 1 0 951 | 0 0 0 0 1 0 952 | 0 0 0 0 1 0 953 | 0 0 0 0 1 0 954 | 0 0 0 0 1 0 955 | 0 0 0 0 1 0 956 | 0 0 0 0 1 0 957 | 0 0 0 0 1 0 958 | 0 0 0 0 1 0 959 | 0 0 0 0 1 0 960 | 0 0 0 0 1 0 961 | 0 0 0 0 1 0 962 | 0 0 0 0 1 0 963 | 0 0 0 0 1 0 964 | 0 0 0 0 1 0 965 | 0 0 0 0 1 0 966 | 0 0 0 0 1 0 967 | 0 0 0 0 1 0 968 | 0 0 0 0 1 0 969 | 0 0 0 
0 1 0 970 | 0 0 0 0 1 0 971 | 0 0 0 0 1 0 972 | 0 0 0 0 1 0 973 | 0 0 0 0 1 0 974 | 0 0 0 0 1 0 975 | 0 0 0 0 1 0 976 | 0 0 0 0 1 0 977 | 0 0 0 0 1 0 978 | 0 0 0 0 1 0 979 | 0 0 0 0 1 0 980 | 0 0 0 0 1 0 981 | 0 0 0 0 1 0 982 | 0 0 0 0 1 0 983 | 0 0 0 0 1 0 984 | 0 0 0 0 1 0 985 | 0 0 0 0 1 0 986 | 0 0 0 0 1 0 987 | 0 0 0 0 1 0 988 | 0 0 0 0 1 0 989 | 0 0 0 0 1 0 990 | 0 0 0 1 1 0 991 | 0 0 0 0 1 0 992 | 0 0 0 0 1 0 993 | 0 0 0 0 1 0 994 | 0 0 0 0 1 0 995 | 0 0 0 0 1 0 996 | 0 0 0 0 1 0 997 | 0 0 0 0 1 0 998 | 0 0 0 0 1 0 999 | 0 0 0 0 1 0 1000 | 0 0 0 0 0 1 1001 | 0 0 0 0 0 1 1002 | 0 0 0 0 0 1 1003 | 0 0 0 0 0 1 1004 | 0 0 0 0 0 1 1005 | 0 0 0 0 0 1 1006 | 0 0 0 0 0 1 1007 | 0 0 0 0 0 1 1008 | 0 0 0 0 0 1 1009 | 0 0 0 0 0 1 1010 | 0 0 0 0 0 1 1011 | 0 0 0 0 0 1 1012 | 0 0 0 0 0 1 1013 | 0 0 0 0 0 1 1014 | 0 0 0 0 0 1 1015 | 0 0 0 0 0 1 1016 | 0 0 0 0 0 1 1017 | 0 0 0 0 0 1 1018 | 0 0 0 0 0 1 1019 | 0 0 0 0 0 1 1020 | 0 0 0 0 0 1 1021 | 0 0 0 0 0 1 1022 | 0 0 0 0 0 1 1023 | 0 0 0 0 0 1 1024 | 0 0 0 0 0 1 1025 | 0 0 0 0 0 1 1026 | 0 0 0 0 0 1 1027 | 0 0 0 0 0 1 1028 | 0 0 0 0 0 1 1029 | 0 0 0 0 0 1 1030 | 0 0 0 0 0 1 1031 | 0 0 0 0 0 1 1032 | 0 0 0 0 0 1 1033 | 0 0 0 0 0 1 1034 | 0 0 0 0 0 1 1035 | 0 0 0 0 0 1 1036 | 0 0 0 0 0 1 1037 | 0 0 0 0 0 1 1038 | 0 0 0 0 0 1 1039 | 0 0 0 0 0 1 1040 | 0 0 0 0 0 1 1041 | 0 0 0 0 0 1 1042 | 0 0 0 0 0 1 1043 | 0 0 0 0 0 1 1044 | 0 0 0 0 0 1 1045 | 0 0 0 0 0 1 1046 | 0 0 0 0 0 1 1047 | 0 0 0 0 0 1 1048 | 0 0 0 0 0 1 1049 | 0 0 0 0 0 1 1050 | 0 0 0 0 0 1 1051 | 0 0 0 0 0 1 1052 | 0 0 0 0 0 1 1053 | 0 0 0 0 0 1 1054 | 0 0 0 0 0 1 1055 | 0 0 0 0 0 1 1056 | 0 0 0 0 0 1 1057 | 0 0 0 0 0 1 1058 | 0 0 0 0 0 1 1059 | 0 0 0 0 0 1 1060 | 0 0 0 0 0 1 1061 | 0 0 0 0 0 1 1062 | 0 0 0 0 0 1 1063 | 0 0 0 0 0 1 1064 | 0 0 0 0 0 1 1065 | 0 0 0 0 0 1 1066 | 0 0 0 0 0 1 1067 | 0 0 0 0 0 1 1068 | 0 0 0 0 0 1 1069 | 0 0 0 0 0 1 1070 | 0 0 0 0 0 1 1071 | 0 0 0 0 0 1 1072 | 0 0 0 0 0 1 1073 | 0 0 0 0 0 1 1074 | 0 0 0 0 0 1 1075 | 0 0 0 0 0 1 1076 | 0 0 0 0 0 1 1077 | 0 0 0 0 0 1 1078 | 0 0 0 0 0 1 1079 | 0 0 0 0 0 1 1080 | 0 0 0 0 0 1 1081 | 0 0 0 0 0 1 1082 | 0 0 0 0 0 1 1083 | 0 0 0 0 0 1 1084 | 0 0 0 0 0 1 1085 | 0 0 0 0 0 1 1086 | 0 0 0 0 0 1 1087 | 0 0 0 0 0 1 1088 | 0 0 0 0 0 1 1089 | 0 0 0 0 0 1 1090 | 0 0 0 0 0 1 1091 | 0 0 0 0 0 1 1092 | 0 0 0 0 0 1 1093 | 0 0 0 0 0 1 1094 | 0 0 0 0 0 1 1095 | 0 0 0 0 0 1 1096 | 0 0 0 0 0 1 1097 | 0 0 0 0 0 1 1098 | 0 0 0 0 0 1 1099 | 0 0 0 0 0 1 1100 | 0 0 0 0 0 1 1101 | 0 0 0 0 0 1 1102 | 0 0 0 0 0 1 1103 | 0 0 0 0 0 1 1104 | 0 0 0 0 0 1 1105 | 0 0 0 0 0 1 1106 | 0 0 0 0 0 1 1107 | 0 0 0 0 0 1 1108 | 0 0 0 0 0 1 1109 | 0 0 0 0 0 1 1110 | 0 0 0 0 0 1 1111 | 0 0 0 0 0 1 1112 | 0 0 0 0 0 1 1113 | 0 0 0 0 0 1 1114 | 0 0 0 0 0 1 1115 | 0 0 0 0 0 1 1116 | 0 0 0 0 0 1 1117 | 0 0 0 0 0 1 1118 | 0 0 0 0 0 1 1119 | 0 0 0 0 0 1 1120 | 0 0 0 0 0 1 1121 | 0 0 0 0 0 1 1122 | 0 0 0 0 0 1 1123 | 0 0 0 0 1 1 1124 | 0 0 0 0 0 1 1125 | 0 0 0 0 0 1 1126 | 0 0 0 0 0 1 1127 | 0 0 0 0 0 1 1128 | 0 0 0 0 0 1 1129 | 0 0 0 0 0 1 1130 | 0 0 0 0 0 1 1131 | 0 0 0 0 0 1 1132 | 0 0 0 0 0 1 1133 | 0 0 0 0 0 1 1134 | 0 0 0 0 0 1 1135 | 0 0 0 0 0 1 1136 | 0 0 0 0 0 1 1137 | 0 0 0 0 0 1 1138 | 0 0 0 0 0 1 1139 | 0 0 0 0 0 1 1140 | 0 0 0 0 0 1 1141 | 0 0 0 0 0 1 1142 | 0 0 0 0 0 1 1143 | 0 0 0 0 0 1 1144 | 0 0 0 0 0 1 1145 | 0 0 0 0 0 1 1146 | 0 0 0 0 0 1 1147 | 0 0 0 0 0 1 1148 | 0 0 0 0 0 1 1149 | 0 0 0 0 0 1 1150 | 0 0 0 0 0 1 1151 | 0 0 0 0 0 1 1152 | 0 0 0 0 0 1 1153 | 0 0 0 0 0 1 1154 | 0 0 0 0 0 1 1155 | 0 0 0 0 0 1 1156 | 0 0 0 0 0 1 1157 | 0 0 0 0 0 1 1158 | 
[remaining rows of /scene/data.y, instances ~1158-2407: each row is six space-separated binary indicators, one per scene label; most rows carry exactly one positive label, with occasional two-label rows]
--------------------------------------------------------------------------------
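To sanity-check the label-file format summarized above, the snippet below (a minimal sketch, not part of the repository) loads `scene/data.y` and reports per-label frequencies and the average number of positive labels per instance. The path and the expected shape of roughly (2407, 6) are assumptions based on the rows shown.

    # sketch: inspect the scene label matrix (assumes scene/data.y in the working directory)
    import numpy as np

    y_data = np.loadtxt('scene/data.y', dtype=int)  # expected shape: about (2407, 6)

    # how often each of the six label columns is positive
    label_counts = y_data.sum(axis=0)

    # label cardinality: average number of positive labels per instance
    cardinality = y_data.sum(axis=1).mean()

    print('label counts per column: %s' % label_counts)
    print('label cardinality: %.3f' % cardinality)

For the rows visible here, the cardinality should come out only slightly above 1, consistent with most instances belonging to a single scene category.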