├── README.md
├── data_loader
│   └── data_utils.py
├── dataset
│   ├── V_1.csv
│   ├── V_45.csv
│   └── W_45.csv
├── loss_curve
│   └── Train_Val_loss.png
├── main.py
├── models
│   ├── base_model.py
│   ├── layer_module.py
│   ├── tester.py
│   ├── trainer.py
│   └── transformer.py
├── output
│   └── fc.pkl
├── picture
│   ├── single_n_pred18_node0.png
│   ├── single_point0_node0.png
│   ├── single_point10_node0.png
│   ├── single_point11_node0.png
│   ├── single_point12_node0.png
│   ├── single_point13_node0.png
│   ├── single_point14_node0.png
│   ├── single_point15_node0.png
│   ├── single_point16_node0.png
│   ├── single_point17_node0.png
│   ├── single_point1_node0.png
│   ├── single_point2_node0.png
│   ├── single_point3_node0.png
│   ├── single_point4_node0.png
│   ├── single_point5_node0.png
│   ├── single_point6_node0.png
│   ├── single_point7_node0.png
│   ├── single_point8_node0.png
│   └── single_point9_node0.png
└── utils
    ├── math_graph.py
    └── math_utils.py

/README.md:
--------------------------------------------------------------------------------
 1 | # Spatio-temporal-forecasting
 2 | This repository leverages recent advances in graph convolution and sequence modeling to design neural networks for spatio-temporal forecasting, including graph convolutional neural networks, gated recurrent units, the encoder-decoder framework, attention mechanisms, and transformers.
 3 | 
 4 | There are two models I designed for spatio-temporal forecasting in this repository; both adopt an encoder-decoder architecture.
 5 | 
 6 | The first one is devised on the basis of the Diffusion Convolutional Recurrent Neural Network (DCRNN) proposed in [Diffusion Convolutional Recurrent Neural Network: Data-Driven Traffic Forecasting](https://arxiv.org/abs/1707.01926) by Yaguang Li, Rose Yu, Cyrus Shahabi, and Yan Liu. I added an attention mechanism to the original model; it models the direct relationships between historical and future time steps, which helps alleviate the error propagation problem across prediction time steps. In addition, I implemented some regularization methods in Python, such as early stopping and a held-out dataset, to avoid overfitting. The code for this model is in `layer_module.py` and `base_model.py`.
 7 | 
 8 | The second model captures the spatial dependency using a graph convolutional neural network and the temporal dependency using a transformer. The code for this model is in `transformer.py`.
 9 | 
10 | The problem defined in this repository and the folder structure are the same as in [STGCN](https://github.com/VeritasYin/STGCN_IJCAI-18); please refer to that repository for further details.
11 | 
12 | ## Requirements
13 | * PyTorch
14 | * NumPy
15 | * Pandas
16 | * SciPy
17 | * Matplotlib
18 | 
--------------------------------------------------------------------------------
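Below is a minimal usage sketch (not part of the repository) of the data pipeline defined in `data_loader/data_utils.py`, which follows. It assumes the repository root is on `PYTHONPATH`; the split ratio, window lengths, and batch size are illustrative values only, and `n_route=45` matches the 45 columns (one per well) of `dataset/V_45.csv`.

```python
# Hedged sketch: build the dataloaders with loader_gen from this repo.
# Split ratio, n_his, n_pred, and batch_size are illustrative, not the
# repo's actual training configuration.
import torch

from data_loader.data_utils import loader_gen

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

dataloaders, dataset_sizes, test_loader, stats = loader_gen(
    data_file='dataset/V_45.csv',
    n_train=7, n_val=2, n_test=1,   # 7:2:1 train/val/test proportions
    n_his=12,                       # length of the source (historical) series
    n_pred=3,                       # length of the target (prediction) series
    n_route=45,                     # V_45.csv has one column per well
    batch_size=32,
    device=device,
)

# Each batch is a (source, target) pair of z-score-normalized tensors:
# x: [batch_size, n_his, n_route, 1], y: [batch_size, n_pred, n_route, 1].
x, y = next(iter(dataloaders['train']))
print(x.shape, y.shape)
```

Since every split is normalized with the training set's mean and standard deviation, the returned `stats` dict is what you would use to map model outputs back to the original scale.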
/data_loader/data_utils.py:
--------------------------------------------------------------------------------
 1 | from utils.math_utils import z_score
 2 | import torch
 3 | import numpy as np
 4 | import pandas as pd
 5 | from torch.utils.data import Dataset, DataLoader
 6 | 
 7 | 
 8 | class WaterDataset(Dataset):
 9 |     def __init__(self, data, n_his):
10 |         self.__data = data
11 |         self.n_his = n_his
12 | 
13 |     def __len__(self):
14 |         return len(self.__data)
15 | 
16 |     def __getitem__(self, idx):
17 |         return self.__data[idx, :self.n_his, :, :], self.__data[idx, self.n_his:, :, :]
18 | 
19 | 
20 | def seq_gen(len_seq, data_seq, offset, n_frame, n_route, C_0=1):
21 |     """
22 |     Generate the dataset from the original time series.
23 |     :param len_seq: int, length of the original sequence.
24 |     :param data_seq: np.ndarray, [len_seq, n_route * C_0].
25 |     :param offset: int, start point for building the new dataset.
26 |     :param n_frame: int, n_his + n_pred.
27 |     :param n_route: int, number of the vertices (wells) on the graph.
28 |     :param C_0: int, number of the channels of the source data.
29 |     :return: np.ndarray, [n_slot, n_frame, n_route, C_0].
30 |     """
31 |     n_slot = len_seq - n_frame + 1
32 |     tmp_seq = np.zeros((n_slot, n_frame, n_route, C_0))
33 |     # data_seq = data_seq[:-144, 3:4]
34 |     for i in range(n_slot):
35 |         sta = offset + i
36 |         end = sta + n_frame
37 |         tmp_seq[i, :, :, :] = np.reshape(data_seq[sta:end, :], [n_frame, n_route, C_0])
38 |     return tmp_seq
39 | 
40 | 
41 | def data_gen(file_path, data_config, n_route, n_frame, device):
42 |     # Generate the datasets for training, validation, and test.
43 |     # file_path: the path of the file.
44 |     # data_config: the proportions of the training, validation, and test sets.
45 |     # n_route: number of the vertices on the graph.
46 |     # return: dict that contains the training, validation, and test data, plus their stats.
47 | 
48 |     n_train, n_val, n_test = data_config
49 |     r_train, r_val = float(n_train)/(n_train+n_val+n_test), float(n_val)/(n_train+n_val+n_test)
50 | 
51 |     try:
52 |         data_seq = pd.read_csv(file_path, header=None).values
53 |     except FileNotFoundError:
54 |         raise FileNotFoundError(f'ERROR: input file was not found at {file_path}.')
55 | 
56 |     length = data_seq.shape[0]
57 |     data_frame = seq_gen(length, data_seq, 0, n_frame, n_route)
58 |     num_data = data_frame.shape[0]
59 |     seq_train = data_frame[:int(num_data*r_train), :, :, :]
60 |     seq_val = data_frame[int(num_data*r_train):int(num_data*r_train)+int(num_data*r_val), :, :, :]
61 |     seq_test = data_frame[int(num_data*r_train)+int(num_data*r_val):, :, :, :]
62 | 
63 |     # x_stats: dict, the stats of the training dataset, i.e. its mean and standard deviation.
64 |     x_stats = {'mean': np.mean(seq_train), 'std': np.std(seq_train)}
65 | 
66 |     # x_train, x_val, x_test: tensor, [n_slot, n_frame, n_route, C_0].
67 |     x_train = z_score(seq_train, x_stats['mean'], x_stats['std'])
68 |     x_val = z_score(seq_val, x_stats['mean'], x_stats['std'])
69 |     x_test = z_score(seq_test, x_stats['mean'], x_stats['std'])
70 |     x_train = torch.from_numpy(x_train).type(torch.float32).to(device)
71 |     x_val = torch.from_numpy(x_val).type(torch.float32).to(device)
72 |     x_test = torch.from_numpy(x_test).type(torch.float32).to(device)
73 |     x_data = {'train': x_train, 'val': x_val, 'test': x_test}
74 |     return x_data, x_stats
75 | 
76 | 
77 | def loader_gen(data_file, n_train, n_val, n_test, n_his, n_pred, n_route, batch_size, device):
78 |     # Wrap the datasets with data loaders.
79 |     # data_file: the path of the file.
80 |     # n_train, n_val, n_test: the split proportions of the dataset.
81 |     # n_his: length of the source series.
82 |     # n_pred: length of the target series.
83 |     # return: dict of dataloaders for training and validation, dict of sizes for the training
84 |     # and validation datasets, dataloader for testing, and statistics of the dataset.
85 |     # data: [batch_size, seq_len, n_route, C_0].
86 | 
87 |     data_wl, stats = data_gen(data_file, (n_train, n_val, n_test), n_route, n_his + n_pred, device)
88 |     trainset = WaterDataset(data_wl['train'], n_his)
89 |     validset = WaterDataset(data_wl['val'], n_his)
90 |     testset = WaterDataset(data_wl['test'], n_his)
91 |     train_data_gen = DataLoader(trainset, shuffle=True, batch_size=batch_size)
92 |     valid_data_gen = DataLoader(validset, batch_size=batch_size)
93 |     test_data_gen = DataLoader(testset, batch_size=batch_size)
94 |     dataset_sizes = {'train': len(train_data_gen.dataset), 'valid': len(valid_data_gen.dataset)}
95 |     dataloaders = {'train': train_data_gen, 'valid': valid_data_gen}
96 |     return dataloaders, dataset_sizes, test_data_gen, stats
97 | 
--------------------------------------------------------------------------------
/dataset/V_1.csv:
--------------------------------------------------------------------------------
  1 | 8.972728964
  2 | 8.837237951699999
  3 | 10.716838514800001
  4 | 8.904409599
  5 | 6.8581773986000005
  6 | 5.284514632900001
  7 | 6.0969095283
  8 | 7.886007762799999
  9 | 7.9172426387
 10 | 7.5450815888
 11 | 4.4325341631
 12 | 7.324596887799999
 13 | 7.5467988992
 14 | 8.8359511074
 15 | 10.2077398958
 16 | 8.4048502044
 17 | 6.6102531838
 18 | 6.0819109676
 19 | 7.3103318927
 20 | 6.9534469533
 21 | 7.2820627349
 22 | 7.7141083027
 23 | 7.3394374677
 24 | 7.2133820599
 25 | 8.771464866699999
 26 | 9.037276371699999
 27 | 9.3197329858
 28 | 6.0079325001
 29 | 4.2630808858
 30 | 4.6419067361
 31 | 5.0832563496
 32 | 6.020648411900001
 33 | 8.4178573982
 34 | 6.7635269011000005
 35 | 5.868084999
 36 | 6.5219879718
 37 | 9.100252177
 38 | 9.1648896612
 39 | 9.718686127
 40 | 7.2034853420000005
 41 | 6.341574469099999
 42 | 6.1825390601
 43 | 7.5462799977
 44 | 8.2421210066
 45 | 9.91492158
 46 | 6.8986012184000005
 47 | 6.190823121599999
 48 | 6.7358412773000005
 49 | 9.0026294949
 50 | 8.9124447037
 51 | 8.3773358986
 52 | 7.5391370341999995
 53 | 5.5749822377
 54 | 4.7412483249
 55 | 5.7880415424
 56 | 5.6830398478
 57 | 8.4739868368
 58 | 7.4165250571
 59 | 4.8386637862
 60 | 7.2194637112
 61 | 10.0489238052
 62 | 9.1174074249
 63 | 9.4844878526
 64 | 8.9805315125
 65 | 4.9065506861
 66 | 5.7893406162
 67 | 7.2543564682
 68 | 8.912181799299999
 69 | 7.392761845800001
 70 | 7.396345879
 71 | 4.8347087281
 72 | 7.265792525
 73 | 9.213940087000001
 74 | 9.4822795584
 75 | 10.4681292384
 76 | 8.7084561594
 77 | 5.8765688575
 78 | 5.4374069985
 79 | 6.2034790388
 80 | 6.5392017222000005
 81 | 9.1829937693
 82 | 8.1613393083
 83 | 4.5093902331
 84 | 6.361519829500001
 85 | 9.707285928200001
 86 | 9.388881849199999
 87 | 8.7392711075
 88 | 7.8428531611
 89 | 7.6120575187
 90 | 6.5865266032
 91 | 5.4125839723
 92 | 8.0884594137
 93 | 7.2745707204
 94 | 6.7300876177
 95 | 3.9499619888999997
 96 | 6.270862767100001
 97 | 7.3096706004
 98 | 8.267301638400001
 99 | 9.5670900484
100 | 8.5797313863
101 | 5.4766284548
102 | 5.273589638
103 | 6.95911323
104 | 6.708450655
105 | 7.3945263069000005
106 | 6.888823342799999
107 | 5.6778711418
108 | 6.8578472569
109 | 7.9649197838
110 | 8.5362422017
111 | 8.3072500889
112 | 7.0187079798
113 | 4.5029332386
114 | 6.2849465083
115 | 6.9081535976
116 | 6.5459771819000006
117 | 7.8622919891
118 | 7.6988466176
119 | 5.1541271415
120 | 6.5616755275
121 | 8.2024262882
122 | 9.4745240103
123 | 8.7060000246
124 | 8.0760718308
125 | 4.6677119702
126 | 4.7737786968
127 | 7.0881596334
128 | 8.3356989488
129 | 8.8852705717
130 | 8.7453597732
131 | 5.319094786
132 | 6.308273917999999
133 | 7.6475898738
134 | 9.1615206376
135 | 10.0513610431
136 | 7.8917952873
137 | 7.202117884
138 | 
7.3181654511000005 139 | 6.473298021000001 140 | 7.3519418885 141 | 7.7416124147000005 142 | 8.2582967962 143 | 3.9749151444999997 144 | 6.2398553094 145 | 9.1600318335 146 | 8.1842023323 147 | 9.735723354700001 148 | 8.006104086699999 149 | 5.589704837 150 | 4.6394531333 151 | 5.747343760700001 152 | 6.4308159126 153 | 8.7294292494 154 | 7.9329949439 155 | 5.793181429 156 | 5.420527625 157 | 8.2856885456 158 | 9.032417103 159 | 9.1947298699 160 | 8.9618038092 161 | 5.1941795566 162 | 6.791392453099999 163 | 6.8242571638 164 | 5.3346744476 165 | 8.9661012989 166 | 8.8800850284 167 | 4.6790693867 168 | 5.9895643152 169 | -------------------------------------------------------------------------------- /dataset/V_45.csv: -------------------------------------------------------------------------------- 1 | 1.49,1.1,1.46,1.91,1.89,1.68,1.09,1.99,1.19,1.51,2.6,1.23,1.52,1.25,2.9,1.86,2.06,2.58,6.31,6.58,3.09,1.31,1.98,1.48,3.14,1.39,1.74,1.52,1.67,1.84,1.2,1.58,2.61,1.37,1.34,1.49,1.5,2.29,2.79,1.91,2.45,1.68,2.58,2.64,3.66 2 | 1.65,1.12,1.6,1.95,1.98,1.72,1.36,2.03,1.41,1.58,2.71,1.3,1.7,1.36,2.9,1.87,2.14,2.62,6.27,6.59,3.19,1.36,2.03,1.62,3.2,1.45,1.78,1.6,1.74,1.87,1.37,1.6,2.7,1.4,1.44,1.53,1.58,2.35,2.79,1.96,2.45,1.66,2.65,2.63,3.68 3 | 1.65,1.15,1.75,1.98,2.03,1.75,1.49,2.49,1.58,1.64,2.83,1.34,1.72,1.52,2.85,1.9,2.08,2.66,6.25,6.59,3.17,1.38,2.1,1.69,3.27,1.51,1.82,1.68,1.8,1.89,1.43,1.64,2.78,1.45,1.47,1.69,1.67,2.37,2.83,2.02,2.47,1.63,2.72,2.62,3.71 4 | 1.73,1.17,1.8,2.04,2.13,1.78,1.52,2.57,1.69,1.71,2.89,1.36,1.73,1.56,2.85,1.92,2.16,2.69,6.25,6.6,3.2,1.44,2.25,1.77,3.34,1.56,1.87,1.76,1.86,1.91,1.5,1.68,2.83,1.58,1.61,1.7,1.7,2.49,2.83,2.09,2.5,1.67,2.77,2.54,3.72 5 | 1.85,1.19,1.86,2.1,2.19,1.82,1.56,2.69,1.74,1.79,2.94,1.48,1.74,1.7,2.8,1.95,2.28,2.72,6.25,6.61,3.15,1.55,2.29,1.88,3.39,1.62,1.9,1.84,1.93,1.91,1.57,1.7,2.92,1.65,1.68,1.84,1.73,2.58,2.84,2.15,2.51,1.73,2.82,2.5,3.76 6 | 1.91,1.23,1.97,2.17,2.25,1.86,1.67,2.74,1.8,1.91,2.97,1.55,1.75,1.75,2.75,1.97,2.35,2.76,6.23,6.62,3.13,1.62,2.36,2.0,3.47,1.67,1.94,1.91,2.0,1.92,1.64,1.72,2.96,1.69,1.88,1.92,1.9,2.6,2.85,2.21,2.51,1.79,2.85,2.47,3.77 7 | 2.03,1.26,2.07,2.22,2.31,1.89,1.85,2.77,1.85,1.98,3.08,1.7,1.76,1.88,2.75,1.99,2.4,2.8,6.23,6.62,3.15,1.78,2.4,2.17,3.54,1.7,1.97,1.97,2.03,1.96,1.71,1.76,3.02,1.74,1.89,1.91,1.97,2.65,2.85,2.28,2.52,2.2,2.9,2.38,3.77 8 | 2.1,1.32,2.12,2.26,2.41,1.91,1.91,2.81,1.89,2.04,3.14,1.75,1.77,2.0,2.73,1.98,2.43,2.82,6.21,6.63,3.17,1.88,2.47,2.34,3.6,1.73,2.0,2.04,2.07,1.99,1.79,1.78,3.06,1.87,1.91,2.0,2.03,2.69,2.87,2.3,2.54,2.12,2.93,2.39,3.78 9 | 2.12,1.36,2.17,2.37,2.45,1.94,1.96,2.81,1.99,2.1,3.19,1.79,2.11,2.12,2.73,2.0,2.48,2.85,6.2,6.64,3.2,1.93,2.58,2.4,3.68,1.77,2.02,2.11,2.11,1.99,1.84,1.8,3.13,1.99,2.05,2.0,2.05,2.7,2.87,2.31,2.55,2.1,2.96,2.42,3.83 10 | 2.17,1.4,2.19,2.43,2.52,2.0,2.06,3.07,2.02,2.18,3.23,1.87,2.27,2.19,2.7,2.02,2.52,2.88,6.19,6.65,3.24,2.05,2.69,2.46,3.74,1.8,2.04,2.16,2.15,2.01,1.91,1.84,3.15,2.11,2.13,1.92,2.08,2.72,2.9,2.35,2.55,1.98,2.98,2.44,3.84 11 | 2.21,1.45,2.29,2.48,2.53,2.04,2.16,2.86,2.07,2.25,3.27,1.93,2.3,2.28,2.73,2.06,2.55,2.91,6.17,6.66,3.43,2.17,2.81,2.53,3.81,1.84,2.07,2.22,2.19,2.03,2.03,1.87,3.22,2.21,2.13,1.93,2.08,2.74,2.94,2.37,2.55,1.83,2.92,2.44,3.86 12 | 2.23,1.52,2.34,2.52,2.59,2.06,2.2,2.92,2.1,2.34,3.3,1.97,2.32,2.34,2.73,2.07,2.58,2.96,6.16,6.67,3.67,2.34,2.94,2.57,3.84,1.87,2.1,2.26,2.22,2.05,2.15,1.93,2.27,2.36,2.2,2.14,2.12,1.75,2.99,2.42,2.56,1.77,2.85,2.46,3.89 13 | 
2.25,1.53,2.36,2.53,2.61,2.07,2.23,2.94,2.12,2.36,3.32,1.99,2.42,2.36,2.75,2.14,2.6,2.99,6.2,6.67,3.69,2.43,2.96,2.6,3.84,1.91,2.12,2.28,2.25,2.05,2.18,1.93,3.26,2.38,2.26,2.14,2.24,1.84,3.0,2.33,2.64,1.55,3.02,2.47,3.91 14 | 2.27,1.55,2.39,2.55,2.63,2.09,2.26,2.97,2.15,2.39,3.34,2.0,2.62,2.39,2.81,2.15,2.61,3.01,6.19,6.68,3.7,2.44,2.99,2.62,3.88,1.94,2.14,2.31,2.26,2.06,2.21,1.95,3.26,2.38,2.26,2.02,2.28,1.87,3.04,2.3,2.65,1.62,3.03,2.47,3.99 15 | 2.21,1.61,2.32,2.51,2.63,2.09,2.25,2.93,2.11,2.38,3.27,2.08,2.6,2.44,2.82,2.14,2.64,2.98,6.19,6.67,3.77,2.29,2.75,2.55,3.89,1.98,2.17,2.24,2.3,2.1,2.24,1.97,3.23,2.16,2.31,2.04,2.27,1.89,2.99,2.16,2.65,1.73,3.02,2.45,3.89 16 | 2.13,1.65,2.25,2.48,2.63,2.07,2.22,2.91,2.07,2.33,3.23,2.01,2.35,2.39,2.84,2.07,2.62,2.94,6.22,6.68,3.69,2.18,2.69,2.52,3.89,2.02,2.2,2.2,2.28,2.08,2.2,1.95,3.2,2.13,2.4,2.06,2.27,1.71,2.85,2.17,2.66,1.45,2.98,2.44,3.81 17 | 2.01,1.62,2.2,2.44,2.61,2.05,2.16,2.89,2.05,2.23,3.19,1.91,2.28,2.36,2.84,2.09,2.49,2.88,6.28,6.68,3.64,2.16,2.73,2.49,3.86,2.05,2.17,2.12,2.25,2.06,2.17,1.91,3.13,2.07,2.38,1.89,2.25,2.66,2.74,2.11,2.72,1.4,2.92,2.42,3.71 18 | 1.73,1.56,2.14,2.36,2.6,2.01,2.09,2.84,1.97,2.18,3.12,1.77,2.22,2.32,2.83,2.06,2.37,2.8,6.27,6.69,3.61,2.14,2.76,2.45,3.84,2.03,2.17,2.0,2.2,2.03,2.11,1.87,3.01,1.93,2.36,1.9,2.24,2.64,2.64,2.05,2.76,1.37,2.85,2.4,3.62 19 | 1.6,1.52,2.03,1.29,2.53,1.89,1.96,2.79,1.9,2.18,3.04,1.75,2.21,2.23,2.82,2.02,2.26,2.7,6.27,6.68,3.59,2.04,2.73,2.4,3.77,1.95,2.1,1.92,2.1,1.93,2.06,1.8,2.94,1.87,2.31,1.86,2.17,2.64,2.61,2.02,2.69,1.44,2.82,2.35,3.59 20 | 1.51,1.43,1.98,2.21,2.48,1.8,1.87,2.69,1.81,2.13,3.02,1.7,2.15,2.14,2.8,2.01,2.24,2.63,6.25,6.67,3.57,2.03,2.7,2.33,3.7,1.88,2.05,1.97,2.0,1.84,2.0,1.78,2.86,1.84,2.28,1.85,2.16,2.57,2.59,2.03,2.6,1.31,2.73,2.29,3.5 21 | 1.47,1.39,1.96,2.13,2.44,1.74,1.86,2.73,1.8,2.08,3.0,1.69,2.14,2.09,2.79,1.99,2.21,2.56,6.25,6.66,3.55,2.0,2.69,2.22,3.64,1.85,1.99,2.04,1.96,1.73,1.91,1.72,2.81,1.78,2.27,1.81,2.13,2.52,2.59,1.98,2.55,1.24,2.67,2.24,3.42 22 | 1.43,1.34,1.84,2.05,2.39,1.74,1.81,2.73,1.77,2.06,2.97,1.59,2.08,2.09,2.81,2.0,2.2,2.49,6.25,6.63,3.56,2.05,2.69,2.17,3.59,1.83,1.95,2.07,1.92,1.69,1.8,1.66,2.77,1.76,2.16,1.79,2.11,2.44,2.58,1.97,2.51,1.24,2.65,2.22,3.41 23 | 1.4,1.31,1.72,2.0,2.46,1.72,1.76,2.69,1.73,2.01,2.93,1.5,2.01,2.06,2.79,1.95,2.14,2.42,6.26,6.62,3.54,2.01,2.67,2.1,3.54,1.76,1.87,2.09,1.87,1.65,1.71,1.62,2.73,1.74,2.17,1.72,2.09,2.44,2.56,1.96,2.47,1.23,2.56,2.19,3.41 24 | 1.35,1.26,1.86,1.94,2.5,1.7,1.68,2.69,1.65,1.95,2.9,1.33,1.89,2.04,2.76,1.92,2.04,2.34,6.27,6.61,3.49,1.94,2.67,2.01,3.44,1.7,1.85,2.11,1.8,1.59,1.67,1.55,2.67,1.73,2.14,1.67,2.09,2.4,2.55,1.96,2.46,1.18,2.53,2.17,3.39 25 | 1.06,1.0,1.87,1.81,2.17,1.52,1.56,3.19,0.91,1.88,2.6,1.31,1.88,1.69,2.73,1.91,2.04,2.27,6.27,6.6,3.39,1.46,2.47,1.62,3.0,1.55,1.47,1.7,1.62,1.59,1.65,1.18,2.66,1.43,2.13,1.51,2.01,2.28,2.46,1.82,2.44,0.58,2.48,2.4,3.28 26 | 0.8,0.73,1.89,1.1,1.73,1.62,0.96,2.73,0.57,1.83,1.95,1.38,1.86,1.42,2.7,1.88,1.93,1.82,6.28,6.59,3.34,1.42,1.58,1.3,2.89,1.05,1.42,1.62,1.5,1.6,1.5,1.14,2.71,1.46,2.1,1.51,1.95,2.15,2.34,1.87,2.46,0.3,2.36,2.42,3.04 27 | 0.87,0.73,1.87,1.14,1.55,1.77,0.66,2.29,0.77,1.78,2.12,0.96,1.27,0.82,2.66,2.0,1.79,1.69,6.3,6.56,3.19,1.38,1.57,1.38,2.54,0.35,1.27,1.58,1.41,1.49,1.5,0.84,2.79,1.4,1.64,0.26,1.98,2.17,2.37,1.94,2.32,0.26,1.73,2.44,2.82 28 | 
0.96,0.76,1.08,1.26,1.67,1.82,0.76,2.32,0.91,1.6,2.3,0.81,0.89,1.02,2.65,2.03,1.63,1.84,6.31,6.57,1.92,1.29,1.41,1.47,2.35,0.93,1.36,1.53,1.27,1.29,1.05,0.99,2.57,0.61,1.79,0.47,0.49,1.69,2.38,2.0,1.56,0.6,1.25,2.25,2.72 29 | 1.05,0.55,1.12,1.16,1.87,1.78,0.36,2.49,0.86,1.54,2.36,0.79,1.14,1.28,2.64,2.16,1.47,1.93,6.33,6.56,1.96,1.31,1.44,1.53,2.49,1.22,1.44,1.4,1.13,1.44,0.9,1.12,2.05,0.72,1.84,0.97,0.53,1.76,2.52,2.02,1.78,0.74,1.31,2.76,2.64 30 | 1.12,0.52,0.23,1.09,1.17,1.65,0.18,2.54,0.62,1.4,2.43,1.07,1.26,1.08,2.63,2.15,1.41,2.02,6.34,6.54,2.05,1.38,1.56,1.68,2.54,1.52,1.5,1.43,0.9,1.09,0.95,1.2,2.14,1.04,1.85,1.24,0.59,1.82,2.52,2.08,1.99,0.78,1.4,2.91,2.7 31 | 0.87,0.6,1.02,1.34,1.51,1.78,0.36,2.33,0.71,1.48,2.05,1.22,1.35,1.26,2.7,2.16,1.51,1.59,6.34,6.52,2.29,1.43,1.47,1.72,2.75,1.61,1.57,1.48,0.97,0.99,0.7,1.01,2.2,0.85,1.73,1.21,0.68,1.85,2.47,2.02,2.55,0.96,1.67,2.64,2.71 32 | 0.75,0.89,1.21,1.19,1.72,1.53,0.26,2.27,0.89,1.43,2.18,1.33,1.39,1.39,2.8,2.24,1.65,1.66,6.35,6.51,2.47,1.3,1.53,1.44,2.9,1.7,1.69,1.44,1.08,1.04,0.93,0.93,2.1,0.61,1.4,1.49,0.78,1.91,2.52,1.95,2.43,1.08,1.87,2.58,2.72 33 | 0.89,0.93,1.04,1.31,1.57,1.62,0.36,2.75,0.97,1.38,2.1,1.39,1.6,1.31,2.9,2.33,1.82,1.81,6.35,6.51,2.58,1.22,1.56,1.51,2.96,1.25,1.81,1.37,1.19,0.74,1.0,0.91,2.3,0.56,1.59,1.47,0.87,2.03,2.55,1.92,2.36,1.14,1.96,2.57,2.75 34 | 0.97,0.94,1.16,1.4,1.75,1.67,0.46,2.78,1.11,1.38,1.97,1.26,1.67,1.18,3.01,2.37,1.96,1.95,6.36,6.5,2.34,1.27,1.64,1.62,3.01,1.48,1.72,1.31,0.9,1.14,1.02,1.2,2.23,0.77,1.63,1.51,1.01,2.12,2.38,1.89,2.32,1.24,2.08,2.66,3.02 35 | 0.72,0.95,1.28,1.47,1.88,1.7,0.48,3.44,1.24,1.45,2.21,1.14,1.51,1.42,3.05,2.4,1.83,1.54,6.37,6.49,2.14,1.3,1.67,1.34,3.04,1.59,1.83,1.26,0.98,1.19,1.05,1.26,2.23,0.93,1.65,1.47,1.13,2.18,2.44,1.85,2.33,1.41,2.29,2.74,3.06 36 | 0.83,1.06,1.37,1.28,2.02,1.73,0.49,3.11,0.94,1.28,2.27,0.97,1.45,1.48,3.05,2.42,1.42,1.69,6.38,6.49,2.12,1.31,1.52,1.45,3.08,1.72,1.94,1.23,1.06,1.24,1.09,1.07,2.13,1.09,1.66,1.48,1.25,2.25,2.48,1.82,2.31,1.53,1.88,2.77,3.06 37 | 0.93,1.03,1.28,1.37,1.68,1.92,0.36,3.24,0.63,1.45,2.09,1.01,1.64,1.3,3.03,2.52,1.57,1.94,6.37,6.5,2.25,1.39,1.6,1.51,2.98,1.59,1.96,1.44,1.15,1.29,1.06,1.24,2.17,1.4,1.78,1.52,1.32,2.33,2.49,1.96,2.28,1.37,1.83,2.76,2.85 38 | 1.02,0.79,0.48,1.46,1.89,2.12,0.46,3.34,1.0,1.53,2.3,1.08,1.83,1.49,3.03,2.63,1.6,2.09,6.39,6.51,2.33,1.41,1.68,1.47,2.88,1.32,2.01,1.39,1.27,1.44,1.06,1.41,2.38,1.53,1.85,1.67,1.33,2.35,2.51,1.98,2.26,0.58,2.0,2.7,2.64 39 | 1.13,1.03,1.27,1.08,1.92,2.27,0.51,3.14,1.31,1.28,2.68,1.15,1.91,1.72,3.0,2.64,1.66,2.26,6.4,6.51,2.34,1.5,1.76,1.18,2.78,1.4,2.07,1.27,1.39,1.59,1.04,1.05,2.57,1.57,1.97,1.8,1.39,1.96,2.54,1.99,2.24,0.88,2.15,2.69,2.67 40 | 0.8,0.95,1.33,1.16,2.01,2.51,0.66,3.29,1.06,1.43,2.47,1.24,1.98,1.95,3.0,2.66,1.82,1.88,6.4,6.53,2.38,1.65,1.87,1.3,2.86,1.48,2.04,1.2,1.0,1.29,1.01,1.26,2.16,1.14,1.89,1.22,1.15,1.84,2.59,1.95,1.99,1.15,2.18,2.46,2.71 41 | 0.89,1.17,1.4,1.29,2.18,2.22,0.46,3.34,1.01,1.57,2.44,1.29,1.9,1.56,2.98,2.67,1.74,2.02,6.41,6.54,2.09,1.56,1.98,1.37,3.0,1.61,1.82,1.29,1.11,1.49,0.9,1.24,1.98,0.97,1.53,1.09,0.81,1.78,2.62,1.87,1.94,1.56,1.91,2.34,2.76 42 | 0.97,1.34,1.56,1.4,2.43,2.32,0.56,3.39,1.1,1.75,2.42,1.27,1.85,1.67,2.99,2.69,1.68,2.18,6.42,6.55,1.91,1.55,2.12,1.43,3.14,1.72,1.91,1.31,1.27,1.76,1.01,1.35,2.31,0.95,1.47,1.03,0.89,1.67,2.61,1.84,1.91,1.63,1.97,2.55,2.76 43 | 
1.02,1.39,0.81,1.25,1.91,2.39,0.71,3.11,1.18,1.78,2.52,1.34,1.46,1.76,3.0,2.66,1.87,2.24,6.4,6.56,1.69,1.64,2.2,1.47,3.25,1.59,1.96,1.4,1.33,1.49,1.07,1.08,2.42,1.03,1.71,1.05,1.1,1.85,2.55,1.72,1.94,1.68,2.06,2.63,2.65 44 | 1.08,1.44,1.24,1.32,2.06,2.48,0.51,3.09,1.16,1.98,2.55,0.98,1.66,1.8,3.06,2.68,1.74,2.31,6.42,6.57,1.65,1.47,1.99,1.51,3.32,1.52,1.99,1.43,1.4,1.59,1.12,1.16,1.97,1.0,1.79,1.3,0.97,1.91,2.07,1.66,1.8,1.7,2.13,2.56,2.55 45 | 1.16,1.47,1.33,1.39,2.2,2.51,0.56,3.05,1.3,2.15,2.57,1.03,1.71,1.57,3.07,2.68,1.92,2.37,6.43,6.6,1.92,1.48,2.05,1.55,3.35,1.58,2.05,1.35,1.48,1.59,1.2,1.3,2.14,0.95,1.83,1.29,0.98,1.89,2.43,1.62,1.84,1.73,2.25,2.69,2.72 46 | 1.19,1.51,1.42,1.45,2.37,2.55,0.57,3.25,1.36,2.34,2.61,1.05,1.83,1.37,3.09,2.72,1.95,2.43,6.47,6.62,2.09,1.59,2.1,1.6,3.39,1.66,2.12,1.27,1.53,1.39,1.16,1.7,2.07,1.18,1.85,1.35,1.2,1.84,2.52,1.56,1.2,1.79,2.33,2.78,2.76 47 | 1.1,1.42,1.07,1.4,2.21,2.57,0.58,3.04,1.51,2.42,2.67,1.2,1.89,1.46,3.1,2.72,2.03,2.34,6.51,6.62,2.15,1.79,2.25,1.57,3.46,1.61,2.07,1.16,1.44,1.29,1.09,1.55,2.56,1.22,1.9,1.46,1.35,2.07,2.64,1.52,1.21,1.72,2.61,2.88,2.84 48 | 1.13,1.48,1.26,1.46,2.31,2.62,0.66,2.99,1.59,2.28,2.72,1.39,1.91,1.55,3.1,2.71,2.1,2.41,6.53,6.63,2.25,1.84,2.31,1.59,3.53,1.69,2.1,1.13,1.5,1.23,1.14,1.49,2.14,1.35,1.92,1.64,1.43,2.15,2.65,1.5,1.51,1.94,2.57,2.91,3.02 49 | 1.17,1.57,1.33,1.63,2.53,2.71,0.67,2.98,1.64,1.79,2.8,1.57,1.44,1.73,3.06,2.71,2.2,2.44,6.47,6.62,2.55,1.95,2.3,1.64,3.74,1.6,2.1,1.2,1.6,1.09,1.2,1.55,2.05,1.4,1.94,1.78,1.49,2.12,2.67,1.62,2.96,1.97,2.63,2.89,3.17 50 | 1.22,1.6,1.48,1.74,2.4,2.77,0.69,3.04,1.69,1.98,2.83,1.58,1.76,1.8,3.07,2.73,2.24,2.46,6.46,6.61,2.68,2.01,2.41,1.8,3.84,1.55,2.12,1.24,1.49,0.99,1.27,1.64,2.2,1.47,1.93,1.88,1.49,2.03,2.68,1.65,3.0,1.82,2.73,2.92,3.27 51 | 1.28,1.69,1.58,1.87,2.47,2.84,0.7,3.02,1.75,2.15,2.88,1.65,1.96,2.01,3.1,2.74,2.32,2.49,6.44,6.6,2.98,2.01,2.48,1.96,3.94,1.58,2.14,1.32,1.55,0.99,1.35,1.7,2.32,1.48,1.95,1.89,1.55,2.0,2.69,1.68,3.01,1.92,2.76,2.95,3.28 52 | 1.35,1.82,1.67,2.01,2.56,2.92,0.73,2.97,1.35,2.34,2.82,1.67,2.09,2.18,3.1,2.75,2.35,2.53,6.42,6.59,3.27,2.15,2.54,2.1,3.54,1.62,2.17,1.45,1.63,1.14,1.4,1.85,2.45,1.56,2.0,1.81,1.57,2.13,2.72,1.72,3.02,2.01,2.78,2.97,3.33 53 | 1.23,1.89,1.55,2.1,2.59,2.77,0.76,2.95,0.83,2.42,2.78,1.73,2.05,1.77,3.13,2.75,2.39,2.59,6.42,6.6,3.29,2.18,2.65,2.25,3.04,1.65,2.08,1.58,1.7,1.29,1.49,1.76,2.17,1.62,2.16,1.93,1.58,2.17,2.74,1.77,3.05,2.1,2.81,3.0,3.32 54 | 1.18,1.95,1.82,2.18,2.65,2.67,0.66,2.91,0.32,2.28,2.75,1.97,2.01,1.29,3.14,2.77,2.47,2.65,6.43,6.6,3.38,2.27,2.7,2.2,2.54,1.69,2.01,1.66,1.76,1.39,1.53,1.59,2.52,1.71,2.19,1.94,1.66,2.25,2.74,2.0,3.05,2.07,2.85,3.02,3.28 55 | 0.87,1.59,2.06,1.91,2.56,1.97,0.46,2.49,0.11,1.98,1.98,1.82,1.99,0.62,2.83,2.78,0.87,2.69,6.43,6.57,3.39,2.11,2.58,1.8,2.24,1.35,1.57,1.39,1.5,1.29,1.41,1.41,2.45,1.86,2.2,1.97,1.67,2.31,2.77,1.92,3.04,1.68,2.59,3.03,3.27 56 | 0.72,1.39,1.51,1.46,2.14,1.02,0.63,2.54,0.42,1.98,1.63,1.73,1.94,0.68,2.85,2.8,0.75,2.75,6.41,6.51,3.41,1.88,2.39,1.4,2.09,0.85,1.37,1.41,1.1,1.29,1.22,1.16,2.53,1.87,2.24,2.01,1.47,2.39,2.79,1.88,2.86,0.98,2.38,3.03,3.13 57 | 0.69,1.13,1.11,1.46,1.47,0.82,0.58,1.89,0.28,1.68,1.65,1.24,2.05,0.8,2.85,2.67,0.7,2.43,6.39,6.48,3.44,1.34,1.38,1.1,1.94,0.21,1.17,1.43,0.85,1.19,0.3,0.86,2.66,1.72,2.26,2.0,1.04,2.44,2.6,1.76,2.84,0.62,2.09,2.98,3.11 58 | 
0.72,0.68,1.56,1.23,0.66,0.76,0.12,1.84,0.27,1.48,1.71,1.14,2.03,0.91,2.86,2.53,0.64,1.94,6.38,6.44,3.36,1.12,1.21,0.92,1.79,0.25,1.08,1.45,0.63,0.99,0.13,0.78,2.47,1.38,1.71,1.61,0.84,2.13,2.5,1.72,2.7,0.65,1.09,2.63,3.1 59 | 0.74,0.34,0.22,1.28,0.83,0.77,0.13,1.82,0.07,1.38,1.78,1.07,1.0,0.97,2.86,2.45,0.71,1.73,6.37,6.43,3.21,0.99,0.69,0.75,1.79,0.28,1.11,1.48,0.47,0.79,0.17,0.66,2.17,1.03,1.53,1.36,0.87,1.76,2.13,1.54,2.31,0.7,0.98,2.59,2.71 60 | 0.77,0.25,0.28,1.31,0.99,0.83,0.21,1.81,0.14,1.58,1.96,0.89,0.65,1.02,2.87,2.44,0.84,1.49,6.36,6.42,1.79,0.61,0.8,0.77,1.81,0.32,1.14,1.52,0.5,0.79,0.22,0.69,2.17,0.86,1.18,0.93,0.59,1.18,1.9,1.46,1.95,0.87,0.83,2.58,3.21 61 | 0.72,0.18,0.34,1.33,1.03,0.92,0.23,1.84,0.27,1.38,1.83,0.31,0.46,1.04,2.85,1.83,0.87,1.41,6.35,6.42,1.71,0.66,0.87,0.55,1.89,0.35,1.14,1.33,0.5,0.69,0.3,0.67,1.59,0.72,0.82,0.87,0.63,0.87,1.95,1.22,1.84,0.98,0.86,2.57,2.06 62 | 0.74,0.22,0.38,1.36,1.12,1.06,0.25,1.86,0.4,1.43,1.78,0.4,0.7,1.07,2.84,1.94,0.94,1.43,6.34,6.41,1.56,0.71,0.85,0.5,1.95,0.41,1.16,1.01,0.51,0.59,0.36,0.58,1.65,0.84,0.84,1.11,0.69,1.02,2.01,1.31,1.83,1.05,0.91,2.59,2.13 63 | 0.76,0.26,0.46,1.38,1.15,1.15,0.27,1.89,0.48,1.48,1.8,0.41,0.6,1.09,2.84,2.0,1.02,1.48,6.33,6.41,1.58,0.74,0.55,0.53,2.04,0.49,1.19,0.83,0.53,0.59,0.43,0.58,1.81,0.89,1.08,1.13,0.76,1.03,2.04,1.4,1.93,0.18,0.95,2.65,2.5 64 | 0.77,0.29,0.51,1.42,1.19,1.27,0.3,2.14,0.55,1.51,1.82,0.45,0.01,1.11,2.87,2.13,1.07,1.54,6.34,6.42,1.61,0.8,0.7,0.57,2.14,0.54,1.23,0.67,0.55,0.64,0.49,0.63,1.88,1.0,1.13,1.2,0.78,1.08,2.12,1.48,1.96,0.25,1.0,2.7,2.51 65 | 0.79,0.33,0.57,1.45,1.21,1.29,0.35,2.3,0.64,1.56,1.84,0.48,0.13,1.14,2.88,2.19,1.12,1.58,6.36,6.44,1.65,0.81,0.78,0.61,2.22,0.6,1.27,0.86,0.59,0.69,0.54,0.69,1.96,1.04,1.14,1.23,0.87,1.13,2.15,1.53,2.01,0.44,1.04,2.76,2.55 66 | 0.79,0.37,0.63,1.5,1.24,1.33,0.39,2.33,0.73,1.61,1.88,0.51,0.32,1.17,2.9,2.21,1.16,1.61,6.39,6.45,1.69,0.84,0.86,0.66,2.29,0.65,1.32,1.0,0.63,0.79,0.6,0.74,2.02,1.11,1.19,1.28,0.94,1.15,2.2,1.55,2.04,0.64,1.06,2.8,2.56 67 | 0.87,0.47,0.65,1.53,1.3,1.8,0.41,1.95,0.82,1.68,2.03,0.54,0.47,1.26,2.9,2.24,1.17,1.69,6.4,6.56,1.86,0.84,0.9,0.67,2.34,0.8,1.39,1.08,0.77,0.89,0.69,0.77,2.04,1.14,1.27,1.34,0.94,1.2,2.28,1.61,2.05,0.68,1.11,2.81,2.57 68 | 0.99,0.53,0.82,1.58,1.38,1.6,0.46,1.97,0.95,1.73,2.2,0.54,0.58,1.37,2.91,2.26,1.2,1.77,6.42,6.58,2.0,0.85,0.91,0.69,2.39,0.92,1.46,1.16,0.85,0.99,0.8,0.81,2.13,1.17,1.33,1.34,1.05,1.33,2.39,1.62,2.06,0.76,1.18,2.83,2.6 69 | 1.05,0.69,1.28,1.6,1.42,1.75,0.52,2.04,1.09,1.78,2.34,0.57,0.62,1.49,2.94,2.28,1.22,1.85,6.44,6.59,2.12,0.85,0.98,0.76,2.46,1.08,1.52,1.22,0.9,1.04,0.92,0.91,2.19,1.24,1.41,1.46,1.07,1.37,2.44,1.67,2.08,0.82,1.24,2.86,2.61 70 | 0.21,0.86,1.34,1.62,1.48,1.97,0.6,2.27,0.17,1.82,2.45,0.59,0.69,1.58,2.95,2.29,1.28,1.93,6.45,6.6,2.41,0.86,1.23,0.94,2.54,1.15,1.58,1.27,0.95,1.13,0.99,0.99,2.27,1.34,1.5,1.54,1.1,1.41,2.52,1.69,2.12,0.85,1.33,2.86,2.75 71 | 0.35,0.96,1.4,1.65,1.52,2.15,0.71,2.04,0.31,1.88,2.56,0.68,0.74,1.7,2.95,2.29,1.37,1.99,6.47,6.61,2.68,0.87,1.43,1.3,2.65,1.22,1.65,1.3,0.98,1.2,1.05,1.05,2.37,1.39,1.56,1.58,1.14,1.48,2.55,1.83,2.13,0.9,1.47,2.87,2.87 72 | 0.44,1.09,1.47,1.7,1.56,2.31,0.84,2.17,0.4,1.96,2.61,0.75,0.81,1.86,2.96,2.3,1.42,2.05,6.5,6.61,2.77,0.88,1.56,1.34,2.79,1.29,1.72,1.35,1.0,1.29,1.13,1.1,2.44,1.5,1.58,1.79,1.16,1.58,2.58,2.04,2.16,0.93,1.55,2.87,3.01 73 | 
1.48,1.22,1.55,1.8,1.78,2.37,0.96,2.23,1.52,1.99,2.68,0.78,0.83,1.89,2.98,2.3,2.21,2.08,6.51,6.6,2.78,0.96,1.38,1.4,2.9,1.4,1.8,1.55,1.18,1.34,1.25,1.24,2.51,1.52,1.6,1.84,1.25,1.7,2.6,2.05,2.17,1.4,1.57,2.12,3.06 74 | 1.52,1.38,1.79,1.89,1.91,2.48,1.1,2.35,1.56,2.01,2.71,0.84,0.89,1.96,3.01,2.31,2.14,2.11,6.54,6.62,2.92,1.04,1.41,1.48,3.01,1.51,1.88,1.67,1.32,1.39,1.36,1.35,2.6,1.54,1.61,1.86,1.29,1.8,2.7,2.09,2.21,1.41,1.61,2.21,3.05 75 | 1.57,1.53,2.04,1.97,1.98,2.56,1.23,2.43,1.71,2.03,2.77,0.92,0.96,2.03,3.02,2.32,2.28,2.16,6.53,6.62,2.94,1.11,1.45,1.53,3.23,1.63,1.97,1.75,1.41,1.46,1.43,1.51,2.68,1.55,1.74,1.88,1.38,1.86,2.77,2.11,2.23,1.42,1.66,2.2,3.07 76 | 1.62,1.63,2.06,2.04,2.05,2.63,1.36,2.49,1.83,2.06,2.84,0.94,1.11,2.06,3.03,2.32,2.33,2.19,6.53,6.64,2.94,1.18,1.47,1.61,3.29,1.67,2.01,1.81,1.49,1.53,1.54,1.64,2.77,1.63,1.82,1.95,1.45,1.91,2.82,2.12,2.29,1.42,1.77,2.25,3.1 77 | 1.67,1.72,2.07,2.09,2.14,2.71,1.42,2.54,1.85,2.06,2.95,0.97,1.15,2.13,3.03,2.32,2.36,2.24,6.54,6.65,2.94,1.19,1.48,1.67,3.44,1.73,2.06,1.86,1.6,1.74,1.64,1.68,2.83,1.78,1.86,2.04,1.47,2.0,2.89,2.17,2.3,1.44,1.82,2.3,3.13 78 | 1.73,1.82,2.07,2.16,2.22,2.74,1.49,2.59,1.93,2.08,3.04,0.98,1.25,2.19,3.05,2.35,2.4,2.28,6.56,6.66,2.95,1.2,1.61,1.73,3.54,1.77,2.08,1.9,1.68,1.79,1.77,1.76,2.92,1.95,1.85,2.13,1.5,2.03,2.9,2.17,2.34,1.47,1.87,2.35,3.14 79 | 1.79,1.93,2.09,2.22,2.26,2.77,1.56,2.64,1.97,2.13,3.09,1.0,1.27,2.23,3.07,2.37,2.42,2.32,6.58,6.67,2.95,1.21,1.64,1.77,3.59,1.82,2.18,1.98,1.74,1.79,1.85,1.8,3.04,2.01,1.84,2.15,1.62,2.03,2.93,2.2,2.36,1.49,2.01,2.4,3.18 80 | 1.85,1.98,2.18,2.25,2.35,2.81,1.6,2.72,2.0,2.18,3.16,1.07,1.28,2.29,3.08,2.38,2.45,2.39,6.6,6.67,2.97,1.21,1.72,1.8,3.62,1.86,2.24,2.03,1.81,1.84,1.87,1.83,3.09,2.07,1.83,2.18,1.65,2.06,2.94,2.21,2.36,1.51,2.06,2.41,3.18 81 | 1.9,2.06,2.22,2.27,2.38,2.84,1.66,2.69,2.04,2.28,3.2,1.08,1.32,2.33,3.07,2.41,2.49,2.46,6.56,6.67,2.98,1.22,1.71,1.84,3.64,1.92,2.31,2.05,1.89,1.89,1.91,1.8,3.1,2.13,1.82,2.18,1.68,2.07,2.96,2.21,2.39,1.51,2.1,2.42,3.24 82 | 1.96,2.1,2.27,2.31,2.41,2.89,1.69,2.69,2.09,2.3,3.22,1.07,1.33,2.27,3.05,2.44,2.46,2.54,6.53,6.68,2.97,1.23,1.76,1.87,3.69,1.99,2.38,2.07,1.94,1.89,1.93,1.76,3.11,2.16,1.81,2.19,1.69,2.09,3.01,2.0,2.4,1.52,2.11,2.44,3.31 83 | 2.01,2.07,2.29,2.34,2.41,2.92,1.73,2.79,2.12,2.36,3.2,1.08,1.39,2.26,3.05,2.45,2.49,2.61,6.53,6.71,2.96,1.27,1.72,1.91,3.74,2.04,2.47,2.09,2.03,1.87,2.0,1.74,3.09,2.14,1.81,2.19,1.73,2.1,3.04,1.95,2.42,1.45,2.11,2.41,3.32 84 | 2.04,2.03,2.3,2.36,2.43,2.95,1.77,2.79,2.15,2.38,3.17,1.24,1.51,2.24,3.04,2.42,2.5,2.7,6.49,6.72,2.96,1.33,1.75,1.94,3.79,2.09,2.57,2.1,2.11,1.84,2.05,1.72,3.08,2.13,1.79,2.22,1.76,2.1,3.04,1.95,2.41,1.32,2.08,2.37,3.31 85 | 2.05,2.01,2.32,2.38,2.44,2.93,1.77,2.79,2.16,2.37,3.14,1.32,1.51,2.22,3.04,2.41,2.51,2.69,6.49,6.71,2.96,1.34,1.76,1.71,3.84,2.08,2.55,2.09,2.09,1.81,2.03,1.7,3.06,2.13,1.8,2.23,1.87,2.11,3.07,1.97,2.44,1.21,2.02,2.38,3.3 86 | 2.02,1.99,2.31,2.35,2.42,2.92,1.76,2.74,2.14,2.35,3.11,1.45,1.52,2.19,3.03,2.4,2.6,2.67,6.48,6.71,2.99,1.44,1.75,1.88,3.83,2.06,2.53,2.07,2.05,1.78,2.0,1.68,3.04,2.11,1.8,2.23,1.95,2.04,3.08,1.98,2.45,1.18,2.03,2.4,3.3 87 | 2.0,1.97,2.29,2.33,2.39,2.88,1.74,2.74,2.08,2.31,3.1,1.4,1.48,2.16,2.98,2.25,2.51,2.64,6.45,6.69,2.92,1.42,1.85,1.84,3.8,2.04,2.5,2.03,2.0,1.74,1.97,1.64,2.99,2.07,1.77,2.2,1.92,1.99,3.03,1.94,2.41,1.23,2.0,2.37,3.29 88 | 
1.97,1.92,2.24,2.28,2.33,2.82,1.7,2.74,2.04,2.26,3.08,1.23,1.45,2.12,2.97,2.19,2.49,2.59,6.42,6.68,2.89,1.41,1.89,1.8,3.74,1.99,2.46,1.99,1.95,1.7,1.91,1.6,2.91,2.06,1.74,2.17,1.91,1.95,3.01,1.93,2.37,1.22,1.98,2.34,3.27 89 | 1.9,1.88,2.2,2.22,2.28,2.77,1.66,2.69,1.97,2.2,3.05,1.2,1.43,2.06,2.96,2.05,2.45,2.52,6.36,6.66,2.85,1.38,1.92,1.73,3.67,1.95,2.4,1.95,1.9,1.65,1.88,1.55,2.73,2.03,1.66,2.12,1.88,1.92,3.04,1.92,2.35,1.2,1.93,2.33,3.25 90 | 1.85,1.82,2.15,2.15,2.22,2.72,1.6,2.79,1.91,2.16,3.03,1.19,1.37,1.99,2.96,2.01,2.38,2.47,6.27,6.63,2.86,1.35,2.01,1.64,3.62,1.89,2.32,1.87,1.83,1.59,1.82,1.47,2.61,2.02,1.65,2.1,1.83,1.87,3.04,1.89,2.32,1.19,1.92,2.28,3.23 91 | 1.8,1.78,2.1,2.11,2.14,2.69,1.56,2.79,1.85,2.12,3.0,1.2,1.42,1.94,2.88,2.0,2.39,2.45,6.38,6.63,2.86,1.32,2.0,1.6,3.57,1.85,2.28,1.83,1.8,1.57,1.8,1.45,2.4,1.99,1.66,2.06,1.84,1.84,2.9,1.88,2.25,1.21,1.95,2.3,3.21 92 | 1.74,1.73,2.04,2.05,2.09,2.64,1.51,2.74,1.8,2.07,2.94,1.16,1.44,1.91,2.88,1.99,2.39,2.41,6.39,6.62,2.84,1.31,1.98,1.55,3.54,1.82,2.25,1.8,1.76,1.51,1.77,1.41,2.48,1.98,1.64,2.05,1.81,1.83,2.86,1.87,2.22,1.26,1.94,2.29,3.18 93 | 1.68,1.7,1.99,1.99,2.04,2.6,1.45,2.74,1.73,2.0,2.88,1.12,1.59,1.85,2.87,1.94,2.35,2.37,6.4,6.6,2.8,1.31,1.89,1.5,3.5,1.76,2.21,1.75,1.71,1.46,1.72,1.37,2.45,1.97,1.62,2.01,1.79,1.8,2.78,1.84,2.16,1.25,1.92,2.46,3.16 94 | 1.6,1.67,1.95,1.95,2.01,2.57,1.41,2.64,1.65,1.98,2.85,1.14,1.79,1.81,2.85,1.95,2.21,2.32,6.38,6.57,2.89,1.3,1.9,1.47,3.5,1.73,2.18,1.71,1.69,1.44,1.69,1.35,2.28,1.89,1.62,1.9,1.83,1.81,2.74,1.81,2.16,1.23,1.92,2.45,3.12 95 | 1.54,1.47,1.91,1.91,1.96,2.53,1.35,2.54,1.57,1.96,2.83,1.16,1.92,1.76,2.81,1.91,2.16,2.28,6.36,6.57,2.85,1.31,1.87,1.42,3.47,1.69,2.15,1.66,1.61,1.4,1.65,1.32,2.38,1.89,1.61,1.86,1.82,1.78,2.65,1.79,2.17,1.22,1.88,2.45,3.09 96 | 1.19,1.33,1.86,1.65,1.83,2.42,1.16,2.39,1.29,1.79,2.78,1.17,2.05,1.48,2.8,1.87,2.13,2.1,6.34,6.56,2.86,1.28,1.85,1.35,3.44,1.55,1.99,1.52,1.53,1.37,1.6,1.24,2.47,1.88,1.61,1.82,1.8,1.76,2.55,1.78,2.16,1.18,1.84,2.6,3.05 97 | 1.11,1.11,1.81,1.08,1.75,2.17,0.56,2.39,0.75,1.78,2.38,1.1,2.05,0.85,2.75,1.84,2.13,1.94,6.31,6.54,2.78,1.12,1.58,1.13,3.39,1.37,1.79,1.52,1.43,1.29,1.59,1.05,2.55,1.87,1.43,1.76,1.66,1.72,2.4,1.62,2.18,0.51,1.78,2.34,2.91 98 | 1.0,0.85,1.52,0.8,1.36,1.42,0.36,2.91,0.33,1.68,1.98,1.06,2.14,0.89,2.75,1.58,2.14,1.79,6.28,6.52,2.9,1.03,1.47,0.28,3.34,1.0,1.57,1.5,0.6,1.19,1.4,0.84,2.62,1.32,1.29,1.74,1.57,1.69,2.39,1.61,2.2,0.48,1.77,2.48,2.82 99 | 0.94,0.77,1.01,1.02,1.34,1.37,0.46,2.74,0.56,1.68,1.76,0.78,1.38,1.19,2.76,2.09,2.31,1.69,6.25,6.52,2.92,1.31,1.36,0.53,2.99,0.3,1.52,1.47,0.65,1.09,1.33,0.74,2.71,1.44,1.32,1.66,1.43,1.46,2.34,1.45,2.16,0.67,1.46,2.6,2.61 100 | 0.87,0.59,0.51,1.14,1.46,1.52,0.49,3.29,0.88,1.58,1.81,0.72,1.36,1.39,2.86,2.11,1.39,1.74,6.22,6.51,2.54,1.3,1.26,0.8,2.64,0.47,1.44,1.42,0.7,0.99,1.41,0.82,2.24,1.14,0.96,1.28,0.57,1.57,2.29,1.39,2.21,0.69,1.33,2.64,2.56 101 | 0.8,0.64,0.11,1.06,1.61,1.07,0.36,3.39,0.81,1.48,1.77,0.67,1.3,0.96,2.9,1.99,1.24,1.82,6.17,6.49,2.57,1.31,1.19,1.05,2.44,0.59,1.37,1.38,0.85,1.06,1.05,0.99,2.05,0.58,0.88,1.25,0.68,1.62,1.46,1.25,2.27,0.72,1.25,2.7,2.51 102 | 0.65,0.57,0.4,0.91,1.25,1.12,0.41,2.43,0.6,1.38,1.75,0.68,1.46,1.05,2.9,1.93,1.06,1.64,6.17,6.48,2.79,1.28,1.09,1.07,2.09,0.88,1.43,1.31,0.8,1.14,0.89,0.84,2.13,0.81,0.89,1.12,0.46,1.68,1.48,1.22,2.26,0.76,1.17,2.76,2.43 103 | 
0.62,0.59,0.49,0.96,1.41,1.34,0.52,1.94,0.64,1.38,1.98,0.94,1.22,1.17,2.87,1.92,1.17,1.79,6.18,6.47,2.78,1.12,1.86,0.95,2.24,1.12,1.5,0.97,0.75,0.99,0.93,0.91,2.03,0.95,1.02,1.13,0.45,1.6,1.61,1.21,2.09,0.58,1.3,2.55,2.61 104 | 0.74,0.67,0.33,1.08,1.66,1.55,0.64,2.39,0.67,1.43,2.26,1.06,1.61,1.29,2.85,1.87,1.61,1.96,6.2,6.46,2.72,1.03,0.7,1.1,2.41,1.17,1.27,1.03,0.89,0.96,1.02,1.03,2.07,1.0,1.08,1.24,0.57,1.15,1.7,1.42,2.22,0.7,1.33,2.43,2.72 105 | 0.8,0.5,0.63,0.83,1.37,1.3,0.3,2.71,0.9,1.52,1.89,0.71,1.81,1.04,2.83,1.88,1.28,1.69,6.21,6.46,2.56,0.98,0.88,1.31,2.54,1.26,1.39,1.08,0.97,1.07,1.18,0.77,2.22,1.06,0.91,1.21,0.72,1.22,1.77,1.56,2.31,0.47,1.08,2.75,2.38 106 | 0.81,0.57,0.84,0.89,1.4,1.42,0.44,2.94,1.22,1.68,1.97,0.63,1.65,0.94,2.83,1.84,1.14,1.78,6.17,6.44,2.32,0.52,0.92,1.43,2.36,1.35,1.51,1.2,1.15,0.89,0.81,0.86,2.38,0.86,0.87,0.86,0.84,1.4,1.51,1.09,2.2,0.44,1.15,2.64,2.47 107 | 0.85,0.72,1.11,1.01,1.42,1.59,0.55,2.96,1.24,1.88,2.1,0.75,1.67,1.0,2.79,2.11,0.89,1.93,6.16,6.44,1.99,0.49,1.05,1.2,2.39,1.43,1.65,1.32,1.29,0.89,0.88,1.03,2.25,0.63,0.59,0.99,0.33,0.92,1.55,1.27,2.1,0.62,1.26,2.66,2.51 108 | 0.91,0.97,1.21,1.13,1.65,1.77,0.63,3.44,0.88,1.78,1.82,1.14,1.96,1.11,2.75,2.15,1.01,1.61,6.15,6.44,2.21,0.81,1.28,1.28,2.48,1.5,1.77,1.36,1.0,0.99,0.95,1.1,1.98,0.72,0.88,1.03,0.39,1.1,1.58,1.43,2.02,0.95,1.25,2.71,2.26 109 | 0.97,1.03,0.61,1.18,1.79,1.87,0.46,3.59,0.95,1.78,1.9,1.25,1.95,0.79,2.82,2.17,1.3,1.82,6.2,6.43,2.21,0.93,1.51,1.35,2.69,1.59,1.84,1.37,1.06,1.07,1.12,1.16,2.26,0.88,1.26,1.68,0.58,1.15,1.59,1.71,2.07,1.19,1.52,2.72,2.4 110 | 1.02,0.93,0.51,1.26,1.97,1.95,0.51,3.54,0.95,1.88,2.01,1.28,1.89,0.95,2.82,2.21,1.43,1.96,6.23,6.45,2.3,1.2,1.62,1.47,2.75,1.67,1.92,1.38,1.15,1.04,1.53,1.22,2.44,0.97,1.43,1.67,0.63,1.2,1.67,1.86,2.13,1.37,1.02,2.84,2.45 111 | 1.08,0.93,0.49,1.31,1.88,2.07,0.52,3.29,1.2,1.98,2.06,1.41,1.84,1.03,2.85,2.11,1.66,2.07,6.26,6.46,2.42,1.25,1.6,1.54,2.84,1.4,2.01,1.39,1.23,1.07,1.64,1.28,2.62,1.06,1.42,1.66,0.67,1.36,1.77,2.1,2.18,1.39,1.84,3.03,2.58 112 | 0.84,1.24,0.74,1.34,2.11,2.12,0.36,2.94,0.91,2.08,1.94,1.34,2.04,1.12,2.89,1.88,1.92,1.69,6.31,6.48,2.15,1.1,1.33,1.3,2.94,1.49,1.87,1.41,1.34,1.11,1.72,1.35,2.45,0.64,1.29,1.47,0.44,1.49,1.88,2.14,2.17,1.17,1.51,3.1,2.73 113 | 0.89,1.46,1.52,1.42,2.16,2.26,0.46,3.29,1.15,1.89,2.0,1.18,2.22,1.27,2.78,1.87,2.19,1.84,6.35,6.48,1.96,1.13,1.37,1.42,3.04,1.61,1.95,1.45,1.11,1.16,1.56,1.22,2.15,0.62,1.53,1.48,0.72,1.5,1.9,1.85,2.1,1.42,1.4,3.02,2.61 114 | 0.93,1.6,1.8,1.46,2.4,2.41,0.58,3.32,1.46,1.97,2.06,1.43,2.38,1.38,2.77,1.79,2.36,1.98,6.38,6.49,1.98,1.27,1.44,1.56,3.14,1.72,2.04,1.49,1.25,1.21,1.61,1.28,2.23,1.11,1.68,1.54,0.9,1.64,1.93,1.8,2.15,1.48,1.62,3.05,2.53 115 | 1.05,1.65,1.92,1.06,2.58,2.57,0.67,3.46,1.66,2.14,2.21,1.48,2.44,1.09,2.85,1.88,2.4,2.03,6.4,6.52,2.11,1.39,1.69,1.62,3.24,1.8,2.1,1.6,1.33,1.29,1.71,1.35,2.65,1.34,1.87,1.62,1.44,1.95,2.31,2.2,2.22,1.54,1.98,2.97,2.78 116 | 1.12,1.42,1.41,1.24,2.67,2.7,0.48,3.53,1.68,2.38,2.1,1.56,2.64,1.17,2.87,2.11,2.55,1.8,6.41,6.54,2.34,1.44,1.82,1.75,3.34,1.61,2.16,1.81,1.41,1.34,1.7,1.45,2.1,1.51,1.98,1.76,1.68,2.03,2.38,2.28,2.21,1.67,2.15,2.92,2.96 117 | 1.31,1.53,0.43,1.42,2.26,2.8,0.64,3.33,1.59,2.48,2.01,1.65,2.59,1.29,2.9,2.24,2.71,2.06,6.43,6.54,2.47,1.69,1.38,1.56,3.24,1.7,1.97,1.78,1.52,1.4,1.72,1.22,2.18,1.41,1.84,1.79,1.17,2.15,2.62,2.17,2.06,1.63,1.92,2.93,2.81 118 | 
1.54,1.61,0.96,1.61,2.3,2.92,0.73,3.24,1.25,2.38,1.93,1.1,2.46,1.4,2.98,1.97,2.41,2.18,6.46,6.54,2.6,1.85,1.36,1.67,3.14,1.79,2.03,1.76,1.64,1.46,1.74,1.32,2.55,1.36,1.55,1.67,1.54,1.87,2.55,2.12,2.16,1.62,1.89,3.07,3.0 119 | 1.5,1.56,1.47,1.79,2.39,2.95,0.78,3.05,1.11,2.44,2.1,1.15,2.15,1.51,2.89,1.99,2.08,2.1,6.44,6.55,2.22,1.51,1.56,1.79,3.29,1.85,2.1,1.74,1.6,1.53,1.75,1.39,2.55,1.58,1.8,1.6,1.8,1.74,2.61,2.21,2.32,1.72,2.43,3.12,2.93 120 | 1.57,1.68,1.84,2.0,2.44,3.02,0.84,2.97,1.4,2.08,2.21,1.38,2.35,1.57,2.92,1.95,2.3,2.22,6.44,6.55,2.14,1.77,1.91,1.95,3.37,1.97,2.19,1.71,1.71,1.62,1.74,1.53,2.62,1.69,1.82,1.74,1.86,2.09,2.79,2.34,2.37,1.77,2.56,3.16,3.07 121 | 1.75,1.75,1.91,2.06,2.46,1.82,0.86,2.99,1.6,2.18,2.27,1.42,2.55,1.68,2.9,1.98,2.11,2.29,6.41,6.54,2.37,1.9,1.94,2.0,3.45,2.03,2.22,1.75,1.74,1.74,1.77,1.55,2.85,1.7,1.84,1.74,1.78,2.24,2.81,2.38,2.4,1.85,2.6,3.15,3.11 122 | 1.81,1.85,2.01,2.11,2.49,1.87,0.89,3.09,1.76,2.28,2.32,1.54,2.58,1.79,2.93,2.02,1.76,2.36,6.4,6.53,2.51,2.15,1.95,2.03,3.52,2.08,2.24,1.77,1.77,1.79,1.81,1.59,2.99,1.82,1.81,1.8,1.93,2.31,2.84,2.49,2.45,1.9,2.58,3.1,3.14 123 | 1.85,1.97,2.13,2.14,2.53,1.93,0.91,3.14,1.84,2.38,2.46,1.67,2.67,1.85,2.95,2.02,1.93,2.43,6.36,6.53,2.76,2.24,2.07,2.07,3.59,2.12,2.29,1.79,1.8,1.84,1.85,1.62,3.07,1.83,1.73,1.9,2.06,2.28,2.85,2.5,2.62,2.03,2.77,2.97,3.16 124 | 1.89,2.11,2.31,2.2,2.57,1.99,0.95,3.14,1.9,2.48,2.58,1.75,2.74,1.91,2.96,2.21,2.23,2.49,6.33,6.51,2.88,2.36,2.2,2.1,3.65,2.17,2.32,1.82,1.84,1.84,1.89,1.66,2.95,2.01,1.67,2.06,2.08,2.27,2.87,2.35,2.76,2.15,2.87,3.03,3.2 125 | 1.76,2.17,2.33,1.89,2.61,2.06,1.0,3.14,1.96,2.53,2.74,1.88,2.62,1.96,2.96,2.35,2.3,2.55,6.31,6.51,3.04,2.37,2.39,2.14,3.71,2.23,2.35,1.87,1.9,1.89,1.93,1.7,2.76,2.03,1.76,2.15,1.95,2.34,2.85,2.33,2.71,2.21,2.91,3.02,3.23 126 | 1.75,2.09,2.37,1.72,2.65,2.13,1.03,3.09,2.08,2.58,2.95,1.9,2.42,2.01,2.98,2.37,1.81,2.63,6.33,6.51,2.86,2.33,2.29,2.19,3.54,2.1,2.41,1.93,1.95,1.89,1.86,1.74,2.78,1.95,1.91,2.29,1.94,2.16,2.76,2.23,2.49,2.22,2.92,3.01,3.21 127 | 1.33,2.24,2.39,1.28,2.81,2.26,1.21,3.04,2.11,2.73,3.2,2.08,1.28,2.42,2.9,2.4,1.56,2.85,6.31,6.5,2.78,2.42,1.65,2.26,3.44,2.22,2.49,2.1,2.03,1.55,1.4,1.95,2.65,2.08,1.97,2.28,1.93,2.18,2.64,2.22,2.39,2.4,2.97,2.87,3.36 128 | 1.1,1.6,2.41,1.06,2.96,2.4,1.37,3.04,2.14,2.85,3.42,2.05,1.2,2.59,2.8,2.44,1.31,2.96,6.3,6.49,2.5,2.59,1.61,2.11,2.54,2.32,2.66,2.23,2.09,1.47,1.2,1.85,2.11,2.22,2.03,2.24,1.78,2.19,2.45,2.13,2.12,2.41,2.7,2.74,3.03 129 | 1.07,1.53,2.44,0.99,2.96,2.47,1.54,2.99,1.49,2.68,3.44,1.99,0.91,2.54,2.79,2.53,1.35,2.69,6.28,6.49,2.32,2.6,1.36,1.96,2.34,2.4,2.74,2.22,2.12,1.2,0.42,1.57,2.35,2.25,1.81,2.1,1.75,2.12,2.02,1.96,1.83,2.02,2.68,2.99,2.86 130 | 1.05,1.38,2.01,0.9,2.77,2.22,1.59,2.49,1.3,2.52,2.37,1.28,0.56,1.99,2.78,2.42,1.08,2.53,6.28,6.48,2.23,2.33,1.08,1.62,2.14,2.37,2.86,2.2,2.16,0.59,0.5,1.3,2.65,2.0,1.75,2.11,1.51,1.19,1.94,1.59,1.71,1.41,1.95,3.12,2.61 131 | 1.08,1.13,1.07,0.95,2.51,1.85,1.35,2.44,0.96,2.21,2.1,1.01,0.7,1.78,2.77,2.09,1.04,2.2,6.26,6.48,2.21,1.41,1.08,1.37,2.14,2.15,2.42,2.17,1.9,0.83,0.61,1.05,2.52,1.14,1.1,2.17,1.42,0.95,1.73,1.27,1.48,1.24,1.79,3.08,2.48 132 | 1.09,0.66,0.06,1.01,2.01,1.32,0.88,2.09,0.51,1.83,2.01,0.88,0.78,1.45,2.75,2.08,1.0,1.98,6.25,6.46,2.28,0.89,1.09,1.2,2.1,1.6,1.99,2.03,1.64,1.05,0.69,0.74,2.56,0.91,0.82,1.66,1.06,0.86,1.7,1.14,1.46,1.08,1.62,3.03,2.27 133 | 
0.78,0.34,0.06,1.05,1.57,0.92,0.42,2.64,0.92,1.48,1.97,0.72,0.93,1.11,2.76,2.19,0.74,2.69,6.23,6.47,2.43,1.27,1.14,0.88,2.34,1.21,1.74,1.93,1.47,0.81,0.78,0.7,2.34,0.84,0.68,1.44,0.95,1.02,1.51,1.04,1.85,1.13,1.4,2.73,2.12 134 | 0.65,0.23,0.13,1.08,0.82,0.84,0.21,3.22,0.8,1.08,1.95,0.68,0.95,0.97,2.8,1.94,0.82,2.62,6.22,6.48,2.49,1.23,1.17,0.82,2.54,1.0,1.62,1.66,1.36,0.81,0.8,0.56,2.05,0.6,0.77,1.04,0.64,1.33,1.59,0.95,2.36,1.16,0.8,2.49,2.15 135 | 0.67,0.41,0.21,1.12,0.91,0.85,0.16,3.24,0.74,0.98,1.9,0.74,0.99,0.99,2.8,1.93,0.91,2.64,6.22,6.48,2.55,1.14,1.25,0.87,2.74,1.04,1.65,1.63,1.28,0.83,0.85,0.54,2.08,0.66,0.85,0.89,0.37,1.58,1.62,1.0,2.76,1.4,0.65,2.32,2.21 136 | 0.75,0.33,0.29,1.14,1.09,0.87,0.19,3.32,0.82,1.0,1.92,0.82,1.23,1.01,2.82,1.86,0.98,2.65,6.19,6.49,2.59,0.99,1.07,0.9,2.82,1.06,1.67,1.6,1.3,0.86,0.9,0.58,2.15,0.71,0.91,0.91,0.44,1.72,1.92,1.01,2.78,1.41,0.78,2.33,2.32 137 | 0.81,0.32,0.35,1.16,1.13,0.88,0.21,2.72,0.79,1.05,1.94,0.86,1.35,1.05,2.85,1.77,1.0,2.68,6.18,6.5,2.61,0.67,0.98,0.92,2.89,1.1,1.7,1.62,1.33,0.89,0.94,0.64,2.2,0.82,0.99,0.93,0.69,1.83,1.93,1.07,2.77,1.44,0.92,2.35,2.41 138 | 0.89,0.35,0.42,1.21,1.18,0.9,0.22,3.34,0.78,1.08,1.97,0.91,1.44,1.11,2.87,1.85,1.02,2.71,6.17,6.5,2.68,0.83,0.86,0.95,2.94,1.13,1.72,1.65,1.36,0.96,0.99,0.68,2.29,0.83,1.14,0.93,0.7,1.85,2.01,1.09,2.8,1.51,0.94,2.45,2.47 139 | 0.97,0.1,0.56,1.23,1.26,0.96,0.32,2.14,0.07,1.14,2.0,0.99,1.51,1.17,2.88,1.88,1.3,1.84,6.17,6.51,2.73,0.94,0.88,1.03,2.96,1.15,1.74,1.67,1.36,1.0,1.07,0.75,2.43,0.98,1.16,1.0,0.92,2.04,2.16,1.12,2.83,1.54,1.27,2.51,2.52 140 | 1.09,0.22,0.72,1.26,1.42,1.03,0.44,2.12,0.19,1.2,2.01,1.08,1.64,1.22,2.9,1.96,1.44,1.9,6.16,6.52,2.82,1.06,0.89,1.11,3.01,1.17,1.77,1.69,1.38,1.03,1.19,0.82,2.51,1.03,1.19,1.02,1.07,2.1,2.19,1.2,2.84,1.59,1.41,2.56,2.58 141 | 1.18,0.37,0.84,1.3,1.51,1.1,0.51,2.12,0.3,1.26,2.1,1.11,1.82,1.26,2.93,1.99,1.51,1.97,6.15,6.54,2.84,1.11,1.03,1.24,3.04,1.21,1.8,1.69,1.39,1.08,1.27,0.91,2.55,1.13,1.22,1.14,1.1,2.13,2.24,1.26,2.85,1.63,1.48,2.65,2.62 142 | 1.29,0.49,1.1,1.33,1.58,1.16,0.53,2.16,0.38,1.33,2.19,1.16,1.95,1.33,2.98,2.01,1.64,2.01,6.2,6.55,2.9,1.28,1.2,1.37,3.04,1.34,1.83,1.72,1.41,1.12,1.42,0.99,2.58,1.14,1.37,1.26,1.12,2.13,2.27,1.33,2.96,1.69,1.5,2.66,2.69 143 | 1.32,0.63,1.15,1.38,1.66,1.24,0.58,2.21,0.46,1.42,2.36,1.3,2.1,1.37,2.99,2.12,1.7,2.04,6.21,6.55,2.95,1.39,1.28,1.5,3.06,1.42,1.87,1.73,1.43,1.17,1.48,1.1,2.61,1.15,1.64,1.28,1.18,2.18,2.36,1.52,2.98,1.71,1.56,2.66,2.73 144 | 1.37,0.77,1.27,1.44,1.72,1.32,0.63,2.24,0.55,1.5,2.48,1.41,2.16,1.41,2.99,2.18,1.77,2.08,6.26,6.58,2.94,1.35,1.48,1.61,3.08,1.51,1.95,1.75,1.46,1.21,1.57,1.23,2.64,1.17,1.69,1.49,1.29,2.19,2.46,1.55,3.0,1.88,1.58,2.7,2.78 145 | 1.45,0.88,1.3,1.56,1.96,1.4,0.76,2.35,0.71,1.63,2.6,1.49,2.23,1.57,3.0,2.21,1.93,2.11,6.25,6.57,3.08,1.44,1.52,1.64,3.15,1.55,1.98,1.76,1.5,1.3,1.63,1.27,2.73,1.24,1.73,1.6,1.44,2.25,2.47,1.72,2.99,1.89,1.84,2.66,2.82 146 | 1.69,1.0,1.33,1.68,2.06,1.56,0.89,2.39,0.86,1.69,2.71,1.53,2.3,1.8,3.0,2.2,2.1,2.15,6.22,6.57,3.19,1.47,1.53,1.65,3.22,1.6,2.01,1.79,1.53,1.32,1.67,1.28,2.79,1.33,1.77,1.8,1.5,2.4,2.48,1.78,3.01,1.91,1.99,2.65,2.87 147 | 1.8,1.14,1.39,1.83,2.27,1.69,0.97,2.42,1.03,1.75,2.78,1.58,2.38,2.01,3.0,2.23,2.17,2.2,6.2,6.56,3.36,1.5,1.56,1.67,3.28,1.64,2.04,1.87,1.57,1.36,1.69,1.33,2.85,1.44,1.82,1.8,1.56,2.54,2.52,1.81,3.01,1.94,2.25,2.63,2.93 148 | 
2.07,1.23,1.42,1.89,2.29,1.76,1.08,2.54,1.11,1.81,2.87,1.76,2.42,2.1,3.01,2.25,2.24,2.27,6.18,6.54,3.5,1.59,1.79,1.69,3.34,1.68,2.08,1.91,1.61,1.39,1.7,1.35,2.95,1.48,1.85,1.85,1.66,2.66,2.54,1.84,3.02,2.08,2.49,2.62,2.99 149 | 2.18,1.38,1.5,1.97,2.31,1.81,1.16,2.59,1.24,1.86,3.0,1.84,2.51,2.16,3.03,2.26,2.29,2.3,6.15,6.54,3.67,1.63,1.92,1.71,3.42,1.71,2.1,2.0,1.68,1.42,1.72,1.37,2.97,1.69,1.89,1.89,1.69,2.7,2.54,1.87,3.04,2.19,2.73,2.63,3.02 150 | 2.28,1.44,1.54,2.06,2.31,1.86,1.23,2.64,1.26,1.88,3.06,1.91,2.54,2.18,3.04,2.32,2.33,2.33,6.15,6.54,3.74,1.68,2.07,1.73,3.5,1.76,2.1,2.03,1.71,1.44,1.74,1.41,2.99,1.88,1.91,1.98,1.72,2.72,2.59,1.98,3.05,2.2,2.85,2.64,3.1 151 | 2.35,1.74,1.58,2.13,2.46,1.9,1.25,2.64,1.33,1.9,3.14,1.94,2.61,2.2,3.04,2.33,2.36,2.37,6.11,6.53,3.82,1.67,2.1,1.77,3.56,1.78,2.12,2.08,1.74,1.46,1.9,1.44,3.07,1.95,1.93,1.99,1.76,2.74,2.65,2.0,3.05,2.22,2.88,2.65,3.16 152 | 2.39,1.82,1.6,2.19,2.5,1.93,1.27,2.69,1.36,1.94,3.16,1.96,2.65,2.23,3.05,2.36,2.4,2.41,6.09,6.53,3.91,1.71,2.11,1.81,3.6,1.8,2.14,2.14,1.77,1.46,1.93,1.49,3.16,1.96,1.94,2.02,1.8,2.83,2.66,2.03,3.05,2.21,2.9,2.66,3.23 153 | 2.43,1.89,1.63,2.22,2.5,1.97,1.31,2.69,1.42,1.96,3.19,1.97,2.72,2.27,3.07,2.37,2.43,2.44,6.07,6.52,3.96,1.73,2.18,1.83,3.64,1.83,2.17,2.19,1.79,1.49,1.95,1.5,3.17,2.01,1.97,2.03,1.84,2.85,2.68,2.05,3.08,2.21,2.92,2.68,3.29 154 | 2.44,2.0,1.67,2.26,2.54,2.01,1.36,2.79,1.47,2.0,3.21,1.99,2.75,2.29,3.07,2.37,2.46,2.46,6.05,6.52,3.99,1.77,2.23,1.85,3.69,1.85,2.2,2.21,1.82,1.52,1.97,1.53,3.2,2.06,2.05,2.06,1.89,2.89,2.69,2.06,3.09,2.19,2.95,2.68,3.32 155 | 2.45,2.06,1.7,2.29,2.56,2.02,1.42,2.84,1.51,2.05,3.23,2.0,2.78,2.33,3.08,2.38,2.49,2.5,6.03,6.52,4.07,1.85,2.25,1.9,3.75,1.87,2.22,2.25,1.85,1.54,2.01,1.55,3.21,2.07,2.1,2.09,1.97,2.93,2.71,2.1,3.11,2.08,2.95,2.69,3.38 156 | 2.41,2.13,1.75,2.34,2.62,2.05,1.48,2.84,1.53,2.09,3.25,2.04,2.79,2.49,3.08,2.38,2.48,2.53,6.0,6.52,4.11,1.9,2.34,1.95,3.8,1.89,2.24,2.3,1.88,1.57,2.04,1.59,3.24,2.07,2.12,2.1,2.03,2.94,2.72,2.11,3.13,1.78,2.96,2.7,3.43 157 | 2.4,2.14,1.76,2.35,2.62,2.06,1.54,2.96,1.55,2.11,3.25,2.06,2.83,2.51,3.08,2.4,2.47,2.57,5.98,6.51,4.15,2.02,2.37,1.97,3.84,1.95,2.26,2.3,1.9,1.59,2.1,1.6,3.25,2.06,2.16,2.08,2.06,2.96,2.74,2.12,3.11,1.4,2.96,2.72,3.45 158 | 2.4,2.15,1.78,2.23,2.64,2.08,1.56,2.89,1.56,2.15,3.27,2.1,2.84,2.52,3.1,2.39,2.44,2.7,5.96,6.51,4.18,2.04,2.39,2.0,3.84,1.98,2.28,2.33,1.9,1.64,2.15,1.62,3.25,2.04,2.23,2.11,2.09,2.96,2.75,2.13,3.1,1.33,2.95,2.74,3.46 159 | 2.36,2.13,1.78,2.35,2.62,2.08,1.57,2.89,1.56,2.17,3.27,2.13,2.83,2.52,3.09,2.37,2.42,2.72,5.98,6.5,4.17,2.05,2.4,1.99,3.83,1.98,2.29,2.3,1.88,1.61,2.14,1.61,3.24,2.05,2.18,2.12,2.08,2.91,2.71,2.15,3.07,1.32,2.95,2.75,3.47 160 | 2.35,2.11,1.76,2.33,2.6,2.07,1.56,2.81,1.55,2.15,3.2,2.11,2.81,2.49,3.06,2.34,2.42,2.69,5.97,6.48,4.17,1.99,2.43,1.99,3.81,1.95,2.26,2.24,1.85,1.59,2.11,1.59,3.23,2.03,2.19,2.13,2.04,2.89,2.67,2.16,3.05,1.3,2.93,2.76,3.45 161 | 2.33,2.09,1.74,2.32,2.57,2.03,1.54,2.69,1.53,2.11,3.11,2.07,2.8,2.46,3.05,2.34,2.42,2.67,5.97,6.46,4.16,1.98,2.43,1.97,3.77,1.93,2.26,2.21,1.81,1.56,2.07,1.56,3.18,2.0,2.16,2.14,1.99,2.85,2.68,2.17,3.04,1.3,2.92,2.77,3.43 162 | 2.28,2.06,1.71,2.29,2.53,2.0,1.48,2.69,1.48,2.08,3.07,2.05,2.78,2.4,3.01,2.33,2.37,2.63,5.95,6.46,4.14,1.95,2.42,1.95,3.73,1.87,2.21,2.13,1.76,1.53,2.02,1.53,3.12,1.99,2.11,2.18,1.99,2.86,2.68,2.18,3.05,1.28,2.92,2.79,3.39 163 | 
2.26,2.03,1.69,1.27,2.51,1.97,1.45,2.68,1.45,2.06,3.03,2.03,2.76,2.35,3.0,2.32,2.38,2.59,5.96,6.45,4.11,1.84,2.39,1.93,3.7,1.85,2.18,2.07,1.72,1.5,2.0,1.49,3.05,1.95,2.1,2.16,2.0,2.84,2.64,2.14,3.03,1.28,2.9,2.73,3.37 164 | 2.22,2.0,1.66,1.24,2.5,1.95,1.41,2.66,1.42,2.02,2.96,1.99,2.75,2.31,3.0,2.32,2.34,2.56,5.97,6.44,4.06,1.82,2.35,1.9,3.65,1.82,2.15,2.03,1.69,1.46,1.97,1.44,3.02,1.91,2.1,2.15,1.94,2.83,2.62,2.13,3.02,1.22,2.88,2.72,3.33 165 | 2.18,1.95,1.62,1.21,2.47,1.9,1.36,2.64,1.39,1.97,2.92,2.0,2.72,2.25,2.99,2.29,2.34,2.51,5.95,6.44,3.99,1.79,2.34,1.84,3.61,1.77,2.11,2.01,1.65,1.41,1.92,1.39,2.99,1.87,2.08,2.19,1.94,2.8,2.6,2.1,2.97,1.23,2.87,2.72,3.27 166 | 2.12,1.92,1.59,1.15,2.44,1.85,1.35,2.64,1.33,1.95,2.9,2.0,2.69,2.22,2.97,2.27,2.33,2.51,5.93,6.45,3.98,1.75,2.33,1.81,3.46,1.74,2.07,1.93,1.61,1.39,1.88,1.38,2.99,1.76,2.06,2.09,1.94,2.74,2.59,2.09,2.96,1.24,2.81,2.7,3.23 167 | 2.06,1.88,1.56,1.11,2.39,1.81,1.33,2.64,1.21,1.92,2.89,1.96,2.69,2.18,2.95,2.24,2.32,2.47,5.91,6.45,3.95,1.75,2.32,1.79,3.34,1.71,2.0,1.86,1.57,1.35,1.82,1.35,2.95,1.74,2.06,2.06,1.91,2.72,2.56,2.07,2.94,1.25,2.79,2.67,3.19 168 | 2.0,1.82,1.51,1.04,2.33,1.74,1.28,2.62,1.04,1.88,2.89,1.95,2.68,2.12,2.95,2.23,2.3,2.39,5.91,6.46,3.9,1.68,2.3,1.71,3.29,1.65,1.94,1.79,1.5,1.31,1.77,1.3,2.89,1.73,2.05,2.04,1.9,2.7,2.55,2.04,2.92,1.29,2.78,2.63,3.13 169 | 1.9,1.67,1.01,1.01,2.29,1.63,1.06,2.74,0.94,1.88,2.83,1.91,2.63,1.84,2.76,2.14,2.26,2.29,5.95,6.48,3.71,1.52,2.18,1.6,3.19,1.61,1.78,1.52,1.4,1.24,1.5,1.15,2.89,1.68,1.91,2.04,1.84,2.68,2.24,2.0,2.96,1.26,2.62,2.59,2.94 170 | 1.8,1.42,0.81,0.91,2.15,1.43,0.78,2.69,0.91,1.68,2.79,1.63,2.09,1.49,2.68,2.07,2.15,2.19,5.98,6.49,3.2,1.13,1.77,1.56,3.24,1.58,1.7,1.52,1.21,1.19,1.43,1.03,2.81,1.59,1.76,1.59,1.61,2.38,1.92,1.78,2.84,1.17,2.56,2.6,2.88 171 | 1.5,1.12,0.98,0.76,1.96,1.25,0.37,2.69,0.87,1.58,2.6,1.17,1.88,1.12,2.64,2.03,1.68,2.0,6.08,6.52,2.78,0.9,1.35,1.4,2.94,1.25,1.55,1.35,1.0,1.29,1.25,0.86,2.54,1.16,1.74,1.6,1.45,2.25,1.56,1.69,2.7,1.14,2.53,2.83,2.66 172 | 1.39,0.98,1.11,0.65,2.02,1.34,0.19,2.59,0.14,1.53,2.33,0.83,1.82,0.95,2.61,1.99,1.27,2.07,6.11,6.53,2.53,0.73,1.23,1.24,2.74,1.16,1.42,1.26,0.92,1.34,1.16,0.74,2.42,0.77,1.54,1.26,1.02,1.73,1.5,1.65,2.84,0.89,2.45,2.86,2.72 173 | 1.26,1.02,1.01,0.68,2.1,1.4,0.3,2.51,0.64,1.53,2.06,0.92,1.56,1.01,2.4,1.98,1.02,2.13,6.13,6.55,2.48,0.65,1.12,1.2,2.54,1.18,1.48,1.29,0.85,1.39,1.2,0.78,2.15,0.58,1.5,0.93,0.55,1.5,1.34,1.63,2.36,0.74,2.02,2.81,2.61 174 | 1.11,1.0,0.84,0.74,1.91,1.45,0.46,2.94,0.55,1.48,1.88,0.94,1.11,1.06,2.38,1.79,0.84,1.87,6.15,6.55,2.44,0.61,1.0,1.11,2.34,1.2,1.54,1.32,0.8,1.44,1.02,0.84,1.91,0.51,1.52,1.02,0.16,1.39,1.11,1.62,2.04,0.42,1.2,2.78,2.68 175 | 0.73,0.62,0.44,0.95,1.31,1.18,0.47,1.96,0.43,1.38,1.86,1.0,1.14,1.09,2.4,1.77,1.06,1.67,6.16,6.55,2.4,0.72,1.08,1.1,2.39,0.94,1.47,1.4,0.83,1.21,0.93,0.89,1.83,0.57,1.42,1.04,0.33,1.47,1.27,1.65,2.05,0.63,1.28,2.82,2.51 176 | 0.78,0.68,0.51,0.99,1.37,1.24,0.53,1.89,0.51,1.4,1.83,1.02,1.18,1.12,2.43,1.82,1.11,1.75,6.17,6.57,2.38,0.8,1.22,1.14,2.49,0.99,1.52,1.36,0.87,1.27,1.01,0.94,2.13,0.69,1.31,1.12,0.4,1.5,1.3,1.66,2.06,0.69,1.29,2.86,2.57 177 | 0.81,0.84,0.6,1.03,1.43,1.31,0.59,1.79,0.63,1.48,1.88,0.96,1.26,1.16,2.46,1.86,1.13,1.88,6.17,6.58,2.22,0.79,1.3,1.02,2.54,1.03,1.55,1.33,0.91,1.34,1.11,1.01,2.25,0.68,1.18,1.06,0.39,1.54,1.34,1.69,2.09,0.75,1.33,2.94,2.63 178 | 
0.87,1.03,0.68,1.08,1.49,1.36,0.64,2.19,0.81,1.58,1.95,0.97,1.22,0.94,2.47,1.88,0.97,1.93,6.19,6.58,2.25,0.84,1.27,1.08,2.59,1.07,1.6,1.31,0.96,1.39,1.2,1.05,2.15,0.62,1.32,0.95,0.45,1.63,1.33,1.86,2.07,0.88,1.36,2.97,2.72 179 | 0.91,1.05,0.75,1.01,1.55,1.4,0.68,2.16,1.02,1.68,2.0,0.99,1.14,0.98,2.53,2.0,0.98,1.72,6.2,6.6,2.39,0.85,1.28,1.17,2.64,1.13,1.48,1.29,1.02,1.45,1.3,1.11,2.3,0.66,1.47,1.01,0.5,1.67,1.19,1.87,1.98,0.85,1.33,2.93,2.75 180 | 1.0,0.9,1.06,0.93,1.6,1.48,0.76,3.22,0.85,1.78,2.09,1.07,1.21,1.01,2.54,2.09,0.93,1.65,6.21,6.61,2.52,0.94,1.33,1.26,2.71,1.17,1.51,1.27,0.92,1.49,1.36,1.0,2.59,0.75,1.49,1.14,0.55,1.72,1.21,1.93,1.96,0.91,1.41,2.94,2.68 181 | 1.08,1.49,1.16,1.44,1.93,2.23,1.19,3.12,0.89,1.98,2.08,1.71,2.17,1.69,2.75,2.31,1.82,2.17,6.25,6.67,2.88,1.19,1.71,1.52,3.0,1.7,1.76,1.55,1.24,1.54,1.24,1.21,1.97,1.64,2.08,1.68,1.27,2.03,1.95,1.85,2.23,1.64,2.23,3.86,2.99 182 | 0.84,1.53,1.24,1.47,1.96,2.27,1.23,3.14,0.97,2.08,2.15,1.5,2.24,1.72,2.77,2.44,1.87,2.22,6.27,6.69,2.95,1.27,1.77,1.55,3.03,1.72,1.79,1.66,1.3,1.56,1.28,1.25,2.08,1.6,2.03,1.67,1.4,2.07,2.0,1.91,2.26,1.67,2.29,3.73,3.02 183 | 0.86,1.57,1.3,1.5,2.0,2.32,1.27,3.33,1.11,2.16,2.27,1.55,2.36,1.76,2.78,2.46,1.99,2.28,6.28,6.69,3.08,1.41,1.93,1.59,3.06,1.75,1.84,1.7,1.34,1.61,1.35,1.29,2.18,1.71,2.17,1.72,1.55,2.13,2.19,1.91,2.35,1.83,2.41,3.78,3.1 184 | 0.99,1.61,1.34,1.55,2.04,2.37,1.33,3.33,1.22,2.18,2.45,1.59,2.43,1.79,2.8,2.4,2.14,2.33,6.31,6.71,3.19,1.46,2.03,1.63,3.14,1.78,1.89,1.73,1.38,1.68,1.41,1.33,2.3,1.89,2.29,1.79,1.68,2.18,2.38,1.99,2.47,1.86,2.46,3.92,3.16 185 | 1.05,1.66,1.37,1.6,2.09,2.52,1.39,3.35,1.37,2.28,2.57,1.82,2.5,1.84,2.83,2.38,2.23,2.37,6.32,6.72,3.24,1.52,2.19,1.66,3.22,1.81,1.94,1.84,1.41,1.73,1.48,1.38,2.42,1.93,2.33,1.88,1.76,2.28,2.4,2.16,2.5,1.85,2.53,3.95,3.21 186 | 1.08,1.71,1.45,1.63,2.16,2.87,1.46,3.39,1.51,2.28,2.81,1.86,2.56,1.92,2.88,2.37,2.28,2.41,6.32,6.72,3.26,1.58,2.28,1.71,3.3,1.85,1.99,1.92,1.47,1.8,1.54,1.45,2.58,1.91,2.37,1.96,1.79,2.31,2.41,2.2,2.52,1.9,2.64,3.98,3.25 187 | 1.08,1.49,1.16,1.44,1.93,2.23,1.19,3.12,0.89,1.98,2.08,1.71,2.17,1.69,2.75,2.31,1.82,2.17,6.25,6.67,2.88,1.19,1.71,1.52,3.0,1.7,1.76,1.55,1.24,1.54,1.24,1.21,1.97,1.64,2.08,1.68,1.27,2.03,1.95,1.85,2.23,1.64,2.23,3.86,2.99 188 | 0.84,1.53,1.24,1.47,1.96,2.27,1.23,3.14,0.97,2.08,2.15,1.5,2.24,1.72,2.77,2.44,1.87,2.22,6.27,6.69,2.95,1.27,1.77,1.55,3.03,1.72,1.79,1.66,1.3,1.56,1.28,1.25,2.08,1.6,2.03,1.67,1.4,2.07,2.0,1.91,2.26,1.67,2.29,3.73,3.02 189 | 0.86,1.57,1.3,1.5,2.0,2.32,1.27,3.33,1.11,2.16,2.27,1.55,2.36,1.76,2.78,2.46,1.99,2.28,6.28,6.69,3.08,1.41,1.93,1.59,3.06,1.75,1.84,1.7,1.34,1.61,1.35,1.29,2.18,1.71,2.17,1.72,1.55,2.13,2.19,1.91,2.35,1.83,2.41,3.78,3.1 190 | 0.99,1.61,1.34,1.55,2.04,2.37,1.33,3.33,1.22,2.18,2.45,1.59,2.43,1.79,2.8,2.4,2.14,2.33,6.31,6.71,3.19,1.46,2.03,1.63,3.14,1.78,1.89,1.73,1.38,1.68,1.41,1.33,2.3,1.89,2.29,1.79,1.68,2.18,2.38,1.99,2.47,1.86,2.46,3.92,3.16 191 | 1.05,1.66,1.37,1.6,2.09,2.52,1.39,3.35,1.37,2.28,2.57,1.82,2.5,1.84,2.83,2.38,2.23,2.37,6.32,6.72,3.24,1.52,2.19,1.66,3.22,1.81,1.94,1.84,1.41,1.73,1.48,1.38,2.42,1.93,2.33,1.88,1.76,2.28,2.4,2.16,2.5,1.85,2.53,3.95,3.21 192 | 1.08,1.71,1.45,1.63,2.16,2.87,1.46,3.39,1.51,2.28,2.81,1.86,2.56,1.92,2.88,2.37,2.28,2.41,6.32,6.72,3.26,1.58,2.28,1.71,3.3,1.85,1.99,1.92,1.47,1.8,1.54,1.45,2.58,1.91,2.37,1.96,1.79,2.31,2.41,2.2,2.52,1.9,2.64,3.98,3.25 193 | 
1.13,1.75,1.48,1.72,2.25,2.89,1.48,3.49,1.54,2.32,2.95,1.88,2.51,1.97,2.9,2.32,2.38,2.44,6.33,6.72,3.35,1.65,2.36,1.77,3.33,1.99,2.02,1.96,1.5,1.83,1.6,1.48,2.61,2.01,2.41,1.99,1.9,2.42,2.53,2.24,2.57,1.96,2.69,3.9,3.31 194 | 1.18,1.82,1.52,1.76,2.38,2.92,1.5,3.55,1.57,2.36,3.09,1.94,2.48,2.03,2.87,2.3,2.4,2.48,6.32,6.73,3.44,1.68,2.38,1.88,3.35,2.08,2.04,2.01,1.52,1.87,1.68,1.51,2.8,2.09,2.43,2.03,1.95,2.57,2.65,2.3,2.61,1.93,2.72,3.84,3.36 195 | 1.23,1.88,1.55,1.81,2.46,2.95,1.53,3.43,1.62,2.42,3.2,1.86,2.4,2.07,2.79,2.26,2.42,2.53,6.32,6.74,3.62,1.76,2.44,1.99,3.37,2.2,2.05,2.05,1.59,1.89,1.77,1.55,3.0,2.14,2.44,2.09,2.09,2.73,2.66,2.27,2.62,2.1,2.75,3.43,3.41 196 | 1.27,1.94,1.59,1.83,2.54,2.97,1.56,3.41,1.66,2.45,3.28,1.83,2.33,2.11,2.73,2.29,2.22,2.6,6.31,6.74,3.69,1.83,2.45,2.03,3.41,2.35,2.08,2.09,1.64,1.93,1.86,1.6,3.09,2.13,2.48,2.04,2.16,3.0,2.64,2.26,2.64,2.25,2.76,3.22,3.44 197 | 1.32,2.0,1.62,1.9,2.62,3.0,1.59,3.47,1.68,2.48,3.36,1.52,2.26,2.15,2.72,2.32,2.13,2.68,6.3,6.74,3.78,1.84,2.41,2.12,3.44,2.45,2.1,2.13,1.69,1.97,1.93,1.64,2.83,2.11,2.5,2.02,2.2,3.03,2.6,2.15,2.62,2.31,2.78,3.13,3.46 198 | 1.38,2.04,1.65,1.95,2.75,3.03,1.63,3.48,1.72,2.51,3.39,1.45,1.96,2.21,2.57,2.35,1.96,2.8,6.28,6.76,3.77,1.97,2.25,2.3,3.48,2.55,2.15,2.17,1.75,2.0,2.05,1.68,2.63,2.07,2.46,2.01,2.21,3.05,2.49,2.19,2.45,2.3,2.79,3.05,3.49 199 | 1.46,2.06,2.42,1.47,2.69,2.2,1.12,3.17,2.05,2.48,3.04,1.3,1.94,1.78,2.53,2.36,1.78,2.69,6.28,6.76,3.32,1.39,2.17,2.28,3.14,2.18,2.46,2.08,2.08,1.59,1.62,1.8,2.58,1.5,2.13,1.97,2.23,2.89,2.26,2.04,2.21,2.21,2.82,3.06,3.17 200 | 1.05,2.02,2.51,1.33,2.73,2.25,1.21,3.16,1.85,2.43,3.16,1.13,1.51,1.71,2.21,2.38,1.54,2.77,6.29,6.75,3.06,1.27,1.83,2.2,2.64,2.36,2.53,2.19,2.16,1.49,1.36,1.86,2.38,1.59,1.87,1.83,2.12,2.58,2.03,2.17,1.94,2.17,2.61,3.11,3.06 201 | 1.08,1.82,2.57,0.91,2.33,2.34,1.3,3.09,1.51,2.28,2.94,0.97,0.88,0.8,2.26,2.39,1.28,2.54,6.29,6.74,2.84,1.12,1.65,1.98,1.94,2.42,2.62,2.23,1.95,1.46,1.3,1.91,2.29,1.62,1.33,1.81,1.84,1.99,2.05,2.3,1.84,2.14,2.48,3.07,2.61 202 | 1.09,1.51,2.36,0.69,2.13,2.18,1.06,2.99,1.15,2.18,2.52,0.76,0.73,0.67,2.28,2.43,1.36,2.12,6.3,6.73,2.49,1.13,1.44,1.59,1.99,1.97,2.41,2.36,1.62,1.39,0.8,1.95,2.16,1.55,1.36,1.67,1.62,1.92,2.04,2.31,1.77,1.79,2.33,3.03,2.44 203 | 1.13,1.18,2.11,0.47,1.46,1.77,0.81,2.49,0.31,1.88,2.39,0.7,0.5,0.57,2.31,2.47,1.39,1.71,6.32,6.72,2.47,1.18,1.37,1.31,2.04,0.66,2.2,2.28,1.32,1.29,0.72,1.81,1.7,1.43,1.42,1.45,1.43,1.7,2.09,2.35,1.82,1.82,1.72,3.0,2.23 204 | 1.18,1.16,1.81,0.5,1.2,1.42,0.39,2.39,0.02,1.78,2.31,0.61,0.62,0.54,2.35,2.38,1.45,1.49,6.32,6.72,2.48,1.2,1.25,1.13,2.09,0.34,1.72,2.22,1.0,1.09,0.72,1.39,1.88,1.34,1.44,1.41,1.27,1.56,2.16,2.36,1.89,1.83,1.26,2.96,1.83 205 | 1.21,0.79,1.51,0.55,0.85,1.32,0.06,3.17,0.49,1.61,2.14,0.64,0.73,0.52,2.46,2.4,0.67,1.4,6.31,6.71,2.48,1.2,1.04,1.02,2.12,0.4,1.37,2.21,0.9,0.89,0.8,0.87,1.9,0.87,1.45,1.09,0.93,1.73,2.31,2.01,2.16,1.74,0.39,3.05,1.88 206 | 1.23,0.44,0.21,0.66,0.91,1.3,0.42,3.33,0.27,1.44,1.97,0.68,0.85,0.49,2.54,2.41,0.7,1.27,6.29,6.71,2.49,1.22,1.23,0.96,2.15,0.51,1.09,2.16,0.8,0.79,0.88,0.53,1.91,0.59,1.48,0.84,0.75,1.88,2.34,1.19,2.26,1.62,0.44,3.33,1.93 207 | 1.24,0.33,0.45,0.82,0.99,1.32,0.02,3.53,0.26,1.38,1.8,0.78,0.98,0.42,2.55,2.42,0.72,1.3,6.29,6.69,2.51,1.26,1.25,1.0,2.22,0.62,0.87,1.67,0.7,0.69,0.99,0.57,1.94,0.54,1.49,0.86,0.77,1.93,2.38,1.2,2.46,1.65,0.49,4.06,2.01 208 | 
1.27,0.37,0.63,0.95,1.06,1.38,0.08,3.2,0.34,1.44,1.85,0.86,1.03,0.47,2.62,2.44,0.82,1.41,6.29,6.68,2.55,1.34,1.36,1.1,2.33,0.75,0.99,1.42,0.7,0.78,1.1,0.62,1.98,0.6,1.6,1.01,0.9,2.01,2.39,1.35,2.51,1.71,0.6,4.14,2.13 209 | 1.31,0.42,0.72,1.06,1.17,1.49,0.17,2.79,0.29,1.53,1.94,0.97,1.19,0.56,2.65,2.48,0.87,1.53,6.28,6.66,2.62,1.37,1.43,1.22,2.46,0.87,1.12,1.45,0.74,0.9,1.21,0.67,2.03,0.63,1.62,1.12,0.94,2.09,2.41,1.44,2.52,1.74,0.68,4.11,2.24 210 | 1.35,0.5,0.8,1.14,1.23,1.61,0.29,1.99,0.09,1.62,2.02,1.05,1.25,0.65,2.66,2.48,0.89,1.65,6.28,6.65,2.64,1.42,1.53,1.34,2.59,1.0,1.27,1.5,0.8,1.01,1.35,0.75,2.1,0.76,1.75,1.2,1.19,2.09,2.47,1.56,2.55,1.77,0.79,4.0,2.42 211 | 1.35,0.59,0.88,1.18,1.3,1.63,0.36,2.07,0.15,1.65,2.03,1.32,1.39,0.7,2.69,2.52,0.95,1.69,6.28,6.66,2.71,1.47,1.56,1.37,2.59,1.03,1.31,1.5,0.84,1.06,1.41,0.78,2.21,0.89,1.74,1.32,1.2,2.11,2.54,1.56,2.54,1.8,0.96,3.91,2.46 212 | 1.38,0.69,0.94,1.22,1.35,1.67,0.42,2.09,0.16,1.65,2.04,1.4,1.54,0.76,2.71,2.52,1.04,1.75,6.27,6.68,2.85,1.56,1.58,1.41,2.61,1.07,1.36,1.52,0.87,1.12,1.49,0.82,2.25,0.92,1.78,1.37,1.26,2.13,2.6,1.58,2.5,1.85,1.11,3.68,2.51 213 | 1.43,0.75,1.1,1.25,1.41,1.72,0.48,2.14,0.16,1.72,2.06,1.56,1.66,0.81,2.72,2.53,1.11,1.8,6.27,6.68,2.89,1.62,1.65,1.49,2.63,1.12,1.39,1.54,0.91,1.2,1.53,0.87,2.28,1.08,1.81,1.42,1.28,2.19,2.61,1.6,2.58,1.86,1.3,3.55,2.57 214 | 1.55,0.98,1.15,1.3,1.48,1.78,0.56,2.24,0.33,1.76,2.09,1.58,1.82,0.94,2.76,2.54,1.23,1.88,6.26,6.69,2.93,1.67,1.74,1.54,2.66,1.16,1.46,1.55,0.95,1.33,1.61,0.94,2.36,1.17,1.83,1.5,1.37,2.22,2.66,1.63,2.64,1.91,1.47,3.13,2.63 215 | 1.6,1.16,1.21,1.35,1.55,1.83,0.6,2.29,0.5,1.8,2.12,1.65,1.97,1.07,2.77,2.56,1.41,1.92,6.25,6.69,2.95,1.68,1.8,1.61,2.66,1.22,1.51,1.57,1.01,1.48,1.74,1.03,2.47,1.19,1.84,1.63,1.51,2.28,2.69,1.64,2.69,1.93,1.64,3.05,2.71 216 | 1.64,1.28,1.29,1.42,1.63,1.92,0.66,2.29,0.74,1.92,2.16,1.67,2.08,1.18,2.81,2.59,1.5,2.0,6.26,6.71,2.98,1.73,1.84,1.73,2.68,1.33,1.68,1.61,1.14,1.59,1.83,1.1,2.58,1.28,1.87,1.68,1.63,2.29,2.77,1.71,2.69,2.05,1.79,2.85,2.79 217 | 1.75,1.35,1.35,1.53,1.76,2.06,0.84,2.41,0.88,2.04,2.27,1.71,2.15,1.33,2.8,2.61,1.58,2.14,6.24,6.7,3.11,1.73,1.85,1.9,2.74,1.48,1.87,1.67,1.27,1.67,1.92,1.25,2.81,1.46,1.97,1.69,1.61,2.38,2.8,1.72,2.71,2.04,2.1,2.84,2.91 218 | 1.87,1.47,1.47,1.64,1.87,2.21,0.99,2.49,0.94,2.16,2.44,1.7,2.17,1.46,2.8,2.63,1.62,2.28,6.23,6.7,3.32,1.74,1.9,2.03,2.83,1.62,2.06,1.73,1.4,1.76,2.04,1.41,2.86,1.55,2.1,1.7,1.62,2.51,2.85,1.74,2.74,2.0,2.36,2.81,3.1 219 | 2.0,1.6,1.53,1.76,1.98,2.32,1.15,2.55,0.97,2.29,2.69,1.73,2.24,1.64,2.82,2.64,1.78,2.42,6.2,6.69,3.49,1.78,1.94,2.16,2.94,1.75,2.19,1.84,1.55,1.88,2.15,1.46,2.91,1.64,2.13,1.75,1.67,2.55,2.87,1.77,2.75,1.96,2.55,2.83,3.21 220 | 2.11,1.67,1.63,1.91,2.09,2.53,1.3,2.59,1.03,2.41,2.81,1.78,2.3,1.75,2.82,2.67,1.87,2.64,6.18,6.68,3.69,1.85,1.94,2.28,3.14,1.89,2.32,1.9,1.7,1.99,2.27,1.52,2.94,1.73,2.16,1.8,1.69,2.58,3.01,1.84,2.8,1.93,2.68,2.83,3.29 221 | 2.3,1.8,1.76,2.0,2.21,2.65,1.44,2.69,1.08,2.52,2.93,1.82,2.33,1.88,2.83,2.72,2.03,2.85,6.17,6.66,3.82,1.86,1.97,2.34,3.39,2.02,2.4,1.97,1.86,2.05,2.38,1.57,2.99,1.87,2.24,1.85,1.72,2.59,3.06,1.87,2.83,1.89,2.82,2.86,3.36 222 | 2.41,1.9,1.84,2.09,2.32,2.73,1.53,2.73,1.14,2.64,3.04,1.86,2.34,1.96,2.84,2.74,2.05,2.93,6.15,6.67,3.95,1.88,1.99,2.41,3.54,2.14,2.47,2.05,1.94,2.1,2.49,1.63,3.01,1.9,2.27,1.92,1.75,2.76,3.13,1.89,2.84,1.85,3.03,2.88,3.42 223 | 
2.43,2.01,1.95,2.18,2.47,2.82,1.58,2.69,1.21,2.71,3.11,1.87,2.41,2.04,2.87,2.76,2.1,2.99,6.12,6.66,4.14,1.91,2.02,2.47,3.69,2.19,2.53,2.11,2.0,2.14,2.53,1.64,3.03,1.96,2.3,1.97,1.75,2.8,3.17,1.9,2.89,1.81,3.1,3.48,3.48 224 | 2.45,2.09,2.09,2.25,2.55,2.89,1.67,2.74,1.32,2.79,3.2,1.9,2.55,2.11,2.88,2.77,2.18,3.07,6.09,6.64,4.3,1.92,2.03,2.55,3.74,2.25,2.62,2.16,2.08,2.19,2.6,1.67,3.04,2.02,2.43,2.02,1.77,2.82,3.2,1.95,2.92,1.8,3.13,2.89,3.53 225 | 2.44,2.17,2.16,2.31,2.64,2.97,1.78,2.74,1.43,2.86,3.25,1.94,2.59,2.18,2.89,2.8,2.24,3.14,6.07,6.64,4.45,1.94,2.05,2.59,3.79,2.33,2.68,2.2,2.14,2.22,2.65,1.69,3.07,2.04,2.52,2.09,1.79,2.88,3.21,1.98,2.94,1.74,3.25,2.91,3.59 226 | 2.42,2.24,2.28,2.35,2.73,3.02,1.86,2.76,1.47,2.92,3.28,1.98,2.64,2.25,2.93,2.81,2.31,3.2,6.06,6.65,4.53,1.97,2.08,2.62,3.84,2.4,2.74,2.24,2.22,2.25,2.69,1.71,3.1,2.04,2.55,2.12,1.8,2.89,3.29,2.01,3.01,1.71,3.27,2.91,3.66 227 | 2.39,2.25,2.35,2.39,2.81,3.06,1.94,2.79,1.52,2.98,3.31,2.04,2.69,2.33,3.01,2.87,2.32,3.25,6.05,6.64,4.61,2.0,2.1,2.67,3.86,2.46,2.77,2.27,2.28,2.28,2.72,1.73,3.12,2.13,2.58,2.1,1.84,2.95,3.35,2.01,3.04,1.66,3.28,2.92,3.72 228 | 2.35,2.27,2.41,2.43,2.88,3.1,1.75,2.81,1.52,3.04,3.35,2.06,2.78,2.38,3.05,2.88,2.35,3.28,6.03,6.63,4.66,2.03,2.12,2.7,3.89,2.51,2.81,2.31,2.33,2.3,2.8,1.76,3.15,2.17,2.63,2.12,1.84,3.01,3.38,2.11,3.04,1.62,3.31,2.93,3.77 229 | 2.34,2.3,2.46,2.48,2.92,3.11,2.06,2.83,1.56,3.06,3.38,2.08,2.83,2.41,3.06,2.91,2.38,3.3,6.01,6.62,4.74,2.07,2.12,2.72,3.94,2.54,2.85,2.33,2.35,2.31,2.81,1.85,3.15,2.25,2.69,2.13,1.85,3.03,3.37,2.14,3.06,1.58,3.33,2.93,3.8 230 | 2.32,2.31,2.53,2.53,2.95,3.13,2.09,2.84,1.54,3.08,3.38,2.2,2.84,2.44,3.08,2.94,2.39,3.33,6.01,6.61,4.76,2.14,2.15,2.73,3.96,2.56,2.91,2.35,2.37,2.32,2.83,1.92,3.14,2.28,2.72,2.14,1.88,3.07,3.41,2.2,3.1,1.52,3.35,2.94,3.83 231 | 2.32,2.32,2.61,2.55,2.99,3.16,2.11,2.84,1.53,3.07,3.37,2.22,2.84,2.43,3.1,2.95,2.4,3.31,6.0,6.62,4.79,2.14,2.18,2.71,3.99,2.6,2.95,2.36,2.4,2.32,2.8,1.96,3.12,2.32,2.7,2.12,1.93,3.08,3.44,2.21,3.11,1.55,3.36,2.95,3.86 232 | 2.3,2.34,2.68,2.53,3.04,3.2,2.08,2.84,1.52,3.04,3.37,2.22,2.85,2.42,3.09,2.98,2.41,3.29,6.0,6.63,4.8,2.17,2.21,2.69,4.04,2.62,2.93,2.34,2.4,2.29,2.77,1.94,3.05,2.33,2.69,2.08,1.89,3.1,3.47,2.25,3.11,1.49,3.35,2.97,3.85 233 | 2.29,2.33,2.74,2.51,3.08,3.23,2.06,2.86,1.51,3.02,3.36,2.24,2.88,2.4,3.11,2.98,2.4,3.27,6.01,6.63,4.82,2.16,2.22,2.67,4.09,2.61,2.91,2.31,2.38,2.27,2.72,1.93,2.92,2.33,2.68,2.09,1.86,3.09,3.48,2.24,3.12,1.4,3.33,3.02,3.83 234 | 2.28,2.33,2.81,2.48,3.05,3.22,2.03,2.89,1.49,2.99,3.35,2.24,2.87,2.37,3.14,2.97,2.38,3.25,6.01,6.65,4.82,2.18,2.22,2.62,4.14,2.59,2.87,2.23,2.35,2.23,2.69,1.91,2.83,2.37,2.67,2.06,1.87,3.09,3.42,2.22,3.12,1.38,3.34,3.02,3.8 235 | 2.26,2.32,2.78,2.45,3.02,3.2,2.02,2.87,1.47,2.97,3.33,2.24,2.85,2.34,3.09,2.91,2.34,3.22,6.01,6.65,4.77,2.15,2.18,2.7,4.11,2.55,2.85,2.19,2.32,2.2,2.67,1.89,2.79,2.35,2.66,2.07,1.86,3.06,3.39,2.21,3.07,1.39,3.31,3.0,3.76 236 | 2.23,2.3,2.75,2.42,2.98,3.17,1.99,2.87,1.45,2.94,3.31,2.19,2.78,2.31,3.08,2.88,2.31,3.19,6.02,6.66,4.71,2.1,2.17,2.66,4.07,2.52,2.82,2.14,2.29,2.16,2.63,1.87,2.77,2.31,2.64,2.07,1.84,3.05,3.36,2.14,3.05,1.35,3.3,2.93,3.72 237 | 2.19,2.26,2.71,2.37,2.92,3.12,1.95,2.89,1.42,2.9,3.27,2.18,2.77,2.26,3.08,2.89,2.27,3.12,6.04,6.66,4.69,2.08,2.16,2.6,4.03,2.47,2.76,2.1,2.24,2.11,2.58,1.83,2.75,2.24,2.63,2.02,1.83,2.99,3.32,2.14,3.02,1.22,3.29,2.91,3.66 238 | 
2.1,2.2,2.63,2.31,2.85,3.12,1.92,2.86,1.35,2.86,3.27,2.13,2.76,2.22,3.05,2.91,2.23,3.08,6.05,6.67,4.69,2.06,2.14,2.53,3.99,2.45,2.71,2.01,2.2,2.07,2.5,1.75,2.74,2.2,2.6,1.99,1.84,3.01,3.28,2.1,2.99,1.28,3.27,2.88,3.61 239 | 2.02,2.13,2.59,2.24,2.79,3.08,1.9,2.77,1.27,2.78,3.26,2.12,2.99,2.16,3.0,2.59,2.22,3.0,6.05,6.68,4.67,2.02,2.08,2.47,3.99,2.41,2.66,1.95,2.15,2.0,2.38,1.65,2.73,2.21,2.57,1.97,1.77,3.02,3.25,2.06,2.97,1.31,3.22,2.95,3.54 240 | 1.96,2.01,2.53,2.15,2.68,3.04,1.86,2.77,1.17,2.66,3.26,2.1,3.11,2.09,2.98,2.97,2.19,2.94,6.07,6.68,4.6,1.97,2.03,2.4,3.96,2.35,2.59,1.9,2.09,1.93,2.24,1.6,2.69,2.21,2.52,1.95,1.72,3.09,3.23,2.06,2.96,1.32,3.19,2.95,3.49 241 | 1.6,1.75,2.41,1.98,2.51,2.66,1.56,2.89,1.17,2.18,3.17,1.97,3.09,1.74,2.82,2.84,2.01,2.71,6.07,6.66,4.54,1.86,2.04,2.12,3.84,1.81,2.46,1.87,1.95,1.79,2.15,1.55,2.65,1.94,2.28,2.53,1.68,2.92,3.04,1.94,3.18,0.79,3.05,2.99,3.23 242 | 1.5,1.57,1.86,1.73,2.31,2.57,1.46,2.87,1.12,2.08,3.12,1.78,2.63,1.58,2.8,2.59,1.99,2.59,6.06,6.64,4.41,1.84,1.98,1.8,3.74,1.73,2.22,1.82,1.72,1.69,1.9,1.37,2.62,1.79,2.12,2.32,1.67,2.75,2.92,1.72,2.96,0.56,2.83,3.23,3.06 243 | 1.38,1.45,1.21,1.54,2.17,2.44,1.36,2.87,1.07,1.88,2.95,1.66,2.21,1.35,2.75,2.48,1.97,2.5,6.06,6.64,4.38,1.75,1.97,1.41,3.54,1.51,1.97,1.76,1.6,1.59,1.68,1.25,2.55,1.78,1.99,2.26,1.55,2.49,2.68,1.63,2.48,0.51,2.81,3.3,2.84 244 | 1.25,1.29,0.83,1.33,1.92,2.22,0.76,2.91,0.68,1.68,2.84,1.41,1.8,1.14,2.67,2.35,1.96,2.29,6.06,6.61,4.44,1.8,1.84,1.16,3.34,1.33,1.68,1.71,1.5,1.53,1.48,1.08,2.49,1.56,1.93,2.19,1.5,2.2,2.51,1.6,2.68,0.6,2.73,3.35,2.62 245 | 1.17,1.17,0.91,1.04,1.75,1.62,0.46,3.14,0.57,1.48,2.59,1.42,1.61,1.04,2.63,2.32,1.52,2.21,6.05,6.6,4.33,1.82,1.78,1.03,3.04,1.18,1.51,1.63,1.4,1.42,1.31,0.85,2.45,1.34,1.84,2.16,1.51,2.09,2.29,1.33,2.9,0.61,2.52,3.42,2.51 246 | 1.2,1.01,0.98,1.13,1.65,1.22,0.36,2.69,0.31,1.4,2.38,1.2,1.46,0.84,2.51,2.3,1.55,2.06,6.05,6.59,4.36,1.69,1.76,0.9,2.54,1.12,1.39,1.52,1.14,1.37,1.05,0.7,2.15,1.08,1.75,2.05,1.47,1.87,2.31,1.21,2.65,0.67,1.97,3.56,2.43 247 | 1.24,0.6,1.11,1.21,1.76,1.2,0.56,2.89,0.47,1.48,2.46,1.03,1.27,0.94,2.47,2.21,1.58,2.15,6.09,6.55,4.28,1.41,1.67,1.0,2.59,1.3,1.48,1.57,1.23,1.39,1.11,0.8,2.03,0.96,1.53,1.85,1.18,1.65,1.97,1.24,2.28,0.73,1.74,3.45,2.25 248 | 1.32,0.75,0.91,1.03,1.84,1.31,0.44,2.94,0.36,1.58,2.54,0.88,1.33,1.02,2.45,2.28,1.46,2.29,6.1,6.54,4.01,1.3,1.61,1.08,2.64,1.37,1.61,1.54,1.35,1.44,1.16,0.91,2.13,0.89,1.52,1.78,1.05,1.63,1.88,1.29,2.19,0.8,1.7,3.26,2.38 249 | 1.13,0.93,0.32,1.1,1.68,1.57,0.52,2.99,0.58,1.49,2.17,0.95,1.34,0.76,2.5,2.33,1.34,2.41,6.12,6.52,3.87,1.17,1.43,1.15,2.69,1.36,1.43,1.5,1.17,1.49,1.14,0.98,2.25,0.82,1.59,1.63,0.94,1.72,1.81,1.43,2.1,0.88,1.56,3.31,2.46 250 | 1.2,0.95,0.66,1.18,1.8,1.76,0.66,2.94,0.73,1.53,2.28,1.04,1.38,0.84,2.5,2.34,1.23,2.2,6.1,6.52,3.8,1.14,1.48,0.98,2.74,0.88,1.52,1.46,1.28,1.23,1.21,0.85,2.38,0.93,1.65,1.39,0.88,1.76,1.92,1.56,2.22,0.91,1.4,3.33,2.57 251 | 1.28,1.03,0.88,1.24,1.87,1.97,0.75,3.04,0.92,1.61,2.37,1.1,1.4,0.91,2.55,2.37,1.27,2.36,6.11,6.53,3.71,1.19,1.52,1.1,2.79,1.05,1.64,1.43,1.39,1.31,1.3,0.75,2.5,1.0,1.63,1.26,0.87,1.8,1.95,1.58,2.24,0.96,1.37,3.35,2.64 252 | 1.37,1.13,1.01,1.33,1.71,2.12,0.5,3.33,0.87,1.78,2.23,1.12,1.41,0.73,2.62,2.35,1.41,2.47,6.11,6.53,3.66,1.23,1.55,1.02,2.84,1.19,1.75,1.4,1.3,1.53,1.35,0.83,2.4,1.02,1.66,1.17,0.89,1.82,2.03,1.63,2.26,0.94,1.48,3.4,2.72 253 | 
1.24,0.6,1.11,1.21,1.76,1.2,0.56,2.89,0.47,1.48,2.46,1.03,1.27,0.94,2.47,2.21,1.58,2.15,6.09,6.55,4.28,1.41,1.67,1.0,2.59,1.3,1.48,1.57,1.23,1.39,1.11,0.8,2.03,0.96,1.53,1.85,1.18,1.65,1.97,1.24,2.28,0.73,1.74,3.45,2.25 254 | 1.32,0.75,0.91,1.03,1.84,1.31,0.44,2.94,0.36,1.58,2.54,0.88,1.33,1.02,2.45,2.28,1.46,2.29,6.1,6.54,4.01,1.3,1.61,1.08,2.64,1.37,1.61,1.54,1.35,1.44,1.16,0.91,2.13,0.89,1.52,1.78,1.05,1.63,1.88,1.29,2.19,0.8,1.7,3.26,2.38 255 | 1.13,0.93,0.32,1.1,1.68,1.57,0.52,2.99,0.58,1.49,2.17,0.95,1.34,0.76,2.5,2.33,1.34,2.41,6.12,6.52,3.87,1.17,1.43,1.15,2.69,1.36,1.43,1.5,1.17,1.49,1.14,0.98,2.25,0.82,1.59,1.63,0.94,1.72,1.81,1.43,2.1,0.88,1.56,3.31,2.46 256 | 1.2,0.95,0.66,1.18,1.8,1.76,0.66,2.94,0.73,1.53,2.28,1.04,1.38,0.84,2.5,2.34,1.23,2.2,6.1,6.52,3.8,1.14,1.48,0.98,2.74,0.88,1.52,1.46,1.28,1.23,1.21,0.85,2.38,0.93,1.65,1.39,0.88,1.76,1.92,1.56,2.22,0.91,1.4,3.33,2.57 257 | 1.28,1.03,0.88,1.24,1.87,1.97,0.75,3.04,0.92,1.61,2.37,1.1,1.4,0.91,2.55,2.37,1.27,2.36,6.11,6.53,3.71,1.19,1.52,1.1,2.79,1.05,1.64,1.43,1.39,1.31,1.3,0.75,2.5,1.0,1.63,1.26,0.87,1.8,1.95,1.58,2.24,0.96,1.37,3.35,2.64 258 | 1.37,1.13,1.01,1.33,1.71,2.12,0.5,3.33,0.87,1.78,2.23,1.12,1.41,0.73,2.62,2.35,1.41,2.47,6.11,6.53,3.66,1.23,1.55,1.02,2.84,1.19,1.75,1.4,1.3,1.53,1.35,0.83,2.4,1.02,1.66,1.17,0.89,1.82,2.03,1.63,2.26,0.94,1.48,3.4,2.72 259 | 1.54,1.2,0.95,1.43,1.96,2.21,0.76,3.69,0.68,1.86,2.51,1.37,2.1,0.82,2.79,2.62,2.21,2.32,6.2,6.59,2.92,1.61,1.74,1.52,3.15,1.63,1.73,1.59,1.33,1.41,1.53,0.88,2.52,1.5,2.07,1.52,1.25,2.24,2.06,1.76,2.45,1.79,1.98,3.73,2.89 260 | 1.56,1.23,1.01,1.47,2.01,2.24,0.79,3.89,0.98,1.9,2.54,1.0,2.17,0.89,2.82,2.66,2.29,2.37,6.21,6.61,3.0,1.67,1.8,1.58,3.19,1.68,1.77,1.63,1.37,1.47,1.58,0.94,2.75,1.54,2.1,1.54,1.32,2.25,2.1,1.88,2.48,1.71,2.03,3.69,2.94 261 | 1.59,1.28,1.05,1.5,2.05,2.28,0.84,3.89,1.2,1.95,2.63,1.19,2.28,0.98,2.84,2.75,2.42,2.43,6.23,6.62,3.04,1.79,1.99,1.61,3.26,1.74,1.81,1.67,1.4,1.5,1.62,1.01,2.92,1.56,2.16,1.63,1.5,2.29,2.27,1.87,2.5,1.76,2.09,3.78,3.01 262 | 1.61,1.35,1.11,1.54,2.1,2.33,0.9,3.75,1.29,2.0,2.7,1.32,2.34,1.05,2.96,2.78,2.47,2.48,6.23,6.63,3.18,1.94,2.06,1.66,3.34,1.79,1.85,1.71,1.45,1.56,1.69,1.08,3.03,1.61,2.19,1.7,1.58,2.35,2.28,1.99,2.51,1.89,2.21,3.85,3.07 263 | 1.64,1.42,1.18,1.6,2.14,2.4,0.97,3.85,1.41,2.05,2.75,1.5,2.43,1.12,2.99,2.81,2.54,2.52,6.24,6.64,3.21,2.03,2.18,1.7,3.39,1.84,1.89,1.79,1.51,1.61,1.74,1.15,3.19,1.7,2.35,1.76,1.61,2.38,2.4,2.12,2.56,2.03,2.37,3.88,3.12 264 | 1.69,1.48,1.23,1.66,2.21,2.47,1.03,4.01,1.54,2.1,2.83,1.77,2.53,1.18,3.0,2.84,2.59,2.59,6.26,6.66,3.26,1.98,2.19,1.76,3.46,1.88,1.94,1.87,1.6,1.65,1.81,1.25,3.29,1.93,2.37,1.81,1.69,2.4,2.43,2.21,2.61,2.05,2.56,3.91,3.18 265 | 1.69,1.53,1.31,1.71,2.23,2.51,1.06,3.49,1.59,2.12,2.88,1.79,2.68,1.24,3.01,2.87,2.63,2.65,6.25,6.65,3.33,2.0,2.22,1.8,3.53,1.95,1.99,1.9,1.7,1.67,1.9,1.35,3.41,1.95,2.44,1.84,1.76,2.51,2.45,2.25,2.63,2.18,2.72,3.95,3.23 266 | 1.7,1.6,1.42,1.75,2.26,2.56,1.1,3.54,1.62,2.16,2.94,1.84,2.7,1.29,3.03,2.89,2.68,2.74,6.26,6.66,3.47,2.07,2.24,1.87,3.6,2.04,2.07,1.95,1.81,1.7,1.99,1.42,3.28,1.96,2.5,1.98,1.84,2.65,2.48,2.3,2.67,2.3,2.83,3.94,3.31 267 | 1.73,1.66,1.55,1.79,2.31,2.62,1.16,3.59,1.66,2.18,3.01,1.85,2.73,1.35,3.03,2.95,2.86,2.81,6.27,6.68,3.77,2.11,2.29,1.93,3.68,2.07,2.15,2.01,1.9,1.74,2.05,1.52,3.2,2.04,2.53,2.08,1.92,2.82,2.52,2.39,2.7,2.36,2.92,3.83,3.38 268 | 
1.75,1.71,1.63,1.83,2.36,2.67,1.21,3.59,1.73,2.23,3.06,1.93,2.82,1.39,3.05,2.98,2.98,2.89,6.26,6.69,3.81,2.16,2.28,1.97,3.71,2.15,2.23,2.13,2.01,1.79,2.13,1.63,3.19,2.06,2.55,2.19,2.0,2.99,2.62,2.48,2.83,2.45,2.99,3.79,3.5 269 | 1.73,1.75,1.71,1.89,2.43,2.73,1.26,3.57,1.79,2.28,3.11,1.97,2.83,1.46,3.06,3.0,3.01,2.95,6.27,6.69,3.94,2.19,2.36,1.84,3.59,2.22,2.32,2.3,2.14,1.84,2.21,1.75,3.22,2.1,2.57,2.21,2.02,3.09,2.64,2.5,2.88,2.47,3.04,3.78,3.61 270 | 1.61,1.8,1.8,1.81,2.51,2.81,1.36,3.57,1.9,2.32,3.16,1.69,2.84,1.52,3.08,2.99,3.02,3.04,6.28,6.71,4.06,2.24,2.42,1.32,3.34,2.3,2.43,2.33,2.25,1.89,2.18,1.87,3.16,2.08,2.52,2.22,1.99,3.12,2.7,2.54,2.89,2.24,3.07,3.74,3.58 271 | 1.45,1.92,1.85,1.59,2.59,2.84,1.44,3.42,1.99,2.38,3.18,1.11,2.49,1.56,3.0,2.93,2.77,3.07,6.27,6.69,4.08,2.27,2.29,1.2,3.26,2.25,1.93,2.32,2.27,1.91,2.22,1.89,3.12,1.98,2.37,2.29,1.86,3.08,2.82,2.42,2.72,1.95,2.99,3.76,3.41 272 | 1.3,1.95,1.98,1.41,2.66,2.85,1.49,3.29,2.05,2.41,3.2,1.3,1.71,1.62,2.91,2.88,2.36,3.1,6.27,6.7,4.11,2.06,1.95,1.1,3.12,2.18,1.98,2.31,2.3,1.92,2.25,1.93,2.79,1.62,2.24,2.08,1.84,2.95,2.87,2.35,2.31,1.6,2.64,3.8,3.21 273 | 1.22,2.07,2.19,1.23,2.61,2.9,1.56,3.29,1.85,2.45,3.05,1.28,1.33,1.6,2.86,2.85,1.73,3.12,6.28,6.7,4.14,1.52,1.53,0.98,3.01,2.05,2.01,2.28,2.32,1.86,2.2,1.98,2.65,1.27,1.89,2.0,1.43,2.81,2.89,2.21,2.12,1.43,2.44,3.83,3.12 274 | 1.05,2.2,2.3,1.05,2.45,2.82,1.62,3.24,1.69,2.18,2.93,1.33,1.4,1.52,2.79,2.84,1.1,3.15,6.28,6.68,4.07,1.37,1.37,0.77,2.9,1.85,1.97,2.26,2.35,1.67,2.11,2.0,2.25,1.03,1.55,1.81,1.31,2.49,2.65,2.31,1.9,1.32,2.17,3.87,3.03 275 | 0.92,2.09,2.01,0.83,2.26,2.62,1.46,3.19,1.45,2.08,2.84,1.34,1.44,1.34,2.76,2.8,1.15,3.07,6.29,6.68,4.06,1.31,1.18,0.64,2.79,1.75,1.95,2.23,2.3,1.59,1.9,2.01,2.06,0.95,1.56,1.23,1.23,2.05,2.6,1.94,1.91,1.24,1.81,3.94,2.97 276 | 0.78,1.91,1.63,0.71,2.14,2.42,1.24,2.94,1.26,1.96,2.69,1.37,1.46,1.22,2.67,2.77,1.18,2.91,6.29,6.67,4.03,1.12,1.15,0.56,2.74,1.67,1.87,2.2,2.21,1.47,1.75,1.85,2.17,0.9,1.52,1.16,1.17,1.84,2.56,1.87,1.94,1.21,1.69,3.96,2.89 277 | 0.71,1.54,0.95,0.68,1.82,2.12,1.01,3.41,0.85,1.68,2.4,1.15,1.1,1.07,2.66,2.68,1.12,2.69,6.29,6.68,3.88,0.87,1.11,0.52,2.29,1.46,1.76,1.94,1.84,1.5,1.77,1.61,2.22,0.86,1.77,1.1,0.84,1.82,2.42,1.45,1.73,0.74,1.14,4.21,2.92 278 | 0.66,1.17,0.41,0.66,1.61,1.94,0.69,3.89,0.52,1.58,2.08,1.01,0.37,0.76,2.7,2.43,1.07,2.47,6.28,6.68,3.72,0.58,0.99,0.54,2.04,1.25,1.65,1.91,1.61,1.54,1.8,1.26,2.24,0.7,2.03,0.55,0.57,1.89,2.41,1.28,1.5,0.53,0.65,4.38,2.98 279 | 0.69,0.71,0.79,0.63,1.36,1.46,0.46,3.85,0.45,1.4,1.86,0.9,0.06,0.58,2.71,2.37,1.08,2.17,6.28,6.69,3.61,0.61,0.93,0.55,2.09,1.28,1.69,1.88,1.4,1.56,1.84,1.04,2.26,0.77,2.25,0.58,0.48,1.91,2.44,1.21,1.51,0.42,0.55,4.5,3.04 280 | 0.8,0.77,0.84,0.7,1.25,1.28,0.38,3.69,0.17,1.33,1.57,0.93,0.18,0.52,2.73,2.38,1.14,2.04,6.29,6.69,3.62,0.66,0.97,0.6,2.15,1.35,1.61,1.63,1.25,1.45,1.87,0.85,2.32,0.83,2.3,0.59,0.51,1.93,2.46,1.26,1.55,0.44,0.57,4.49,3.11 281 | 0.94,0.86,0.96,0.78,1.19,1.34,0.35,2.69,0.24,1.36,1.6,0.95,0.24,0.56,2.75,2.38,1.19,2.09,6.3,6.7,3.66,0.69,1.03,0.68,2.23,1.43,1.66,1.38,1.18,1.29,1.92,0.88,2.33,0.9,2.32,0.63,0.54,1.95,2.52,1.3,1.62,0.49,0.62,4.45,3.18 282 | 1.07,0.97,1.18,0.9,1.27,1.4,0.4,2.74,0.31,1.44,1.71,1.01,0.34,0.65,2.76,2.4,1.27,2.17,6.3,6.72,3.7,0.7,1.06,0.75,2.28,1.5,1.72,1.14,1.23,1.34,2.0,0.94,2.34,0.91,2.32,0.67,0.59,2.02,2.55,1.31,1.69,0.56,0.64,4.46,3.23 283 | 
1.1,1.02,1.25,0.96,1.38,1.47,0.46,2.45,0.47,1.49,1.77,1.03,0.41,0.71,2.77,2.42,1.28,2.21,6.3,6.71,3.73,0.73,1.07,0.82,2.36,1.55,1.79,1.2,1.27,1.39,2.04,1.07,2.12,0.97,2.35,0.77,0.63,2.05,2.57,1.42,1.71,0.68,0.69,4.44,3.3 284 | 1.19,1.07,1.32,1.02,1.46,1.63,0.53,2.34,0.62,1.53,1.82,1.07,0.57,0.77,2.8,2.45,1.38,2.29,6.31,6.71,3.75,0.78,1.1,0.9,2.44,1.61,1.85,1.26,1.32,1.46,2.09,1.17,2.2,1.06,2.35,0.81,0.66,2.06,2.6,1.47,1.76,0.71,0.72,4.32,3.36 285 | 1.23,1.13,1.41,1.06,1.51,1.78,0.6,2.29,0.69,1.6,1.86,1.11,0.73,0.84,2.8,2.46,1.37,2.35,6.31,6.69,3.76,0.82,1.13,0.97,2.49,1.68,1.89,1.31,1.38,1.54,2.14,1.25,2.26,1.08,2.39,0.83,0.68,2.09,2.68,1.49,1.8,0.72,0.75,4.31,3.43 286 | 1.26,1.22,1.52,1.11,1.56,1.9,0.69,2.29,0.81,1.67,1.94,1.14,0.79,0.95,2.82,2.5,1.44,2.41,6.33,6.67,3.81,0.94,1.19,1.05,2.56,1.75,1.96,1.35,1.41,1.65,2.19,1.34,2.34,1.09,2.41,0.85,0.72,2.15,2.71,1.5,1.84,0.77,0.8,4.29,3.49 287 | 1.34,1.3,1.64,1.16,1.66,1.98,0.81,2.36,0.9,1.77,2.01,1.26,0.93,1.07,2.83,2.51,1.49,2.47,6.34,6.67,3.82,0.93,1.21,1.12,2.64,1.83,2.02,1.43,1.49,1.76,2.25,1.41,2.44,1.12,2.44,0.9,0.76,2.17,2.73,1.54,1.89,0.79,0.86,4.24,3.53 288 | 1.41,1.34,1.71,1.23,1.75,2.03,0.92,2.42,0.96,1.89,2.07,1.38,1.06,1.19,2.84,2.51,1.51,2.52,6.35,6.66,3.84,1.01,1.24,1.2,2.71,1.88,2.09,1.5,1.58,1.87,2.32,1.46,2.56,1.15,2.48,0.92,0.92,2.2,2.73,1.59,1.91,0.8,0.91,4.2,3.65 289 | 1.5,1.49,1.76,1.31,1.85,2.12,1.03,2.46,0.99,1.98,2.1,1.48,1.14,1.25,2.84,2.52,1.55,2.59,6.35,6.66,3.84,1.0,1.29,1.3,2.79,1.95,2.17,1.55,1.67,1.91,2.38,1.55,2.68,1.16,2.49,0.98,0.95,2.24,2.76,1.61,1.98,0.83,0.95,4.19,4.07 290 | 1.75,1.56,1.78,1.38,1.97,2.23,1.16,2.49,1.08,2.08,2.13,1.51,1.2,1.34,2.86,2.53,1.6,2.67,6.34,6.64,3.94,1.12,1.34,1.41,2.85,2.02,2.25,1.68,1.75,1.98,2.46,1.62,2.73,1.21,2.52,1.02,0.97,2.28,2.8,1.66,2.02,0.89,1.01,4.14,4.11 291 | 1.9,1.67,1.81,1.44,2.07,2.31,1.28,2.59,1.14,2.19,2.18,1.55,1.24,1.46,2.87,2.55,1.66,2.75,6.34,6.63,3.98,1.17,1.42,1.53,2.93,2.08,2.31,1.77,1.81,2.03,2.52,1.69,2.78,1.27,2.5,1.09,1.04,2.34,2.84,1.75,2.05,0.98,1.09,4.08,4.14 292 | 2.02,1.75,1.84,1.5,2.18,2.38,1.37,2.64,1.19,2.31,2.22,1.62,1.29,1.57,2.9,2.57,1.68,2.86,6.33,6.63,4.03,1.22,1.47,1.62,2.99,2.15,2.37,1.89,1.86,2.08,2.59,1.76,2.82,1.35,2.55,1.12,1.12,2.38,2.87,1.78,2.1,1.1,1.14,4.03,4.18 293 | 2.11,1.9,1.86,1.57,2.26,2.47,1.45,2.69,1.26,2.42,2.29,1.64,1.32,1.66,2.96,2.58,1.77,2.93,6.32,6.62,4.12,1.34,1.55,1.7,3.05,2.22,2.42,1.95,1.94,2.12,2.64,1.8,2.86,1.44,2.57,1.14,1.16,2.42,2.89,1.81,2.15,1.17,1.22,3.88,4.21 294 | 2.18,1.97,1.89,1.66,2.34,2.58,1.52,2.69,1.31,2.49,2.35,1.68,1.35,1.74,3.02,2.61,1.83,3.0,6.33,6.61,4.15,1.39,1.6,1.79,3.14,2.3,2.47,2.03,2.0,2.18,2.7,1.86,2.9,1.48,2.59,1.16,1.25,2.47,2.94,1.85,2.2,1.24,1.3,3.84,4.25 295 | 2.25,2.1,1.93,1.72,2.46,2.65,1.57,2.74,1.39,2.56,2.42,1.75,1.43,1.84,3.05,2.65,1.89,3.06,6.35,6.61,4.17,1.43,1.62,1.83,3.22,2.37,2.52,2.1,2.05,2.22,2.74,1.92,2.92,1.53,2.63,1.23,1.3,2.53,2.96,1.9,2.27,1.33,1.39,3.83,4.07 296 | 2.3,2.18,1.98,1.78,2.53,2.73,1.61,2.79,1.44,2.63,2.46,1.76,1.49,1.9,3.06,2.66,1.95,3.11,6.36,6.62,4.19,1.55,1.64,1.9,3.29,2.43,2.57,2.14,2.11,2.27,2.78,1.98,2.95,1.62,2.67,1.28,1.35,2.55,2.97,1.96,2.34,1.39,1.45,3.86,4.11 297 | 2.34,2.21,2.06,1.85,2.6,2.82,1.64,2.84,1.47,2.7,2.53,1.78,1.58,1.96,3.09,2.68,1.98,3.17,6.36,6.62,4.2,1.66,1.67,1.98,3.37,2.47,2.6,2.17,2.16,2.31,2.81,2.04,3.0,1.64,2.68,1.3,1.38,2.57,3.0,1.98,2.39,1.44,1.53,3.88,4.14 298 | 
2.38,2.27,2.17,1.92,2.68,2.9,1.67,2.89,1.53,2.74,2.62,1.83,1.67,2.04,3.1,2.69,2.03,3.22,6.35,6.62,4.27,1.78,1.75,2.05,3.44,2.54,2.64,2.2,2.22,2.36,2.85,2.09,3.08,1.69,2.74,1.39,1.41,2.61,3.04,2.03,2.48,1.5,1.57,3.89,4.18 299 | 2.42,2.3,2.25,1.97,2.77,2.99,1.7,2.94,1.57,2.79,2.74,1.88,1.7,2.11,3.13,2.69,2.11,3.29,6.37,6.64,4.28,1.84,1.83,2.11,3.5,2.58,2.67,2.23,2.27,2.39,2.9,2.13,3.15,1.73,2.75,1.41,1.46,2.66,3.08,2.09,2.53,1.57,1.65,3.91,4.21 300 | 2.4,2.34,2.33,2.02,2.85,3.05,1.75,2.99,1.62,2.87,2.85,1.94,1.73,2.18,3.14,2.7,2.21,3.34,6.37,6.64,4.34,1.89,1.97,2.2,3.57,2.62,2.71,2.28,2.31,2.44,2.94,2.16,3.2,1.76,2.76,1.44,1.54,2.7,3.1,2.11,2.61,1.63,1.71,3.94,4.25 301 | 2.39,2.36,2.36,2.06,2.91,3.07,1.78,2.99,1.64,2.89,2.93,1.97,1.76,2.21,3.15,2.7,2.34,3.38,6.38,6.64,4.36,1.93,2.04,2.26,3.64,2.64,2.73,2.26,2.35,2.46,2.98,2.19,3.24,1.84,2.78,1.46,1.56,2.72,3.14,2.22,2.65,1.68,1.73,3.95,4.29 302 | 2.39,2.37,2.38,2.08,2.94,3.1,1.81,2.97,1.66,2.92,2.96,2.04,1.78,2.25,3.16,2.74,2.45,3.41,6.38,6.65,4.37,2.02,2.08,2.3,3.69,2.65,2.74,2.23,2.37,2.45,3.0,2.18,3.29,1.89,2.81,1.51,1.59,2.78,3.21,2.25,2.72,1.7,1.77,3.99,4.29 303 | 2.38,2.36,2.37,2.07,2.92,3.09,1.8,2.97,1.65,2.9,2.93,2.08,1.79,2.24,3.18,2.79,2.44,3.39,6.37,6.66,4.37,2.04,2.11,2.29,3.67,2.62,2.73,2.22,2.36,2.44,2.98,2.17,3.32,1.9,2.83,1.52,1.65,2.79,3.19,2.26,2.78,1.69,1.78,3.97,4.27 304 | 2.36,2.34,2.36,2.05,2.91,3.08,1.79,2.96,1.63,2.88,2.91,2.13,1.82,2.23,3.2,2.81,2.46,3.38,6.36,6.66,4.34,2.03,2.14,2.28,3.66,2.58,2.72,2.2,2.34,2.43,2.95,2.15,3.27,1.91,2.82,1.54,1.67,2.8,3.2,2.27,2.8,1.71,1.81,4.0,4.26 305 | 2.35,2.32,2.34,2.03,2.89,3.07,1.76,2.96,1.62,2.85,2.88,2.14,1.83,2.21,3.24,2.82,2.49,3.35,6.36,6.67,4.35,2.07,2.15,2.26,3.64,2.55,2.7,2.18,2.32,2.42,2.93,2.13,3.21,1.95,2.87,1.57,1.73,2.87,3.23,2.3,2.83,1.77,1.93,4.02,4.24 306 | 2.32,2.29,2.31,2.02,2.86,3.05,1.74,2.99,1.6,2.83,2.88,2.17,1.89,2.19,3.27,2.86,2.53,3.33,6.35,6.68,4.33,2.14,2.18,2.24,3.62,2.51,2.68,2.16,2.29,2.4,2.9,2.11,3.17,2.02,2.89,1.61,1.77,2.89,3.28,2.31,2.86,1.84,1.96,4.05,4.21 307 | 2.3,2.22,2.26,1.97,2.81,3.02,1.68,2.92,1.55,2.77,2.83,2.15,1.86,2.14,3.28,2.84,2.51,3.28,6.35,6.67,4.35,2.1,2.17,2.2,3.55,2.49,2.62,2.12,2.25,2.36,2.85,2.07,3.12,2.04,2.85,1.58,1.78,2.84,3.24,2.34,2.88,1.87,2.03,3.99,4.18 308 | 2.25,2.16,2.21,1.9,2.4,2.96,1.62,2.93,1.5,2.7,2.77,2.16,1.87,2.08,3.29,2.83,2.5,3.21,6.35,6.67,4.38,2.03,2.15,2.16,3.49,2.44,2.57,2.05,2.2,2.31,2.8,2.03,3.06,2.02,2.84,1.62,1.76,2.84,3.23,2.33,2.87,1.88,2.02,3.97,4.13 309 | 2.2,2.11,2.14,1.83,2.34,2.91,1.55,2.94,1.44,2.65,2.71,2.14,1.88,2.02,3.29,2.8,2.5,3.14,6.34,6.66,4.35,2.02,2.12,2.1,3.42,2.36,2.51,1.99,2.14,2.23,2.73,1.99,3.02,2.01,2.81,1.61,1.77,2.82,3.23,2.3,2.86,1.75,1.98,3.94,4.07 310 | 2.14,2.05,2.1,1.77,2.27,2.82,1.51,2.89,1.33,2.6,2.63,2.11,1.85,1.89,3.25,2.66,2.44,3.07,6.34,6.65,4.3,2.0,2.11,2.05,3.34,2.26,2.47,1.92,2.1,2.17,2.7,1.89,2.93,1.96,2.75,1.59,1.73,2.8,3.18,2.17,2.83,1.68,1.93,3.7,4.01 311 | 2.03,1.96,2.05,1.72,2.22,2.74,1.46,2.92,1.26,2.49,2.55,2.1,1.83,1.78,3.23,2.63,2.37,2.98,6.33,6.65,4.22,1.91,2.08,2.0,3.25,2.13,2.39,1.84,2.02,2.09,2.68,1.85,2.88,1.93,2.73,1.56,1.72,2.71,3.09,2.14,2.8,1.5,1.9,3.69,3.91 312 | 1.97,1.87,1.99,1.63,2.16,2.66,1.37,2.92,1.22,2.4,2.43,2.04,1.82,1.66,3.2,2.61,2.34,2.87,6.33,6.64,4.05,1.82,2.07,1.94,3.07,2.04,2.34,1.77,1.95,1.99,2.41,1.75,2.83,1.91,2.67,1.53,1.67,2.68,3.1,2.09,2.78,1.48,1.88,3.64,3.81 313 | 
1.89,1.74,1.95,1.6,2.11,2.6,1.31,3.03,1.23,2.38,2.38,1.84,1.51,1.63,3.03,2.64,2.09,2.69,6.3,6.61,4.24,1.75,1.87,1.89,3.04,2.0,2.29,1.73,1.87,1.98,2.3,1.67,2.73,1.54,2.39,1.27,1.47,2.28,3.05,1.94,2.7,1.29,1.99,3.76,3.61 314 | 1.6,1.47,1.91,1.41,1.97,2.42,1.06,3.24,1.18,2.33,2.35,1.55,1.46,1.59,2.87,2.52,2.05,2.53,6.28,6.6,4.29,1.53,1.57,1.64,2.79,1.88,2.08,1.71,1.78,1.94,2.11,1.64,2.6,1.35,2.06,1.33,0.93,2.07,2.96,1.86,2.64,1.17,2.1,3.72,3.48 315 | 1.51,1.25,1.61,1.27,2.01,2.27,1.14,3.06,0.79,2.09,2.16,1.47,1.28,1.25,2.82,2.4,2.01,2.32,6.27,6.58,4.33,1.36,1.29,1.41,2.49,1.74,1.95,1.65,1.5,1.74,1.94,1.46,2.46,1.23,1.93,1.46,0.87,1.99,2.81,1.81,2.65,0.95,2.24,3.64,3.24 316 | 1.43,1.18,1.72,1.36,2.07,2.12,1.21,2.74,0.57,2.06,2.04,1.35,1.12,1.14,2.76,2.39,1.95,2.39,6.23,6.57,4.41,1.33,1.34,1.26,2.14,1.77,2.02,1.34,1.4,1.59,1.75,1.32,2.52,1.05,1.72,1.47,0.71,1.96,2.83,1.78,2.68,0.78,2.41,3.56,3.01 317 | 1.3,0.91,1.79,1.44,1.94,2.15,1.02,2.69,0.47,1.98,1.95,1.14,1.23,0.96,2.69,2.31,1.93,2.45,6.19,6.57,4.46,1.14,1.42,1.12,1.84,1.68,2.08,1.26,1.35,1.49,1.81,1.39,2.39,0.74,1.65,1.4,0.76,1.81,2.6,1.76,2.61,0.4,2.42,3.6,2.9 318 | 1.35,0.54,1.59,1.33,2.0,2.2,1.1,2.84,0.53,1.93,1.98,0.93,1.24,1.04,2.63,1.97,1.74,2.5,6.16,6.56,4.48,0.92,1.18,1.15,1.95,1.73,1.96,1.12,1.39,1.52,1.88,1.46,2.23,0.63,1.6,1.32,0.65,1.78,2.24,1.69,2.45,0.43,2.35,3.63,2.95 319 | 1.18,0.51,1.51,1.4,1.81,2.32,1.23,2.29,0.55,1.79,2.03,1.02,1.13,1.12,2.34,1.83,1.36,2.59,6.17,6.58,4.32,0.64,1.05,1.2,2.04,1.35,1.64,1.15,1.42,1.59,1.92,1.29,2.3,0.6,1.5,1.19,0.49,1.62,2.02,1.74,2.24,0.67,1.9,3.51,2.86 320 | 1.01,0.58,1.21,1.46,1.72,2.27,1.29,2.19,0.51,1.68,2.11,1.05,1.19,1.18,2.38,1.89,1.25,2.45,6.17,6.59,4.21,0.66,1.08,1.35,2.14,1.41,1.7,1.2,1.49,1.39,1.7,1.18,2.36,0.62,1.48,1.07,0.54,1.58,1.89,1.8,2.12,0.88,1.66,3.49,2.91 321 | 0.97,0.55,1.42,1.18,1.63,2.36,1.06,2.09,0.7,1.86,1.9,1.01,1.3,1.0,2.43,1.99,1.33,2.24,6.18,6.59,4.24,0.73,1.15,1.13,2.24,1.45,1.76,1.24,1.21,1.44,1.76,1.34,2.29,0.7,1.6,1.16,0.67,1.71,1.83,1.86,2.07,0.93,1.64,3.35,3.01 322 | 0.9,0.85,1.53,1.22,1.74,2.29,0.98,2.79,0.85,1.91,1.98,0.99,1.33,1.14,2.51,2.01,1.43,2.37,6.2,6.61,4.26,0.85,1.27,1.24,2.34,1.5,1.63,1.02,1.3,1.55,1.55,1.38,2.35,0.75,1.74,1.21,0.82,1.85,1.86,1.92,2.17,1.09,1.43,3.28,3.06 323 | 0.85,0.96,1.31,1.27,1.66,2.06,1.15,2.99,1.06,1.76,2.11,1.04,1.39,1.24,2.54,2.05,1.62,2.43,6.18,6.62,4.3,0.96,1.38,1.42,2.27,1.55,1.73,1.06,1.36,1.63,1.61,1.44,2.34,0.81,1.73,1.3,0.89,1.99,1.98,2.07,2.25,0.91,1.45,3.3,2.98 324 | 0.8,1.02,1.01,0.93,1.42,1.42,0.9,1.84,0.87,1.51,1.69,0.86,1.1,0.99,2.37,1.9,1.46,1.9,6.14,6.58,4.27,0.78,1.06,1.04,1.84,1.15,1.3,0.45,0.9,1.41,1.3,1.08,2.16,0.76,1.37,1.1,0.71,1.93,1.87,2.04,2.04,0.64,1.19,3.18,2.57 325 | 0.7,0.82,0.86,0.76,0.31,1.22,1.01,0.95,0.93,1.2,1.53,0.77,1.19,0.73,2.48,1.92,1.38,1.17,6.15,6.6,4.56,0.65,1.14,0.73,1.64,1.05,1.18,0.27,0.83,1.27,1.2,0.85,1.88,0.34,1.68,1.07,0.76,1.91,1.88,2.06,2.05,0.71,1.56,3.43,2.26 326 | 0.5,0.32,0.11,0.51,0.46,1.22,1.02,1.19,0.95,0.93,1.79,0.68,1.27,0.56,2.5,1.93,1.29,1.22,6.16,6.61,4.74,0.51,1.17,0.35,1.29,0.65,1.06,0.36,0.54,1.02,0.88,0.35,1.94,0.03,1.9,1.02,0.71,1.83,1.89,2.03,2.13,0.78,1.67,3.86,2.18 327 | 0.63,0.4,0.46,0.58,0.77,1.27,0.12,1.74,0.2,1.05,1.9,0.74,1.3,0.64,2.59,1.96,1.27,1.43,6.17,6.6,4.63,0.43,1.08,0.49,1.37,0.75,1.12,0.49,0.6,1.09,0.93,0.38,2.03,0.38,2.01,1.13,0.55,1.85,1.93,1.98,2.12,0.85,1.72,3.91,2.3 328 | 
0.74,0.44,0.54,0.71,0.88,1.32,0.26,1.79,0.56,1.12,2.01,0.83,1.33,0.77,2.62,1.99,1.33,1.66,6.18,6.58,4.66,0.47,1.15,0.6,1.49,0.88,1.24,0.65,0.7,1.2,1.01,0.54,2.05,0.25,2.13,1.15,0.42,1.89,1.97,1.85,2.17,0.7,1.79,4.06,2.36 329 | 0.79,0.62,0.62,0.76,0.95,1.37,0.35,2.34,0.79,1.3,2.14,0.86,1.38,0.94,2.56,1.98,1.36,1.84,6.19,6.57,4.67,0.48,1.21,0.73,1.74,1.0,1.27,0.69,0.76,1.29,1.1,0.6,2.08,0.01,2.14,1.18,0.36,1.96,2.05,1.81,2.2,0.67,1.84,4.08,2.48 330 | 0.88,0.63,0.79,0.85,1.08,1.48,0.39,2.61,0.94,1.46,2.37,0.87,1.36,1.15,2.48,2.01,1.41,2.0,6.18,6.57,4.7,0.62,1.26,0.89,1.92,1.06,1.42,0.65,0.82,1.38,1.25,0.63,2.08,0.06,1.97,1.16,0.37,2.02,2.08,1.83,2.22,0.63,1.85,4.07,2.7 331 | 0.76,0.69,0.72,0.78,1.07,1.4,0.34,2.36,1.01,1.08,2.42,0.9,1.47,1.22,2.5,2.02,1.46,2.11,6.2,6.55,4.71,0.7,1.4,0.92,1.96,1.03,1.39,0.64,0.79,1.34,1.22,0.63,1.99,0.13,2.04,1.2,0.45,2.03,2.22,2.01,2.24,0.76,1.9,4.09,2.78 332 | 0.82,0.75,0.78,0.81,1.33,1.52,0.46,2.74,1.09,1.3,2.47,0.95,1.55,1.3,2.53,2.04,1.47,2.19,6.21,6.58,4.73,0.77,1.57,0.98,2.06,1.1,1.47,0.8,0.88,1.39,1.36,0.69,2.05,0.31,2.07,1.22,0.52,2.11,2.35,2.09,2.27,0.88,1.93,4.08,2.86 333 | 0.9,0.88,0.91,0.69,0.96,1.64,0.45,2.79,1.17,1.52,2.56,0.97,1.58,1.39,2.54,2.06,1.6,2.28,6.23,6.59,4.78,0.81,1.62,1.04,2.14,1.17,1.56,0.79,0.73,1.29,1.19,0.77,2.06,0.74,2.12,1.36,0.56,2.17,2.43,2.14,2.3,0.91,1.95,4.05,2.97 334 | 1.01,0.99,1.02,0.75,1.05,1.81,0.52,2.99,1.26,1.75,2.63,1.04,1.62,1.47,2.6,2.07,1.64,2.37,6.24,6.59,4.81,0.89,1.65,1.09,2.23,1.25,1.64,0.85,0.85,1.38,1.25,0.92,2.14,0.91,2.05,1.38,0.66,2.21,2.48,2.21,2.33,0.95,2.04,4.04,3.03 335 | 1.19,1.05,1.2,1.03,1.24,1.92,0.67,3.64,1.44,1.87,2.71,1.08,1.73,1.52,2.64,2.18,1.75,2.44,6.26,6.62,4.77,1.02,1.74,1.15,2.28,1.46,1.87,1.14,1.05,1.46,1.55,1.02,2.22,1.27,2.13,1.57,0.8,2.26,2.57,2.23,2.32,1.01,2.11,3.98,3.12 336 | 1.24,1.14,1.22,1.17,1.55,2.04,0.75,3.35,1.63,1.92,2.74,1.23,1.82,1.58,2.63,2.21,1.78,2.5,6.25,6.63,4.76,1.14,1.8,1.2,2.35,1.5,1.91,1.22,1.17,1.51,1.61,1.07,2.25,1.28,2.0,1.49,0.86,2.24,2.6,2.27,2.36,1.0,2.12,4.02,3.01 337 | 1.55,1.12,1.52,1.28,1.73,2.16,0.84,2.94,1.12,1.96,2.78,1.28,1.84,1.65,2.69,2.21,1.85,2.19,6.25,6.61,4.78,1.23,1.83,1.26,2.48,1.44,1.96,1.27,1.24,1.4,1.57,1.18,2.14,1.68,1.95,1.45,0.91,2.31,2.63,2.3,2.35,1.13,2.35,3.94,3.09 338 | 0.85,0.77,0.9,1.01,1.34,1.64,0.47,2.89,1.16,1.98,2.48,1.3,1.86,1.44,2.7,2.25,1.99,2.26,6.26,6.6,4.77,1.31,1.84,1.35,2.64,1.11,1.47,0.83,0.88,1.43,1.47,0.74,2.19,1.13,1.98,1.41,0.96,2.35,2.64,2.35,2.37,1.19,2.44,3.79,3.13 339 | 1.34,1.44,1.81,1.54,1.99,2.42,1.07,3.02,1.23,2.03,2.55,1.36,1.94,1.52,2.8,2.25,2.07,2.34,6.28,6.58,4.81,1.33,1.93,1.42,2.74,1.2,2.08,1.54,1.49,1.49,1.84,1.44,2.22,1.29,2.04,1.48,1.03,2.36,2.67,2.41,2.4,1.4,2.57,3.65,3.25 340 | 1.39,1.52,1.87,1.61,2.1,2.53,1.2,2.99,1.01,2.06,2.8,1.44,2.05,1.59,2.82,2.27,2.1,2.43,6.3,6.58,4.8,1.4,2.01,1.55,2.89,1.33,1.69,2.03,0.95,2.29,1.93,1.48,2.28,1.33,2.05,1.46,1.09,2.38,2.74,2.48,2.42,1.48,2.65,3.63,3.31 341 | 1.44,1.65,1.98,1.68,2.2,2.64,1.27,3.34,1.04,2.08,2.9,1.57,2.18,1.67,2.83,2.3,2.21,2.49,6.3,6.58,4.85,1.46,2.06,1.61,3.04,1.42,2.21,1.72,1.6,1.62,1.99,1.65,2.32,1.35,2.07,1.52,1.1,2.42,2.75,2.5,2.43,1.63,2.71,3.59,3.41 342 | 1.53,1.73,2.11,1.79,2.29,2.73,1.4,2.94,1.09,2.11,2.98,1.71,2.32,1.74,2.77,2.29,2.23,2.54,6.31,6.56,4.78,1.45,2.08,1.68,3.14,1.51,2.26,1.81,1.67,1.69,2.05,1.73,2.37,1.44,2.14,1.54,1.05,2.29,2.8,2.44,2.04,1.85,2.72,3.04,3.35 343 | 
1.58,1.69,2.14,1.83,2.33,2.77,1.44,2.94,1.23,1.99,3.0,1.78,2.33,1.56,2.78,2.24,2.22,2.36,6.2,6.57,4.53,1.19,1.81,1.41,2.94,1.45,2.15,1.86,1.69,1.59,1.81,1.64,2.32,1.07,1.9,1.55,1.1,2.26,2.51,2.4,1.65,1.75,2.49,2.96,3.31 344 | 1.62,1.67,2.16,1.85,2.38,2.81,1.47,2.94,1.21,1.9,2.88,1.66,2.07,1.35,2.8,2.16,2.16,2.19,6.21,6.57,4.5,1.13,1.55,1.2,2.54,1.38,1.97,1.92,1.54,1.49,1.63,1.49,2.2,0.84,1.48,1.56,0.88,2.23,2.5,2.36,1.5,1.57,2.45,3.12,3.12 345 | 1.58,1.55,2.01,1.78,2.38,2.72,1.39,2.59,0.88,1.68,2.64,1.41,1.98,1.14,2.8,2.15,1.88,2.07,6.22,6.58,4.45,1.04,1.31,1.01,2.42,1.25,1.88,1.87,1.4,1.44,1.35,1.3,2.1,0.8,1.44,1.51,0.84,2.2,2.33,2.27,1.48,1.31,2.37,3.26,3.01 346 | 1.31,1.4,1.77,1.46,2.33,2.42,1.33,2.78,0.63,1.59,2.49,1.3,1.51,0.92,2.75,2.14,1.56,1.85,6.22,6.6,4.44,1.03,1.25,0.83,2.14,1.05,1.62,1.63,1.08,1.29,1.1,0.92,2.0,0.76,0.8,1.2,0.72,2.16,2.37,2.18,1.44,1.28,2.32,3.37,2.84 347 | 1.15,1.28,1.58,1.29,2.01,2.27,1.17,2.56,0.14,1.48,2.07,0.97,1.46,0.7,2.6,2.03,1.37,1.63,6.22,6.61,4.42,1.01,1.27,0.69,2.14,0.87,1.46,1.45,0.9,1.18,0.99,0.69,1.87,0.77,0.79,1.12,0.69,1.92,2.38,2.05,1.39,1.17,2.19,3.38,2.79 348 | 0.78,1.09,1.32,0.88,1.76,1.52,0.76,1.99,0.55,1.34,1.78,0.98,1.43,0.54,2.61,2.05,1.29,1.5,6.23,6.61,4.36,0.94,1.3,0.54,2.11,0.71,1.22,1.43,0.83,1.09,0.74,0.48,1.81,0.74,0.67,1.09,0.66,1.69,2.41,2.02,1.33,1.06,2.13,3.41,2.72 349 | 0.62,0.95,1.21,0.73,1.31,1.35,0.56,2.49,0.44,1.36,1.69,0.81,1.3,0.57,2.6,2.07,1.32,1.54,6.24,6.62,4.38,1.03,1.31,0.48,1.98,0.53,1.03,1.33,0.72,0.99,0.78,0.37,1.78,0.78,0.53,0.94,0.42,1.7,2.45,2.03,1.12,1.08,2.16,3.43,2.79 350 | 0.65,0.97,1.24,0.66,0.87,1.3,0.37,2.64,0.38,1.39,1.73,0.83,1.11,0.61,2.63,2.1,1.4,1.59,6.24,6.64,4.37,1.04,1.33,0.38,1.93,0.32,1.07,0.99,0.64,0.97,0.85,0.32,1.83,0.8,0.49,1.0,0.39,1.71,2.46,2.06,1.11,1.17,2.18,3.75,2.86 351 | 0.69,1.01,1.28,0.71,0.91,1.32,0.3,2.99,0.46,1.43,1.77,0.84,1.12,0.65,2.63,2.11,1.44,1.63,6.26,6.66,4.35,1.12,1.35,0.42,1.95,0.37,1.11,0.88,0.67,1.0,0.9,0.36,1.88,0.84,0.6,1.01,0.43,1.75,2.46,2.07,1.2,1.18,2.2,3.8,2.91 352 | 0.76,1.07,1.33,0.77,1.09,1.43,0.37,3.02,0.07,1.54,1.86,0.92,1.21,0.77,2.64,2.11,1.43,1.78,6.27,6.66,4.34,1.13,1.44,0.52,2.08,0.48,1.17,0.92,0.75,1.09,1.02,0.5,1.95,0.86,0.68,1.03,0.5,1.8,2.48,2.1,1.25,1.2,2.22,3.81,3.03 353 | 0.83,1.14,1.46,0.86,1.15,1.56,0.42,2.59,0.15,1.62,1.94,0.93,1.27,0.94,2.68,2.12,1.46,1.87,6.27,6.67,4.33,1.18,1.47,0.61,2.17,0.6,1.24,0.98,0.81,1.16,1.1,0.56,2.02,0.86,0.72,1.07,0.51,1.83,2.48,2.11,1.31,1.22,2.28,3.83,3.09 354 | 0.91,1.26,1.57,1.02,1.23,1.68,0.47,2.64,0.23,1.7,2.03,0.95,1.36,1.06,2.67,2.16,1.47,1.96,6.27,6.66,4.31,1.22,1.48,0.72,2.24,0.73,1.32,1.03,0.88,1.25,1.19,0.64,2.07,0.96,0.74,1.18,0.54,1.86,2.51,2.11,1.4,1.23,2.27,3.82,3.13 355 | 0.93,1.29,1.6,1.05,1.31,1.72,0.54,3.29,0.25,1.78,2.06,0.97,1.37,1.14,2.68,2.17,1.49,2.01,6.27,6.66,4.34,1.24,1.52,0.77,2.32,0.76,1.35,1.07,0.92,1.29,1.22,0.69,2.13,0.98,0.72,1.19,0.63,1.95,2.59,2.18,1.43,1.29,2.32,3.83,3.21 356 | 0.95,1.32,1.61,1.11,1.38,1.77,0.59,3.37,0.36,1.86,2.1,0.98,1.41,1.2,2.67,2.2,1.51,2.09,6.28,6.66,4.35,1.27,1.54,0.82,2.36,0.83,1.38,1.1,0.96,1.32,1.27,0.76,2.16,1.03,0.78,1.22,0.7,2.02,2.61,2.23,1.44,1.31,2.38,3.85,3.29 357 | 0.98,1.35,1.65,1.17,1.5,1.8,0.65,3.43,0.45,1.91,2.13,1.13,1.44,1.25,2.7,2.21,1.63,2.2,6.29,6.68,4.36,1.3,1.57,0.86,2.45,0.89,1.41,1.14,1.04,1.36,1.34,0.82,2.21,1.04,0.81,1.34,0.73,2.03,2.64,2.27,1.52,1.34,2.4,3.86,3.35 358 | 
1.01,1.39,1.69,1.23,1.59,1.83,0.68,3.49,0.56,1.97,2.18,1.14,1.46,1.33,2.72,2.24,1.66,2.29,6.29,6.68,4.36,1.31,1.59,0.9,2.55,0.94,1.45,1.17,1.09,1.4,1.4,0.88,2.26,1.11,0.92,1.39,0.75,2.06,2.67,2.3,1.65,1.39,2.45,3.87,3.42 359 | 1.05,1.43,1.71,1.31,1.64,1.88,0.72,3.57,0.64,2.01,2.23,1.18,1.52,1.36,2.72,2.28,1.68,2.37,6.3,6.69,4.38,1.35,1.66,0.94,2.66,0.98,1.47,1.2,1.13,1.44,1.48,0.95,2.3,1.17,0.92,1.43,0.84,2.1,2.73,2.33,1.69,1.4,2.49,3.89,3.51 360 | 1.08,1.46,1.77,1.38,1.79,1.93,0.77,3.64,0.68,2.08,2.27,1.2,1.53,1.44,2.75,2.29,1.71,2.45,6.3,6.7,4.37,1.37,1.73,0.97,2.74,1.05,1.5,1.24,1.2,1.5,1.57,1.03,2.35,1.19,0.99,1.5,0.87,2.13,2.74,2.32,1.75,1.42,2.49,3.89,3.61 361 | 1.19,1.5,1.81,1.41,1.84,2.02,0.86,2.29,0.8,2.16,2.32,1.23,1.55,1.49,2.78,2.37,1.78,2.51,6.29,6.71,4.39,1.4,1.74,1.01,2.8,1.12,1.57,1.32,1.27,1.59,1.63,1.11,2.37,1.2,1.0,1.53,0.89,2.15,2.75,2.35,1.75,1.48,2.5,3.93,3.67 362 | 1.25,1.54,1.86,1.45,1.9,2.1,0.95,2.36,0.89,2.22,2.35,1.24,1.67,1.52,2.79,2.4,1.83,2.59,6.29,6.71,4.41,1.42,1.75,1.06,2.88,1.21,1.64,1.43,1.36,1.69,1.7,1.18,2.4,1.22,1.03,1.55,0.9,2.16,2.77,2.38,1.77,1.49,2.54,3.97,3.72 363 | 1.3,1.71,1.91,1.51,1.95,2.17,0.99,2.46,0.96,2.29,2.4,1.26,1.78,1.58,2.82,2.49,1.85,2.66,6.3,6.7,4.41,1.46,1.77,1.13,2.94,1.31,1.72,1.57,1.44,1.73,1.79,1.27,2.45,1.25,1.05,1.57,0.94,2.19,2.8,2.42,1.84,1.54,2.55,3.98,3.77 364 | 1.36,1.82,1.93,1.55,2.06,2.28,1.13,2.49,1.02,2.38,2.45,1.29,1.82,1.64,2.84,2.49,1.86,2.74,6.31,6.69,4.54,1.51,1.82,1.19,3.0,1.4,1.81,1.71,1.51,1.78,1.83,1.35,2.48,1.27,1.08,1.61,0.98,2.22,2.81,2.45,1.85,1.56,2.61,4.02,3.8 365 | 1.41,1.95,1.98,1.63,2.12,2.34,1.22,2.74,1.1,2.41,2.52,1.34,1.94,1.71,2.84,2.51,1.94,2.83,6.31,6.68,4.61,1.54,1.86,1.36,3.09,1.52,1.9,1.77,1.59,1.83,1.9,1.41,2.52,1.32,1.11,1.62,1.02,2.22,2.81,2.51,1.9,1.59,2.63,4.02,3.83 366 | 1.46,2.01,2.11,1.69,2.17,2.48,1.29,2.79,1.17,2.44,2.56,1.43,2.01,1.78,2.88,2.52,1.97,2.91,6.32,6.68,4.92,1.57,1.89,1.42,3.17,1.6,1.99,1.83,1.64,1.89,1.98,1.5,2.55,1.36,1.13,1.69,1.03,2.26,2.83,2.51,1.91,1.65,2.66,4.04,3.88 367 | 1.5,2.07,2.22,1.75,2.28,2.56,1.36,2.84,1.25,2.48,2.63,1.45,2.08,1.89,2.9,2.55,2.03,3.0,6.32,6.68,5.18,1.65,2.0,1.5,3.28,1.65,2.03,2.02,1.68,1.9,2.02,1.56,2.63,1.53,1.19,1.76,1.16,2.38,2.97,2.54,1.97,1.67,2.8,4.06,3.91 368 | 1.61,2.02,2.31,1.83,2.42,2.62,1.43,2.84,1.4,2.54,2.74,1.57,2.14,2.01,2.94,2.58,2.11,3.11,6.33,6.69,5.3,1.71,2.12,1.56,3.39,1.71,2.07,2.05,1.71,1.93,2.09,1.63,2.7,1.66,1.32,1.83,1.24,2.5,3.03,2.57,2.18,1.69,2.86,4.09,3.95 369 | 1.75,2.15,2.31,1.9,2.49,2.7,1.51,2.84,1.48,2.59,2.83,1.61,2.22,2.1,2.94,2.6,2.19,3.24,6.32,6.69,5.39,1.79,2.21,1.64,3.46,1.76,2.13,2.08,1.75,1.96,2.14,1.66,2.77,1.74,1.47,1.96,1.37,2.65,3.21,2.61,2.47,1.73,2.97,4.11,3.99 370 | 1.83,2.23,2.39,1.96,2.55,2.75,1.59,2.65,1.56,2.68,2.89,1.7,2.29,2.2,2.95,2.71,2.27,3.31,6.31,6.71,5.71,1.93,2.28,1.73,3.58,1.82,2.2,2.1,1.78,1.99,2.2,1.78,2.86,1.85,1.64,2.07,1.49,2.78,3.37,2.65,2.7,1.74,3.09,4.12,4.02 371 | 1.99,2.27,2.41,2.02,2.62,2.82,1.67,2.69,1.65,2.78,2.96,1.76,2.4,2.27,3.01,2.78,2.34,3.34,6.3,6.72,5.8,2.02,2.33,1.8,3.64,1.91,2.27,2.13,1.84,2.02,2.25,1.96,2.91,2.0,1.81,2.22,1.67,2.93,3.45,2.67,2.87,1.91,3.22,4.14,4.05 372 | 2.08,2.23,2.39,2.05,2.66,2.89,1.74,2.69,1.75,2.82,3.03,1.87,2.58,2.24,3.06,2.86,2.41,3.31,6.3,6.72,5.95,2.06,2.36,1.88,3.7,1.96,2.32,2.16,1.89,2.09,2.32,2.05,2.95,2.23,2.12,2.34,1.82,2.97,3.5,2.69,3.03,1.92,3.33,4.12,4.07 373 | 
2.07,2.23,2.31,2.01,2.69,2.87,1.78,2.69,1.71,2.8,3.0,2.01,2.71,2.22,3.07,2.96,2.49,3.26,6.29,6.73,6.14,2.11,2.38,1.92,3.73,1.94,2.28,2.18,1.92,2.12,2.3,2.11,3.0,2.29,2.28,2.39,1.83,3.05,3.54,2.72,3.16,1.88,3.31,4.07,4.03 374 | 2.04,2.2,2.21,1.93,2.63,2.82,1.76,2.69,1.64,2.76,2.92,2.08,2.73,2.09,3.06,2.94,2.49,3.17,6.29,6.73,6.2,2.12,2.38,1.89,3.69,1.87,2.23,2.21,1.9,2.09,2.26,2.09,3.06,2.28,2.41,2.3,1.81,3.03,3.51,2.67,3.12,1.85,3.26,4.0,3.99 375 | 1.95,2.14,2.03,2.09,2.56,2.74,1.73,2.69,1.55,2.71,2.87,2.04,2.65,2.03,3.05,2.93,2.46,3.09,6.29,6.71,6.22,2.1,2.34,1.85,3.63,1.83,2.17,2.17,1.87,2.03,2.23,2.04,3.1,2.24,2.38,2.34,1.78,3.0,3.5,2.68,3.13,1.82,3.25,3.99,3.93 376 | 1.88,2.08,1.96,2.02,2.51,2.68,1.67,2.64,1.47,2.65,2.81,2.04,2.62,1.95,3.0,2.87,2.43,3.01,6.3,6.71,6.19,2.09,2.33,1.79,3.54,1.75,2.06,2.12,1.82,1.97,2.18,1.98,3.06,2.22,2.37,2.33,1.75,2.99,3.48,2.65,3.1,1.78,3.25,3.97,3.88 377 | 1.82,1.97,1.85,1.94,2.4,2.59,1.59,2.64,1.42,2.58,2.73,2.03,2.61,1.88,3.0,2.87,2.42,2.95,6.3,6.71,6.08,2.07,2.3,1.67,3.42,1.66,1.97,2.06,1.76,1.89,2.1,1.92,3.02,2.19,2.36,2.29,1.74,2.93,3.48,2.64,3.07,1.74,3.23,3.98,3.81 378 | 1.74,1.88,1.74,1.85,2.29,2.51,1.54,2.59,1.35,2.49,2.63,2.03,2.57,1.81,2.98,2.87,2.43,2.8,6.31,6.71,6.07,2.03,2.26,1.58,3.31,1.55,1.89,1.98,1.71,1.84,2.01,1.85,2.94,2.2,2.35,2.27,1.72,2.87,3.47,2.57,3.03,1.6,3.22,3.85,3.72 379 | 1.68,1.82,1.7,1.81,2.24,2.45,1.49,2.49,1.31,2.46,2.58,2.02,2.54,1.78,2.96,2.84,2.34,2.75,6.32,6.7,6.02,2.01,2.24,1.55,3.27,1.52,1.87,1.96,1.69,1.81,1.99,1.82,2.87,2.19,2.3,2.26,1.71,2.84,3.41,2.56,3.03,1.54,3.18,3.92,3.67 380 | 1.6,1.75,1.62,1.74,2.18,2.38,1.43,2.59,1.26,2.41,2.53,1.98,2.51,1.73,2.93,2.82,2.26,2.7,6.32,6.7,5.99,2.0,2.22,1.52,3.24,1.47,1.84,1.92,1.66,1.78,1.93,1.78,2.83,2.15,2.27,2.22,1.7,2.8,3.37,2.55,3.01,1.47,3.15,3.89,3.63 381 | 1.51,1.68,1.55,1.68,2.12,2.34,1.36,2.59,1.23,2.37,2.49,1.94,2.5,1.66,2.92,2.78,2.21,2.64,6.33,6.69,5.95,1.97,2.2,1.49,3.2,1.42,1.79,1.88,1.61,1.75,1.88,1.68,2.79,1.83,2.26,2.15,1.67,2.78,3.36,2.51,2.97,1.46,3.15,3.84,3.58 382 | 1.4,1.59,1.5,1.59,2.04,2.3,1.3,2.59,1.18,2.32,2.46,1.91,2.41,1.63,2.9,2.67,2.16,2.59,6.33,6.68,5.94,1.95,2.17,1.45,3.16,1.38,1.76,1.83,1.58,1.69,1.81,1.63,2.74,1.81,2.24,2.09,1.65,2.75,3.3,2.5,2.94,1.42,3.0,3.81,3.53 383 | 1.44,1.62,1.53,1.62,2.08,2.34,1.35,2.64,1.21,2.34,2.5,1.9,2.34,1.66,2.85,2.63,2.09,2.63,6.34,6.67,5.78,1.91,2.12,1.48,3.22,1.4,1.79,1.85,1.62,1.72,1.85,1.67,2.69,1.77,2.15,2.07,1.62,2.71,3.26,2.47,2.88,1.4,2.97,3.78,3.56 384 | 1.37,1.67,1.57,1.64,2.11,2.38,1.39,2.73,1.26,2.38,2.55,1.88,2.32,1.7,2.79,2.6,2.04,2.66,6.33,6.65,5.67,1.9,2.1,1.52,3.26,1.44,1.81,1.87,1.67,1.75,1.9,1.71,2.67,1.74,2.08,2.05,1.59,2.69,3.23,2.38,2.87,1.34,2.95,3.76,3.59 385 | 1.3,1.59,1.54,1.51,1.91,2.3,1.36,2.49,1.38,1.98,2.58,1.64,2.11,1.72,2.7,2.36,1.96,2.49,6.37,6.66,5.32,1.73,1.75,1.2,3.29,1.47,1.84,1.9,1.2,1.59,1.55,1.38,2.69,1.23,1.85,1.96,1.1,2.37,3.05,1.99,2.78,1.29,1.53,3.69,3.44 386 | 1.22,1.13,1.25,1.27,1.82,2.06,1.08,2.49,1.25,1.88,2.54,1.49,1.49,1.74,2.67,2.37,1.69,2.3,6.38,6.66,5.13,1.42,1.61,1.02,3.04,1.25,1.49,1.52,1.05,1.51,1.31,0.95,2.55,1.2,1.49,1.43,0.89,1.89,2.8,1.98,2.49,1.24,1.46,3.68,3.03 387 | 1.09,1.11,1.11,1.32,1.7,2.0,0.96,2.51,1.01,1.78,2.38,1.34,1.53,1.46,2.65,2.34,1.71,2.16,6.42,6.67,4.96,1.34,1.64,0.8,2.84,1.08,1.3,1.35,0.85,1.37,1.22,0.63,2.45,1.19,1.25,1.31,0.86,1.92,2.68,2.06,2.12,1.16,1.32,3.71,2.86 388 | 
1.13,0.97,1.17,1.36,1.73,2.02,0.88,2.51,0.41,1.68,2.16,1.04,1.56,1.28,2.53,2.25,1.74,2.24,6.45,6.68,4.94,1.13,1.48,0.71,2.59,0.9,1.35,1.23,0.7,1.29,1.25,0.34,2.53,1.15,1.2,1.25,0.93,1.76,2.66,2.11,2.17,0.94,1.17,3.62,2.92 389 | 1.19,0.96,1.24,1.4,1.79,2.05,0.91,2.78,0.23,1.68,2.03,0.92,1.32,1.32,2.43,2.24,1.4,2.29,6.48,6.68,4.9,1.16,1.44,0.77,2.65,0.95,1.41,1.27,0.78,1.33,1.28,0.27,2.44,1.09,1.16,1.23,0.98,1.68,2.55,1.7,2.0,0.82,1.06,3.83,2.99 390 | 1.25,0.87,1.32,1.14,1.83,2.09,0.95,2.42,0.34,1.48,2.08,0.87,1.27,1.38,2.41,2.07,1.48,2.37,6.5,6.69,4.82,1.17,1.47,0.84,2.44,1.02,1.45,1.3,0.89,1.41,1.0,0.37,2.3,1.0,1.23,1.11,0.77,1.51,2.48,1.65,1.91,0.77,1.04,3.85,2.73 391 | 0.91,0.49,0.79,0.88,1.69,1.55,0.71,1.91,0.46,1.53,2.16,0.96,1.04,1.03,2.43,1.94,1.31,2.42,6.49,6.66,4.53,1.13,1.46,0.88,2.49,0.77,1.17,1.1,0.95,1.19,0.88,0.63,2.13,0.87,1.32,1.08,0.61,1.45,2.11,1.66,1.96,0.71,1.13,3.76,2.87 392 | 0.85,0.6,0.31,0.7,1.45,1.64,0.83,2.19,0.62,1.58,2.23,0.99,1.11,1.11,2.46,1.86,1.23,2.1,6.48,6.65,4.47,1.08,1.31,0.51,2.54,0.83,1.22,1.07,1.06,1.24,0.97,0.79,2.21,0.82,1.41,1.13,0.6,1.48,2.05,1.69,1.98,0.75,1.15,3.63,2.96 393 | 0.75,0.67,0.51,0.78,1.24,1.82,0.95,2.94,0.8,1.5,1.85,1.01,1.26,1.18,2.47,1.88,1.18,2.19,6.46,6.65,4.34,1.02,1.35,0.58,2.59,0.89,1.25,1.06,0.72,1.3,1.08,0.87,2.29,0.91,1.28,1.04,0.53,1.51,2.0,1.82,2.05,0.85,1.19,3.55,3.03 394 | 0.98,0.82,0.21,0.97,1.38,1.6,1.0,3.02,0.98,1.48,1.93,0.95,1.38,1.24,2.5,2.0,1.09,2.3,6.46,6.64,4.3,1.06,1.37,0.62,2.59,0.96,1.32,1.08,0.78,1.34,1.15,0.92,2.36,0.93,1.39,0.96,0.64,1.6,1.92,1.86,2.02,0.91,1.25,3.47,3.15 395 | 1.04,0.95,0.32,1.1,1.5,1.66,0.76,3.01,1.05,1.43,2.01,0.98,1.6,1.29,2.51,2.03,1.06,2.37,6.45,6.63,4.21,1.11,1.4,0.67,2.64,1.05,1.36,1.1,0.83,1.39,1.24,1.02,2.21,0.97,1.37,1.02,0.66,1.68,1.97,1.91,1.93,0.97,1.28,3.49,2.81 396 | 1.18,0.99,0.91,1.14,1.55,1.75,0.87,2.95,1.14,1.52,2.07,1.03,1.83,1.33,2.56,2.05,1.11,2.42,6.43,6.63,4.22,1.1,1.45,0.75,2.69,1.11,1.39,1.12,0.89,1.49,1.36,1.09,2.07,1.0,1.39,1.12,0.67,1.71,2.0,1.94,1.94,0.94,1.31,3.52,2.92 397 | 1.27,0.87,0.81,1.25,1.66,1.89,1.01,3.07,0.98,1.61,2.19,0.91,1.82,1.0,2.59,2.08,1.14,2.53,6.44,6.64,4.01,0.93,1.39,0.89,2.79,1.2,1.35,1.2,1.09,1.34,1.57,1.17,2.1,1.03,1.45,1.12,0.72,1.58,1.87,2.02,1.8,1.01,1.44,3.65,3.05 398 | 1.02,1.13,0.83,1.36,1.25,2.01,1.23,2.79,1.24,1.43,2.41,0.9,1.83,1.13,2.62,2.11,1.23,2.15,6.42,6.65,3.96,0.96,1.33,0.98,2.92,1.32,1.57,1.27,1.18,1.41,1.68,1.34,2.21,1.11,1.46,0.94,0.78,1.53,1.73,2.05,1.67,1.11,1.58,3.41,3.16 399 | 0.9,1.19,0.94,0.95,1.36,1.72,1.32,2.89,0.61,1.52,1.97,0.86,1.82,1.25,2.66,2.17,1.27,2.27,6.41,6.65,3.93,1.01,1.36,1.2,2.98,1.4,1.8,1.29,0.85,1.53,1.79,1.4,2.34,1.15,1.64,1.01,0.94,1.63,1.64,1.87,1.72,1.32,1.72,3.43,3.23 400 | 0.98,1.01,0.41,1.11,1.49,1.51,1.1,2.69,0.74,1.65,2.08,0.99,1.89,1.42,2.67,2.23,1.45,2.38,6.4,6.66,3.87,1.08,1.44,1.45,2.6,1.55,1.93,1.3,0.94,1.72,1.87,1.07,2.2,1.22,1.69,1.18,1.05,1.82,1.7,1.83,1.89,1.44,1.8,3.45,3.35 401 | 1.09,0.95,0.52,1.22,1.58,1.57,1.22,2.49,1.18,1.78,2.19,1.08,1.92,1.46,2.71,2.25,1.51,2.46,6.4,6.67,3.88,1.21,1.41,1.57,2.72,1.25,1.52,1.32,1.12,1.83,1.53,1.24,2.0,1.3,1.77,1.26,1.1,1.86,1.85,1.64,2.02,1.54,1.85,3.5,3.08 402 | 1.2,0.94,0.63,0.86,1.31,1.66,1.36,2.39,1.35,1.92,1.9,1.02,1.97,1.19,2.63,2.16,1.61,2.23,6.39,6.68,3.9,1.17,1.28,1.68,2.79,1.39,1.74,1.35,1.34,1.59,1.71,1.32,2.08,1.24,1.76,1.19,1.06,1.92,1.92,1.67,2.03,1.58,1.72,3.48,3.23 403 | 
1.23,0.82,0.67,1.0,1.41,1.72,1.44,2.69,1.01,1.97,1.97,1.01,1.84,1.3,2.55,2.08,1.55,2.39,6.39,6.68,3.95,1.1,1.25,1.75,2.89,1.5,1.79,1.38,1.4,1.69,1.76,0.9,2.19,1.02,1.91,1.14,0.88,1.96,2.04,1.84,2.13,1.34,1.78,3.63,2.96 404 | 1.3,0.95,0.72,1.07,1.54,1.8,1.51,2.84,1.14,2.01,2.05,0.93,1.81,1.41,2.57,2.1,1.5,2.5,6.38,6.69,3.97,1.11,1.32,1.83,2.95,1.56,1.86,1.42,1.46,1.79,1.81,0.96,2.27,0.89,1.98,1.18,0.8,2.07,2.07,2.03,2.21,1.23,1.83,3.68,3.07 405 | 1.34,1.01,0.79,1.19,1.71,1.83,1.59,2.95,1.42,2.06,2.13,0.97,1.86,1.47,2.62,2.15,1.51,2.59,6.38,6.7,4.0,1.21,1.44,1.9,3.03,1.63,1.97,1.5,1.51,1.84,1.9,1.13,2.34,0.83,2.03,1.32,0.95,2.13,2.13,2.19,2.27,1.27,1.9,3.82,3.15 406 | 1.38,1.09,0.84,1.24,1.79,1.97,1.66,3.02,1.55,2.13,2.24,1.04,1.9,1.53,2.71,2.19,1.64,2.7,6.36,6.7,4.04,1.23,1.51,1.98,3.09,1.7,2.08,1.67,1.59,1.87,1.97,1.18,2.37,0.95,2.08,1.35,1.0,2.18,2.23,2.25,2.39,1.4,1.96,3.85,3.24 407 | 1.4,1.2,0.89,1.35,1.88,2.06,1.71,2.74,1.68,2.17,2.35,1.15,1.98,1.58,2.77,2.25,1.71,2.84,6.35,6.71,4.05,1.26,1.58,2.07,3.16,1.78,2.19,1.73,1.66,1.9,2.03,1.26,2.41,1.09,2.14,1.48,1.04,2.22,2.38,2.33,2.41,1.45,2.04,3.88,3.36 408 | 1.43,1.32,0.96,1.43,1.99,2.12,1.79,2.81,1.01,2.2,2.13,1.29,2.03,1.7,2.8,2.29,1.76,2.91,6.34,6.71,4.16,1.29,1.75,2.14,3.24,1.87,2.28,1.78,1.75,1.94,2.1,1.35,2.47,1.26,2.19,1.55,1.17,2.29,2.5,2.38,2.43,1.43,2.09,3.91,3.47 409 | 1.13,1.6,1.71,1.58,2.11,2.21,1.94,3.49,1.13,2.37,2.33,1.35,2.04,1.78,2.81,2.37,2.09,3.04,6.39,6.72,4.22,1.37,1.83,2.45,3.32,2.0,2.35,1.87,1.86,1.99,2.18,1.51,2.51,1.31,2.24,1.68,1.24,2.51,2.59,2.1,2.6,1.97,2.24,3.81,3.59 410 | 1.14,1.73,1.82,1.7,2.24,2.29,2.03,3.58,1.2,2.43,2.57,1.46,2.1,1.95,2.83,2.4,2.27,3.15,6.42,6.72,4.27,1.44,1.87,2.59,3.39,2.09,2.52,2.09,1.97,2.07,2.26,1.58,2.55,1.44,2.27,1.81,1.33,2.67,2.68,2.21,2.74,2.15,2.35,3.84,3.76 411 | 1.17,2.03,1.91,1.77,2.47,2.42,2.17,3.34,1.28,2.51,2.72,1.6,2.18,2.07,2.83,2.41,2.5,3.28,6.5,6.73,4.31,1.55,1.94,2.64,3.48,2.17,2.61,2.16,2.14,2.15,2.43,1.68,2.62,1.58,2.29,1.84,1.38,2.78,2.72,2.23,2.93,2.29,2.55,3.89,3.88 412 | 1.2,2.2,2.01,1.89,2.56,2.6,2.23,3.29,1.36,2.58,2.88,1.72,2.22,2.19,2.88,2.32,2.65,3.36,6.55,6.73,4.34,1.67,2.1,2.76,3.65,2.34,2.73,2.28,2.27,2.25,2.55,1.8,2.72,1.65,2.33,1.9,1.43,2.88,2.75,2.26,3.1,2.36,2.7,3.95,3.96 413 | 1.15,2.32,2.11,2.01,2.3,2.66,2.34,3.27,1.49,2.64,3.03,1.75,2.24,2.25,2.92,2.34,2.68,3.53,6.61,6.75,4.35,1.68,2.21,2.88,3.81,2.39,2.82,2.37,2.38,2.09,2.61,1.9,2.8,1.7,2.39,1.91,1.47,2.93,2.83,2.31,3.13,2.4,2.79,4.0,4.02 414 | 1.11,2.44,2.19,2.08,2.01,2.42,2.11,3.37,1.65,2.7,3.12,1.83,2.37,2.4,2.97,2.36,2.73,3.39,6.66,6.76,4.4,1.82,2.3,3.0,3.54,2.3,2.7,2.3,2.45,1.79,2.74,2.02,2.87,1.77,2.41,1.96,1.68,2.94,3.01,2.37,3.19,2.46,2.85,4.03,4.14 415 | 0.91,2.37,2.01,1.94,1.91,2.62,2.03,3.34,1.55,2.64,3.19,1.86,2.4,2.09,3.0,2.28,2.76,2.89,6.65,6.74,4.46,1.83,2.24,2.8,3.42,2.15,2.57,2.13,2.48,1.68,2.79,2.07,2.93,1.8,2.29,2.0,1.7,2.86,3.03,2.38,3.21,2.01,2.79,4.04,3.46 416 | 0.8,2.15,1.93,1.83,1.72,2.72,1.96,3.29,0.93,2.35,2.81,1.88,2.43,1.8,3.02,2.24,2.71,2.69,6.64,6.73,4.35,1.64,1.88,2.51,3.03,2.09,2.43,2.1,2.54,1.59,2.58,1.85,3.03,1.46,2.04,1.68,1.62,2.7,3.06,2.34,3.08,1.78,2.68,3.96,3.35 417 | 0.68,2.0,1.88,1.67,1.46,2.82,1.94,3.29,0.45,2.12,2.55,1.75,2.27,1.62,3.01,2.09,2.43,2.5,6.64,6.71,4.21,1.41,1.6,2.44,2.8,1.97,2.27,1.93,2.2,1.47,2.3,1.65,3.15,1.31,1.93,1.59,1.5,2.45,2.95,2.29,2.75,1.49,2.54,3.81,3.19 418 | 
0.56,1.94,1.61,1.54,1.49,2.92,1.87,3.24,0.27,1.96,2.33,1.62,1.94,1.44,2.89,2.04,1.92,2.39,6.63,6.7,4.2,1.23,1.38,2.25,2.82,1.85,2.11,1.76,2.0,1.35,2.3,1.5,3.32,1.14,1.71,1.32,0.96,2.32,2.71,2.2,2.53,1.3,2.41,3.62,2.92 419 | 0.63,1.53,1.52,1.39,1.51,3.02,1.91,3.31,0.27,1.98,2.14,1.11,1.66,1.28,2.84,1.98,1.77,2.09,6.62,6.69,4.13,1.22,1.33,2.07,2.83,1.74,1.98,1.53,1.81,1.24,2.12,1.25,3.15,0.95,1.56,1.13,0.91,2.13,2.46,2.16,2.32,1.1,2.29,3.58,2.69 420 | 0.7,1.29,1.49,1.43,1.54,3.07,1.94,3.2,0.34,2.01,2.16,1.0,1.4,1.31,2.76,1.95,1.54,2.09,6.6,6.68,4.17,0.98,1.27,2.1,2.85,1.65,1.95,1.42,1.62,1.27,2.0,0.93,2.97,0.84,1.18,1.01,0.84,1.92,2.2,2.07,2.17,1.05,2.25,3.56,2.73 421 | 0.75,1.04,1.64,1.1,1.58,3.09,1.97,3.0,0.36,2.03,2.17,0.87,1.12,1.29,2.54,2.13,1.39,2.17,6.64,6.69,3.94,0.94,1.2,2.16,2.89,1.7,1.97,1.52,1.7,1.28,1.92,0.96,2.76,0.87,1.3,0.83,0.72,1.86,2.24,1.34,1.65,0.89,1.73,3.45,2.76 422 | 0.78,1.1,1.69,1.13,1.62,3.13,2.0,3.07,0.4,2.06,2.2,0.92,1.04,1.32,2.57,2.16,1.41,2.22,6.66,6.7,3.87,0.73,1.19,2.21,2.92,1.73,1.99,1.57,1.73,1.29,1.95,1.0,2.68,0.92,1.37,0.86,0.73,1.9,2.27,1.46,1.72,0.88,1.46,3.49,2.83 423 | 0.83,1.17,1.72,1.19,1.67,3.17,2.04,3.11,0.42,2.09,2.25,1.0,1.05,1.35,2.68,2.14,1.48,2.29,6.67,6.7,3.91,0.76,1.26,2.25,2.96,1.73,2.02,1.65,1.75,1.32,1.96,1.04,2.7,0.96,1.44,0.91,0.8,1.92,2.3,1.59,1.78,0.95,1.48,3.6,2.88 424 | 0.88,1.21,1.76,1.22,1.74,3.2,2.07,3.29,0.45,2.13,2.29,1.01,1.09,1.41,2.7,2.1,1.5,2.37,6.7,6.72,4.0,0.81,1.28,2.3,3.01,1.78,2.04,1.67,1.78,1.37,1.98,1.07,2.72,1.0,1.51,0.97,0.83,1.96,2.32,1.61,1.81,0.99,1.49,3.68,2.92 425 | 0.95,1.24,1.78,1.27,1.81,3.23,2.12,3.38,0.5,2.18,2.33,1.03,1.2,1.48,2.77,2.07,1.53,2.42,6.72,6.72,4.01,0.83,1.34,2.34,3.03,1.82,2.07,1.72,1.83,1.42,2.05,1.11,2.8,1.01,1.66,1.0,0.87,2.03,2.37,1.64,1.83,1.01,1.52,3.73,2.99 426 | 0.99,1.28,1.82,1.31,1.87,3.27,2.15,3.41,0.55,2.24,2.38,1.04,1.23,1.56,2.78,2.09,1.56,2.49,6.72,6.72,4.04,0.88,1.36,2.39,3.11,1.86,2.12,1.79,1.85,1.48,2.12,1.14,2.87,1.04,1.78,1.01,0.89,2.06,2.44,1.65,1.85,1.08,1.53,3.75,3.03 427 | 1.04,1.32,1.85,1.34,1.91,3.31,2.17,3.45,0.61,2.26,2.43,1.11,1.33,1.62,2.8,2.14,1.63,2.55,6.73,6.71,4.07,1.02,1.44,2.42,3.14,1.9,2.16,1.81,1.89,1.54,2.15,1.18,2.93,1.19,1.82,1.07,0.93,2.13,2.47,1.67,1.98,1.14,1.61,3.82,3.13 428 | 1.09,1.37,1.89,1.4,1.94,3.35,2.2,3.54,0.65,2.28,2.47,1.13,1.4,1.69,2.81,2.24,1.71,2.63,6.74,6.73,4.12,1.04,1.55,2.5,3.19,1.95,2.22,1.82,1.93,1.59,2.18,1.25,2.97,1.3,1.83,1.16,0.97,2.17,2.58,1.79,2.03,1.24,1.7,3.87,3.28 429 | 1.12,1.41,1.94,1.42,1.99,3.38,2.24,3.6,0.7,2.32,2.53,1.17,1.47,1.75,2.81,2.32,1.73,2.71,6.74,6.75,4.16,1.09,1.57,2.58,3.24,2.01,2.31,1.84,1.97,1.63,2.21,1.3,3.01,1.33,1.85,1.21,1.1,2.2,2.63,1.84,2.05,1.27,1.88,3.94,3.33 430 | 1.15,1.45,1.98,1.46,2.03,3.42,2.28,3.39,0.76,2.37,2.58,1.22,1.56,1.81,2.83,2.37,1.76,2.79,6.76,6.75,4.23,1.13,1.69,2.64,3.28,2.05,2.39,1.86,2.06,1.67,2.24,1.36,3.07,1.39,1.88,1.33,1.19,2.25,2.65,1.95,2.1,1.4,2.03,4.0,3.41 431 | 1.17,1.49,2.03,1.51,2.08,3.46,2.35,3.36,0.84,2.41,2.62,1.3,1.62,1.9,2.85,2.39,1.85,2.87,6.77,6.74,4.3,1.2,1.76,2.67,3.34,2.11,2.46,1.9,2.11,1.71,2.28,1.43,3.12,1.46,1.95,1.4,1.27,2.28,2.68,1.99,2.21,1.43,2.15,4.06,3.46 432 | 1.2,1.56,2.08,1.57,2.16,3.5,2.39,3.19,0.92,2.46,2.69,1.38,1.69,1.98,2.9,2.41,1.96,2.95,6.77,6.74,4.31,1.33,1.85,2.73,3.42,2.16,2.51,1.93,2.16,1.78,2.36,1.52,3.15,1.51,2.01,1.48,1.36,2.33,2.73,2.13,2.24,1.47,2.26,4.1,3.55 433 | -------------------------------------------------------------------------------- /dataset/W_45.csv: 
-------------------------------------------------------------------------------- 1 | 0.0,6708.365797898137,11601.986087161604,9361.165414783894,18982.241342355912,26258.70813429055,25489.825108389923,24429.243973609147,28637.07115564252,32190.232692812326,34049.38284435978,31470.09926202608,38874.62444028909,40458.9502929145,42668.81616581752,39101.0438265816,47028.296739003024,46806.59412639704,48313.28270533624,50470.22410824943,53696.09203543982,58905.203586041454,56395.42553249089,7231.2375626227595,11336.706315957805,7232.952761662901,9172.045373111261,5727.103493717893,15050.10479168996,13475.869354726592,18071.438776719606,14875.677689175516,18071.54109663586,21119.620452641848,24713.280650460627,24106.301141136122,25490.934152389087,31201.200257879987,35423.194749052505,37367.15100398333,39502.25480412504,45286.71635985335,48212.2270731759,42669.437603283666,47618.61934525607 2 | 6708.365797898137,0.0,5727.044430449191,6961.822098224812,14875.24091959733,20329.248906757526,20554.16570072704,20410.676572215,24105.354528909407,26845.79869473992,29650.998441294054,28327.553463638524,35616.12360696504,35615.114520553594,38974.66614272987,35199.24650021542,41890.418522601154,42598.37189027117,44448.77036896321,46298.609448998766,48858.62803698478,53696.185878019234,52000.863750143704,13756.357823635299,17675.959054699615,12724.050303382874,13989.228535777147,7236.17173232512,19674.978658632986,16365.221241776919,20416.51125014738,13128.763368242571,18075.307848250653,20077.620550608863,25730.285037726822,21533.798517276657,24716.479249831857,30154.89443979975,32964.41772915973,36366.9536544659,37368.02672475746,44203.82873707977,45218.05822512627,39774.6524513783,42943.05240595746 3 | 11601.986087161604,5727.044430449191,0.0,5599.786515216532,9670.401534530194,14730.210259300333,14875.015082367516,15071.897924538318,18511.090472454045,21119.88662734788,24064.32222532437,23293.58904615887,30437.195592313954,29914.672339256635,33584.80063415228,29750.29902351048,36163.67894748827,37021.69186324972,38974.23563726279,40723.75348666099,43148.88365753421,47970.25358024292,46361.6384228628,17862.454683186817,21309.78912200382,15831.187367461838,16365.101558567794,9177.544391798248,21318.686748335032,16799.843183182413,19951.468066661775,10072.265386778241,15843.686317041733,16803.619600084945,23770.645789693805,17029.72808004978,21376.5071503487,26413.51783185457,28240.465536555355,32514.51931680857,32772.644159357544,40179.27595521697,40096.24360618104,34737.92795087003,37259.94889187949 4 | 9361.165414783894,6961.822098224812,5599.786515216532,0.0,9670.538631163272,18081.130440199428,16383.027107946173,15072.111652875205,19341.84747679117,23246.37477886356,24709.687081175925,22258.166205608013,29651.661460985186,31202.918090804695,33349.96952516098,29750.721189132175,37866.54655639134,37445.4329832501,38974.788929170696,41109.64397376296,44410.172331966576,49746.89363389072,47037.98679650291,13922.204202702153,16789.8642657163,11194.704185804032,11261.045284860464,4641.507882864329,15835.288999819135,11200.059132220596,14484.91359719993,6219.959835104025,11201.035651427277,13131.808811789837,18997.473726759978,15072.328877895692,17764.071885107718,23247.71665072837,26516.70458032929,29468.526323874485,30791.72733836676,37334.89139306511,39101.67595209996,33585.76664514611,38303.148661299434 5 | 
18982.241342355912,14875.24091959733,9670.401534530194,9670.538631163272,0.0,10489.314396196745,6966.75934316453,5553.383543454446,9671.310328317071,13935.257776457382,15072.363754691412,13624.382499185078,20793.28076402585,21537.386649825803,24105.354401601937,20362.191712148157,28244.567857261038,27907.977692129072,29617.538365609427,31593.52854714253,34739.76809316252,40117.63393501155,37445.47893966098,23239.455518335246,25529.903271757303,20076.218833991832,19446.493431763887,14117.313983851169,22347.80917567449,16790.53532554375,17708.371288857248,6717.833245885822,11364.434418516286,9986.581318626851,18319.26521242762,7893.202694398399,13788.499270945598,17893.04818853864,18667.220407072098,23689.249986514296,23253.967448206906,31073.41761867714,30439.291032952457,25067.841303657777,28640.082984836754 6 | 26258.70813429055,20329.248906757526,14730.210259300333,18081.130440199428,10489.314396196745,0.0,5907.697204201911,9990.9484823372,8946.607391052989,7535.894561983037,13137.403377843295,16810.265086939766,21786.820521434838,16894.212720136464,23255.438297308043,19342.34848653329,22256.353889402006,25067.465421831414,27717.803259028507,28640.409376191612,29615.937425992604,33788.3983155117,33583.94549839745,31940.729391311506,34846.97124596267,29256.537364370946,29067.448899679457,22693.552729333518,32596.68396791058,27116.88775179997,28192.44821421692,17104.479445620113,21749.434504327113,19718.558021222485,28106.6002923623,15539.107939784346,22518.009339110708,24976.325298002277,22380.23525559395,29651.32962837476,26853.143185166566,35839.095257657646,30916.10477948144,26431.19707649051,24226.64518306768 7 | 25489.825108389923,20554.16570072704,14875.015082367516,16383.027107946173,6966.75934316453,5907.697204201911,0.0,4208.291141225925,3959.134123285223,6968.500066308214,9361.20846751064,11213.274805792089,17037.72008400249,15072.326463683086,19344.618791567787,15396.153238150937,21538.511728782196,22257.084608216745,24430.28670541481,25952.822178491857,28328.64697496173,33415.93466296175,31498.95004767518,30148.652334521274,32496.648556298605,27034.429546902196,26405.329523186592,20948.87203793474,29072.629222858344,23478.13791918863,23779.832241654756,13489.575734599897,17108.467387998324,14504.905414845436,22742.996701786018,9819.485432187814,16833.707355568764,19070.01564977566,16822.460105764123,23807.893919555663,21398.583584258227,30213.43503828152,26560.201683277814,21654.514395630227,22389.468247233905 8 | 24429.243973609147,20410.676572215,15071.897924538318,15072.111652875205,5553.383543454446,9990.9484823372,4208.291141225925,0.0,4645.5708168812935,10086.08873233556,9671.85544858565,8517.938356228811,15396.482512004648,16388.566325995907,18564.237212767144,14808.808174082254,23256.590820893583,22389.93940866311,24064.15483490219,26066.976867640955,29456.88089063339,35061.126637329915,31966.34031640617,28234.49868137,30149.310896333263,24880.62527905951,23950.407165419205,19341.396490003357,25970.26422656356,20389.257487489584,20157.612752957055,10821.994873689735,13440.564247707598,10492.149591411526,18604.512852787,5611.194828175213,12625.418286577229,15092.502052260876,13939.62610282494,20178.76718026732,18587.978946414147,27007.184868123477,25069.142533815288,19798.696549269,23296.90280098823 9 | 
28637.07115564252,24105.354528909407,18511.090472454045,19341.84747679117,9671.310328317071,8946.607391052989,3959.134123285223,4645.5708168812935,0.0,5911.760392705357,5553.231760607663,7935.735186695602,13140.620586384963,11878.711565401589,15396.76667543759,11456.228211542973,18670.745344658135,18510.601430521012,20554.87310491695,22212.663066661993,25068.76253715021,30512.709379675318,27907.575026981405,32763.975897288365,34768.89885581009,29461.159218905996,28578.306268663284,23751.904038973815,30583.988344098983,25012.7685279342,24589.60370721261,15449.827830939712,17890.031290208488,14636.42083776038,22307.358166857528,9196.94442188453,15872.666927767435,16945.845863743496,13454.466285636532,21007.643143611505,17906.547084257716,26919.105979646956,22614.260756304822,17776.90469319726,18984.383101175845 10 | 32190.232692812326,26845.79869473992,21119.88662734788,23246.37477886356,13935.257776457382,7535.894561983037,6968.500066308214,10086.08873233556,5911.760392705357,0.0,6726.947600334421,12770.868389501506,15668.152390019732,9361.099860223696,16267.389299639193,12517.736806225965,15072.29471474105,17582.195374109615,20341.055870242188,21123.446555439168,22256.3585716411,26850.41344521631,26066.27170061651,37080.50962270494,39465.126943296156,33997.65015491699,33369.428152214154,27848.411826633535,35892.82432435263,30294.782248780546,30243.6838714754,20395.58906410334,23526.643854211714,20452.59818601821,28218.015257433486,15091.848020479672,21770.088217685174,22461.74323393798,17743.819510933252,25881.987152727233,21735.35759968085,30945.89489170155,24118.62224582383,20168.766026820467,16718.28744662675 11 | 34049.38284435978,29650.998441294054,24064.32222532437,24709.687081175925,15072.363754691412,13137.403377843295,9361.20846751064,9671.85544858565,5553.231760607663,6726.947600334421,0.0,7261.762646741488,8955.175018212973,6971.690043383727,10170.499977525427,6223.756770327691,13945.117690998408,12957.369674093621,15073.109393857132,16659.431312506043,19800.612863413327,25566.048516558636,22389.775511409392,37860.81853040728,39609.97485702361,34449.97606688836,33392.105613180116,29013.247491677903,34832.381995082964,29365.012615864005,28289.073380829686,20334.966079864735,21780.868607437686,18094.952746600302,24719.218873654492,12513.86729063958,18002.323036880876,17252.714498604346,11389.36969264017,19753.740956690806,15108.670754486071,24318.13862742015,17780.90529400744,13507.504552444949,13626.259111718642 12 | 31470.09926202608,28327.553463638524,23293.58904615887,22258.166205608013,13624.382499185078,16810.265086939766,11213.274805792089,8517.938356228811,7935.735186695602,12770.868389501506,7261.762646741488,0.0,7404.525208581468,13458.870583607619,11456.763581379251,8519.965594875272,20171.430197215883,16391.79768961086,17185.40843406416,19800.731058540307,24744.824995781302,31092.47953511015,25999.958846792768,34050.3371893705,35156.91640467411,30436.969125914424,29013.756600124383,25954.424752972573,29465.49290682096,24286.255638500734,22406.06185913492,16719.46546201375,16386.62443266365,12444.690086121856,17896.19945585104,7536.472197031045,11213.763395320273,10001.386570243341,5609.188027605166,13172.611218146314,10206.682817563864,19035.284822037534,16895.780283481316,11456.930681649574,18594.008959549083 13 | 
38874.62444028909,35616.12360696504,30437.195592313954,29651.661460985186,20793.28076402585,21786.820521434838,17037.72008400249,15396.482512004648,13140.620586384963,15668.152390019732,8955.175018212973,7404.525208581468,0.0,11244.744001367215,4649.021788584348,4216.854313842975,16433.826615770075,10211.108902286524,10171.894610783198,13146.127752577113,19125.99785098268,25799.556176890324,19353.710051642825,41325.284073670584,42242.7717011435,37681.972190954875,36165.307697022756,33350.5972218405,36153.43120766891,31204.724113561522,28830.207159478112,24105.961261827986,23296.33955752669,19344.729479625017,23233.56278257391,14875.608332162867,17038.462712328455,13504.435540004546,5610.765753662962,13178.701147142092,7028.296174215143,15903.445533038888,9673.657850306554,4649.089618850838,13468.250289029094 14 | 40458.9502929145,35615.114520553594,29914.672339256635,31202.918090804695,21537.386649825803,16894.212720136464,15072.326463683086,16388.566325995907,11878.711565401589,9361.099860223696,6971.690043383727,13458.870583607619,11244.744001367215,0.0,9212.226482623999,7027.890452377656,6973.429270358641,8521.689290372724,11623.750189012331,11880.799400494552,13258.864867924602,18675.856389657554,16718.331085346832,44616.32124718301,46493.04990102659,41266.549288275826,40279.870740111575,35630.60412677046,41804.04750855937,36332.06838318571,35222.963837106174,27177.27620776983,28746.134061850098,25029.135349006632,31324.78483363786,19468.17988510907,24612.63207094542,23014.3863421203,15894.969208984165,24175.034281558994,18273.036860896416,26969.358230823094,16836.897794783014,14538.811156458318,7404.02687029572 15 | 42668.81616581752,38974.66614272987,33584.80063415228,33349.96952516098,24105.354401601937,23255.438297308043,19344.618791567787,18564.237212767144,15396.76667543759,16267.389299639193,10170.499977525427,11456.763581379251,4649.021788584348,9212.226482623999,0.0,3960.146708377567,12792.667783890045,5613.715595225669,5728.6455390806195,8522.881988148354,14573.363015507868,21287.321846645562,14745.111764554333,45499.74873168537,46589.01634819923,41892.62408523768,40462.072209907776,37261.4891690933,40671.69444879148,35634.24200407884,33401.484116325744,28084.62383609572,27716.94869870959,23757.996025098135,27882.577313272817,18984.81835796862,21655.14267923527,18106.322367333996,10208.932173115612,17143.91945113707,10515.321427440436,18283.009207126965,7906.1795454986495,5625.35337645742,9216.090014675035 16 | 39101.0438265816,35199.24650021542,29750.29902351048,29750.721189132175,20362.191712148157,19342.34848653329,15396.153238150937,14808.808174082254,11456.228211542973,12517.736806225965,6223.756770327691,8519.965594875272,4216.854313842975,7027.890452377656,3960.146708377567,0.0,12544.743623200104,7920.440500032541,9255.346671462155,11457.095813631046,16274.598801342856,22739.86589836831,17584.48646671137,42241.837927879686,43547.83904113077,38685.52629806178,37368.29171860736,33789.65404116551,37983.71069259435,32773.116691913536,30906.403483939706,24710.819381587735,24890.41544136571,20958.260127594218,25995.80013630241,15838.435231574002,19468.88322699225,16822.850842141383,9208.6902282535,17265.371729667117,11245.1497230846,20029.53852244571,11624.165881869534,7945.186106393793,10210.891354602987 17 | 
47028.296739003024,41890.418522601154,36163.67894748827,37866.54655639134,28244.567857261038,22256.353889402006,21538.511728782196,23256.590820893583,18670.745344658135,15072.29471474105,13945.117690998408,20171.430197215883,16433.826615770075,6973.429270358641,12792.667783890045,12544.743623200104,0.0,8641.29400037405,11848.946886182115,10105.297798355903,7536.400301192392,11881.354193615598,12451.65734144887,51429.933422164,53399.98456794206,48128.19044142944,47193.204528078866,42361.85866470441,48777.44251265734,43302.16680770447,42171.57572135698,34072.91346084521,35715.84487615211,31981.827960874125,38064.44043114757,26433.66532563124,31382.87738582299,29310.76743499916,21748.417881938538,29600.22900770072,23173.327857635668,30998.64559508504,18661.85548998018,18377.557871124183,4608.3837537428535 18 | 46806.59412639704,42598.37189027117,37021.69186324972,37445.4329832501,27907.977692129072,25067.465421831414,22257.084608216745,22389.93940866311,18510.601430521012,17582.195374109615,12957.369674093621,16391.79768961086,10211.108902286524,8521.689290372724,5613.715595225669,7920.440500032541,8641.29400037405,0.0,3368.527174996357,3702.0616409256204,8964.47232772586,15690.510661814178,9673.89744320637,50131.93331318718,51468.25924747316,46589.8996667985,45287.467229722904,41585.916686338096,45843.50935855195,40674.12912486085,38685.80785360054,32577.029011174276,32778.40859331589,28834.473540395713,33400.008383139306,23758.872341247134,27060.675916652417,23706.78959157401,15807.278507011526,22601.453370785548,15893.538052392469,22810.015058701192,10022.842019887015,10519.701593856424,4220.932365124263 19 | 48313.28270533624,44448.77036896321,38974.23563726279,38974.788929170696,29617.538365609427,27717.803259028507,24430.28670541481,24064.15483490219,20554.87310491695,20341.055870242188,15073.109393857132,17185.40843406416,10171.894610783198,11623.750189012331,5728.6455390806195,9255.346671462155,11848.946886182115,3368.527174996357,0.0,3369.1126298511645,10526.237061826758,17166.842834372892,9302.797357536201,51225.85555494655,52309.55025152924,47620.87859424149,46188.161648702335,42944.64893304948,46321.58264349108,41331.34606095824,38999.8561932103,33790.39639291744,33415.04712385688,29456.84151345529,33234.83710480167,24712.11656208676,27185.36532338772,23243.51809836142,15462.439374046822,21275.67651866735,14569.774380717588,20464.647247075234,7036.948681986013,8962.173352733247,7275.233126947547 20 | 50470.22410824943,46298.609448998766,40723.75348666099,41109.64397376296,31593.52854714253,28640.409376191612,25952.822178491857,26066.976867640955,22212.663066661993,21123.446555439168,16659.431312506043,19800.731058540307,13146.127752577113,11880.799400494552,8522.881988148354,11457.095813631046,10105.297798355903,3702.0616409256204,3369.1126298511645,0.0,7278.5319306469755,13836.728384020382,6226.416162001828,53697.216973445524,54947.5965201984,50133.66939425062,48785.62072397796,45218.00512720199,49164.15725547214,44072.33070022399,41919.18726099663,36166.4133484119,36159.843418592805,32204.08622825093,36350.90629911085,27252.49820619964,30175.907527875643,26442.78035230596,18597.757832445925,24636.703469431264,17923.03646955485,23699.600530775773,10025.26253992841,12321.957466081323,5615.17198488566 21 | 
53696.09203543982,48858.62803698478,43148.88365753421,44410.172331966576,34739.76809316252,29615.937425992604,28328.64697496173,29456.88089063339,25068.76253715021,22256.3585716411,19800.612863413327,24744.824995781302,19125.99785098268,13258.864867924602,14573.363015507868,16274.598801342856,7536.400301192392,8964.47232772586,10526.237061826758,7278.5319306469755,0.0,6740.777494316311,5617.455302750725,57660.06124134214,59326.35922800738,54231.978930680765,53110.11333540662,48784.98639658949,54128.091523165596,48809.8569162948,47150.52499661629,40115.94270820165,40999.66018732862,37110.843169410466,42190.44015328183,31779.627531937826,35730.08227668438,32629.81404744517,24735.67433657719,31491.38284680902,24763.217706129893,30967.208684208737,17293.38246124004,19256.19169026103,6225.67630400952 22 | 58905.203586041454,53696.185878019234,47970.25358024292,49746.89363389072,40117.63393501155,33788.3983155117,33415.93466296175,35061.126637329915,30512.709379675318,26850.41344521631,25566.048516558636,31092.47953511015,25799.556176890324,18675.856389657554,21287.321846645562,22739.86589836831,11881.354193615598,15690.510661814178,17166.842834372892,13836.728384020382,6740.777494316311,0.0,9860.881726595167,63275.52331334448,65162.05656208014,59941.22741575146,58946.525793715955,54234.32044820708,60278.19940950148,54875.627117469456,53446.22528096811,45852.238767813906,47156.59630338897,43323.04130717175,48731.1678373257,37879.17441709891,42198.50015141477,39290.45800862017,31409.3685146961,38230.85570002695,31501.788703839688,37498.72963134122,23714.085755097305,25981.611555618623,12529.958590757442 23 | 56395.42553249089,52000.863750143704,46361.6384228628,47037.98679650291,37445.47893966098,33583.94549839745,31498.95004767518,31966.34031640617,27907.575026981405,26066.27170061651,22389.775511409392,25999.958846792768,19353.710051642825,16718.331085346832,14745.111764554333,17584.48646671137,12451.65734144887,9673.89744320637,9302.797357536201,6226.416162001828,5617.455302750725,9860.881726595167,0.0,59805.135677488106,61124.860081311344,56263.456419529524,54950.986621301396,51226.34267893296,55388.31194738292,50281.89715160645,48144.808691107435,42246.45158100334,42372.612810586426,38418.78386213717,42519.69754862743,33417.56609086252,36390.69877795652,32546.298612327355,24749.270032427467,30353.315872971303,23721.335200496178,28457.88687635297,14677.146590215269,18121.00260968493,9361.422533553497 24 | 7231.2375626227595,13756.357823635299,17862.454683186817,13922.204202702153,23239.455518335246,31940.729391311506,30148.652334521274,28234.49868137,32763.975897288365,37080.50962270494,37860.81853040728,34050.3371893705,41325.284073670584,44616.32124718301,45499.74873168537,42241.837927879686,51429.933422164,50131.93331318718,51225.85555494655,53697.216973445524,57660.06124134214,63275.52331334448,59805.135677488106,0.0,4193.743431727859,3702.5829865316696,5727.241632989552,9280.697786655488,10185.41936278409,11454.744086172015,15832.861421198135,17576.48547056008,18512.721826200825,22259.19696960124,23292.44095626415,26515.693625027332,25955.577824346554,31596.05150346014,37130.62409034141,37447.492997316396,40727.3510861318,45220.23820689038,50159.49911751607,44627.90134809901,51467.139156221245 25 | 
11336.706315957805,17675.959054699615,21309.78912200382,16789.8642657163,25529.903271757303,34846.97124596267,32496.648556298605,30149.310896333263,34768.89885581009,39465.126943296156,39609.97485702361,35156.91640467411,42242.7717011435,46493.04990102659,46589.01634819923,43547.83904113077,53399.98456794206,51468.25924747316,52309.55025152924,54947.5965201984,59326.35922800738,65162.05656208014,61124.860081311344,4193.743431727859,0.0,5595.163106130958,6218.443779879596,12275.353535452585,7916.085544858398,11195.651409044738,14876.568345969128,19328.025837771947,18983.16919451014,22910.357965680658,22391.653419141465,27709.4257501484,26068.95827130734,31503.098044684608,37682.3449503386,37052.45699130508,40944.35333825905,44629.93147092858,50686.821590141335,45219.203468257176,53104.051330903196 26 | 7232.952761662901,12724.050303382874,15831.187367461838,11194.704185804032,20076.218833991832,29256.537364370946,27034.429546902196,24880.62527905951,29461.159218905996,33997.65015491699,34449.97606688836,30436.969125914424,37681.972190954875,41266.549288275826,41892.62408523768,38685.52629806178,48128.19044142944,46589.8996667985,47620.87859424149,50133.66939425062,54231.978930680765,59941.22741575146,56263.456419529524,3702.5829865316696,5595.163106130958,0.0,2320.322675943521,6709.787253450738,7914.470145479699,7916.371312939694,12439.032407089207,14116.662486892134,14810.13884327881,18565.446048720783,19792.897402460385,22910.42672318903,22259.2926806329,27910.03000095002,33439.86036803074,33791.358620648396,37024.76811949335,41588.35255486686,46471.144171911896,40943.43987870049,48024.47848505734 27 | 9172.045373111261,13989.228535777147,16365.101558567794,11261.045284860464,19446.493431763887,29067.448899679457,26405.329523186592,23950.407165419205,28578.306268663284,33369.428152214154,33392.105613180116,29013.756600124383,36165.307697022756,40279.870740111575,40462.072209907776,37368.29171860736,47193.204528078866,45287.467229722904,46188.161648702335,48785.62072397796,53110.11333540662,58946.525793715955,54950.986621301396,5727.241632989552,6218.443779879596,2320.322675943521,0.0,7236.434684749452,5894.943072080756,5727.502622636526,10164.650509603105,13129.240654683572,13034.32990409786,16895.163203562613,17578.377016613544,21534.581697901383,20363.961015455698,25955.628509069546,31750.777953347064,31751.49823622645,35201.55905032256,39504.465929431404,44782.17210562818,39278.35241453226,46888.52913934057 28 | 5727.103493717893,7236.17173232512,9177.544391798248,4641.507882864329,14117.313983851169,22693.552729333518,20948.87203793474,19341.396490003357,23751.904038973815,27848.411826633535,29013.247491677903,25954.424752972573,33350.5972218405,35630.60412677046,37261.4891690933,33789.65404116551,42361.85866470441,41585.916686338096,42944.64893304948,45218.00512720199,48784.98639658949,54234.32044820708,51226.34267893296,9280.697786655488,12275.353535452585,6709.787253450738,7236.434684749452,0.0,12593.916962853316,9177.945103776186,13426.84166052196,9256.051601209845,12439.885819256562,15394.404842389864,19449.108702882542,18511.981866726688,19793.466863691116,25493.04728685474,29751.60722002555,31672.688758284396,33790.644265859046,39593.48060609582,42600.11068862707,37050.181337901915,42637.367365046055 29 | 
15050.10479168996,19674.978658632986,21318.686748335032,15835.288999819135,22347.80917567449,32596.68396791058,29072.629222858344,25970.26422656356,30583.988344098983,35892.82432435263,34832.381995082964,29465.49290682096,36153.43120766891,41804.04750855937,40671.69444879148,37983.71069259435,48777.44251265734,45843.50935855195,46321.58264349108,49164.15725547214,54128.091523165596,60278.19940950148,55388.31194738292,10185.41936278409,7916.085544858398,7914.470145479699,5894.943072080756,12593.916962853316,0.0,5598.416230779784,7536.511486999796,15636.108723742122,13130.554970645295,17028.824763845558,14810.624343546971,22399.21342374944,19342.686362520937,24431.416837823635,31204.014492790004,29653.99773983992,34052.7294931244,37052.69890312397,44033.739890691526,38688.41861207271,47921.809130779795 30 | 13475.869354726592,16365.221241776919,16799.843183182413,11200.059132220596,16790.53532554375,27116.88775179997,23478.13791918863,20389.257487489584,25012.7685279342,30294.782248780546,29365.012615864005,24286.255638500734,31204.724113561522,36332.06838318571,35634.24200407884,32773.116691913536,43302.16680770447,40674.12912486085,41331.34606095824,44072.33070022399,48809.8569162948,54875.627117469456,50281.89715160645,11454.744086172015,11195.651409044738,7916.371312939694,5727.502622636526,9177.945103776186,5598.416230779784,0.0,4642.62352705668,10072.705352852598,7917.30228184691,11876.257512643102,11876.527812112054,17030.472272459832,14876.312620776882,20363.95605986259,26517.48771719713,26069.2128864166,29752.47202451516,33792.47379520886,39503.90657237534,34052.516959282315,42589.960327685825 31 | 18071.438776719606,20416.51125014738,19951.468066661775,14484.91359719993,17708.371288857248,28192.44821421692,23779.832241654756,20157.612752957055,24589.60370721261,30243.6838714754,28289.073380829686,22406.06185913492,28830.207159478112,35222.963837106174,33401.484116325744,30906.403483939706,42171.57572135698,38685.80785360054,38999.8561932103,41919.18726099663,47150.52499661629,53446.22528096811,48144.808691107435,15832.861421198135,14876.568345969128,12439.032407089207,10164.650509603105,13426.84166052196,7536.511486999796,4642.62352705668,0.0,11359.181787881495,6717.053738687802,10194.972579446981,7536.699593219286,15784.112885726156,11877.135073420226,16895.920063976053,23756.090250865793,22215.42023741935,26518.354900620157,29753.364138257522,36520.59675908849,31206.60199562309,40985.94870675493 32 | 14875.677689175516,13128.763368242571,10072.265386778241,6219.959835104025,6717.833245885822,17104.479445620113,13489.575734599897,10821.994873689735,15449.827830939712,20395.58906410334,20334.966079864735,16719.46546201375,24105.961261827986,27177.27620776983,28084.62383609572,24710.819381587735,34072.91346084521,32577.029011174276,33790.39639291744,36166.4133484119,40115.94270820165,45852.238767813906,42246.45158100334,17576.48547056008,19328.025837771947,14116.662486892134,13129.240654683572,9256.051601209845,15636.108723742122,10072.705352852598,11359.181787881495,0.0,5902.790587541509,6965.8521686207005,13783.09016426527,9255.930270883062,11610.888055213398,17033.518372349285,20555.459854438814,23252.019927855752,24711.54385473213,31114.94173024822,33350.48006311344,27802.986470189877,33910.848125856864 33 | 
18071.54109663586,18075.307848250653,15843.686317041733,11201.035651427277,11364.434418516286,21749.434504327113,17108.467387998324,13440.564247707598,17890.031290208488,23526.643854211714,21780.868607437686,16386.62443266365,23296.33955752669,28746.134061850098,27716.94869870959,24890.41544136571,35715.84487615211,32778.40859331589,33415.04712385688,36159.843418592805,40999.66018732862,47156.59630338897,42372.612810586426,18512.721826200825,18983.16919451014,14810.13884327881,13034.32990409786,12439.885819256562,13130.554970645295,7917.30228184691,6717.053738687802,5902.790587541509,0.0,3958.9556599212933,7926.967303272773,9289.095578357928,7536.564049316603,13258.903214741234,18723.61550883159,19344.72639027942,22214.62931969096,27252.161839769087,31750.955628067197,26256.699700372228,34791.58097593855 34 | 21119.620452641848,20077.620550608863,16803.619600084945,13131.808811789837,9986.581318626851,19718.558021222485,14504.905414845436,10492.149591411526,14636.42083776038,20452.59818601821,18094.952746600302,12444.690086121856,19344.729479625017,25029.135349006632,23757.996025098135,20958.260127594218,31981.827960874125,28834.473540395713,29456.84151345529,32204.08622825093,37110.843169410466,43323.04130717175,38418.78386213717,22259.19696960124,22910.357965680658,18565.446048720783,16895.163203562613,15394.404842389864,17028.824763845558,11876.257512643102,10194.972579446981,6965.8521686207005,3958.9556599212933,0.0,8413.046292829034,5604.491591084897,4645.036691723312,10168.253126291287,14875.947592637365,16388.559131542435,18565.232111465884,24292.67595138175,27909.393277961673,22391.014844243036,30914.28804282598 35 | 24713.280650460627,25730.285037726822,23770.645789693805,18997.473726759978,18319.26521242762,28106.6002923623,22742.996701786018,18604.512852787,22307.358166857528,28218.015257433486,24719.218873654492,17896.19945585104,23233.56278257391,31324.78483363786,27882.577313272817,25995.80013630241,38064.44043114757,33400.008383139306,33234.83710480167,36350.90629911085,42190.44015328183,48731.1678373257,42519.69754862743,23292.44095626415,22391.653419141465,19792.897402460385,17578.377016613544,19449.108702882542,14810.624343546971,11876.527812112054,7536.699593219286,13783.09016426527,7926.967303272773,8413.046292829034,0.0,13154.415641297788,6721.809727609239,10168.484770430678,17772.616682864118,14876.624305249323,19798.712492289706,22259.791023567654,29956.283409912823,24893.787807908466,36205.640984534286 36 | 24106.301141136122,21533.798517276657,17029.72808004978,15072.328877895692,7893.202694398399,15539.107939784346,9819.485432187814,5611.194828175213,9196.94442188453,15091.848020479672,12513.86729063958,7536.472197031045,14875.608332162867,19468.17988510907,18984.81835796862,15838.435231574002,26433.66532563124,23758.872341247134,24712.11656208676,27252.49820619964,31779.627531937826,37879.17441709891,33417.56609086252,26515.693625027332,27709.4257501484,22910.42672318903,21534.581697901383,18511.981866726688,22399.21342374944,17030.472272459832,15784.112885726156,9255.930270883062,9289.095578357928,5604.491591084897,13154.415641297788,0.0,7014.224609740603,10086.816847968375,11456.435269658385,15796.054670913381,15838.899781369173,23237.932326217753,24105.926333011877,18564.775855991655,25559.523159471504 37 | 
25490.934152389087,24716.479249831857,21376.5071503487,17764.071885107718,13788.499270945598,22518.009339110708,16833.707355568764,12625.418286577229,15872.666927767435,21770.088217685174,18002.323036880876,11213.763395320273,17038.462712328455,24612.63207094542,21655.14267923527,19468.88322699225,31382.87738582299,27060.675916652417,27185.36532338772,30175.907527875643,35730.08227668438,42198.50015141477,36390.69877795652,25955.577824346554,26068.95827130734,22259.2926806329,20363.961015455698,19793.466863691116,19342.686362520937,14876.312620776882,11877.135073420226,11610.888055213398,7536.564049316603,4645.036691723312,6721.809727609239,7014.224609740603,0.0,5728.301523373399,11878.959030034397,11879.228763360812,14876.160197416728,19800.028030798385,24713.02548984074,19345.744178691668,29669.823134274473 38 | 31201.200257879987,30154.89443979975,26413.51783185457,23247.71665072837,17893.04818853864,24976.325298002277,19070.01564977566,15092.502052260876,16945.845863743496,22461.74323393798,17252.714498604346,10001.386570243341,13504.435540004546,23014.3863421203,18106.322367333996,16822.850842141383,29310.76743499916,23706.78959157401,23243.51809836142,26442.78035230596,32629.81404744517,39290.45800862017,32546.298612327355,31596.05150346014,31503.098044684608,27910.03000095002,25955.628509069546,25493.04728685474,24431.416837823635,20363.95605986259,16895.920063976053,17033.518372349285,13258.903214741234,10168.253126291287,10168.484770430678,10086.816847968375,5728.301523373399,0.0,7899.519220336204,6223.696379332064,9673.10923805502,14125.26968042049,19801.167890277822,14740.406837448327,26870.025869137688 39 | 35423.194749052505,32964.41772915973,28240.465536555355,26516.70458032929,18667.220407072098,22380.23525559395,16822.460105764123,13939.62610282494,13454.466285636532,17743.819510933252,11389.36969264017,5609.188027605166,5610.765753662962,15894.969208984165,10208.932173115612,9208.6902282535,21748.417881938538,15807.278507011526,15462.439374046822,18597.757832445925,24735.67433657719,31409.3685146961,24749.270032427467,37130.62409034141,37682.3449503386,33439.86036803074,31750.777953347064,29751.60722002555,31204.014492790004,26517.48771719713,23756.090250865793,20555.459854438814,18723.61550883159,14875.947592637365,17772.616682864118,11456.435269658385,11878.959030034397,7899.519220336204,0.0,8429.943621624847,4648.355408321015,13464.652516038906,13034.113786103107,7536.863423731888,19040.428434139536 40 | 37367.15100398333,36366.9536544659,32514.51931680857,29468.526323874485,23689.249986514296,29651.32962837476,23807.893919555663,20178.76718026732,21007.643143611505,25881.987152727233,19753.740956690806,13172.611218146314,13178.701147142092,24175.034281558994,17143.91945113707,17265.371729667117,29600.22900770072,22601.453370785548,21275.67651866735,24636.703469431264,31491.38284680902,38230.85570002695,30353.315872971303,37447.492997316396,37052.45699130508,33791.358620648396,31751.49823622645,31672.688758284396,29653.99773983992,26069.2128864166,22215.42023741935,23252.019927855752,19344.72639027942,16388.559131542435,14876.624305249323,15796.054670913381,11879.228763360812,6223.696379332064,8429.943621624847,0.0,6731.153431017514,7920.800971725352,16273.089503207857,12314.756633004523,26356.36143199768 41 | 
39502.25480412504,37368.02672475746,32772.644159357544,30791.72733836676,23253.967448206906,26853.143185166566,21398.583584258227,18587.978946414147,17906.547084257716,21735.35759968085,15108.670754486071,10206.682817563864,7028.296174215143,18273.036860896416,10515.321427440436,11245.1497230846,23173.327857635668,15893.538052392469,14569.774380717588,17923.03646955485,24763.217706129893,31501.788703839688,23721.335200496178,40727.3510861318,40944.35333825905,37024.76811949335,35201.55905032256,33790.644265859046,34052.7294931244,29752.47202451516,26518.354900620157,24711.54385473213,22214.62931969096,18565.232111465884,19798.712492289706,15838.899781369173,14876.160197416728,9673.10923805502,4648.355408321015,6731.153431017514,0.0,9213.108617271759,10172.268267494233,5612.426025360084,19724.690296432356 42 | 45286.71635985335,44203.82873707977,40179.27595521697,37334.89139306511,31073.41761867714,35839.095257657646,30213.43503828152,27007.184868123477,26919.105979646956,30945.89489170155,24318.13862742015,19035.284822037534,15903.445533038888,26969.358230823094,18283.009207126965,20029.53852244571,30998.64559508504,22810.015058701192,20464.647247075234,23699.600530775773,30967.208684208737,37498.72963134122,28457.88687635297,45220.23820689038,44629.93147092858,41588.35255486686,39504.465929431404,39593.48060609582,37052.69890312397,33792.47379520886,29753.364138257522,31114.94173024822,27252.161839769087,24292.67595138175,22259.791023567654,23237.932326217753,19800.028030798385,14125.26968042049,13464.652516038906,7920.800971725352,9213.108617271759,0.0,13826.749829511,12657.658811889658,26982.617472192378 43 | 48212.2270731759,45218.05822512627,40096.24360618104,39101.67595209996,30439.291032952457,30916.10477948144,26560.201683277814,25069.142533815288,22614.260756304822,24118.62224582383,17780.90529400744,16895.780283481316,9673.657850306554,16836.897794783014,7906.1795454986495,11624.165881869534,18661.85548998018,10022.842019887015,7036.948681986013,10025.26253992841,17293.38246124004,23714.085755097305,14677.146590215269,50159.49911751607,50686.821590141335,46471.144171911896,44782.17210562818,42600.11068862707,44033.739890691526,39503.90657237534,36520.59675908849,33350.48006311344,31750.955628067197,27909.393277961673,29956.283409912823,24105.926333011877,24713.02548984074,19801.167890277822,13034.113786103107,16273.089503207857,10172.268267494233,13826.749829511,0.0,5553.395452243053,14193.10870548184 44 | 42669.437603283666,39774.6524513783,34737.92795087003,33585.76664514611,25067.841303657777,26431.19707649051,21654.514395630227,19798.696549269,17776.90469319726,20168.766026820467,13507.504552444949,11456.930681649574,4649.089618850838,14538.811156458318,5625.35337645742,7945.186106393793,18377.557871124183,10519.701593856424,8962.173352733247,12321.957466081323,19256.19169026103,25981.611555618623,18121.00260968493,44627.90134809901,45219.203468257176,40943.43987870049,39278.35241453226,37050.181337901915,38688.41861207271,34052.516959282315,31206.60199562309,27802.986470189877,26256.699700372228,22391.014844243036,24893.787807908466,18564.775855991655,19345.744178691668,14740.406837448327,7536.863423731888,12314.756633004523,5612.426025360084,12657.658811889658,5553.395452243053,0.0,14545.620262821707 45 | 
47618.61934525607,42943.05240595746,37259.94889187949,38303.148661299434,28640.082984836754,24226.64518306768,22389.468247233905,23296.90280098823,18984.383101175845,16718.28744662675,13626.259111718642,18594.008959549083,13468.250289029094,7404.02687029572,9216.090014675035,10210.891354602987,4608.3837537428535,4220.932365124263,7275.233126947547,5615.17198488566,6225.67630400952,12529.958590757442,9361.422533553497,51467.139156221245,53104.051330903196,48024.47848505734,46888.52913934057,42637.367365046055,47921.809130779795,42589.960327685825,40985.94870675493,33910.848125856864,34791.58097593855,30914.28804282598,36205.640984534286,25559.523159471504,29669.823134274473,26870.025869137688,19040.428434139536,26356.36143199768,19724.690296432356,26982.617472192378,14193.10870548184,14545.620262821707,0.0
--------------------------------------------------------------------------------
/loss_curve/Train_Val_loss.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/loss_curve/Train_Val_loss.png
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
import argparse
from os.path import join as pjoin
from models.base_model import *
from utils.math_graph import *
from data_loader.data_utils import *
from models.trainer import train_model
import random
from models.tester import *
from models.transformer import *


import warnings
warnings.filterwarnings("ignore")

parser = argparse.ArgumentParser()
parser.add_argument('--n_route', type=int, default=1)
parser.add_argument('--n_his', type=int, default=36)
parser.add_argument('--n_pred', type=int, default=18)
parser.add_argument('--batch_size', type=int, default=16)
parser.add_argument('--epoch', type=int, default=300)
parser.add_argument('--ks', type=int, default=2)
parser.add_argument('--rnn_units', type=int, default=64)
parser.add_argument('--n_heads', type=int, default=2)
parser.add_argument('--num_rnn_layers', type=int, default=1)
parser.add_argument('--lr', type=float, default=1e-3)
parser.add_argument('--opt', type=str, default='ADAM')
parser.add_argument('--function', type=str, default='fc')
parser.add_argument('--lr_step', type=int, default=50)
parser.add_argument('--lr_gamma', type=float, default=0.5)
# argparse does not convert strings to bool reliably, so 0/1 with type=int is used here
parser.add_argument('--use_schedule_learning', type=int, default=1)
parser.add_argument('--cl_decay_steps', type=int, default=1)
parser.add_argument('--print_node', type=int, default=0)
parser.add_argument('--patience', type=int, default=80)
parser.add_argument('--seed', type=int, default=123)
parser.add_argument('--max_grad_norm', type=float, default=1.0)
# These two flags are referenced below but were missing from the parser; the defaults are assumptions.
parser.add_argument('--model_type', type=str, default='gcn_gru')  # 'gcn_gru' or 'transformer'
parser.add_argument('--dropout', type=float, default=0.1)  # used by the transformer models


args = parser.parse_args()
print(f'Training configs: {args}')

SEED = args.seed
random.seed(SEED)
np.random.seed(SEED)
torch.manual_seed(SEED)
torch.cuda.manual_seed(SEED)
torch.backends.cudnn.deterministic = True

n, n_his, n_pred = args.n_route, args.n_his, args.n_pred
ks, rnn_units, function = args.ks, args.rnn_units, args.function
num_rnn_layers, cl_decay_steps, ucl = args.num_rnn_layers, args.cl_decay_steps, args.use_schedule_learning
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

if n > 1:
    # Load weighted adjacency matrix W
    wa = weight_matrix(pjoin('./dataset', f'W_{n}.csv'))
    # Calculate graph kernel
    la = scaled_laplacian(wa)
    # Alternative approximation method: 1st approx - first_approx(W, n).
    lk = cheb_poly_approx(la, ks, n)
    # lk = first_approx(wa, n)
    graph_kernel = torch.tensor(lk).type(torch.float32).to(device)
else:
    # univariate time series prediction
    graph_kernel = None
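
# Illustrative sketch (added for exposition; utils/math_graph.py itself is not shown in
# this listing). Assuming the STGCN-style formulation the README points to,
# cheb_poly_approx stacks Chebyshev polynomials of the scaled Laplacian,
# T_0 = I, T_1 = L~, T_k = 2 * L~ * T_{k-1} - T_{k-2}, into one [n, ks * n] kernel:
def _cheb_poly_sketch(l_tilde, ks_, n_):
    l0, l1 = np.eye(n_), np.copy(l_tilde)
    l_list = [np.copy(l0), np.copy(l1)]
    for _ in range(ks_ - 2):
        l2 = 2 * np.matmul(l_tilde, l1) - l0
        l_list.append(np.copy(l2))
        l0, l1 = np.copy(l1), np.copy(l2)
    return np.concatenate(l_list[:ks_], axis=-1)  # [n, ks * n]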

# Data Preprocessing
data_file = pjoin('./dataset', f'V_{n}.csv')
n_train, n_val, n_test = 6, 1, 1
# data: [batch_size, seq_len, n_well, C_0].
dataloaders, dataset_sizes, test_data_gen, stats = loader_gen(data_file, n_train, n_val,
                                                              n_test, n_his, n_pred, n, args.batch_size, device)
print('>> Loading dataset with Mean: {:.2f}, STD: {:.2f}'.format(stats['mean'], stats['std']))

if __name__ == '__main__':
    if args.model_type == 'gcn_gru':
        model = DCRNNModel(graph_kernel, input_dim=1, output_dim=1, seq_len=n_his, horizon=n_pred, rnn_units=rnn_units,
                           num_rnn_layers=num_rnn_layers, num_nodes=n, cl_decay_steps=args.cl_decay_steps,
                           use_curriculum_learning=ucl, function=function).to(device)
        train_model(model, dataloaders, dataset_sizes, args, stats)
        print(args)
        print(f'The model has {count_parameters(model)} parameters')
        model_test(test_data_gen, args, stats)
    elif args.model_type == 'transformer':
        if args.function == 'fc':
            model = ShiftTransformer(rnn_units, args.n_heads, rnn_units, num_rnn_layers, dropout=args.dropout).to(device)
        elif args.function == 'gconv':
            model = GCNShiftTransformer(ks, graph_kernel, rnn_units, args.n_heads, rnn_units, num_rnn_layers,
                                        dropout=args.dropout).to(device)
        train_model(model, dataloaders, dataset_sizes, args, stats)
        print(args)
        print(f'The model has {count_parameters(model)} parameters')
        model_multi_test(test_data_gen, args, stats)
    else:
        raise ValueError(f'ERROR: no model type named {args.model_type}')
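
# Example invocations (hypothetical flag values; every flag is defined in the parser above):
#   python main.py --n_route 1 --model_type gcn_gru --function fc
#   python main.py --n_route 45 --model_type transformer --function gconv --batch_size 16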
--------------------------------------------------------------------------------
/models/base_model.py:
--------------------------------------------------------------------------------
from models.layer_module import *
import torch.nn as nn
import numpy as np
import torch.nn.functional as F


device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')


def count_parameters(model):
    return sum(p.numel() for p in model.parameters() if p.requires_grad)


class Seq2SeqAttrs:
    def __init__(self, adj_mx, **model_kwargs):
        self.adj_mx = adj_mx
        self.function = model_kwargs.get('function', 'fc')
        self.num_nodes = int(model_kwargs.get('num_nodes', 38))
        self.num_rnn_layers = int(model_kwargs.get('num_rnn_layers', 1))
        self.rnn_units = int(model_kwargs.get('rnn_units'))
        self.hidden_state_size = self.num_nodes * self.rnn_units


class EncoderModel(nn.Module, Seq2SeqAttrs):
    def __init__(self, adj_mx, **model_kwargs):
        nn.Module.__init__(self)
        Seq2SeqAttrs.__init__(self, adj_mx, **model_kwargs)
        self.input_dim = int(model_kwargs.get('input_dim', 1))
        self.seq_len = int(model_kwargs.get('seq_len'))  # for the encoder
        self.gcn_gru_layers = nn.ModuleList([GRUCell(input_size=(1 if layer == 0 else self.rnn_units),
                                                     hidden_size=self.rnn_units, nodes=self.num_nodes,
                                                     graph_kernel=adj_mx, func_type=self.function)
                                             for layer in range(self.num_rnn_layers)])

    def forward(self, inputs, hidden_state=None):
        """
        Encoder forward pass.
        :param inputs: shape (batch_size, self.num_nodes * self.input_dim)
        :param hidden_state: (num_layers, batch_size, self.hidden_state_size)
               optional, zeros if not provided
        :return: output: shape (batch_size, self.hidden_state_size)
                 hidden_state: shape (num_layers, batch_size, self.hidden_state_size)
                 (lower indices mean lower layers)
        """
        batch_size, _ = inputs.size()
        if hidden_state is None:
            hidden_state = torch.zeros((self.num_rnn_layers, batch_size, self.hidden_state_size), device=device)
        hidden_states = []
        output = inputs
        for layer_num, gcn_gru_layer in enumerate(self.gcn_gru_layers):
            next_hidden_state = gcn_gru_layer(output, hidden_state[layer_num])
            hidden_states.append(next_hidden_state)
            output = next_hidden_state

        return output, torch.stack(hidden_states, dim=0)


class Attention(nn.Module):
    def __init__(self, hidden_state_size):
        super().__init__()
        self.attn = nn.Linear(hidden_state_size * 2, hidden_state_size)
        self.v = nn.Parameter(torch.rand(hidden_state_size), requires_grad=True)

    def forward(self, hidden, encoder_outputs):
        # hidden = [batch_size, hidden_state_size]
        # encoder_outputs = [seq_len, batch_size, hidden_state_size]
        batch_size = encoder_outputs.shape[1]
        seq_len = encoder_outputs.shape[0]
        # hidden -> [batch_size, seq_len, hidden_state_size]
        # encoder_outputs -> [batch_size, seq_len, hidden_state_size]
        hidden = hidden.unsqueeze(1).repeat(1, seq_len, 1)
        encoder_outputs = encoder_outputs.permute(1, 0, 2)
        # energy = [batch_size, seq_len, hidden_state_size]
        energy = torch.tanh(self.attn(torch.cat((hidden, encoder_outputs), dim=2)))
        energy = energy.permute(0, 2, 1)
        # v = [batch_size, 1, hidden_state_size]
        v = self.v.repeat(batch_size, 1).unsqueeze(1)
        # attention = [batch_size, seq_len]
        attention = torch.bmm(v, energy).squeeze(1)
        return F.softmax(attention, dim=1)
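
# Usage sketch for the additive (Bahdanau-style) attention above; sizes are
# hypothetical, chosen to match the defaults in main.py (rnn_units=64, n_his=36):
#   attn = Attention(hidden_state_size=64)
#   hidden = torch.zeros(16, 64)       # current decoder state [batch_size, hidden]
#   enc_out = torch.zeros(36, 16, 64)  # encoder outputs [seq_len, batch_size, hidden]
#   weights = attn(hidden, enc_out)    # [16, 36]; each row sums to 1 after the softmax
# The scores are v^T tanh(W [hidden; enc_out]), i.e. a learned additive alignment.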

class DecoderModelWithAttention(nn.Module, Seq2SeqAttrs):
    def __init__(self, adj_mx, attention, **model_kwargs):
        nn.Module.__init__(self)
        Seq2SeqAttrs.__init__(self, adj_mx, **model_kwargs)
        self.output_dim = int(model_kwargs.get('output_dim', 1))
        self.horizon = int(model_kwargs.get('horizon', 1))  # seq_len for the decoder
        self.projection_layer = nn.Linear(self.rnn_units * 2 + self.output_dim, self.output_dim)
        self.attention = attention
        self.gcn_gru_layers = nn.ModuleList([GRUCell(input_size=(1 + self.rnn_units if layer == 0 else self.rnn_units),
                                                     hidden_size=self.rnn_units, nodes=self.num_nodes,
                                                     graph_kernel=adj_mx, func_type=self.function)
                                             for layer in range(self.num_rnn_layers)])

    def forward(self, inputs, encoder_outputs, hidden_state=None):
        """
        Decoder forward pass.

        :param inputs: shape (batch_size, self.num_nodes * self.output_dim)
        :param encoder_outputs: shape (seq_len, batch_size, self.hidden_state_size)
        :param hidden_state: (num_layers, batch_size, self.hidden_state_size)
               optional, zeros if not provided
        :return: output: shape (batch_size, self.num_nodes * self.output_dim)
                 hidden_state: shape (num_layers, batch_size, self.hidden_state_size)
                 (lower indices mean lower layers)
        """
        batch_size = inputs.shape[0]
        hidden_states = []
        a = self.attention(hidden_state[-1], encoder_outputs)
        # a -> [batch_size, 1, seq_len]
        a = a.unsqueeze(1)
        # encoder_outputs -> [batch_size, seq_len, hidden_state_size]
        encoder_outputs = encoder_outputs.permute(1, 0, 2)
        # weighted = [batch_size, num_nodes, rnn_units]
        weighted = torch.bmm(a, encoder_outputs).squeeze(1).reshape(batch_size, self.num_nodes, -1)
        # output = [batch_size, num_nodes * (output_dim + rnn_units)]
        output = torch.cat((inputs.reshape(batch_size, self.num_nodes, -1), weighted), dim=2).view(batch_size, -1)
        for layer_num, gcn_gru_layer in enumerate(self.gcn_gru_layers):
            next_hidden_state = gcn_gru_layer(output, hidden_state[layer_num])
            hidden_states.append(next_hidden_state)
            output = next_hidden_state

        projected = self.projection_layer(torch.cat((output.view(-1, self.rnn_units), weighted.view(-1, self.rnn_units),
                                                     inputs.reshape(-1, self.output_dim)), dim=1))
        output = projected.view(batch_size, -1)

        return output, torch.stack(hidden_states, dim=0)


class DCRNNModel(nn.Module, Seq2SeqAttrs):
    def __init__(self, adj_mx, **model_kwargs):
        super().__init__()
        Seq2SeqAttrs.__init__(self, adj_mx, **model_kwargs)
        self.encoder_model = EncoderModel(adj_mx, **model_kwargs)
        self.attention = Attention(self.hidden_state_size)
        self.decoder_model = DecoderModelWithAttention(adj_mx, self.attention, **model_kwargs)
        self.cl_decay_steps = int(model_kwargs.get('cl_decay_steps', 200))
        self.use_curriculum_learning = bool(model_kwargs.get('use_curriculum_learning', False))

    def _compute_sampling_threshold(self, batches_seen):
        # As training progresses, the teacher forcing ratio decreases.
        return self.cl_decay_steps / (self.cl_decay_steps + np.exp(batches_seen / self.cl_decay_steps))

    def encoder4attention(self, inputs):
        """
        Encoder forward pass on t time steps.
        :param inputs: shape (seq_len, batch_size, num_nodes * input_dim)
        :return: encoder_hidden_state: (num_layers, batch_size, self.hidden_state_size)
                 outputs: (seq_len, batch_size, self.hidden_state_size)
        """
        encoder_hidden_state = None
        outputs = []
        for t in range(self.encoder_model.seq_len):
            output, encoder_hidden_state = self.encoder_model(inputs[t], encoder_hidden_state)
            outputs.append(output)

        return encoder_hidden_state, torch.stack(outputs, dim=0)

    def decoder4attention(self, base, encoder_hidden_state, encoder_outputs, labels=None, batches_seen=None):
        """
        Decoder forward pass.
        :param base: the last observed time step, used as the GO symbol for the decoder
        :param encoder_hidden_state: (num_layers, batch_size, self.hidden_state_size)
        :param encoder_outputs: (seq_len, batch_size, self.hidden_state_size)
        :param labels: (self.horizon, batch_size, self.num_nodes * self.output_dim) [optional, absent for inference]
        :param batches_seen: global step [optional, absent for inference]
        :return: output: (self.horizon, batch_size, self.num_nodes * self.output_dim)
        """
        go_symbol = base
        decoder_hidden_state = encoder_hidden_state
        decoder_input = go_symbol  # may use the last element of inputs

        outputs = []

        for t in range(self.decoder_model.horizon):
            decoder_output, decoder_hidden_state = self.decoder_model(decoder_input, encoder_outputs,
                                                                      decoder_hidden_state)
            decoder_input = decoder_output
            outputs.append(decoder_output)
            if self.training and self.use_curriculum_learning:
                c = np.random.uniform(0, 1)
                d = self._compute_sampling_threshold(batches_seen)
                if c < max(d, 0.5):
                    decoder_input = labels[t]
        outputs = torch.stack(outputs)
        return outputs
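
    # Worked example of the threshold above (inverse sigmoid decay, as in the DCRNN
    # paper), assuming cl_decay_steps k = 200: p(i) = k / (k + exp(i / k)), so
    #   p(0)    = 200 / 201          ~ 0.995  (teacher forcing almost certain at the start)
    #   p(1000) = 200 / (200 + e^5)  ~ 0.574
    #   p(2000) = 200 / (200 + e^10) ~ 0.009
    # The max(d, 0.5) floor in decoder4attention keeps the teacher-forcing
    # probability at or above 0.5 for the whole run.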

    def forward(self, inputs, labels=None, batches_seen=None):
        """
        seq2seq forward pass
        :param inputs: shape (seq_len, batch_size, num_sensor * input_dim)
        :param labels: shape (horizon, batch_size, num_sensor * output)
        :param batches_seen: batches seen till now
        :return: outputs: (self.horizon, batch_size, self.num_nodes * self.output_dim)
        """
        encoder_hidden_state, encoder_outputs = self.encoder4attention(inputs)
        # Argument order follows the decoder4attention signature: (base, encoder_hidden_state, encoder_outputs, ...).
        outputs = self.decoder4attention(inputs[-1], encoder_hidden_state, encoder_outputs, labels,
                                         batches_seen=batches_seen)
        return outputs
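
# Minimal end-to-end shape sketch (hypothetical sizes matching the defaults in main.py):
#   model = DCRNNModel(None, input_dim=1, output_dim=1, seq_len=36, horizon=18,
#                      rnn_units=64, num_rnn_layers=1, num_nodes=1, function='fc')
#   x = torch.zeros(36, 16, 1)  # [seq_len, batch_size, num_nodes * input_dim]
#   y = model(x)                # -> [18, 16, 1] = [horizon, batch_size, num_nodes * output_dim]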
--------------------------------------------------------------------------------
/models/layer_module.py:
--------------------------------------------------------------------------------
import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F


class GRUCell(nn.Module):
    def __init__(self, input_size, hidden_size, graph_kernel, nodes, func_type='fc'):
        """

        :param input_size: int, number of input features per node.
        :param hidden_size: int, number of hidden units per node.
        :param graph_kernel: tensor, (nodes, ks * nodes).
        :param nodes: int, number of nodes in the graph.
        :param func_type: str, 'fc' for a fully-connected cell, 'gconv' for a graph-convolutional cell.
        """
        super(GRUCell, self).__init__()
        self.type = func_type
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.nodes = nodes
        if func_type == 'gconv':
            self.ks = graph_kernel.shape[1] // self.nodes
            self.graph_kernel = graph_kernel
            self.w_ih = nn.Parameter(torch.randn(3 * hidden_size, self.ks * input_size), requires_grad=True)
            self.w_hh = nn.Parameter(torch.randn(3 * hidden_size, self.ks * hidden_size), requires_grad=True)
        elif func_type == 'fc':
            self.w_ih = nn.Parameter(torch.randn(3 * hidden_size, input_size), requires_grad=True)
            self.w_hh = nn.Parameter(torch.randn(3 * hidden_size, hidden_size), requires_grad=True)
        else:
            raise ValueError(f'ERROR: no function type named {func_type}')
        self.b_ih = nn.Parameter(torch.randn(3 * hidden_size), requires_grad=True)
        self.b_hh = nn.Parameter(torch.randn(3 * hidden_size), requires_grad=True)
        self._reset_param()

    def _reset_param(self):
        std = 1.0 / np.sqrt(self.hidden_size)
        for weight in self.parameters():
            nn.init.uniform_(weight, -std, std)

    def gc(self, x, w, b):
        batch_size = x.shape[0] // self.nodes
        x = x.reshape(batch_size, self.nodes, -1)
        x_ker = torch.matmul(x.transpose(1, 2), self.graph_kernel)
        x_ker = x_ker.reshape(batch_size, -1, self.nodes).transpose(1, 2)
        return (torch.matmul(x_ker, w.t()) + b).reshape(batch_size * self.nodes, -1)

    def forward(self, x, hx):
        batch_size = x.shape[0]
        x = x.reshape(batch_size * self.nodes, -1)
        hx = hx.reshape(batch_size * self.nodes, -1)
        if self.type == 'fc':
            gi = F.linear(x, self.w_ih, self.b_ih)
            gh = F.linear(hx, self.w_hh, self.b_hh)
        else:
            gi = self.gc(x, self.w_ih, self.b_ih)
            gh = self.gc(hx, self.w_hh, self.b_hh)
        i_r, i_i, i_n = gi.chunk(3, 1)
        h_r, h_i, h_n = gh.chunk(3, 1)

        r = torch.sigmoid(i_r + h_r)
        z = torch.sigmoid(i_i + h_i)
        n = torch.tanh(i_n + r * h_n)
        return (z * hx + (1 - z) * n).reshape(batch_size, -1)
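
# The gate algebra above is the standard GRU (illustrative summary):
#   r  = sigmoid(W_ir x + b_ir + W_hr h + b_hr)       reset gate
#   z  = sigmoid(W_iz x + b_iz + W_hz h + b_hz)       update gate
#   n  = tanh(W_in x + b_in + r * (W_hn h + b_hn))    candidate state
#   h' = z * h + (1 - z) * n
# with each matrix product replaced by the graph convolution gc() when
# func_type == 'gconv', so the gates mix information across neighboring nodes.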
--------------------------------------------------------------------------------
/models/tester.py:
--------------------------------------------------------------------------------
import torch
import numpy as np
import matplotlib.pyplot as plt
from os.path import join as pjoin
from utils.math_utils import mape, mae, mse
from utils.math_utils import z_inverse


def model_test(dataloader, args, stats):
    """
    Load and test the saved RNN (DCRNN) model from the output directory.
    :param dataloader: instance of class Dataloader, dataloader for test.
    :param args: instance of class argparse, args for training.
    :param stats: dict, mean and standard deviation of the training data, used to de-normalize.
    :return:
    """
    n_his, n_pred, ks, batch_size = args.n_his, args.n_pred, args.ks, args.batch_size
    n_route, epoch = args.n_route, args.epoch
    print_node = args.print_node
    model_path = pjoin('./output', f'{args.function}.pkl')

    model = torch.load(model_path)
    print(f'>> Loading saved model from {model_path} ...')

    v, v_ = [], []

    with torch.no_grad():
        for j, (x, y_tar) in enumerate(dataloader):
            x, y_tar = x.permute(1, 0, 2, 3), y_tar.permute(1, 0, 2, 3)
            x = x.reshape(n_his, -1, n_route)  # n_route * c_in if c_in not 1
            y_tar = y_tar.reshape(n_pred, -1, n_route)  # n_route * c_out if c_out not 1

            # [seq_len, batch_size, n_well * C_0]
            y_pre = model(x)
            # [batch_size, seq_len, n_well * C_0]
            v.extend(y_tar.transpose(0, 1).to('cpu').numpy())
            v_.extend(y_pre.transpose(0, 1).to('cpu').numpy())

        v = torch.from_numpy(np.array(v))
        v_ = torch.from_numpy(np.array(v_))
        # convert water level to its original value
        v = z_inverse(v, stats['mean'], stats['std'])
        v_ = z_inverse(v_, stats['mean'], stats['std'])

        mae1 = mae(v, v_)
        mape1 = mape(v, v_)
        mse1 = mse(v, v_)
        rmse1 = torch.sqrt(mse1)

        # plot comparison diagram of predicted and actual measured values
        x1 = torch.arange(len(v))
        for point in range(n_pred):
            fig = plt.figure()
            plt.title('Comparison of actual and predicted values', color='black')
            plt.xlabel("Number of test data")
            plt.ylabel('Water level [in meters]')
            plt.plot(x1, v[:, point, print_node], color='r', label="Target")
            plt.plot(x1, v_[:, point, print_node], color='b', label='Prediction')
            plt.legend()
            fig.savefig(pjoin('./picture', f'single_point{point}_node{print_node}.png'))

        # plot comparison diagram of predicted and actual measured values
        x2 = torch.arange(n_pred) + 1
        for node in range(1):
            fig = plt.figure()
            plt.title('Comparison of actual and predicted values', color='black')
            plt.xlabel('time step')
            plt.ylabel('water level [in meters]')
            plt.xlim(0, 19)
            plt.plot(x2, v[-1, :, node], color='r', label='Target')
            plt.plot(x2, v_[-1, :, node], color='b', label='Prediction')
            plt.legend()
            fig.savefig(pjoin('./picture', f'single_n_pred{n_pred}_node{node}.png'))

        print(f'Processed {j + 1:3d} batches:',
              f'mae<{mae1:.3f}> mape<{mape1:.3f}> mse<{mse1:.3f}> rmse<{rmse1:.3f}>')
    print('Testing model finished!')

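# De-normalization reference (illustrative numbers): z_inverse undoes the z-score
# from data_loader/data_utils.py, i.e. x = x' * std + mean. For example, with
# mean = 30000 and std = 15000, a normalized prediction of 0.5 maps back to
# 0.5 * 15000 + 30000 = 37500, in the same units as the raw CSV values.
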
def model_multi_test(dataloader, args, stats):
    """
    Load and test the saved transformer model from the output directory.
    :param dataloader: instance of class Dataloader, dataloader for test.
    :param args: instance of class argparse, args for training.
    :param stats: dict, mean and standard deviation of the training data, used to de-normalize.
    :return:
    """
    n_his, n_pred, ks, batch_size = args.n_his, args.n_pred, args.ks, args.batch_size
    n_route, epoch = args.n_route, args.epoch
    print_node = args.print_node
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # must match the filename that train_model uses when saving the model
    model_path = pjoin('./output', f'{args.function}.pkl')

    model = torch.load(model_path)
    print(f'>> Loading saved model from {model_path} ...')

    v, v_ = [], []

    with torch.no_grad():
        for j, (x, y_tar) in enumerate(dataloader):
            x, y_tar = x.permute(1, 0, 2, 3), y_tar.permute(1, 0, 2, 3)
            # [seq_len, batch_size, n_well * C_0]
            x = x.reshape(n_his, -1, n_route)  # n_route * c_in
            y_tar = y_tar.reshape(n_pred, -1, n_route)  # n_route * c_out
            step_list = [x[-1]]
            for step in range(n_pred):
                trg_tensor = torch.stack(step_list, dim=0)
                y_pre = model(x, trg_tensor)
                step_list.append(y_pre[-1])

            # [batch_size, seq_len, n_well * C_0]
            v.extend(y_tar.transpose(0, 1).to('cpu').numpy())
            v_.extend(torch.stack(step_list[1:], dim=0).transpose(0, 1).to('cpu').numpy())

        v = torch.from_numpy(np.array(v))
        v_ = torch.from_numpy(np.array(v_))
        # convert water level to its original value
        v = z_inverse(v, stats['mean'], stats['std'])
        v_ = z_inverse(v_, stats['mean'], stats['std'])
        mae1 = mae(v, v_)
        mape1 = mape(v, v_)
        mse1 = mse(v, v_)
        rmse1 = torch.sqrt(mse1)

        # plot comparison diagram of predicted and actual measured values
        x1 = torch.arange(len(v))
        for point in range(n_pred):
            fig = plt.figure()
            plt.title('Comparison of actual and predicted values', color='black')
            plt.xlabel("Number of test data")
            plt.ylabel('Water level [in meters]')
            plt.plot(x1, v[:, point, print_node], color='r', label="Target")
            plt.plot(x1, v_[:, point, print_node], color='b', label='Prediction')
            plt.legend()
            fig.savefig(pjoin('./picture', f'multi_point{point}_node{print_node}.png'))

        # plot comparison diagram of predicted and actual measured values
        x2 = torch.arange(n_pred)
        for node in range(6):  # plot the first six nodes
            fig = plt.figure()
            plt.title('Comparison of actual and predicted values', color='black')
            plt.xlabel('time step')
            plt.ylabel('water level [in meters]')
            plt.plot(x2, v[-1, :, node], color='r', label='Target')
            plt.plot(x2, v_[-1, :, node], color='b', label='Prediction')
            plt.legend()
            fig.savefig(pjoin('./picture', f'multi_n_pred{n_pred}_node{node}.png'))

        # with SummaryWriter('./tensorboard') as w_test:
        #     w_test.add_figure('Prediction vs Target', figure)
        #     for i in np.arange(v_.shape[1]):
        #         w_test.add_scalars('Target&Prediction', {'target': v[i, 0], 'prediction': v_[i, 0]}, i + 1)

        print(f'Processed {j + 1:3d} batches:',
              f'mae<{mae1:.3f}> mape<{mape1:.3f}> mse<{mse1:.3f}> rmse<{rmse1:.3f}>')
    print('Testing model finished!')
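
# Autoregressive decoding used above (schematic of the inner loop in model_multi_test):
#   trg = [x[-1]]                       # seed the target sequence with the last observed step
#   for t in range(n_pred):
#       y = model(x, torch.stack(trg))  # predict one step further
#       trg.append(y[-1])               # feed the newest prediction back in
# so at test time each step is conditioned on the model's own previous outputs,
# never on ground truth.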
--------------------------------------------------------------------------------
/models/trainer.py:
--------------------------------------------------------------------------------
from utils.math_utils import mape, mae, mse
from os.path import join as pjoin
import copy  # for snapshotting the best model weights
import torch
import torch.nn as nn
import torch.optim as optim
import time
import numpy as np
import matplotlib.pyplot as plt
from utils.math_utils import z_inverse


def train_model(model, dataloaders, dataset_sizes, args, stats):
    """
    Train the model, using the validation set for model selection.
    :param model: nn.Module, the model to be trained.
    :param dataloaders: dict, contains the training and validation dataloaders.
    :param dataset_sizes: dict, sizes of the training and validation datasets.
    :param args: instance of class argparse, args for training.
    :param stats: dict, mean and standard deviation of the training data, used to de-normalize.
    :return:
    """
    batch_size, epoch, lr, opt, n_route = args.batch_size, args.epoch, args.lr, args.opt, args.n_route
    n_his, n_pred, patience, max_grad_norm = args.n_his, args.n_pred, args.patience, args.max_grad_norm
    seq_loaders = dataloaders
    seq_sizes = dataset_sizes
    train_loss = []
    val_loss = []
    # The loss function is mean squared error.
    loss_func = nn.MSELoss()
    # Options for the optimizer.
    if opt == 'RMSProp':
        optimizer = optim.RMSprop(model.parameters(), lr)
    elif opt == 'ADAM':
        optimizer = optim.Adam(model.parameters(), lr)
    else:
        raise ValueError(f'ERROR: optimizer "{opt}" is not defined.')
    # The scheduler decays the learning rate in steps.
    scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_step, gamma=args.lr_gamma)

    start_time = time.time()
    # deepcopy: state_dict() holds references, so a plain assignment would track later updates
    best_model_wts = copy.deepcopy(model.state_dict())
    best_loss = float('inf')
    wait = 0
    batches_seen = 0.0

    for i in range(epoch):
        if wait >= patience:
            print('Early stopping of training!')
            break
        for phase in ['train', 'valid']:
            if phase == 'train':
                model.train()
            else:
                model.eval()

            running_loss = 0.0
            for j, (x, y_tar) in enumerate(seq_loaders[phase]):
                # x, y_tar: [batch_size, seq_len, n_well, C_0]
                x, y_tar = x.permute(1, 0, 2, 3), y_tar.permute(1, 0, 2, 3)
                x = x.reshape(n_his, -1, n_route)  # n_route * c_in if c_in not 1
                y_tar = y_tar.reshape(n_pred, -1, n_route)  # n_route * c_out if c_out not 1

                optimizer.zero_grad()

                if phase == 'train':
                    y_pre = model(x, labels=y_tar, batches_seen=batches_seen)
                else:
                    y_pre = model(x, labels=None, batches_seen=None)

                y_tar = y_tar.transpose(0, 1)
                y_pre = y_pre.transpose(0, 1)

                # y: [b, t, n * c]
                loss = loss_func(y_pre, y_tar)
                v_ = y_pre.clone().detach()
                v = y_tar.clone().detach()
                # convert water level to its original value
                v = z_inverse(v, stats['mean'], stats['std'])
                v_ = z_inverse(v_, stats['mean'], stats['std'])
                mae1 = mae(v, v_)
                mape1 = mape(v, v_)
                mse1 = mse(v, v_)
                rmse1 = torch.sqrt(mse1)
                print('.', end='')
                if j % 5 == 4:  # display information every 5 batches
                    print(f'{phase}: Epoch {i + 1:2d}, Step {j + 1:3d}:',
                          f'mse<{mse1:.3f}> mae<{mae1:.3f}> mape<{mape1:.3f}> rmse<{rmse1:.3f}>')

                if phase == 'train':
                    batches_seen += 1
                    loss.backward()
                    # gradient clipping (in place) must happen after backward() and before step()
                    torch.nn.utils.clip_grad_norm_(model.parameters(), max_grad_norm)
                    optimizer.step()

                running_loss += loss.data.item()

            epoch_loss = running_loss / np.ceil(seq_sizes[phase] / batch_size)
            if phase == 'train':
                scheduler.step()
                train_loss.append(epoch_loss)
            else:
                val_loss.append(epoch_loss)
                if epoch_loss < best_loss:
                    best_loss = epoch_loss
                    best_model_wts = copy.deepcopy(model.state_dict())
                    wait = 0
                else:
                    wait += 1
        print()

    model.load_state_dict(best_model_wts)
    torch.save(model, pjoin('./output', f'{args.function}.pkl'))
    time_elapsed = time.time() - start_time
    fig = plt.figure()
    x1 = torch.arange(len(train_loss))
    plt.title('Training and validation loss', color='black')
    plt.xlabel('epoch')
    plt.ylabel('loss')
    plt.plot(x1, train_loss, color='r', label='train')
    plt.plot(x1, val_loss, color='b', label='val')
    fig.legend()
    fig.savefig(pjoin('./loss_curve', 'Train_Val_loss.png'))
    print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
    print('Best validation loss: {:.4f}'.format(best_loss))
    print('Training model finished!')
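
# Learning-rate schedule reference (worked example with the defaults in main.py:
# lr=1e-3, lr_step=50, lr_gamma=0.5): StepLR halves the learning rate every 50
# epochs, i.e. lr(epoch) = 1e-3 * 0.5 ** (epoch // 50):
#   epochs 0-49: 1e-3,  50-99: 5e-4,  100-149: 2.5e-4, ...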
--------------------------------------------------------------------------------
/models/transformer.py:
--------------------------------------------------------------------------------
import math
import torch
import torch.nn as nn
import numpy as np
import torch.nn.functional as F


class PositionalEncoding(nn.Module):

    def __init__(self, d_model, dropout=0.1, max_len=100):
        super(PositionalEncoding, self).__init__()
        self.dropout = nn.Dropout(p=dropout)

        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        # for an odd d_model, drop the last cosine column so the shapes match
        pe[:, 1::2] = torch.cos(position * div_term) if d_model % 2 == 0 else torch.cos(position * div_term)[:, :-1]
        pe = pe.unsqueeze(0).transpose(0, 1)
        self.register_buffer('pe', pe)

    def forward(self, x):
        # x: [seq_len, batch_size, d_model]; add the fixed sinusoidal table
        x = x + self.pe[:x.size(0), :]
        return self.dropout(x)
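
# --- Editorial sketch (not part of the original file) ---
# A hypothetical smoke test of the module above: it adds a fixed sinusoidal
# table to the first seq_len positions of a [seq_len, batch, d_model] input,
# leaving the shape unchanged.

def _positional_encoding_demo():
    pos = PositionalEncoding(d_model=64, dropout=0.0, max_len=100)
    x = torch.zeros(20, 8, 64)       # [seq_len, batch, d_model]
    out = pos(x)
    assert out.shape == (20, 8, 64)  # shape is unchanged
    # with a zero input and zero dropout, the output is the sinusoid table itself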


class GConv(nn.Module):
    # Spectral-based graph convolution function.
    # x: tensor, [batch_size, c_in, time_step, n_route].
    # theta: tensor, [ks*c_in, c_out], trainable kernel parameters.
    # ks: int, kernel size of graph convolution.
    # c_in: int, size of input channel.
    # c_out: int, size of output channel.
    # return: tensor, [batch_size, c_out, time_step, n_route].

    def __init__(self, ks, c_in, c_out, graph_kernel):
        super(GConv, self).__init__()
        self.c_in = c_in
        self.c_out = c_out
        self.ks = ks
        self.graph_kernel = graph_kernel
        self.theta = nn.Linear(ks*c_in, c_out)

    def forward(self, x):
        # graph kernel: tensor, [n_route, ks*n_route]
        kernel = self.graph_kernel
        # time_step, n_route
        _, _, t, n = x.shape
        # x: [batch_size, c_in, time_step, n_route] -> [batch_size, time_step, c_in, n_route]
        x_tmp = x.transpose(1, 2).contiguous()
        # x_ker = x_tmp * ker -> [batch_size, time_step, c_in, ks*n_route]
        x_ker = torch.matmul(x_tmp, kernel)
        # -> [batch_size, time_step, c_in*ks, n_route] -> [batch_size, time_step, n_route, c_in*ks]
        x_ker = x_ker.reshape(-1, t, self.c_in * self.ks, n).transpose(2, 3)
        # -> [batch_size, time_step, n_route, c_out]
        x_fig = self.theta(x_ker)
        # -> [batch_size, c_out, time_step, n_route]
        return x_fig.permute(0, 3, 1, 2).contiguous()


class TransformerModel(nn.Module):
    def __init__(self, ninp, nhead, nhid, nlayers, dropout=0.1):
        super(TransformerModel, self).__init__()
        from torch.nn import TransformerEncoder, TransformerDecoder, TransformerEncoderLayer, TransformerDecoderLayer
        self.model_type = 'Transformer'
        self.pos_encoder = PositionalEncoding(ninp, dropout)
        encoder_layers = TransformerEncoderLayer(ninp, nhead, nhid, dropout)
        self.transformer_encoder = TransformerEncoder(encoder_layers, nlayers)
        decoder_layers = TransformerDecoderLayer(ninp, nhead, nhid, dropout)
        self.decoder = TransformerDecoder(decoder_layers, nlayers)
        self.ninp = ninp

    def _generate_square_subsequent_mask(self, sz):
        # causal mask: position i may only attend to positions <= i
        mask = (torch.triu(torch.ones(sz, sz)) == 1).transpose(0, 1)
        mask = mask.float().masked_fill(mask == 0, float('-inf')).masked_fill(mask == 1, float(0.0))
        return mask

    def forward(self, src, tar, tgt_mask=None):
        if tgt_mask is None:
            device = tar.device
            tgt_mask = self._generate_square_subsequent_mask(tar.shape[0]).to(device)

        src = self.pos_encoder(src)
        tar = self.pos_encoder(tar)
        memory = self.transformer_encoder(src)
        output = self.decoder(tar, memory, tgt_mask)
        return output


class ShiftTransformer(nn.Module):
    def __init__(self, ninp, nhead, nhid, nlayers, dropout=0.1):
        super(ShiftTransformer, self).__init__()
        self.fc_1 = nn.Linear(1, ninp)
        self.fc_2 = nn.Linear(ninp, 1)
        self.transformer = TransformerModel(ninp, nhead, nhid, nlayers, dropout)

    def forward(self, src, tar, tgt_mask=None):
        _, b, nodes = src.shape
        src = src.reshape(-1, b*nodes, 1)
        tar = tar.reshape(-1, b*nodes, 1)
        src = self.fc_1(src)
        tar = self.fc_1(tar)
        # forward the caller's mask instead of silently discarding it
        output = self.transformer(src, tar, tgt_mask)
        output = self.fc_2(output)
        return output.reshape(-1, b, nodes)


class GCNShiftTransformer(nn.Module):
    def __init__(self, ks, graph_kernel, ninp, nhead, nhid, nlayers, dropout=0.1):
        super(GCNShiftTransformer, self).__init__()
        self.fc_1 = nn.Linear(ninp, 1)
        self.gcn_1 = GConv(ks, 1, ninp, graph_kernel)
        self.transformer = TransformerModel(ninp, nhead, nhid, nlayers, dropout)
        self.ninp = ninp

    def forward(self, src, tar):
        _, b, nodes = src.shape
        src = src.view(-1, b, nodes, 1).permute(1, 3, 0, 2)
        tar = tar.view(-1, b, nodes, 1).permute(1, 3, 0, 2)
        # lift each scalar reading to ninp channels with a graph convolution
        src = self.gcn_1(src)
        tar = self.gcn_1(tar)
        src = src.permute(2, 0, 3, 1).reshape(-1, b * nodes, self.ninp)
        tar = tar.permute(2, 0, 3, 1).reshape(-1, b * nodes, self.ninp)
        output = self.transformer(src, tar)
        output = self.fc_1(output)
        return output.reshape(-1, b, nodes)
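
# --- Editorial sketch (not part of the original file) ---
# A hypothetical shape check for ShiftTransformer: every node series is folded
# into the batch dimension, so the output has one value per target step, per
# batch element, per node.

def _shift_transformer_demo():
    model = ShiftTransformer(ninp=8, nhead=2, nhid=16, nlayers=1, dropout=0.0)
    src = torch.randn(12, 4, 5)    # [n_his, batch_size, n_nodes]
    tar = torch.randn(6, 4, 5)     # [n_pred, batch_size, n_nodes]
    out = model(src, tar)
    assert out.shape == (6, 4, 5)  # one prediction per target step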
--------------------------------------------------------------------------------
/output/fc.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/output/fc.pkl
--------------------------------------------------------------------------------
/picture/single_n_pred18_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_n_pred18_node0.png
--------------------------------------------------------------------------------
/picture/single_point0_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point0_node0.png
--------------------------------------------------------------------------------
/picture/single_point10_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point10_node0.png
--------------------------------------------------------------------------------
/picture/single_point11_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point11_node0.png
--------------------------------------------------------------------------------
/picture/single_point12_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point12_node0.png
--------------------------------------------------------------------------------
/picture/single_point13_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point13_node0.png
--------------------------------------------------------------------------------
/picture/single_point14_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point14_node0.png
--------------------------------------------------------------------------------
/picture/single_point15_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point15_node0.png
--------------------------------------------------------------------------------
/picture/single_point16_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point16_node0.png
--------------------------------------------------------------------------------
/picture/single_point17_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point17_node0.png
--------------------------------------------------------------------------------
/picture/single_point1_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point1_node0.png
--------------------------------------------------------------------------------
/picture/single_point2_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point2_node0.png
--------------------------------------------------------------------------------
/picture/single_point3_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point3_node0.png
--------------------------------------------------------------------------------
/picture/single_point4_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point4_node0.png
--------------------------------------------------------------------------------
/picture/single_point5_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point5_node0.png
--------------------------------------------------------------------------------
/picture/single_point6_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point6_node0.png
--------------------------------------------------------------------------------
/picture/single_point7_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point7_node0.png
--------------------------------------------------------------------------------
/picture/single_point8_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point8_node0.png
--------------------------------------------------------------------------------
/picture/single_point9_node0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/VincLee8188/Spatio-temporal-forecasting-PyTorch/226e216360ade35c90ecb36c0285f1ac501a3e1a/picture/single_point9_node0.png
--------------------------------------------------------------------------------
/utils/math_graph.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd
from scipy.sparse.linalg import eigs


def scaled_laplacian(wa):
    """
    Normalized graph Laplacian function.
    :param wa: np.ndarray, [n_well, n_well], weighted adjacency matrix of G.
    :return: np.matrix, [n_well, n_well].
    """
    # d -> diagonal degree matrix
    n, d = np.shape(wa)[0], np.sum(wa, axis=1)
    # la -> normalized graph Laplacian
    la = -wa
    la[np.diag_indices_from(la)] = d
    for i in range(n):
        for j in range(n):
            if (d[i] > 0) and (d[j] > 0):
                la[i, j] = la[i, j] / np.sqrt(d[i] * d[j])
    # rescale so the eigenvalues lie in [-1, 1], as Chebyshev polynomials require
    lambda_max = eigs(la, k=1, which='LR')[0][0].real
    return np.mat(2 * la / lambda_max - np.identity(n))


def cheb_poly_approx(la, ks, n):
    """
    Chebyshev polynomials approximation function.
    :param la: np.matrix, [n_well, n_well], graph Laplacian.
    :param ks: int, kernel size of spatial convolution.
    :param n: int, size of graph.
    :return: np.ndarray, [n_well, ks * n_well].
    """
    la0, la1 = np.mat(np.identity(n)), np.mat(np.copy(la))

    if ks > 1:
        la_list = [np.copy(la0), np.copy(la1)]
        # Chebyshev recurrence: T_k(x) = 2x * T_{k-1}(x) - T_{k-2}(x)
        for i in range(ks - 2):
            la_n = np.mat(2 * la * la1 - la0)
            la_list.append(np.copy(la_n))
            la0, la1 = np.mat(np.copy(la1)), np.mat(np.copy(la_n))
        return np.concatenate(la_list, axis=-1)
    elif ks == 1:
        return np.asarray(la0)
    else:
        raise ValueError(f'ERROR: the size of the spatial kernel must be at least 1, but received {ks}')


def first_approx(W, n):
    """
    1st-order approximation function.
    :param W: np.ndarray, [n_well, n_well], weighted adjacency matrix of G.
    :param n: int, number of wells / size of graph.
    :return: np.ndarray, [n_well, n_well].
    """
    A = W + np.identity(n)
    d = np.sum(A, axis=1)
    sinvD = np.sqrt(np.mat(np.diag(d)).I)
    # refer to Eq.5
    return np.mat(np.identity(n) + sinvD * A * sinvD)


def weight_matrix(file_path, sigma2=0.6, epsilon=0.3, scaling=True):
    """
    Load weight matrix function.
    :param file_path: str, the path of the saved weight matrix file.
    :param sigma2: float, variance of the Gaussian kernel applied to wa.
    :param epsilon: float, threshold that controls the sparsity of wa.
    :param scaling: bool, whether to apply numerical scaling to wa.
    :return: np.ndarray, [n_well, n_well].
    """
    try:
        wa = pd.read_csv(file_path, header=None).values
    except FileNotFoundError:
        # re-raise instead of just printing, otherwise wa would be undefined below
        raise FileNotFoundError(f'ERROR: input file was not found in {file_path}.')

    # check whether wa is a 0/1 matrix.
    if set(np.unique(wa)) == {0, 1}:
        print('The input graph is a 0/1 matrix, set "scaling" to False.')
        scaling = False

    if scaling:
        n = wa.shape[0]
        wa = wa / 10000.  # change the scaling number if necessary
        wa2, wa_mask = wa * wa, np.ones([n, n]) - np.identity(n)
        # Gaussian kernel, thresholded by epsilon and with the diagonal masked out
        return np.exp(-wa2 / sigma2) * (np.exp(-wa2 / sigma2) >= epsilon) * wa_mask
    else:
        return wa
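
# --- Editorial sketch (not part of the original file) ---
# Hypothetical end-to-end use of the helpers above: build the Chebyshev graph
# kernel that GConv in models/transformer.py expects as its graph_kernel.

def _graph_kernel_demo():
    n, ks = 6, 3
    wa = np.random.rand(n, n)
    wa = (wa + wa.T) / 2                  # symmetric weighted adjacency
    np.fill_diagonal(wa, 0.0)             # no self-loops
    la = scaled_laplacian(wa)             # [n, n], eigenvalues in [-1, 1]
    kernel = cheb_poly_approx(la, ks, n)  # [n, ks * n]
    assert kernel.shape == (n, ks * n)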
--------------------------------------------------------------------------------
/utils/math_utils.py:
--------------------------------------------------------------------------------
import numpy as np
import torch


def z_score(x, mean, std):
    # Z-score normalization function: $z = (X - \mu) / \sigma$,
    # where z is the z-score, X is the value of the element,
    # $\mu$ is the population mean, and $\sigma$ is the standard deviation.
    # x: np.ndarray, input array to be normalized.
    # mean: float, the value of the mean.
    # std: float, the value of the standard deviation.
    # return: np.ndarray, z-score normalized array.
    return (x - mean) / std


def z_inverse(x, mean, std):
    # The inverse of function z_score().
    # x: np.ndarray or torch.Tensor, input to be recovered.
    # mean: float, the value of the mean.
    # std: float, the value of the standard deviation.
    # return: same type as x, recovered to the original scale.
    return x * std + mean


def mape(v, v_):
    # Mean absolute percentage error.
    # v: torch.Tensor, ground truth.
    # v_: torch.Tensor, prediction.
    # return: torch.Tensor, scalar, MAPE averaged over all elements of the input.
    # (the 1e-5 term guards against division by zero)
    return torch.mean(torch.abs((v_ - v) / (v + 1e-5)))


def mse(v, v_):
    # Mean squared error.
    # v: torch.Tensor, ground truth.
    # v_: torch.Tensor, prediction.
    # return: torch.Tensor, scalar, MSE averaged over all elements of the input.
    return torch.mean((v_ - v) ** 2)


def rmse(v, v_):
    # Root mean squared error.
    # v: torch.Tensor, ground truth.
    # v_: torch.Tensor, prediction.
    # return: torch.Tensor, scalar, RMSE averaged over all elements of the input.
    return torch.sqrt(torch.mean((v_ - v) ** 2))


def mae(v, v_):
    # Mean absolute error.
    # v: torch.Tensor, ground truth.
    # v_: torch.Tensor, prediction.
    # return: torch.Tensor, scalar, MAE averaged over all elements of the input.
    return torch.mean(torch.abs(v_ - v))


def evaluation(y, y_, x_stats):
    # Evaluation function: interface to calculate MAPE, MAE and MSE between ground truth and prediction.
    # Extended version: multi-step prediction can be calculated by self-calling.
    # y: torch.Tensor, ground truth.
    # y_: torch.Tensor, prediction.
    # x_stats: dict, parameters of the z-score (mean & std).
    # return: np.ndarray, averaged metric values.
    dim = len(y_.shape)

    if dim == 3:
        # single-step case
        v = z_inverse(y, x_stats['mean'], x_stats['std'])
        v_ = z_inverse(y_, x_stats['mean'], x_stats['std'])
        return np.array([mape(v, v_), mae(v, v_), mse(v, v_)])
    else:
        # multi-step case
        tmp_list = []
        # recursive call, one step at a time
        for i in range(y_.shape[0]):
            tmp_res = evaluation(y[i], y_[i], x_stats)
            tmp_list.append(tmp_res)
        return np.concatenate(tmp_list, axis=-1)
--------------------------------------------------------------------------------
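
# --- Editorial sketch (not part of the original file) ---
# Hypothetical round-trip check of the normalization helpers and metrics in
# utils/math_utils.py; the fake water levels below are illustrative only.

def _math_utils_demo():
    raw = np.random.rand(100, 5) * 3 + 40  # fake water levels in meters
    mean, std = np.mean(raw), np.std(raw)
    normed = z_score(raw, mean, std)       # zero mean, unit variance
    assert np.allclose(z_inverse(normed, mean, std), raw)

    truth = torch.rand(8, 6)
    pred = truth + 0.1 * torch.randn(8, 6)
    print(f'mae<{mae(truth, pred):.3f}> mape<{mape(truth, pred):.3f}> rmse<{rmse(truth, pred):.3f}>')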