├── CNN
│   └── cnn_encoder_test.py
├── DynNet
│   └── dynamic_net.py
├── Gradient
│   ├── nn_grad.py
│   └── poly_grad.py
├── MFNN
│   ├── mfdata.mat
│   ├── mfnn.jpg
│   ├── mfnn.py
│   ├── mfnn_grad.py
│   ├── plotting.m
│   ├── plotting_grad.m
│   ├── y_h.dat
│   ├── y_l.dat
│   └── y_test.dat
├── NN
│   └── nn.py
├── PINNs
│   ├── AC.mat
│   ├── ac_1d_td_fwd.py
│   ├── ac_2d_td.py
│   └── pred_res.mat
├── README.md
└── Utils
    └── plotting.py

--------------------------------------------------------------------------------
/CNN/cnn_encoder_test.py:
--------------------------------------------------------------------------------
"""
Minglang Yin, Brown University
minglang_yin@brown.edu

CNN encoder test
"""
import sys
sys.path.insert(0, '../')

import time

import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim


class ConvNet(nn.Module):
    def __init__(self):
        super(ConvNet, self).__init__()
        self.layer1 = nn.Sequential(
            nn.Conv2d(1, 1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        self.layer2 = nn.Sequential(
            nn.Conv2d(1, 1, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(1),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2, stride=2))
        # two stride-2 max pools shrink a 16x16 input to 4x4, so 1*4*4 = 16 features
        self.fc = nn.Linear(16, 10)

    def forward(self, x):
        out1 = self.layer1(x)
        out2 = self.layer2(out1)
        out3 = out2.reshape(out2.size(0), -1)
        out = self.fc(out3)
        return out


def to_numpy(input):
    if isinstance(input, torch.Tensor):
        return input.detach().cpu().numpy()
    elif isinstance(input, np.ndarray):
        return input
    else:
        raise TypeError('Unknown type of input, expected torch.Tensor or '
                        'np.ndarray, but got {}'.format(type(input)))


def main():
    ## Hyperparameters
    learning_rate = 0.001
    cuda = 1
    epochs = 1000
    device = torch.device(f"cuda:{cuda}" if torch.cuda.is_available() else "cpu")

    ## pre-process data: random input/target pair, just to exercise the network
    input_size = [1, 1, 16, 16]
    output_size = [10]
    input_tensor = torch.randn(input_size).to(device)
    output_tensor = torch.randn(output_size).to(device)

    ## model
    model = ConvNet().to(device)
    optimizer = optim.Adam(model.parameters(), lr=learning_rate)

    def train(epoch):
        model.train()
        def closure():
            optimizer.zero_grad()
            output_pred = model(input_tensor)
            loss = ((output_pred - output_tensor)**2).mean()
            loss.backward()
            return loss
        loss = optimizer.step(closure)
        loss_value = loss.item() if not isinstance(loss, float) else loss
        print(f'epoch {epoch}: loss {loss_value:.6f}')

    print('start training...')
    tic = time.time()
    for epoch in range(1, epochs + 1):
        train(epoch)
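    # Added sanity check (sketch, not in the original script): with a 16x16
    # input and two stride-2 max pools, the spatial size shrinks 16 -> 8 -> 4,
    # so the flattened feature size is 1*4*4 = 16, matching nn.Linear(16, 10).
    with torch.no_grad():
        feat = model.layer2(model.layer1(input_tensor))
        assert feat.reshape(feat.size(0), -1).size(1) == 16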
    output_pred = model(input_tensor)
    print(f'output_pred: {to_numpy(output_pred)}')
    print(f'output_train: {to_numpy(output_tensor)}')

if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/DynNet/dynamic_net.py:
--------------------------------------------------------------------------------
# Code in file nn/dynamic_net.py
import random
import torch

class DynamicNet(torch.nn.Module):
    def __init__(self, D_in, H, D_out):
        """
        In the constructor we construct three nn.Linear instances that we will use
        in the forward pass.
        """
        super(DynamicNet, self).__init__()
        self.input_linear = torch.nn.Linear(D_in, H)
        self.middle_linear = torch.nn.Linear(H, H)
        self.output_linear = torch.nn.Linear(H, D_out)

    def forward(self, x):
        """
        For the forward pass of the model, we randomly choose 0, 1, 2, or 3 and
        reuse the middle_linear Module that many times to compute hidden layer
        representations.

        Since each forward pass builds a dynamic computation graph, we can use
        normal Python control-flow operators like loops or conditional statements
        when defining the forward pass of the model.

        Here we also see that it is perfectly safe to reuse the same Module many
        times when defining a computational graph. This is a big improvement over
        Lua Torch, where each Module could be used only once.
        """
        h_relu = self.input_linear(x).clamp(min=0)
        for _ in range(random.randint(0, 3)):
            h_relu = self.middle_linear(h_relu).clamp(min=0)
        y_pred = self.output_linear(h_relu)
        return y_pred


# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10

# Create random Tensors to hold inputs and outputs.
x = torch.randn(N, D_in)
y = torch.randn(N, D_out)

# Construct our model by instantiating the class defined above.
model = DynamicNet(D_in, H, D_out)

# Construct our loss function and an optimizer. Training this strange model with
# vanilla stochastic gradient descent is tough, so we use momentum.
criterion = torch.nn.MSELoss(reduction='sum')
optimizer = torch.optim.SGD(model.parameters(), lr=1e-4, momentum=0.9)
for t in range(5000):
    # Forward pass: compute predicted y by passing x to the model.
    y_pred = model(x)

    # Compute and print loss.
    loss = criterion(y_pred, y)
    print(t, loss.item())

    # Zero gradients, perform a backward pass, and update the weights.
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
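
# Added check (sketch, not in the original file): reusing middle_linear up to
# three times per forward pass does not duplicate its parameters -- it is a
# single Module instance, so the model holds exactly the parameters of its
# three Linear layers.
n_params = sum(p.numel() for p in model.parameters())
assert n_params == (D_in*H + H) + (H*H + H) + (H*D_out + D_out)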
--------------------------------------------------------------------------------
/Gradient/nn_grad.py:
--------------------------------------------------------------------------------
"""
Author: Minglang Yin

Exemplify the routine for computing the gradient of a neural-network output
w.r.t. the network input.

Note: gradients returned by torch.autograd.grad do not accumulate over
repeated calls.
"""
import torch
import torch.nn as nn
import numpy as np
import torch.optim as optim

torch.manual_seed(123456)
np.random.seed(0)

class model(nn.Module):
    def __init__(self):
        super(model, self).__init__()
        self.layer = nn.Sequential(
            nn.Linear(1, 20),
            nn.Tanh(),
            nn.Linear(20, 20),
            nn.Tanh(),
            nn.Linear(20, 1)
        )

    def forward(self, x):
        return self.layer(x)

goal = torch.tensor([[1.0], [1.0]])
x_array = [[2.0], [5.0]]
x = torch.tensor(x_array, requires_grad=True)

net = model()
optimizer = optim.Adam(net.parameters(), lr=0.01)

for i in range(0, 100):
    optimizer.zero_grad()
    y = net(x)

    # torch.autograd.grad does not accumulate: calling it twice returns the
    # same gradient. tmp is the seed vector of the vector-Jacobian product.
    tmp = torch.ones(2, 1, dtype=torch.float32)
    x_grad = torch.autograd.grad(y, x, tmp, retain_graph=True)
    print('1. x_gradient = ', x_grad)
    x_grad = torch.autograd.grad(y, x, tmp, retain_graph=True)
    print('2. x_gradient = ', x_grad)

    loss = ((y - goal)**2).mean()
    loss.backward()
    # x.grad, by contrast, does accumulate across iterations: optimizer.zero_grad()
    # only zeroes the network parameters, not the leaf tensor x.
    print('backward:', x.grad)
    optimizer.step()

print('network prediction:', net(x))
--------------------------------------------------------------------------------
/Gradient/poly_grad.py:
--------------------------------------------------------------------------------
"""
Author: Minglang Yin

Calculate the gradient of a polynomial with .backward().
"""
import torch
import torch.optim as optim

torch.manual_seed(123456)

def poly(x):
    return x[0]**2 + 5*x[1] + 2

def poly_2(x):
    return x*3 + 2

x_array = [2.0, 3.0]
x = torch.tensor(x_array, requires_grad=True)
optimizer = optim.Adam([x], lr=0.01)

optimizer.zero_grad()
y = poly(x)
y.backward(retain_graph=True)
print('x grad is ', x.grad)     # dy/dx = [2*x0, 5] = [4, 5]
optimizer.zero_grad()
y2 = poly_2(y)
y2.backward()
print('x grad 2 is ', x.grad)   # dy2/dx = 3*dy/dx = [12, 15]
--------------------------------------------------------------------------------
/MFNN/mfdata.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MinglangYin/PyTorchTutorial/8ad44ebfbd8c88b5a74ee25909a6ded6656c5d14/MFNN/mfdata.mat
--------------------------------------------------------------------------------
/MFNN/mfnn.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MinglangYin/PyTorchTutorial/8ad44ebfbd8c88b5a74ee25909a6ded6656c5d14/MFNN/mfnn.jpg
--------------------------------------------------------------------------------
/MFNN/mfnn.py:
--------------------------------------------------------------------------------
"""
Author: Minglang Yin

Multi-fidelity Neural Network (MFNN)
High-fidelity data: y_h = (6*x-2)**2*sin(12*x-4)
Low-fidelity data:  y_l = A*(6*x-2)**2*sin(12*x-4) + B*(x-0.5) + C, with A = 0.5, B = 10, C = -5

Note:
    no gradient loss
    no regularization

To do: add plotting at the end
"""
import torch
import torch.nn as nn
import numpy as np
import scipy.io
import time

torch.manual_seed(123456)
np.random.seed(0)
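
# Added helper (sketch): the exact grids stored in mfdata.mat are not
# documented in this repo, so the grids below are illustrative assumptions;
# the formulas match the module docstring and plotting_grad.m. The function
# is never called.
def make_mfdata(path='mfdata.mat'):
    y_h = lambda s: (6*s - 2)**2*np.sin(12*s - 4)
    y_l = lambda s: 0.5*y_h(s) + 10*(s - 0.5) - 5
    y_l_x = lambda s: 6*(6*s - 2)*np.sin(12*s - 4) + 6*((6*s - 2)**2)*np.cos(12*s - 4) + 10
    x = np.linspace(0, 1, 1001)          # dense test grid (assumed)
    xl = np.linspace(0, 1, 41)           # low-fidelity training grid (assumed)
    xh = np.array([0.0, 0.4, 0.6, 1.0])  # high-fidelity training grid (assumed)
    scipy.io.savemat(path, {'x': x, 'y': y_h(x), 'xl': xl, 'yl': y_l(xl),
                            'yl_x': y_l_x(xl), 'xh': xh, 'yh': y_h(xh)})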

class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.net_l = nn.Sequential()
        self.net_l.add_module('layer_1', nn.Linear(1, 20))
        self.net_l.add_module('layer_2', nn.Tanh())
        self.net_l.add_module('layer_3', nn.Linear(20, 20))
        self.net_l.add_module('layer_4', nn.Tanh())
        self.net_l.add_module('layer_5', nn.Linear(20, 1))

        self.net_h_nl = nn.Sequential()
        self.net_h_nl.add_module('layer_1', nn.Linear(2, 10))
        self.net_h_nl.add_module('layer_2', nn.Tanh())
        self.net_h_nl.add_module('layer_3', nn.Linear(10, 10))
        self.net_h_nl.add_module('layer_4', nn.Tanh())
        self.net_h_nl.add_module('layer_5', nn.Linear(10, 1))

        self.net_h_l = nn.Sequential()
        self.net_h_l.add_module('layer_1', nn.Linear(2, 1))

    def forward(self, x_l, x_h):
        # Note: only the linear correction net_h_l is used in this variant;
        # net_h_nl is defined but unused here (the nonlinear correction is
        # exercised in mfnn_grad.py).
        y_l = self.net_l(x_l)
        y_l_h = self.net_l(x_h)
        y_h = self.net_h_l(torch.cat((y_l_h, x_h), dim=1))
        return y_l, y_h


def to_numpy(input):
    if isinstance(input, torch.Tensor):
        return input.detach().cpu().numpy()
    elif isinstance(input, np.ndarray):
        return input
    else:
        raise TypeError('Unknown type of input, expected torch.Tensor or '
                        'np.ndarray, but got {}'.format(type(input)))

def data_loader(device):
    Nl = 21
    Nh = 4

    data = scipy.io.loadmat('./mfdata.mat')
    x = data['x'].flatten()[:, None]
    y = data['y'].flatten()[:, None]
    xl = data['xl'].flatten()[:, None]
    yl = data['yl'].flatten()[:, None]
    yl_x = data['yl_x'].flatten()[:, None]
    xh = data['xh'].flatten()[:, None]
    yh = data['yh'].flatten()[:, None]

    x_test = torch.tensor(x, dtype=torch.float32).to(device)
    y_test = torch.tensor(y, dtype=torch.float32).to(device)

    # training data for low fidelity
    id_l = np.random.choice(xl.shape[0], Nl, replace=False)
    x_train_l = torch.tensor(xl[id_l], requires_grad=True, dtype=torch.float32).to(device)
    y_train_l = torch.tensor(yl[id_l], dtype=torch.float32).to(device)
    y_train_l_grad = torch.tensor(yl_x[id_l], dtype=torch.float32).to(device)

    # training data for high fidelity
    id_h = np.random.choice(xh.shape[0], Nh, replace=False)
    x_train_h = torch.tensor(xh[id_h], requires_grad=True, dtype=torch.float32).to(device)
    y_train_h = torch.tensor(yh[id_h], dtype=torch.float32).to(device)

    return x_test, y_test, x_train_l, y_train_l, y_train_l_grad, x_train_h, y_train_h

def main():
    ## parameters
    lr = 0.001
    device = torch.device("cpu")
    epochs = 50000

    ## load data
    x_test, y_test, x_train_l, y_train_l, y_train_l_grad, x_train_h, y_train_h = data_loader(device)

    ## initialize model
    model = Model().to(device)

    # loss and optimizer
    optimizer = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=1e-5)

    # training
    def train(epoch):
        model.train()
        def closure():
            optimizer.zero_grad()
            y_pred_l, y_pred_h = model(x_train_l, x_train_h)
            loss = ((y_pred_l - y_train_l)**2).mean() + \
                   ((y_pred_h - y_train_h)**2).mean()
            loss.backward()
            return loss
        loss = optimizer.step(closure)
        loss_value = loss.item() if not isinstance(loss, float) else loss
        print(f'epoch {epoch}: loss {loss_value:.6f}')

    print('start training...')
    tic = time.time()
    for epoch in range(1, epochs + 1):
        train(epoch)
    toc = time.time()
    print(f'total training time: {toc-tic}')
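    # Added check (sketch, not in the original script): relative L2 error of
    # the high-fidelity prediction on the dense test grid.
    with torch.no_grad():
        _, y_h_pred_test = model(x_test, x_test)
        rel_l2 = torch.norm(y_h_pred_test - y_test) / torch.norm(y_test)
    print(f'relative L2 error (high fidelity): {rel_l2.item():.3e}')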
    # testing and save
    y_pred_l_test, y_pred_h = model(x_test, x_test)
    x_test = to_numpy(x_test)
    y_pred_h = to_numpy(y_pred_h)
    y_test = to_numpy(y_test)
    y_pred_l_test = to_numpy(y_pred_l_test)
    y_test = np.concatenate((x_test, y_pred_h, y_test, y_pred_l_test), axis=1)

    y_pred_l, y_pred_h = model(x_train_l, x_train_h)
    y_pred_l = to_numpy(y_pred_l)
    x_train_l = to_numpy(x_train_l)
    y_l = np.concatenate((x_train_l, y_pred_l, to_numpy(y_train_l)), axis=1)
    y_pred_h = to_numpy(y_pred_h)
    x_train_h = to_numpy(x_train_h)
    y_h = np.concatenate((x_train_h, y_pred_h, to_numpy(y_train_h)), axis=1)

    np.savetxt('y_test.dat', y_test)
    np.savetxt('y_l.dat', y_l)
    np.savetxt('y_h.dat', y_h)

if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/MFNN/mfnn_grad.py:
--------------------------------------------------------------------------------
"""
Author: Minglang Yin

Multi-fidelity Neural Network (MFNN) with gradient constraint
High-fidelity data: y_h = (6*x-2)**2*sin(12*x-4)
Low-fidelity data:  y_l = A*(6*x-2)**2*sin(12*x-4) + B*(x-0.5) + C, with A = 0.5, B = 10, C = -5

Note:
    no regularization
    large value of loss

To do:
    add plotting at the end
    rewrite the initialization framework
"""
import torch
import torch.nn as nn
import numpy as np
import scipy.io
import time

from torch.autograd import grad

torch.manual_seed(13572)
np.random.seed(0)

class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.net_l = nn.Sequential()
        self.net_l.add_module('layer_1', nn.Linear(1, 20))
        self.net_l.add_module('layer_2', nn.Tanh())
        self.net_l.add_module('layer_3', nn.Linear(20, 20))
        self.net_l.add_module('layer_4', nn.Tanh())
        self.net_l.add_module('layer_5', nn.Linear(20, 1))

        self.net_h_nl = nn.Sequential()
        self.net_h_nl.add_module('layer_1', nn.Linear(2, 10))
        self.net_h_nl.add_module('layer_2', nn.Tanh())
        self.net_h_nl.add_module('layer_3', nn.Linear(10, 10))
        self.net_h_nl.add_module('layer_4', nn.Tanh())
        self.net_h_nl.add_module('layer_5', nn.Linear(10, 1))

        self.net_h_l = nn.Sequential()
        self.net_h_l.add_module('layer_1', nn.Linear(2, 1))

        ## Xavier initialization of the Linear layers
        torch.nn.init.xavier_normal_(self.net_l.layer_1.weight)
        torch.nn.init.xavier_normal_(self.net_l.layer_3.weight)
        torch.nn.init.xavier_normal_(self.net_l.layer_5.weight)
        torch.nn.init.xavier_normal_(self.net_h_nl.layer_1.weight)
        torch.nn.init.xavier_normal_(self.net_h_nl.layer_3.weight)
        torch.nn.init.xavier_normal_(self.net_h_nl.layer_5.weight)
        torch.nn.init.xavier_normal_(self.net_h_l.layer_1.weight)

    def forward(self, x_l, x_h):
        y_l = self.net_l(x_l)
        y_l_h = self.net_l(x_h)
        y_h_nl = self.net_h_nl(torch.cat((y_l_h, x_h), dim=1))
        y_h_l = self.net_h_l(torch.cat((y_l_h, x_h), dim=1))
        y_h = y_h_nl + y_h_l + y_l_h
        return y_l, y_h
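
# Note (added): the high-fidelity surrogate above is the sum of a nonlinear
# correction, a linear correction, and the low-fidelity prediction itself,
#   y_h(x) = NN_nl([y_l(x), x]) + NN_l([y_l(x), x]) + y_l(x),
# so the sparse high-fidelity data only has to learn the discrepancy from y_l.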

def to_numpy(input):
    if isinstance(input, torch.Tensor):
        return input.detach().cpu().numpy()
    elif isinstance(input, np.ndarray):
        return input
    else:
        raise TypeError('Unknown type of input, expected torch.Tensor or '
                        'np.ndarray, but got {}'.format(type(input)))

def data_loader(device):
    Nl = 15
    Nh = 4

    data = scipy.io.loadmat('./mfdata.mat')
    x = data['x'].flatten()[:, None]
    y = data['y'].flatten()[:, None]
    xl = data['xl'].flatten()[:, None]
    yl = data['yl'].flatten()[:, None]
    yl_x = data['yl_x'].flatten()[:, None]
    xh = data['xh'].flatten()[:, None]
    yh = data['yh'].flatten()[:, None]

    x_test = torch.tensor(x, dtype=torch.float32).to(device)
    y_test = torch.tensor(y, dtype=torch.float32).to(device)

    # training data for low fidelity
    id_l = np.random.choice(xl.shape[0], Nl, replace=False)
    x_train_l = torch.tensor(xl[id_l], requires_grad=True, dtype=torch.float32).to(device)
    y_train_l = torch.tensor(yl[id_l], dtype=torch.float32).to(device)
    y_train_l_grad = torch.tensor(yl_x[id_l], dtype=torch.float32).to(device)

    # training data for high fidelity (indices must be drawn from xh, not from
    # the dense test grid x)
    id_h = np.random.choice(xh.shape[0], Nh, replace=False)
    x_train_h = torch.tensor(xh[id_h], requires_grad=True, dtype=torch.float32).to(device)
    y_train_h = torch.tensor(yh[id_h], dtype=torch.float32).to(device)

    return x_test, y_test, x_train_l, y_train_l, y_train_l_grad, x_train_h, y_train_h

def main():
    ## parameters
    lr = 0.001
    device = torch.device("cpu")
    epochs = 50000

    ## load data
    x_test, y_test, x_train_l, y_train_l, y_train_l_grad, x_train_h, y_train_h = data_loader(device)

    ## initialize model
    model = Model().to(device)

    # loss and optimizer
    optimizer = torch.optim.Adam(model.parameters(), lr=lr, weight_decay=1e-5)
    # optimizer = torch.optim.LBFGS(model.parameters(), lr=lr)

    # training
    def train(epoch):
        model.train()
        def closure():
            optimizer.zero_grad()
            y_pred_l, y_pred_h = model(x_train_l, x_train_h)

            # seed vector of the vector-Jacobian product, one entry per
            # low-fidelity point; create_graph=True keeps the gradient in the
            # computation graph so the gradient-constraint term below actually
            # trains the network
            tmp = torch.ones_like(y_pred_l)
            y_pred_l_grad = grad(y_pred_l, x_train_l, tmp, create_graph=True)

            loss = ((y_pred_l - y_train_l)**2).mean() + \
                   ((y_pred_h - y_train_h)**2).mean() + \
                   ((y_pred_l_grad[0] - y_train_l_grad)**2).mean()

            loss.backward()
            return loss
        loss = optimizer.step(closure)
        loss_value = loss.item() if not isinstance(loss, float) else loss
        print(f'epoch {epoch}: loss {loss_value:.6f}')

    print('start training...')
    tic = time.time()
    for epoch in range(1, epochs + 1):
        train(epoch)
    toc = time.time()
    print(f'total training time: {toc-tic}')

    # testing and save
    y_pred_l_test, y_pred_h = model(x_test, x_test)
    x_test = to_numpy(x_test)
    y_pred_h = to_numpy(y_pred_h)
    y_test = to_numpy(y_test)
    y_pred_l_test = to_numpy(y_pred_l_test)
    y_test = np.concatenate((x_test, y_pred_h, y_test, y_pred_l_test), axis=1)

    y_pred_l, y_pred_h = model(x_train_l, x_train_h)
    ## save low-fidelity gradient
    tmp = torch.ones_like(y_pred_l)
    y_pred_l_grad = grad(y_pred_l, x_train_l, tmp, retain_graph=True)
    y_train_l_grad = to_numpy(y_train_l_grad)
    y_pred_l_grad = to_numpy(y_pred_l_grad[0])
    x_train_l = to_numpy(x_train_l)
    y_l_grad = np.concatenate((x_train_l, y_pred_l_grad, y_train_l_grad), axis=1)
    ## save low-fidelity data
    y_pred_l = to_numpy(y_pred_l)
    y_l = np.concatenate((x_train_l, y_pred_l, to_numpy(y_train_l)), axis=1)
    ## save high-fidelity data
    y_pred_h = to_numpy(y_pred_h)
    x_train_h = to_numpy(x_train_h)
    y_h = np.concatenate((x_train_h, y_pred_h, to_numpy(y_train_h)), axis=1)
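    # Column layout of the saved files (used by plotting.m / plotting_grad.m):
    #   y_test.dat   : x, y_h prediction, y reference, y_l prediction
    #   y_l.dat      : x_train_l, y_l prediction, y_l training target
    #   y_h.dat      : x_train_h, y_h prediction, y_h training target
    #   y_l_grad.dat : x_train_l, dy_l/dx prediction, dy_l/dx training target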
    np.savetxt('y_test.dat', y_test)
    np.savetxt('y_l.dat', y_l)
    np.savetxt('y_h.dat', y_h)
    np.savetxt('y_l_grad.dat', y_l_grad)


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/MFNN/plotting.m:
--------------------------------------------------------------------------------
clear all
close all
clc

test = load('y_test.dat');
low = load('y_l.dat');
high = load('y_h.dat');

figure()
hold on
scatter(test(:,1), test(:,2), '.');
scatter(test(:,1), test(:,3), '.');
scatter(test(:,1), test(:,4), '.');

scatter(low(:,1), low(:,2), 'o');
scatter(high(:,1), high(:,2), 'x');

scatter(low(:,1), low(:,3), 'o');
scatter(high(:,1), high(:,3), 'x');
--------------------------------------------------------------------------------
/MFNN/plotting_grad.m:
--------------------------------------------------------------------------------
clear all
close all
clc

y_low = @(x) 0.5.*(6.*x-2).^2.*sin(12.*x-4)+10.*(x-0.5)-5;
y_high = @(x) (6.*x-2).^2.*sin(12.*x-4);
y_low_grad = @(x) sin(12.*x-4).*(6.*x-2).*6 + 6.*(6.*x-2).^2.*cos(12.*x-4) + 10;
x = 0:0.01:1;

test = load('y_test.dat');
low = load('y_l.dat');
high = load('y_h.dat');
grad = load('y_l_grad.dat');

figure()
hold on
scatter(test(:,1), test(:,2), '.');
scatter(test(:,1), test(:,3), '.');
scatter(test(:,1), test(:,4), '.');

scatter(low(:,1), low(:,2), 'o');
scatter(low(:,1), low(:,3), 'o');

scatter(high(:,1), high(:,2), 'x');
scatter(high(:,1), high(:,3), 'x');

x_grad = grad(:,1);

scatter(grad(:,1), grad(:,2));
scatter(grad(:,1), grad(:,3));
% scatter(x_grad, y_low_grad(x_grad));

% echo training-target vs. analytic low-fidelity gradients for comparison
grad(:,3)
y_low_grad(x_grad)

plot(x, y_low(x))
plot(x, y_high(x))

% legend('yh pred test', 'y test real', 'yl pred test', 'yl pred', 'yl train', 'yh pred', 'yh train', 'yl grad pred', 'yl grad train', 'y_low_real', 'y_high_real')
% legend('yh pred test', 'y test real', 'yl pred test', 'yl pred', 'yl train', 'yh pred', 'yh train', 'y low real', 'y high real')
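
# Added sketch (not a file in the repo): a rough matplotlib equivalent of
# plotting.m, addressing the "add plotting" TODO in mfnn.py. It assumes the
# .dat files produced by mfnn.py, with the column layout noted in mfnn_grad.py.
import numpy as np
import matplotlib.pyplot as plt

test = np.loadtxt('y_test.dat')
low = np.loadtxt('y_l.dat')
high = np.loadtxt('y_h.dat')

plt.plot(test[:, 0], test[:, 2], 'k-', label='y reference')
plt.plot(test[:, 0], test[:, 1], 'r--', label='y_h prediction')
plt.plot(test[:, 0], test[:, 3], 'b:', label='y_l prediction')
plt.scatter(low[:, 0], low[:, 2], marker='o', label='low-fidelity train')
plt.scatter(high[:, 0], high[:, 2], marker='x', label='high-fidelity train')
plt.legend()
plt.show()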
--------------------------------------------------------------------------------
/MFNN/y_h.dat:
--------------------------------------------------------------------------------
4.000000059604644775e-01 1.161918640136718750e-01 1.147769764065742493e-01
0.000000000000000000e+00 3.025154113769531250e+00 3.027209997177124023e+00
6.000000238418579102e-01 -1.487121582031250000e-01 -1.494378000497817993e-01
1.000000000000000000e+00 1.582911586761474609e+01 1.582973194122314453e+01
--------------------------------------------------------------------------------
/MFNN/y_l.dat:
--------------------------------------------------------------------------------
4.000000059604644775e-01 -5.939506530761718750e+00 -5.942611694335937500e+00
6.499999761581420898e-01 -4.602056503295898438e+00 -4.604403495788574219e+00
1.000000000000000000e+00 7.921420097351074219e+00 7.914865970611572266e+00
5.000000074505805969e-02 -9.158716201782226562e+00 -9.130743026733398438e+00
5.500000119209289551e-01 -4.068767547607421875e+00 -4.064401149749755859e+00
5.000000000000000000e-01 -4.533351898193359375e+00 -4.545351505279541016e+00
6.999999880790710449e-01 -5.305346488952636719e+00 -5.302876949310302734e+00
8.999999761581420898e-01 1.856894731521606445e+00 1.855975151062011719e+00
3.000000119209289551e-01 -7.019420623779296875e+00 -7.007788181304931641e+00
9.499999880790710449e-01 5.651111602783203125e+00 5.651657104492187500e+00
2.000000029802322388e-01 -8.324061393737792969e+00 -8.319863319396972656e+00
1.000000014901161194e-01 -9.297140121459960938e+00 -9.328288078308105469e+00
2.500000000000000000e-01 -7.593698501586914062e+00 -7.605184078216552734e+00
8.000000119209289551e-01 -4.474843978881835938e+00 -4.474565029144287109e+00
4.499999880790710449e-01 -5.279835700988769531e+00 -5.258564949035644531e+00
3.499999940395355225e-01 -6.500290870666503906e+00 -6.499006748199462891e+00
8.500000238418579102e-01 -1.899632692337036133e+00 -1.899244546890258789e+00
1.500000059604644775e-01 -9.000183105468750000e+00 -8.989140510559082031e+00
0.000000000000000000e+00 -8.457743644714355469e+00 -8.486394882202148438e+00
7.500000000000000000e-01 -5.495387077331542969e+00 -5.496638298034667969e+00
6.000000238418579102e-01 -4.082909584045410156e+00 -4.074718952178955078e+00
--------------------------------------------------------------------------------
/MFNN/y_test.dat:
--------------------------------------------------------------------------------
(hundreds of rows of numeric test data; columns: x, y_h prediction, y reference,
y_l prediction; the file is truncated in this dump, so its contents are omitted)
--------------------------------------------------------------------------------
-6.172821044921875000e+00 382 | 3.810000121593475342e-01 5.056381225585937500e-02 4.427738115191459656e-02 -6.161485671997070312e+00 383 | 3.819999992847442627e-01 5.339241027832031250e-02 4.701161757111549377e-02 -6.150116920471191406e+00 384 | 3.829999864101409912e-01 5.628204345703125000e-02 4.984893649816513062e-02 -6.138716697692871094e+00 385 | 3.840000033378601074e-01 5.923652648925781250e-02 5.279051512479782104e-02 -6.127285003662109375e+00 386 | 3.849999904632568359e-01 6.225967407226562500e-02 5.583747848868370056e-02 -6.115818023681640625e+00 387 | 3.860000073909759521e-01 6.535339355468750000e-02 5.899089574813842773e-02 -6.104315757751464844e+00 388 | 3.869999945163726807e-01 6.851387023925781250e-02 6.225177273154258728e-02 -6.092781066894531250e+00 389 | 3.880000114440917969e-01 7.173728942871093750e-02 6.562107801437377930e-02 -6.081212997436523438e+00 390 | 3.889999985694885254e-01 7.504081726074218750e-02 6.909969449043273926e-02 -6.069605827331542969e+00 391 | 3.899999856948852539e-01 7.841110229492187500e-02 7.268847525119781494e-02 -6.057965278625488281e+00 392 | 3.910000026226043701e-01 8.184432983398437500e-02 7.638819515705108643e-02 -6.046292304992675781e+00 393 | 3.919999897480010986e-01 8.536148071289062500e-02 8.019957691431045532e-02 -6.034577369689941406e+00 394 | 3.930000066757202148e-01 8.895111083984375000e-02 8.412328362464904785e-02 -6.022826194763183594e+00 395 | 3.939999938011169434e-01 9.260940551757812500e-02 8.815991878509521484e-02 -6.011040687561035156e+00 396 | 3.950000107288360596e-01 9.634590148925781250e-02 9.231001138687133789e-02 -5.999215126037597656e+00 397 | 3.959999978542327881e-01 1.001605987548828125e-01 9.657405316829681396e-02 -5.987351417541503906e+00 398 | 3.970000147819519043e-01 1.040534973144531250e-01 1.009524539113044739e-01 -5.975447654724121094e+00 399 | 3.980000019073486328e-01 1.080169677734375000e-01 1.054455563426017761e-01 -5.963508605957031250e+00 400 | 3.989999890327453613e-01 1.120719909667968750e-01 1.100536584854125977e-01 -5.951523780822753906e+00 401 | 4.000000059604644775e-01 1.161937713623046875e-01 1.147769764065742493e-01 -5.939505577087402344e+00 402 | 4.009999930858612061e-01 1.204032897949218750e-01 1.196156665682792664e-01 -5.927443504333496094e+00 403 | 4.020000100135803223e-01 1.246929168701171875e-01 1.245698332786560059e-01 -5.915340423583984375e+00 404 | 4.029999971389770508e-01 1.290645599365234375e-01 1.296394914388656616e-01 -5.903196334838867188e+00 405 | 4.040000140666961670e-01 1.335201263427734375e-01 1.348246186971664429e-01 -5.891010284423828125e+00 406 | 4.050000011920928955e-01 1.380596160888671875e-01 1.401250958442687988e-01 -5.878782272338867188e+00 407 | 4.059999883174896240e-01 1.426868438720703125e-01 1.455407440662384033e-01 -5.866510391235351562e+00 408 | 4.070000052452087402e-01 1.473922729492187500e-01 1.510713249444961548e-01 -5.854197502136230469e+00 409 | 4.079999923706054688e-01 1.521911621093750000e-01 1.567165553569793701e-01 -5.841839790344238281e+00 410 | 4.090000092983245850e-01 1.570796966552734375e-01 1.624760180711746216e-01 -5.829436302185058594e+00 411 | 4.099999964237213135e-01 1.620483398437500000e-01 1.683492958545684814e-01 -5.816992759704589844e+00 412 | 4.110000133514404297e-01 1.671123504638671875e-01 1.743358820676803589e-01 -5.804500579833984375e+00 413 | 4.120000004768371582e-01 1.722564697265625000e-01 1.804351657629013062e-01 -5.791968345642089844e+00 414 | 4.129999876022338867e-01 1.774978637695312500e-01 1.866465061902999878e-01 
-5.779387474060058594e+00 415 | 4.140000045299530029e-01 1.828308105468750000e-01 1.929691880941390991e-01 -5.766760826110839844e+00 416 | 4.149999916553497314e-01 1.882572174072265625e-01 1.994024217128753662e-01 -5.754087448120117188e+00 417 | 4.160000085830688477e-01 1.937789916992187500e-01 2.059453278779983521e-01 -5.741365432739257812e+00 418 | 4.169999957084655762e-01 1.993942260742187500e-01 2.125969827175140381e-01 -5.728597640991210938e+00 419 | 4.180000126361846924e-01 2.050857543945312500e-01 2.193564027547836304e-01 -5.715789794921875000e+00 420 | 4.189999997615814209e-01 2.108917236328125000e-01 2.262224853038787842e-01 -5.702925682067871094e+00 421 | 4.199999868869781494e-01 2.167854309082031250e-01 2.331940978765487671e-01 -5.690017700195312500e+00 422 | 4.210000038146972656e-01 2.227725982666015625e-01 2.402700334787368774e-01 -5.677062034606933594e+00 423 | 4.219999909400939941e-01 2.288570404052734375e-01 2.474490106105804443e-01 -5.664058685302734375e+00 424 | 4.230000078678131104e-01 2.350368499755859375e-01 2.547296583652496338e-01 -5.651005744934082031e+00 425 | 4.239999949932098389e-01 2.413215637207031250e-01 2.621105611324310303e-01 -5.637901306152343750e+00 426 | 4.250000119209289551e-01 2.476997375488281250e-01 2.695902287960052490e-01 -5.624750137329101562e+00 427 | 4.259999990463256836e-01 2.541618347167968750e-01 2.771670818328857422e-01 -5.611555099487304688e+00 428 | 4.269999861717224121e-01 2.607460021972656250e-01 2.848394811153411865e-01 -5.598300933837890625e+00 429 | 4.280000030994415283e-01 2.674160003662109375e-01 2.926057279109954834e-01 -5.585001945495605469e+00 430 | 4.289999902248382568e-01 2.741851806640625000e-01 3.004640340805053711e-01 -5.571654319763183594e+00 431 | 4.300000071525573730e-01 2.810573577880859375e-01 3.084125518798828125e-01 -5.558254241943359375e+00 432 | 4.309999942779541016e-01 2.880249023437500000e-01 3.164494037628173828e-01 -5.544806480407714844e+00 433 | 4.320000112056732178e-01 2.950973510742187500e-01 3.245725333690643311e-01 -5.531306266784667969e+00 434 | 4.329999983310699463e-01 3.022613525390625000e-01 3.327799141407012939e-01 -5.517760276794433594e+00 435 | 4.339999854564666748e-01 3.095397949218750000e-01 3.410694301128387451e-01 -5.504156112670898438e+00 436 | 4.350000023841857910e-01 3.169021606445312500e-01 3.494388759136199951e-01 -5.490509986877441406e+00 437 | 4.359999895095825195e-01 3.243713378906250000e-01 3.578859865665435791e-01 -5.476809501647949219e+00 438 | 4.370000064373016357e-01 3.319358825683593750e-01 3.664084076881408691e-01 -5.463062286376953125e+00 439 | 4.379999935626983643e-01 3.396110534667968750e-01 3.750037550926208496e-01 -5.449259757995605469e+00 440 | 4.390000104904174805e-01 3.473758697509765625e-01 3.836695551872253418e-01 -5.435410499572753906e+00 441 | 4.399999976158142090e-01 3.552379608154296875e-01 3.924033045768737793e-01 -5.421513557434082031e+00 442 | 4.410000145435333252e-01 3.632087707519531250e-01 4.012023508548736572e-01 -5.407561302185058594e+00 443 | 4.420000016689300537e-01 3.712730407714843750e-01 4.100640118122100830e-01 -5.393563270568847656e+00 444 | 4.429999887943267822e-01 3.794288635253906250e-01 4.189856052398681641e-01 -5.379518508911132812e+00 445 | 4.440000057220458984e-01 3.876914978027343750e-01 4.279642999172210693e-01 -5.365421295166015625e+00 446 | 4.449999928474426270e-01 3.960437774658203125e-01 4.369972050189971924e-01 -5.351277351379394531e+00 447 | 4.460000097751617432e-01 4.044952392578125000e-01 4.460814297199249268e-01 
-5.337083816528320312e+00 448 | 4.469999969005584717e-01 4.130516052246093750e-01 4.552139341831207275e-01 -5.322838783264160156e+00 449 | 4.480000138282775879e-01 4.216861724853515625e-01 4.643917083740234375e-01 -5.308553695678710938e+00 450 | 4.490000009536743164e-01 4.304199218750000000e-01 4.736115634441375732e-01 -5.294219017028808594e+00 451 | 4.499999880790710449e-01 4.392471313476562500e-01 4.828703701496124268e-01 -5.279836654663085938e+00 452 | 4.510000050067901611e-01 4.481697082519531250e-01 4.921648502349853516e-01 -5.265407562255859375e+00 453 | 4.519999921321868896e-01 4.571762084960937500e-01 5.014916658401489258e-01 -5.250935554504394531e+00 454 | 4.530000090599060059e-01 4.662666320800781250e-01 5.108475685119628906e-01 -5.236421585083007812e+00 455 | 4.539999961853027344e-01 4.754562377929687500e-01 5.202289819717407227e-01 -5.221858024597167969e+00 456 | 4.550000131130218506e-01 4.847202301025390625e-01 5.296324491500854492e-01 -5.207256317138671875e+00 457 | 4.560000002384185791e-01 4.940662384033203125e-01 5.390544533729553223e-01 -5.192613601684570312e+00 458 | 4.569999873638153076e-01 5.035037994384765625e-01 5.484914183616638184e-01 -5.177926063537597656e+00 459 | 4.580000042915344238e-01 5.130157470703125000e-01 5.579396486282348633e-01 -5.163199424743652344e+00 460 | 4.589999914169311523e-01 5.226039886474609375e-01 5.673954486846923828e-01 -5.148435592651367188e+00 461 | 4.600000083446502686e-01 5.322761535644531250e-01 5.768550038337707520e-01 -5.133629798889160156e+00 462 | 4.609999954700469971e-01 5.420055389404296875e-01 5.863144993782043457e-01 -5.118794441223144531e+00 463 | 4.620000123977661133e-01 5.518264770507812500e-01 5.957700610160827637e-01 -5.103914260864257812e+00 464 | 4.629999995231628418e-01 5.617027282714843750e-01 6.052177548408508301e-01 -5.089005470275878906e+00 465 | 4.639999866485595703e-01 5.716514587402343750e-01 6.146535873413085938e-01 -5.074060440063476562e+00 466 | 4.650000035762786865e-01 5.816612243652343750e-01 6.240735650062561035e-01 -5.059083938598632812e+00 467 | 4.659999907016754150e-01 5.917263031005859375e-01 6.334735751152038574e-01 -5.044080734252929688e+00 468 | 4.670000076293945312e-01 6.018486022949218750e-01 6.428494453430175781e-01 -5.029047966003417969e+00 469 | 4.679999947547912598e-01 6.120185852050781250e-01 6.521970629692077637e-01 -5.013992309570312500e+00 470 | 4.690000116825103760e-01 6.222457885742187500e-01 6.615121960639953613e-01 -4.998907089233398438e+00 471 | 4.699999988079071045e-01 6.325225830078125000e-01 6.707905530929565430e-01 -4.983797073364257812e+00 472 | 4.709999859333038330e-01 6.428375244140625000e-01 6.800277829170227051e-01 -4.968668937683105469e+00 473 | 4.720000028610229492e-01 6.531791687011718750e-01 6.892195343971252441e-01 -4.953525543212890625e+00 474 | 4.729999899864196777e-01 6.635665893554687500e-01 6.983614563941955566e-01 -4.938360214233398438e+00 475 | 4.740000069141387939e-01 6.739788055419921875e-01 7.074490189552307129e-01 -4.923182487487792969e+00 476 | 4.749999940395355225e-01 6.844139099121093750e-01 7.164778113365173340e-01 -4.907993316650390625e+00 477 | 4.760000109672546387e-01 6.948738098144531250e-01 7.254433035850524902e-01 -4.892790794372558594e+00 478 | 4.769999980926513672e-01 7.053413391113281250e-01 7.343409061431884766e-01 -4.877586364746093750e+00 479 | 4.779999852180480957e-01 7.158393859863281250e-01 7.431660294532775879e-01 -4.862365722656250000e+00 480 | 4.790000021457672119e-01 7.263031005859375000e-01 7.519140839576721191e-01 
-4.847160339355468750e+00 481 | 4.799999892711639404e-01 7.367897033691406250e-01 7.605803012847900391e-01 -4.831945419311523438e+00 482 | 4.810000061988830566e-01 7.472667694091796875e-01 7.691600918769836426e-01 -4.816735267639160156e+00 483 | 4.819999933242797852e-01 7.577304840087890625e-01 7.776486277580261230e-01 -4.801531791687011719e+00 484 | 4.830000102519989014e-01 7.681617736816406250e-01 7.860412001609802246e-01 -4.786343574523925781e+00 485 | 4.839999973773956299e-01 7.785873413085937500e-01 7.943330407142639160e-01 -4.771160125732421875e+00 486 | 4.850000143051147461e-01 7.889595031738281250e-01 8.025192618370056152e-01 -4.756002426147460938e+00 487 | 4.860000014305114746e-01 7.992973327636718750e-01 8.105950951576232910e-01 -4.740861892700195312e+00 488 | 4.869999885559082031e-01 8.095932006835937500e-01 8.185555934906005859e-01 -4.725742340087890625e+00 489 | 4.880000054836273193e-01 8.198204040527343750e-01 8.263959288597106934e-01 -4.710657119750976562e+00 490 | 4.889999926090240479e-01 8.299903869628906250e-01 8.341111540794372559e-01 -4.695600509643554688e+00 491 | 4.900000095367431641e-01 8.400955200195312500e-01 8.416963219642639160e-01 -4.680576324462890625e+00 492 | 4.909999966621398926e-01 8.501110076904296875e-01 8.491464853286743164e-01 -4.665598869323730469e+00 493 | 4.920000135898590088e-01 8.600387573242187500e-01 8.564566969871520996e-01 -4.650664329528808594e+00 494 | 4.930000007152557373e-01 8.698692321777343750e-01 8.636219501495361328e-01 -4.635777473449707031e+00 495 | 4.939999878406524658e-01 8.795890808105468750e-01 8.706371784210205078e-01 -4.620948791503906250e+00 496 | 4.950000047683715820e-01 8.892059326171875000e-01 8.774974942207336426e-01 -4.606169700622558594e+00 497 | 4.959999918937683105e-01 8.986778259277343750e-01 8.841977119445800781e-01 -4.591464042663574219e+00 498 | 4.970000088214874268e-01 9.080276489257812500e-01 8.907328844070434570e-01 -4.576819419860839844e+00 499 | 4.979999959468841553e-01 9.172267913818359375e-01 8.970979452133178711e-01 -4.562251091003417969e+00 500 | 4.990000128746032715e-01 9.262790679931640625e-01 9.032878279685974121e-01 -4.547756195068359375e+00 501 | 5.000000000000000000e-01 9.351463317871093750e-01 9.092974066734313965e-01 -4.533354759216308594e+00 502 | 5.009999871253967285e-01 9.438476562500000000e-01 9.151217341423034668e-01 -4.519035339355468750e+00 503 | 5.019999742507934570e-01 9.523563385009765625e-01 9.207556247711181641e-01 -4.504814147949218750e+00 504 | 5.030000209808349609e-01 9.606571197509765625e-01 9.261940717697143555e-01 -4.490695953369140625e+00 505 | 5.040000081062316895e-01 9.687500000000000000e-01 9.314319491386413574e-01 -4.476683616638183594e+00 506 | 5.049999952316284180e-01 9.766235351562500000e-01 9.364642500877380371e-01 -4.462780952453613281e+00 507 | 5.059999823570251465e-01 9.842491149902343750e-01 9.412857890129089355e-01 -4.449003219604492188e+00 508 | 5.070000290870666504e-01 9.916305541992187500e-01 9.458916187286376953e-01 -4.435346603393554688e+00 509 | 5.080000162124633789e-01 9.987373352050781250e-01 9.502766132354736328e-01 -4.421828269958496094e+00 510 | 5.090000033378601074e-01 1.005578994750976562e+00 9.544357061386108398e-01 -4.408444404602050781e+00 511 | 5.099999904632568359e-01 1.012130737304687500e+00 9.583639502525329590e-01 -4.395205497741699219e+00 512 | 5.109999775886535645e-01 1.018375396728515625e+00 9.620561599731445312e-01 -4.382121086120605469e+00 513 | 5.120000243186950684e-01 1.024307250976562500e+00 9.655074477195739746e-01 
-4.369194030761718750e+00 514 | 5.130000114440917969e-01 1.029903411865234375e+00 9.687127470970153809e-01 -4.356434822082519531e+00 515 | 5.139999985694885254e-01 1.035152435302734375e+00 9.716671109199523926e-01 -4.343850135803222656e+00 516 | 5.149999856948852539e-01 1.040054321289062500e+00 9.743654727935791016e-01 -4.331439971923828125e+00 517 | 5.159999728202819824e-01 1.044569015502929688e+00 9.768030047416687012e-01 -4.319224357604980469e+00 518 | 5.170000195503234863e-01 1.048696517944335938e+00 9.789746999740600586e-01 -4.307202339172363281e+00 519 | 5.180000066757202148e-01 1.052419662475585938e+00 9.808756709098815918e-01 -4.295384407043457031e+00 520 | 5.189999938011169434e-01 1.055728912353515625e+00 9.825010299682617188e-01 -4.283773422241210938e+00 521 | 5.199999809265136719e-01 1.058605194091796875e+00 9.838458895683288574e-01 -4.272381782531738281e+00 522 | 5.210000276565551758e-01 1.061027526855468750e+00 9.849054813385009766e-01 -4.261216163635253906e+00 523 | 5.220000147819519043e-01 1.062988281250000000e+00 9.856749176979064941e-01 -4.250283241271972656e+00 524 | 5.230000019073486328e-01 1.064466476440429688e+00 9.861495494842529297e-01 -4.239592552185058594e+00 525 | 5.239999890327453613e-01 1.065448760986328125e+00 9.863244891166687012e-01 -4.229150772094726562e+00 526 | 5.249999761581420898e-01 1.065917968750000000e+00 9.861951470375061035e-01 -4.218966484069824219e+00 527 | 5.260000228881835938e-01 1.065868377685546875e+00 9.857568144798278809e-01 -4.209043502807617188e+00 528 | 5.270000100135803223e-01 1.065269470214843750e+00 9.850048422813415527e-01 -4.199396133422851562e+00 529 | 5.279999971389770508e-01 1.064117431640625000e+00 9.839347004890441895e-01 -4.190026283264160156e+00 530 | 5.289999842643737793e-01 1.062397003173828125e+00 9.825417399406433105e-01 -4.180942535400390625e+00 531 | 5.299999713897705078e-01 1.060085296630859375e+00 9.808215498924255371e-01 -4.172156333923339844e+00 532 | 5.310000181198120117e-01 1.057168960571289062e+00 9.787696003913879395e-01 -4.163671493530273438e+00 533 | 5.320000052452087402e-01 1.053625106811523438e+00 9.763815402984619141e-01 -4.155503273010253906e+00 534 | 5.329999923706054688e-01 1.049472808837890625e+00 9.736529588699340820e-01 -4.147640228271484375e+00 535 | 5.339999794960021973e-01 1.044662475585937500e+00 9.705795049667358398e-01 -4.140109062194824219e+00 536 | 5.350000262260437012e-01 1.039184570312500000e+00 9.671569466590881348e-01 -4.132910728454589844e+00 537 | 5.360000133514404297e-01 1.033044815063476562e+00 9.633810520172119141e-01 -4.126047134399414062e+00 538 | 5.370000004768371582e-01 1.026214599609375000e+00 9.592477083206176758e-01 -4.119529724121093750e+00 539 | 5.379999876022338867e-01 1.018684387207031250e+00 9.547527432441711426e-01 -4.113364219665527344e+00 540 | 5.389999747276306152e-01 1.010440826416015625e+00 9.498921036720275879e-01 -4.107555389404296875e+00 541 | 5.400000214576721191e-01 1.001468658447265625e+00 9.446618556976318359e-01 -4.102112770080566406e+00 542 | 5.410000085830688477e-01 9.917659759521484375e-01 9.390580654144287109e-01 -4.097039222717285156e+00 543 | 5.419999957084655762e-01 9.813117980957031250e-01 9.330767989158630371e-01 -4.092341423034667969e+00 544 | 5.429999828338623047e-01 9.701080322265625000e-01 9.267143011093139648e-01 -4.088020324707031250e+00 545 | 5.440000295639038086e-01 9.581298828125000000e-01 9.199668765068054199e-01 -4.084088325500488281e+00 546 | 5.450000166893005371e-01 9.453849792480468750e-01 9.128308296203613281e-01 
-4.080540657043457031e+00 547 | 5.460000038146972656e-01 9.318523406982421875e-01 9.053024649620056152e-01 -4.077389717102050781e+00 548 | 5.469999909400939941e-01 9.175262451171875000e-01 8.973783850669860840e-01 -4.074637413024902344e+00 549 | 5.479999780654907227e-01 9.024162292480468750e-01 8.890550732612609863e-01 -4.072278022766113281e+00 550 | 5.490000247955322266e-01 8.864994049072265625e-01 8.803291320800781250e-01 -4.070323944091796875e+00 551 | 5.500000119209289551e-01 8.697891235351562500e-01 8.711973428726196289e-01 -4.068769454956054688e+00 552 | 5.509999990463256836e-01 8.522682189941406250e-01 8.616563081741333008e-01 -4.067620277404785156e+00 553 | 5.519999861717224121e-01 8.339500427246093750e-01 8.517030477523803711e-01 -4.066872596740722656e+00 554 | 5.529999732971191406e-01 8.148193359375000000e-01 8.413343429565429688e-01 -4.066534042358398438e+00 555 | 5.540000200271606445e-01 7.948951721191406250e-01 8.305473327636718750e-01 -4.066592216491699219e+00 556 | 5.550000071525573730e-01 7.741775512695312500e-01 8.193390369415283203e-01 -4.067049026489257812e+00 557 | 5.559999942779541016e-01 7.526741027832031250e-01 8.077065944671630859e-01 -4.067901611328125000e+00 558 | 5.569999814033508301e-01 7.303924560546875000e-01 7.956473827362060547e-01 -4.069145202636718750e+00 559 | 5.580000281333923340e-01 7.073402404785156250e-01 7.831586599349975586e-01 -4.070774078369140625e+00 560 | 5.590000152587890625e-01 6.835403442382812500e-01 7.702378630638122559e-01 -4.072779655456542969e+00 561 | 5.600000023841857910e-01 6.590118408203125000e-01 7.568825483322143555e-01 -4.075152397155761719e+00 562 | 5.609999895095825195e-01 6.337604522705078125e-01 7.430903315544128418e-01 -4.077886581420898438e+00 563 | 5.619999766349792480e-01 6.078224182128906250e-01 7.288588881492614746e-01 -4.080965995788574219e+00 564 | 5.630000233650207520e-01 5.812206268310546875e-01 7.141861319541931152e-01 -4.084378242492675781e+00 565 | 5.640000104904174805e-01 5.539894104003906250e-01 6.990698575973510742e-01 -4.088107109069824219e+00 566 | 5.649999976158142090e-01 5.261688232421875000e-01 6.835080981254577637e-01 -4.092131614685058594e+00 567 | 5.659999847412109375e-01 4.977989196777343750e-01 6.674989461898803711e-01 -4.096433639526367188e+00 568 | 5.669999718666076660e-01 4.689216613769531250e-01 6.510406732559204102e-01 -4.100989341735839844e+00 569 | 5.680000185966491699e-01 4.395866394042968750e-01 6.341314911842346191e-01 -4.105774879455566406e+00 570 | 5.690000057220458984e-01 4.098510742187500000e-01 6.167697906494140625e-01 -4.110760688781738281e+00 571 | 5.699999928474426270e-01 3.797950744628906250e-01 5.989542007446289062e-01 -4.115908622741699219e+00 572 | 5.709999799728393555e-01 3.494834899902343750e-01 5.806831717491149902e-01 -4.121184349060058594e+00 573 | 5.720000267028808594e-01 3.189830780029296875e-01 5.619555711746215820e-01 -4.126555442810058594e+00 574 | 5.730000138282775879e-01 2.883987426757812500e-01 5.427701473236083984e-01 -4.131968498229980469e+00 575 | 5.740000009536743164e-01 2.578163146972656250e-01 5.231258273124694824e-01 -4.137381553649902344e+00 576 | 5.749999880790710449e-01 2.273368835449218750e-01 5.030217170715332031e-01 -4.142742156982421875e+00 577 | 5.759999752044677734e-01 1.970748901367187500e-01 4.824569225311279297e-01 -4.147993087768554688e+00 578 | 5.770000219345092773e-01 1.671600341796875000e-01 4.614306986331939697e-01 -4.153069496154785156e+00 579 | 5.780000090599060059e-01 1.377105712890625000e-01 4.399424791336059570e-01 
-4.157912254333496094e+00 580 | 5.789999961853027344e-01 1.088752746582031250e-01 4.179917275905609131e-01 -4.162446975708007812e+00 581 | 5.799999833106994629e-01 8.079528808593750000e-02 3.955780565738677979e-01 -4.166602134704589844e+00 582 | 5.809999704360961914e-01 5.362319946289062500e-02 3.727012276649475098e-01 -4.170301437377929688e+00 583 | 5.820000171661376953e-01 2.751159667968750000e-02 3.493610322475433350e-01 -4.173468589782714844e+00 584 | 5.830000042915344238e-01 2.624511718750000000e-03 3.255575001239776611e-01 -4.176019668579101562e+00 585 | 5.839999914169311523e-01 -2.087783813476562500e-02 3.012906610965728760e-01 -4.177875518798828125e+00 586 | 5.849999785423278809e-01 -4.283905029296875000e-02 2.765607535839080811e-01 -4.178958892822265625e+00 587 | 5.860000252723693848e-01 -6.311416625976562500e-02 2.513680756092071533e-01 -4.179193496704101562e+00 588 | 5.870000123977661133e-01 -8.155822753906250000e-02 2.257131338119506836e-01 -4.178508758544921875e+00 589 | 5.879999995231628418e-01 -9.805679321289062500e-02 1.995964348316192627e-01 -4.176847457885742188e+00 590 | 5.889999866485595703e-01 -1.125087738037109375e-01 1.730187237262725830e-01 -4.174157142639160156e+00 591 | 5.899999737739562988e-01 -1.248474121093750000e-01 1.459807902574539185e-01 -4.170406341552734375e+00 592 | 5.910000205039978027e-01 -1.350479125976562500e-01 1.184836104512214661e-01 -4.165580749511718750e+00 593 | 5.920000076293945312e-01 -1.431045532226562500e-01 9.052824974060058594e-02 -4.159680366516113281e+00 594 | 5.929999947547912598e-01 -1.490955352783203125e-01 6.211590394377708435e-02 -4.152740478515625000e+00 595 | 5.939999818801879883e-01 -1.531105041503906250e-01 3.324791043996810913e-02 -4.144808769226074219e+00 596 | 5.950000286102294922e-01 -1.553192138671875000e-01 3.925730008631944656e-03 -4.135970115661621094e+00 597 | 5.960000157356262207e-01 -1.559486389160156250e-01 -2.584904991090297699e-02 -4.126337051391601562e+00 598 | 5.970000028610229492e-01 -1.552619934082031250e-01 -5.607471242547035217e-02 -4.116044998168945312e+00 599 | 5.979999899864196777e-01 -1.535758972167968750e-01 -8.674939721822738647e-02 -4.105249404907226562e+00 600 | 5.989999771118164062e-01 -1.512603759765625000e-01 -1.178711354732513428e-01 -4.094139099121093750e+00 601 | 6.000000238418579102e-01 -1.487083435058593750e-01 -1.494378000497817993e-01 -4.082908630371093750e+00 602 | 6.010000109672546387e-01 -1.463470458984375000e-01 -1.814471632242202759e-01 -4.071773529052734375e+00 603 | 6.019999980926513672e-01 -1.445884704589843750e-01 -2.138968259096145630e-01 -4.060942649841308594e+00 604 | 6.029999852180480957e-01 -1.438694000244140625e-01 -2.467842847108840942e-01 -4.050633430480957031e+00 605 | 6.039999723434448242e-01 -1.445922851562500000e-01 -2.801069021224975586e-01 -4.041049003601074219e+00 606 | 6.050000190734863281e-01 -1.471290588378906250e-01 -3.138618767261505127e-01 -4.032373428344726562e+00 607 | 6.060000061988830566e-01 -1.518249511718750000e-01 -3.480463027954101562e-01 -4.024784088134765625e+00 608 | 6.069999933242797852e-01 -1.589660644531250000e-01 -3.826571106910705566e-01 -4.018422126770019531e+00 609 | 6.079999804496765137e-01 -1.687850952148437500e-01 -4.176911711692810059e-01 -4.013406753540039062e+00 610 | 6.090000271797180176e-01 -1.814670562744140625e-01 -4.531450867652893066e-01 -4.009827613830566406e+00 611 | 6.100000143051147461e-01 -1.971359252929687500e-01 -4.890154600143432617e-01 -4.007749557495117188e+00 612 | 6.110000014305114746e-01 -2.158603668212890625e-01 
-5.252986550331115723e-01 -4.007205963134765625e+00 613 | 6.119999885559082031e-01 -2.376575469970703125e-01 -5.619909763336181641e-01 -4.008205413818359375e+00 614 | 6.129999756813049316e-01 -2.625045776367187500e-01 -5.990885496139526367e-01 -4.010737419128417969e+00 615 | 6.140000224113464355e-01 -2.903366088867187500e-01 -6.365873217582702637e-01 -4.014766693115234375e+00 616 | 6.150000095367431641e-01 -3.210487365722656250e-01 -6.744832396507263184e-01 -4.020244598388671875e+00 617 | 6.159999966621398926e-01 -3.545207977294921875e-01 -7.127718925476074219e-01 -4.027108192443847656e+00 618 | 6.169999837875366211e-01 -3.906097412109375000e-01 -7.514490485191345215e-01 -4.035285949707031250e+00 619 | 6.179999709129333496e-01 -4.291553497314453125e-01 -7.905099987983703613e-01 -4.044697761535644531e+00 620 | 6.190000176429748535e-01 -4.699897766113281250e-01 -8.299501538276672363e-01 -4.055258750915527344e+00 621 | 6.200000047683715820e-01 -5.129432678222656250e-01 -8.697646260261535645e-01 -4.066884994506835938e+00 622 | 6.209999918937683105e-01 -5.578422546386718750e-01 -9.099484682083129883e-01 -4.079487800598144531e+00 623 | 6.219999790191650391e-01 -6.045150756835937500e-01 -9.504966139793395996e-01 -4.092981338500976562e+00 624 | 6.230000257492065430e-01 -6.528053283691406250e-01 -9.914037585258483887e-01 -4.107287406921386719e+00 625 | 6.240000128746032715e-01 -7.025356292724609375e-01 -1.032664656639099121e+00 -4.122316360473632812e+00 626 | 6.250000000000000000e-01 -7.535705566406250000e-01 -1.074273586273193359e+00 -4.138001441955566406e+00 627 | 6.259999871253967285e-01 -8.057613372802734375e-01 -1.116225123405456543e+00 -4.154266357421875000e+00 628 | 6.269999742507934570e-01 -8.589744567871093750e-01 -1.158513426780700684e+00 -4.171044349670410156e+00 629 | 6.280000209808349609e-01 -9.130840301513671875e-01 -1.201132535934448242e+00 -4.188273429870605469e+00 630 | 6.290000081062316895e-01 -9.679756164550781250e-01 -1.244076490402221680e+00 -4.205895423889160156e+00 631 | 6.299999952316284180e-01 -1.023540496826171875e+00 -1.287338972091674805e+00 -4.223855972290039062e+00 632 | 6.309999823570251465e-01 -1.079685211181640625e+00 -1.330913782119750977e+00 -4.242106437683105469e+00 633 | 6.320000290870666504e-01 -1.136320114135742188e+00 -1.374794363975524902e+00 -4.260602951049804688e+00 634 | 6.330000162124633789e-01 -1.193351745605468750e+00 -1.418974280357360840e+00 -4.279299736022949219e+00 635 | 6.340000033378601074e-01 -1.250720977783203125e+00 -1.463446736335754395e+00 -4.298165321350097656e+00 636 | 6.349999904632568359e-01 -1.308343887329101562e+00 -1.508204817771911621e+00 -4.317159652709960938e+00 637 | 6.359999775886535645e-01 -1.366165161132812500e+00 -1.553241610527038574e+00 -4.336253166198730469e+00 638 | 6.370000243186950684e-01 -1.424137115478515625e+00 -1.598549962043762207e+00 -4.355421066284179688e+00 639 | 6.380000114440917969e-01 -1.482192993164062500e+00 -1.644122719764709473e+00 -4.374632835388183594e+00 640 | 6.389999985694885254e-01 -1.540298461914062500e+00 -1.689952492713928223e+00 -4.393867492675781250e+00 641 | 6.399999856948852539e-01 -1.598400115966796875e+00 -1.736031770706176758e+00 -4.413102149963378906e+00 642 | 6.409999728202819824e-01 -1.656467437744140625e+00 -1.782352924346923828e+00 -4.432318687438964844e+00 643 | 6.420000195503234863e-01 -1.714466094970703125e+00 -1.828908085823059082e+00 -4.451500892639160156e+00 644 | 6.430000066757202148e-01 -1.772361755371093750e+00 -1.875689625740051270e+00 -4.470631599426269531e+00 645 | 
6.439999938011169434e-01 -1.830123901367187500e+00 -1.922689318656921387e+00 -4.489694595336914062e+00 646 | 6.449999809265136719e-01 -1.887729644775390625e+00 -1.969899177551269531e+00 -4.508680343627929688e+00 647 | 6.460000276565551758e-01 -1.945159912109375000e+00 -2.017310857772827148e+00 -4.527576446533203125e+00 648 | 6.470000147819519043e-01 -2.002386093139648438e+00 -2.064916133880615234e+00 -4.546370506286621094e+00 649 | 6.480000019073486328e-01 -2.059391021728515625e+00 -2.112706184387207031e+00 -4.565053939819335938e+00 650 | 6.489999890327453613e-01 -2.116157531738281250e+00 -2.160672903060913086e+00 -4.583618164062500000e+00 651 | 6.499999761581420898e-01 -2.172676086425781250e+00 -2.208806991577148438e+00 -4.602056503295898438e+00 652 | 6.510000228881835938e-01 -2.228927612304687500e+00 -2.257099866867065430e+00 -4.620360374450683594e+00 653 | 6.520000100135803223e-01 -2.284893035888671875e+00 -2.305542707443237305e+00 -4.638522148132324219e+00 654 | 6.529999971389770508e-01 -2.340574264526367188e+00 -2.354126453399658203e+00 -4.656539916992187500e+00 655 | 6.539999842643737793e-01 -2.395950317382812500e+00 -2.402841567993164062e+00 -4.674405097961425781e+00 656 | 6.549999713897705078e-01 -2.451015472412109375e+00 -2.451678752899169922e+00 -4.692114830017089844e+00 657 | 6.560000181198120117e-01 -2.505769729614257812e+00 -2.500628948211669922e+00 -4.709666252136230469e+00 658 | 6.570000052452087402e-01 -2.560190200805664062e+00 -2.549682378768920898e+00 -4.727051734924316406e+00 659 | 6.579999923706054688e-01 -2.614280700683593750e+00 -2.598829269409179688e+00 -4.744270324707031250e+00 660 | 6.589999794960021973e-01 -2.668029785156250000e+00 -2.648060321807861328e+00 -4.761319160461425781e+00 661 | 6.600000262260437012e-01 -2.721441268920898438e+00 -2.697365283966064453e+00 -4.778196334838867188e+00 662 | 6.610000133514404297e-01 -2.774499893188476562e+00 -2.746734142303466797e+00 -4.794898033142089844e+00 663 | 6.620000004768371582e-01 -2.827209472656250000e+00 -2.796157121658325195e+00 -4.811423301696777344e+00 664 | 6.629999876022338867e-01 -2.879560470581054688e+00 -2.845623970031738281e+00 -4.827769279479980469e+00 665 | 6.639999747276306152e-01 -2.931558609008789062e+00 -2.895124197006225586e+00 -4.843937873840332031e+00 666 | 6.650000214576721191e-01 -2.983196258544921875e+00 -2.944647789001464844e+00 -4.859924316406250000e+00 667 | 6.660000085830688477e-01 -3.034465789794921875e+00 -2.994184017181396484e+00 -4.875727653503417969e+00 668 | 6.669999957084655762e-01 -3.085376739501953125e+00 -3.043722391128540039e+00 -4.891349792480468750e+00 669 | 6.679999828338623047e-01 -3.135921478271484375e+00 -3.093252182006835938e+00 -4.906787872314453125e+00 670 | 6.690000295639038086e-01 -3.186100006103515625e+00 -3.142762899398803711e+00 -4.922042846679687500e+00 671 | 6.700000166893005371e-01 -3.235912322998046875e+00 -3.192243337631225586e+00 -4.937112808227539062e+00 672 | 6.710000038146972656e-01 -3.285358428955078125e+00 -3.241683006286621094e+00 -4.951999664306640625e+00 673 | 6.719999909400939941e-01 -3.334434509277343750e+00 -3.291070461273193359e+00 -4.966701507568359375e+00 674 | 6.729999780654907227e-01 -3.383148193359375000e+00 -3.340394973754882812e+00 -4.981219291687011719e+00 675 | 6.740000247955322266e-01 -3.431495666503906250e+00 -3.389645099639892578e+00 -4.995553970336914062e+00 676 | 6.750000119209289551e-01 -3.479476928710937500e+00 -3.438809633255004883e+00 -5.009705543518066406e+00 677 | 6.759999990463256836e-01 -3.527093887329101562e+00 
-3.487877130508422852e+00 -5.023673057556152344e+00 678 | 6.769999861717224121e-01 -3.574344635009765625e+00 -3.536836624145507812e+00 -5.037457466125488281e+00 679 | 6.779999732971191406e-01 -3.621234893798828125e+00 -3.585675954818725586e+00 -5.051059722900390625e+00 680 | 6.790000200271606445e-01 -3.667762756347656250e+00 -3.634384155273437500e+00 -5.064480781555175781e+00 681 | 6.800000071525573730e-01 -3.713926315307617188e+00 -3.682949304580688477e+00 -5.077718734741210938e+00 682 | 6.809999942779541016e-01 -3.759735107421875000e+00 -3.731359720230102539e+00 -5.090778350830078125e+00 683 | 6.819999814033508301e-01 -3.805179595947265625e+00 -3.779603481292724609e+00 -5.103655815124511719e+00 684 | 6.830000281333923340e-01 -3.850278854370117188e+00 -3.827668905258178711e+00 -5.116357803344726562e+00 685 | 6.840000152587890625e-01 -3.895013809204101562e+00 -3.875544309616088867e+00 -5.128878593444824219e+00 686 | 6.850000023841857910e-01 -3.939397811889648438e+00 -3.923217296600341797e+00 -5.141222953796386719e+00 687 | 6.859999895095825195e-01 -3.983425140380859375e+00 -3.970676183700561523e+00 -5.153388023376464844e+00 688 | 6.869999766349792480e-01 -4.027101516723632812e+00 -4.017908573150634766e+00 -5.165376663208007812e+00 689 | 6.880000233650207520e-01 -4.070432662963867188e+00 -4.064902782440185547e+00 -5.177191734313964844e+00 690 | 6.890000104904174805e-01 -4.113410949707031250e+00 -4.111646175384521484e+00 -5.188830375671386719e+00 691 | 6.899999976158142090e-01 -4.156038284301757812e+00 -4.158126831054687500e+00 -5.200291633605957031e+00 692 | 6.909999847412109375e-01 -4.198320388793945312e+00 -4.204332351684570312e+00 -5.211580276489257812e+00 693 | 6.919999718666076660e-01 -4.240253448486328125e+00 -4.250250339508056641e+00 -5.222693443298339844e+00 694 | 6.930000185966491699e-01 -4.281845092773437500e+00 -4.295868396759033203e+00 -5.233634948730468750e+00 695 | 6.940000057220458984e-01 -4.323085784912109375e+00 -4.341174602508544922e+00 -5.244400024414062500e+00 696 | 6.949999928474426270e-01 -4.363983154296875000e+00 -4.386155605316162109e+00 -5.254992485046386719e+00 697 | 6.959999799728393555e-01 -4.404533386230468750e+00 -4.430799961090087891e+00 -5.265411376953125000e+00 698 | 6.970000267028808594e-01 -4.444740295410156250e+00 -4.475094318389892578e+00 -5.275657653808593750e+00 699 | 6.980000138282775879e-01 -4.484596252441406250e+00 -4.519026279449462891e+00 -5.285727500915527344e+00 700 | 6.990000009536743164e-01 -4.524108886718750000e+00 -4.562583923339843750e+00 -5.295624732971191406e+00 701 | 6.999999880790710449e-01 -4.563270568847656250e+00 -4.605753898620605469e+00 -5.305346488952636719e+00 702 | 7.009999752044677734e-01 -4.602085113525390625e+00 -4.648524284362792969e+00 -5.314892768859863281e+00 703 | 7.020000219345092773e-01 -4.640544891357421875e+00 -4.690881729125976562e+00 -5.324261665344238281e+00 704 | 7.030000090599060059e-01 -4.678657531738281250e+00 -4.732813835144042969e+00 -5.333455085754394531e+00 705 | 7.039999961853027344e-01 -4.716409683227539062e+00 -4.774308204650878906e+00 -5.342469215393066406e+00 706 | 7.049999833106994629e-01 -4.753803253173828125e+00 -4.815351486206054688e+00 -5.351302146911621094e+00 707 | 7.059999704360961914e-01 -4.790840148925781250e+00 -4.855931758880615234e+00 -5.359955787658691406e+00 708 | 7.070000171661376953e-01 -4.827510833740234375e+00 -4.896036148071289062e+00 -5.368426322937011719e+00 709 | 7.080000042915344238e-01 -4.863811492919921875e+00 -4.935651779174804688e+00 -5.376709938049316406e+00 710 | 
7.089999914169311523e-01 -4.899740219116210938e+00 -4.974765777587890625e+00 -5.384807586669921875e+00 711 | 7.099999785423278809e-01 -4.935291290283203125e+00 -5.013366222381591797e+00 -5.392715454101562500e+00 712 | 7.110000252723693848e-01 -4.970462799072265625e+00 -5.051439285278320312e+00 -5.400432586669921875e+00 713 | 7.120000123977661133e-01 -5.005245208740234375e+00 -5.088973045349121094e+00 -5.407953262329101562e+00 714 | 7.129999995231628418e-01 -5.039630889892578125e+00 -5.125954627990722656e+00 -5.415275573730468750e+00 715 | 7.139999866485595703e-01 -5.073616027832031250e+00 -5.162371635437011719e+00 -5.422397613525390625e+00 716 | 7.149999737739562988e-01 -5.107192993164062500e+00 -5.198210716247558594e+00 -5.429314613342285156e+00 717 | 7.160000205039978027e-01 -5.140361785888671875e+00 -5.233459949493408203e+00 -5.436024665832519531e+00 718 | 7.170000076293945312e-01 -5.173101425170898438e+00 -5.268106460571289062e+00 -5.442520141601562500e+00 719 | 7.179999947547912598e-01 -5.205410003662109375e+00 -5.302137851715087891e+00 -5.448799133300781250e+00 720 | 7.189999818801879883e-01 -5.237281799316406250e+00 -5.335540771484375000e+00 -5.454858779907226562e+00 721 | 7.200000286102294922e-01 -5.268699645996093750e+00 -5.368303775787353516e+00 -5.460690498352050781e+00 722 | 7.210000157356262207e-01 -5.299652099609375000e+00 -5.400413990020751953e+00 -5.466288566589355469e+00 723 | 7.220000028610229492e-01 -5.330137252807617188e+00 -5.431858539581298828e+00 -5.471652030944824219e+00 724 | 7.229999899864196777e-01 -5.360139846801757812e+00 -5.462625503540039062e+00 -5.476773262023925781e+00 725 | 7.239999771118164062e-01 -5.389644622802734375e+00 -5.492702007293701172e+00 -5.481644630432128906e+00 726 | 7.250000238418579102e-01 -5.418647766113281250e+00 -5.522076129913330078e+00 -5.486262321472167969e+00 727 | 7.260000109672546387e-01 -5.447126388549804688e+00 -5.550735473632812500e+00 -5.490617752075195312e+00 728 | 7.269999980926513672e-01 -5.475072860717773438e+00 -5.578668117523193359e+00 -5.494706153869628906e+00 729 | 7.279999852180480957e-01 -5.502470016479492188e+00 -5.605861186981201172e+00 -5.498518943786621094e+00 730 | 7.289999723434448242e-01 -5.529304504394531250e+00 -5.632302761077880859e+00 -5.502048492431640625e+00 731 | 7.300000190734863281e-01 -5.555564880371093750e+00 -5.657980918884277344e+00 -5.505289077758789062e+00 732 | 7.310000061988830566e-01 -5.581228256225585938e+00 -5.682884216308593750e+00 -5.508231163024902344e+00 733 | 7.319999933242797852e-01 -5.606283187866210938e+00 -5.706999301910400391e+00 -5.510867118835449219e+00 734 | 7.329999804496765137e-01 -5.630714416503906250e+00 -5.730315685272216797e+00 -5.513189315795898438e+00 735 | 7.340000271797180176e-01 -5.654499053955078125e+00 -5.752820968627929688e+00 -5.515187263488769531e+00 736 | 7.350000143051147461e-01 -5.677623748779296875e+00 -5.774503231048583984e+00 -5.516853332519531250e+00 737 | 7.360000014305114746e-01 -5.700069427490234375e+00 -5.795351028442382812e+00 -5.518179893493652344e+00 738 | 7.369999885559082031e-01 -5.721817016601562500e+00 -5.815352916717529297e+00 -5.519154548645019531e+00 739 | 7.379999756813049316e-01 -5.742851257324218750e+00 -5.834496974945068359e+00 -5.519770622253417969e+00 740 | 7.390000224113464355e-01 -5.763147354125976562e+00 -5.852772235870361328e+00 -5.520015716552734375e+00 741 | 7.400000095367431641e-01 -5.782688140869140625e+00 -5.870166778564453125e+00 -5.519882202148437500e+00 742 | 7.409999966621398926e-01 -5.801448822021484375e+00 
-5.886670589447021484e+00 -5.519357681274414062e+00 743 | 7.419999837875366211e-01 -5.819419860839843750e+00 -5.902271270751953125e+00 -5.518434524536132812e+00 744 | 7.429999709129333496e-01 -5.836570739746093750e+00 -5.916957855224609375e+00 -5.517100334167480469e+00 745 | 7.440000176429748535e-01 -5.852886199951171875e+00 -5.930720329284667969e+00 -5.515347480773925781e+00 746 | 7.450000047683715820e-01 -5.868343353271484375e+00 -5.943547248840332031e+00 -5.513161659240722656e+00 747 | 7.459999918937683105e-01 -5.882919311523437500e+00 -5.955427646636962891e+00 -5.510534286499023438e+00 748 | 7.469999790191650391e-01 -5.896589279174804688e+00 -5.966351509094238281e+00 -5.507452964782714844e+00 749 | 7.480000257492065430e-01 -5.909339904785156250e+00 -5.976307868957519531e+00 -5.503908157348632812e+00 750 | 7.490000128746032715e-01 -5.921146392822265625e+00 -5.985286235809326172e+00 -5.499890327453613281e+00 751 | 7.500000000000000000e-01 -5.931985855102539062e+00 -5.993276596069335938e+00 -5.495386123657226562e+00 752 | 7.509999871253967285e-01 -5.941843032836914062e+00 -6.000268936157226562e+00 -5.490388870239257812e+00 753 | 7.519999742507934570e-01 -5.950683593750000000e+00 -6.006253242492675781e+00 -5.484881401062011719e+00 754 | 7.530000209808349609e-01 -5.958496093750000000e+00 -6.011219024658203125e+00 -5.478856086730957031e+00 755 | 7.540000081062316895e-01 -5.965255737304687500e+00 -6.015156745910644531e+00 -5.472304344177246094e+00 756 | 7.549999952316284180e-01 -5.970943450927734375e+00 -6.018056869506835938e+00 -5.465212821960449219e+00 757 | 7.559999823570251465e-01 -5.975536346435546875e+00 -6.019910335540771484e+00 -5.457571029663085938e+00 758 | 7.570000290870666504e-01 -5.979017257690429688e+00 -6.020707130432128906e+00 -5.449370384216308594e+00 759 | 7.580000162124633789e-01 -5.981367111206054688e+00 -6.020438194274902344e+00 -5.440602302551269531e+00 760 | 7.590000033378601074e-01 -5.982555389404296875e+00 -6.019094467163085938e+00 -5.431251525878906250e+00 761 | 7.599999904632568359e-01 -5.982570648193359375e+00 -6.016666889190673828e+00 -5.421311378479003906e+00 762 | 7.609999775886535645e-01 -5.981399536132812500e+00 -6.013146400451660156e+00 -5.410774230957031250e+00 763 | 7.620000243186950684e-01 -5.979011535644531250e+00 -6.008525371551513672e+00 -5.399625778198242188e+00 764 | 7.630000114440917969e-01 -5.975399017333984375e+00 -6.002794265747070312e+00 -5.387864112854003906e+00 765 | 7.639999985694885254e-01 -5.970539093017578125e+00 -5.995944976806640625e+00 -5.375473976135253906e+00 766 | 7.649999856948852539e-01 -5.964414596557617188e+00 -5.987969875335693359e+00 -5.362450599670410156e+00 767 | 7.659999728202819824e-01 -5.957015991210937500e+00 -5.978860378265380859e+00 -5.348786354064941406e+00 768 | 7.670000195503234863e-01 -5.948322296142578125e+00 -5.968608856201171875e+00 -5.334470748901367188e+00 769 | 7.680000066757202148e-01 -5.938323974609375000e+00 -5.957207679748535156e+00 -5.319500923156738281e+00 770 | 7.689999938011169434e-01 -5.926998138427734375e+00 -5.944649219512939453e+00 -5.303865432739257812e+00 771 | 7.699999809265136719e-01 -5.914346694946289062e+00 -5.930926322937011719e+00 -5.287562370300292969e+00 772 | 7.710000276565551758e-01 -5.900348663330078125e+00 -5.916031360626220703e+00 -5.270582199096679688e+00 773 | 7.720000147819519043e-01 -5.884988784790039062e+00 -5.899957656860351562e+00 -5.252920150756835938e+00 774 | 7.730000019073486328e-01 -5.868268966674804688e+00 -5.882699012756347656e+00 -5.234574317932128906e+00 775 | 
7.739999890327453613e-01 -5.850172042846679688e+00 -5.864247798919677734e+00 -5.215537071228027344e+00 776 | 7.749999761581420898e-01 -5.830694198608398438e+00 -5.844598293304443359e+00 -5.195805549621582031e+00 777 | 7.760000228881835938e-01 -5.809825897216796875e+00 -5.823743820190429688e+00 -5.175375938415527344e+00 778 | 7.770000100135803223e-01 -5.787559509277343750e+00 -5.801678180694580078e+00 -5.154244422912597656e+00 779 | 7.779999971389770508e-01 -5.763893127441406250e+00 -5.778396606445312500e+00 -5.132409095764160156e+00 780 | 7.789999842643737793e-01 -5.738822937011718750e+00 -5.753892421722412109e+00 -5.109869003295898438e+00 781 | 7.799999713897705078e-01 -5.712345123291015625e+00 -5.728159904479980469e+00 -5.086622238159179688e+00 782 | 7.810000181198120117e-01 -5.684452056884765625e+00 -5.701194763183593750e+00 -5.062664985656738281e+00 783 | 7.820000052452087402e-01 -5.655155181884765625e+00 -5.672991752624511719e+00 -5.038001060485839844e+00 784 | 7.829999923706054688e-01 -5.624441146850585938e+00 -5.643545627593994141e+00 -5.012626647949218750e+00 785 | 7.839999794960021973e-01 -5.592317581176757812e+00 -5.612851619720458984e+00 -4.986544609069824219e+00 786 | 7.850000262260437012e-01 -5.558784484863281250e+00 -5.580905914306640625e+00 -4.959753036499023438e+00 787 | 7.860000133514404297e-01 -5.523849487304687500e+00 -5.547703742980957031e+00 -4.932259559631347656e+00 788 | 7.870000004768371582e-01 -5.487506866455078125e+00 -5.513241291046142578e+00 -4.904057502746582031e+00 789 | 7.879999876022338867e-01 -5.449768066406250000e+00 -5.477515220642089844e+00 -4.875154495239257812e+00 790 | 7.889999747276306152e-01 -5.410636901855468750e+00 -5.440521240234375000e+00 -4.845551490783691406e+00 791 | 7.900000214576721191e-01 -5.370117187500000000e+00 -5.402256488800048828e+00 -4.815251350402832031e+00 792 | 7.910000085830688477e-01 -5.328216552734375000e+00 -5.362717151641845703e+00 -4.784259796142578125e+00 793 | 7.919999957084655762e-01 -5.284946441650390625e+00 -5.321901321411132812e+00 -4.752577781677246094e+00 794 | 7.929999828338623047e-01 -5.240308761596679688e+00 -5.279805660247802734e+00 -4.720210075378417969e+00 795 | 7.940000295639038086e-01 -5.194314956665039062e+00 -5.236427783966064453e+00 -4.687160491943359375e+00 796 | 7.950000166893005371e-01 -5.146976470947265625e+00 -5.191765785217285156e+00 -4.653435707092285156e+00 797 | 7.960000038146972656e-01 -5.098300933837890625e+00 -5.145817756652832031e+00 -4.619040489196777344e+00 798 | 7.969999909400939941e-01 -5.048297882080078125e+00 -5.098581314086914062e+00 -4.583977699279785156e+00 799 | 7.979999780654907227e-01 -4.996978759765625000e+00 -5.050055503845214844e+00 -4.548254013061523438e+00 800 | 7.990000247955322266e-01 -4.944351196289062500e+00 -5.000238895416259766e+00 -4.511872291564941406e+00 801 | 8.000000119209289551e-01 -4.890434265136718750e+00 -4.949130535125732422e+00 -4.474843978881835938e+00 802 | 8.009999990463256836e-01 -4.835231781005859375e+00 -4.896729469299316406e+00 -4.437170028686523438e+00 803 | 8.019999861717224121e-01 -4.778762817382812500e+00 -4.843035221099853516e+00 -4.398859977722167969e+00 804 | 8.029999732971191406e-01 -4.721035003662109375e+00 -4.788047313690185547e+00 -4.359916687011718750e+00 805 | 8.040000200271606445e-01 -4.662052154541015625e+00 -4.731766223907470703e+00 -4.320343971252441406e+00 806 | 8.050000071525573730e-01 -4.601840972900390625e+00 -4.674191474914550781e+00 -4.280154228210449219e+00 807 | 8.059999942779541016e-01 -4.540409088134765625e+00 
[... data file continues: rows 808-1002 omitted -- four columns of floating-point values per row (a uniform coordinate grid ending at 1.0, followed by three sampled function values) ...]
--------------------------------------------------------------------------------
/NN/nn.py:
--------------------------------------------------------------------------------
"""
Copy code from https://github.com/jcjohnson/pytorch-examples#pytorch-nn
"""


# Code in file nn/two_layer_net_nn.py
import torch

device = torch.device('cpu')
# device = torch.device('cuda')  # Uncomment this to run on GPU

# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10

# Create random Tensors to hold inputs and outputs
x = torch.randn(N, D_in, device=device)
y = torch.randn(N, D_out, device=device)

# Use the nn package to define our model as a sequence of layers. nn.Sequential
# is a Module which contains other Modules, and applies them in sequence to
# produce its output. Each Linear Module computes output from input using a
# linear function, and holds internal Tensors for its weight and bias.
# After constructing the model we use the .to() method to move it to the
# desired device.
model = torch.nn.Sequential(
    torch.nn.Linear(D_in, H),
    torch.nn.ReLU(),
    torch.nn.Linear(H, D_out),
).to(device)

# The nn package also contains definitions of popular loss functions; in this
# case we will use Mean Squared Error (MSE) as our loss function. Setting
# reduction='sum' means that we are computing the *sum* of squared errors
# rather than the mean; this is for consistency with the examples above where
# we manually compute the loss, but in practice it is more common to average
# by setting reduction='mean'.
loss_fn = torch.nn.MSELoss(reduction='sum')

learning_rate = 1e-4
for t in range(500):
    # Forward pass: compute predicted y by passing x to the model. Module
    # objects override the __call__ operator so you can call them like
    # functions. When doing so you pass a Tensor of input data to the Module
    # and it produces a Tensor of output data.
    y_pred = model(x)

    # Compute and print loss. We pass Tensors containing the predicted and
    # true values of y, and the loss function returns a Tensor containing
    # the loss.
    loss = loss_fn(y_pred, y)
    print(t, loss.item())

    # Zero the gradients before running the backward pass.
    model.zero_grad()

    # Backward pass: compute the gradient of the loss with respect to all the
    # learnable parameters of the model. Internally, the parameters of each
    # Module are stored in Tensors with requires_grad=True, so this call will
    # compute gradients for all learnable parameters in the model.
    loss.backward()

    # Update the weights using gradient descent. Each parameter is a Tensor;
    # inside torch.no_grad() we can modify it in place without autograd
    # tracking the update.
    with torch.no_grad():
        for param in model.parameters():
            param -= learning_rate * param.grad
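
For comparison, the same two-layer network can be trained with an optimizer from torch.optim in place of the hand-rolled update above; this is the pattern the PINN scripts later in this repository use. A minimal sketch (not part of the original file; it reuses the sizes defined in nn.py):

import torch

N, D_in, H, D_out = 64, 1000, 100, 10
x = torch.randn(N, D_in)
y = torch.randn(N, D_out)

model = torch.nn.Sequential(
    torch.nn.Linear(D_in, H),
    torch.nn.ReLU(),
    torch.nn.Linear(H, D_out),
)
loss_fn = torch.nn.MSELoss(reduction='sum')

# Adam keeps per-parameter moment estimates, so it is less sensitive to the
# learning rate than the manual SGD update in nn.py.
optimizer = torch.optim.Adam(model.parameters(), lr=1e-4)

for t in range(500):
    y_pred = model(x)
    loss = loss_fn(y_pred, y)

    optimizer.zero_grad()  # clear gradients accumulated by the previous step
    loss.backward()        # populate .grad for every learnable parameter
    optimizer.step()       # apply the Adam update rule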

--------------------------------------------------------------------------------
/PINNs/AC.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MinglangYin/PyTorchTutorial/8ad44ebfbd8c88b5a74ee25909a6ded6656c5d14/PINNs/AC.mat
--------------------------------------------------------------------------------
/PINNs/ac_1d_td_fwd.py:
--------------------------------------------------------------------------------
"""
Author: Minglang Yin
PINNs (physics-informed neural networks) for solving the time-dependent Allen-Cahn equation (1D):

    u_t - 0.0001*u_xx + 5*u^3 - 5*u = 0,  x in [-1, 1], t in [0, 1]
with
    u(0, x) = x^2*cos(pi*x)
    u(t, -1) = u(t, 1)
    u_x(t, -1) = u_x(t, 1)

Input: [t, x]
Output: [u]
"""

import sys
# sys.path.insert(0, '../../Utils')

import torch
import torch.nn as nn
import numpy as np
import time
import random
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import scipy.io

# from plotting import newfig, savefig

torch.manual_seed(1234)
np.random.seed(1234)

import matplotlib
matplotlib.use('Agg')


class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.net = nn.Sequential()
        self.net.add_module('linear_layer_1', nn.Linear(2, 100))
        self.net.add_module('tanh_layer_1', nn.Tanh())
        for num in range(2, 5):
            self.net.add_module('linear_layer_%d' % (num), nn.Linear(100, 100))
            self.net.add_module('tanh_layer_%d' % (num), nn.Tanh())
        self.net.add_module('linear_layer_50', nn.Linear(100, 1))

    def forward(self, x):
        return self.net(x)

    def loss_pde(self, x):
        # PDE residual: u_t - 0.0001*u_xx + 5*u^3 - 5*u
        u = self.net(x)
        u_g = gradients(u, x)[0]
        u_t, u_x = u_g[:, :1], u_g[:, 1:]
        u_gg = gradients(u_x, x)[0]
        u_xx = u_gg[:, 1:]
        loss = u_t - 0.0001*u_xx + 5.0*u**3 - 5.0*u
        # loss = u_t - 0.0001*u_xx + u**3 - u
        return (loss**2).mean()

    def loss_f(self, x, u_f_train):
        u_f_pred = self.net(x)
        return ((u_f_pred - u_f_train)**2).mean()

    def loss_bc(self, x_b_l_train, x_b_r_train):
        # Periodic boundary conditions: match u and u_x at x = -1 and x = 1.
        u_b_l_pred = self.net(x_b_l_train)
        u_b_r_pred = self.net(x_b_r_train)
        u_b_l_pred_x = gradients(u_b_l_pred, x_b_l_train)[0][:, 1]
        u_b_r_pred_x = gradients(u_b_r_pred, x_b_r_train)[0][:, 1]
        return ((u_b_l_pred - u_b_r_pred)**2).mean() + ((u_b_l_pred_x - u_b_r_pred_x)**2).mean()

    def loss_ic(self, x_i_train, u_i_train):
        u_i_pred = self.net(x_i_train)
        return ((u_i_pred - u_i_train)**2).mean()


def gradients(outputs, inputs):
    return torch.autograd.grad(outputs, inputs, grad_outputs=torch.ones_like(outputs), create_graph=True)
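# Note (added): create_graph=True asks autograd to build a graph for the
# derivative itself, so the returned gradient can be differentiated again --
# this is what makes the second derivative u_xx in loss_pde possible.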

def to_numpy(input):
    if isinstance(input, torch.Tensor):
        return input.detach().cpu().numpy()
    elif isinstance(input, np.ndarray):
        return input
    else:
        raise TypeError('Unknown type of input, expected torch.Tensor or '
                        'np.ndarray, but got {}'.format(type(input)))


def init_cond(x):
    # Note: unused below -- the initial condition is taken from the exact
    # solution in AC.mat instead (see u_i_train in main()).
    return np.sin(np.pi*x)
    # return 1 + np.cos(np.pi*x)


def main():
    ## parameters
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    print(f"Use GPU: {torch.cuda.is_available()}")
    epochs = 250000
    num_i_train = 300
    num_b_train = 150
    num_f_train = 30000
    lr = 0.001

    ## pre-processing
    data = scipy.io.loadmat('./AC.mat')

    ## x: array, x_grid: grid data, X: flattened data
    t = data['tt'].flatten()[:, None]
    x = data['x'].flatten()[:, None]
    t_grid, x_grid = np.meshgrid(t, x)
    exact_grid = np.real(data['uu'])
    T = t_grid.flatten()[:, None]
    X = x_grid.flatten()[:, None]
    Exact = exact_grid.flatten()[:, None]

    ## Initial & boundary data
    id_i = np.random.choice(x.shape[0], num_i_train, replace=False)
    id_b = np.random.choice(t.shape[0], num_b_train, replace=False)
    id_f = np.random.choice(Exact.shape[0], num_f_train, replace=False)

    x_i = x_grid[id_i, 0][:, None]
    t_i = t_grid[id_i, 0][:, None]
    x_i_train = np.hstack((t_i, x_i))
    # u_i_train = init_cond(x_i)
    u_i_train = exact_grid[id_i, 0][:, None]

    x_b_l = x_grid[0, id_b][:, None]
    x_b_r = x_grid[-1, id_b][:, None]
    t_b_l = t_grid[0, id_b][:, None]
    t_b_r = t_grid[-1, id_b][:, None]
    x_b_l_train = np.hstack((t_b_l, x_b_l))
    x_b_r_train = np.hstack((t_b_r, x_b_r))

    x_f = X[id_f, 0][:, None]
    t_f = T[id_f, 0][:, None]
    x_f_train = np.hstack((t_f, x_f))
    u_f_train = Exact[id_f, 0][:, None]

    x_test = np.hstack((T, X))

    ## Form data tensors and send to device; collocation and boundary inputs
    ## need requires_grad=True because the losses differentiate w.r.t. them.
    x_i_train = torch.tensor(x_i_train, dtype=torch.float32).to(device)
    x_b_l_train = torch.tensor(x_b_l_train, requires_grad=True, dtype=torch.float32).to(device)
    x_b_r_train = torch.tensor(x_b_r_train, requires_grad=True, dtype=torch.float32).to(device)
    x_f_train = torch.tensor(x_f_train, requires_grad=True, dtype=torch.float32).to(device)
    x_test = torch.tensor(x_test, dtype=torch.float32).to(device)

    u_i_train = torch.tensor(u_i_train, dtype=torch.float32).to(device)
    u_f_train = torch.tensor(u_f_train, dtype=torch.float32).to(device)

    ## instantiate model
    model = Model().to(device)

    # Loss and optimizer
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)

    # training
    loss_his = np.zeros((epochs, 3))
    def train(epoch):
        model.train()
        def closure():
            optimizer.zero_grad()
            loss_pde = model.loss_pde(x_f_train)
            loss_bc = model.loss_bc(x_b_l_train, x_b_r_train)
            loss_ic = model.loss_ic(x_i_train, u_i_train)
            loss = 10*loss_pde + loss_bc + 10*loss_ic

            loss_his[epoch-1, 0] = to_numpy(loss_pde)
            loss_his[epoch-1, 1] = to_numpy(loss_bc)
            loss_his[epoch-1, 2] = to_numpy(loss_ic)
            print(f'epoch {epoch}: loss_pde {loss_pde:.6f}, loss_bc {loss_bc:.6f}, loss_ic {loss_ic:.6f}')

            loss.backward()
            return loss
        optimizer.step(closure)

    print('start training...')
    tic = time.time()
    for epoch in range(1, epochs + 1):
        if epoch == 100000:
            optimizer = torch.optim.Adam(model.parameters(), lr=0.0001)
        elif epoch == 200000:
            optimizer = torch.optim.Adam(model.parameters(), lr=0.00001)
        # elif epoch == 248000:
        #     optimizer = torch.optim.LBFGS(model.parameters())
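        # Note (added): constructing a new Adam optimizer resets its running
        # moment estimates. To drop the learning rate while keeping the
        # optimizer state, one could instead set
        # optimizer.param_groups[0]['lr'] = 0.0001 in place.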

        train(epoch)
    toc = time.time()
    print(f'total training time: {toc-tic}')

    u_pred = to_numpy(model(x_test))
    u_pred = u_pred.reshape((exact_grid.shape[0], exact_grid.shape[1]))
    scipy.io.savemat('pred_res.mat', {'t': t, 'x': x, 'u': u_pred})

    # u_i_pred = model(x_i_train)
    # np.savetxt('x_i_train.txt', to_numpy(u_i_pred))

    ## plotting
    x_f_train = np.hstack((T, X))
    x_f_train = torch.tensor(x_f_train, requires_grad=True, dtype=torch.float32).to(device)
    u_f_pred = to_numpy(model(x_f_train)).reshape(512, 201)
    u_f_train = to_numpy(Exact.reshape(512, 201))

    fig = plt.figure(constrained_layout=False, figsize=(9, 3))
    gs = fig.add_gridspec(1, 2)
    ax = fig.add_subplot(gs[0])
    h = ax.imshow(u_f_pred, cmap='coolwarm', aspect=0.5)
    ax.set_title('Prediction')
    ax = fig.add_subplot(gs[1])
    h = ax.imshow(u_f_train, cmap='coolwarm', aspect=0.5)
    ax.set_title('Exact (training data)')
    fig.colorbar(h, ax=ax)
    fig.savefig('./1D_ac.png')
    plt.close()

    fig = plt.figure(constrained_layout=False, figsize=(4, 4))
    gs = fig.add_gridspec(1, 1)
    ax = fig.add_subplot(gs[0])
    h = ax.plot(loss_his[:, 0], label="PDE loss")
    h = ax.plot(loss_his[:, 1], label="BC loss")
    h = ax.plot(loss_his[:, 2], label="IC loss")
    ax.set_yscale('log')
    ax.set_title('Loss')
    ax.legend()
    fig.savefig('./Loss.png')
    plt.close()

if __name__ == '__main__':
    main()
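
The gradients() helper is the core PINN mechanism: torch.autograd.grad differentiates the network output with respect to its input coordinates, and create_graph=True lets the result be differentiated again. A standalone check of this pattern against known derivatives (illustrative snippet, not part of the repository):

import math
import torch

def gradients(outputs, inputs):
    # Same helper as in ac_1d_td_fwd.py.
    return torch.autograd.grad(outputs, inputs,
                               grad_outputs=torch.ones_like(outputs),
                               create_graph=True)

x = torch.linspace(-1.0, 1.0, 101, requires_grad=True).reshape(-1, 1)
u = torch.sin(math.pi * x)      # stand-in for a network output u(x)

u_x = gradients(u, x)[0]        # expect  pi*cos(pi*x)
u_xx = gradients(u_x, x)[0]     # expect -pi^2*sin(pi*x)

print(torch.allclose(u_x, math.pi * torch.cos(math.pi * x), atol=1e-4))
print(torch.allclose(u_xx, -math.pi**2 * torch.sin(math.pi * x), atol=1e-4))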

--------------------------------------------------------------------------------
/PINNs/ac_2d_td.py:
--------------------------------------------------------------------------------
"""
Author: Minglang Yin
PINNs (physics-informed neural networks) for solving the time-dependent Allen-Cahn equation (2D):

    d(phi)/dt + div(phi*u) = 1/Pe * (-F'(phi) + eps^2*lap(phi)) + beta(t)

where:
    u = 1;
    beta(t) = 0;

Input: [t, x, y]
Output: [u]
"""

import sys
sys.path.insert(0, '../Utils')

import torch
import torch.nn as nn
import numpy as np
import time
import random
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import scipy.io

from plotting import newfig, savefig

torch.manual_seed(123456)
np.random.seed(123456)

eps = 0.02
Pe = 1


class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.net = nn.Sequential()
        self.net.add_module('linear_layer_1', nn.Linear(3, 30))
        self.net.add_module('tanh_layer_1', nn.Tanh())
        for num in range(2, 10):
            self.net.add_module('linear_layer_%d' % (num), nn.Linear(30, 30))
            self.net.add_module('tanh_layer_%d' % (num), nn.Tanh())
        self.net.add_module('linear_layer_50', nn.Linear(30, 1))

    def forward(self, x):
        return self.net(x)

    def loss_pde(self, x):
        u = self.net(x)
        u_g = gradients(u, x)[0]
        u_t, u_x, u_y = u_g[:, 0], u_g[:, 1], u_g[:, 2]
        u_xx = gradients(u_x, x)[0][:, 1]
        u_yy = gradients(u_y, x)[0][:, 2]
        # Keep F'(u) one-dimensional to match the gradient slices above;
        # using the (N, 1) network output directly would broadcast the
        # residual to an (N, N) matrix.
        F_g = u[:, 0]**3 - u[:, 0]
        loss = u_t + (u_x + u_y) - 1/Pe*(-F_g + eps**2*(u_xx + u_yy))
        return (loss**2).mean()

    def loss_bc(self, x_b, u_b):
        return ((self.net(x_b) - u_b)**2).mean()

    def loss_ic(self, x_i, u_i):
        u_i_pred = self.net(x_i)
        return ((u_i_pred - u_i)**2).mean()


def gradients(outputs, inputs):
    return torch.autograd.grad(outputs, inputs, grad_outputs=torch.ones_like(outputs), create_graph=True)


def to_numpy(input):
    if isinstance(input, torch.Tensor):
        return input.detach().cpu().numpy()
    elif isinstance(input, np.ndarray):
        return input
    else:
        raise TypeError('Unknown type of input, expected torch.Tensor or '
                        'np.ndarray, but got {}'.format(type(input)))


def AC_2D_init(x):
    # Circular interface of radius 0.1 centered at (0.5, 0.5).
    return np.tanh((0.1 - np.sqrt((x[:, 1]-0.5)**2 + (x[:, 2]-0.5)**2)) / (np.sqrt(2)*eps))


def main():
    ## parameters
    device = torch.device("cpu")
    epochs = 5000
    lr = 0.001

    num_x = 100
    num_y = 100
    num_t = 10
    num_b_train = 10     # boundary sampling points
    num_f_train = 100    # interior sampling points
    num_i_train = 100    # initial sampling points

    x = np.linspace(0, 1, num=num_x)
    y = np.linspace(0, 1, num=num_y)
    t = np.linspace(0, 1, num=num_t)
    x_grid, y_grid = np.meshgrid(x, y)
    # x_test = np.concatenate((t_grid.flatten()[:,None], x_grid.flatten()[:,None], y_grid.flatten()[:,None]), axis=1)
    x_2d = np.concatenate((x_grid.flatten()[:, None], y_grid.flatten()[:, None]), axis=1)
    xt_init = np.concatenate((np.zeros((num_x*num_y, 1)), x_2d), axis=1)
    u_init = AC_2D_init(xt_init)[:, None]

    # Pair each time level with a full copy of the spatial grid: np.tile
    # repeats the grid once per time level, while np.repeat keeps t constant
    # within each block of num_x*num_y points. (np.tile on t would cycle the
    # time values inside each block and mis-pair the coordinates.)
    x_2d_ext = np.tile(x_2d, [num_t, 1])
    t_ext = np.repeat(t, num_x*num_y)[:, None]
    xt_2d_ext = np.concatenate((t_ext, x_2d_ext), axis=1)

    ## boundary points (the four edges of the unit square)
    x_up = np.vstack((x_grid[-1, :], y_grid[-1, :])).T
    x_dw = np.vstack((x_grid[0, :], y_grid[0, :])).T
    x_l = np.vstack((x_grid[:, 0], y_grid[:, 0])).T
    x_r = np.vstack((x_grid[:, -1], y_grid[:, -1])).T
    x_bound = np.vstack((x_up, x_dw, x_l, x_r))

    x_bound_ext = np.tile(x_bound, [num_t, 1])
    t_bound_ext = np.repeat(t, num_x*4)[:, None]
    xt_bound_ext = np.concatenate((t_bound_ext, x_bound_ext), axis=1)
    u_bound_ext = -1*np.ones((num_x*4*num_t))[:, None]

    ## sampling
    id_f = np.random.choice(num_x*num_y*num_t, num_f_train)
    id_b = np.random.choice(num_x*4*num_t, num_b_train)    # Dirichlet boundary, sampled over all time levels
    id_i = np.random.choice(num_x*num_y, num_i_train)

    x_i = xt_init[id_i, :]
    u_i = u_init[id_i, :]
    x_f = xt_2d_ext[id_f, :]
    x_b = xt_bound_ext[id_b, :]
    u_b = u_bound_ext[id_b, :]

    ## set data as tensors and send to device
    x_f_train = torch.tensor(x_f, requires_grad=True, dtype=torch.float32).to(device)
    x_b_train = torch.tensor(x_b, requires_grad=True, dtype=torch.float32).to(device)
    x_test = torch.tensor(xt_2d_ext, requires_grad=True, dtype=torch.float32).to(device)
    x_i_train = torch.tensor(x_i, dtype=torch.float32).to(device)
    u_i_train = torch.tensor(u_i, dtype=torch.float32).to(device)
    u_b_train = torch.tensor(u_b, dtype=torch.float32).to(device)

    ## instantiate model
    model = Model().to(device)

    # Loss and optimizer
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)

    # training
    def train(epoch):
        model.train()
        def closure():
            optimizer.zero_grad()
            loss_pde = model.loss_pde(x_f_train)
            loss_bc = model.loss_bc(x_b_train, u_b_train)
            loss_ic = model.loss_ic(x_i_train, u_i_train)
            loss = loss_pde + loss_bc + loss_ic
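            # Note (added): the three terms are weighted equally here, whereas
            # ac_1d_td_fwd.py uses 10*loss_pde + loss_bc + 10*loss_ic; the
            # weights are tuning parameters, not part of the PDE.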
            loss.backward()
            return loss
        loss = optimizer.step(closure)
        loss_value = loss.item() if not isinstance(loss, float) else loss
        print(f'epoch {epoch}: loss {loss_value:.6f}')

    print('start training...')
    tic = time.time()
    for epoch in range(1, epochs + 1):
        train(epoch)
    toc = time.time()
    print(f'total training time: {toc-tic}')

    ## test
    u_test = np.zeros((num_t, num_x, num_y))
    for i in range(0, 6):
        xt = np.concatenate((t[i]*np.ones((num_x*num_y, 1)), x_2d), axis=1)
        xt_tensor = torch.tensor(xt, dtype=torch.float32).to(device)
        u_test[i, :, :] = to_numpy(model(xt_tensor)).reshape(num_x, num_y)

    x_test = to_numpy(x_test)

    # Plot the last computed time slice (i = 5 after the loop above); move
    # this block inside the loop to save one figure per slice.
    fig, ax = newfig(2.0, 1.1)
    ax.axis('off')
    gs0 = gridspec.GridSpec(1, 2)
    gs0.update(top=1-0.06, bottom=1-1/3, left=0.15, right=0.85, wspace=0)
    h = ax.imshow(u_test[i, :, :].T, interpolation='nearest', cmap='rainbow', origin='lower', aspect='auto')
    fig.colorbar(h)
    ax.plot(x_test[:, 1], x_test[:, 2], 'kx', label='Data (%d points)' % (x_test.shape[0]), markersize=4, clip_on=False)
    savefig('./u_test_' + str(i))

if __name__ == '__main__':
    main()
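
A quick way to sanity-check the time-space pairing used above (np.repeat for t against np.tile for the spatial grid) is to build a tiny version and inspect it. This standalone snippet uses a hypothetical 2x2 grid with 3 time levels (illustrative only):

import numpy as np

num_t, num_xy = 3, 4                        # 3 time levels, 2x2 spatial grid
t = np.linspace(0.0, 1.0, num_t)
x_2d = np.array([[0., 0.], [0., 1.], [1., 0.], [1., 1.]])

x_ext = np.tile(x_2d, [num_t, 1])           # grid repeated once per time level
t_ext = np.repeat(t, num_xy)[:, None]       # t constant within each block
xt = np.concatenate((t_ext, x_ext), axis=1)

# Every (t, x, y) combination appears exactly once:
assert xt.shape == (num_t * num_xy, 3)
assert len({tuple(row) for row in xt}) == num_t * num_xy

# With np.tile(t[:, None], [num_xy, 1]) instead, each grid point would always
# receive the same time value whenever num_xy is a multiple of num_t.
print(xt)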

--------------------------------------------------------------------------------
/PINNs/pred_res.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MinglangYin/PyTorchTutorial/8ad44ebfbd8c88b5a74ee25909a6ded6656c5d14/PINNs/pred_res.mat
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# PyTorch Tutorial for Physics-informed Machine Learning

This repository contains several key models in physics-informed machine learning (PI-ML) and data-driven machine learning, written in PyTorch.

## Data-driven models
* Neural Network (NN)
    - Standard fully connected neural network
* Multi-fidelity Neural Network (MFNN)
    - Three standard neural networks coupled to fit low-fidelity data, high-fidelity data, and the correlation between them
* Convolutional Neural Network (CNN)
    - Convolutional neural network (decoder)

## Physics-informed Machine Learning (PIML)
* Physics-informed Neural Networks (PINNs)
    - Physics-informed neural networks for solving partial differential equations, e.g., the Allen-Cahn equation (1D and 2D, time-dependent)
* Deep Operator Networks (DeepONet)
    - DeepONet for learning a PDE operator

## Proof-of-concept examples
* DynNet (Dynamic-graph Network)
    - Fully connected neural network demonstrating PyTorch's dynamic computation graph
* Gradient (Automatic Differentiation)
    - Calculating gradients in PyTorch

--------------------------------------------------------------------------------
/Utils/__pycache__/plotting.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/MinglangYin/PyTorchTutorial/8ad44ebfbd8c88b5a74ee25909a6ded6656c5d14/Utils/__pycache__/plotting.cpython-37.pyc
--------------------------------------------------------------------------------
/Utils/plotting.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 9 20:11:57 2017

@author: mraissi
"""

import numpy as np
import matplotlib as mpl
# mpl.use('pgf')

def figsize(scale, nplots=1):
    fig_width_pt = 390.0                           # Get this from LaTeX using \the\textwidth
    inches_per_pt = 1.0/72.27                      # Convert pt to inch
    golden_mean = (np.sqrt(5.0)-1.0)/2.0           # Aesthetic ratio (you could change this)
    fig_width = fig_width_pt*inches_per_pt*scale   # width in inches
    fig_height = nplots*fig_width*golden_mean      # height in inches
    fig_size = [fig_width, fig_height]
    return fig_size

pgf_with_latex = {                        # setup matplotlib to use latex for output
    "pgf.texsystem": "pdflatex",          # change this if using xetex or lautex
    "text.usetex": True,                  # use LaTeX to write all text
    "font.family": "serif",
    "font.serif": [],                     # blank entries should cause plots to inherit fonts from the document
    "font.sans-serif": [],
    "font.monospace": [],
    "axes.labelsize": 10,                 # LaTeX default is 10pt font.
    "font.size": 10,
    "legend.fontsize": 8,                 # Make the legend/label fonts a little smaller
    "xtick.labelsize": 8,
    "ytick.labelsize": 8,
    "figure.figsize": figsize(1.0),       # default fig size of 0.9 textwidth
    "pgf.preamble": [
        r"\usepackage[utf8x]{inputenc}",  # use utf8 fonts because your computer can handle it :)
        r"\usepackage[T1]{fontenc}",      # plots will be generated using this preamble
    ]
}
mpl.rcParams.update(pgf_with_latex)

import matplotlib.pyplot as plt

# I make my own newfig and savefig functions
def newfig(width, nplots=1):
    fig = plt.figure(figsize=figsize(width, nplots))
    ax = fig.add_subplot(111)
    return fig, ax

def savefig(filename, crop=True):
    if crop == True:
        # plt.savefig('{}.pgf'.format(filename), bbox_inches='tight', pad_inches=0)
        plt.savefig('{}.pdf'.format(filename), bbox_inches='tight', pad_inches=0)
        plt.savefig('{}.eps'.format(filename), bbox_inches='tight', pad_inches=0)
    else:
        # plt.savefig('{}.pgf'.format(filename))
        plt.savefig('{}.pdf'.format(filename))
        plt.savefig('{}.eps'.format(filename))

## Simple plot
# fig, ax = newfig(1.0)
#
# def ema(y, a):
#     s = []
#     s.append(y[0])
#     for t in range(1, len(y)):
#         s.append(a * y[t] + (1-a) * s[t-1])
#     return np.array(s)
#
# y = [0]*200
# y.extend([20]*(1000-len(y)))
# s = ema(y, 0.01)
#
# ax.plot(s)
# ax.set_xlabel('X Label')
# ax.set_ylabel('EMA')
#
# savefig('ema')
--------------------------------------------------------------------------------
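
For reference, a minimal use of the newfig/savefig helpers above (an illustrative sketch, not part of the repository: it assumes it is run from the repository root, the module sets text.usetex=True so a working LaTeX installation is required, and 'example' is an arbitrary output name):

import sys
sys.path.insert(0, './Utils')

import numpy as np
from plotting import newfig, savefig

# Figure sized to the full LaTeX text width, with golden-ratio height.
fig, ax = newfig(1.0)
x = np.linspace(0, 2*np.pi, 200)
ax.plot(x, np.sin(x))
ax.set_xlabel(r'$x$')
ax.set_ylabel(r'$\sin(x)$')

# Writes example.pdf and example.eps, cropped to a tight bounding box.
savefig('example')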