├── .DS_Store ├── .gitignore ├── LICENSE ├── README.md ├── first_order_ode ├── README.md ├── data │ ├── Stest.csv │ ├── Strain.csv │ ├── a0.csv │ ├── atest.csv │ └── atrain.csv ├── euler_example.py ├── euler_example_PyTorch.py ├── euler_predict_only.py ├── euler_save_traning.py └── model.py └── second_order_ode ├── README.md ├── data ├── data.csv ├── data02.csv ├── data_contaminated.csv ├── data_contaminated_02.csv └── runge_kutta_create_data.py ├── model.py ├── runge_kutta_example.py ├── runge_kutta_example_PyTorch.py ├── runge_kutta_example_contaminated.py ├── runge_kutta_predict_only.py └── runge_kutta_save_training.py /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PML-UCF/pinn_ode_tutorial/a98688e67387a44a23076851a9c14fe000c13de7/.DS_Store -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | 106 | # IDE files 107 | .idea 108 | 109 | # Mac DS_Store 110 | .DS_Store 111 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 PML @ UCF 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be 
included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | [![DOI](https://zenodo.org/badge/265305262.svg)](https://zenodo.org/badge/latestdoi/265305262) 4 | 5 | 6 | # Python Implementation of Ordinary Differential Equations Solvers using Hybrid Physics-informed Neural Networks 7 | 8 | This repository is a tutorial on implementing integration algorithms for first- and second-order ODEs as recurrent neural networks in Python. The first-order example implements an Euler forward integrator used to solve a fatigue crack growth problem (a minimal sketch of the underlying recurrence is given just before the Publications section below). The second-order example implements a Runge-Kutta integrator used for the system identification of a two-degree-of-freedom vibration problem. 9 | 10 | The repository consists of the following two folders: 11 | 12 | ### first_order_ode: 13 | - euler_example is a complete code implementation including the EulerIntegratorCell class, creation and training of the model, as well as prediction on test data 14 | - euler_save_training trains the imported model on training data and saves the model weights 15 | - euler_predict_only loads the model weights and predicts on test data 16 | - model contains the EulerIntegratorCell class, the Normalization layer and the create_model function 17 | 18 | ### second_order_ode: 19 | - runge_kutta_example is a complete implementation of a Runge-Kutta integrator including model training and prediction 20 | - runge_kutta_save_training trains the trainable coefficients on the training data and saves the model weights 21 | - runge_kutta_predict_only loads the saved model weights and predicts on test data 22 | - model contains the RungeKuttaIntegratorCell class and the create_model function 23 | 24 | To run the code, clone the repository: 25 | 26 | ``` 27 | $ git clone git@github.com:PML-UCF/pinn_ode_tutorial.git 28 | ``` 29 | ## Citing this repository 30 | Please cite this repository using: 31 | 32 | ``` 33 | @misc{2020_pinn_educational, 34 | Author = {Kajetan Fricke and Renato G. Nascimento and Augusto D. Marques and Felipe A. C. Viana}, 35 | Doi = {10.5281/zenodo.3895408}, 36 | Howpublished = {https://github.com/PML-UCF/pinn\_ode\_tutorial}, 37 | Month = {May}, 38 | Publisher = {Zenodo}, 39 | Title = {Python Implementation of Ordinary Differential Equations Solvers using Hybrid Physics-informed Neural Networks}, 40 | Url = {https://github.com/PML-UCF/pinn\_ode\_tutorial}, 41 | Version = {0.0.1}, 42 | Year = {2020}} 43 | ``` 44 | 45 | The corresponding reference entry should look like: 46 | 47 | K. Fricke, R. G. Nascimento, A. D. Marques, and F. A. C. Viana, Python Implementation of Ordinary Differential Equations Solvers using Hybrid Physics-informed Neural Networks, v0.0.1, Zenodo, https://github.com/PML-UCF/pinn_ode_tutorial, 10.5281/zenodo.3895408.
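For orientation, the first-order example reduces to the Euler forward recurrence obtained from Paris' law, a_t = a_(t-1) + C*(ΔK_t)^m, where the stress intensity range ΔK is supplied either by a closed-form expression or by a small neural network (the dKlayer). The following plain-NumPy sketch is illustration only: the helper name euler_crack_growth and the constant stress history in the usage comment are hypothetical, while C, m, a0 and the closed-form ΔK = F·S·√(πa) are the values used elsewhere in this repository (the actual implementation wraps the same update as a Keras RNN cell, EulerIntegratorCell).

```
import numpy as np

def euler_crack_growth(S, a0, C=1.5e-11, m=3.8, F=1.0):
    # Euler forward integration of Paris' law: a_t = a_(t-1) + C * dK_t**m
    # S  : 1-D array with the far-field stress of each load cycle
    # a0 : initial crack length (m); dK uses the closed form F*S*sqrt(pi*a)
    a, a_prev = np.empty(len(S)), a0
    for t, s in enumerate(S):
        dK = F * s * np.sqrt(np.pi * a_prev)   # stress intensity range
        a_prev = a_prev + C * dK**m            # Euler forward update
        a[t] = a_prev
    return a

# e.g. crack growth over 1000 cycles at a constant far-field stress of 100:
# a_history = euler_crack_growth(np.full(1000, 100.0), a0=0.005)
```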
48 | 49 | ## Publications 50 | 51 | The following publications out of the PML-UCF research group used/referred to this repository: 52 | 53 | - R. G. Nascimento, K. Fricke, F. A. C. Viana, "[A tutorial on solving ordinary differential equations using Python and hybrid physics-informed neural network](https://www.sciencedirect.com/science/article/pii/S095219762030292X)," Engineering Applications of Artificial Intelligence, Vol. 96, 2020, 103996. (DOI: 10.1016/j.engappai.2020.103996). 54 | -------------------------------------------------------------------------------- /first_order_ode/README.md: -------------------------------------------------------------------------------- 1 | # Euler Integration of First Order Ordinary Differential Equations 2 | 3 | This folder contains the implementation of the Euler integrator to solve a fatigue crack propagation problem as first order ODE example. 4 | 5 | The files in this folder can be briefly described as follows: 6 | 7 | TENSORFLOW 8 | 9 | - euler_example.py is a complete code implementation including the creation of the EulerIntegratorCell class, the creation and training of the model as well as prediction on test data 10 | - euler_predict_only.py loads the model weights and predicts on test data 11 | - model.py contains the EulerIntegratorCell class, the Normalization layer and the create_model function 12 | 13 | PYTORCH 14 | 15 | - euler_example_PyTorch.py is a simplified PyTorch implementation of euler_example.py 16 | 17 | DATA: 18 | - The data folder contains the far-field stress data, target crack length and initial crack length for training and prediction 19 | -------------------------------------------------------------------------------- /first_order_ode/data/a0.csv: -------------------------------------------------------------------------------- 1 | a0 2 | 0.005 -------------------------------------------------------------------------------- /first_order_ode/data/atrain.csv: -------------------------------------------------------------------------------- 1 | 0 2 | 0.006603268803706955 3 | 0.0068637848747698715 4 | 0.0071793344376834345 5 | 0.007430880295014039 6 | 0.007471027217731303 7 | 0.007493435118847062 8 | 0.007530907737313543 9 | 0.007569647717761771 10 | 0.0075987021290014126 11 | 0.0076236257843448185 12 | 0.007668492412032631 13 | 0.007709189119860738 14 | 0.007737714516472137 15 | 0.007779226614449526 16 | 0.007831583603720965 17 | 0.007876531281400272 18 | 0.00789699439786506 19 | 0.007942015466085596 20 | 0.00800440766252878 21 | 0.008028726102593083 22 | 0.008075825297410695 23 | 0.008094431950302024 24 | 0.008139254264970492 25 | 0.0081960880575268 26 | 0.008231033696381038 27 | 0.008266390107996192 28 | 0.008295653450767895 29 | 0.008340228235624117 30 | 0.00838913022104868 31 | 0.008431821990429206 32 | 0.008490412072164366 33 | 0.008537954184587733 34 | 0.008585858103182631 35 | 0.008616886933885218 36 | 0.008677358471311233 37 | 0.008737319713164701 38 | 0.008773442472818702 39 | 0.008836775989816833 40 | 0.008869168117672292 41 | 0.008920106794873008 42 | 0.008969410305170438 43 | 0.009022331254923984 44 | 0.009073487281123695 45 | 0.009096074962877874 46 | 0.009160772556456458 47 | 0.009229612228215223 48 | 0.009281503989617242 49 | 0.009596712508546051 50 | 0.010375849179435064 51 | 0.011021128443976055 52 | 0.011658586773526618 53 | 0.012708319038722386 54 | 0.013671396084932834 55 | 0.014778896696299944 56 | 0.01627770902189362 57 | 0.017897650614256844 58 | 0.02016436336046282 59 | 0.022420999145090863 60 | 
0.02577311702034626 61 | 0.03078800310186568 62 | -------------------------------------------------------------------------------- /first_order_ode/euler_example.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | from tensorflow.keras.layers import RNN, Dense, Layer 5 | from tensorflow.keras import Sequential 6 | from tensorflow.keras.optimizers import RMSprop 7 | from tensorflow.python.framework import tensor_shape 8 | from tensorflow import float32, concat, convert_to_tensor 9 | 10 | class EulerIntegratorCell(Layer): 11 | def __init__(self, C, m, dKlayer, a0=None, units=1, **kwargs): 12 | super(EulerIntegratorCell, self).__init__(**kwargs) 13 | self.units = units 14 | self.C = C 15 | self.m = m 16 | self.a0 = a0 17 | self.dKlayer = dKlayer 18 | self.state_size = tensor_shape.TensorShape(self.units) 19 | self.output_size = tensor_shape.TensorShape(self.units) 20 | 21 | def build(self, input_shape, **kwargs): 22 | self.built = True 23 | 24 | def call(self, inputs, states): 25 | inputs = convert_to_tensor(inputs) 26 | a_tm1 = convert_to_tensor(states) 27 | x_d_tm1 = concat((inputs, a_tm1[0, :]), axis=1) 28 | dk_t = self.dKlayer(x_d_tm1) 29 | da_t = self.C * (dk_t ** self.m) 30 | a = da_t + a_tm1[0, :] 31 | return a, [a] 32 | 33 | def get_initial_state(self, inputs=None, batch_size=None, dtype=None): 34 | return self.a0 35 | 36 | class Normalization(Layer): 37 | def __init__(self, S_low, S_up, a_low, a_up, **kwargs): 38 | super(Normalization, self).__init__(**kwargs) 39 | self.low_bound_S = S_low 40 | self.upper_bound_S = S_up 41 | self.low_bound_a = a_low 42 | self.upper_bound_a = a_up 43 | 44 | def build(self, input_shape, **kwargs): 45 | self.built = True 46 | 47 | def call(self, inputs): 48 | output = (inputs - [self.low_bound_S, self.low_bound_a]) / [(self.upper_bound_S - self.low_bound_S), (self.upper_bound_a - self.low_bound_a)] 49 | return output 50 | 51 | def create_model(C, m, a0, dKlayer, batch_input_shape, return_sequences=False, return_state=False): 52 | euler = EulerIntegratorCell(C=C, m=m, dKlayer=dKlayer, a0=a0, batch_input_shape=batch_input_shape) 53 | PINN = RNN(cell=euler, batch_input_shape=batch_input_shape, return_sequences=return_sequences, return_state=return_state) 54 | model = Sequential() 55 | model.add(PINN) 56 | model.compile(loss='mse', optimizer=RMSprop(1e-2)) 57 | return model 58 | 59 | if __name__ == "__main__": 60 | # Paris law coefficients 61 | [C, m] = [1.5E-11, 3.8] 62 | 63 | # data 64 | Strain = np.asarray(pd.read_csv('./data/Strain.csv'))[:,:,np.newaxis] 65 | atrain = np.asarray(pd.read_csv('./data/atrain.csv')) 66 | a0 = np.asarray(pd.read_csv('./data/a0.csv'))[0,0]*np.ones((Strain.shape[0],1)) 67 | 68 | # stress-intensity layer 69 | dKlayer = Sequential() 70 | dKlayer.add(Normalization(np.min(Strain), np.max(Strain), np.min(atrain), np.max(atrain))) 71 | dKlayer.add(Dense(5, activation='tanh')) 72 | dKlayer.add(Dense(1)) 73 | 74 | # weight initialization 75 | S_range = np.linspace(np.min(Strain), np.max(Strain), 1000) 76 | a_range = np.linspace(np.min(atrain), np.max(atrain), 1000)[np.random.permutation(np.arange(1000))] 77 | dK_range = -12.05 + 0.24 * S_range + 760.0 * a_range 78 | 79 | dKlayer.compile(loss='mse', optimizer=RMSprop(1e-2)) 80 | inputs_train = np.transpose(np.asarray([S_range, a_range])) 81 | dKlayer.fit(inputs_train, dK_range, epochs=100) 82 | 83 | # fitting physics-informed neural network 84 | model = 
create_model(C=C, m=m, a0=convert_to_tensor(a0, dtype=float32), dKlayer=dKlayer, batch_input_shape=Strain.shape) 85 | aPred_before = model.predict_on_batch(Strain)[:,:] 86 | model.fit(Strain, atrain, epochs=100, steps_per_epoch=1, verbose=1) 87 | aPred = model.predict_on_batch(Strain)[:,:] 88 | 89 | # plotting predictions 90 | fig = plt.figure() 91 | plt.plot([0,0.05],[0,0.05],'--k') 92 | plt.plot(atrain, aPred_before, 'o', label = 'before training') 93 | plt.plot(atrain, aPred, 's', label = 'after training') 94 | plt.xlabel("actual crack length (m)") 95 | plt.ylabel("predicted crack length (m)") 96 | plt.legend(loc = 'upper center',facecolor = 'w') 97 | plt.grid(which='both') 98 | plt.show() 99 | -------------------------------------------------------------------------------- /first_order_ode/euler_example_PyTorch.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | 4 | import matplotlib.pyplot as plt 5 | import matplotlib as matplotlib 6 | 7 | from torch import ( 8 | nn, 9 | Tensor, 10 | sqrt, 11 | pi, 12 | stack, 13 | cat, 14 | randperm, 15 | min, 16 | max, 17 | linspace, 18 | transpose, 19 | optim 20 | ) 21 | 22 | 23 | class Normalization(nn.Module): 24 | def __init__(self, S_low, S_up, a_low, a_up, **kwargs): 25 | super(Normalization, self).__init__(**kwargs) 26 | self.low_bound_S = S_low 27 | self.upper_bound_S = S_up 28 | self.low_bound_a = a_low 29 | self.upper_bound_a = a_up 30 | 31 | def forward(self, inputs): 32 | 33 | output_S = (inputs[:, 0] - self.low_bound_S)/(self.upper_bound_S - self.low_bound_S) 34 | output_a = (inputs[:, 1] - self.low_bound_a)/(self.upper_bound_a - self.low_bound_a) 35 | 36 | return stack([output_S,output_a], dim=1) 37 | 38 | 39 | class MyRNN(nn.Module): 40 | def __init__(self, cell): 41 | super(MyRNN, self).__init__() 42 | self.cell = cell 43 | 44 | def forward(self, inputs, a0): 45 | 46 | bs, seq_sz, _ = inputs.shape 47 | a = a0 48 | for t in range(seq_sz): 49 | input = inputs[:, t, :] 50 | state = self.cell.forward(input, a) 51 | a = a+state 52 | 53 | return a 54 | 55 | 56 | class EulerIntegratorCell(nn.Module): 57 | def __init__(self, C, m, dKlayer, **kwargs): 58 | super(EulerIntegratorCell, self).__init__(**kwargs) 59 | self.C = C 60 | self.m = m 61 | self.dKlayer = dKlayer 62 | 63 | def forward(self, inputs, states): 64 | 65 | a_tm1 = states 66 | x_d_tm1 = cat((inputs, a_tm1), dim=1) 67 | dk_t = self.dKlayer(x_d_tm1) 68 | da_t = self.C * (dk_t ** self.m) 69 | 70 | return da_t 71 | 72 | 73 | class DkPhys(nn.Module): 74 | def __init__(self, F, **kwargs): 75 | super(DkPhys, self).__init__(**kwargs) 76 | self.F = F 77 | 78 | def forward(self, x_d): 79 | 80 | dk_t = self.F * x_d[:,0] * sqrt(pi * x_d[:,1]) 81 | dk_t = dk_t[:,None] 82 | 83 | return dk_t 84 | 85 | 86 | class DkNN(nn.Module): 87 | def __init__(self,Strain, atrain, **kwargs): 88 | super(DkNN, self).__init__(**kwargs) 89 | # stress-intensity layer 90 | self.dKlayer = nn.Sequential( 91 | Normalization(min(Strain), max(Strain), min(atrain), max(atrain)), 92 | nn.Linear(2, 5), 93 | nn.Tanh(), 94 | nn.Linear(5, 1) 95 | ) 96 | 97 | def forward(self, x): 98 | 99 | dk_t = self.dKlayer(x) 100 | 101 | return dk_t 102 | 103 | 104 | def dk_training_loop(n_epochs,batch_size, optimizer, model, loss_fn, train, label): 105 | for epoch in range(1, n_epochs + 1): 106 | 107 | permutation = randperm(train.shape[0]) 108 | for i in range(0,train.shape[0], batch_size): 109 | 110 | #Minibatch 111 | indices = 
permutation[i:i+batch_size] 112 | batch_x, batch_y = train[indices], label[indices] 113 | 114 | #Forward pass 115 | output_train = model(batch_x) 116 | loss_train = loss_fn(output_train, batch_y) 117 | 118 | #Backward pass 119 | optimizer.zero_grad() 120 | loss_train.backward() 121 | optimizer.step() 122 | 123 | print(f"{int(i/batch_size)+1}/{batch_size}-Epoch {epoch}, Training loss {loss_train.item():.4e}") 124 | 125 | def hPINN_training_loop(n_epochs, optimizer, model, loss_fn, train, a0, label): 126 | for epoch in range(1, n_epochs + 1): 127 | #Forward pass 128 | output_train = model(train, a0) 129 | loss_train = loss_fn(output_train, label) 130 | 131 | #Backward pass 132 | optimizer.zero_grad() 133 | loss_train.backward() 134 | optimizer.step() 135 | 136 | print(f"Epoch {epoch}, Training loss {loss_train.item():.4e}") 137 | 138 | 139 | if __name__ == "__main__": 140 | # Paris law coefficients 141 | [C, m] = [1.5E-11, 3.8] 142 | F = 1.0 143 | 144 | # data 145 | Strain = np.asarray(pd.read_csv('./data/Strain.csv'))[:,:,np.newaxis] 146 | Strain = Tensor(Strain) 147 | atrain = np.asarray(pd.read_csv('./data/atrain.csv')) 148 | atrain = Tensor(atrain) 149 | a0 = np.asarray(pd.read_csv('./data/a0.csv'))[0,0]*np.ones((Strain.shape[0],1)) 150 | a0 = Tensor(a0) 151 | 152 | "-------------------------------------------------------------------------" 153 | "PINN with DkPhys " 154 | dk_phys = DkPhys(F) 155 | euler = EulerIntegratorCell(C=C, m=m, dKlayer=dk_phys) 156 | model_PINN = MyRNN(cell = euler) 157 | aPred = model_PINN(Strain, a0)[:, :] 158 | 159 | "-------------------------------------------------------------------------" 160 | " dKlayerNN and weight initialization " 161 | S_range = linspace(min(Strain), max(Strain), 1000) 162 | a_range = linspace(min(atrain), max(atrain), 1000)[randperm(1000)] 163 | dK_range = -12.05 + 0.24 * S_range + 760.0 * a_range 164 | dK_range = dK_range [:,None] 165 | 166 | dKlayer = DkNN(Strain,atrain) 167 | 168 | dk_training_loop( 169 | n_epochs = 100, 170 | batch_size = 32, 171 | optimizer = optim.RMSprop(dKlayer.parameters(), lr=1e-2), 172 | model = dKlayer, 173 | loss_fn = nn.MSELoss(), 174 | train = transpose(stack([S_range,a_range]),0,1), 175 | label = dK_range 176 | ) 177 | 178 | "-------------------------------------------------------------------------" 179 | " hPINN training " 180 | h_euler = EulerIntegratorCell(C=C, m=m, dKlayer=dKlayer) 181 | model_hPINN = MyRNN(cell = h_euler) 182 | aPred_before = model_hPINN(Strain, a0)[:,:] 183 | 184 | hPINN_training_loop( 185 | n_epochs = 100, 186 | optimizer = optim.RMSprop(model_hPINN.parameters(), lr=1e-2), 187 | model = model_hPINN, 188 | loss_fn = nn.MSELoss(), 189 | train = Strain, 190 | a0 = a0, 191 | label = atrain, 192 | ) 193 | 194 | h_aPred = model_hPINN(Strain, a0)[:,:]  # prediction with the trained hybrid model 195 | 196 | "-------------------------------------------------------------------------" 197 | " plotting " 198 | matplotlib.rc('font', size=12) 199 | 200 | fig, ax= plt.subplots(1,2,sharey=True, figsize=(2*6.4,4.8),gridspec_kw={'hspace': 0, 'wspace': 0.1}) 201 | 202 | ax[0].plot([0,0.05],[0,0.05],'--k') 203 | ax[0].plot(atrain.detach().numpy(), aPred_before.detach().numpy(), 'o', label = 'before training') 204 | ax[0].plot(atrain.detach().numpy(), h_aPred.detach().numpy(), 's', label = 'after training') 205 | ax[0].set_xlabel("actual crack length (m)") 206 | ax[0].set_ylabel("predicted crack length (m)") 207 | ax[0].legend(loc = 'upper left',facecolor = 'w') 208 | ax[0].grid(which='both') 209 | 210 | ax[1].plot([0,0.05],[0,0.05],'--k') 211
| ax[1].plot(atrain.detach().numpy(), aPred.detach().numpy(), 's', label = 'physical $\Delta$K') 212 | ax[1].grid(which='both') 213 | ax[1].set_xlabel("actual crack length (m)") 214 | ax[1].legend(loc = 'upper left',facecolor = 'w') 215 | 216 | plt.show() -------------------------------------------------------------------------------- /first_order_ode/euler_predict_only.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | from tensorflow.keras import Sequential 5 | from tensorflow.keras.layers import Dense 6 | from tensorflow.keras.optimizers import RMSprop 7 | from tensorflow import float32, convert_to_tensor 8 | from model import Normalization, create_model 9 | 10 | if __name__ == "__main__": 11 | # Paris law coefficients 12 | [C, m] = [1.5E-11, 3.8] 13 | 14 | # data 15 | Stest = np.asarray(pd.read_csv('./data/Stest.csv'))[:, :, np.newaxis] 16 | atest = np.asarray(pd.read_csv('./data/atest.csv')) 17 | a0 = np.asarray(pd.read_csv('./data/a0.csv'))[0,0]*np.ones((Stest.shape[0],1)) 18 | 19 | # stress-intensity layer 20 | dKlayer = Sequential() 21 | dKlayer.add(Normalization(70, 160, 0.005, 0.03)) 22 | dKlayer.add(Dense(5, activation='tanh')) 23 | dKlayer.add(Dense(1)) 24 | 25 | # weight initialization 26 | S_range = np.linspace(70, 160, 1000) 27 | a_range = np.linspace(0.005, 0.03, 1000)[np.random.permutation(np.arange(1000))] 28 | dK_range = -12.05 + 0.24 * S_range + 760.0 * a_range 29 | 30 | dKlayer.compile(loss='mse', optimizer=RMSprop(1e-2)) 31 | inputs_train = np.transpose(np.asarray([S_range, a_range])) 32 | dKlayer.fit(inputs_train, dK_range, epochs=100) 33 | 34 | # building the model and predicting before "training" 35 | model = create_model(C=C, m=m, a0=convert_to_tensor(a0, dtype=float32), 36 | dKlayer=dKlayer, return_sequences=True, batch_input_shape=Stest.shape) 37 | aBefore = model.predict_on_batch(Stest)[:,:,0] 38 | 39 | # loading weights from trained model 40 | model.load_weights("./savedmodels/cp.ckpt") 41 | aAfter = model.predict_on_batch(Stest)[:,:,0] 42 | 43 | # plotting predictions 44 | fig = plt.figure() 45 | plt.plot([0,0.05],[0,0.05],'--k') 46 | plt.plot(atest[:,-1],aBefore[:,-1],'o', label = 'before training') 47 | plt.plot(atest[:,-1],aAfter[:,-1], 's', label = 'after training') 48 | plt.xlabel("actual crack length (m)") 49 | plt.ylabel("predicted crack length (m)") 50 | plt.legend(loc = 'upper center',facecolor = 'w') 51 | plt.grid(which='both') 52 | plt.show() 53 | -------------------------------------------------------------------------------- /first_order_ode/euler_save_traning.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | from tensorflow.keras import Sequential 5 | from tensorflow.keras.layers import Dense 6 | from tensorflow.keras.optimizers import RMSprop 7 | from tensorflow.keras.callbacks import ModelCheckpoint 8 | from tensorflow import float32, convert_to_tensor 9 | from model import Normalization, create_model 10 | 11 | if __name__ == "__main__": 12 | # Paris law coefficients 13 | [C, m] = [1.5E-11, 3.8] 14 | 15 | # data 16 | Strain = np.asarray(pd.read_csv('./data/Strain.csv'))[:, :, np.newaxis] 17 | atrain = np.asarray(pd.read_csv('./data/atrain.csv')) 18 | a0 = np.asarray(pd.read_csv('./data/a0.csv'))[0,0]*np.ones((Strain.shape[0],1)) 19 | 20 | # stress-intensity layer 21 | dKlayer = Sequential() 22 | 
dKlayer.add(Normalization(70, 160, 0.005, 0.03)) 23 | dKlayer.add(Dense(5, activation='tanh')) 24 | dKlayer.add(Dense(1)) 25 | 26 | # weight initialization 27 | S_range = np.linspace(70, 160, 1000) 28 | a_range = np.linspace(0.005, 0.03, 1000)[np.random.permutation(np.arange(1000))] 29 | dK_range = -12.05 + 0.24 * S_range + 760.0 * a_range 30 | 31 | dKlayer.compile(loss='mse', optimizer=RMSprop(1e-2)) 32 | inputs_train = np.transpose(np.asarray([S_range, a_range])) 33 | dKlayer.fit(inputs_train, dK_range, epochs=100) 34 | 35 | # fitting physics-informed neural network 36 | mckp = ModelCheckpoint(filepath = "./savedmodels/cp.ckpt", monitor = 'loss', verbose = 1, 37 | save_best_only = True, mode = 'min', save_weights_only = True) 38 | 39 | model = create_model(C=C, m=m, a0=convert_to_tensor(a0, dtype=float32), dKlayer=dKlayer, batch_input_shape=Strain.shape) 40 | history = model.fit(Strain, atrain, epochs=100, steps_per_epoch=1, verbose=1, callbacks=[mckp]) 41 | 42 | # plotting predictions 43 | fig = plt.figure() 44 | plt.plot(np.array(history.history['loss'])) 45 | plt.xlabel("epoch") 46 | plt.ylabel("loss") 47 | plt.grid(which='both') 48 | plt.show() 49 | -------------------------------------------------------------------------------- /first_order_ode/model.py: -------------------------------------------------------------------------------- 1 | from tensorflow.keras.layers import RNN, Layer 2 | from tensorflow.keras import Sequential 3 | from tensorflow.keras.optimizers import RMSprop 4 | from tensorflow.python.framework import tensor_shape 5 | from tensorflow import concat, convert_to_tensor 6 | 7 | class EulerIntegratorCell(Layer): 8 | def __init__(self, C, m, dKlayer, a0=None, units=1, **kwargs): 9 | super(EulerIntegratorCell, self).__init__(**kwargs) 10 | self.units = units 11 | self.C = C 12 | self.m = m 13 | self.a0 = a0 14 | self.dKlayer = dKlayer 15 | self.state_size = tensor_shape.TensorShape(self.units) 16 | self.output_size = tensor_shape.TensorShape(self.units) 17 | 18 | def build(self, input_shape, **kwargs): 19 | self.built = True 20 | 21 | def call(self, inputs, states): 22 | inputs = convert_to_tensor(inputs) 23 | a_tm1 = convert_to_tensor(states) 24 | x_d_tm1 = concat((inputs, a_tm1[0, :]), axis=1) 25 | dk_t = self.dKlayer(x_d_tm1) 26 | da_t = self.C * (dk_t ** self.m) 27 | a = da_t + a_tm1[0, :] 28 | return a, [a] 29 | 30 | def get_initial_state(self, inputs=None, batch_size=None, dtype=None): 31 | return self.a0 32 | 33 | class Normalization(Layer): 34 | def __init__(self, S_low, S_up, a_low, a_up, **kwargs): 35 | super(Normalization, self).__init__(**kwargs) 36 | self.low_bound_S = S_low 37 | self.upper_bound_S = S_up 38 | self.low_bound_a = a_low 39 | self.upper_bound_a = a_up 40 | 41 | def build(self, input_shape, **kwargs): 42 | self.built = True 43 | 44 | def call(self, inputs): 45 | output = (inputs - [self.low_bound_S, self.low_bound_a]) / [(self.upper_bound_S - self.low_bound_S), (self.upper_bound_a - self.low_bound_a)] 46 | return output 47 | 48 | def create_model(C, m, a0, dKlayer, batch_input_shape, return_sequences=False, return_state=False): 49 | euler = EulerIntegratorCell(C=C, m=m, dKlayer=dKlayer, a0=a0, batch_input_shape=batch_input_shape) 50 | PINN = RNN(cell=euler, batch_input_shape=batch_input_shape, return_sequences=return_sequences, return_state=return_state) 51 | model = Sequential() 52 | model.add(PINN) 53 | model.compile(loss='mse', optimizer=RMSprop(1e-2)) 54 | return model 55 | 
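A note on the dKlayer used by the model above: when no data-driven correction is wanted, the small neural network can be replaced by the closed-form stress intensity range ΔK = F·S·√(π·a), which is what the DkPhys module computes in euler_example_PyTorch.py. Below is a minimal Keras sketch of such a physics-only layer; the class name DkPhysLayer is hypothetical, and it assumes the two-column [S, a] input that EulerIntegratorCell passes to its dKlayer.

```
from math import pi
from tensorflow import sqrt
from tensorflow.keras.layers import Layer

class DkPhysLayer(Layer):
    # closed-form stress intensity range: dK = F * S * sqrt(pi * a)
    def __init__(self, F=1.0, **kwargs):
        super(DkPhysLayer, self).__init__(**kwargs)
        self.F = F

    def call(self, inputs):
        # column 0: far-field stress S, column 1: current crack length a
        S, a = inputs[:, 0:1], inputs[:, 1:2]
        return self.F * S * sqrt(pi * a)

# usage sketch, e.g.:
# model = create_model(C=C, m=m, a0=a0, dKlayer=DkPhysLayer(F=1.0),
#                      batch_input_shape=Strain.shape)
```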
-------------------------------------------------------------------------------- /second_order_ode/README.md: -------------------------------------------------------------------------------- 1 | # Runge-Kutta implementation of a system of second order ordinary differential equations 2 | 3 | This folder contains the implementation of the Runge-Kutta integration of a two degree of freedom vibrations system as example for a second order ordinary differential equation. 4 | 5 | The python files and data folder can be described as follows: 6 | 7 | - runge_kutta_example.py is a complete implementation of a Runge-Kutta integrator including model training and prediction 8 | - runge_kutta_save_training.py trains the trainable coefficients on the training data and saves the model weights 9 | - runge_kutts_predict_only.py loads the saved model weights and predicts on test data 10 | - model.py contains the RungeKuttaIntegratorCell class and the create_model function 11 | - The data folder contains the input force, displacement and target displacement data.csv file for system identification as well as a seperate data02.csv file used for prediction only 12 | - The data creation file runge_kutt_create_data.py is also included in the data folder and can be used to create additional data for model validation 13 | -------------------------------------------------------------------------------- /second_order_ode/data/data.csv: -------------------------------------------------------------------------------- 1 | t,u0,u1,y0,y1,yT0,yT1 2 | 0.0,1.0,0.0,0.0,0.0,-1.6226391134739296e-05,1.7277204942041626e-05 3 | 0.002,1.0,0.0,9.930069728252912e-08,7.243675851927142e-10,2.14567623698458e-06,1.9320870775298982e-05 4 | 0.004,1.0,0.0,3.944112425552625e-07,5.722990190100829e-09,2.067783516501586e-05,1.3144982162021642e-05 5 | 0.006,1.0,0.0,8.811573324033639e-07,1.9071637807793726e-08,5.388390614993032e-08,-2.39829260193117e-05 6 | 0.008,1.0,0.0,1.5553824601231847e-06,4.4628612121548524e-08,8.164641949026341e-06,-1.920257491549357e-05 7 | 0.01,1.0,0.0,2.412948406781363e-06,8.603423854149968e-08,-2.6196649452429196e-06,-3.7455724299796944e-05 8 | 0.012,1.0,0.0,3.4497356655857913e-06,1.4671090923711238e-07,-3.846131604785364e-05,-9.258059485194433e-06 9 | 0.014,1.0,0.0,4.66164380656569e-06,2.298636511015584e-07,-2.0017336423186104e-05,-1.0055869211382851e-07 10 | 0.016,1.0,0.0,6.044591788268556e-06,3.3848119264876353e-07,5.39859669644023e-06,-2.7506943611774208e-06 11 | 0.018000000000000002,1.0,0.0,7.594518222890123e-06,4.7533750396302497e-07,3.5634671073161034e-05,2.5798718714939003e-05 12 | 0.02,1.0,0.0,9.307381600960635e-06,6.429937839917856e-07,2.649897013229486e-05,2.7350458581220348e-06 13 | 0.022,1.0,0.0,1.1179160481417283e-05,8.438008696915521e-07,-4.613788212520131e-06,-2.0285873772716626e-05 14 | 0.024,1.0,0.0,1.3205853652599544e-05,1.0799020418022263e-06,8.657022563366896e-06,-3.0419421517168166e-05 15 | 0.026000000000000002,1.0,0.0,1.5383480269411895e-05,1.3532362023335078e-06,2.4827293694714156e-05,-2.044911852383027e-05 16 | 0.028,1.0,0.0,1.77080799716078e-05,1.665541399195759e-06,2.6244601558890057e-05,-5.657901212748171e-07 17 | 0.03,1.0,0.0,2.017571298786046e-05,2.0183586737941136e-06,3.0020387605688323e-05,-2.084517169058445e-05 18 | 0.032,1.0,0.0,2.2782460230000302e-05,2.4130362078259827e-06,9.451977329204614e-06,-2.922465919089801e-05 19 | 0.034,1.0,0.0,2.552442338151703e-05,2.85073374597588e-06,2.3482105365701576e-05,2.045180251048588e-05 20 | 
0.036000000000000004,1.0,0.0,2.839772498414585e-05,3.3324272716850828e-06,3.052111110342565e-05,-2.0103030858012154e-05 21 | 0.038,1.0,0.0,3.139850852608368e-05,3.858913913684574e-06,1.6212528335914894e-05,-3.785558892113313e-06 22 | 0.04,1.0,0.0,3.452293853511225e-05,4.430817061515573e-06,5.999950430441682e-05,-7.009027133415862e-06 23 | 0.042,1.0,0.0,3.776720067964121e-05,5.048591668820308e-06,2.4016970131430547e-05,-9.523487045089873e-06 24 | 0.044,1.0,0.0,4.1127501880426725e-05,5.712529723764323e-06,3.018261116284708e-05,-9.592347842026808e-06 25 | 0.046,1.0,0.0,4.460007043546868e-05,6.422765866548146e-06,2.2916498969210973e-05,-8.849603714397233e-06 26 | 0.048,1.0,0.0,4.818115616034447e-05,7.17928313457859e-06,4.4816998075829624e-07,6.391897311830235e-06 27 | 0.05,1.0,0.0,5.186703054599813e-05,7.981918816496044e-06,7.531657817088066e-05,-2.295212183145878e-06 28 | 0.052000000000000005,1.0,0.0,5.5653986935771894e-05,8.830370396891891e-06,4.9189349737469135e-05,9.1971441704194e-06 29 | 0.054,1.0,0.0,5.9538340723242126e-05,9.724201574197684e-06,4.766516498250328e-05,-2.1061410023441747e-05 30 | 0.056,1.0,0.0,6.351642957220446e-05,1.0662848334883084e-05,5.9827252873193707e-05,-2.8224935083700527e-05 31 | 0.058,1.0,0.0,6.75846136599428e-05,1.1645625067760829e-05,5.677036964599119e-05,4.351101490884135e-06 32 | 0.06,1.0,0.0,7.173927594471489e-05,1.2671730702862747e-05,7.940245781913472e-05,8.035100624468746e-06 33 | 0.062,1.0,0.0,7.597682245819273e-05,1.3740254860018915e-05,8.896341207407363e-05,2.6989499543194806e-05 34 | 0.064,1.0,0.0,8.02936826234096e-05,1.4850183992941481e-05,0.00010207248823992513,3.267905085804046e-05 35 | 0.066,1.0,0.0,8.468630959858717e-05,1.60004075152833e-05,8.392546270544131e-05,3.293297226199204e-05 36 | 0.068,1.0,0.0,8.915118064704597e-05,1.7189723895808434e-05,9.320278751554867e-05,7.751951150537049e-06 37 | 0.07,1.0,0.0,9.368479753323999e-05,1.8416846710475053e-05,7.798617006611716e-05,5.3933628816176104e-05 38 | 0.07200000000000001,1.0,0.0,9.828368694480243e-05,1.9680410639890223e-05,0.00011109558850741428,3.3975553832135486e-05 39 | 0.074,1.0,0.0,0.00010294440094034354,2.0978977401249072e-05,0.00012470792740928557,1.1283687764279671e-05 40 | 0.076,1.0,0.0,0.00010766351742260357,2.2311041604517053e-05,0.00011922984526140195,4.0989948848352655e-05 41 | 0.078,1.0,0.0,0.00011243764063643426,2.3675036523251962e-05,0.000111442334638442,1.0871473877234334e-05 42 | 0.08,1.0,0.0,0.00011726340169096038,2.5069339771091413e-05,0.00010122144520731374,6.058035469707204e-05 43 | 0.082,1.0,0.0,0.0001221374591051591,2.649227887555038e-05,0.00011656042380906869,2.0213394510219673e-05 44 | 0.084,1.0,0.0,0.0001270564993759889,2.794213674138152e-05,0.00013683926294393673,3.4611943020232626e-05 45 | 0.08600000000000001,1.0,0.0,0.00013201723756810148,2.9417156996347353e-05,0.00010163864817991255,1.2866398015661755e-05 46 | 0.088,1.0,0.0,0.00013701641792407968,3.091554921283722e-05,0.0001275169795994297,2.80442957345891e-05 47 | 0.09,1.0,0.0,0.00014205081449406075,3.243549399933287e-05,0.0001514924922424402,1.6667291986377894e-05 48 | 0.092,1.0,0.0,0.0001471172317835295,3.3975147956283394e-05,0.00013266441179229685,5.169128079158854e-05 49 | 0.094,1.0,0.0,0.00015221250541799623,3.5532648491493026e-05,0.0001659634103357758,4.809989747201847e-05 50 | 0.096,1.0,0.0,0.00015733350282321333,3.710611849065331e-05,0.00015669335472003236,2.8909963921534406e-05 51 | 0.098,1.0,0.0,0.00016247712391953008,3.869367083916356e-05,0.00016234092290588916,3.735839594083895e-05 52 | 
0.1,1.0,0.0,0.0001676403018289367,4.0293412791880744e-05,0.00016739135283016078,4.44776921027782e-05 53 | 0.10200000000000001,1.0,0.0,0.00017282000359330728,4.1903450187920825e-05,0.0001961398807003648,4.611428920868728e-05 54 | 0.10400000000000001,1.0,0.0,0.00017801323090231664,4.352189150809828e-05,0.00017803479975958146,3.7098505201302345e-05 55 | 0.106,1.0,0.0,0.00018321702082947655,4.514685177303876e-05,0.00015890791995552993,3.461462822172972e-05 56 | 0.108,1.0,0.0,0.0001884284465747142,4.677645628043137e-05,0.0002071154942420599,4.496122513235545e-05 57 | 0.11,1.0,0.0,0.00019364461821189815,4.8408844180301564e-05,0.00021787741474764663,5.1515182685935536e-05 58 | 0.112,1.0,0.0,0.00019886268343970496,5.0042171887583546e-05,0.00018397853193090035,7.054980525560099e-05 59 | 0.114,1.0,0.0,0.00020407982833421393,5.1674616331651467e-05,0.00019709504934912393,8.865359623037907e-05 60 | 0.116,1.0,0.0,0.00020929327810161502,5.330437804283258e-05,0.00020844203548414853,3.493420422049485e-05 61 | 0.11800000000000001,1.0,0.0,0.00021450029782941877,5.492968407627213e-05,0.0001827293212393222,3.0200761189338383e-05 62 | 0.12,1.0,0.0,0.00021969819323456542,5.654879077384914e-05,0.00020316706958530695,4.5258623196304544e-05 63 | 0.122,1.0,0.0,0.00022488431140684224,5.815998636515552e-05,0.00025197912778725,6.20799454762734e-05 64 | 0.124,1.0,0.0,0.000230056041546035,5.976159340884657e-05,0.00022418026580811677,6.854362788073098e-05 65 | 0.126,1.0,0.0,0.00023521081569126051,6.135197107595068e-05,0.00022918172130335476,2.276424945058866e-05 66 | 0.128,1.0,0.0,0.00024034610944095075,6.292951727698895e-05,0.00023814712640412688,6.960148480679021e-05 67 | 0.13,1.0,0.0,0.00024545944266198786,6.449267063500236e-05,0.00023178610019019145,6.39707322291042e-05 68 | 0.132,1.0,0.0,0.00025054838018651987,6.603991230681504e-05,0.000245120391213631,4.356972090375107e-05 69 | 0.134,1.0,0.0,0.000255610532495022,6.75697676550772e-05,0.0002635102877224919,6.245429750228485e-05 70 | 0.136,1.0,0.0,0.0002606435563842049,6.908080777383146e-05,0.000254196667273699,5.357390775969354e-05 71 | 0.138,1.0,0.0,0.00026564515561841167,7.057165087053021e-05,0.0002706838732626935,7.274320107644749e-05 72 | 0.14,1.0,0.0,0.0002706130815631883,7.204096350760227e-05,0.0002698840503934366,7.540431464334755e-05 73 | 0.14200000000000002,1.0,0.0,0.00027554513379975565,7.348746170682176e-05,0.0002594235550486991,7.345763154861782e-05 74 | 0.14400000000000002,1.0,0.0,0.00028043916071915923,7.490991191987383e-05,0.0002726078523463347,8.283104201082953e-05 75 | 0.146,1.0,0.0,0.0002852930600949207,7.630713186863887e-05,0.00026103604590371575,7.04001467826628e-05 76 | 0.148,1.0,0.0,0.00029010477963306634,7.767799125883125e-05,0.0002779641275485836,8.120197932183195e-05 77 | 0.15,1.0,0.0,0.00029487231749845884,7.902141237072962e-05,0.0002971182964540302,9.874606865492241e-05 78 | 0.152,1.0,0.0,0.0002995937228164121,8.03363705308241e-05,0.00029610039469157813,7.397237559286462e-05 79 | 0.154,1.0,0.0,0.0003042670961486236,8.162189446828209e-05,0.0003146836851307966,6.378337054684491e-05 80 | 0.156,1.0,0.0,0.0003088905899425139,8.28770665601991e-05,0.0003167191268247493,8.353537214639107e-05 81 | 0.158,1.0,0.0,0.000313462408953119,8.410102296965389e-05,0.00032799195393021555,0.00010050612312865619 82 | 0.16,1.0,0.0,0.00031798081063673795,8.529295368062946e-05,0.0002882277938229198,8.967934244669839e-05 83 | 0.162,1.0,0.0,0.0003224441055155959,8.645210243389327e-05,0.00031136107674780957,7.368760099813012e-05 84 | 
0.164,1.0,0.0,0.0003268506575128403,8.757776656795159e-05,0.00032671624070053866,0.0001149409254971212 85 | 0.166,1.0,0.0,0.0003311988842572452,8.866929676920497e-05,0.00032106480411528844,9.217150580345489e-05 86 | 0.168,1.0,0.0,0.0003354872573570575,8.972609673543431e-05,0.0003448417490127726,9.429291006945771e-05 87 | 0.17,1.0,0.0,0.00033971430264247637,9.074762275674142e-05,0.00033751442934464596,7.464880293079345e-05 88 | 0.17200000000000001,1.0,0.0,0.0003438786003763147,9.173338321805317e-05,0.0003646567567069208,0.00011591206796081978 89 | 0.17400000000000002,1.0,0.0,0.00034797878543244866,9.268293802727625e-05,0.00036071891104621557,5.5415466087807655e-05 90 | 0.176,1.0,0.0,0.0003520135474417195,9.35958979731597e-05,0.00035660291713487567,0.00010319849361107908 91 | 0.178,1.0,0.0,0.0003559816309050054,9.447192401688548e-05,0.0003558621259295222,0.00010105397941685595 92 | 0.18,1.0,0.0,0.00035988183527324114,9.531072652136365e-05,0.00036061147802497474,9.580951503287055e-05 93 | 0.182,1.0,0.0,0.00036371301499421365,9.611206442215918e-05,0.00033722597838410287,9.764262598292152e-05 94 | 0.184,1.0,0.0,0.0003674740795260193,9.687574434392124e-05,0.0003618476787957644,7.595156480924815e-05 95 | 0.186,1.0,0.0,0.0003711639933171201,9.760161966612525e-05,0.000355022355441589,0.00011319674774530136 96 | 0.188,1.0,0.0,0.0003747817757529887,9.828958954187108e-05,0.00038734723845691916,9.05402837577296e-05 97 | 0.19,1.0,0.0,0.00037832650106938315,9.893959787341057e-05,0.00038021362047164814,0.00011627990532907075 98 | 0.192,1.0,0.0,0.0003817972982323431,9.955163224800132e-05,0.0004058729779230437,0.00012024439926708663 99 | 0.194,1.0,0.0,0.00038519335078504574,0.00010012572283760579,0.00040875080872382666,7.960179351463939e-05 100 | 0.196,1.0,0.0,0.00038851389666171027,0.00010066194126587048,0.00037507908034146553,7.97946996406754e-05 101 | 0.198,1.0,0.0,0.00039175822796878277,0.00010116039944573506,0.00038656027198753995,9.135020724743082e-05 102 | 0.2,1.0,0.0,0.00039492569073368067,0.0001016212483909317,0.0003974498445173734,9.081645302580455e-05 103 | 0.202,1.0,0.0,0.0003980156846214176,0.00010204467700454355,0.0004000306420661102,0.00010772360744788409 104 | 0.20400000000000001,1.0,0.0,0.0004010276626194719,0.00010243091084769709,0.0004026297090509738,0.0001132822684819951 105 | 0.20600000000000002,1.0,0.0,0.0004039611306913021,0.00010278021089136797,0.0004070720845601694,0.00011254937542872861 106 | 0.20800000000000002,1.0,0.0,0.00040681564739895214,0.00010309287225418158,0.0003977440446488689,9.919807078540585e-05 107 | 0.21,1.0,0.0,0.0004095908234952254,0.00010336922292899145,0.00039576193886327093,0.00010270709878669568 108 | 0.212,1.0,0.0,0.000412286321485943,0.0001036096225009181,0.00040404063848245266,0.00011908075115302784 109 | 0.214,1.0,0.0,0.00041490185516283536,0.0001038144608594305,0.0004041041984414623,8.094531494227174e-05 110 | 0.216,1.0,0.0,0.00041743718910764826,0.00010398415690695055,0.0004110702477933776,0.00012479000027847608 111 | 0.218,1.0,0.0,0.0004198921381680764,0.00010411915726635949,0.0004363273224152693,0.00011420820009915552 112 | 0.22,1.0,0.0,0.0004222665669061656,0.00010421993498968295,0.0004387452334778997,0.0001237546693862004 113 | 0.222,1.0,0.0,0.00042456038901985175,0.00010428698827012986,0.00040090768174013704,0.00010220714957240683 114 | 0.224,1.0,0.0,0.000426773566738333,0.00010432083915955883,0.0004126510290822791,9.695007858738958e-05 115 | 0.226,1.0,0.0,0.0004289061101919922,0.00010432203229334456,0.000428122652905005,0.00010676987111372893 116 | 
0.228,1.0,0.0,0.0004309580767576125,0.00010429113362451649,0.0004392325496783885,0.00011273445628540704 117 | 0.23,1.0,0.0,0.0004329295703796474,0.00010422872916894288,0.00042888758775382426,8.301227436687275e-05 118 | 0.232,1.0,0.0,0.00043482074086832837,0.00010413542376323471,0.00044172040325077434,0.00011516976114123596 119 | 0.234,1.0,0.0,0.0004366317831754077,0.00010401183983694674,0.0004182793898096646,9.118260869305082e-05 120 | 0.23600000000000002,1.0,0.0,0.0004383629366483542,0.00010385861620055749,0.0004511390548103384,0.00010895771704478003 121 | 0.23800000000000002,1.0,0.0,0.0004400144842638296,0.00010367640685061528,0.0004313145116173524,8.686624237408993e-05 122 | 0.24,1.0,0.0,0.00044158675184129126,0.00010346587979334466,0.0004334413206833321,0.00010856688980715151 123 | 0.242,1.0,0.0,0.0004430801072375742,0.00010322771588791705,0.0004451301353356479,0.00011815000873000824 124 | 0.244,1.0,0.0,0.00044449495952331755,0.00010296260771049971,0.00044606133908901743,0.00014416102310027426 125 | 0.246,1.0,0.0,0.00044583175814210963,0.00010267125844011013,0.000452715880631074,9.68848512424011e-05 126 | 0.248,1.0,0.0,0.00044709099205323056,0.00010235438076721777,0.00041945860952438127,9.201825755434737e-05 127 | 0.25,1.0,0.0,0.0004482731888588803,0.000102012695825952,0.00043949942128388835,0.00010443590810155883 128 | 0.252,1.0,0.0,0.00044937891391678134,0.00010164693215069394,0.00047222007127823584,0.00011431198421786641 129 | 0.254,1.0,0.0,0.0004504087694390504,0.00010125782465775129,0.0004569696235529409,0.00010090193140190335 130 | 0.256,1.0,0.0,0.0004513633935782341,0.00010084611365273927,0.00046786706993858613,0.0001162135827250993 131 | 0.258,1.0,0.0,0.00045224345950140477,0.00010041254386421594,0.00043526124012871443,0.0001315995529355471 132 | 0.26,1.0,0.0,0.00045304967445321134,9.995786350404922e-05,0.0004512762020309725,0.00010352176553068333 133 | 0.262,1.0,0.0,0.0004537827788087778,9.948282335492343e-05,0.00047898890504469133,0.00011679532855961115 134 | 0.264,1.0,0.0,0.00045444354511734055,9.89881758853263e-05,0.00046648112731744986,0.0001069633035490635 135 | 0.266,1.0,0.0,0.00045503277713750926,9.84746743922932e-05,0.0004635535118136869,0.00011270683248223128 136 | 0.268,1.0,0.0,0.00045555130886503246,9.794307217212371e-05,0.00045088834905242616,8.363101922744292e-05 137 | 0.27,1.0,0.0,0.0004560000035539419,9.73941217192258e-05,0.00045961694477481945,8.283450739873205e-05 138 | 0.272,1.0,0.0,0.0004563797527319425,9.682857395318666e-05,0.00045256968685504663,9.232092114000113e-05 139 | 0.274,1.0,0.0,0.0004566914752109074,9.624717747411441e-05,0.0004522839925470049,0.00011357733481127393 140 | 0.276,1.0,0.0,0.0004569361160933263,9.565067784624346e-05,0.00045491327788650004,9.60680796708058e-05 141 | 0.278,1.0,0.0,0.00045711464577554805,9.503981690974654e-05,0.0004460972888437333,0.00010480227158465634 142 | 0.28,1.0,0.0,0.0004572280589486452,9.44153321206498e-05,0.00047367634798511145,8.659807057425857e-05 143 | 0.28200000000000003,1.0,0.0,0.00045727737359771854,9.377795591870284e-05,0.00043917483365063937,7.526106145468166e-05 144 | 0.28400000000000003,1.0,0.0,0.0004572636300004455,9.31284151230133e-05,0.00048137774166239785,8.656514007256843e-05 145 | 0.28600000000000003,1.0,0.0,0.0004571878897256646,9.246743035521598e-05,0.00047338498103005497,0.00010480053291116027 146 | 0.28800000000000003,1.0,0.0,0.00045705123463277354,9.179571548990952e-05,0.0004653628506831144,0.00012412475730235657 147 | 
0.29,1.0,0.0,0.0004568547658727052,9.111397713205794e-05,0.0004521485879029372,9.165986755035496e-05 148 | 0.292,1.0,0.0,0.0004565996028912292,9.042291412102206e-05,0.0004671325032890221,6.820790284270146e-05 149 | 0.294,1.0,0.0,0.00045628688243531284,8.972321706085518e-05,0.0004633749538706548,7.150345493789281e-05 150 | 0.296,1.0,0.0,0.0004559177575632587,8.901556787646886e-05,0.0004464821481916432,0.00010723509674178944 151 | 0.298,1.0,0.0,0.0004554933966593189,8.830063939524871e-05,0.0004308935235373209,6.0948074170102595e-05 152 | 0.3,1.0,0.0,0.0004550149824534703,8.757909495367559e-05,0.0004508324045462717,7.020313586960855e-05 153 | 0.302,1.0,0.0,0.00045448371104701764,8.685158802848562e-05,0.00045500243700723143,8.719961177576991e-05 154 | 0.304,1.0,0.0,0.0004539007909446723,8.611876189188256e-05,0.00045818259014816337,5.33768136898221e-05 155 | 0.306,1.0,0.0,0.00045326744209373866,8.538124929029726e-05,0.00045636910287047715,7.490907457994912e-05 156 | 0.308,1.0,0.0,0.0004525848949310202,8.463967214617306e-05,0.0004579223317054618,0.00010551953532764188 157 | 0.31,1.0,0.0,0.00045185438943803956,8.389464128224124e-05,0.00046699136156819985,8.259987384137488e-05 158 | 0.312,1.0,0.0,0.0004510771742051482,8.314675616773756e-05,0.00042913173650203223,6.472806730106633e-05 159 | 0.314,1.0,0.0,0.00045025450550508145,8.239660468600036e-05,0.0004794235424593034,6.822606226413065e-05 160 | 0.316,1.0,0.0,0.00044938764637649745,8.164476292288042e-05,0.00046583796262537083,9.398425236577816e-05 161 | 0.318,1.0,0.0,0.00044847786571801694,8.089179497538565e-05,0.0004303109056982798,9.667469735141281e-05 162 | 0.32,1.0,0.0,0.00044752643739326355,8.013825277997628e-05,0.0004766535069118068,8.96446939390279e-05 163 | 0.322,1.0,0.0,0.00044653463934738396,7.938467595992224e-05,0.0004230379299343636,9.399809610538742e-05 164 | 0.324,1.0,0.0,0.00044550375273550726,7.863159169112969e-05,0.00046726436130200573,8.221698918382268e-05 165 | 0.326,1.0,0.0,0.00044443506106358526,7.78795145858423e-05,0.00042644891568204544,7.864075939043035e-05 166 | 0.328,1.0,0.0,0.00044332984934203346,7.71289465936212e-05,0.0004519992511579441,8.370973996223783e-05 167 | 0.33,1.0,0.0,0.0004421894032525757,7.638037691900776e-05,0.00043005287491934243,6.374692940029625e-05 168 | 0.332,1.0,0.0,0.00044101500832867386,7.56342819552747e-05,0.00042266553951975056,8.002881701959908e-05 169 | 0.334,1.0,0.0,0.0004398079491499063,7.489112523367339e-05,0.00046053986633130844,0.00010292817323455397 170 | 0.336,1.0,0.0,0.00043856950855063794,7.415135738758838e-05,0.0004351227919533183,7.997518083189135e-05 171 | 0.338,1.0,0.0,0.00043730096684330827,7.341541613101443e-05,0.00043018851454461424,0.00010262740994453203 172 | 0.34,1.0,0.0,0.00043600360105664165,7.268372625077686e-05,0.00043320322830797404,7.844827169798822e-05 173 | 0.342,1.0,0.0,0.0004346786841890677,7.195669961192167e-05,0.0004463324610048046,9.885012297265661e-05 174 | 0.34400000000000003,1.0,0.0,0.0004333274844776204,7.123473517570883e-05,0.00042371426127164966,5.691181542454029e-05 175 | 0.34600000000000003,1.0,0.0,0.00043195126468256566,7.051821902964977e-05,0.0004167100652749618,7.181390451519321e-05 176 | 0.34800000000000003,1.0,0.0,0.0004305512813879891,6.980752442903819e-05,0.0004434522329912244,8.939508464942321e-05 177 | 0.35000000000000003,1.0,0.0,0.00042912878431855875,6.910301184943215e-05,0.0004297845848690119,7.604532884764276e-05 178 | 0.352,1.0,0.0,0.00042768501567265753,6.840502904955495e-05,0.00044463900138882,5.940274184330686e-05 179 | 
0.354,1.0,0.0,0.00042622120947206595,6.771391114409204e-05,0.00044131391830012884,7.390285708152378e-05 180 | 0.356,1.0,0.0,0.0004247385909283547,6.702998068587181e-05,0.000414865002918284,8.802433815571188e-05 181 | 0.358,1.0,0.0,0.00042323837582613366,6.635354775692884e-05,0.00042528053043715416,8.797676049795042e-05 182 | 0.36,1.0,0.0,0.0004217217699232829,6.568491006795938e-05,0.0004036573977950251,5.7393678740560586e-05 183 | 0.362,1.0,0.0,0.0004201899683682796,6.502435306569055e-05,0.0004279062866150544,5.990845958448159e-05 184 | 0.364,1.0,0.0,0.0004186441551347146,6.437215004769653e-05,0.0004272148382568366,6.515549146806818e-05 185 | 0.366,1.0,0.0,0.00041708550247307984,6.37285622842069e-05,0.00040029106587833837,3.486618006667319e-05 186 | 0.368,1.0,0.0,0.0004155151703798893,6.30938391464652e-05,0.00043251068439181274,7.078452409702723e-05 187 | 0.37,1.0,0.0,0.0004139343060841848,6.246821824120766e-05,0.00042349956971443056,5.774171949248854e-05 188 | 0.372,1.0,0.0,0.00041234404355145935,6.18519255508451e-05,0.00044849874175642394,6.771429335158426e-05 189 | 0.374,1.0,0.0,0.00041074550300501954,6.124517557894365e-05,0.00040554260619450784,4.478852430029411e-05 190 | 0.376,1.0,0.0,0.00040913979046479213,6.064817150061296e-05,0.0004075302962065248,5.1391223221585926e-05 191 | 0.378,1.0,0.0,0.00040752799730356794,6.006110531742295e-05,0.0004296744651559124,3.3920976482677866e-05 192 | 0.38,1.0,0.0,0.00040591119982066176,5.948415801648381e-05,0.00043141274162698015,5.232217637339017e-05 193 | 0.382,1.0,0.0,0.00040429045883295424,5.8917499733336254e-05,0.0003938264510883041,7.428956603329365e-05 194 | 0.384,1.0,0.0,0.00040266681928326914,5.8361289918312006e-05,0.0004086491541883379,5.724750849399996e-05 195 | 0.386,1.0,0.0,0.00040104130986602795,5.781567750603762e-05,0.00039931024851489865,5.0500507023657354e-05 196 | 0.388,1.0,0.0,0.0003994149426701095,5.7280801087766916e-05,0.0004019090746950985,4.913557048040277e-05 197 | 0.39,1.0,0.0,0.00039778871283883396,5.6756789086240295e-05,0.00038216662019087237,5.6436163671961156e-05 198 | 0.392,1.0,0.0,0.00039616359824697664,5.624375993278148e-05,0.000403536396348415,6.8522301799287e-05 199 | 0.394,1.0,0.0,0.000394540559194708,5.5741822246354494e-05,0.00041612543398822133,7.24569559656241e-05 200 | 0.396,1.0,0.0,0.0003929205381183444,5.525107501431592e-05,0.00040697637951265386,8.919626262600942e-05 201 | 0.398,1.0,0.0,0.00039130445931778593,5.4771607774609335e-05,0.0003570753873886739,5.592005453457454e-05 202 | 0.4,1.0,0.0,0.0003896932287005054,5.430350079916053e-05,0.0003849969124016293,6.597385772290446e-05 203 | 0.402,1.0,0.0,0.00038808773354194595,5.384682527824376e-05,0.00040326785154631926,7.387695940720243e-05 204 | 0.404,1.0,0.0,0.0003864888422621733,5.340164350560038e-05,0.0003656935753963966,6.964157084518618e-05 205 | 0.406,1.0,0.0,0.0003848974042186223,5.296800906410235e-05,0.00039200386595131064,5.4676380249712065e-05 206 | 0.40800000000000003,1.0,0.0,0.0003833142495147666,5.254596701176389e-05,0.00039994941025561786,5.738498203053468e-05 207 | 0.41000000000000003,1.0,0.0,0.00038174018882453466,5.213555406791501e-05,0.00035092838764143493,4.896856460215562e-05 208 | 0.41200000000000003,1.0,0.0,0.00038017601323228693,5.173679879936097e-05,0.0004082558631120833,4.2788757497016025e-05 209 | 0.41400000000000003,1.0,0.0,0.0003786224940881603,5.134972180636155e-05,0.0003627414009147662,4.0529262952135595e-05 210 | 0.41600000000000004,1.0,0.0,0.0003770803828785823,5.0974335908273924e-05,0.00037656227168929985,6.570102336630146e-05 
211–871 | … (rows 211 through 871 of the CSV, t = 0.418 to t = 1.738, continue here in the same seven-column format: t, 1.0, 0.0, followed by four small floating-point values) …
1.74,1.0,0.0,0.0003527862951304085,5.8815000650471015e-05,0.00034950977986756215,3.698123399478008e-05 873 | 1.742,1.0,0.0,0.00035279039748387564,5.881650855297922e-05,0.00036739001185506076,3.680667637457939e-05 874 | 1.744,1.0,0.0,0.00035279451901079834,5.881799460387566e-05,0.0003337306935956424,6.278515535488808e-05 875 | 1.746,1.0,0.0,0.0003527986569191494,5.881945829327855e-05,0.00034387484283635006,4.5884611478487714e-05 876 | 1.748,1.0,0.0,0.0003528028084553428,5.8820899133464124e-05,0.0003558331006442211,5.6585805472490885e-05 877 | 1.75,1.0,0.0,0.00035280697090521413,5.8822316658775156e-05,0.0003699887949381916,5.9868124752906265e-05 878 | 1.752,1.0,0.0,0.0003528111415949599,5.882371042551787e-05,0.0003897215765361764,6.742212893098142e-05 879 | 1.754,1.0,0.0,0.00035281531789203705,5.8825080011847516e-05,0.0003220340091628783,6.617344842784723e-05 880 | 1.756,1.0,0.0,0.0003528194972060224,5.882642501764286e-05,0.00035289795883775916,3.113730991742299e-05 881 | 1.758,1.0,0.0,0.0003528236769894316,5.882774506436992e-05,0.0003426667756456808,6.325535193975001e-05 882 | 1.76,1.0,0.0,0.0003528278547384986,5.882903979493511e-05,0.0003377485891736833,4.215572517650925e-05 883 | 1.762,1.0,0.0,0.00035283202799391564,5.8830308873528165e-05,0.00033168552662072654,7.186978498522457e-05 884 | 1.764,1.0,0.0,0.00035283619434153373,5.8831551985455045e-05,0.0003279649749537511,7.460823334032543e-05 885 | 1.766,1.0,0.0,0.00035284035141302405,5.8832768836961105e-05,0.00033328527194505686,5.0151736163943174e-05 886 | 1.768,1.0,0.0,0.0003528444968865008,5.8833959155044855e-05,0.0003529758548804172,7.07048523973971e-05 887 | 1.77,1.0,0.0,0.00035284862848710525,5.8835122687262525e-05,0.0003615792332615615,7.619629125024397e-05 888 | 1.772,1.0,0.0,0.000352852743987552,5.883625920152372e-05,0.000360660141062015,4.9261278587171915e-05 889 | 1.774,1.0,0.0,0.0003528568412086369,5.883736848587846e-05,0.0003616972445598571,3.11282651814066e-05 890 | 1.776,1.0,0.0,0.000352860918019708,5.8838450348295895e-05,0.0003348621283840641,5.133240948218692e-05 891 | 1.778,1.0,0.0,0.0003528649723390991,5.8839504616434884e-05,0.00036714783913268284,5.3502137917633196e-05 892 | 1.78,1.0,0.0,0.00035286900213452667,5.884053113740689e-05,0.0003450287333475239,6.674990421408118e-05 893 | 1.782,1.0,0.0,0.00035287300542345023,5.884152977753123e-05,0.0003529202141015155,2.958213654895899e-05 894 | 1.784,1.0,0.0,0.0003528769802733969,5.884250042208318e-05,0.0003534899344913334,7.983508399931642e-05 895 | 1.786,1.0,0.0,0.00035288092480225086,5.884344297503508e-05,0.00035347353397704594,6.393524225318743e-05 896 | 1.788,1.0,0.0,0.000352884837178507,5.884435735879068e-05,0.0003894341869034128,8.484413478013271e-05 897 | 1.79,1.0,0.0,0.00035288871562149055,5.8845243513913234e-05,0.00035199550602387354,6.361984181613457e-05 898 | 1.792,1.0,0.0,0.00035289255840154234,5.884610139884726e-05,0.0003560461175493925,4.7868198997012075e-05 899 | 1.794,1.0,0.0,0.00035289636384017034,5.884693098963455e-05,0.00034691862745599215,3.778999514710127e-05 900 | 1.796,1.0,0.0,0.0003529001303101683,5.884773227962455e-05,0.0003621574999624818,6.643310952599992e-05 901 | 1.798,1.0,0.0,0.0003529038562357015,5.884850527917939e-05,0.0003318438566493828,7.210548811990139e-05 902 | 1.8,1.0,0.0,0.00035290754009236,5.884925001537385e-05,0.00036152583095512396,2.9618626934877598e-05 903 | 1.802,1.0,0.0,0.0003529111804071814,5.884996653169056e-05,0.0003684645061450494,5.150175600229966e-05 904 | 
1.804,1.0,0.0,0.0003529147757586411,5.885065488771065e-05,0.000342702321267127,7.38715567044181e-05 905 | 1.806,1.0,0.0,0.0003529183247766129,5.8851315158800115e-05,0.00035224377005062035,8.497026208452579e-05 906 | 1.808,1.0,0.0,0.0003529218261422994,5.885194743579219e-05,0.0003671436085933346,4.5581850823752885e-05 907 | 1.81,1.0,0.0,0.00035292527858813284,5.8852551824665984e-05,0.0003563793161550747,6.626998204501058e-05 908 | 1.812,1.0,0.0,0.0003529286808976471,5.88531284462216e-05,0.00033889947925401547,3.717242077158427e-05 909 | 1.814,1.0,0.0,0.0003529320319053214,5.8853677435752026e-05,0.000343314977967816,4.6284298622718255e-05 910 | 1.816,1.0,0.0,0.0003529353304963965,5.885419894271205e-05,0.00033651938401404004,4.394207860012678e-05 911 | 1.818,1.0,0.0,0.00035293857560666337,5.885469313038441e-05,0.00034209817380128173,6.689917138811796e-05 912 | 1.82,1.0,0.0,0.00035294176622222554,5.8855160175543495e-05,0.00037130061657636063,5.7395111757973835e-05 913 | 1.822,1.0,0.0,0.0003529449013792357,5.8855600268116746e-05,0.0003484029525723221,6.017129777800406e-05 914 | 1.824,1.0,0.0,0.0003529479801636066,5.8856013610844114e-05,0.0003580609206659534,6.0594511554491024e-05 915 | 1.826,1.0,0.0,0.0003529510017106975,5.88564004189357e-05,0.0003436342831228572,3.9066058958750596e-05 916 | 1.828,1.0,0.0,0.00035295396520497625,5.885676091972787e-05,0.0003525083457641173,4.5606610134202076e-05 917 | 1.83,1.0,0.0,0.00035295686987965835,5.8857095352338095e-05,0.0003494013253116162,4.080889098529931e-05 918 | 1.832,1.0,0.0,0.00035295971501632243,5.88574039673187e-05,0.00034598686738031054,5.280347049833952e-05 919 | 1.834,1.0,0.0,0.0003529624999445044,5.885768702630975e-05,0.000382592458941588,6.033497222163684e-05 920 | 1.836,1.0,0.0,0.00035296522404126894,5.885794480169136e-05,0.0003653896327671598,3.608526330199688e-05 921 | 1.838,1.0,0.0,0.0003529678867307612,5.885817757623553e-05,0.000344015519472366,7.167420726807626e-05 922 | 1.84,1.0,0.0,0.0003529704874837367,5.885838564275785e-05,0.0003835160384066278,6.737454212969264e-05 923 | 1.842,1.0,0.0,0.00035297302581707295,5.8858569303769184e-05,0.0003647435305459321,5.042127141064718e-05 924 | 1.844,1.0,0.0,0.00035297550129326113,5.8858728871127615e-05,0.00035679576873406114,5.062501142914462e-05 925 | 1.846,1.0,0.0,0.0003529779135198793,5.885886466569077e-05,0.0003578241224409062,7.026441890182168e-05 926 | 1.848,1.0,0.0,0.0003529802621490482,5.885897701696883e-05,0.00035265772499395233,7.215342497400745e-05 927 | 1.85,1.0,0.0,0.0003529825468768698,5.885906626277835e-05,0.0003306604909310477,7.935338821697858e-05 928 | 1.852,1.0,0.0,0.0003529847674428486,5.885913274889708e-05,0.000349626660118713,7.076038306728367e-05 929 | 1.854,1.0,0.0,0.00035298692362929816,5.885917682871999e-05,0.00034692502992477115,0.00010962072263571613 930 | 1.856,1.0,0.0,0.00035298901526073127,5.885919886291673e-05,0.0003332516840632956,6.630506483318444e-05 931 | 1.858,1.0,0.0,0.00035299104220323606,5.8859199219090646e-05,0.0003532482164087848,6.770629907714169e-05 932 | 1.86,1.0,0.0,0.00035299300436383797,5.885917827143951e-05,0.0003431394301587485,4.3097604022100916e-05 933 | 1.862,1.0,0.0,0.000352994901689848,5.885913640041821e-05,0.0003408575250971166,7.122865473964591e-05 934 | 1.864,1.0,0.0,0.0003529967341681985,5.8859073992403526e-05,0.00036586059788512707,5.931187953479306e-05 935 | 1.866,1.0,0.0,0.0003529985018247668,5.885899143936114e-05,0.0003563581217289839,6.822951609801881e-05 936 | 
1.868,1.0,0.0,0.00035300020472368675,5.8858889138515084e-05,0.00035772684689995327,5.046957512094927e-05 937 | 1.87,1.0,0.0,0.00035300184296665013,5.885876749201975e-05,0.0003694668580721472,8.337293687221464e-05 938 | 1.872,1.0,0.0,0.0003530034166921969,5.8858626906634594e-05,0.00035834801383152607,7.362979184697307e-05 939 | 1.874,1.0,0.0,0.00035300492607499644,5.8858467793401764e-05,0.0003453928613655735,7.123642969640241e-05 940 | 1.8760000000000001,1.0,0.0,0.0003530063713251186,5.885829056732669e-05,0.0003683514725597855,4.9421824367163955e-05 941 | 1.8780000000000001,1.0,0.0,0.00035300775268729754,5.885809564706185e-05,0.0003342709326411667,3.434193281383552e-05 942 | 1.8800000000000001,1.0,0.0,0.00035300907044018666,5.885788345459379e-05,0.00033590241629797885,6.705778387737746e-05 943 | 1.8820000000000001,1.0,0.0,0.00035301032489560614,5.885765441493362e-05,0.0003457706126365338,3.823270588845413e-05 944 | 1.8840000000000001,1.0,0.0,0.00035301151639778437,5.885740895581098e-05,0.0003534802321595492,3.263954054640191e-05 945 | 1.8860000000000001,1.0,0.0,0.00035301264532259233,5.8857147507371696e-05,0.0003504668939703738,3.5858477060429735e-05 946 | 1.8880000000000001,1.0,0.0,0.00035301371207677277,5.885687050187925e-05,0.0003506257763431751,5.290101645766478e-05 947 | 1.8900000000000001,1.0,0.0,0.0003530147170971642,5.885657837342008e-05,0.0003764516981168079,6.010719132450999e-05 948 | 1.8920000000000001,1.0,0.0,0.0003530156608499203,5.885627155761292e-05,0.00036724847147462787,4.2128382585905746e-05 949 | 1.8940000000000001,1.0,0.0,0.00035301654382972496,5.8855950491322244e-05,0.0003517026079390412,6.593075945091428e-05 950 | 1.8960000000000001,1.0,0.0,0.0003530173665590046,5.88556156123759e-05,0.00034134714488988826,3.965491553228481e-05 951 | 1.8980000000000001,1.0,0.0,0.0003530181295871369,5.885526735928705e-05,0.0003656786138360509,6.802254927016125e-05 952 | 1.9000000000000001,1.0,0.0,0.00035301883348965715,5.885490617098053e-05,0.00036927345667308106,2.5127525883387752e-05 953 | 1.9020000000000001,1.0,0.0,0.0003530194788674631,5.885453248652363e-05,0.00034152576865609696,9.088581763220196e-05 954 | 1.9040000000000001,1.0,0.0,0.0003530200663460178,5.885414674486152e-05,0.00033986328433218766,3.5605797488808384e-05 955 | 1.9060000000000001,1.0,0.0,0.00035302059657455235,5.885374938455724e-05,0.0003807478302844596,8.329136486727376e-05 956 | 1.9080000000000001,1.0,0.0,0.00035302107022526714,5.885334084353647e-05,0.0003617988942634945,3.7433933541999765e-05 957 | 1.9100000000000001,1.0,0.0,0.00035302148799253437,5.885292155883702e-05,0.00036510201974532946,9.39033208397069e-05 958 | 1.9120000000000001,1.0,0.0,0.00035302185059210014,5.885249196636327e-05,0.0003438061935583213,4.791863213027076e-05 959 | 1.9140000000000001,1.0,0.0,0.000353022158760288,5.885205250064545e-05,0.00037342245906203165,2.136949823367249e-05 960 | 1.9160000000000001,1.0,0.0,0.00035302241325320405,5.885160359460395e-05,0.0003481826914575797,5.5920593993907744e-05 961 | 1.9180000000000001,1.0,0.0,0.00035302261484594367,5.885114567931857e-05,0.00034215545213505066,6.438062622842737e-05 962 | 1.92,1.0,0.0,0.0003530227643318008,5.8850679183803024e-05,0.0003624252763687675,6.331628673158943e-05 963 | 1.922,1.0,0.0,0.00035302286252148,5.8850204534784374e-05,0.0003528888197558855,3.420304185581718e-05 964 | 1.924,1.0,0.0,0.00035302291024231163,5.8849722156487785e-05,0.0003375025287460592,5.954620357511696e-05 965 | 
1.926,1.0,0.0,0.0003530229083374707,5.884923247042645e-05,0.00036660505323184335,5.2746315608295635e-05 966 | 1.928,1.0,0.0,0.0003530228576651999,5.8848735895196725e-05,0.0003607175304527133,8.480662446385819e-05 967 | 1.93,1.0,0.0,0.00035302275909803685,5.884823284627857e-05,0.00032897597133842117,6.2675587594535e-05 968 | 1.932,1.0,0.0,0.0003530226135220461,5.8847723735841325e-05,0.00035572819852167244,6.974819121343823e-05 969 | 1.934,1.0,0.0,0.00035302242183605663,5.8847208972554776e-05,0.000367254480782836,5.8696839997600653e-05 970 | 1.936,1.0,0.0,0.0003530221849509047,5.8846688961405605e-05,0.0003802587211143701,5.6554483371143575e-05 971 | 1.938,1.0,0.0,0.00035302190378868264,5.8846164103519157e-05,0.00035500057838014793,7.298592341610803e-05 972 | 1.94,1.0,0.0,0.00035302157928199365,5.884563479598667e-05,0.0003514224856814256,4.600028599875886e-05 973 | 1.942,1.0,0.0,0.00035302121237321365,5.884510143169783e-05,0.0003453425502734998,8.645190671637967e-05 974 | 1.944,1.0,0.0,0.0003530208040137599,5.8844564399178775e-05,0.00034367284890204306,6.510213831248499e-05 975 | 1.946,1.0,0.0,0.0003530203551633666,5.884402408243546e-05,0.00037538233367918135,4.5592422696532256e-05 976 | 1.948,1.0,0.0,0.0003530198667893681,5.8843480860802435e-05,0.0003384996589386367,5.034013490495433e-05 977 | 1.95,1.0,0.0,0.0003530193398659904,5.884293510879707e-05,0.0003529980564607681,9.009832628352044e-05 978 | 1.952,1.0,0.0,0.0003530187753736501,5.884238719597909e-05,0.00034911140985490873,6.639593770136293e-05 979 | 1.954,1.0,0.0,0.0003530181742982623,5.884183748681562e-05,0.0003518435121113794,4.6988699829101094e-05 980 | 1.956,1.0,0.0,0.000353017537630557,5.8841286340551436e-05,0.0003520657127478096,6.465436881035866e-05 981 | 1.958,1.0,0.0,0.0003530168663654041,5.884073411108473e-05,0.0003884686482824347,5.868032686475581e-05 982 | 1.96,1.0,0.0,0.00035301616150114833,5.884018114684814e-05,0.0003422727204895489,5.853487511217204e-05 983 | 1.962,1.0,0.0,0.0003530154240389527,5.883962779069506e-05,0.0003220125905334856,4.423610028666284e-05 984 | 1.964,1.0,0.0,0.0003530146549821522,5.8839074379791345e-05,0.00034691780835452457,6.393323827205822e-05 985 | 1.966,1.0,0.0,0.00035301385533561744,5.8838521245512215e-05,0.0003634392153024775,6.321038219930402e-05 986 | 1.968,1.0,0.0,0.0003530130261051279,5.88379687133444e-05,0.0003584976315048561,8.434038988091691e-05 987 | 1.97,1.0,0.0,0.0003530121682967557,5.883741710279349e-05,0.0003432988036638698,3.358109360778418e-05 988 | 1.972,1.0,0.0,0.00035301128291626,5.883686672729651e-05,0.0003545456116409937,6.579079064369085e-05 989 | 1.974,1.0,0.0,0.0003530103709684918,5.883631789413955e-05,0.0003400508331747939,4.831198804826768e-05 990 | 1.976,1.0,0.0,0.00035300943345680956,5.8835770904380535e-05,0.00033405575658413425,5.724399180854773e-05 991 | 1.978,1.0,0.0,0.00035300847138250614,5.8835226052777044e-05,0.0003617056001136256,6.315567324203038e-05 992 | 1.98,1.0,0.0,0.0003530074857442463,5.883468362771913e-05,0.00035678376775749826,6.0559758741281725e-05 993 | 1.982,1.0,0.0,0.00035300647753751604,5.883414391116711e-05,0.0003652079119956059,4.221348265106585e-05 994 | 1.984,1.0,0.0,0.00035300544775408314,5.8833607178594245e-05,0.0003588637846209675,3.9993358658742755e-05 995 | 1.986,1.0,0.0,0.00035300439738146906,5.8833073698934336e-05,0.0003192684053444017,5.0021307364308004e-05 996 | 1.988,1.0,0.0,0.0003530033274024327,5.8832543734534064e-05,0.0003562340449138336,5.371229051387105e-05 997 | 
1.99,1.0,0.0,0.00035300223879446596,5.8832017541110134e-05,0.0003544828069697774,6.629505603932028e-05 998 | 1.992,1.0,0.0,0.000353001132529301,5.883149536771108e-05,0.00036499996621545397,5.376566500765418e-05 999 | 1.994,1.0,0.0,0.0003530000095724294,5.883097745668376e-05,0.0003404440182696711,3.2714606314150713e-05 1000 | 1.996,1.0,0.0,0.00035299887088263384,5.8830464043644396e-05,0.0003943911430620978,2.597545160942912e-05 1001 | 1.998,1.0,0.0,0.00035299771741153103,5.8829955357454125e-05,0.00036555669421691893,3.120842134391713e-05 1002 | 2.0,1.0,0.0,0.0003529965501031276,5.8829451620199044e-05,0.00033926921204018414,7.166646087964726e-05 1003 | -------------------------------------------------------------------------------- /second_order_ode/data/runge_kutta_create_data.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import signal 3 | from scipy import linalg 4 | import pandas as pd 5 | 6 | import matplotlib.pyplot as plt 7 | 8 | if __name__ == "__main__": 9 | 10 | #-------------------------------------------------------------------------- 11 | m1 = 20.0 12 | m2 = 10.0 13 | 14 | k1 = 2e3 15 | k2 = 1e3 16 | k3 = 5e3 17 | 18 | c1 = 100.0 19 | c2 = 110.0 20 | c3 = 120.0 21 | 22 | Mvib = np.asarray([[m1, 0.0], [0.0, m2]], dtype = float) 23 | Cvib = np.asarray([[c1+c2, -c2], [-c2, c2+c3]], dtype = float) 24 | Kvib = np.asarray([[k1+k2, -k2], [-k2, k2+k3]], dtype = float) 25 | 26 | #-------------------------------------------------------------------------- 27 | # building matrices in continuous time domain 28 | n = Mvib.shape[0] 29 | I = np.eye(n) 30 | Z = np.zeros([n,n]) 31 | Minv = linalg.pinv(Mvib) 32 | 33 | negMinvK = - np.matmul(Minv, Kvib) 34 | negMinvC = - np.matmul(Minv, Cvib) 35 | 36 | Ac = np.hstack((np.vstack((Z,negMinvK)), np.vstack((I,negMinvC)))) 37 | Bc = np.vstack((Z,Minv)) 38 | Cc = np.hstack((I,Z)) 39 | Dc = Z.copy() 40 | 41 | systemC = (Ac, Bc, Cc, Dc) 42 | 43 | #-------------------------------------------------------------------------- 44 | # building matrices in discrete time domain 45 | t = np.linspace(0,2,1001,dtype = float) 46 | dt = t[1] - t[0] 47 | 48 | sD = signal.cont2discrete(systemC, dt) 49 | 50 | Ad = sD[0] 51 | Bd = sD[1] 52 | Cd = sD[2] 53 | Dd = sD[3] 54 | 55 | systemD = (Ad, Bd, Cd, Dd, dt) 56 | 57 | #-------------------------------------------------------------------------- 58 | u = np.zeros((t.shape[0], n)) 59 | u[:, 0] = np.ones((t.shape[0],)) 60 | 61 | x0 = np.zeros((Ad.shape[1],), dtype = 'float32') 62 | 63 | output = signal.dlsim(systemD, u = u, t = t, x0 = x0) 64 | yScipy = output[1] 65 | 66 | yTarget = yScipy + 1.5e-5*np.random.randn(yScipy.shape[0], yScipy.shape[1]) 67 | 68 | df = pd.DataFrame(np.hstack([t[:,np.newaxis],u,yScipy,yTarget]), columns=['t', 'u0','u1','y0','y1','yT0','yT1']) 69 | 70 | df.to_csv('./data.csv', index = False) 71 | 72 | #-------------------------------------------------------------------------- 73 | plt.plot(t, yTarget, '-', color ='gray') 74 | plt.plot(t, yScipy, '-', color ='r') 75 | plt.xlabel('time') 76 | plt.ylabel('displacement') 77 | plt.grid('on') 78 | plt.show() 79 | -------------------------------------------------------------------------------- /second_order_ode/model.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from tensorflow.keras.layers import RNN, Layer 3 | from tensorflow.keras import Sequential 4 | from tensorflow.keras.optimizers import RMSprop 5 | from tensorflow 
import float32, concat, convert_to_tensor, linalg 6 | 7 | class RungeKuttaIntegratorCell(Layer): 8 | def __init__(self, m, c, k, dt, initial_state, **kwargs): 9 | super(RungeKuttaIntegratorCell, self).__init__(**kwargs) 10 | self.Minv = linalg.inv(np.diag(m)) 11 | self._c = c 12 | self.K = self._getCKmatrix(k) 13 | self.initial_state = initial_state 14 | self.state_size = 2*len(m) 15 | self.A = np.array([0., 0.5, 0.5, 1.0], dtype='float32') 16 | self.B = np.array([[1/6, 2/6, 2/6, 1/6]], dtype='float32') 17 | self.dt = dt 18 | 19 | def build(self, input_shape, **kwargs): 20 | self.kernel = self.add_weight("C", shape = self._c.shape, trainable = True, initializer = lambda shape, dtype: self._c, **kwargs) 21 | self.built = True 22 | 23 | def call(self, inputs, states): 24 | C = self._getCKmatrix(self.kernel) 25 | y = states[0][:,:2] 26 | ydot = states[0][:,2:] 27 | 28 | yddoti = self._fun(self.Minv, self.K, C, inputs, y, ydot) 29 | yi = y + self.A[0] * ydot * self.dt 30 | ydoti = ydot + self.A[0] * yddoti * self.dt 31 | fn = self._fun(self.Minv, self.K, C, inputs, yi, ydoti) 32 | for j in range(1,4): 33 | yn = y + self.A[j] * ydot * self.dt 34 | ydotn = ydot + self.A[j] * yddoti * self.dt 35 | ydoti = concat([ydoti, ydotn], axis=0) 36 | fn = concat([fn, self._fun(self.Minv, self.K, C, inputs, yn, ydotn)], axis=0) 37 | 38 | y = y + linalg.matmul(self.B, ydoti) * self.dt 39 | ydot = ydot + linalg.matmul(self.B, fn) * self.dt 40 | return y, [concat(([y, ydot]), axis=-1)] 41 | 42 | def _fun(self, Minv, K, C, u, y, ydot): 43 | return linalg.matmul(u - linalg.matmul(ydot, C, transpose_b=True) - linalg.matmul(y, K, transpose_b=True), Minv, transpose_b=True) 44 | 45 | def _getCKmatrix(self, a): 46 | return convert_to_tensor([[a[0]+a[1],-a[1]],[-a[1],a[1]+a[2]]], dtype=float32) 47 | 48 | def get_initial_state(self, inputs=None, batch_size=None, dtype=None): 49 | return self.initial_state 50 | 51 | 52 | def create_model(m, c, k, dt, initial_state, batch_input_shape, return_sequences = True, unroll = False): 53 | rkCell = RungeKuttaIntegratorCell(m=m, c=c, k=k, dt=dt, initial_state=initial_state) 54 | PINN = RNN(cell=rkCell, batch_input_shape=batch_input_shape, return_sequences=return_sequences, return_state=False, unroll=unroll) 55 | model = Sequential() 56 | model.add(PINN) 57 | model.compile(loss='mse', optimizer=RMSprop(1e4), metrics=['mae']) 58 | return model 59 | -------------------------------------------------------------------------------- /second_order_ode/runge_kutta_example.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | from tensorflow.keras.layers import RNN, Layer 5 | from tensorflow.keras import Sequential 6 | from tensorflow.keras.optimizers import RMSprop 7 | from tensorflow import float32, concat, convert_to_tensor, linalg 8 | 9 | class RungeKuttaIntegratorCell(Layer): 10 | def __init__(self, m, c, k, dt, initial_state, **kwargs): 11 | super(RungeKuttaIntegratorCell, self).__init__(**kwargs) 12 | self.Minv = linalg.inv(np.diag(m)) 13 | self._c = c 14 | self.K = self._getCKmatrix(k) 15 | self.initial_state = initial_state 16 | self.state_size = 2*len(m) 17 | self.A = np.array([0., 0.5, 0.5, 1.0], dtype='float32') 18 | self.B = np.array([[1/6, 2/6, 2/6, 1/6]], dtype='float32') 19 | self.dt = dt 20 | 21 | def build(self, input_shape, **kwargs): 22 | self.kernel = self.add_weight("C", shape = self._c.shape, trainable = True, initializer = lambda shape, dtype: 
self._c, **kwargs) 23 | self.built = True 24 | 25 | def call(self, inputs, states): 26 | C = self._getCKmatrix(self.kernel) 27 | y = states[0][:,:2] 28 | ydot = states[0][:,2:] 29 | 30 | yddoti = self._fun(self.Minv, self.K, C, inputs, y, ydot) 31 | yi = y + self.A[0] * ydot * self.dt 32 | ydoti = ydot + self.A[0] * yddoti * self.dt 33 | fn = self._fun(self.Minv, self.K, C, inputs, yi, ydoti) 34 | for j in range(1,4): 35 | yn = y + self.A[j] * ydot * self.dt 36 | ydotn = ydot + self.A[j] * yddoti * self.dt 37 | ydoti = concat([ydoti, ydotn], axis=0) 38 | fn = concat([fn, self._fun(self.Minv, self.K, C, inputs, yn, ydotn)], axis=0) 39 | 40 | y = y + linalg.matmul(self.B, ydoti) * self.dt 41 | ydot = ydot + linalg.matmul(self.B, fn) * self.dt 42 | return y, [concat(([y, ydot]), axis=-1)] 43 | 44 | def _fun(self, Minv, K, C, u, y, ydot): 45 | return linalg.matmul(u - linalg.matmul(ydot, C, transpose_b=True) - linalg.matmul(y, K, transpose_b=True), Minv, transpose_b=True) 46 | 47 | def _getCKmatrix(self, a): 48 | return convert_to_tensor([[a[0]+a[1],-a[1]],[-a[1],a[1]+a[2]]], dtype=float32) 49 | 50 | def get_initial_state(self, inputs=None, batch_size=None, dtype=None): 51 | return self.initial_state 52 | 53 | 54 | def create_model(m, c, k, dt, initial_state, batch_input_shape, return_sequences = True, unroll = False): 55 | rkCell = RungeKuttaIntegratorCell(m=m, c=c, k=k, dt=dt, initial_state=initial_state) 56 | PINN = RNN(cell=rkCell, batch_input_shape=batch_input_shape, return_sequences=return_sequences, return_state=False, unroll=unroll) 57 | model = Sequential() 58 | model.add(PINN) 59 | model.compile(loss='mse', optimizer=RMSprop(1e4), metrics=['mae']) 60 | return model 61 | 62 | if __name__ == "__main__": 63 | # masses, spring coefficients, and damping coefficients 64 | m = np.array([20.0, 10.0], dtype='float32') 65 | c = np.array([10.0, 10.0, 10.0], dtype='float32') # initial guess 66 | k = np.array([2e3, 1e3, 5e3], dtype='float32') 67 | 68 | # data 69 | df = pd.read_csv('./data/data.csv') 70 | t = df[['t']].values 71 | dt = (t[1] - t[0])[0] 72 | utrain = df[['u0', 'u1']].values[np.newaxis, :, :] 73 | ytrain = df[['yT0', 'yT1']].values[np.newaxis, :, :] 74 | 75 | # fitting physics-informed neural network 76 | initial_state = np.zeros((1,2 * len(m),), dtype='float32') 77 | model = create_model(m, c, k, dt, initial_state=initial_state, batch_input_shape=utrain.shape) 78 | yPred_before = model.predict_on_batch(utrain)[0, :, :] 79 | model.fit(utrain, ytrain, epochs=100, steps_per_epoch=1, verbose=1) 80 | yPred = model.predict_on_batch(utrain)[0, :, :] 81 | 82 | # plotting prediction results 83 | plt.plot(t, ytrain[0, :, :], 'gray') 84 | plt.plot(t, yPred_before[:, :], 'r', label='before training') 85 | plt.plot(t, yPred[:, :], 'b', label='after training') 86 | plt.xlabel('t') 87 | plt.ylabel('y') 88 | plt.grid('on') 89 | plt.legend() 90 | plt.show() 91 | -------------------------------------------------------------------------------- /second_order_ode/runge_kutta_example_PyTorch.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | 4 | import matplotlib.pyplot as plt 5 | import matplotlib as matplotlib 6 | 7 | from torch.nn.parameter import Parameter 8 | from torch import ( 9 | linalg, 10 | nn, 11 | Tensor, 12 | stack, 13 | cat, 14 | transpose, 15 | optim, 16 | zeros, 17 | diag 18 | ) 19 | 20 | 21 | class MyRNN(nn.Module): 22 | def __init__(self, cell, **kwargs): 23 | super(MyRNN, self).__init__() 24 | 
self.cell = cell 25 | 26 | def forward(self, inputs, initial_state): 27 | 28 | bs, seq_sz, _ = inputs.shape 29 | state = [] 30 | state.append(initial_state) 31 | for t in range(1, seq_sz): 32 | input = inputs[:, t-1, :] 33 | state_t = self.cell.forward(input, state[t-1]) 34 | state.append(state[t-1]+state_t) 35 | 36 | return stack((state),dim=1) 37 | 38 | 39 | class RungeKuttaIntegratorCell(nn.Module): 40 | def __init__(self, m, c, k, dt, **kwargs): 41 | super(RungeKuttaIntegratorCell, self).__init__(**kwargs) 42 | self.Minv = linalg.inv(diag(m)) 43 | self.c1 = Parameter(c[0]) 44 | self.c2 = Parameter(c[1]) 45 | self.c3 = Parameter(c[2]) 46 | 47 | self.K = Tensor([[k[0]+k[1],-k[1]],[-k[1],k[1]+k[2]]]) 48 | self.state_size = 2*len(m) 49 | self.A = Tensor([0., 0.5, 0.5, 1.0]) 50 | self.B = Tensor([[1/6, 2/6, 2/6, 1/6]]) 51 | self.dt = dt 52 | 53 | def forward(self, inputs, states): 54 | C = stack((stack((self.c1+self.c2, -self.c2)), stack((-self.c2, self.c2+self.c3)))) 55 | y = states[:, :2] 56 | ydot = states[:, 2:] 57 | 58 | yddoti = self._fun(self.Minv, self.K, C, inputs, y, ydot) 59 | yi = y + self.A[0] * ydot * self.dt 60 | ydoti = ydot + self.A[0] * yddoti * self.dt 61 | fn = self._fun(self.Minv, self.K, C, inputs, yi, ydoti) 62 | for j in range(1,4): 63 | yn = y + self.A[j] * ydot * self.dt 64 | ydotn = ydot + self.A[j] * yddoti * self.dt 65 | ydoti = cat([ydoti, ydotn], dim=0) 66 | fn = cat([fn, self._fun(self.Minv, self.K, C, inputs, yn, ydotn)], dim=0) 67 | 68 | y = linalg.matmul(self.B, ydoti) * self.dt 69 | ydot = linalg.matmul(self.B, fn) * self.dt 70 | 71 | return cat(([y, ydot]), dim=-1) 72 | 73 | def _fun(self, Minv, K, C, u, y, ydot): 74 | return linalg.matmul(u - linalg.matmul(ydot, transpose(C, 0, 1)) - linalg.matmul(y, transpose (K, 0, 1)), transpose(Minv, 0, 1)) 75 | 76 | 77 | def pinn_training_loop(n_epochs, optimizer, model, loss_fn, train, label, initial_state): 78 | mae = nn.L1Loss() 79 | for epoch in range(1, n_epochs + 1): 80 | #Forward pass 81 | output_train = model(train, initial_state)[:, :, :2] 82 | loss_train = loss_fn(output_train, label) 83 | mae_train = mae(output_train, label) 84 | 85 | #Backward pass 86 | optimizer.zero_grad() 87 | loss_train.backward() 88 | optimizer.step() 89 | 90 | print(f"Epoch {epoch}, Training loss {loss_train.item():.4e}, mae {mae_train.item():.4e}") 91 | 92 | 93 | if __name__ == "__main__": 94 | # masses, spring coefficients, and damping coefficients 95 | m = Tensor([20.0, 10.0]) 96 | k = Tensor([2e3, 1e3, 5e3]) 97 | c = Tensor([10.0, 10.0, 10.0]) # initial guess for damping coefficient 98 | 99 | 100 | # data 101 | df = pd.read_csv('./data/data.csv') 102 | t = df[['t']].values 103 | dt = (t[1] - t[0])[0] 104 | utrain = df[['u0', 'u1']].values[np.newaxis, :, :] 105 | ytrain = df[['yT0', 'yT1']].values[np.newaxis, :, :] 106 | t = Tensor(t) 107 | utrain = Tensor(utrain) 108 | ytrain = Tensor(ytrain) 109 | 110 | # Initial state of the system 111 | initial_state = zeros((1,2 * len(m))) 112 | 113 | rkCell = RungeKuttaIntegratorCell(m=m, c=c, k=k, dt=dt) 114 | model = MyRNN(cell=rkCell) 115 | 116 | #prediction results before training 117 | yPred_before = model(utrain, initial_state)[0, :, :] 118 | yPred_before = yPred_before.detach().numpy()[:,:2] 119 | 120 | #PINN training 121 | pinn_training_loop( 122 | n_epochs = 100, 123 | optimizer = optim.RMSprop(model.parameters(), lr=1e4), 124 | model = model, 125 | loss_fn = nn.MSELoss(), 126 | train = utrain, 127 | label = ytrain, 128 | initial_state=initial_state 129 | ) 130 | 131 | 
#prediction results after training 132 | yPred = model(utrain, initial_state) [0, :, :] 133 | yPred = yPred.detach().numpy()[:,:2] 134 | 135 | # plotting prediction results 136 | plt.plot(t, ytrain[0, :, :], 'gray') 137 | plt.plot(t, yPred_before, 'r', label='before training') 138 | plt.plot(t, yPred, 'b', label='after training') 139 | plt.xlabel('t') 140 | plt.ylabel('y') 141 | plt.grid('on') 142 | plt.legend() 143 | plt.show() 144 | -------------------------------------------------------------------------------- /second_order_ode/runge_kutta_example_contaminated.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import tensorflow as tf 4 | import matplotlib.pyplot as plt 5 | import matplotlib as matplotlib 6 | from tensorflow.keras.layers import RNN, Layer 7 | from tensorflow.keras import Sequential 8 | from tensorflow.keras.optimizers import RMSprop 9 | from tensorflow import float32, concat, convert_to_tensor, linalg 10 | 11 | 12 | class RungeKuttaIntegratorCell(Layer): 13 | def __init__(self, m, c, k, dt, initial_state, **kwargs): 14 | super(RungeKuttaIntegratorCell, self).__init__(**kwargs) 15 | self.Minv = linalg.inv(np.diag(m)) 16 | self._c = c 17 | self.K = self._getCKmatrix(k) 18 | self.initial_state = initial_state 19 | self.state_size = 2 * len(m) 20 | self.A = np.array([0., 0.5, 0.5, 1.0], dtype='float32') 21 | self.B = np.array([[1 / 6, 2 / 6, 2 / 6, 1 / 6]], dtype='float32') 22 | self.dt = dt 23 | 24 | def build(self, input_shape, **kwargs): 25 | self.kernel = self.add_weight("C", shape=self._c.shape, trainable=True, 26 | initializer=lambda shape, dtype: self._c, **kwargs) 27 | self.built = True 28 | 29 | def call(self, inputs, states): 30 | C = self._getCKmatrix(self.kernel) 31 | y = states[0][:, :2] 32 | ydot = states[0][:, 2:] 33 | 34 | yddoti = self._fun(self.Minv, self.K, C, inputs, y, ydot) 35 | yi = y + self.A[0] * ydot * self.dt 36 | ydoti = ydot + self.A[0] * yddoti * self.dt 37 | fn = self._fun(self.Minv, self.K, C, inputs, yi, ydoti) 38 | for j in range(1, 4): 39 | yn = y + self.A[j] * ydot * self.dt 40 | ydotn = ydot + self.A[j] * yddoti * self.dt 41 | ydoti = concat([ydoti, ydotn], axis=0) 42 | fn = concat([fn, self._fun(self.Minv, self.K, C, inputs, yn, ydotn)], axis=0) 43 | 44 | y = y + linalg.matmul(self.B, ydoti) * self.dt 45 | ydot = ydot + linalg.matmul(self.B, fn) * self.dt 46 | return y, [concat(([y, ydot]), axis=-1)] 47 | 48 | def _fun(self, Minv, K, C, u, y, ydot): 49 | return linalg.matmul(u - linalg.matmul(ydot, C, transpose_b=True) - linalg.matmul(y, K, transpose_b=True), Minv, 50 | transpose_b=True) 51 | 52 | def _getCKmatrix(self, a): 53 | return convert_to_tensor([[a[0] + a[1], -a[1]], [-a[1], a[1] + a[2]]], dtype=float32) 54 | 55 | def get_initial_state(self, inputs=None, batch_size=None, dtype=None): 56 | return self.initial_state 57 | 58 | 59 | def loss_fc(y_true, y_pred): 60 | error_squared = (y_true - y_pred) ** 2 61 | filtered_error_sq = error_squared * filter_array 62 | mse = tf.math.reduce_mean(filtered_error_sq) 63 | return mse 64 | 65 | 66 | def create_model(m, c, k, dt, initial_state, batch_input_shape, return_sequences=True, unroll=False): 67 | rkCell = RungeKuttaIntegratorCell(m=m, c=c, k=k, dt=dt, initial_state=initial_state) 68 | PINN = RNN(cell=rkCell, batch_input_shape=batch_input_shape, return_sequences=return_sequences, return_state=False, 69 | unroll=unroll) 70 | model = Sequential() 71 | model.add(PINN) 72 | model.compile(loss=loss_fc, 
optimizer=RMSprop(1e4), metrics=['mae']) 73 | return model 74 | 75 | 76 | if __name__ == "__main__": 77 | # masses, spring coefficients, and damping coefficients 78 | m = np.array([20.0, 10.0], dtype='float32') 79 | c = np.array([10.0, 10.0, 10.0], dtype='float32') # initial guess 80 | k = np.array([2e3, 1e3, 5e3], dtype='float32') 81 | 82 | # data 83 | df = pd.read_csv('./data/data_contaminated_02.csv') 84 | t = df[['t']].values 85 | dt = (t[1] - t[0])[0] 86 | utrain = df[['u0', 'u1']].values[np.newaxis, :, :] 87 | ytrain = df[['yT0', 'yT1']].values[np.newaxis, :, :] 88 | 89 | # filter array 90 | filter_array = np.ones((1001, 1)) 91 | filter_array[50:100, 0] = 0 92 | filter_array[200:250, 0] = 0 93 | #filter_array[250:375, 0] = 0 94 | filter_array = tf.convert_to_tensor(filter_array, dtype=tf.float32) 95 | 96 | # fitting physics-informed neural network 97 | initial_state = np.zeros((1, 2 * len(m),), dtype='float32') 98 | model = create_model(m, c, k, dt, initial_state=initial_state, batch_input_shape=utrain.shape) 99 | yPred_before = model.predict_on_batch(utrain)[0, :, :] 100 | model.fit(utrain, ytrain, epochs=100, steps_per_epoch=1, verbose=1) 101 | yPred = model.predict_on_batch(utrain)[0, :, :] 102 | 103 | # plotting prediction results 104 | matplotlib.rc('font', size=14) 105 | fig = plt.figure(1) 106 | plt.plot(t, ytrain[0, :, 0] * 1000, 'tab:gray', label='observed data', linewidth=1.0) 107 | plt.plot(t, ytrain[0, :, 1] * 1000, 'tab:gray', linewidth=1.0) 108 | plt.plot(t, yPred[:, 0] * 1000, 'tab:orange', label='after training', linewidth=2.0) 109 | plt.plot(t, yPred[:, 1] * 1000, 'tab:orange', linewidth=2.0) 110 | plt.xlabel('time (s)') 111 | plt.ylabel('y (mm)') 112 | plt.ylim(-0.1, 0.8) 113 | plt.grid('on') 114 | plt.legend() 115 | plt.show() 116 | 117 | # plotting prediction results 118 | fig = plt.figure(2) 119 | plt.plot(t, yPred[:, 0] * 1000, 'tab:blue', label='$m_1$', linewidth=2.0) 120 | plt.plot(t, yPred[:, 1] * 1000, 'tab:orange', label='$m_2$', linewidth=2.0) 121 | plt.xlabel('time (s)') 122 | plt.ylabel('y (mm)') 123 | plt.ylim(-0.1, 0.7) 124 | plt.grid('on') 125 | plt.legend() 126 | plt.show() 127 | 128 | weights = model.trainable_weights[0].numpy() 129 | weights = weights[np.newaxis, :] 130 | 131 | 132 | -------------------------------------------------------------------------------- /second_order_ode/runge_kutta_predict_only.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | from model import create_model 5 | 6 | if __name__ == "__main__": 7 | # masses, spring coefficients, and damping coefficients 8 | m = np.array([20.0, 10.0], dtype='float32') 9 | c = np.array([30.0, 5.0, 10.0], dtype='float32') # initial guess 10 | k = np.array([2e3, 1e3, 5e3], dtype='float32') 11 | 12 | # data 13 | df = pd.read_csv('./data/data.csv') 14 | t = df[['t']].values 15 | dt = (t[1] - t[0])[0] 16 | u = df[['u0', 'u1']].values[np.newaxis, :, :] 17 | yObs = df[['yT0', 'yT1']].values[np.newaxis, :, :] 18 | 19 | initial_state = np.zeros((1,2 * len(m),), dtype='float32') 20 | 21 | # fitting physics-informed neural network 22 | model = create_model(m, c, k, dt, initial_state=initial_state, batch_input_shape=u.shape) 23 | yPred_before = model.predict_on_batch(u)[0, :, :] 24 | model.load_weights("./savedmodels/cp.ckpt") 25 | yPred = model.predict_on_batch(u)[0, :, :] 26 | 27 | # plotting predictions 28 | plt.plot(t, yObs[0,:,:], 'gray') 29 | plt.plot(t, yPred_before[:,:], 'r', 
label='before training') 30 | plt.plot(t, yPred[:,:], 'b', label='after training') 31 | plt.xlabel('t') 32 | plt.ylabel('y') 33 | plt.grid('on') 34 | plt.legend() 35 | plt.show() 36 | -------------------------------------------------------------------------------- /second_order_ode/runge_kutta_save_training.py: -------------------------------------------------------------------------------- 1 | from tensorflow.keras.callbacks import ModelCheckpoint 2 | import pandas as pd 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | from model import create_model 6 | 7 | if __name__ == "__main__": 8 | # masses, spring coefficients, and damping coefficients 9 | m = np.array([20.0, 10.0], dtype='float32') 10 | c = np.array([30.0, 5.0, 10.0], dtype='float32') # initial guess 11 | k = np.array([2e3, 1e3, 5e3], dtype='float32') 12 | 13 | # data 14 | df = pd.read_csv('./data/data.csv') 15 | t = df[['t']].values 16 | dt = (t[1] - t[0])[0] 17 | utrain = df[['u0', 'u1']].values[np.newaxis, :, :] 18 | ytrain = df[['yT0', 'yT1']].values[np.newaxis, :, :] 19 | 20 | initial_state = np.zeros((1,2 * len(m),), dtype='float32') 21 | 22 | # Callback 23 | mckp = ModelCheckpoint(filepath="./savedmodels/cp.ckpt", monitor='loss', verbose=1, 24 | save_best_only=True, mode='min', save_weights_only=True) 25 | 26 | # fitting physics-informed neural network 27 | model = create_model(m, c, k, dt, initial_state=initial_state, batch_input_shape=utrain.shape) 28 | history = model.fit(utrain, ytrain, epochs=100, steps_per_epoch=1, verbose=1, callbacks=[mckp]) 29 | 30 | # plotting 31 | plt.plot(np.array(history.history['loss'])) 32 | plt.xlabel("epoch") 33 | plt.ylabel("loss") 34 | plt.grid(which='both') 35 | plt.show() 36 | --------------------------------------------------------------------------------
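
For readers who want a quick sanity check of the second order integrator cell against the data-generation script, here is a minimal sketch (not one of the repository files above; it assumes it is saved and run from inside `second_order_ode/` with TensorFlow 2.x, NumPy, pandas, and matplotlib available). It builds the model with the true damping coefficients used by `data/runge_kutta_create_data.py` and, without any training, compares the prediction with the noise-free `y0`, `y1` columns of `data.csv`:

```
# check_true_coefficients.py -- illustrative sketch, not part of the repository.
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from model import create_model

# masses and stiffnesses as in runge_kutta_example.py; damping set to the
# "true" values (c1, c2, c3) used by data/runge_kutta_create_data.py
m = np.array([20.0, 10.0], dtype='float32')
c = np.array([100.0, 110.0, 120.0], dtype='float32')
k = np.array([2e3, 1e3, 5e3], dtype='float32')

# load the generated data and recover the time step
df = pd.read_csv('./data/data.csv')
t = df[['t']].values
dt = (t[1] - t[0])[0]
u = df[['u0', 'u1']].values[np.newaxis, :, :]
yRef = df[['y0', 'y1']].values  # noise-free scipy.signal.dlsim response

# build the RNN wrapping the RungeKuttaIntegratorCell
initial_state = np.zeros((1, 2 * len(m)), dtype='float32')
model = create_model(m, c, k, dt, initial_state=initial_state,
                     batch_input_shape=u.shape)

# with the true coefficients, the untrained integrator should roughly
# track the reference displacements
yPred = model.predict_on_batch(u)[0, :, :]

plt.plot(t, yRef, 'gray', label='scipy reference')
plt.plot(t, yPred, 'b--', label='RungeKuttaIntegratorCell')
plt.xlabel('t')
plt.ylabel('y')
plt.grid('on')
plt.legend()
plt.show()
```

This uses only the `create_model` function exported by `second_order_ode/model.py`; any remaining gap between the two curves reflects the fixed-step Runge-Kutta scheme rather than missing training.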