├── outputs
│   └── synthetic
│       ├── bayesian_kappa5_alpha2_rank10.json
│       ├── supervised_kappa5_alpha0_rank20.json
│       └── supervised_kappa5_alpha0_rank10.json
├── src
│   ├── rpca.py
│   ├── synthetic_bayesian_train.py
│   ├── model.py
│   ├── synthetic_train.py
│   ├── training_functions.py
│   └── background_separation_train.py
└── README.md

/outputs/synthetic/bayesian_kappa5_alpha2_rank10.json:
--------------------------------------------------------------------------------
1 | {"0": {"eta": 0.5717742959387591, "z0": 0.033783769993661485, "z1": 0.010257256705521883, "decay": 0.8736233793883164, "loss": 41.48923873901367, "X_loss": 5.284445840983421e-13, "reconstruction_loss": 1.5564705774240295e-11}, "1": {"eta": 0.4549705555560064, "z0": 0.02295405519595916, "z1": 0.01380975083239102, "decay": 0.8537392503954786, "loss": 41.81155776977539, "X_loss": 3.571132376357228e-13, "reconstruction_loss": 5.343347834509127e-13}, "2": {"eta": 0.4024934821027122, "z0": 0.030354369554160012, "z1": 0.03424542829386793, "decay": 0.8544954633415447, "loss": 41.52719497680664, "X_loss": 3.4189201491598153e-13, "reconstruction_loss": 2.372215705120917e-12}, "3": {"eta": 0.2708466240158447, "z0": 0.025488533753303358, "z1": 0.04951646324886857, "decay": 0.8485677035541019, "loss": 41.37783432006836, "X_loss": 3.483786067785821e-13, "reconstruction_loss": 1.376612799447452e-12}, "4": {"eta": 0.28111883021046236, "z0": 0.026798414860478494, "z1": 0.02285444004184913, "decay": 0.8555919477594954, "loss": 41.17599868774414, "X_loss": 3.4251258513445793e-13, "reconstruction_loss": 1.4783990004429981e-12}}

--------------------------------------------------------------------------------
/src/rpca.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import tensorly as tl
3 | from tensorly import tucker_to_tensor, tucker_to_unfolded, unfold
4 | from tensorly.decomposition import tucker
5 | 
6 | tl.set_backend('pytorch')
7 | 
8 | 
9 | def thre(inputs, threshold, device):
10 |     return torch.sign(inputs) * torch.max(torch.abs(inputs) - threshold, torch.zeros(inputs.shape).to(device))
11 | 
12 | def rpca(Y, ranks, z0, z1, eta, decay, T, epsilon, device, skip=[]):
13 |     ## Initialization
14 |     G_t, factors_t = tucker(Y - thre(Y, z0, device), rank=ranks)
15 |     order = len(ranks)
16 | 
17 |     ATA_inverses_skipped = dict()
18 |     ATA_skipped = dict()
19 |     for k in skip:
20 |         ATA_skipped[k] = factors_t[k].T @ factors_t[k]
21 |         ATA_inverses_skipped[k] = torch.linalg.inv(ATA_skipped[k])
22 | 
23 |     ## Main Loop in ScaledGD RPCA
24 |     for t in range(T):
25 |         X_t = tucker_to_tensor((G_t, factors_t))
26 |         S_t1 = thre(Y - X_t, z1 * (decay**t), device)
27 |         factors_t1 = []
28 |         D = S_t1 - Y
29 |         ATA_t = []
30 |         for k in range(order):
31 |             if k in skip:
32 |                 ATA_t.append(ATA_skipped[k])
33 |             else:
34 |                 ATA_t.append(factors_t[k].T @ factors_t[k])
35 | 
36 |         for k in range(order):
37 |             if k in skip:
38 |                 factors_t1.append(factors_t[k])
39 |                 continue
40 | 
41 |             A_t = factors_t[k]
42 |             factors_t_copy = factors_t.copy()
43 |             factors_t_copy[k] = torch.eye(A_t.shape[1]).to(device)
44 |             A_breve_t = tucker_to_unfolded((G_t, factors_t_copy), k).T
45 | 
46 |             ATA_t_copy = ATA_t.copy()
47 |             ATA_t_copy[k] = torch.eye(A_t.shape[1]).to(device)
48 |             AbTAb_t = tucker_to_unfolded((G_t, ATA_t_copy), k) @ unfold(G_t, k).T
49 | 
50 |             ker = torch.linalg.inv(AbTAb_t + epsilon * torch.eye(A_breve_t.shape[1]).to(device))
51 |             A_t1 = (1 - eta) * A_t - eta * unfold(D, k) @ A_breve_t @ ker
52 |             factors_t1.append(A_t1)
53 |         G_factors_t = []
54 |         for k in range(order):
55 |             if k in skip:
56 |                 G_factors_t.append(ATA_inverses_skipped[k] @ factors_t[k].T)
57 |             else:
58 |                 G_factors_t.append(torch.linalg.inv(ATA_t[k] + epsilon * torch.eye(factors_t[k].shape[1]).to(device)) @ factors_t[k].T)
59 |         G_t1 = G_t - eta * tucker_to_tensor((X_t + D, G_factors_t))
60 |         factors_t = factors_t1
61 |         G_t = G_t1
62 | 
63 |     return tucker_to_tensor((G_t, factors_t)), S_t1
64 | 

--------------------------------------------------------------------------------
/src/synthetic_bayesian_train.py:
--------------------------------------------------------------------------------
1 | import optuna
2 | import torch
3 | import numpy as np
4 | import random
5 | import tensorly as tl
6 | import json
7 | 
8 | from rpca import rpca
9 | from training_functions import generate_problem
10 | 
11 | torch.manual_seed(0)
12 | np.random.seed(0)
13 | random.seed(0)
14 | 
15 | optuna.logging.set_verbosity(optuna.logging.WARNING)
16 | 
17 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
18 | tl.set_backend('pytorch')
19 | 
20 | samples = 20 # number of samples
21 | n_trials = 500 # iterations of baseline to run
22 | shape = [100, 100, 100] # Tensor shape
23 | T = 100 # number of iterative updates of RPCA
24 | 
25 | # Grid of scenarios to experiment with
26 | kappas = [5]
27 | alphas = [0.2, 0.3]
28 | ranks = [10, 20]
29 | 
30 | epsilon = 1e-7
31 | 
32 | out_dir = '../outputs/synthetic'
33 | 
34 | 
35 | l1_loss = lambda Y, X_t: (Y-X_t).norm(p=1) / (Y.norm()**2)
36 | X_loss = lambda X_star, X_t: (X_t - X_star).norm()**2 / (X_star.norm()**2)
37 | reconstruction_loss = lambda Y, X_t, S_t: (Y - X_t - S_t).norm()**2 / (Y.norm()**2)
38 | 
39 | def objective(trial, Y, r):
40 |     eta = trial.suggest_float('eta', 0, 3)
41 |     z0 = trial.suggest_float('z0', 0.001, 0.05)
42 |     z1 = trial.suggest_float('z1', 0.001, 0.05)
43 |     decay = trial.suggest_float('decay', 0.001, 0.999)
44 |     try:
45 |         X_t, S_t = rpca(Y, [r, r, r], z0, z1, eta, decay, T, epsilon, device)
46 |         return l1_loss(Y, X_t)
47 |     except:
48 |         return float('inf')
49 | 
50 | data_generator = lambda ranks, shape, alpha, kappa: generate_problem(ranks, shape, alpha, kappa, device)
51 | 
52 | for kap in kappas:
53 |     for a in alphas:
54 |         for r in ranks:
55 |             metrics_path = f'{out_dir}/bayesian_kappa{kap}_alpha{int(10 * a)}_rank{r}.json'
56 |             try:
57 |                 with open(metrics_path, 'r') as f:
58 |                     metrics = json.load(f)
59 |                 start_i = max([int(i) for i in metrics.keys()]) + 1
60 |             except:
61 |                 metrics = dict()
62 |                 start_i = 0
63 |             print(f'Sampling {kap}, {a}, {r}')
64 |             for s in range(start_i, start_i + samples):
65 |                 print(s)
66 |                 metrics[s] = dict()
67 |                 Y, X_star = data_generator(ranks=(r, r, r), shape=shape, alpha=a, kappa=kap)
68 |                 order = len(Y.shape)
69 |                 objective_wrapper = lambda trial: objective(trial, Y, r)
70 |                 study = optuna.create_study()
71 |                 study.optimize(objective_wrapper, n_trials=n_trials, gc_after_trial=True)
72 | 
73 | 
74 |                 metrics[s]['eta'] = study.best_params['eta']
75 |                 metrics[s]['z0'] = study.best_params['z0']
76 |                 metrics[s]['z1'] = study.best_params['z1']
77 |                 metrics[s]['decay'] = study.best_params['decay']
78 | 
79 |                 X_t, S_t = rpca(Y, [r, r, r], metrics[s]['z0'], metrics[s]['z1'], metrics[s]['eta'], metrics[s]['decay'], T, epsilon, device)
80 |                 metrics[s]['loss'] = l1_loss(Y, X_t).item()
81 |                 metrics[s]['X_loss'] = X_loss(X_star, X_t).item()
82 |                 metrics[s]['reconstruction_loss'] = reconstruction_loss(Y, X_t, S_t).item()
83 | 
84 |                 with open(metrics_path, 'w') as fp:
85 |                     json.dump(metrics, fp)
86 | 
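To make the pieces above concrete, here is a minimal, untested sketch of plugging hyperparameters directly into `rpca`. The threshold, step size, and decay values are illustrative guesses inside the ranges searched by the Optuna study above, not outputs of that study:

```python
import torch
import tensorly as tl

from rpca import rpca
from training_functions import generate_problem

tl.set_backend('pytorch')
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Rank-(10, 10, 10) ground truth, 20% sparse corruptions, condition number 5.
Y, X_star = generate_problem(ranks=(10, 10, 10), shape=[100, 100, 100],
                             alpha=0.2, kappa=5, device=device)

# Illustrative hyperparameters; in practice these would come from the search.
X, S = rpca(Y, ranks=[10, 10, 10], z0=0.02, z1=0.02, eta=0.5, decay=0.85,
            T=100, epsilon=1e-7, device=device)

print('relative X error:', ((X - X_star).norm()**2 / X_star.norm()**2).item())
```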
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Learned Tensor RPCA
2 | 
3 | This repository implements the original ScaledGD algorithm presented in [Fast and Provable Tensor Robust Principal Component Analysis via Scaled Gradient Descent](https://arxiv.org/abs/2206.09109) \[2\], along with the algorithms and experiments described in [Deep Unfolded Tensor Robust PCA with Self-supervised Learning](https://arxiv.org/abs/2212.11346) \[1\].
4 | The `src` directory contains all the code, while the `outputs` directory contains subdirectories that store the outputs of the experiments.
5 | Example outputs can be found in `outputs`.
6 | More details on the organization of the `src` directory are given below.
7 | For details on citing this repository, please scroll to the bottom of this page.
8 | 
9 | ### Tensor RPCA
10 | 
11 | `rpca.py` contains the standard tensor RPCA algorithm from \[2\].
12 | This can be used out of the box, independent of learning.
13 | 
14 | ### Model
15 | 
16 | Our deep unfolded RPCA model can be found in `model.py`.
17 | 
18 | ### Helper functions
19 | 
20 | Some functions related to tensor generation and training are shared across multiple files.
21 | These are placed in `training_functions.py`.
22 | Part of this file is adapted from \[3\].
23 | 
24 | ### Synthetic Experiments
25 | 
26 | Synthetic experiments are split into two files: `synthetic_bayesian_train.py` for the baseline and `synthetic_train.py` for the learned methods via our model.
27 | Within each file, one can customize the scenario to run tests on.
28 | For instance, one could manipulate the ranks, tensor size, corruption sparsity, hyperparameters, etc.
29 | 
30 | ### Background Subtraction
31 | 
32 | Code for background subtraction can be found in `background_separation_train.py`.
33 | Hyperparameters and processing can be adjusted within the file.
34 | In order to run this file, the real videos dataset must be downloaded from http://backgroundmodelschallenge.eu
35 | 
36 | 
37 | \[1\] Dong, H., Shah, M., Donegan, S., & Chi, Y. (2023). Deep Unfolded Tensor Robust PCA with Self-supervised Learning. International Conference on Acoustics, Speech, and Signal Processing (ICASSP).
38 | 
39 | \[2\] Dong, H., Tong, T., Ma, C., & Chi, Y. (2022). Fast and Provable Tensor Robust Principal Component Analysis via Scaled Gradient Descent. arXiv preprint arXiv:2206.09109.
40 | 
41 | \[3\] Cai, H., Liu, J., & Yin, W. (2021). Learned Robust PCA: A Scalable Deep Unfolding Approach for High-Dimensional Outlier Detection. Advances in Neural Information Processing Systems, 34, 16977-16989.
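42 | 
43 | ### Quick Example
44 | 
45 | As a minimal, untested sketch of running the deep unfolded model end to end: the shape, ranks, input tensor, and initialization values below are illustrative assumptions only, and `model.py` maps the raw initializations through softplus/sigmoid to obtain the actual RPCA hyperparameters.
46 | 
47 |     import torch
48 |     import tensorly as tl
49 |     from model import TensorRPCANet
50 | 
51 |     tl.set_backend('pytorch')
52 |     device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
53 |     model = TensorRPCANet(z0_init=0, z1_init=0, eta_init=0.1, decay_init=0.8, device=device)
54 |     Y = torch.randn(100, 100, 100, device=device)  # replace with a corrupted data tensor
55 |     X, S = model(Y, ranks=[10, 10, 10], num_l=100)  # low rank estimate X, sparse estimate S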
56 | 
57 | 
58 | # How to Cite
59 | 
60 | If you use or build upon this work, please cite the [paper](https://arxiv.org/abs/2206.09109) in which the original ScaledGD tensor RPCA algorithm was introduced:
61 | 
62 |     @article{dong2022fast,
63 |       title={Fast and provable tensor robust principal component analysis via scaled gradient descent},
64 |       author={Dong, Harry and Tong, Tian and Ma, Cong and Chi, Yuejie},
65 |       journal={arXiv preprint arXiv:2206.09109},
66 |       year={2022}
67 |     }
68 | 
69 | If you also use or build upon the learnable version of the algorithm, please also cite the following [paper](https://arxiv.org/abs/2212.11346):
70 | 
71 |     @article{dong2022deep,
72 |       title={Deep Unfolded Tensor Robust PCA with Self-supervised Learning},
73 |       author={Dong, Harry and Shah, Megna and Donegan, Sean and Chi, Yuejie},
74 |       journal={arXiv preprint arXiv:2212.11346},
75 |       year={2022}
76 |     }
77 | 

--------------------------------------------------------------------------------
/src/model.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from torch import nn
3 | import torch.nn.functional as F
4 | 
5 | from torch.autograd import Variable
6 | from tensorly import tucker_to_tensor, tucker_to_unfolded, unfold
7 | from tensorly.decomposition import tucker
8 | 
9 | 
10 | class TensorRPCANet(nn.Module):
11 |     '''
12 |     Recurrent neural network for tensor RPCA.
13 | 
14 |     Parameters:
15 |         z0_init: initial parameter value for z0--note that this value is passed through a softplus to get the actual z0 value
16 |         z1_init: initial parameter value for z1--note that this value is passed through a softplus to get the actual z1 value
17 |         eta_init: initial parameter value for the step size--note that this value is passed through a softplus to get the actual step size
18 |         decay_init: initial parameter value for the decay rate--note that this value is passed through a sigmoid to get the actual decay rate
19 |         device: device to load this model on
20 |         softplus_factor: a factor to multiply the softplus outputs for z0 and z1 by to make the gradients nicer
21 |         skip: modes to skip iterative updates for
22 |         datatype: parameter datatypes
23 |     '''
24 |     def __init__(self, z0_init, z1_init, eta_init, decay_init, device, softplus_factor=0.01, skip=[], datatype=torch.float32):
25 |         super().__init__()
26 |         self.device = device
27 |         self.z0 = nn.Parameter(Variable(torch.tensor(z0_init, dtype=datatype, device=device), requires_grad=True))
28 |         self.z1 = nn.Parameter(Variable(torch.tensor(z1_init, dtype=datatype, device=device), requires_grad=True))
29 |         self.eta = nn.Parameter(Variable(torch.tensor(eta_init, dtype=datatype, device=device), requires_grad=True))
30 |         self.decay = nn.Parameter(Variable(torch.tensor(decay_init, dtype=datatype, device=device), requires_grad=True))
31 | 
32 |         self.softplus_factor = softplus_factor
33 |         self.skip = skip
34 |         self = self.to(device)
35 | 
36 |     def thre(self, inputs, threshold):
37 |         '''
38 |         Soft thresholding.
39 | 
40 |         Args:
41 |             inputs: input tensor
42 |             threshold: threshold value >= 0
43 |         Output:
44 |             out: soft thresholding outputs
45 |         '''
46 |         out = torch.sign(inputs) * torch.relu(torch.abs(inputs) - threshold)
47 |         return out
48 | 
49 |     def forward(self, Y, ranks, num_l, epsilon=1e-9):
50 |         '''
51 |         Forward method of the network.
52 | 
53 |         Args:
54 |             Y: input tensor
55 |             ranks: multilinear rank of the low rank tensor
56 |             num_l: number of iterative updates of ScaledGD, i.e. the number of recurrent layers
57 |             epsilon: bias term for matrix inverse stability
58 |         Output:
59 |             X: low rank tensor from the last iteration
60 |             S: sparse tensor from the last iteration
61 |         '''
62 | 
63 |         z0 = self.softplus_factor * F.softplus(self.z0)
64 |         z1 = self.softplus_factor * F.softplus(self.z1)
65 |         eta = F.softplus(self.eta)
66 |         decay = torch.sigmoid(self.decay)
67 | 
68 |         ## Initialization
69 |         G_t, factors_t = tucker(Y - self.thre(Y, z0), rank=ranks)
70 |         order = len(ranks)
71 | 
72 |         ATA_inverses_skipped = dict()
73 |         ATA_skipped = dict()
74 |         for k in self.skip:
75 |             ATA_skipped[k] = factors_t[k].T @ factors_t[k]
76 |             ATA_inverses_skipped[k] = torch.linalg.inv(ATA_skipped[k])
77 | 
78 |         ## Main Loop in ScaledGD RPCA
79 |         for t in range(num_l):
80 |             X_t = tucker_to_tensor((G_t, factors_t))
81 |             S_t1 = self.thre(Y - X_t, z1 * (decay**t))
82 |             factors_t1 = []
83 |             D = S_t1 - Y
84 |             ATA_t = []
85 |             for k in range(order):
86 |                 if k in self.skip:
87 |                     ATA_t.append(ATA_skipped[k])
88 |                 else:
89 |                     ATA_t.append(factors_t[k].T @ factors_t[k])
90 | 
91 |             for k in range(order):
92 |                 if k in self.skip:
93 |                     factors_t1.append(factors_t[k])
94 |                     continue
95 | 
96 |                 A_t = factors_t[k]
97 |                 factors_t_copy = factors_t.copy()
98 |                 factors_t_copy[k] = torch.eye(A_t.shape[1]).to(self.device)
99 |                 A_breve_t = tucker_to_unfolded((G_t, factors_t_copy), k).T
100 | 
101 |                 ATA_t_copy = ATA_t.copy()
102 |                 ATA_t_copy[k] = torch.eye(A_t.shape[1]).to(self.device)
103 |                 AbTAb_t = tucker_to_unfolded((G_t, ATA_t_copy), k) @ unfold(G_t, k).T
104 | 
105 |                 ker = torch.linalg.inv(AbTAb_t + epsilon * torch.eye(A_breve_t.shape[1]).to(self.device))
106 |                 A_t1 = (1 - eta) * A_t - eta * unfold(D, k) @ A_breve_t @ ker
107 |                 factors_t1.append(A_t1)
108 |             G_factors_t = []
109 |             for k in range(order):
110 |                 if k in self.skip:
111 |                     G_factors_t.append(ATA_inverses_skipped[k] @ factors_t[k].T)
112 |                 else:
113 |                     G_factors_t.append(torch.linalg.inv(ATA_t[k] + epsilon * torch.eye(factors_t[k].shape[1]).to(self.device)) @ factors_t[k].T)
114 |             G_t1 = G_t - eta * tucker_to_tensor((X_t + D, G_factors_t))
115 |             factors_t = factors_t1
116 |             G_t = G_t1
117 | 
118 |         return tucker_to_tensor((G_t, factors_t)), S_t1
119 | 
120 | 

--------------------------------------------------------------------------------
/src/synthetic_train.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import numpy as np
3 | import random
4 | import tensorly as tl
5 | import json
6 | 
7 | from training_functions import synthetic_supervised_train, single_self_supervised_train, generate_problem
8 | from model import TensorRPCANet
9 | 
10 | torch.manual_seed(0)
11 | np.random.seed(0)
12 | random.seed(0)
13 | 
14 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
15 | tl.set_backend('pytorch')
16 | out_dir = '../outputs/synthetic'
17 | 
18 | supervised_config = dict()
19 | 
20 | supervised_config['shape'] = [100, 100, 100] # shape of tensors
21 | supervised_config['T'] = 100 # number of iterative updates of RPCA
22 | supervised_config['softplus_factor'] = 0.01 # factor to multiply z0 and z1 by to make gradients nicer
23 | 
24 | # Network initialization
25 | # NOTE: These are not the actual RPCA hyperparameter values. These values will be saved in the json files for convenience instead of the true RPCA values. See model.py for more information.
26 | supervised_config["z0_init"] = 0
27 | supervised_config["z1_init"] = 0
28 | supervised_config["eta_init"] = 0.1
29 | supervised_config["decay_init"] = 0.8
30 | 
31 | # Logging
32 | supervised_config['log_interval'] = 100
33 | 
34 | # Optimization parameters
35 | supervised_config["iterations"] = 1000 # Number of backprops. Each batch contains 1 tensor
36 | supervised_config['lr'] = 0.05
37 | supervised_config['grad_clip'] = 100
38 | supervised_config['scheduler_steps'] = 100
39 | supervised_config['scheduler_decay'] = 0.7
40 | 
41 | # matrix inverse bias term
42 | supervised_config['eps'] = 1e-7
43 | 
44 | # Fine tuning configs are very similar to supervised learning except for a few changes:
45 | self_supervised_config = supervised_config.copy()
46 | self_supervised_config["iterations"] = 500
47 | self_supervised_config['log_interval'] = 100
48 | self_supervised_config['lr'] = 0.002
49 | self_supervised_config['scheduler_steps'] = 100
50 | self_supervised_config['scheduler_decay'] = 0.7
51 | 
52 | supervised_train = True # train a supervised model? Otherwise, a pretrained model will be loaded
53 | eval_samples = 20 # number of samples to fine tune on
54 | 
55 | # Grid of scenarios to experiment with
56 | kappas = [5]
57 | alphas = [0.0, 0.1, 0.2]
58 | ranks = [10, 20, 30]
59 | 
60 | 
61 | data_generator = lambda ranks, shape, alpha, kappa: generate_problem(ranks, shape, alpha, kappa, device)
62 | 
63 | if supervised_train:
64 |     for k in kappas:
65 |         supervised_config['kappa'] = k
66 |         for a in alphas:
67 |             supervised_config['alpha'] = a
68 |             for r in ranks:
69 |                 supervised_config['ranks'] = [r, r, r]
70 |                 metrics_path = f'{out_dir}/supervised_kappa{k}_alpha{int(10 * a)}_rank{r}.json'
71 |                 model = TensorRPCANet(supervised_config['z0_init'], supervised_config['z1_init'], supervised_config['eta_init'], supervised_config['decay_init'], device, supervised_config['softplus_factor'])
72 |                 synthetic_supervised_train(model, data_generator, supervised_config, collect_metrics=True, metrics_path=metrics_path)
73 | 
74 | 
75 | def choose_params(metrics):
76 |     '''
77 |     Extract z0, z1, eta, and decay from metrics.
78 |     '''
79 |     z0 = metrics['z0_traj'][-1]
80 |     z1 = metrics['z1_traj'][-1]
81 |     eta = metrics['eta_traj'][-1]
82 |     decay = metrics['decay_traj'][-1]
83 |     return z0, z1, eta, decay
84 | 
85 | 
86 | for k in kappas:
87 |     self_supervised_config['kappa'] = k
88 |     for a in alphas:
89 |         self_supervised_config['alpha'] = a
90 |         for r in ranks:
91 |             self_supervised_config['ranks'] = [r, r, r]
92 | 
93 |             # Loading the supervised model
94 |             supervised_metrics_path = f'{out_dir}/supervised_kappa{k}_alpha{int(10 * a)}_rank{r}.json'
95 |             with open(supervised_metrics_path, 'r') as f:
96 |                 metrics = json.load(f)
97 |             z0, z1, eta, decay = choose_params(metrics)
98 | 
99 |             # Self-supervised fine tuning
100 |             self_supervised_metrics_path = f'{out_dir}/self_supervised_kappa{k}_alpha{int(10 * a)}_rank{r}.json'
101 |             self_supervised_metrics = dict()
102 |             for s in range(eval_samples):
103 |                 model = TensorRPCANet(z0, z1, eta, decay, device, self_supervised_config['softplus_factor'])
104 |                 Y, X_star = data_generator(ranks=[r, r, r], shape=self_supervised_config['shape'], alpha=a, kappa=k)
105 |                 self_supervised_metrics[s] = single_self_supervised_train(model, Y, self_supervised_config, collect_metrics=True, X_star=X_star, metrics_path=None)
106 |                 with open(self_supervised_metrics_path, 'w') as fp:
107 |                     json.dump(self_supervised_metrics, fp) # note: the first entries are the results from purely supervised learning

--------------------------------------------------------------------------------
/src/training_functions.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import math
3 | import numpy as np
4 | import tensorly as tl
5 | from tensorly import tucker_to_tensor
6 | from tensorly.random import random_tucker
7 | import json
8 | 
9 | 
10 | tl.set_backend('pytorch')
11 | 
12 | def generate_problem(ranks, shape, alpha, kappa, device):
13 |     '''
14 |     Randomly generate a Y and X_star pair.
15 | 
16 |     Args:
17 |         ranks: iterable of the desired low multilinear rank of X_star
18 |         shape: iterable of the desired tensor shape
19 |         alpha: corruption fraction between 0 and 1
20 |         kappa: condition number of X_star
21 |         device: device to create tensors on
22 |     Output:
23 |         Y: X_star + sparse corruptions
24 |         X_star: low rank tensor
25 |     '''
26 |     core = torch.zeros(ranks, device=device)
27 |     _, factors = random_tucker(shape=tuple(shape), rank=ranks, full=False, orthogonal=True)
28 |     for k in range(len(factors)):
29 |         factors[k] = factors[k].to(device)
30 |     sing_val_decay = (1/kappa)**(1/(ranks[0] - 1))
31 |     for i in range(ranks[0]):
32 |         core[i, i, i] = sing_val_decay ** i
33 |     X_star = tucker_to_tensor((core, factors))
34 |     Y = corrupt_X(X_star, alpha, device)
35 |     return Y, X_star
36 | 
37 | def corrupt_X(X_star, alpha, device, s_range=None):
38 |     '''
39 |     Sparsely corrupt a tensor.
40 |     Adapted from https://github.com/caesarcai/LRPCA
41 |     HanQin Cai, Jialin Liu, and Wotao Yin. Learned Robust PCA: A Scalable Deep Unfolding Approach for High-Dimensional Outlier Detection. In Advances in Neural Information Processing Systems, 34: 16977-16989, 2021.
42 | 
43 |     Args:
44 |         X_star: tensor to be corrupted
45 |         alpha: corruption fraction between 0 and 1
46 |         device: device to create tensors on
47 |         s_range: corruptions will be randomly sampled from [-s_range, +s_range]
48 |     Output:
49 |         Y: X_star + sparse corruptions
50 |             (unlike generate_problem, the uncorrupted X_star is not returned)
51 |     '''
52 |     if not s_range:
53 |         s_range = torch.mean(torch.abs(X_star))
54 |     num_entries = np.prod(X_star.shape)
55 |     idx = torch.randperm(num_entries, device=device)
56 |     idx = idx[:math.floor(alpha * num_entries)]
57 |     S_star = torch.rand(len(idx), dtype=torch.float32, device=device)
58 |     S_star = s_range * (2.0 * S_star - 1.0)
59 |     Y = X_star.reshape(-1)
60 |     Y[idx] = Y[idx] + S_star
61 |     Y = Y.reshape(tuple(X_star.shape))
62 |     return Y
63 | 
64 | 
65 | def synthetic_supervised_train(model, data_generator, config, collect_metrics=False, metrics_path=None):
66 |     '''
67 |     Train a model using supervised learning with synthetic streaming data.
68 | 
69 |     Args:
70 |         model: pytorch model to train
71 |         data_generator: a function that generates a (Y, X_star) pair when given ranks, shape, alpha, and kappa
72 |         config: a dictionary of various hyperparameters
73 |         collect_metrics: whether to collect data while training
74 |         metrics_path: location to store metrics as a json file
75 |     Output:
76 |         metrics: only returned if collect_metrics is True
77 |     '''
78 |     model.train()
79 |     optimizer = torch.optim.Adam([
80 |         {'params': model.z0, 'lr': config['lr']},
81 |         {'params': model.z1, 'lr': config['lr']},
82 |         {'params': model.eta, 'lr': config['lr']},
83 |         {'params': model.decay, 'lr': config['lr']},
84 |     ],)
85 |     scheduler = torch.optim.lr_scheduler.StepLR(optimizer, config['scheduler_steps'], gamma=config['scheduler_decay'])
86 | 
87 |     if collect_metrics:
88 |         metrics = dict()
89 |         metrics['loss_traj'] = []
90 | 
91 |         metrics['z0_traj'] = [model.z0.item()]
92 |         metrics['z1_traj'] = [model.z1.item()]
93 |         metrics['eta_traj'] = [model.eta.item()]
94 |         metrics['decay_traj'] = [model.decay.item()]
95 | 
96 |     print("Supervised training starting.")
97 |     for b in range(config['iterations']):
98 |         optimizer.zero_grad()
99 |         Y, X_star = data_generator(ranks=config['ranks'], shape=config['shape'], alpha=config['alpha'], kappa=config['kappa'])
100 |         X, S = model(Y, config["ranks"], config["T"], epsilon=config["eps"])
101 | 
102 |         loss = ((X_star-X).norm() / X_star.norm())**2
103 |         loss.backward()
104 |         process_grads(model, config)
105 | 
106 |         optimizer.step()
107 |         scheduler.step()
108 | 
109 |         if collect_metrics:
110 |             metrics['loss_traj'].append(loss.item())
111 |             metrics['z0_traj'].append(model.z0.item())
112 |             metrics['z1_traj'].append(model.z1.item())
113 |             metrics['eta_traj'].append(model.eta.item())
114 |             metrics['decay_traj'].append(model.decay.item())
115 | 
116 |             if b % config['log_interval'] == config['log_interval'] - 1:
117 |                 if metrics_path:
118 |                     with open(metrics_path, 'w') as fp:
119 |                         json.dump(metrics, fp)
120 |                 print("Mean loss: ", np.mean(metrics['loss_traj'][-config['log_interval']:]))
121 |     print("Supervised training complete.")
122 |     if collect_metrics:
123 |         return metrics
124 | 
125 | def single_self_supervised_train(model, Y, config, collect_metrics=False, X_star=None, metrics_path=None):
126 |     '''
127 |     Fine tune a model on one tensor using self-supervised learning.
128 | 
129 |     Args:
130 |         model: pytorch model to train
131 |         Y: input tensor
132 |         config: a dictionary of various hyperparameters
133 |         collect_metrics: whether to collect data while training
134 |         X_star: ground truth low rank tensor
135 |         metrics_path: location to store metrics as a json file
136 |     Output:
137 |         metrics: only returned if collect_metrics is True
138 |     '''
139 |     model.train()
140 |     optimizer = torch.optim.Adam([
141 |         {'params': model.z0, 'lr': config['lr']},
142 |         {'params': model.z1, 'lr': config['lr']},
143 |         {'params': model.eta, 'lr': config['lr']},
144 |         {'params': model.decay, 'lr': config['lr']},
145 |     ],)
146 |     scheduler = torch.optim.lr_scheduler.StepLR(optimizer, config['scheduler_steps'], gamma=config['scheduler_decay'])
147 | 
148 |     if collect_metrics:
149 |         metrics = dict()
150 |         metrics['loss_traj'] = []
151 |         metrics['reconstruction_loss_traj'] = []
152 |         if X_star is not None:
153 |             metrics['X_loss_traj'] = []
154 | 
155 |         metrics['z0_traj'] = [model.z0.item()]
156 |         metrics['z1_traj'] = [model.z1.item()]
157 |         metrics['eta_traj'] = [model.eta.item()]
158 |         metrics['decay_traj'] = [model.decay.item()]
159 | 
160 |     print("Self-supervised training starting.")
161 |     for b in range(config['iterations']):
162 |         optimizer.zero_grad()
163 | 
164 |         X, S = model(Y, config["ranks"], config["T"], epsilon=config["eps"])
165 | 
166 |         loss = (Y-X).norm(p=1) / Y.norm()**2
167 |         loss.backward()
168 |         process_grads(model, config)
169 | 
170 |         optimizer.step()
171 |         scheduler.step()
172 | 
173 |         if collect_metrics:
174 |             metrics['loss_traj'].append(loss.item())
175 |             metrics['reconstruction_loss_traj'].append(((Y - X - S).norm()**2 / Y.norm()**2).item())
176 |             if X_star is not None:
177 |                 metrics['X_loss_traj'].append(((X - X_star).norm()**2 / X_star.norm()**2).item())
178 |             metrics['z0_traj'].append(model.z0.item())
179 |             metrics['z1_traj'].append(model.z1.item())
180 |             metrics['eta_traj'].append(model.eta.item())
181 |             metrics['decay_traj'].append(model.decay.item())
182 | 
183 |             if b % config['log_interval'] == config['log_interval'] - 1:
184 |                 if metrics_path:
185 |                     with open(metrics_path, 'w') as fp:
186 |                         json.dump(metrics, fp)
187 |                 print("Mean loss: ", np.mean(metrics['loss_traj'][-config['log_interval']:]))
188 |                 print("Mean reconstruction loss: ", np.mean(metrics['reconstruction_loss_traj'][-config['log_interval']:]))
189 |                 if X_star is not None:
190 |                     print("Mean X loss: ", np.mean(metrics['X_loss_traj'][-config['log_interval']:]))
191 | 
192 |     model.eval()
193 |     with torch.no_grad():
194 |         X, S = model(Y, config["ranks"], config["T"], epsilon=config["eps"])
195 |         loss = (Y-X).norm(p=1) / Y.norm()**2
196 |         metrics['loss_traj'].append(loss.item())
197 |         metrics['reconstruction_loss_traj'].append(((Y - X - S).norm()**2 / Y.norm()**2).item())
198 |         if X_star is not None:
199 |             metrics['X_loss_traj'].append(((X - X_star).norm()**2 / X_star.norm()**2).item())
200 | 
201 |     print("Self-supervised training complete.")
202 |     if collect_metrics:
203 |         return metrics
204 | 
205 | def process_grads(model, config):
206 |     '''
207 |     Gradient processing to prevent errors/divergence.
208 | 209 | Args: 210 | model: pytorch model to train 211 | config: a dictionary of various hyperparameters 212 | ''' 213 | if torch.isnan(model.z0.grad): 214 | model.z0.grad = torch.zeros_like(model.z0.grad) 215 | if torch.isnan(model.z1.grad): 216 | model.z1.grad = torch.zeros_like(model.z0.grad) 217 | if torch.isnan(model.eta.grad): 218 | model.eta.grad = torch.zeros_like(model.z0.grad) 219 | if torch.isnan(model.decay.grad): 220 | model.decay.grad = torch.zeros_like(model.z0.grad) 221 | if config['grad_clip']: 222 | torch.nn.utils.clip_grad_norm_(model.parameters(), config['grad_clip'], norm_type='inf') 223 | 224 | -------------------------------------------------------------------------------- /outputs/synthetic/supervised_kappa5_alpha0_rank20.json: -------------------------------------------------------------------------------- 1 | {"loss_traj": [3.831803319087612e-09, 4.101084805085975e-09, 3.644421209259008e-09, 3.4847200680587775e-09, 3.365401290977843e-09, 3.6755951615674576e-09, 3.2032336783061055e-09, 2.8275486396722727e-09, 2.7553757053766503e-09, 2.3385422487365304e-09, 2.3040633845283764e-09, 2.5816009330270617e-09, 1.9036920928527934e-09, 1.9976595933002272e-09, 2.0669057576583327e-09, 2.0710386738898023e-09, 1.892156431537728e-09, 1.9164687614647846e-09, 1.6070356156916432e-09, 1.5564519673105792e-09, 1.597971421851696e-09, 1.317859266158905e-09, 1.5185431800901483e-09, 1.4196013253808815e-09, 1.2119792947018482e-09, 1.3262164699767709e-09, 1.082173239908002e-09, 1.0731915356387844e-09, 9.021869917802405e-10, 9.135953105143813e-10, 1.034880292571927e-09, 9.375156206914426e-10, 8.257407535516847e-10, 7.639482935140052e-10, 7.081922825946663e-10, 8.039255372516152e-10, 6.5633443124824e-10, 5.668997493657457e-10, 6.313309319772031e-10, 5.473274611311751e-10, 4.848594858941624e-10, 4.5283407579255197e-10, 5.650189760508795e-10, 4.864206815113903e-10, 4.1121450689018957e-10, 3.602045328676695e-10, 4.4179190861193263e-10, 3.4796587833341164e-10, 3.745966870027928e-10, 3.1301383707216246e-10, 2.845277624619058e-10, 2.400869503205172e-10, 2.452451020040769e-10, 3.053634289873486e-10, 2.195499892998498e-10, 2.2631208018708548e-10, 1.5841783440606605e-10, 1.986595610237174e-10, 1.6442931738414046e-10, 1.474768640541413e-10, 1.667496002388802e-10, 1.4913216495049397e-10, 1.1983999903542042e-10, 1.2758806511303789e-10, 8.940301970961073e-11, 1.1158753088214013e-10, 7.472111818174199e-11, 7.88098197812559e-11, 6.708032740387893e-11, 6.472135327673101e-11, 8.695782982570677e-11, 4.1274088313780766e-11, 5.0293484654684306e-11, 4.351310978201184e-11, 3.565732778487707e-11, 4.8667441604477446e-11, 3.501493192614724e-11, 4.893088711988014e-11, 3.798225275963851e-11, 3.459625988466719e-11, 3.6998223928996765e-11, 2.427806046478853e-11, 2.6213914719463993e-11, 2.0263801053799213e-11, 2.3380582192533694e-11, 1.7989346123847838e-11, 1.6061912216924767e-11, 2.1056734481050832e-11, 2.2883792083483456e-11, 1.3769241823113898e-11, 1.4038245392533621e-11, 1.1611932769495237e-11, 1.1460268498075799e-11, 1.1818060419166443e-11, 9.02708463534907e-12, 1.0626485802411878e-11, 9.97874786273778e-12, 8.328488072806284e-12, 1.0094311671260403e-11, 8.455240847748957e-12], "z0_traj": [0.0, -0.004022284876555204, -0.005696067586541176, -0.006484378594905138, -0.006666949018836021, -0.00664807902649045, -0.006548484321683645, -0.006430340930819511, -0.006365636829286814, -0.006695335730910301, -0.006914020050317049, -0.007077197078615427, -0.00718201557174325, -0.007227831054478884, -0.007220401894301176, 
-0.007164674811065197, -0.007063759490847588, -0.006933741271495819, -0.00677288044244051, -0.006591802462935448, -0.006398218683898449, -0.006192674394696951, -0.005981211084872484, -0.005764005705714226, -0.005540225189179182, -0.005315674934536219, -0.0050889491103589535, -0.004863198846578598, -0.004639109596610069, -0.004422623664140701, -0.00420892471447587, -0.003996955696493387, -0.0037888651713728905, -0.0035837979521602392, -0.003382613882422447, -0.0031877662986516953, -0.0029966947622597218, -0.0028110905550420284, -0.002632398623973131, -0.002457430586218834, -0.0022872660774737597, -0.002125153783708811, -0.0019690918270498514, -0.0018180388724431396, -0.0016725016757845879, -0.0015329585876315832, -0.0014000163646414876, -0.0012706915149465203, -0.0011478060623630881, -0.0010291077196598053, -0.0009156024898402393, -0.0008068915922194719, -0.0007040697964839637, -0.0006053929100744426, -0.0005101396236568689, -0.00041937895002774894, -0.00033246964449062943, -0.0002505916927475482, -0.00017178512644022703, -9.687933197710663e-05, -2.588056850072462e-05, 4.1857652831822634e-05, 0.0001064409880200401, 0.00016723205044399947, 0.0002251498808618635, 0.0002794000320136547, 0.00033091430668719113, 0.0003791715716943145, 0.00042441452387720346, 0.0004668345209211111, 0.0005068194586783648, 0.0005449936143122613, 0.0005805421387776732, 0.0006140162004157901, 0.0006454551476053894, 0.0006747997249476612, 0.0007025692029856145, 0.0007284703897312284, 0.0007530627190135419, 0.000776144559495151, 0.0007979401852935553, 0.0008185139740817249, 0.0008377056219615042, 0.000855813967064023, 0.0008727586246095598, 0.0008886511204764247, 0.0009035183466039598, 0.0009174270089715719, 0.0009306080755777657, 0.0009431720827706158, 0.0009549785172566772, 0.0009660798241384327, 0.0009764850256033242, 0.000986213912256062, 0.0009953201515600085, 0.0010038443142548203, 0.0010118418140336871, 0.0010193552589043975, 0.0010263924486935139, 0.0010330833029001951, 0.0010393946431577206], "z1_traj": [0.0, -0.0002730180276557803, 0.0003121232148259878, 0.0011215198319405317, 0.0021178643219172955, 0.0032676171977072954, 0.004477144684642553, 0.0056953513994812965, 0.0069049750454723835, 0.008104695938527584, 0.009274331852793694, 0.01042630523443222, 0.011580380611121655, 0.012699460610747337, 0.013795451261103153, 0.014878476038575172, 0.015952421352267265, 0.017012279480695724, 0.018067050725221634, 0.01910463161766529, 0.020122092217206955, 0.021131524816155434, 0.02211832068860531, 0.023100588470697403, 0.024073153734207153, 0.025029441341757774, 0.02597888931632042, 0.02690606191754341, 0.027817776426672935, 0.02870364487171173, 0.029572436586022377, 0.03043409064412117, 0.03128740191459656, 0.03212197497487068, 0.03293843939900398, 0.0337357372045517, 0.03452582657337189, 0.03529787063598633, 0.036049503833055496, 0.03678864613175392, 0.037511393427848816, 0.0382145419716835, 0.038899440318346024, 0.03958311676979065, 0.040257591754198074, 0.040915556252002716, 0.041554320603609085, 0.04218888282775879, 0.04280728101730347, 0.04341548681259155, 0.044010479003190994, 0.04458969458937645, 0.045149900019168854, 0.0456962063908577, 0.046239860355854034, 0.046770330518484116, 0.0472903735935688, 0.04779007285833359, 0.04828036576509476, 0.04875658452510834, 0.049217529594898224, 0.04966970160603523, 0.05011148750782013, 0.05053902417421341, 0.05095591023564339, 0.051355574280023575, 0.05174615979194641, 0.05211954563856125, 0.05247972160577774, 0.05282559618353844, 0.05315832421183586, 0.05348554998636246, 
0.053795043379068375, 0.054091926664114, 0.05437566712498665, 0.05464578792452812, 0.05490870401263237, 0.055160172283649445, 0.05540749803185463, 0.05564683675765991, 0.05587794631719589, 0.05610295757651329, 0.056317299604415894, 0.05652337148785591, 0.056719765067100525, 0.056908804923295975, 0.057088837027549744, 0.05726005509495735, 0.05742613598704338, 0.05758855491876602, 0.05774303525686264, 0.05789094418287277, 0.05803147703409195, 0.05816541984677315, 0.058293819427490234, 0.05841536447405815, 0.058531999588012695, 0.058643706142902374, 0.0587502084672451, 0.05885331705212593, 0.05895228683948517], "eta_traj": [0.10000000149011612, 0.08359343558549881, 0.0662447065114975, 0.04904817044734955, 0.032019007951021194, 0.015355036593973637, -0.0013447565725073218, -0.017765497788786888, -0.033639684319496155, -0.048640716820955276, -0.06320139765739441, -0.0773378238081932, -0.0913272276520729, -0.10481040179729462, -0.11793018132448196, -0.13079549372196198, -0.14344026148319244, -0.15580083429813385, -0.16793155670166016, -0.17968980967998505, -0.1910906285047531, -0.20221975445747375, -0.21295124292373657, -0.2234405279159546, -0.23366747796535492, -0.24354416131973267, -0.25317108631134033, -0.2624453604221344, -0.2714017927646637, -0.27997517585754395, -0.28821486234664917, -0.29622647166252136, -0.3039809763431549, -0.3114423453807831, -0.31860437989234924, -0.3254741132259369, -0.33212774991989136, -0.338501513004303, -0.344577819108963, -0.350421279668808, -0.3560136556625366, -0.3613400459289551, -0.36640527844429016, -0.3713034689426422, -0.37600472569465637, -0.38048630952835083, -0.38473987579345703, -0.38883525133132935, -0.3927358388900757, -0.3964780867099762, -0.40003812313079834, -0.40341606736183167, -0.4066028892993927, -0.40962380170822144, -0.41252782940864563, -0.41527625918388367, -0.4178915321826935, -0.42034196853637695, -0.42267119884490967, -0.4248679280281067, -0.42693454027175903, -0.4288990795612335, -0.4307590425014496, -0.4325063228607178, -0.43415746092796326, -0.43569818139076233, -0.43714821338653564, -0.4384996294975281, -0.4397636353969574, -0.44094178080558777, -0.44204196333885193, -0.44307833909988403, -0.4440383315086365, -0.44493284821510315, -0.44576650857925415, -0.4465407729148865, -0.4472675621509552, -0.4479424059391022, -0.4485752284526825, -0.4491650462150574, -0.4497166574001312, -0.4502321779727936, -0.45071059465408325, -0.4511566460132599, -0.45156943798065186, -0.45195433497428894, -0.4523109793663025, -0.45264068245887756, -0.45294928550720215, -0.45323801040649414, -0.45350536704063416, -0.45375359058380127, -0.453983336687088, -0.45419633388519287, -0.45439496636390686, -0.4545793831348419, -0.45475101470947266, -0.4549097418785095, -0.45505696535110474, -0.4551948010921478, -0.4553232491016388], "decay_traj": [0.800000011920929, 0.8157761693000793, 0.8323699831962585, 0.8489499688148499, 0.8655334115028381, 0.8821569681167603, 0.8990739583969116, 0.9159176349639893, 0.9325287938117981, 0.9489418864250183, 0.9649960994720459, 0.980783998966217, 0.9965913891792297, 1.0119904279708862, 1.0271248817443848, 1.0421206951141357, 1.0570207834243774, 1.071763515472412, 1.0864572525024414, 1.1009613275527954, 1.1152384281158447, 1.129436731338501, 1.143371343612671, 1.157273769378662, 1.171079158782959, 1.1846989393234253, 1.1982548236846924, 1.2115460634231567, 1.2246606349945068, 1.2374576330184937, 1.250052571296692, 1.2625797986984253, 1.275019884109497, 1.2872315645217896, 1.299221158027649, 1.3109720945358276, 1.3226497173309326, 
1.3340998888015747, 1.3452866077423096, 1.3563224077224731, 1.3671493530273438, 1.3777198791503906, 1.38805091381073, 1.3983864784240723, 1.4086111783981323, 1.4186171293258667, 1.4283626079559326, 1.4380671977996826, 1.4475538730621338, 1.456910252571106, 1.4660885334014893, 1.475049376487732, 1.483742117881775, 1.4922418594360352, 1.5007177591323853, 1.5090079307556152, 1.5171539783477783, 1.5250024795532227, 1.5327203273773193, 1.540234923362732, 1.5475260019302368, 1.5546925067901611, 1.5617090463638306, 1.5685139894485474, 1.575162649154663, 1.5815509557724, 1.5878052711486816, 1.5937970876693726, 1.5995876789093018, 1.6051589250564575, 1.6105281114578247, 1.6158158779144287, 1.6208267211914062, 1.6256417036056519, 1.630251169204712, 1.6346465349197388, 1.638929843902588, 1.6430326700210571, 1.647071123123169, 1.6509839296340942, 1.65476655960083, 1.6584527492523193, 1.6619689464569092, 1.6653531789779663, 1.6685820817947388, 1.6716930866241455, 1.674659252166748, 1.677483320236206, 1.6802245378494263, 1.6829067468643188, 1.6854604482650757, 1.6879075765609741, 1.690234899520874, 1.6924550533294678, 1.694584846496582, 1.6966028213500977, 1.6985405683517456, 1.7003978490829468, 1.7021698951721191, 1.7038862705230713, 1.705534815788269]} -------------------------------------------------------------------------------- /src/background_separation_train.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import random 3 | import os 4 | import tensorly as tl 5 | import json 6 | import torch 7 | import skvideo.io 8 | from training_functions import process_grads 9 | from model import TensorRPCANet 10 | from PIL import Image 11 | import matplotlib.pyplot as plt 12 | 13 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") 14 | tl.set_backend('pytorch') 15 | 16 | data_dir = 'data/bmc/' # data location 17 | output_path = 'outputs/background_separation/' 18 | 19 | # Partition the 9 videos into training and validation 20 | train_vids = [1, 2, 3, 4, 5, 6] 21 | val_vids = [7, 8, 9] 22 | 23 | 24 | skip = [1, 2, 3] # skip these dims 25 | max_frames = 100 # Max number of frames per video 26 | downsample = 2 # downsampling factor along the H and W dims of the video 27 | frame_rank = 1 # low rank 28 | 29 | frame_to_save = 42 # create a snapshot of this frame 30 | 31 | config = dict() 32 | 33 | # config['training_type'] = 'supervised' # supervised learning 34 | config['training_type'] = 'self_supervised_cold' # self-supervised learning from scratch 35 | 36 | config["epochs"] = 15 37 | config['fine_tuning_epochs'] = 0 38 | 39 | config['T'] = 150 # number of iterative updates of RPCA 40 | 41 | # Network Initialization 42 | config["z0_init"] = 0 43 | config["z1_init"] = 0 44 | config["eta_init"] = 0.1 45 | config["decay_init"] = 0.8 46 | 47 | config['log_interval'] = 5 48 | 49 | # Optimization Parameters 50 | config['lr'] = 0.05 51 | config['softplus_factor'] = 0.01 52 | config['grad_clip'] = 100 53 | config['scheduler_decay'] = 0.5 54 | config['patience'] = 2 55 | 56 | config['eps'] = 1e-7 57 | 58 | metrics_path = f'{output_path}{config["training_type"]}_metrics.json' 59 | 60 | Ys = [] 61 | S_stars = [] 62 | masks = [] 63 | for i in range(1, 10): 64 | vid_data_dir = f'{data_dir}Video_00{i}/img/' 65 | vid_output_path = f'{output_path}Video_00{i}/' 66 | video = [] 67 | video_mask = [] 68 | for filename in sorted(os.listdir(vid_data_dir)): 69 | if filename[-3:] != "bmp": 70 | continue 71 | f = os.path.join(vid_data_dir, filename) 72 
| image = Image.open(f)
73 |         if "Img" in filename:
74 |             video.append(np.array(image))
75 |             if f"Mask{filename[3:]}" not in os.listdir(vid_data_dir):
76 |                 print(filename)
77 | 
78 |         if "Mask" in filename:
79 |             video_mask.append(np.array(image))
80 |     Y = torch.Tensor(np.array(video))[:min(max_frames, len(video)), ::downsample, ::downsample] / 255
81 |     video_mask = torch.Tensor(np.array(video_mask))[:min(max_frames, len(video)), ::downsample, ::downsample].unsqueeze(-1) / 255
82 |     S_star = Y * video_mask
83 |     X_star = Y * (1-video_mask)
84 |     skvideo.io.vwrite(f'{vid_output_path}Y.mp4', Y.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"})
85 |     skvideo.io.vwrite(f'{vid_output_path}X_star.mp4', X_star.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"})
86 |     skvideo.io.vwrite(f'{vid_output_path}S_star.mp4', S_star.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"})
87 |     Ys.append(Y)
88 |     masks.append(video_mask)
89 |     S_stars.append(S_star)
90 | 
91 | 
92 | model = TensorRPCANet(config["z0_init"], config["z1_init"], config["eta_init"], config["decay_init"], device, config['softplus_factor'], skip=skip)
93 | 
94 | optimizer = torch.optim.Adam([
95 |     {'params': model.z0, 'lr': config['lr']},
96 |     {'params': model.z1, 'lr': config['lr']},
97 |     {'params': model.eta, 'lr': config['lr']},
98 |     {'params': model.decay, 'lr': config['lr']},
99 | ],)
100 | 
101 | # scheduler = torch.optim.lr_scheduler.StepLR(optimizer, config['scheduler_steps'], gamma=config['scheduler_decay'])
102 | scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=config['scheduler_decay'], patience=config['patience'])
103 | 
104 | metrics = dict()
105 | metrics['train_loss_traj'] = []
106 | metrics['train_reconstruction_loss_traj'] = []
107 | metrics['train_X_loss_traj'] = []
108 | metrics['val_loss_traj'] = []
109 | metrics['val_reconstruction_loss_traj'] = []
110 | metrics['val_X_loss_traj'] = []
111 | metrics['z0_traj'] = [model.z0.item()]
112 | metrics['z1_traj'] = [model.z1.item()]
113 | metrics['eta_traj'] = [model.eta.item()]
114 | metrics['decay_traj'] = [model.decay.item()]
115 | 
116 | 
117 | print("Beginning Training")
118 | for epoch in range(config['epochs']):
119 |     model.train()
120 |     random.shuffle(train_vids)
121 |     total_train_loss = 0
122 |     for i in train_vids:
123 | 
124 |         optimizer.zero_grad()
125 |         Y = Ys[i-1].to(device)
126 |         S_star = S_stars[i-1].to(device)
127 |         mask = masks[i-1].to(device)
128 |         rank = [frame_rank, Y.shape[1], Y.shape[2], 3]
129 |         X, S = model(Y, rank, config["T"], epsilon=config["eps"])
130 |         if config['training_type'] == 'supervised':
131 |             loss = ((Y-X) * (1-mask)).norm()**2 / (Y * (1-mask)).norm()**2
132 |         elif config['training_type'] == 'self_supervised_cold':
133 |             loss = (Y-X).norm(p=1) / (Y).norm()**2
134 | 
135 |         total_train_loss += loss.item()
136 |         loss.backward()
137 |         process_grads(model, config)
138 | 
139 |         optimizer.step()
140 | 
141 |         metrics['train_loss_traj'].append(loss.item())
142 |         metrics['train_reconstruction_loss_traj'].append(((Y - X - S).norm()**2 / Y.norm()**2).item())
143 |         metrics['train_X_loss_traj'].append((((Y-X) * (1-mask)).norm()**2 / (Y * (1-mask)).norm()**2).item())
144 |         metrics['z0_traj'].append(model.z0.item())
145 |         metrics['z1_traj'].append(model.z1.item())
146 |         metrics['eta_traj'].append(model.eta.item())
147 |         metrics['decay_traj'].append(model.decay.item())
148 | 
149 |         if epoch % config['log_interval'] == config['log_interval'] - 1:
150 |             vid_output_path = f'{output_path}Video_00{i}/'
151 |             skvideo.io.vwrite(f"{vid_output_path}X_{config['training_type']}_train.mp4", X.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"})
152 |             skvideo.io.vwrite(f"{vid_output_path}S_{config['training_type']}_train.mp4", S.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"})
153 |             plt.imshow(Y[frame_to_save].cpu().detach().numpy())
154 |             plt.axis('off')
155 |             plt.savefig(f"{vid_output_path}Y{frame_to_save}_{config['training_type']}_train.eps", bbox_inches='tight')
156 |             plt.close()
157 |             plt.imshow(X[frame_to_save].cpu().detach().numpy())
158 |             plt.axis('off')
159 |             plt.savefig(f"{vid_output_path}X{frame_to_save}_{config['training_type']}_train.eps", bbox_inches='tight')
160 |             plt.close()
161 |             plt.imshow(S[frame_to_save].cpu().detach().numpy())
162 |             plt.axis('off')
163 |             plt.savefig(f"{vid_output_path}S{frame_to_save}_{config['training_type']}_train.eps", bbox_inches='tight')
164 |             plt.close()
165 | 
166 |     scheduler.step(total_train_loss / len(train_vids))
167 | 
168 |     with open(metrics_path, 'w') as fp:
169 |         json.dump(metrics, fp)
170 |     print("EPOCH ", epoch)
171 |     print("Train Mean loss: ", np.mean(metrics['train_loss_traj'][-len(train_vids):]))
172 |     print("Train Mean X loss: ", np.mean(metrics['train_X_loss_traj'][-len(train_vids):]))
173 |     print("Train Mean reconstruction loss: ", np.mean(metrics['train_reconstruction_loss_traj'][-len(train_vids):]))
174 | 
175 |     model.eval()
176 |     with torch.no_grad():
177 |         for i in val_vids:
178 |             Y = Ys[i-1].clone().to(device)
179 |             S_star = S_stars[i-1].clone().to(device)
180 |             mask = masks[i-1].clone().to(device)
181 |             rank = [frame_rank, Y.shape[1], Y.shape[2], 3]
182 |             X, S = model(Y, rank, config["T"], epsilon=config["eps"])
183 |             if config['training_type'] == 'supervised':
184 |                 loss = ((Y-X) * (1-mask)).norm()**2 / (Y * (1-mask)).norm()**2
185 |             elif config['training_type'] == 'self_supervised_cold':
186 |                 loss = (Y-X).norm(p=1) / (Y).norm()**2
187 |             metrics['val_loss_traj'].append(loss.item())
188 |             metrics['val_reconstruction_loss_traj'].append(((Y - X - S).norm()**2 / Y.norm()**2).item())
189 |             metrics['val_X_loss_traj'].append((((Y-X) * (1-mask)).norm()**2 / (Y * (1-mask)).norm()**2).item())
190 |             if epoch % config['log_interval'] == config['log_interval'] - 1:
191 |                 vid_output_path = f'{output_path}Video_00{i}/'
192 |                 skvideo.io.vwrite(f"{vid_output_path}X_{config['training_type']}_val.mp4", X.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"})
193 |                 skvideo.io.vwrite(f"{vid_output_path}S_{config['training_type']}_val.mp4", S.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"})
194 | 
195 |                 plt.imshow(Y[frame_to_save].cpu().detach().numpy())
196 |                 plt.axis('off')
197 |                 plt.savefig(f"{vid_output_path}Y{frame_to_save}_{config['training_type']}_val.eps", bbox_inches='tight')
198 |                 plt.close()
199 |                 plt.imshow(X[frame_to_save].cpu().detach().numpy())
200 |                 plt.axis('off')
201 |                 plt.savefig(f"{vid_output_path}X{frame_to_save}_{config['training_type']}_val.eps", bbox_inches='tight')
202 |                 plt.close()
203 |                 plt.imshow(S[frame_to_save].cpu().detach().numpy())
204 |                 plt.axis('off')
205 |                 plt.savefig(f"{vid_output_path}S{frame_to_save}_{config['training_type']}_val.eps", bbox_inches='tight')
206 |                 plt.close()
207 |             print(f"Val {i} loss: ", metrics['val_loss_traj'][-1])
208 |             print(f"Val {i} X loss: ", metrics['val_X_loss_traj'][-1])
209 |             print(f"Val {i} reconstruction loss: ", metrics['val_reconstruction_loss_traj'][-1])
210 | 
211 | 
212 | z0 = model.z0.item()
213 | z1 = model.z1.item()
214 | eta = model.eta.item() 215 | decay = model.decay.item() 216 | 217 | for i in val_vids: 218 | del model 219 | print(f"Fine tuning {i}:") 220 | model = TensorRPCANet(z0, z1, eta, decay, device, config['softplus_factor'], skip=skip) 221 | 222 | optimizer = torch.optim.Adam([ 223 | {'params': model.z0, 'lr': config['lr'] / 2}, 224 | {'params': model.z1, 'lr': config['lr'] / 2}, 225 | {'params': model.eta, 'lr': config['lr'] / 2}, 226 | {'params': model.decay, 'lr': config['lr'] / 2}, 227 | ],) 228 | 229 | scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=config['scheduler_decay'], patience=config['patience']) 230 | 231 | # model.train() 232 | Y = Ys[i-1].clone().to(device) 233 | mask = masks[i-1].clone().to(device) 234 | rank = [frame_rank, Y.shape[1], Y.shape[2], 3] 235 | with torch.no_grad(): 236 | X, S = model(Y, rank, config["T"], epsilon=config["eps"]) 237 | original_X_loss = (((Y-X) * (1-mask)).norm()**2 / (Y * (1-mask)).norm()**2).item() 238 | print("ORIGINAL LOSS: ", original_X_loss) 239 | 240 | for epoch in range(config['fine_tuning_epochs']): 241 | X, S = model(Y, rank, config["T"], epsilon=config["eps"]) 242 | loss = (Y-X).norm(p=1) / (Y).norm()**2 243 | loss.backward() 244 | process_grads(model, config) 245 | optimizer.step() 246 | print("loss: ", loss.item()) 247 | print("X loss: ", (((Y-X) * (1-mask)).norm()**2 / (Y * (1-mask)).norm()**2).item()) 248 | print("reconstruction loss: ", ((Y - X - S).norm()**2 / Y.norm()**2).item()) 249 | vid_output_path = f'{output_path}Video_00{i}/' 250 | skvideo.io.vwrite(f"{vid_output_path}X_{config['training_type']}_fine_tuned.mp4", X.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"}) 251 | skvideo.io.vwrite(f"{vid_output_path}S_{config['training_type']}_fine_tuned.mp4", S.cpu().detach().numpy() * 255, outputdict={"-pix_fmt": "yuv420p"}) 252 | -------------------------------------------------------------------------------- /outputs/synthetic/supervised_kappa5_alpha0_rank10.json: -------------------------------------------------------------------------------- 1 | {"loss_traj": [3.933823045088047e-09, 3.958360750289103e-09, 3.761428502002673e-09, 3.744662802063203e-09, 3.3562321810620688e-09, 2.8276019303774547e-09, 2.8429711917965506e-09, 2.893753237032115e-09, 2.6446029810500704e-09, 2.3248145630816452e-09, 2.2734405469293506e-09, 2.1126664861981226e-09, 2.1081176804216284e-09, 2.00295313668164e-09, 1.9452022215205034e-09, 1.7740060531679092e-09, 1.679712036128933e-09, 1.7357627557501587e-09, 1.71700298423616e-09, 1.5606166359205531e-09, 1.4349070820429688e-09, 1.269616745069868e-09, 1.194551124683585e-09, 1.209851996364364e-09, 1.1212077932754028e-09, 1.0174444620147938e-09, 1.0569787267655784e-09, 8.820799646258592e-10, 9.04018138125906e-10, 8.654460481594128e-10, 7.860826989336545e-10, 8.085312974692727e-10, 6.923532303027002e-10, 6.709757194300892e-10, 6.211117731247384e-10, 6.900250371089101e-10, 6.899728566267527e-10, 5.2211529544266e-10, 4.883968784952231e-10, 4.391689789606801e-10, 4.628362693104293e-10, 4.198059122551001e-10, 4.068843317828197e-10, 3.8067254903850767e-10, 3.757089361844379e-10, 3.253195490771077e-10, 3.063938547320788e-10, 3.17249948533771e-10, 2.587102476692138e-10, 2.361245365900544e-10, 2.2717408509898007e-10, 2.0352898533193553e-10, 2.197000359416279e-10, 2.1773224889720666e-10, 1.7408789687589632e-10, 1.8109395927279337e-10, 1.6622528353771315e-10, 1.39605160764944e-10, 1.3888241945370083e-10, 1.372469360383377e-10, 1.203917382452957e-10, 
1.0265912425699852e-10, 1.0549035950324637e-10, 1.2046998121295616e-10, 8.752164964986875e-11, 7.17578566011845e-11, 8.868795975391919e-11, 6.467062302339954e-11, 5.48804751643317e-11, 5.833700189583624e-11, 4.721356638981433e-11, 4.965568581316582e-11, 4.0418956376298e-11, 4.2188860044367615e-11, 3.64877954239784e-11, 3.828247094328496e-11, 2.90199149433068e-11, 2.7907518310166246e-11, 2.4877476814677557e-11, 2.4426700245827604e-11, 2.4835179052162815e-11, 2.294840706351664e-11, 2.2842661789868046e-11, 1.965094406641832e-11, 1.7098078161636998e-11, 1.185495798750047e-11, 1.513728205282394e-11, 1.3385024859313699e-11, 1.3597779152663136e-11, 1.1558073075013109e-11, 1.3317730598871869e-11, 1.704508929833981e-11, 1.0303240899345312e-11, 1.2689030381984878e-11, 9.178218948746597e-12, 1.0069459155381821e-11, 7.87750940867138e-12, 7.47299704756399e-12, 8.820106103812897e-12, 6.076136989385805e-12, 7.576095133188243e-12, 8.61667768387031e-12, 9.820280005845561e-12, 7.274624045511269e-12, 6.9744054281839496e-12, 5.612186496778415e-12, 5.811429896335207e-12, 6.088327758613232e-12, 4.6520170528252525e-12, 7.618575909029701e-12, 5.587643629040295e-12, 5.686052757108984e-12, 5.062624364865487e-12, 5.770793998910451e-12, 5.328715333569045e-12, 5.107457859421238e-12, 4.2210492913474784e-12, 3.7472902385260465e-12, 6.746837463711408e-12, 4.957292389085044e-12, 3.3072184314059117e-12, 3.731747549862163e-12, 5.029369715831011e-12, 4.091889587581887e-12, 4.4750531405512994e-12, 5.440540813000938e-12, 4.020530870035843e-12, 4.183059280904455e-12, 3.910607948576228e-12, 4.37316502455154e-12, 3.571481218406225e-12, 2.9185714175289368e-12, 3.657550391028552e-12, 2.697240217633401e-12, 3.3356362377084947e-12, 3.4526743443452634e-12, 3.447160092096002e-12, 2.8269545992304357e-12, 3.915544537907989e-12, 4.194917416905364e-12, 3.7909501926503e-12, 3.2563507012389747e-12, 2.979708493833222e-12, 2.921431759700388e-12, 3.7027794025368266e-12, 2.564463181739529e-12, 2.637237867322839e-12, 3.1165478184269624e-12, 2.8527143754869533e-12, 2.8796466079727967e-12, 2.8658338722953314e-12, 2.334774951498475e-12, 2.962652692617418e-12, 3.699240566645834e-12, 2.6448342214241416e-12, 2.270001244267239e-12, 2.2315435090070057e-12, 3.3428197276225147e-12, 2.59925045896503e-12, 2.6041278508581733e-12, 3.129862037945519e-12, 2.481288395236869e-12, 2.1829254983479762e-12, 2.1055887068632817e-12, 2.4518601125095296e-12, 2.4788116437940433e-12, 2.5216235349789784e-12, 2.179150089542947e-12, 2.8949095724761786e-12, 1.7978560263795518e-12, 2.1050067071370915e-12, 2.177908461215017e-12, 1.9546040133655573e-12, 1.398937606381101e-12, 2.5387645544855397e-12, 1.3179516072242548e-12, 1.989854895440013e-12, 1.5997729399877536e-12, 2.03199761704409e-12, 1.9154057680215164e-12, 2.6113534078164857e-12, 1.6330934000940989e-12, 1.858502067519918e-12, 2.0200555637950313e-12, 1.533603105619008e-12, 1.5850592323049528e-12, 1.9888491895048155e-12, 1.9387119947616993e-12, 1.888947765565918e-12, 1.9132705402630235e-12, 2.5144417797884344e-12, 1.6298034970219089e-12, 1.764465960493733e-12, 1.6455113096766616e-12, 2.1558440796431988e-12, 1.7317295600977056e-12, 1.397472849246073e-12, 1.4636863769623698e-12, 1.574140557486503e-12, 2.103781992363052e-12, 1.404731799631298e-12, 1.631207321994843e-12, 1.6662221732563487e-12, 2.177153422822098e-12, 2.3111599439795683e-12, 1.6793680161775182e-12, 1.5971748663218266e-12, 1.478306517997685e-12, 1.6074653544015383e-12, 1.4471110941494114e-12, 1.4189769151351506e-12, 1.8040270883049048e-12, 1.5365345714529743e-12, 
1.3188394603833031e-12, 1.3794117757759405e-12, 1.55718677969513e-12, 1.2275506132419789e-12, 1.5173297731110869e-12, 1.6313118390842707e-12, 1.4259330478536003e-12, 1.6851658957151017e-12, 1.5450264765487498e-12, 1.7016946662548604e-12, 1.814350102449791e-12, 1.7098518781399896e-12, 1.2992366514238962e-12, 2.1524481415985397e-12, 1.5494481782687974e-12, 1.7317891912171923e-12, 1.5430741536967552e-12, 1.558587894162633e-12, 1.335940581249917e-12, 1.4109308339727011e-12, 1.3878694200830655e-12, 1.3729871189624432e-12, 1.648113503310844e-12, 1.2692415478007812e-12, 1.167109377891995e-12, 1.1370723156448048e-12, 1.631227379735034e-12, 1.2597072907363782e-12, 1.351389811686532e-12, 1.6375671435184258e-12, 1.6483746876141958e-12, 1.3256110618919958e-12, 1.505075361216135e-12, 1.5345147028056338e-12, 1.2457207574506635e-12, 1.0121609496718809e-12, 1.4580635960756427e-12, 1.2375453309690365e-12, 1.080669516546895e-12, 1.1519042013644065e-12, 1.1786375911718161e-12, 1.2624774272870787e-12, 1.3060643061849064e-12, 1.1302547439639987e-12, 1.2342443690346871e-12, 1.4672999143830467e-12, 1.3531068626670972e-12, 1.1081040598992509e-12, 1.3580751107022948e-12, 1.1061489181216078e-12, 7.91019918837399e-13, 9.907093591679517e-13, 1.4752723782179844e-12, 1.3451269178371694e-12, 9.56577373735501e-13, 1.0626788077977567e-12, 1.3538957281677977e-12, 1.2906742731869092e-12, 1.8342558374961904e-12, 1.337597784270561e-12, 1.171765484121734e-12, 1.2091676401468354e-12, 1.1970803038066635e-12, 1.0862999952690466e-12, 1.4308334248328003e-12, 1.8044955720636358e-12, 1.3308996916691629e-12, 1.0099003881422486e-12, 1.2724081770859597e-12, 1.1516240435230363e-12, 1.1718654475620371e-12, 1.0236541432215307e-12, 7.608668469577529e-13, 1.3791735765586455e-12, 1.1805143451323885e-12, 1.17586019046656e-12, 1.5105283560426508e-12, 8.405131517001674e-13, 1.1332611281680838e-12, 1.1129414447322294e-12, 1.388783077253819e-12, 9.831777319363466e-13, 1.1889030341813434e-12, 1.1705835953335075e-12, 1.2712185904623086e-12, 1.0444463219641542e-12, 9.489675755272597e-13, 1.5562951318284779e-12, 1.1092850813257393e-12, 1.333345109669204e-12, 1.2992068358641529e-12, 1.141994593507889e-12, 1.097424776920486e-12, 1.3278885369755189e-12, 1.1512980239297699e-12, 1.4009601855338727e-12, 1.1485942405520255e-12, 1.8347422105907674e-12, 1.1638405083419512e-12, 1.0581020651670436e-12, 1.7917580396806243e-12, 1.2287475724404029e-12, 9.176043171826853e-13, 1.0252078049347024e-12, 8.562089297108055e-13, 1.0564531020829104e-12, 9.638300357481255e-13, 9.471187939827375e-13, 8.189817639163433e-13, 9.35670703243463e-13, 9.656238482425028e-13, 9.305774466977779e-13, 8.721556463142655e-13, 8.401077685078751e-13, 1.0994977714742782e-12, 9.05474913798221e-13, 1.0747499895255586e-12, 1.053376353157831e-12, 8.160571285560636e-13, 7.648063497615876e-13, 1.0029741794037594e-12, 8.332675912117726e-13, 9.022358598079205e-13, 1.0457065985694514e-12, 1.0090190401962351e-12, 9.91948385410668e-13, 9.288936807239079e-13, 8.501372891246695e-13, 9.91083734178111e-13, 9.441245528429842e-13, 1.0439872707643238e-12, 1.5122352155227947e-12, 1.2460166362235348e-12, 1.4254652146161728e-12, 1.1789676223131207e-12, 1.0143522306826913e-12, 9.888293526008618e-13, 1.0385118329528376e-12, 7.647088257761725e-13, 8.577417205321569e-13, 1.0944338968074674e-12, 8.724821537985095e-13, 8.12317877893487e-13, 9.206233865521884e-13, 1.473003034650755e-12, 8.597486329635362e-13, 1.266674807577639e-12, 8.77141783685309e-13, 7.962259324090226e-13, 1.0308596424396521e-12, 
8.137164986959933e-13, 6.193212717837959e-13, 1.142891879225838e-12, 9.23061648817891e-13, 8.566358885263303e-13, 8.688160867825756e-13, 9.552924857408884e-13, 1.1882109879346459e-12, 9.327855328422618e-13, 8.999237444649866e-13, 9.899855457976003e-13, 9.378522264347211e-13, 9.366414978687065e-13, 9.794720373310084e-13, 1.1499125219735506e-12, 1.09833604884646e-12, 7.896747492019962e-13, 8.01029002243242e-13, 7.770299161047323e-13, 9.436933656389868e-13, 1.3142894975462505e-12, 8.687296216593199e-13, 9.01469220451756e-13, 9.51502982307617e-13, 8.439022050610312e-13, 7.172465204229039e-13, 1.1051014704027695e-12, 9.417075409398623e-13, 8.552406829606674e-13, 9.393282592723429e-13, 7.235719727376189e-13, 9.05325293898418e-13, 1.0452023361390284e-12, 1.0024136468805844e-12, 1.0362264431934554e-12, 1.2225279382577225e-12, 1.3584094786522893e-12, 1.1168805680653038e-12, 1.0092110524009823e-12, 9.924891854543039e-13, 7.307605041917409e-13, 9.594042140598225e-13, 8.422360573724641e-13, 1.1394804370901124e-12, 1.0735149748308803e-12, 8.170866869390558e-13, 1.2091724106363944e-12, 9.106918778117867e-13, 9.95134421914734e-13, 8.215493715012234e-13, 6.595996535421755e-13, 7.877660112773355e-13, 1.0874864377063975e-12, 8.594769318991113e-13, 7.047828575086623e-13, 8.076890935584946e-13, 7.890073143446141e-13, 7.044295702307579e-13, 8.01150487096669e-13, 7.667846392556132e-13, 1.0708927234565069e-12, 1.0106033848308882e-12, 1.062948123617402e-12, 8.048752094501344e-13, 9.799362927012667e-13, 8.067387903543111e-13, 7.232461157746783e-13, 7.81219598559868e-13, 8.172470404403664e-13, 9.762197560742036e-13, 1.3122930476658357e-12, 8.725007478657676e-13, 9.13250378098418e-13, 9.802558070814982e-13, 8.89659927758718e-13, 1.022077821682954e-12, 7.761232520379913e-13, 9.178534668419225e-13, 8.245610682959537e-13, 8.379424541390956e-13, 6.496672774400358e-13, 6.685245889856417e-13, 7.818238244305942e-13, 7.986401795966047e-13, 8.968256909672179e-13, 9.447023241807018e-13, 9.293298552578988e-13, 8.247811613369682e-13, 7.405824543026129e-13, 8.107147223511413e-13, 6.833845555415108e-13, 1.0980069934871106e-12, 8.931633102386705e-13, 8.314326874550582e-13, 5.706667235115537e-13, 1.022376194120822e-12, 6.999431416410129e-13, 8.206261191412434e-13, 7.687073091781904e-13, 6.916888394513376e-13, 8.799414105351011e-13, 9.738834088127146e-13, 8.681171558520828e-13, 1.0416432256674102e-12, 8.694680175488911e-13, 8.679934483842022e-13, 9.174961138058713e-13, 9.479511359905546e-13, 7.114843111367952e-13, 5.503714346245803e-13, 7.239092138233705e-13, 7.819892194720068e-13, 1.184248120573994e-12, 9.559205640594093e-13, 9.56153342265842e-13, 7.947964118446005e-13, 8.095545176063645e-13, 8.629689844663613e-13, 9.810469494067608e-13, 9.472833758725208e-13, 7.035007884396982e-13, 7.734560062734597e-13, 1.089694307010447e-12, 9.687793102453046e-13, 9.549467336680828e-13, 9.61566763713062e-13, 1.2905989211359215e-12, 6.403082816568151e-13, 7.043742217098525e-13, 7.348651309864451e-13, 9.954488405447548e-13, 7.974279873576573e-13, 9.16166448261535e-13, 7.029287091633862e-13, 1.0914768438022304e-12, 8.428402832431903e-13, 8.49762372013424e-13, 7.570206939609692e-13, 7.043789921994115e-13, 7.605359484547103e-13, 7.194009927699585e-13, 7.65064986189834e-13, 6.324175124555742e-13, 5.869898208991919e-13, 8.09722677363317e-13, 7.146371168442744e-13, 7.79416949817785e-13, 7.619737631657519e-13, 8.281731962538064e-13, 6.916105600544842e-13, 7.979626616590185e-13, 9.290198818567852e-13, 7.897019626765256e-13, 7.439660324425057e-13, 
9.126420322594364e-13, 8.927632396370233e-13, 7.385138507676192e-13, 1.367149666045564e-12, 8.63472271114829e-13, 8.85599861673303e-13, 7.54132921474554e-13, 8.138934404905429e-13, 5.190985445310825e-13, 8.166648238737417e-13, 6.837929744998861e-13, 8.477022252553756e-13, 9.07977902933621e-13, 7.900617551674649e-13, 6.994547085623082e-13, 7.381025044633782e-13, 9.46112220685802e-13, 6.048693447155418e-13, 5.67270731256786e-13, 6.955058274096815e-13, 9.093721869274374e-13, 9.385926280983115e-13, 6.656782872323241e-13, 8.124596915376481e-13, 6.917708051355775e-13, 9.291569250113874e-13, 9.56958145538478e-13, 7.611778503509303e-13, 7.515793543078075e-13, 7.175530785871742e-13, 6.7538108352444e-13, 6.958759198212594e-13, 8.90204739350392e-13, 8.704172365509022e-13, 9.534413189515867e-13, 7.673727105139694e-13, 8.495579456938018e-13, 7.895925124672132e-13, 7.781676778645386e-13, 6.565579785573761e-13, 6.55030771377213e-13, 7.897026674079377e-13, 7.256832396280999e-13, 9.337374623497041e-13, 1.009870897843157e-12, 7.792690104313493e-13, 1.0453060942869352e-12, 6.105521904026245e-13, 9.24184448587717e-13, 8.655253705587562e-13, 8.758540225549394e-13, 5.757024089218798e-13, 7.419508258645069e-13, 6.514272628266315e-13, 8.703316387893845e-13, 6.763166957891864e-13, 6.732167449376159e-13, 6.266928165646335e-13, 9.197304376429294e-13, 7.733360393030742e-13, 7.325351263076652e-13, 9.960217329726961e-13, 5.997224201623186e-13, 6.797063996713537e-13, 7.548708294731477e-13, 8.328534801919918e-13, 7.449588905819593e-13, 7.009034195051833e-13, 7.145174751345407e-13, 6.497880033519421e-13, 8.551352442993931e-13, 9.416887842422783e-13, 1.0183251811235472e-12, 9.755836546596064e-13, 9.145904519836101e-13, 8.146529783224776e-13, 1.0755650927188332e-12, 5.567074579104769e-13, 6.364254284164927e-13, 8.19855142976389e-13, 9.12741778859305e-13, 9.70690108154093e-13, 6.811735420511611e-13, 6.424624829533265e-13, 8.904277597372723e-13, 9.644543193590427e-13, 7.423769173182937e-13, 9.533300798086897e-13, 7.875822932192078e-13, 7.693269307197659e-13, 7.233666790562587e-13, 5.928395795107289e-13, 8.185235800782509e-13, 7.850849419351047e-13, 8.780675839203944e-13, 6.502793095664039e-13, 6.616508014221922e-13, 7.444811368946536e-13, 8.800367661161712e-13, 6.484176802261377e-13, 6.673466033252362e-13, 6.733505896958092e-13, 7.125333851588922e-13, 6.209075679823595e-13, 9.648650151419802e-13, 6.051269511517243e-13, 6.283480680213671e-13, 9.337403896955698e-13, 6.79430199167913e-13, 1.048763398174557e-12, 6.439660545261294e-13, 8.934659652751198e-13, 7.997465537035175e-13, 8.447334086565672e-13, 7.395210203757496e-13, 5.874012756236502e-13, 7.893205945623538e-13, 1.0014744025385602e-12, 6.753176576973496e-13, 8.295170648466021e-13, 7.578187751801357e-13, 9.405338920881468e-13, 1.0895150883913352e-12, 8.412503007572403e-13, 5.480303168735323e-13, 9.38663968601261e-13, 1.114713139502288e-12, 8.651739806346537e-13, 7.14798337707323e-13, 5.370248516711751e-13, 6.989228531865954e-13, 8.467708955892106e-13, 6.711597423658677e-13, 7.039234646566417e-13, 7.594544025775474e-13, 7.677912667626574e-13, 7.220749605879595e-13, 7.453298503552752e-13, 8.450677766065617e-13, 8.273885591415786e-13, 8.390341915166799e-13, 8.048636626969974e-13, 7.792313886159641e-13, 8.123283404444515e-13, 8.527828508457513e-13, 8.691895402208882e-13, 9.961998673896355e-13, 6.388596791341572e-13, 7.755839156672883e-13, 8.46055484785696e-13, 8.607576457153598e-13, 7.839183946076189e-13, 6.844901164967943e-13, 7.848712998970164e-13, 7.742028589399763e-13, 
6.804796526607704e-13, 8.925115421026808e-13, 8.154448253791524e-13, 8.993223917300175e-13, 5.751754866660519e-13, 7.687246564129502e-13, 7.346636862227973e-13, 5.949137666869109e-13, 8.399396087509226e-13, 5.633090565185239e-13, 6.579522625511924e-13, 9.480573878034582e-13, 6.101772732913791e-13, 5.79729135790491e-13, 7.576354908028771e-13, 9.641731857357172e-13, 8.279146140356686e-13, 9.608211578790438e-13, 6.368215416802103e-13, 7.959249578859406e-13, 9.12460428395545e-13, 7.497973054070017e-13, 5.892424677529651e-13, 6.860619928064637e-13, 8.288110866019882e-13, 9.472574634405984e-13, 5.334713790508538e-13, 7.362326893967097e-13, 7.52130291641756e-13, 7.853739902342893e-13, 8.543812901086467e-13, 9.071305989358236e-13, 6.774289245878307e-13, 7.698501124780988e-13, 7.210496848035486e-13, 7.754412346613893e-13, 8.10212086223977e-13, 8.499602389099026e-13, 1.052275345851672e-12, 5.779940870538625e-13, 8.326691116125606e-13, 6.379416851547137e-13, 7.21433275532174e-13, 7.644700302476826e-13, 6.584615665217175e-13, 7.416784742787785e-13, 6.62041114204287e-13, 5.839258113496393e-13, 5.908701262644089e-13, 8.963541714424039e-13, 8.586711528445201e-13, 7.2262936736886e-13, 5.613354833039486e-13, 8.45395259872761e-13, 5.90254462060763e-13, 6.711264031490638e-13, 5.822355401627344e-13, 5.968914598697417e-13, 9.15029228602815e-13, 8.004040681110214e-13, 6.585638067865829e-13, 6.465576229790215e-13, 6.980757118191239e-13, 6.687291779355897e-13, 6.512905449326811e-13, 9.240747273278616e-13, 7.326352523782942e-13, 9.505506191193058e-13, 8.281074393920451e-13, 6.555279322834062e-13, 6.882639532086732e-13, 7.819150058333002e-13, 6.851502329895121e-13, 8.130125262253984e-13, 7.259196499118103e-13, 6.550138578233222e-13, 9.507467512923085e-13, 6.845420497808563e-13, 6.894882343018438e-13, 6.766460221990789e-13, 6.464429143891726e-13, 6.16832268856421e-13, 8.180504884602868e-13, 6.492312655363708e-13, 6.157771775122667e-13, 6.102318086606551e-13, 7.556176279295557e-13, 7.377711722794666e-13, 8.610777021966776e-13, 5.797713112550007e-13, 6.195520442162095e-13, 5.098872712938629e-13, 6.446150579465793e-13, 8.073011118110707e-13, 9.842512005073245e-13, 7.476814848673963e-13, 7.192739784854518e-13, 6.356221430268982e-13, 8.278180658322087e-13, 7.562184927735471e-13, 7.635641793325709e-13, 7.536467652204115e-13, 7.540460768805379e-13, 7.584206700161911e-13, 6.951979139926956e-13, 8.906575563877306e-13, 8.54189928425203e-13, 6.888642759515784e-13, 6.182692162057246e-13, 6.712376965020694e-13, 7.699282292446263e-13, 8.221157045060212e-13, 7.125234104989053e-13, 6.294098272088822e-13, 5.309354301694102e-13, 6.352572547857482e-13, 5.692930393590145e-13, 7.17399880820202e-13, 6.826906119410114e-13, 8.23107532653411e-13, 7.018291655301601e-13, 5.137484404907355e-13, 5.66349918351694e-13, 6.017752485557026e-13, 6.163038829276601e-13, 6.162378550153558e-13, 6.220979135475313e-13, 6.400254133100136e-13, 6.448198095268531e-13, 7.240836077428148e-13, 6.968681816495181e-13, 5.955034100384171e-13, 7.957690496135372e-13, 9.90555510879676e-13, 5.739215526434638e-13, 1.0456454495669232e-12, 7.347284130924947e-13, 6.795696275672947e-13, 8.554553549908195e-13, 6.188700810497161e-13, 5.252826168825053e-13, 6.160906745704409e-13, 6.685608013382027e-13, 7.728509130409955e-13, 6.908066783536948e-13, 8.420094591184146e-13, 6.421436733045072e-13, 6.986327206852383e-13, 6.210294865166555e-13, 7.50564270023818e-13, 5.696551628846247e-13, 6.682748430152097e-13, 6.530709675302282e-13, 7.430660362191255e-13, 6.505184303555456e-13, 
7.635672693087625e-13, 9.637856918792709e-13, 6.766544789760243e-13, 8.940359303571954e-13, 6.211239205258789e-13, 7.258673371569879e-13, 6.663423068528629e-13, 8.724832380006819e-13, 7.941805850106287e-13, 8.425576859469319e-13, 7.338911921749014e-13, 6.299444473001348e-13, 6.436420949169908e-13, 7.856673211320553e-13, 5.598679072432722e-13, 6.327490072698116e-13, 5.931670627769281e-13, 6.644136196082284e-13, 7.101112775055596e-13, 1.0444223610961423e-12, 6.527261912393778e-13, 8.059984429008293e-13, 6.265825532036917e-13, 6.603387541631589e-13, 5.326001141850445e-13, 6.454231138257327e-13, 6.171522169175214e-13, 1.0098891124396547e-12, 6.67260355042415e-13, 7.210049614639336e-13, 6.799036160465288e-13, 6.156358517590832e-13, 7.040856070915369e-13, 6.633276827122669e-13, 7.089440254466617e-13, 7.201525617159255e-13, 6.684588321238805e-13, 5.879629465591063e-13, 7.734386048285913e-13, 9.062333132178746e-13, 7.385445336891006e-13, 6.289928430533442e-13, 5.713817006341992e-13, 6.857349974312421e-13, 5.154291164884139e-13, 4.97176789755055e-13, 7.256229037772011e-13, 5.851900995029746e-13, 7.085968097009232e-13, 6.741019960114503e-13, 8.156307660517337e-13, 6.016498605744547e-13, 7.541299941286883e-13, 8.132864499042769e-13, 6.081229270149535e-13, 6.546305923553486e-13, 7.410443786382004e-13, 6.942844194522679e-13, 5.910025073496694e-13, 7.336444277604437e-13, 7.501948281335435e-13, 8.724870869183943e-13, 1.141155746287037e-12, 5.864975389027749e-13, 7.888631154556736e-13, 6.602889892834418e-13, 6.290469447417513e-13, 7.40093912803691e-13, 6.628934055320779e-13, 6.675509754347497e-13, 6.528448571671563e-13, 5.939620540199031e-13, 6.424815107014537e-13, 6.095777094899946e-13, 7.353826206833725e-13, 5.685907474017871e-13, 6.646455846630317e-13, 6.576534564324554e-13, 5.312585766269196e-13, 6.715389420756945e-13, 8.981865815341217e-13, 9.386496571325842e-13, 9.303921565465e-13, 8.343071784648604e-13, 6.813133499213031e-13, 8.623116868992919e-13, 1.0287708185341415e-12, 6.071829237314086e-13, 5.335136629355808e-13, 7.011921425437162e-13, 6.630506148470883e-13, 7.589627711024338e-13, 5.202681276246512e-13, 6.657135238029299e-13, 5.998314909008706e-13, 8.320252581524301e-13, 6.83480615853993e-13, 7.986877218618682e-13, 8.242856267340337e-13, 6.01680543495936e-13, 6.011144273315727e-13, 6.138911536231195e-13, 7.239325241700789e-13, 6.419557268579068e-13, 7.155658444252255e-13, 5.277035319134482e-13, 5.498491202279854e-13, 6.343203956885035e-13, 6.962326765461158e-13, 7.1068026680568e-13, 6.766021120110932e-13, 7.15156883365764e-13, 7.567756100598788e-13, 7.646905569695661e-13, 7.341508585952117e-13, 7.274781363246496e-13, 7.790092355908218e-13, 8.5857579726345e-13, 9.407640682093654e-13, 6.953863483302736e-13, 6.616163780032158e-13, 7.954434637011398e-13, 7.783169182935812e-13, 6.41229528242776e-13, 6.71745048908684e-13, 4.729510511523871e-13, 6.033399149209251e-13, 5.789782173658276e-13, 7.156249876537346e-13, 6.167591394198868e-13, 6.163523467647702e-13, 7.185659944668188e-13, 6.295533213664106e-13, 9.004574429843926e-13, 7.777313949103304e-13, 8.675506602169591e-13, 6.677089436912809e-13, 5.455818088972997e-13, 7.091422718139007e-13, 6.866347226040792e-13, 5.800171540976118e-13, 7.642601287070894e-13, 7.224873368842644e-13, 7.214102904461173e-13, 6.46636336056744e-13, 6.86086766826105e-13, 7.725588289757279e-13, 6.860116316155518e-13, 7.276615833322342e-13, 1.034581816918012e-12, 6.931982115057633e-13, 6.683722043702989e-13, 6.922535461528767e-13, 7.336797727512667e-13, 5.608435265681833e-13, 
5.870314542626154e-13, 6.979398612869114e-13, 5.942250272568395e-13, 6.547824890797138e-13, 9.161765313417392e-13, 9.615755457506592e-13, 9.438918830567689e-13, 6.931273588937914e-13, 7.578020242565708e-13, 7.107585462025334e-13, 6.470456223768573e-13, 7.174293711192936e-13, 5.598147813368204e-13, 4.97481938456501e-13, 7.505262145275637e-13, 8.603169175322445e-13, 6.77785681312687e-13, 7.381647376680789e-13], "z0_traj": [0.0, -0.0020216088742017746, -0.0033825235441327095, -0.004288623109459877, -0.004000407177954912, -0.0036096873227506876, -0.003312481800094247, -0.003061629133298993, -0.002826938172802329, -0.0026226963382214308, -0.0024242314975708723, -0.002243209630250931, -0.0020984781440347433, -0.0019654780626296997, -0.001853706780821085, -0.0017468577716499567, -0.0016489549307152629, -0.0015549480449408293, -0.0014667198993265629, -0.0013804018963128328, -0.0012932394165545702, -0.0012090858072042465, -0.0011300798505544662, -0.0010539379436522722, -0.000981637742370367, -0.0009113922715187073, -0.0008443804690614343, -0.0007809853996150196, -0.0007199015817604959, -0.0006623575463891029, -0.0006079352460801601, -0.0005554747767746449, -0.0005053096101619303, -0.0004577484796755016, -0.0004124725819565356, -0.00036934518720954657, -0.0003281146346125752, -0.00028783187735825777, -0.00024995175772346556, -0.00021457629918586463, -0.00018156094301957637, -0.0001499847712693736, -0.00011997872934443876, -9.146514639724046e-05, -6.425686297006905e-05, -3.8316324207698926e-05, -1.3803325600747485e-05, 9.353317182103638e-06, 3.137253224849701e-05, 5.244055137154646e-05, 7.215427467599511e-05, 9.07655994524248e-05, 0.00010796448623295873, 0.00012444770254660398, 0.0001400493347318843, 0.00015475883265025914, 0.00016876986774150282, 0.0001820413745008409, 0.00019457523012533784, 0.00020635708642657846, 0.00021754561748821288, 0.00022806369815953076, 0.0002378422359470278, 0.0002471818297635764, 0.0002559185086283833, 0.00026434779283590615, 0.000272337842034176, 0.000279915431747213, 0.0002870670286938548, 0.00029376044403761625, 0.00030009192414581776, 0.0003060310264118016, 0.0003116278094239533, 0.0003168407129123807, 0.00032177759567275643, 0.00032639430719427764, 0.000330828275764361, 0.0003349572070874274, 0.0003388574696145952, 0.0003425317700020969, 0.00034593389136716723, 0.0003491390380077064, 0.00035214313538745046, 0.0003549814864527434, 0.00035762839252129197, 0.0003601193311624229, 0.0003624448727350682, 0.0003646347322501242, 0.00036669665132649243, 0.00036866249865852296, 0.0003705231356434524, 0.00037224520929157734, 0.0003738792729564011, 0.00037540451739914715, 0.0003768466121982783, 0.0003782390267588198, 0.0003795749507844448, 0.0003808238252531737, 0.00038199464324861765, 0.00038311531534418464, 0.000384166109142825, 0.00038485057302750647, 0.00038551579928025603, 0.0003861640871036798, 0.00038677730481140316, 0.0003873696841765195, 0.0003879420110024512, 0.0003884833713527769, 0.0003889998479280621, 0.0003894912370014936, 0.00038996836519800127, 0.00039041932905092835, 0.0003908464568667114, 0.00039125338662415743, 0.00039164829649962485, 0.0003920270537491888, 0.0003924063639715314, 0.0003927714133169502, 0.00039311591535806656, 0.00039345206459984183, 0.00039377924986183643, 0.0003940903989132494, 0.00039439386455342174, 0.0003947067307308316, 0.0003950114478357136, 0.0003953096456825733, 0.0003955936408601701, 0.0003958717279601842, 0.00039613753324374557, 0.00039638590533286333, 0.0003966395161114633, 0.0003968781093135476, 0.000397109251935035, 
0.0003973289276473224, 0.00039754141471348703, 0.0003977509040851146, 0.0003979638568125665, 0.0003981792542617768, 0.0003983851056545973, 0.00039859116077423096, 0.0003987951495219022, 0.0003989938704762608, 0.00039918714901432395, 0.00039937664405442774, 0.00039955537067726254, 0.00039973368984647095, 0.0003999047330580652, 0.00040007129427976906, 0.0004002382920589298, 0.0004004059301223606, 0.00040057156002148986, 0.0004007369570899755, 0.00040089598041959107, 0.00040105736115947366, 0.0004012290737591684, 0.00040139921475201845, 0.0004015662125311792, 0.0004017282626591623, 0.0004018956678919494, 0.00040206019184552133, 0.00040221947710961103, 0.0004023791116196662, 0.0004025340313091874, 0.0004026845272164792, 0.00040283650741912425, 0.000402988021960482, 0.00040313813951797783, 0.0004032882861793041, 0.00040343904402107, 0.00040359696140512824, 0.00040374399395659566, 0.0004038878541905433, 0.00040402848389931023, 0.00040416530100628734, 0.00040429714135825634, 0.0004044322413392365, 0.0004045579116791487, 0.0004046825924888253, 0.00040480386815033853, 0.0004049194394610822, 0.00040503471973352134, 0.00040515317232348025, 0.00040526772500015795, 0.00040538437315262854, 0.0004054997116327286, 0.000405613100156188, 0.00040572378202341497, 0.00040583242662250996, 0.0004059394996147603, 0.00040604418609291315, 0.0004061491636093706, 0.0004062562366016209, 0.00040636316407471895, 0.00040646674460731447, 0.0004065715766046196, 0.00040668080328032374, 0.0004067910776939243, 0.0004068979760631919, 0.00040700100362300873, 0.0004071016446687281, 0.00040720307151786983, 0.0004072718438692391, 0.0004073418094776571, 0.00040741264820098877, 0.00040748424362391233, 0.00040755391819402575, 0.0004076225159224123, 0.00040769047336652875, 0.0004077552875969559, 0.0004078201309312135, 0.0004078818310517818, 0.0004079407954122871, 0.00040800051647238433, 0.0004080600047018379, 0.0004081180668435991, 0.00040817540138959885, 0.0004082317464053631, 0.0004082856758031994, 0.00040834012906998396, 0.00040839615394361317, 0.00040845113107934594, 0.00040850791265256703, 0.0004085653636138886, 0.00040862380410544574, 0.0004086840490344912, 0.0004087438283022493, 0.00040880366577766836, 0.000408864434575662, 0.0004089277354069054, 0.0004089913854841143, 0.00040905363857746124, 0.0004091139999218285, 0.0004091734008397907, 0.0004092309682164341, 0.00040928818634711206, 0.00040934348362497985, 0.0004093954630661756, 0.0004094470932614058, 0.0004094966279808432, 0.0004095452022738755, 0.00040959700709208846, 0.0004096482298336923, 0.00040969945257529616, 0.0004097522178199142, 0.00040980507037602365, 0.00040985600207932293, 0.0004099070793017745, 0.0004099600191693753, 0.0004100133664906025, 0.0004100651713088155, 0.0004101161321159452, 0.0004101655213162303, 0.0004102147067897022, 0.0004102631355635822, 0.00041031313594430685, 0.0004103622632101178, 0.00041040743235498667, 0.000410453270887956, 0.0004104993713553995, 0.0004105443658772856, 0.0004105912521481514, 0.00041063688695430756, 0.0004106829292140901, 0.0004107280692551285, 0.00041077149217016995, 0.0004108143039047718, 0.0004108581051696092, 0.0004109025467187166, 0.00041094646439887583, 0.0004109910223633051, 0.0004110372974537313, 0.0004110831650905311, 0.00041113304905593395, 0.0004111806338187307, 0.00041122615220956504, 0.0004112723108846694, 0.0004113160539418459, 0.0004113591858185828, 0.00041140196844935417, 0.000411448156228289, 0.00041149536264128983, 0.0004115406482014805, 0.0004115858464501798, 0.00041162982233799994, 0.00041167475865222514, 
0.0004117182397749275, 0.0004117606731597334, 0.000411802640883252, 0.00041184431756846607, 0.00041188503382727504, 0.0004119288641959429, 0.0004119707446079701, 0.00041201297426596284, 0.0004120543017052114, 0.0004120967641938478, 0.0004121412930544466, 0.0004121855890844017, 0.00041222787695005536, 0.00041227065958082676, 0.0004123129474464804, 0.00041235514800064266, 0.00041238535777665675, 0.00041241469443775713, 0.0004124437109567225, 0.0004124737170059234, 0.0004125019768252969, 0.00041252924711443484, 0.00041255736141465604, 0.00041259295539930463, 0.0004126277635805309, 0.0004126617277506739, 0.0004126966232433915, 0.0004127318970859051, 0.00041276763658970594, 0.00041280576260760427, 0.000412844616221264, 0.0004128818691242486, 0.0004129167937207967, 0.0004129510489292443, 0.00041298469295725226, 0.000413017172832042, 0.00041304927435703576, 0.00041308102663606405, 0.00041311292443424463, 0.000413144298363477, 0.00041317552677355707, 0.00041320588206872344, 0.00041323460754938424, 0.0004132637404836714, 0.000413293280871585, 0.0004133233451284468, 0.00041335326386615634, 0.00041338230948895216, 0.00041341001633554697, 0.0004134385962970555, 0.0004134665068704635, 0.0004134929913561791, 0.0004135190974920988, 0.00041354523273184896, 0.0004135706403758377, 0.0004135960480198264, 0.0004136207571718842, 0.000413646106608212, 0.0004136705829296261, 0.00041369482642039657, 0.000413719768403098, 0.00041374677675776184, 0.00041377500747330487, 0.0004138051299378276, 0.00041383542702533305, 0.0004138656076975167, 0.00041389570105820894, 0.0004139254451729357, 0.00041395495645701885, 0.00041398583562113345, 0.0004140135715715587, 0.00041404046351090074, 0.000414066860685125, 0.0004140954406466335, 0.000414124020608142, 0.000414152629673481, 0.0004141801327932626, 0.00041420661727897823, 0.0004142318794038147, 0.0004142560646869242, 0.0004142796387895942, 0.00041430300916545093, 0.0004143253609072417, 0.00041434852755628526, 0.0004143709666095674, 0.00041439294000156224, 0.00041441668872721493, 0.00041444008820690215, 0.0004144619742874056, 0.0004144837148487568, 0.00041450641583651304, 0.00041452705045230687, 0.0004145489074289799, 0.0004145716957282275, 0.00041459553176537156, 0.0004146180581301451, 0.00041464046807959676, 0.0004146632272750139, 0.00041468662675470114, 0.00041471095755696297, 0.00041473418241366744, 0.0004147572326473892, 0.0004147790605202317, 0.0004148008010815829, 0.0004148223961237818, 0.0004148447187617421, 0.0004148667794652283, 0.00041488802526146173, 0.0004149097658228129, 0.0004149317101109773, 0.0004149560409132391, 0.00041497984784655273, 0.00041500365477986634, 0.0004150276363361627, 0.0004150525201112032, 0.00041507917921990156, 0.0004150981258135289, 0.00041511651943437755, 0.00041513561154715717, 0.0004151542379986495, 0.00041517248610034585, 0.00041519012302160263, 0.0004152070905547589, 0.00041522429091855884, 0.0004152409383095801, 0.0004152575274929404, 0.00041527318535372615, 0.00041528852307237685, 0.0004153033660259098, 0.00041531785973347723, 0.0004153320915065706, 0.0004153470217715949, 0.00041536171920597553, 0.00041537609649822116, 0.00041539003723300993, 0.0004154036578256637, 0.00041541719110682607, 0.0004154297348577529, 0.0004154425114393234, 0.0004154558409936726, 0.00041546940337866545, 0.0004154832276981324, 0.0004154968773946166, 0.00041551064350642264, 0.00041552475886419415, 0.00041553901974111795, 0.00041555313509888947, 0.00041556754149496555, 0.00041558220982551575, 0.00041559539386071265, 0.00041560883983038366, 
0.0004156216746196151, 0.0004156343638896942, 0.00041564699495211244, 0.0004156598588451743, 0.0004156725772190839, 0.00041568538290448487, 0.0004156984214205295, 0.0004157109942752868, 0.00041572347981855273, 0.0004157354705967009, 0.00041574827628210187, 0.0004157612274866551, 0.0004157742077950388, 0.00041578782838769257, 0.00041580115794204175, 0.000415815447922796, 0.0004158293013460934, 0.00041584286373108625, 0.00041585648432374, 0.00041587106534279883, 0.00041588570456951857, 0.0004159006057307124, 0.00041591539047658443, 0.0004159304953645915, 0.00041594551294110715, 0.00041596053051762283, 0.0004159756063017994, 0.00041599010000936687, 0.0004160046810284257, 0.00041601952398195863, 0.0004160341923125088, 0.0004160496173426509, 0.0004160644602961838, 0.00041607898310758173, 0.0004160940588917583, 0.00041610884363763034, 0.00041612362838350236, 0.0004161381220910698, 0.0004161525866948068, 0.00041616655653342605, 0.0004161807300988585, 0.0004161951073911041, 0.00041620945557951927, 0.0004162245604675263, 0.0004162391123827547, 0.0004162541590631008, 0.00041626940947026014, 0.0004162849218118936, 0.0004163005796726793, 0.00041631615022197366, 0.00041633195360191166, 0.00041634758235886693, 0.0004163631529081613, 0.000416378490626812, 0.0004163942357990891, 0.0004164096899330616, 0.000416424561990425, 0.0004164396959822625, 0.00041645500459708273, 0.0004164704296272248, 0.00041648573824204504, 0.00041649999911896884, 0.0004165141726844013, 0.00041652837535366416, 0.00041654164670035243, 0.00041655104723758996, 0.0004165602149441838, 0.00041656941175460815, 0.0004165783175267279, 0.0004165870777796954, 0.00041659551789052784, 0.00041660357965156436, 0.00041661199065856636, 0.00041662045987322927, 0.0004166289872955531, 0.00041663734009489417, 0.0004166460712440312, 0.00041665451135486364, 0.0004166627477388829, 0.00041667124605737627, 0.0004166799772065133, 0.00041668873745948076, 0.000416697992477566, 0.00041670750943012536, 0.00041671705548651516, 0.0004167275328654796, 0.00041673798114061356, 0.0004167488368693739, 0.0004167594597674906, 0.0004167701117694378, 0.000416780385421589, 0.0004167905426584184, 0.00041680046706460416, 0.00041681030415929854, 0.00041681999573484063, 0.0004168295126874, 0.00041683894232846797, 0.0004168480809312314, 0.00041685826727189124, 0.00041686822078190744, 0.00041687782504595816, 0.0004168873419985175, 0.0004168967716395855, 0.00041690628859214485, 0.0004169153398834169, 0.00041692485683597624, 0.0004169341700617224, 0.00041694333776831627, 0.0004169525927864015, 0.0004169614112470299, 0.0004169701423961669, 0.00041697872802615166, 0.00041698719724081457, 0.0004169955791439861, 0.0004170041938778013, 0.00041701263398863375, 0.00041702151065692306, 0.00041703006718307734, 0.0004170387692283839, 0.00041704715113155544, 0.00041705576586537063, 0.0004170646716374904, 0.0004170735483057797, 0.0004170823667664081, 0.0004170913016423583, 0.0004171002365183085, 0.0004171093460172415, 0.0004171182808931917, 0.00041712773963809013, 0.0004171373730059713, 0.0004171472101006657, 0.0004171571636106819, 0.00041716720443218946, 0.00041717669228091836, 0.0004171868786215782, 0.0004171967157162726, 0.0004172067856416106, 0.00041721653542481363, 0.000417226052377373, 0.0004172353073954582, 0.00041724476614035666, 0.00041725425398908556, 0.00041726347990334034, 0.000417273142375052, 0.00041728263022378087, 0.0004172920307610184, 0.00041730134398676455, 0.00041730995872057974, 0.0004173190682195127, 0.000417327624745667, 0.00041733612306416035, 0.00041734465048648417, 
0.0004173530323896557, 0.0004173619963694364, 0.00041737130959518254, 0.00041738050640560687, 0.00041738987783901393, 0.00041739968582987785, 0.0004174097557552159, 0.0004174193018116057, 0.00041742788744159043, 0.00041743647307157516, 0.0004174451169092208, 0.00041745422640815377, 0.00041746263741515577, 0.0004174685454927385, 0.0004174744535703212, 0.00041748068179003894, 0.0004174866480752826, 0.0004174927016720176, 0.00041749849333427846, 0.0004175044596195221, 0.00041751080425456166, 0.00041751706157810986, 0.0004175232315901667, 0.0004175296635366976, 0.00041753597906790674, 0.00041754209087230265, 0.00041754799894988537, 0.00041755379061214626, 0.0004175592621322721, 0.00041756502469070256, 0.000417570787249133, 0.0004175764915999025, 0.0004175822832621634, 0.0004175879876129329, 0.00041759421583265066, 0.00041760000749491155, 0.0004176056827418506, 0.0004176109505351633, 0.0004176162474323064, 0.0004176215152256191, 0.00041762657929211855, 0.0004176318470854312, 0.00041763667832128704, 0.0004176420334260911, 0.0004176472721155733, 0.0004176523652859032, 0.00041765725472941995, 0.0004176623187959194, 0.0004176674992777407, 0.0004176729707978666, 0.0004176786169409752, 0.00041768455412238836, 0.0004176911897957325, 0.0004176974471192807, 0.0004177035007160157, 0.0004177094961050898, 0.00041771584074012935, 0.00041772276745177805, 0.0004177296650595963, 0.0004177362425252795, 0.00041774267447181046, 0.0004177491646260023, 0.0004177556256763637, 0.00041776199941523373, 0.000417768198531121, 0.00041777422302402556, 0.0004177805967628956, 0.0004177868540864438, 0.0004177932278253138, 0.0004177997470833361, 0.0004178061499260366, 0.00041781258187256753, 0.0004178188682999462, 0.00041782480548135936, 0.00041783080087043345, 0.00041783685446716845, 0.0004178430244792253, 0.0004178488743491471, 0.00041785460780374706, 0.0004178602830506861, 0.00041786610381677747, 0.00041787198279052973, 0.00041787762893363833, 0.00041788347880356014, 0.0004178894159849733, 0.00041789512033574283, 0.0004179009119980037, 0.00041790693649090827, 0.000417913164710626, 0.0004179190727882087, 0.0004179249226581305, 0.00041793068521656096, 0.0004179365059826523, 0.0004179425595793873, 0.00041794838034547865, 0.0004179540846962482, 0.0004179601382929832, 0.00041796622099354863, 0.00041797204175964, 0.00041797765879891813, 0.0004179837414994836, 0.00041798973688855767, 0.0004179959651082754, 0.00041800207691267133, 0.0004180078976787627, 0.00041801415500231087, 0.0004180201794952154, 0.00041802600026130676, 0.00041803179192356765, 0.0004180374089628458, 0.0004180429968982935, 0.0004180483228992671, 0.00041805353248491883, 0.0004180571995675564, 0.0004180606920272112, 0.00041806400986388326, 0.00041806770605035126, 0.0004180713731329888, 0.0004180751566309482, 0.00041807888192124665, 0.0004180824616923928, 0.00041808627429418266, 0.00041808985406532884, 0.0004180936375632882, 0.0004180971591267735, 0.00041810079710558057, 0.00041810431866906583, 0.0004181078402325511, 0.00041811136179603636, 0.00041811520350165665, 0.0004181190743111074, 0.0004181230906397104, 0.0004181270196568221, 0.00041813080315478146, 0.0004181346739642322, 0.00041813822463154793, 0.0004181418044026941, 0.0004181452968623489, 0.00041814870201051235, 0.0004181521071586758, 0.00041815536678768694, 0.0004181586264166981, 0.0004181619151495397, 0.00041816566954366863, 0.000418169453041628, 0.0004181732074357569, 0.0004181767872069031, 0.0004181804833933711, 0.0004181842668913305, 0.0004181880794931203, 0.0004181921249255538, 0.0004181958211120218, 
0.0004182000120636076, 0.0004182042321190238, 0.00041820830665528774, 0.00041821241029538214, 0.00041821657214313745, 0.0004182207630947232, 0.00041822492494247854, 0.00041822902858257294, 0.00041823313222266734, 0.0004182373231742531, 0.0004182413686066866, 0.000418245472246781, 0.0004182494303677231, 0.00041825344669632614, 0.0004182574339210987, 0.00041826150845736265, 0.0004182654374744743, 0.0004182691336609423, 0.0004182728298474103, 0.0004182766133453697, 0.0004182805132586509, 0.000418284471379593, 0.0004182883712928742, 0.00041829224210232496, 0.0004182956472504884, 0.0004182989941909909, 0.0004183024284429848, 0.0004183059500064701, 0.00041830941336229444, 0.00041831290582194924, 0.00041831625276245177, 0.0004183197161182761, 0.00041832306305877864, 0.00041832655551843345, 0.00041833013528957963, 0.0004183338023722172, 0.0004183374694548547, 0.0004183411365374923, 0.0004183451528660953, 0.0004183494602330029, 0.0004183536220807582, 0.0004183576093055308, 0.0004183615674264729, 0.00041836549644358456, 0.00041836939635686576, 0.00041837309254333377, 0.0004183767596259713, 0.00041838004835881293, 0.0004183833079878241, 0.00041838656761683524, 0.0004183898854535073, 0.00041839320329017937, 0.0004183965502306819, 0.0004183999262750149, 0.00041840344783850014, 0.0004184068529866636, 0.00041841031634248793, 0.00041841380880214274, 0.00041841735946945846, 0.00041842085192911327, 0.00041842428618110716, 0.000418426760006696, 0.0004184292920399457, 0.00041843182407319546, 0.0004184342687949538, 0.00041843680082820356, 0.0004184394492767751, 0.0004184419522061944, 0.00041844468796625733, 0.0004184475401416421, 0.0004184502176940441, 0.0004184529825579375, 0.00041845577652566135, 0.0004184584831818938, 0.0004184611316304654, 0.00041846372187137604, 0.00041846634121611714, 0.0004184689314570278, 0.00041847157990559936, 0.0004184742283541709, 0.000418476847698912, 0.00041847946704365313, 0.00041848199907690287, 0.0004184845311101526, 0.00041848706314340234, 0.0004184897697996348, 0.000418492330936715, 0.00041849480476230383, 0.0004184973076917231, 0.00041849998524412513, 0.0004185024299658835, 0.00041850487468764186, 0.0004185072029940784, 0.00041850958950817585, 0.00041851200512610376, 0.0004185144789516926, 0.00041851718560792506, 0.000418519921367988, 0.00041852259892039, 0.00041852524736896157, 0.0004185278667137027, 0.00041853057336993515, 0.0004185332218185067, 0.00041853589937090874, 0.00041853857692331076, 0.00041854139999486506, 0.0004185442812740803, 0.00041854724986478686, 0.0004185501020401716, 0.0004185530124232173, 0.0004185557772871107, 0.00041855848394334316, 0.0004185610741842538, 0.0004185635771136731, 0.00041856616735458374, 0.00041856919415295124, 0.0004185721918474883, 0.00041857524774968624, 0.0004185782454442233, 0.00041858121403492987, 0.000418584153521806, 0.0004185869183856994, 0.00041858977056108415, 0.00041859259363263845, 0.0004185954458080232, 0.0004185982106719166, 0.00041860106284730136, 0.00041860397323034704, 0.0004186068254057318, 0.00041860976489260793, 0.0004186125297565013, 0.0004186152364127338, 0.00041861782665364444, 0.00041862044599838555, 0.00041862326906993985, 0.00041862594662234187, 0.00041862859507091343, 0.00041863112710416317, 0.000418633921071887, 0.00041863671503961086, 0.00041863942169584334, 0.00041864215745590627, 0.0004186447476968169, 0.0004186473088338971, 0.00041864989907480776, 0.0004186525475233793, 0.00041865516686812043, 0.0004186579608358443, 0.0004186608421150595, 0.00041866383980959654, 0.00041866698302328587, 
0.0004186699807178229, 0.00041867312393151224, 0.00041867615072987974, 0.0004186791193205863, 0.00041868211701512337, 0.0004186850565019995, 0.00041868785046972334, 0.00041869061533361673, 0.00041869323467835784, 0.0004186959413345903, 0.00041869780397973955, 0.0004186996375210583, 0.0004187014128547162, 0.00041870318818837404, 0.0004187050217296928, 0.0004187067679595202, 0.00041870857239700854, 0.0004187104641459882, 0.0004187125014141202, 0.0004187145677860826, 0.0004187167214695364, 0.00041871884604915977, 0.000418721028836444, 0.0004187232116237283, 0.000418725423514843, 0.0004187275771982968, 0.00041872975998558104, 0.0004187318845652044, 0.0004187340091448277, 0.00041873607551679015, 0.0004187381127849221, 0.00041874003363773227, 0.0004187420126982033, 0.00041874402086250484, 0.0004187461163382977, 0.00041874818271026015, 0.00041875019087456167, 0.0004187521990388632, 0.0004187541489955038, 0.00041875606984831393, 0.0004187579615972936, 0.0004187598533462733, 0.000418761745095253, 0.0004187636368442327, 0.00041876541217789054, 0.0004187671875115484, 0.00041876905015669763, 0.00041877091280184686, 0.0004187727754469961, 0.0004187746671959758, 0.00041877650073729455, 0.00041877833427861333, 0.00041878019692376256, 0.0004187821177765727, 0.0004187842132523656, 0.0004187863669358194, 0.00041878840420395136, 0.0004187903250567615, 0.0004187922750134021, 0.00041879419586621225, 0.00041879608761519194, 0.0004187979211565107, 0.00041879969649016857, 0.000418801442719996, 0.000418803101638332, 0.0004188047896604985, 0.0004188064194750041, 0.00041880810749717057, 0.00041880988283082843, 0.000418811512645334, 0.0004188133170828223, 0.000418815208831802, 0.0004188171587884426, 0.00041881907964125276, 0.00041882091318257153, 0.0004188228340353817, 0.0004188247839920223, 0.0004188267339486629, 0.0004188288003206253, 0.00041883086669258773, 0.0004188329039607197, 0.0004188349994365126, 0.00041883697849698365, 0.00041883898666128516, 0.00041884093661792576, 0.0004188429447822273, 0.0004188450111541897, 0.0004188470484223217, 0.00041884902748279274, 0.0004188509483356029, 0.00041885292739607394, 0.00041885487735271454, 0.00041885688551701605, 0.00041885903920046985, 0.00041886113467626274, 0.0004188632301520556, 0.00041886544204317033, 0.00041886759572662413, 0.00041886992403306067, 0.0004188722523394972, 0.0004188744933344424, 0.00041887667612172663, 0.00041887882980518043, 0.0004188808088656515, 0.0004188827588222921, 0.0004188846505712718, 0.00041888654232025146, 0.000418888550484553, 0.0004188905004411936, 0.0004188924503978342], "z1_traj": [0.0, 0.0011840410297736526, 0.0025253945495933294, 0.0038736993446946144, 0.005032865796238184, 0.0061577302403748035, 0.007267105858772993, 0.008398055098950863, 0.009550531394779682, 0.010699857957661152, 0.011829786002635956, 0.012945624068379402, 0.014042582362890244, 0.015124203637242317, 0.0161961130797863, 0.0172570887953043, 0.018299678340554237, 0.019326068460941315, 0.020345699042081833, 0.021360578015446663, 0.022365238517522812, 0.023353733122348785, 0.024321842938661575, 0.025269553065299988, 0.026207784190773964, 0.027131132781505585, 0.02803272195160389, 0.02892196737229824, 0.02978980541229248, 0.03063950315117836, 0.031473275274038315, 0.03228963539004326, 0.03309643268585205, 0.03388562425971031, 0.03465892747044563, 0.03541608899831772, 0.03616441786289215, 0.0369083508849144, 0.03763405978679657, 0.03834015130996704, 0.03902462497353554, 0.03969614952802658, 0.040351852774620056, 0.04099533334374428, 0.041625604033470154, 
0.04224511981010437, 0.04284805431962013, 0.043435823172330856, 0.04401356726884842, 0.044574569910764694, 0.04511837661266327, 0.04564691707491875, 0.046160660684108734, 0.04666358232498169, 0.047157637774944305, 0.0476384162902832, 0.048108071088790894, 0.04856707528233528, 0.049011386930942535, 0.0494438000023365, 0.049865301698446274, 0.05027466267347336, 0.05067004635930061, 0.05105483531951904, 0.051435235887765884, 0.05180349200963974, 0.052157770842313766, 0.052504763007164, 0.05283918231725693, 0.05315948277711868, 0.05346861109137535, 0.053764648735523224, 0.054050467908382416, 0.05432378873229027, 0.05458669364452362, 0.054838892072439194, 0.05508304759860039, 0.05531632527709007, 0.05553968623280525, 0.05575258284807205, 0.055956486612558365, 0.05615277960896492, 0.05634153634309769, 0.05652344971895218, 0.056698258966207504, 0.05686540529131889, 0.05702289566397667, 0.057173557579517365, 0.05731744319200516, 0.05745533108711243, 0.05758700519800186, 0.05771408602595329, 0.05783924087882042, 0.0579586997628212, 0.0580747090280056, 0.0581856369972229, 0.05829232931137085, 0.05839411914348602, 0.05849117040634155, 0.05858491733670235, 0.05867389217019081, 0.058733921498060226, 0.05879243463277817, 0.05885010585188866, 0.058905888348817825, 0.058959826827049255, 0.059011418372392654, 0.059061113744974136, 0.05910912901163101, 0.05915490537881851, 0.05920036509633064, 0.05924432724714279, 0.059287089854478836, 0.059328414499759674, 0.05936889722943306, 0.059408389031887054, 0.059446968138217926, 0.05948414281010628, 0.05951974540948868, 0.05955561622977257, 0.05959063395857811, 0.05962411314249039, 0.0596565343439579, 0.05968855693936348, 0.05971977487206459, 0.05975046008825302, 0.05978124588727951, 0.059811338782310486, 0.05984089523553848, 0.05986983701586723, 0.0598984993994236, 0.05992643162608147, 0.059953343123197556, 0.05997983738780022, 0.06000537797808647, 0.06003039330244064, 0.06005506590008736, 0.06007944792509079, 0.06010301411151886, 0.06012659892439842, 0.060150422155857086, 0.06017410010099411, 0.06019741669297218, 0.060220204293727875, 0.06024254485964775, 0.06026500463485718, 0.06028682366013527, 0.060308076441287994, 0.060329195111989975, 0.06035006418824196, 0.060370683670043945, 0.060391057282686234, 0.060410864651203156, 0.06043055281043053, 0.06045064702630043, 0.06047043949365616, 0.060489725321531296, 0.060508497059345245, 0.060527484863996506, 0.06054619327187538, 0.060564663261175156, 0.06058335304260254, 0.06060178205370903, 0.060619767755270004, 0.060637280344963074, 0.06065461412072182, 0.06067183241248131, 0.06068893522024155, 0.0607057549059391, 0.06072281301021576, 0.060739316046237946, 0.06075553968548775, 0.06077158823609352, 0.06078724190592766, 0.06080217659473419, 0.060817211866378784, 0.060831572860479355, 0.06084576994180679, 0.060859523713588715, 0.060873158276081085, 0.06088662147521973, 0.06090056151151657, 0.06091419607400894, 0.06092768907546997, 0.06094112992286682, 0.06095416843891144, 0.060966912657022476, 0.06097962707281113, 0.06099236384034157, 0.06100503355264664, 0.06101766601204872, 0.06103060021996498, 0.06104329600930214, 0.061055880039930344, 0.06106828525662422, 0.061080846935510635, 0.061093222349882126, 0.06110525131225586, 0.06111697480082512, 0.061128560453653336, 0.061140403151512146, 0.06114848330616951, 0.06115648150444031, 0.06116443872451782, 0.06117257848381996, 0.06118099391460419, 0.06118933483958244, 0.061197564005851746, 0.06120561808347702, 0.06121359020471573, 0.06122143194079399, 0.061229120939970016, 
0.06123684346675873, 0.061244480311870575, 0.0612519346177578, 0.06125926598906517, 0.06126657500863075, 0.06127368286252022, 0.0612807460129261, 0.061287831515073776, 0.06129486486315727, 0.06130198389291763, 0.061309099197387695, 0.061316270381212234, 0.06132359802722931, 0.06133095175027847, 0.06133817136287689, 0.06134568527340889, 0.06135319173336029, 0.06136075407266617, 0.06136823445558548, 0.061375681310892105, 0.06138298660516739, 0.06139020621776581, 0.06139729544520378, 0.06140430271625519, 0.06141132116317749, 0.061418190598487854, 0.06142489239573479, 0.061431415379047394, 0.06143800541758537, 0.061444468796253204, 0.061450887471437454, 0.06145739182829857, 0.061463989317417145, 0.06147055700421333, 0.06147712469100952, 0.06148368865251541, 0.06149012967944145, 0.06149635091423988, 0.06150257959961891, 0.061508726328611374, 0.06151473894715309, 0.06152065843343735, 0.061526499688625336, 0.06153230369091034, 0.06153813377022743, 0.06154387816786766, 0.061549581587314606, 0.061555370688438416, 0.061561211943626404, 0.06156694516539574, 0.06157272681593895, 0.0615784116089344, 0.06158386543393135, 0.06158922240138054, 0.061594702303409576, 0.0616002157330513, 0.06160556524991989, 0.06161084026098251, 0.06161617860198021, 0.06162154674530029, 0.06162720173597336, 0.061632879078388214, 0.06163850799202919, 0.0616440586745739, 0.061649564653635025, 0.06165500357747078, 0.0616605281829834, 0.06166632100939751, 0.061672110110521317, 0.06167776137590408, 0.061683446168899536, 0.06168906018137932, 0.06169462949037552, 0.061700090765953064, 0.06170530617237091, 0.06171061843633652, 0.061715878546237946, 0.061721138656139374, 0.06172654777765274, 0.06173177435994148, 0.061736952513456345, 0.06174207106232643, 0.061747290194034576, 0.06175241991877556, 0.06175753474235535, 0.06176265701651573, 0.06176783889532089, 0.06177294999361038, 0.06177796795964241, 0.061781611293554306, 0.061785247176885605, 0.06178894266486168, 0.06179269403219223, 0.061796437948942184, 0.06180015206336975, 0.061803922057151794, 0.06180768087506294, 0.06181152164936066, 0.06181533634662628, 0.061819374561309814, 0.06182338297367096, 0.06182732805609703, 0.06183145195245743, 0.06183554232120514, 0.06183949112892151, 0.061843372881412506, 0.06184712052345276, 0.061850808560848236, 0.06185442581772804, 0.06185796856880188, 0.06186140328645706, 0.06186477094888687, 0.0618680939078331, 0.061871375888586044, 0.06187457963824272, 0.06187771260738373, 0.06188086420297623, 0.06188395246863365, 0.061887044459581375, 0.0618901401758194, 0.06189316138625145, 0.06189608946442604, 0.06189902126789093, 0.06190191209316254, 0.06190478429198265, 0.061907682567834854, 0.06191059201955795, 0.061913520097732544, 0.06191644445061684, 0.06191932037472725, 0.06192220374941826, 0.06192508712410927, 0.06192798539996147, 0.06193108856678009, 0.061934273689985275, 0.061937589198350906, 0.06194096431136131, 0.061944302171468735, 0.06194762513041496, 0.0619509331882, 0.061954136937856674, 0.06195727363228798, 0.061960432678461075, 0.061963509768247604, 0.06196651980280876, 0.06196950003504753, 0.061972666531801224, 0.061975784599781036, 0.06197896972298622, 0.06198209896683693, 0.061985161155462265, 0.061988212168216705, 0.06199119985103607, 0.061994053423404694, 0.06199700012803078, 0.061999931931495667, 0.06200282275676727, 0.06200568377971649, 0.06200854852795601, 0.062011465430259705, 0.06201431527733803, 0.06201712787151337, 0.062019940465688705, 0.06202273815870285, 0.06202550232410431, 0.06202828139066696, 0.06203112006187439, 
0.062033988535404205, 0.062036801129579544, 0.062039557844400406, 0.0620422437787056, 0.06204492226243019, 0.06204770505428314, 0.062050435692071915, 0.06205315887928009, 0.06205587089061737, 0.06205855682492256, 0.062061160802841187, 0.062063831835985184, 0.06206649914383888, 0.06206914782524109, 0.062071800231933594, 0.06207437440752983, 0.062076956033706665, 0.062079571187496185, 0.0620821937918663, 0.0620848648250103, 0.06208762526512146, 0.062090497463941574, 0.06209253892302513, 0.06209457293152809, 0.06209660321474075, 0.06209857016801834, 0.06210053339600563, 0.06210244819521904, 0.062104396522045135, 0.06210637092590332, 0.062108296900987625, 0.062110282480716705, 0.06211225688457489, 0.06211424991488457, 0.062116194516420364, 0.06211807206273079, 0.06211990863084793, 0.062121763825416565, 0.062123604118824005, 0.06212538853287697, 0.062127143144607544, 0.062128882855176926, 0.06213056668639183, 0.06213223189115524, 0.06213388592004776, 0.062135595828294754, 0.06213734671473503, 0.062139131128787994, 0.06214088946580887, 0.06214267015457153, 0.0621444433927536, 0.062146179378032684, 0.06214789301156998, 0.062149595469236374, 0.062151312828063965, 0.06215314194560051, 0.06215495243668556, 0.06215676665306091, 0.06215858832001686, 0.06216039881110191, 0.06216222792863846, 0.06216401606798172, 0.06216580048203468, 0.06216756999492645, 0.06216932833194733, 0.062171030789613724, 0.06217269226908684, 0.06217433139681816, 0.06217595934867859, 0.062177594751119614, 0.06217924878001213, 0.06218091771006584, 0.06218259036540985, 0.06218423694372177, 0.062185872346162796, 0.06218745931982994, 0.06218911334872246, 0.06219075620174408, 0.06219238042831421, 0.06219394505023956, 0.062195565551519394, 0.06219715252518654, 0.062198739498853683, 0.06220031902194023, 0.062201857566833496, 0.06220341473817825, 0.062205009162425995, 0.062206611037254333, 0.06220826134085655, 0.06220991909503937, 0.06221157684922218, 0.06221326068043709, 0.062214963138103485, 0.062216635793447495, 0.062218230217695236, 0.06221979111433029, 0.062221333384513855, 0.06222296133637428, 0.0622246116399765, 0.06222628429532051, 0.06222793832421303, 0.06222957372665405, 0.062231212854385376, 0.062232889235019684, 0.062234584242105484, 0.062236249446868896, 0.06223789229989052, 0.06223961338400841, 0.06224135681986809, 0.062243107706308365, 0.06224488839507103, 0.06224675104022026, 0.062248531728982925, 0.06225026398897171, 0.06225196272134781, 0.0622536800801754, 0.0622553750872612, 0.0622570775449276, 0.06225873902440071, 0.0622604563832283, 0.0622621551156044, 0.06226383149623871, 0.06226499751210213, 0.062266137450933456, 0.06226725876331329, 0.06226836144924164, 0.06226944923400879, 0.06227051094174385, 0.06227153539657593, 0.062272559851408005, 0.06227356940507889, 0.06227457523345947, 0.06227557733654976, 0.06227658689022064, 0.06227758154273033, 0.06227857619524002, 0.062279604375362396, 0.06228062883019447, 0.062281638383865356, 0.06228266656398773, 0.0622837133705616, 0.06228474900126457, 0.062285877764225006, 0.06228701025247574, 0.06228814274072647, 0.06228926405310631, 0.062290385365486145, 0.062291454523801804, 0.06229252740740776, 0.062293585389852524, 0.06229464337229729, 0.06229572370648384, 0.062296804040670395, 0.06229786574840546, 0.062298912554979324, 0.06229998543858528, 0.06230102851986885, 0.06230203062295914, 0.06230301782488823, 0.06230402737855911, 0.06230505555868149, 0.06230607256293297, 0.062307097017765045, 0.06230810657143593, 0.0623091459274292, 0.06231020390987396, 0.06231124326586723, 
0.06231227517127991, 0.06231329217553139, 0.062314290553331375, 0.06231527030467987, 0.06231626868247986, 0.062317270785570145, 0.06231829524040222, 0.062319315969944, 0.062320344150066376, 0.06232137233018875, 0.06232239305973053, 0.062323395162820816, 0.06232438609004021, 0.0623253732919693, 0.062326353043317795, 0.06232736259698868, 0.06232840195298195, 0.06232943758368492, 0.06233050301671028, 0.06233154237270355, 0.062332604080438614, 0.06233367323875427, 0.06233474984765053, 0.0623357892036438, 0.062336813658475876, 0.06233780458569527, 0.062338802963495255, 0.06233978644013405, 0.06234075129032135, 0.06234169751405716, 0.062342673540115356, 0.06234364211559296, 0.06234460696578026, 0.062345609068870544, 0.06234658509492874, 0.06234753876924515, 0.062348488718271255, 0.06234944611787796, 0.062350399792194366, 0.062351349741220474, 0.062352292239665985, 0.062353216111660004, 0.06235416978597641, 0.06235513836145401, 0.0623561330139637, 0.06235715374350548, 0.06235819682478905, 0.062359243631362915, 0.062360335141420364, 0.06236138194799423, 0.06236240640282631, 0.06236344203352928, 0.062364496290683746, 0.062365587800741196, 0.06236664950847626, 0.06236737594008446, 0.062368109822273254, 0.062368862330913544, 0.06236960366368294, 0.062370359897613525, 0.062371112406253815, 0.062371861189603806, 0.0623725987970829, 0.06237331032752991, 0.06237402930855751, 0.06237475574016571, 0.06237548589706421, 0.06237619370222092, 0.06237688288092613, 0.06237756833434105, 0.06237827241420746, 0.062378961592912674, 0.062379635870456696, 0.06238030269742012, 0.06238096207380295, 0.06238161027431488, 0.0623822845518589, 0.06238294020295143, 0.06238358095288277, 0.06238424405455589, 0.06238490343093872, 0.06238558888435364, 0.06238625943660736, 0.06238694489002228, 0.06238763406872749, 0.0623883381485939, 0.06238903850317001, 0.062389716506004333, 0.062390394508838654, 0.06239109858870506, 0.06239179149270058, 0.06239249184727669, 0.0623931884765625, 0.062393903732299805, 0.062394652515649796, 0.062395401298999786, 0.06239612028002739, 0.062396854162216187, 0.06239762529730797, 0.06239840015769005, 0.062399156391620636, 0.06239987909793854, 0.062400586903095245, 0.06240130215883255, 0.06240200251340866, 0.06240268796682358, 0.062403373420238495, 0.06240405514836311, 0.06240473687648773, 0.06240541487932205, 0.062406107783317566, 0.06240680813789368, 0.062407512217760086, 0.062408216297626495, 0.062408916652202606, 0.06240961328148842, 0.06241031363606453, 0.062411025166511536, 0.06241174787282944, 0.06241245195269585, 0.06241314858198166, 0.06241384893655777, 0.062414560467004776, 0.062415264546871185, 0.0624159500002861, 0.06241663917899132, 0.06241732835769653, 0.062418002635240555, 0.06241869926452637, 0.06241939961910248, 0.062420111149549484, 0.062420804053545, 0.06242149695754051, 0.06242218613624573, 0.06242285668849945, 0.06242353096604347, 0.062424179166555405, 0.06242481991648674, 0.06242549419403076, 0.06242614611983299, 0.06242677941918373, 0.062427420169115067, 0.06242809072136879, 0.06242877617478371, 0.062429483979940414, 0.06243017688393593, 0.06243086978793144, 0.062431566417217255, 0.06243225932121277, 0.06243292987346649, 0.06243358924984932, 0.062434256076812744, 0.06243494153022766, 0.06243560090661049, 0.06243625655770302, 0.06243671849370003, 0.06243718042969704, 0.06243764981627464, 0.062438126653432846, 0.06243859604001045, 0.06243906915187836, 0.06243953853845596, 0.06244000792503357, 0.062440477311611176, 0.06244095042347908, 0.062441449612379074, 0.062441930174827576, 
0.062442414462566376, 0.06244288757443428, 0.06244335323572159, 0.062443822622299194, 0.062444280833005905, 0.062444739043712616, 0.06244519352912903, 0.062445636838674545, 0.06244606897234917, 0.06244651973247528, 0.06244697794318199, 0.062447432428598404, 0.062447868287563324, 0.06244830787181854, 0.062448740005493164, 0.06244916841387749, 0.06244958937168121, 0.06245000287890434, 0.06245043873786926, 0.06245088577270508, 0.0624513253569603, 0.06245175749063492, 0.06245219334959984, 0.06245262548327446, 0.06245305389165878, 0.0624535009264946, 0.062453947961330414, 0.06245441362261772, 0.06245488300919533, 0.06245534494519234, 0.06245580315589905, 0.06245626509189606, 0.06245672330260277, 0.062457188963890076, 0.06245765462517738, 0.06245811656117439, 0.062458597123622894, 0.0624590702354908, 0.06245953589677811, 0.06245999410748482, 0.06246044859290123, 0.062460895627737045, 0.062461357563734055, 0.06246180832386017, 0.062462255358695984, 0.062462691217660904, 0.06246313080191612, 0.06246357411146164, 0.06246402487158775, 0.06246446818113327, 0.06246490404009819, 0.062465324997901917, 0.062465742230415344, 0.06246617063879967, 0.06246662139892578, 0.0624670684337616, 0.06246751919388771, 0.06246795877814293, 0.06246841326355934, 0.062468867748975754, 0.062469325959682465, 0.062469784170389175, 0.062470246106386185, 0.062470708042383194, 0.062471166253089905, 0.06247163563966751, 0.06247211620211601, 0.062472593039274216, 0.062473054975271225, 0.062473513185977936, 0.062473978847265244, 0.06247445195913315, 0.062474921345710754, 0.062475379556417465, 0.06247582286596298, 0.0624762624502182, 0.06247669458389282, 0.06247713044285774, 0.06247756630182266, 0.06247800588607788, 0.0624784454703331, 0.062478866428136826, 0.062479279935359955, 0.06247968226671219, 0.062480080872774124, 0.06248047575354576, 0.0624808669090271, 0.06248126178979874, 0.06248153746128082, 0.06248181313276291, 0.062482092529535294, 0.06248236447572708, 0.062482643872499466, 0.06248294562101364, 0.06248323991894722, 0.06248355284333229, 0.06248386576771736, 0.06248417869210243, 0.0624844953417778, 0.06248480826616287, 0.06248510628938675, 0.06248540058732033, 0.062485694885253906, 0.06248599663376808, 0.06248629465699196, 0.06248660013079643, 0.06248690187931061, 0.062487199902534485, 0.06248749792575836, 0.06248779594898224, 0.06248808652162552, 0.0624883733689785, 0.06248866021633148, 0.062488947063684464, 0.062489233911037445, 0.062489524483680725, 0.0624898299574852, 0.062490127980709076, 0.06249043717980385, 0.06249074265360832, 0.062491048127412796, 0.06249134987592697, 0.06249166280031204, 0.06249197944998741, 0.06249230355024338, 0.062492627650499344, 0.06249294430017471, 0.06249326094985008, 0.06249357759952545, 0.062493886798620224, 0.0624941922724247, 0.062494490295648575, 0.06249478831887245, 0.06249508261680603, 0.0624954029917717, 0.06249571591615677, 0.062496036291122437, 0.06249634921550751, 0.06249665841460228, 0.062496960163116455, 0.06249725818634033, 0.06249754875898361, 0.06249786168336868, 0.06249816715717316, 0.06249847263097763, 0.062498774379491806, 0.062499068677425385, 0.06249936297535896, 0.06249966099858284, 0.06249995902180672, 0.06250026077032089, 0.06250056624412537, 0.06250086426734924, 0.06250116974115372, 0.06250148266553879, 0.06250179558992386, 0.06250210851430893, 0.0625024139881134, 0.06250271946191788, 0.06250301748514175, 0.06250330060720444, 0.06250359117984772, 0.0625038743019104, 0.06250415742397308, 0.06250444799661636, 0.06250474601984024, 0.06250503659248352, 
0.0625053346157074, 0.06250563263893127, 0.06250592321157455, 0.06250621378421783, 0.06250650435686111, 0.06250680238008499, 0.06250709295272827, 0.06250738352537155, 0.06250768154859543, 0.0625079870223999, 0.06250831484794617, 0.06250863522291183, 0.0625089555978775, 0.06250927597284317, 0.06250958889722824, 0.06250990182161331, 0.06251021474599838, 0.06251052021980286, 0.06251082569360733, 0.06251112371683121, 0.06251142174005508, 0.0625116303563118, 0.06251183897256851, 0.06251204013824463, 0.06251224130392075, 0.06251243501901627, 0.06251262873411179, 0.0625128224492073, 0.06251302361488342, 0.06251323968172073, 0.06251346319913864, 0.06251369416713715, 0.06251391768455505, 0.06251414865255356, 0.06251439452171326, 0.06251463294029236, 0.06251486390829086, 0.06251508742570877, 0.06251531094312668, 0.06251553446054459, 0.0625157505273819, 0.06251595914363861, 0.06251616775989532, 0.06251637637615204, 0.06251658499240875, 0.06251680105924606, 0.06251702457666397, 0.06251724064350128, 0.06251745671033859, 0.0625176653265953, 0.06251787394285202, 0.06251808255910873, 0.06251829117536545, 0.06251849234104156, 0.06251868605613708, 0.0625188797712326, 0.06251908093690872, 0.06251928210258484, 0.06251948326826096, 0.06251968443393707, 0.06251989305019379, 0.0625201091170311, 0.06252032518386841, 0.06252054125070572, 0.06252075731754303, 0.06252098083496094, 0.06252121180295944, 0.06252144277095795, 0.06252166628837585, 0.06252189725637436, 0.06252212822437286, 0.06252235174179077, 0.06252257525920868, 0.06252279132604599, 0.0625229999423027, 0.06252320855855942, 0.06252341717481613, 0.06252362579107285, 0.06252382695674896, 0.06252402812242508, 0.0625242292881012, 0.06252443790435791, 0.06252465397119522, 0.06252487748861313, 0.06252509355545044, 0.06252530217170715, 0.06252551078796387, 0.06252571940422058, 0.0625259205698967, 0.06252612173557281, 0.06252632290124893, 0.06252653151750565, 0.06252674013376236, 0.06252694875001907, 0.06252715736627579, 0.0625273659825325, 0.06252757459878922, 0.06252779811620712, 0.06252802163362503, 0.06252824515104294, 0.06252846866846085, 0.06252869218587875, 0.06252890825271606, 0.06252911686897278, 0.06252932548522949, 0.0625295341014862, 0.06252974271774292, 0.06252995878458023, 0.06253018975257874, 0.06253042817115784, 0.06253065913915634, 0.06253089010715485, 0.06253112107515335, 0.06253134459257126, 0.06253156810998917, 0.06253178417682648, 0.06253199279308319, 0.0625322088599205, 0.06253242492675781, 0.06253264099359512, 0.06253285706043243], "eta_traj": [0.10000000149011612, 0.08280253410339355, 0.06553009152412415, 0.04834317788481712, 0.030164392665028572, 0.012252301909029484, -0.004859062843024731, -0.02140992507338524, -0.037645574659109116, -0.05339112877845764, -0.06858471035957336, -0.08329132944345474, -0.09750113636255264, -0.11132287979125977, -0.12474934011697769, -0.13780316710472107, -0.15044303238391876, -0.16268567740917206, -0.17462487518787384, -0.18627627193927765, -0.19759711623191833, -0.20855562388896942, -0.21910536289215088, -0.22925786674022675, -0.239059716463089, -0.24851270020008087, -0.2575928270816803, -0.2663535475730896, -0.2747405469417572, -0.2828056216239929, -0.29056504368782043, -0.29800161719322205, -0.30516812205314636, -0.3120308220386505, -0.318602055311203, -0.324886679649353, -0.3309522867202759, -0.3368242681026459, -0.3424215316772461, -0.34775471687316895, -0.3528182804584503, -0.35765326023101807, -0.3622581362724304, -0.3666534125804901, -0.3708416521549225, -0.3748414218425751, -0.3786386549472809, 
-0.38224518299102783, -0.38568511605262756, -0.3889400362968445, -0.3920137286186218, -0.394921213388443, -0.39766284823417664, -0.4002685844898224, -0.4027493894100189, -0.4050902724266052, -0.40731102228164673, -0.4094105660915375, -0.41138583421707153, -0.4132501780986786, -0.41501182317733765, -0.4166717529296875, -0.41822782158851624, -0.4196937382221222, -0.42108607292175293, -0.4223920702934265, -0.4236116409301758, -0.4247630536556244, -0.42583975195884705, -0.42684242129325867, -0.42777976393699646, -0.42865195870399475, -0.4294663965702057, -0.43022361397743225, -0.430930495262146, -0.43158870935440063, -0.43220290541648865, -0.43277254700660706, -0.4333012104034424, -0.43379145860671997, -0.4342470169067383, -0.4346708059310913, -0.4350653290748596, -0.43543320894241333, -0.4357752501964569, -0.43609270453453064, -0.4363854229450226, -0.43665722012519836, -0.4369087815284729, -0.43714264035224915, -0.43735918402671814, -0.4375613033771515, -0.4377514123916626, -0.437927782535553, -0.43809178471565247, -0.43824392557144165, -0.43838584423065186, -0.43851739168167114, -0.438639372587204, -0.4387533366680145, -0.43885868787765503, -0.43892771005630493, -0.43899253010749817, -0.43905389308929443, -0.43911170959472656, -0.43916618824005127, -0.43921712040901184, -0.4392649233341217, -0.43930983543395996, -0.4393519461154938, -0.4393925368785858, -0.43943074345588684, -0.4394669830799103, -0.4395013749599457, -0.4395342171192169, -0.4395654499530792, -0.43959519267082214, -0.43962350487709045, -0.43965038657188416, -0.4396761357784271, -0.43970099091529846, -0.43972423672676086, -0.4397461712360382, -0.4397672414779663, -0.4397874176502228, -0.43980681896209717, -0.43982574343681335, -0.43984392285346985, -0.439861536026001, -0.43987858295440674, -0.43989497423171997, -0.4399107098579407, -0.4399258494377136, -0.43994054198265076, -0.4399545192718506, -0.4399680197238922, -0.4399811625480652, -0.4399940073490143, -0.4400064945220947, -0.440018892288208, -0.4400310218334198, -0.440043181180954, -0.44005483388900757, -0.4400661289691925, -0.44007712602615356, -0.44008809328079224, -0.4400988519191742, -0.4401092529296875, -0.4401194453239441, -0.44012942910194397, -0.44013917446136475, -0.4401486814022064, -0.4401578903198242, -0.44016703963279724, -0.44017618894577026, -0.4401850700378418, -0.44019371271133423, -0.44020214676856995, -0.4402107298374176, -0.44021910429000854, -0.44022729992866516, -0.4402351975440979, -0.44024306535720825, -0.4402507543563843, -0.4402581453323364, -0.4402654767036438, -0.440272718667984, -0.44027990102767944, -0.4402869939804077, -0.44029414653778076, -0.4403010606765747, -0.4403078556060791, -0.44031447172164917, -0.4403209090232849, -0.44032710790634155, -0.44033342599868774, -0.44033944606781006, -0.44034531712532043, -0.44035112857818604, -0.44035688042640686, -0.44036272168159485, -0.4403684735298157, -0.4403741955757141, -0.4403797686100006, -0.4403853416442871, -0.4403907060623169, -0.44039589166641235, -0.4404010474681854, -0.4404061734676361, -0.4404112994670868, -0.4404163658618927, -0.4404216706752777, -0.440426766872406, -0.4404318034648895, -0.4404367208480835, -0.44044166803359985, -0.44044649600982666, -0.44045114517211914, -0.44045567512512207, -0.4404601752758026, -0.44046464562416077, -0.4404677748680115, -0.4404708445072174, -0.44047385454177856, -0.4404768943786621, -0.44047993421554565, -0.4404829442501068, -0.4404858648777008, -0.44048872590065, -0.44049161672592163, -0.44049450755119324, -0.4404973089694977, -0.4405001699924469, 
-0.4405030310153961, -0.4405058026313782, -0.44050851464271545, -0.44051119685173035, -0.44051384925842285, -0.44051647186279297, -0.4405191242694855, -0.4405217170715332, -0.4405243694782257, -0.44052690267562866, -0.4405294954776764, -0.4405320882797241, -0.44053468108177185, -0.4405371844768524, -0.440539687871933, -0.4405421316623688, -0.44054463505744934, -0.4405471384525299, -0.4405496418476105, -0.44055211544036865, -0.44055449962615967, -0.44055691361427307, -0.4405593276023865, -0.4405617415904999, -0.4405640959739685, -0.44056636095046997, -0.44056856632232666, -0.44057077169418335, -0.4405730068683624, -0.4405752122402191, -0.4405774474143982, -0.4405795931816101, -0.44058161973953247, -0.44058358669281006, -0.44058558344841003, -0.4405875504016876, -0.44058945775032043, -0.4405914545059204, -0.440593421459198, -0.4405953288078308, -0.44059720635414124, -0.4405990242958069, -0.4406008720397949, -0.44060271978378296, -0.4406045079231262, -0.4406062960624695, -0.44060805439949036, -0.44060981273651123, -0.4406115412712097, -0.4406132698059082, -0.4406149685382843, -0.4406166076660156, -0.44061821699142456, -0.44061988592147827, -0.44062161445617676, -0.44062331318855286, -0.44062501192092896, -0.44062671065330505, -0.4406284689903259, -0.44063034653663635, -0.440632164478302, -0.4406339228153229, -0.4406357407569885, -0.4406375288963318, -0.44063928723335266, -0.4406410753726959, -0.4406428933143616, -0.4406447410583496, -0.4406464993953705, -0.4406481981277466, -0.4406498968601227, -0.44065162539482117, -0.44065332412719727, -0.4406549632549286, -0.4406566023826599, -0.44065824151039124, -0.44065991044044495, -0.44066163897514343, -0.44066330790519714, -0.44066497683525085, -0.4406666159629822, -0.4406683146953583, -0.4406699538230896, -0.4406716227531433, -0.440673291683197, -0.44067493081092834, -0.4406765401363373, -0.44067811965942383, -0.4406792223453522, -0.44068029522895813, -0.4406813681125641, -0.44068247079849243, -0.440683513879776, -0.44068455696105957, -0.44068560004234314, -0.4406866431236267, -0.44068771600723267, -0.4406887888908386, -0.44068989157676697, -0.4406909942626953, -0.44069206714630127, -0.440693199634552, -0.44069433212280273, -0.44069546461105347, -0.4406965970993042, -0.44069766998291016, -0.4406987428665161, -0.44069981575012207, -0.44070085883140564, -0.4407018721103668, -0.4407028555870056, -0.4407038390636444, -0.4407047927379608, -0.4407057464122772, -0.44070667028427124, -0.44070762395858765, -0.44070857763290405, -0.44070956110954285, -0.44071054458618164, -0.44071152806282043, -0.44071251153945923, -0.440713495016098, -0.44071444869041443, -0.44071540236473083, -0.44071635603904724, -0.44071727991104126, -0.4407182037830353, -0.4407191276550293, -0.4407200813293457, -0.4407210052013397, -0.44072192907333374, -0.44072285294532776, -0.4407237768173218, -0.4407247304916382, -0.44072574377059937, -0.44072675704956055, -0.44072777032852173, -0.4407287836074829, -0.4407297968864441, -0.4407307803630829, -0.4407317340373993, -0.4407326877117157, -0.4407336413860321, -0.44073453545570374, -0.44073542952537537, -0.4407363533973694, -0.4407372772693634, -0.4407382309436798, -0.4407391846179962, -0.440740168094635, -0.4407412111759186, -0.44074222445487976, -0.44074320793151855, -0.4407442510128021, -0.4407452344894409, -0.4407462179660797, -0.4407471716403961, -0.4407481253147125, -0.44074904918670654, -0.44074997305870056, -0.4407508969306946, -0.4407518208026886, -0.4407527446746826, -0.44075363874435425, -0.4407544732093811, -0.44075530767440796, 
-0.4407561719417572, -0.44075700640678406, -0.4407578110694885, -0.440758615732193, -0.44075942039489746, -0.4407602548599243, -0.44076108932495117, -0.44076189398765564, -0.4407627284526825, -0.44076353311538696, -0.44076433777809143, -0.4407651424407959, -0.44076594710350037, -0.44076675176620483, -0.4407675266265869, -0.440768301486969, -0.4407690763473511, -0.44076988101005554, -0.4407707452774048, -0.44077160954475403, -0.44077247381210327, -0.4407733380794525, -0.44077396392822266, -0.4407745897769928, -0.44077521562576294, -0.4407758116722107, -0.44077640771865845, -0.4407770335674286, -0.44077765941619873, -0.44077831506729126, -0.4407789707183838, -0.44077959656715393, -0.4407802224159241, -0.4407808482646942, -0.44078147411346436, -0.4407820701599121, -0.44078266620635986, -0.44078329205513, -0.44078391790390015, -0.4407845437526703, -0.44078513979911804, -0.4407857358455658, -0.44078630208969116, -0.44078686833381653, -0.4407874047756195, -0.4407879710197449, -0.44078853726387024, -0.4407891035079956, -0.44078966975212097, -0.44079023599624634, -0.4407908022403717, -0.4407913386821747, -0.44079187512397766, -0.44079241156578064, -0.4407929480075836, -0.440793514251709, -0.44079405069351196, -0.44079458713531494, -0.4407951235771179, -0.4407956600189209, -0.44079622626304626, -0.44079679250717163, -0.440797358751297, -0.44079792499542236, -0.4407985210418701, -0.4407990872859955, -0.44079965353012085, -0.4408002197742462, -0.4408007264137268, -0.4408012628555298, -0.44080179929733276, -0.44080236554145813, -0.4408029019832611, -0.4408034384250641, -0.44080397486686707, -0.44080451130867004, -0.440805047750473, -0.440805584192276, -0.440806120634079, -0.44080662727355957, -0.44080716371536255, -0.4408077001571655, -0.4408082067966461, -0.4408087134361267, -0.4408092498779297, -0.44080978631973267, -0.44081035256385803, -0.4408109188079834, -0.44081151485443115, -0.4408121109008789, -0.44081270694732666, -0.440813273191452, -0.4408138394355774, -0.44081440567970276, -0.4408149719238281, -0.4408155083656311, -0.4408160448074341, -0.44081661105155945, -0.4408171772956848, -0.4408177435398102, -0.44081830978393555, -0.4408188760280609, -0.4408194422721863, -0.44081997871398926, -0.44082051515579224, -0.4408210217952728, -0.4408215284347534, -0.440822035074234, -0.440822571516037, -0.44082310795783997, -0.44082364439964294, -0.4408242404460907, -0.4408247768878937, -0.44082531332969666, -0.44082581996917725, -0.4408263564109802, -0.4408268928527832, -0.4408274292945862, -0.44082796573638916, -0.44082844257354736, -0.44082894921302795, -0.44082945585250854, -0.4408298134803772, -0.44083014130592346, -0.4408304691314697, -0.440830796957016, -0.44083112478256226, -0.4408314526081085, -0.4408317804336548, -0.44083210825920105, -0.4408324360847473, -0.4408327639102936, -0.44083309173583984, -0.4408333897590637, -0.44083371758461, -0.44083404541015625, -0.4408343732357025, -0.4408347010612488, -0.44083502888679504, -0.4408353865146637, -0.44083574414253235, -0.440836101770401, -0.44083645939826965, -0.4408368170261383, -0.44083717465400696, -0.4408375322818756, -0.4408378601074219, -0.44083818793296814, -0.4408385157585144, -0.44083884358406067, -0.4408392012119293, -0.4408395290374756, -0.44083985686302185, -0.4408401846885681, -0.4408405125141144, -0.44084084033966064, -0.4408411681652069, -0.4408414959907532, -0.44084182381629944, -0.4408421516418457, -0.44084247946739197, -0.44084280729293823, -0.4408431351184845, -0.44084346294403076, -0.440843790769577, -0.4408441185951233, 
-0.44084444642066956, -0.4408448040485382, -0.44084516167640686, -0.4408454895019531, -0.4408458471298218, -0.44084617495536804, -0.4408465325832367, -0.44084689021110535, -0.440847247838974, -0.44084760546684265, -0.4408479630947113, -0.44084832072257996, -0.4408486783504486, -0.44084903597831726, -0.4408493936061859, -0.44084975123405457, -0.4408501386642456, -0.44085052609443665, -0.4408509135246277, -0.4408513009548187, -0.4408516585826874, -0.44085201621055603, -0.4408523738384247, -0.44085273146629333, -0.440853089094162, -0.44085344672203064, -0.4408538341522217, -0.4408542215824127, -0.44085457921028137, -0.44085493683815, -0.4408552944660187, -0.44085562229156494, -0.4408559501171112, -0.44085630774497986, -0.4408566653728485, -0.44085702300071716, -0.4408573806285858, -0.44085773825645447, -0.4408580958843231, -0.4408584535121918, -0.44085878133773804, -0.4408591091632843, -0.44085946679115295, -0.4408598244190216, -0.44086021184921265, -0.4408605992794037, -0.4408609867095947, -0.44086137413978577, -0.4408617615699768, -0.44086214900016785, -0.4408625364303589, -0.44086289405822754, -0.4408632516860962, -0.44086360931396484, -0.4408639669418335, -0.44086432456970215, -0.44086456298828125, -0.44086480140686035, -0.44086506962776184, -0.44086533784866333, -0.4408656060695648, -0.4408658742904663, -0.4408661425113678, -0.4408664107322693, -0.4408666491508484, -0.4408668875694275, -0.4408671259880066, -0.4408673644065857, -0.4408676028251648, -0.4408678412437439, -0.440868079662323, -0.4408682882785797, -0.4408684968948364, -0.44086870551109314, -0.44086891412734985, -0.44086912274360657, -0.4408693313598633, -0.44086953997612, -0.4408697485923767, -0.4408699572086334, -0.44087016582489014, -0.44087037444114685, -0.44087058305740356, -0.4408707916736603, -0.440871000289917, -0.4408712089061737, -0.4408714175224304, -0.44087162613868713, -0.44087183475494385, -0.44087207317352295, -0.44087231159210205, -0.44087255001068115, -0.44087278842926025, -0.44087302684783936, -0.44087326526641846, -0.44087350368499756, -0.44087374210357666, -0.44087398052215576, -0.44087421894073486, -0.44087445735931396, -0.44087469577789307, -0.44087493419647217, -0.44087517261505127, -0.44087541103363037, -0.4408756196498871, -0.4408758282661438, -0.4408760368824005, -0.4408762454986572, -0.44087648391723633, -0.44087672233581543, -0.44087696075439453, -0.44087719917297363, -0.44087743759155273, -0.44087767601013184, -0.44087791442871094, -0.4408781826496124, -0.44087842106819153, -0.44087865948677063, -0.4408789277076721, -0.4408791959285736, -0.4408794641494751, -0.4408797323703766, -0.4408800005912781, -0.4408802390098572, -0.44088050723075867, -0.44088077545166016, -0.44088104367256165, -0.44088131189346313, -0.4408815801143646, -0.4408818185329437, -0.4408820569515228, -0.44088229537010193, -0.44088253378868103, -0.44088277220726013, -0.44088301062583923, -0.44088324904441833, -0.44088348746299744, -0.44088372588157654, -0.44088396430015564, -0.44088417291641235, -0.44088441133499146, -0.44088464975357056, -0.44088488817214966, -0.44088512659072876, -0.44088536500930786, -0.44088560342788696, -0.44088584184646606, -0.44088608026504517, -0.44088631868362427, -0.44088655710220337, -0.44088679552078247, -0.4408870339393616, -0.4408872425556183, -0.440887451171875, -0.4408876597881317, -0.4408878684043884, -0.44088801741600037, -0.4408881664276123, -0.44088831543922424, -0.4408884644508362, -0.4408886134624481, -0.44088876247406006, -0.440888911485672, -0.44088906049728394, -0.4408892095088959, 
-0.4408893883228302, -0.4408895671367645, -0.44088974595069885, -0.4408899247646332, -0.4408901035785675, -0.44089028239250183, -0.44089046120643616, -0.4408906102180481, -0.44089075922966003, -0.440890908241272, -0.4408910572528839, -0.44089120626449585, -0.4408913552761078, -0.4408915042877197, -0.44089165329933167, -0.4408918023109436, -0.44089195132255554, -0.4408921003341675, -0.4408922493457794, -0.44089239835739136, -0.4408925473690033, -0.44089266657829285, -0.4408927857875824, -0.44089290499687195, -0.4408930242061615, -0.44089314341545105, -0.440893292427063, -0.44089341163635254, -0.4408935308456421, -0.44089365005493164, -0.4408937990665436, -0.4408939480781555, -0.44089409708976746, -0.4408942461013794, -0.44089439511299133, -0.44089454412460327, -0.4408946931362152, -0.44089484214782715, -0.4408949911594391, -0.440895140171051, -0.44089528918266296, -0.4408954381942749, -0.44089558720588684, -0.4408957362174988, -0.4408958852291107, -0.44089603424072266, -0.4408961832523346, -0.44089633226394653, -0.44089648127555847, -0.4408966302871704, -0.44089677929878235, -0.4408969283103943, -0.4408970773220062, -0.44089722633361816, -0.4408973455429077, -0.44089749455451965, -0.4408976435661316, -0.44089779257774353, -0.44089794158935547, -0.440898060798645, -0.44089820981025696, -0.4408983588218689, -0.44089850783348083, -0.4408986568450928, -0.4408988058567047, -0.44089895486831665, -0.4408991038799286, -0.4408992528915405, -0.44089940190315247, -0.4408995509147644, -0.44089969992637634, -0.4408998489379883, -0.4408999979496002, -0.44090014696121216, -0.4409002959728241, -0.44090044498443604, -0.440900593996048, -0.4409007430076599, -0.44090086221694946, -0.440900981426239, -0.44090110063552856, -0.4409012198448181, -0.44090133905410767, -0.4409014582633972, -0.44090160727500916, -0.4409017562866211, -0.44090190529823303, -0.44090205430984497, -0.4409021735191345, -0.4409022927284241, -0.4409024119377136, -0.4409025013446808, -0.44090259075164795, -0.4409026801586151, -0.44090279936790466, -0.4409028887748718, -0.4409030079841614, -0.44090309739112854, -0.4409031867980957, -0.44090327620506287, -0.44090336561203003, -0.4409034848213196, -0.44090357422828674, -0.4409036636352539, -0.44090375304222107, -0.44090384244918823, -0.4409039318561554, -0.44090405106544495, -0.4409041404724121, -0.4409042298793793, -0.4409043490886688, -0.440904438495636, -0.44090452790260315, -0.4409046471118927, -0.44090476632118225, -0.4409048855304718, -0.44090500473976135, -0.4409051239490509, -0.44090524315834045, -0.44090536236763, -0.44090548157691956, -0.4409056007862091, -0.44090571999549866, -0.4409058392047882, -0.44090595841407776, -0.4409060776233673, -0.44090619683265686, -0.4409063160419464, -0.44090643525123596, -0.4409065544605255, -0.44090667366981506, -0.4409067928791046, -0.44090691208839417, -0.4409070312976837, -0.44090715050697327, -0.4409072697162628, -0.44090738892555237, -0.4409075081348419, -0.44090762734413147, -0.440907746553421, -0.44090786576271057, -0.4409079849720001, -0.4409081041812897, -0.4409082233905792, -0.4409083425998688, -0.44090843200683594, -0.4409085512161255, -0.44090864062309265, -0.4409087300300598, -0.44090884923934937, -0.44090893864631653, -0.4409090280532837, -0.44090911746025085, -0.440909206867218, -0.4409092962741852, -0.44090938568115234, -0.4409094750881195, -0.44090956449508667, -0.44090965390205383, -0.440909743309021, -0.44090983271598816, -0.4409099221229553, -0.4409100115299225, -0.44091010093688965, -0.4409101903438568, -0.440910279750824, 
-0.44091036915779114, -0.4409104585647583, -0.44091054797172546, -0.4409106373786926, -0.4409107267856598, -0.44091081619262695, -0.4409109055995941, -0.4409109950065613, -0.44091108441352844, -0.440911203622818, -0.44091132283210754, -0.4409114122390747, -0.44091153144836426, -0.4409116506576538, -0.44091176986694336, -0.4409118890762329, -0.44091200828552246, -0.440912127494812, -0.44091224670410156, -0.4409123659133911, -0.44091248512268066, -0.4409126043319702, -0.44091272354125977, -0.44091281294822693, -0.4409129321575165, -0.44091299176216125, -0.44091305136680603, -0.4409131407737732, -0.44091323018074036, -0.4409133195877075, -0.4409134089946747, -0.44091346859931946, -0.44091352820396423, -0.440913587808609, -0.4409136474132538, -0.44091370701789856, -0.44091376662254333, -0.4409138262271881, -0.4409138858318329, -0.44091394543647766, -0.44091400504112244, -0.4409140646457672, -0.440914124250412, -0.44091418385505676, -0.44091424345970154, -0.4409143328666687, -0.44091442227363586, -0.440914511680603, -0.4409146010875702, -0.44091469049453735, -0.4409147799015045, -0.4409148693084717, -0.44091492891311646, -0.44091498851776123, -0.440915048122406, -0.4409151077270508, -0.44091516733169556, -0.44091522693634033, -0.4409152865409851, -0.4409153461456299, -0.44091540575027466, -0.44091546535491943, -0.4409155249595642, -0.440915584564209, -0.44091564416885376, -0.44091570377349854, -0.4409157633781433, -0.4409158229827881, -0.44091588258743286, -0.44091594219207764, -0.4409160017967224, -0.4409160614013672, -0.44091612100601196, -0.44091618061065674, -0.4409162402153015, -0.4409162998199463, -0.44091635942459106, -0.44091641902923584, -0.4409164786338806, -0.4409165382385254, -0.44091659784317017, -0.44091665744781494, -0.4409167170524597, -0.4409167766571045, -0.44091683626174927, -0.44091689586639404, -0.4409169554710388, -0.4409170150756836, -0.44091707468032837, -0.44091713428497314, -0.4409171938896179, -0.4409172534942627, -0.44091731309890747, -0.44091740250587463, -0.4409174919128418, -0.44091758131980896, -0.4409176707267761, -0.4409177601337433, -0.44091784954071045, -0.4409179389476776, -0.4409180283546448, -0.44091811776161194, -0.4409182071685791, -0.44091829657554626, -0.4409183859825134, -0.4409184753894806, -0.44091856479644775, -0.4409186542034149, -0.4409187436103821, -0.44091883301734924, -0.4409189224243164, -0.44091901183128357, -0.44091910123825073, -0.4409191906452179, -0.44091928005218506, -0.4409193694591522, -0.4409194588661194, -0.44091954827308655, -0.4409196376800537, -0.4409197270870209, -0.44091981649398804, -0.4409199059009552, -0.44091999530792236, -0.4409200847148895, -0.4409201741218567], "decay_traj": [0.800000011920929, 0.8203824758529663, 0.8394253253936768, 0.8579757213592529, 0.876308262348175, 0.8942866921424866, 0.9116828441619873, 0.9288591742515564, 0.9459815621376038, 0.9628382325172424, 0.9793090224266052, 0.9954723119735718, 1.0113224983215332, 1.0269185304641724, 1.0423504114151, 1.05759596824646, 1.0725860595703125, 1.0873538255691528, 1.102022409439087, 1.1166207790374756, 1.1310820579528809, 1.1453359127044678, 1.1593313217163086, 1.173068642616272, 1.1866902112960815, 1.2001245021820068, 1.2132853269577026, 1.2262983322143555, 1.2390401363372803, 1.2515560388565063, 1.2638752460479736, 1.275976300239563, 1.2879664897918701, 1.29973304271698, 1.311298131942749, 1.322657823562622, 1.33391535282135, 1.3451323509216309, 1.3561089038848877, 1.3668246269226074, 1.3772474527359009, 1.3875023126602173, 1.3975470066070557, 
1.4074327945709229, 1.4171440601348877, 1.4267157316207886, 1.4360593557357788, 1.445193886756897, 1.454196572303772, 1.4629639387130737, 1.4714874029159546, 1.4797953367233276, 1.4878926277160645, 1.495840072631836, 1.5036664009094238, 1.511301040649414, 1.5187766551971436, 1.526099443435669, 1.5332046747207642, 1.5401349067687988, 1.5469051599502563, 1.5534946918487549, 1.5598728656768799, 1.5660924911499023, 1.5722508430480957, 1.5782239437103271, 1.5839813947677612, 1.5896286964416504, 1.5950809717178345, 1.60031259059906, 1.605370044708252, 1.6102216243743896, 1.614912748336792, 1.6194058656692505, 1.623734474182129, 1.6278928518295288, 1.6319234371185303, 1.6357797384262085, 1.6394766569137573, 1.6430052518844604, 1.6463886499404907, 1.6496491432189941, 1.652787685394287, 1.6558153629302979, 1.6587275266647339, 1.6615149974822998, 1.664144515991211, 1.666662335395813, 1.6690691709518433, 1.6713776588439941, 1.673583984375, 1.6757147312164307, 1.6778136491775513, 1.6798187494277954, 1.6817668676376343, 1.6836310625076294, 1.6854251623153687, 1.6871380805969238, 1.688772439956665, 1.690351963043213, 1.691852331161499, 1.6928651332855225, 1.6938527822494507, 1.694826364517212, 1.6957684755325317, 1.696679949760437, 1.6975523233413696, 1.6983932256698608, 1.6992061138153076, 1.6999815702438354, 1.700751781463623, 1.7014970779418945, 1.7022223472595215, 1.7029236555099487, 1.7036110162734985, 1.7042819261550903, 1.7049375772476196, 1.7055697441101074, 1.706175684928894, 1.7067862749099731, 1.7073826789855957, 1.7079532146453857, 1.7085059881210327, 1.7090522050857544, 1.7095849514007568, 1.7101088762283325, 1.7106345891952515, 1.711148738861084, 1.7116539478302002, 1.712148904800415, 1.7126392126083374, 1.7131173610687256, 1.7135783433914185, 1.7140322923660278, 1.7144701480865479, 1.7148993015289307, 1.715322732925415, 1.7157413959503174, 1.7161462306976318, 1.7165515422821045, 1.7169610261917114, 1.7173681259155273, 1.7177692651748657, 1.7181615829467773, 1.7185462713241577, 1.7189332246780396, 1.7193093299865723, 1.7196757793426514, 1.720039963722229, 1.720400094985962, 1.720755934715271, 1.7211077213287354, 1.7214499711990356, 1.7217903137207031, 1.722137689590454, 1.7224799394607544, 1.722813606262207, 1.7231385707855225, 1.7234673500061035, 1.723791480064392, 1.7241116762161255, 1.7244356870651245, 1.7247552871704102, 1.725067377090454, 1.7253713607788086, 1.7256723642349243, 1.7259714603424072, 1.7262686491012573, 1.7265610694885254, 1.7268576622009277, 1.727144718170166, 1.7274270057678223, 1.7277063131332397, 1.7279789447784424, 1.7282391786575317, 1.7285012006759644, 1.7287516593933105, 1.7289992570877075, 1.7292392253875732, 1.7294772863388062, 1.7297124862670898, 1.729956030845642, 1.7301942110061646, 1.730430006980896, 1.7306649684906006, 1.7308930158615112, 1.7311159372329712, 1.7313385009765625, 1.731561541557312, 1.731783390045166, 1.7320047616958618, 1.7322314977645874, 1.7324540615081787, 1.7326747179031372, 1.732892394065857, 1.7331128120422363, 1.7333300113677979, 1.7335412502288818, 1.733747124671936, 1.7339507341384888, 1.7341588735580444, 1.734300971031189, 1.7344416379928589, 1.7345815896987915, 1.734724760055542, 1.734872817993164, 1.7350196838378906, 1.7351646423339844, 1.7353065013885498, 1.7354469299316406, 1.7355852127075195, 1.7357208728790283, 1.7358571290969849, 1.7359918355941772, 1.7361234426498413, 1.7362529039382935, 1.736382007598877, 1.7365076541900635, 1.7366324663162231, 1.736757755279541, 1.7368820905685425, 1.737007975578308, 1.7371338605880737, 
1.7372608184814453, 1.7373905181884766, 1.737520694732666, 1.7376484870910645, 1.7377815246582031, 1.7379144430160522, 1.7380484342575073, 1.7381809949874878, 1.7383129596710205, 1.7384425401687622, 1.7385705709457397, 1.7386963367462158, 1.7388206720352173, 1.7389452457427979, 1.7390671968460083, 1.73918616771698, 1.739302158355713, 1.7394192218780518, 1.7395341396331787, 1.7396482229232788, 1.739763855934143, 1.7398812770843506, 1.7399981021881104, 1.7401149272918701, 1.7402317523956299, 1.7403464317321777, 1.7404571771621704, 1.7405681610107422, 1.7406777143478394, 1.7407848834991455, 1.7408905029296875, 1.7409946918487549, 1.7410982847213745, 1.7412023544311523, 1.741304874420166, 1.7414066791534424, 1.7415101528167725, 1.7416144609451294, 1.7417168617248535, 1.7418200969696045, 1.7419216632843018, 1.7420191764831543, 1.7421150207519531, 1.7422130107879639, 1.742311716079712, 1.7424074411392212, 1.7425018548965454, 1.7425974607467651, 1.7426936626434326, 1.7427948713302612, 1.742896556854248, 1.742997407913208, 1.7430968284606934, 1.7431955337524414, 1.743293046951294, 1.7433921098709106, 1.7434959411621094, 1.743599772453308, 1.7437012195587158, 1.7438032627105713, 1.7439039945602417, 1.7440040111541748, 1.7441020011901855, 1.7441956996917725, 1.744291067123413, 1.7443856000900269, 1.7444801330566406, 1.744577407836914, 1.74467134475708, 1.7447644472122192, 1.744856595993042, 1.7449504137039185, 1.7450426816940308, 1.745134711265564, 1.7452268600463867, 1.7453200817108154, 1.7454121112823486, 1.7455024719238281, 1.7455681562423706, 1.745633602142334, 1.7457001209259033, 1.7457677125930786, 1.7458351850509644, 1.7459020614624023, 1.7459700107574463, 1.7460377216339111, 1.7461069822311401, 1.746175765991211, 1.7462486028671265, 1.7463208436965942, 1.746392011642456, 1.7464663982391357, 1.7465401887893677, 1.746611475944519, 1.746681571006775, 1.7467492818832397, 1.7468159198760986, 1.7468812465667725, 1.7469452619552612, 1.7470073699951172, 1.7470682859420776, 1.7471283674240112, 1.7471877336502075, 1.7472456693649292, 1.7473024129867554, 1.7473593950271606, 1.74741530418396, 1.7474713325500488, 1.7475273609161377, 1.7475820779800415, 1.7476351261138916, 1.7476882934570312, 1.7477407455444336, 1.7477928400039673, 1.7478454113006592, 1.7478982210159302, 1.7479513883590698, 1.74800443649292, 1.7480566501617432, 1.748108983039856, 1.7481613159179688, 1.7482138872146606, 1.7482701539993286, 1.7483279705047607, 1.7483880519866943, 1.7484493255615234, 1.7485098838806152, 1.748570203781128, 1.7486302852630615, 1.7486884593963623, 1.7487454414367676, 1.7488027811050415, 1.7488586902618408, 1.7489134073257446, 1.7489676475524902, 1.7490252256393433, 1.7490819692611694, 1.7491399049758911, 1.7491968870162964, 1.7492525577545166, 1.7493081092834473, 1.7493624687194824, 1.7494144439697266, 1.7494680881500244, 1.7495214939117432, 1.7495741844177246, 1.7496262788772583, 1.7496784925460815, 1.7497316598892212, 1.7497836351394653, 1.7498348951339722, 1.749886155128479, 1.7499371767044067, 1.7499876022338867, 1.7500382661819458, 1.7500900030136108, 1.7501423358917236, 1.75019371509552, 1.7502440214157104, 1.7502930164337158, 1.7503418922424316, 1.7503926753997803, 1.7504425048828125, 1.7504922151565552, 1.7505418062210083, 1.7505908012390137, 1.7506383657455444, 1.7506871223449707, 1.750735878944397, 1.7507842779159546, 1.7508327960968018, 1.7508798837661743, 1.7509270906448364, 1.7509748935699463, 1.7510229349136353, 1.751071810722351, 1.751122236251831, 1.751174807548523, 1.7512121200561523, 
1.7512493133544922, 1.7512863874435425, 1.7513223886489868, 1.7513582706451416, 1.7513933181762695, 1.7514289617538452, 1.751465082168579, 1.7515003681182861, 1.7515367269515991, 1.751572847366333, 1.7516093254089355, 1.7516449689865112, 1.7516794204711914, 1.7517130374908447, 1.7517470121383667, 1.7517807483673096, 1.7518134117126465, 1.7518455982208252, 1.7518775463104248, 1.7519084215164185, 1.7519389390945435, 1.7519692182540894, 1.7520005702972412, 1.7520326375961304, 1.7520654201507568, 1.752097725868225, 1.752130389213562, 1.7521629333496094, 1.752194881439209, 1.7522263526916504, 1.7522577047348022, 1.7522892951965332, 1.7523229122161865, 1.7523561716079712, 1.7523895502090454, 1.7524230480194092, 1.7524563074111938, 1.7524899244308472, 1.7525228261947632, 1.7525556087493896, 1.752588152885437, 1.7526204586029053, 1.7526518106460571, 1.7526823282241821, 1.7527124881744385, 1.7527424097061157, 1.752772569656372, 1.7528029680252075, 1.752833604812622, 1.7528643608093262, 1.752894639968872, 1.7529247999191284, 1.7529540061950684, 1.7529845237731934, 1.7530148029327393, 1.7530447244644165, 1.7530735731124878, 1.7531033754348755, 1.753132700920105, 1.7531620264053345, 1.7531911134719849, 1.753219485282898, 1.7532482147216797, 1.7532775402069092, 1.7533071041107178, 1.7533375024795532, 1.7533680200576782, 1.7533985376358032, 1.753429651260376, 1.7534611225128174, 1.753491997718811, 1.75352144241333, 1.7535501718521118, 1.7535786628723145, 1.7536087036132812, 1.7536392211914062, 1.7536700963974, 1.753700613975525, 1.7537307739257812, 1.7537610530853271, 1.7537920475006104, 1.7538233995437622, 1.7538541555404663, 1.7538845539093018, 1.7539163827896118, 1.7539485692977905, 1.7539809942245483, 1.7540138959884644, 1.7540483474731445, 1.7540812492370605, 1.7541133165359497, 1.7541447877883911, 1.7541764974594116, 1.7542078495025635, 1.7542393207550049, 1.754270076751709, 1.754301905632019, 1.7543333768844604, 1.7543644905090332, 1.754386067390442, 1.7544071674346924, 1.7544279098510742, 1.7544482946395874, 1.7544684410095215, 1.7544881105422974, 1.754507064819336, 1.7545260190963745, 1.754544734954834, 1.7545634508132935, 1.7545820474624634, 1.7546007633209229, 1.7546192407608032, 1.7546377182006836, 1.7546567916870117, 1.7546758651733398, 1.7546945810317993, 1.7547136545181274, 1.7547330856323242, 1.754752278327942, 1.7547731399536133, 1.7547941207885742, 1.7548151016235352, 1.754835844039917, 1.7548565864562988, 1.7548763751983643, 1.7548962831497192, 1.7549159526824951, 1.754935622215271, 1.7549556493759155, 1.75497567653656, 1.754995346069336, 1.7550147771835327, 1.7550346851348877, 1.755053997039795, 1.7550725936889648, 1.7550909519195557, 1.7551096677780151, 1.7551287412643433, 1.7551476955413818, 1.75516676902771, 1.7551854848861694, 1.7552047967910767, 1.7552244663238525, 1.7552437782287598, 1.7552629709243774, 1.755281925201416, 1.755300521850586, 1.7553187608718872, 1.7553373575210571, 1.7553560733795166, 1.7553751468658447, 1.7553941011428833, 1.7554131746292114, 1.7554322481155396, 1.7554513216018677, 1.7554699182510376, 1.755488395690918, 1.7555067539215088, 1.75552499294281, 1.755543828010559, 1.7555631399154663, 1.7555824518203735, 1.755602240562439, 1.7556215524673462, 1.7556413412094116, 1.7556612491607666, 1.7556812763214111, 1.7557005882263184, 1.7557196617126465, 1.7557381391525269, 1.7557567358016968, 1.7557750940322876, 1.7557930946350098, 1.7558107376098633, 1.7558289766311646, 1.7558470964431763, 1.7558650970458984, 1.755883812904358, 1.7559020519256592, 
1.7559199333190918, 1.7559376955032349, 1.7559555768966675, 1.7559734582901, 1.7559912204742432, 1.7560088634490967, 1.7560261487960815, 1.7560439109802246, 1.7560620307922363, 1.7560806274414062, 1.7560997009277344, 1.7561192512512207, 1.756138801574707, 1.7561591863632202, 1.7561787366867065, 1.7561978101730347, 1.756217122077942, 1.7562367916107178, 1.756257176399231, 1.756277084350586, 1.7562906742095947, 1.756304383277893, 1.75631844997406, 1.7563323974609375, 1.756346583366394, 1.756360650062561, 1.7563745975494385, 1.7563884258270264, 1.756401777267456, 1.7564152479171753, 1.756428837776184, 1.7564425468444824, 1.7564557790756226, 1.756468653678894, 1.7564815282821655, 1.7564946413040161, 1.7565075159072876, 1.75652015209198, 1.7565325498580933, 1.756544828414917, 1.7565569877624512, 1.7565696239471436, 1.7565819025039673, 1.756593942642212, 1.7566063404083252, 1.756618618965149, 1.7566313743591309, 1.7566438913345337, 1.7566566467285156, 1.756669521331787, 1.7566827535629272, 1.7566958665847778, 1.7567085027694702, 1.7567212581634521, 1.7567344903945923, 1.7567474842071533, 1.756760597229004, 1.756773591041565, 1.7567869424819946, 1.756800889968872, 1.756814956665039, 1.7568284273147583, 1.7568421363830566, 1.7568566799163818, 1.756871223449707, 1.7568854093551636, 1.7568989992141724, 1.7569122314453125, 1.7569255828857422, 1.7569386959075928, 1.7569515705108643, 1.7569644451141357, 1.7569773197174072, 1.7569901943206787, 1.7570029497146606, 1.7570159435272217, 1.7570290565490723, 1.7570422887802124, 1.7570555210113525, 1.7570686340332031, 1.7570817470550537, 1.7570948600769043, 1.757108211517334, 1.7571218013763428, 1.757135033607483, 1.7571481466293335, 1.757161259651184, 1.7571746110916138, 1.757187843322754, 1.7572007179260254, 1.7572137117385864, 1.7572267055511475, 1.7572393417358398, 1.7572524547576904, 1.7572656869888306, 1.7572790384292603, 1.7572920322418213, 1.7573050260543823, 1.7573180198669434, 1.7573306560516357, 1.7573432922363281, 1.7573554515838623, 1.757367491722107, 1.7573801279067993, 1.757392406463623, 1.7574043273925781, 1.7574163675308228, 1.7574290037155151, 1.7574418783187866, 1.7574552297592163, 1.7574682235717773, 1.7574812173843384, 1.757494330406189, 1.7575074434280396, 1.757520079612732, 1.7575324773788452, 1.757544994354248, 1.7575578689575195, 1.7575702667236328, 1.7575825452804565, 1.7575912475585938, 1.757599949836731, 1.7576087713241577, 1.7576178312301636, 1.7576266527175903, 1.7576355934143066, 1.7576444149017334, 1.7576532363891602, 1.757662057876587, 1.7576709985733032, 1.7576804161071777, 1.7576894760131836, 1.7576985359191895, 1.7577074766159058, 1.7577162981033325, 1.7577251195907593, 1.757733702659607, 1.7577422857284546, 1.7577508687973022, 1.7577592134475708, 1.7577674388885498, 1.757775902748108, 1.7577844858169556, 1.7577930688858032, 1.7578012943267822, 1.7578096389770508, 1.7578177452087402, 1.7578258514404297, 1.7578338384628296, 1.75784170627594, 1.757849931716919, 1.7578582763671875, 1.7578665018081665, 1.7578747272491455, 1.7578829526901245, 1.757891058921814, 1.7578991651535034, 1.7579076290130615, 1.7579160928726196, 1.7579249143600464, 1.7579337358474731, 1.7579424381256104, 1.757951021194458, 1.7579597234725952, 1.7579684257507324, 1.7579772472381592, 1.757986068725586, 1.7579947710037231, 1.758003830909729, 1.7580127716064453, 1.758021593093872, 1.7580301761627197, 1.7580387592315674, 1.7580472230911255, 1.7580559253692627, 1.7580645084381104, 1.7580729722976685, 1.7580811977386475, 1.7580894231796265, 1.758097767829895, 
1.7581063508987427, 1.7581146955490112, 1.7581229209899902, 1.7581309080123901, 1.7581387758255005, 1.75814688205719, 1.758155345916748, 1.7581638097763062, 1.7581723928451538, 1.7581807374954224, 1.75818932056427, 1.7581979036331177, 1.7582064867019653, 1.7582151889801025, 1.7582240104675293, 1.758232831954956, 1.7582415342330933, 1.7582504749298096, 1.7582595348358154, 1.7582684755325317, 1.7582772970199585, 1.7582859992980957, 1.7582948207855225, 1.7583037614822388, 1.7583125829696655, 1.7583212852478027, 1.7583296298980713, 1.7583379745483398, 1.7583460807800293, 1.7583543062210083, 1.7583625316619873, 1.7583708763122559, 1.7583792209625244, 1.7583872079849243, 1.7583950757980347, 1.758402705192566, 1.7584102153778076, 1.7584177255630493, 1.7584251165390015, 1.7584326267242432, 1.7584378719329834, 1.7584431171417236, 1.7584483623504639, 1.7584534883499146, 1.7584587335586548, 1.7584644556045532, 1.758470058441162, 1.7584760189056396, 1.7584819793701172, 1.7584879398345947, 1.7584939002990723, 1.7584998607635498, 1.7585055828094482, 1.7585111856460571, 1.758516788482666, 1.7585225105285645, 1.758528232574463, 1.7585340738296509, 1.7585397958755493, 1.7585455179214478, 1.7585511207580566, 1.758556842803955, 1.7585623264312744, 1.7585678100585938, 1.758573293685913, 1.7585787773132324, 1.7585842609405518, 1.758589744567871, 1.7585954666137695, 1.758601188659668, 1.758607029914856, 1.758612871170044, 1.758618712425232, 1.7586244344711304, 1.758630394935608, 1.758636474609375, 1.7586426734924316, 1.7586488723754883, 1.7586549520492554, 1.758660912513733, 1.7586668729782104, 1.7586727142333984, 1.7586784362792969, 1.7586841583251953, 1.7586897611618042, 1.758695363998413, 1.7587014436721802, 1.7587074041366577, 1.7587134838104248, 1.7587194442749023, 1.7587252855300903, 1.7587310075759888, 1.7587366104125977, 1.758742094039917, 1.7587480545043945, 1.7587538957595825, 1.7587597370147705, 1.758765459060669, 1.7587710618972778, 1.7587766647338867, 1.7587822675704956, 1.758787989616394, 1.7587937116622925, 1.758799433708191, 1.7588050365447998, 1.7588107585906982, 1.7588167190551758, 1.7588226795196533, 1.7588286399841309, 1.7588343620300293, 1.7588400840759277, 1.7588456869125366, 1.7588510513305664, 1.7588565349578857, 1.7588618993759155, 1.7588673830032349, 1.7588728666305542, 1.758878469467163, 1.7588839530944824, 1.7588895559310913, 1.7588952779769897, 1.7589008808135986, 1.758906364440918, 1.7589119672775269, 1.7589175701141357, 1.758923053741455, 1.7589285373687744, 1.7589341402053833, 1.7589399814605713, 1.7589462995529175, 1.7589523792266846, 1.7589585781097412, 1.7589646577835083, 1.7589706182479858, 1.7589765787124634, 1.758982539176941, 1.758988380432129, 1.758994221687317, 1.7589999437332153, 1.7590056657791138, 1.759009599685669, 1.7590135335922241, 1.7590173482894897, 1.7590211629867554, 1.759024977684021, 1.759028673171997, 1.7590323686599731, 1.7590361833572388, 1.759040355682373, 1.7590446472167969, 1.7590489387512207, 1.7590532302856445, 1.759057641029358, 1.7590622901916504, 1.7590668201446533, 1.7590711116790771, 1.759075403213501, 1.7590796947479248, 1.7590839862823486, 1.7590880393981934, 1.759092092514038, 1.7590960264205933, 1.759100079536438, 1.7591041326522827, 1.759108304977417, 1.7591124773025513, 1.7591166496276855, 1.7591207027435303, 1.759124755859375, 1.7591288089752197, 1.7591328620910645, 1.7591369152069092, 1.7591407299041748, 1.7591445446014404, 1.7591482400894165, 1.7591520547866821, 1.7591558694839478, 1.7591596841812134, 1.7591636180877686, 
1.7591675519943237, 1.7591716051101685, 1.7591756582260132, 1.7591798305511475, 1.7591840028762817, 1.7591882944107056, 1.759192705154419, 1.7591971158981323, 1.7592014074325562, 1.75920569896698, 1.7592101097106934, 1.7592144012451172, 1.759218692779541, 1.7592227458953857, 1.7592267990112305, 1.7592307329177856, 1.7592346668243408, 1.759238600730896, 1.7592424154281616, 1.7592462301254272, 1.7592500448226929, 1.7592540979385376, 1.7592581510543823, 1.7592623233795166, 1.7592664957046509, 1.7592705488204956, 1.7592746019363403, 1.759278655052185, 1.7592825889587402, 1.7592865228652954, 1.7592904567718506, 1.7592943906784058, 1.759298324584961, 1.7593022584915161, 1.7593061923980713, 1.7593101263046265, 1.7593140602111816, 1.7593183517456055, 1.7593226432800293, 1.7593269348144531, 1.7593311071395874, 1.7593353986740112, 1.759339451789856, 1.7593435049057007, 1.7593475580215454, 1.7593514919281006, 1.7593554258346558, 1.75935959815979, 1.7593640089035034, 1.7593685388565063, 1.7593730688095093, 1.7593774795532227, 1.759381890296936, 1.7593861818313599, 1.7593904733657837, 1.759394645690918, 1.7593986988067627, 1.7594027519226074, 1.7594069242477417, 1.759411096572876, 1.7594152688980103]} --------------------------------------------------------------------------------
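The JSON above stores per-iteration trajectories of the tuned RPCA hyperparameters, under keys such as "eta_traj" and "decay_traj". As a minimal sketch of how these trajectories might be loaded and inspected, the following assumes a hypothetical output path and that matplotlib is installed; it is not part of the repository:

import json
import matplotlib.pyplot as plt

# Hypothetical path: substitute the actual trajectory JSON written by training.
with open('outputs/synthetic/trajectories.json', 'r') as f:
    metrics = json.load(f)

# Plot the hyperparameter trajectories stored under the keys seen in the dump.
fig, axes = plt.subplots(1, 2, figsize=(10, 3))
for ax, key in zip(axes, ['eta_traj', 'decay_traj']):
    ax.plot(metrics[key])
    ax.set_xlabel('training step')
    ax.set_ylabel(key)
fig.tight_layout()
plt.show()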