├── .gitignore
├── README.md
├── configs
└── pecnet
│ ├── bo.yaml
│ ├── mc.yaml
│ ├── optimal_settings.yaml
│ └── qmc.yaml
├── evaluate_pipeline.py
├── img
└── process.png
├── run_bo.sh
├── run_mc.sh
├── run_qmc.sh
├── saved_models
└── pecnet
│ ├── eth
│ ├── args.pkl
│ └── val_best.pth
│ ├── hotel
│ ├── args.pkl
│ └── val_best.pth
│ ├── univ
│ ├── args.pkl
│ └── val_best.pth
│ ├── zara1
│ ├── args.pkl
│ └── val_best.pth
│ └── zara2
│ ├── args.pkl
│ └── val_best.pth
└── src
├── bo
├── __init__.py
├── botorch_fit.py
└── botorch_funcitons.py
├── data
└── pecnet.py
├── datasets
├── eth
│ ├── test
│ │ └── biwi_eth.txt
│ ├── train
│ │ ├── biwi_hotel_train.txt
│ │ ├── crowds_zara01_train.txt
│ │ ├── crowds_zara02_train.txt
│ │ ├── crowds_zara03_train.txt
│ │ ├── students001_train.txt
│ │ ├── students003_train.txt
│ │ └── uni_examples_train.txt
│ └── val
│ │ ├── biwi_hotel_val.txt
│ │ ├── crowds_zara01_val.txt
│ │ ├── crowds_zara02_val.txt
│ │ ├── crowds_zara03_val.txt
│ │ ├── students001_val.txt
│ │ ├── students003_val.txt
│ │ └── uni_examples_val.txt
├── hotel
│ ├── test
│ │ └── biwi_hotel.txt
│ ├── train
│ │ ├── biwi_eth_train.txt
│ │ ├── crowds_zara01_train.txt
│ │ ├── crowds_zara02_train.txt
│ │ ├── crowds_zara03_train.txt
│ │ ├── students001_train.txt
│ │ ├── students003_train.txt
│ │ └── uni_examples_train.txt
│ └── val
│ │ ├── biwi_eth_val.txt
│ │ ├── crowds_zara01_val.txt
│ │ ├── crowds_zara02_val.txt
│ │ ├── crowds_zara03_val.txt
│ │ ├── students001_val.txt
│ │ ├── students003_val.txt
│ │ └── uni_examples_val.txt
├── raw
│ ├── .DS_Store
│ ├── all_data
│ │ ├── biwi_eth.txt
│ │ ├── biwi_hotel.txt
│ │ ├── crowds_zara01.txt
│ │ ├── crowds_zara02.txt
│ │ ├── crowds_zara03.txt
│ │ ├── students001.txt
│ │ ├── students003.txt
│ │ └── uni_examples.txt
│ ├── train
│ │ ├── biwi_eth_train.txt
│ │ ├── biwi_hotel_train.txt
│ │ ├── crowds_zara01_train.txt
│ │ ├── crowds_zara02_train.txt
│ │ ├── crowds_zara03_train.txt
│ │ ├── students001_train.txt
│ │ ├── students003_train.txt
│ │ └── uni_examples_train.txt
│ └── val
│ │ ├── biwi_eth_val.txt
│ │ ├── biwi_hotel_val.txt
│ │ ├── crowds_zara01_val.txt
│ │ ├── crowds_zara02_val.txt
│ │ ├── crowds_zara03_val.txt
│ │ ├── students001_val.txt
│ │ ├── students003_val.txt
│ │ └── uni_examples_val.txt
├── univ
│ ├── test
│ │ ├── students001.txt
│ │ └── students003.txt
│ ├── train
│ │ ├── biwi_eth_train.txt
│ │ ├── biwi_hotel_train.txt
│ │ ├── crowds_zara01_train.txt
│ │ ├── crowds_zara02_train.txt
│ │ ├── crowds_zara03_train.txt
│ │ └── uni_examples_train.txt
│ └── val
│ │ ├── biwi_eth_val.txt
│ │ ├── biwi_hotel_val.txt
│ │ ├── crowds_zara01_val.txt
│ │ ├── crowds_zara02_val.txt
│ │ ├── crowds_zara03_val.txt
│ │ └── uni_examples_val.txt
├── zara1
│ ├── test
│ │ └── crowds_zara01.txt
│ ├── train
│ │ ├── biwi_eth_train.txt
│ │ ├── biwi_hotel_train.txt
│ │ ├── crowds_zara02_train.txt
│ │ ├── crowds_zara03_train.txt
│ │ ├── students001_train.txt
│ │ ├── students003_train.txt
│ │ └── uni_examples_train.txt
│ └── val
│ │ ├── biwi_eth_val.txt
│ │ ├── biwi_hotel_val.txt
│ │ ├── crowds_zara02_val.txt
│ │ ├── crowds_zara03_val.txt
│ │ ├── students001_val.txt
│ │ ├── students003_val.txt
│ │ └── uni_examples_val.txt
└── zara2
│ ├── test
│ └── crowds_zara02.txt
│ ├── train
│ ├── biwi_eth_train.txt
│ ├── biwi_hotel_train.txt
│ ├── crowds_zara01_train.txt
│ ├── crowds_zara03_train.txt
│ ├── students001_train.txt
│ ├── students003_train.txt
│ └── uni_examples_train.txt
│ └── val
│ ├── biwi_eth_val.txt
│ ├── biwi_hotel_val.txt
│ ├── crowds_zara01_val.txt
│ ├── crowds_zara03_val.txt
│ ├── students001_val.txt
│ ├── students003_val.txt
│ └── uni_examples_val.txt
├── models
└── pecnet.py
└── utils
├── blackbox_function.py
├── exp_scheduler.py
├── pecnet.py
└── sample.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # vscode
2 | .vscode
3 |
4 | # Mac
5 | *.DS_Store
6 |
7 | # python
8 | *.pyc
9 | *.pyo
10 | **/__pycache__
11 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Unsupervised Sampling Promoting for Stochastic Human Trajectory Prediction
2 |
3 | [ ](https://arxiv.org/abs/2304.04298)[](https://openaccess.thecvf.com/content/CVPR2023/html/Chen_Unsupervised_Sampling_Promoting_for_Stochastic_Human_Trajectory_Prediction_CVPR_2023_paper.html)
4 |
5 |
6 |
7 | The official repo for "Unsupervised Sampling Promoting for Stochastic Human Trajectory Prediction" (accepted by **CVPR 2023**)
8 |
9 | **[Guangyi Chen](https://chengy12.github.io/)\*, [Zhenhao Chen](https://zhenhaochenofficial.github.io/)\*, [Shunxing Fan](https://scholar.google.com/citations?user=SbGcOdYAAAAJ&hl=en), [Kun Zhang](https://www.andrew.cmu.edu/user/kunz1/)** \*\*
10 |
11 | > **Abstract**
12 | >
13 | > The indeterminate nature of human motion requires trajectory prediction systems to use a probabilistic model to formulate the multi-modality phenomenon and infer a finite set of future trajectories. However, the inference processes of most existing methods rely on Monte Carlo random sampling, which is insufficient to cover the realistic paths with finite samples, due to the long tail effect of the predicted distribution. To promote the sampling process of stochastic prediction, we propose a novel method, called BOsampler , to adaptively mine potential paths with Bayesian optimization in an unsupervised manner, as a sequential design strategy in which new prediction is dependent on the previously drawn samples. Specifically, we model the trajectory sampling as a Gaussian process and construct an acquisition function to measure the potential sampling value. This acquisition function applies the original distribution as prior and encourages exploring paths in the long-tail region. This sampling method can be integrated with existing stochastic predictive models without retraining. Experimental results on various baseline methods demonstrate the effectiveness of our method.
14 |
15 | \* *Authors contributed equally and are listed alphabetically by first name*
16 |
17 | ** *Code & Configuration on the exception subset will be updated later.*
18 |
19 | ## Star History
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 | ## Introduction
32 | * BOsampler is a plug-and-play method to promote the sampling process of stochastic prediction in human trajectory forecasting without retraining the model or accessing the source data.
33 | * BOsampler adaptsively mines potential paths with Bayesian optimization in an unsupervised manner, as a sequential design strategy in which new predictions depend on the previously drawn samples.
34 | * The acquisition function encourages exploring paths in the long-tail region of the distribution and achieving a trade-off between accuracy and diversity.
35 | * BOsampler can be directly integrated with existing stochastic predictive models without retraining.
36 | * Compared to Monte Carlo and Quasi-Monte Carlo sampling methods, BOsampler can encourage exploration of low-probability choices and improve the diversity of samples.
37 |
38 |
39 | 
40 |
41 | The figure above is an illustration of how our method generate new samples in an iterative manner. Given w existing samples, we first characterize two functions: $\mu_w$ and $\sqrt{\beta \kappa_w}$. Then, we use these two functions to fit a Gaussian Process and calculate the posterior distribution. Next, we use the posterior distribution to calculate the acquisition function $\phi(\text{z}_w)$. Then, the next sample is generated by maximizing $\phi(\text{z}_w)$.Finally, we add this new sample to inputs and can use it to update the distribution.
42 |
43 |
44 |
45 | ## Requirements
46 |
47 | We prepare the implementation of BOsampler on PECnet baseline as an example. It is easy to extend our code to other baseline models, too. The default settings has been verified to run on NVIDIA A6000 (48G VRAM) and NVIDIA RTX 3090 (24G VRAM). To run on lower-end GPUs, you might have to reduce the number of ``batch_size`` in ``./configs/pecnet/bo.yaml``
48 |
49 |
50 |
51 | ```
52 | PyTorch==1.11.0
53 | BoTorch==0.6.4
54 | numpy,easydict,pyyaml
55 | ```
56 |
57 | ### Prepare the running environment using conda (Recommended)
58 |
59 | We recommend using conda to create a virtual environment to run BOsampler.
60 |
61 | First, create a new conda environment.
62 |
63 | ```
64 | conda create -n bosampler python=3.8
65 | ```
66 |
67 | Activate the conda environment just created.
68 |
69 | ```
70 | conda activate bosampler
71 | ```
72 |
73 | Install PyTorch using official channel.
74 |
75 | ```
76 | conda install pytorch==1.11.0 cudatoolkit=11.3 -c pytorch
77 | ```
78 |
79 | Use pip to install other requirements.
80 |
81 | ```
82 | pip install easydict pyyaml numpy botorch==0.6.4
83 | ```
84 |
85 | ## How to test
86 |
87 | We prepare three shell scripts for testing. ``run_mc.sh`` for Monte Carlo method, ``run_qmc.sh`` for Quasi-Monte Carlo method, and ``run_bo.sh`` for BOsampler method. You can use a unix shell like ``/bin/sh`` to exexute any of those scripts as you like.
88 |
89 | ### Run BOsampler
90 |
91 | ```
92 | sh run_bo.sh
93 | ```
94 |
95 | ## Adjust for other hyperparameter configurations
96 |
97 | We store the configuration files under ``./configs``. You can explore other settings as you like by editing those configuration files.
98 |
99 | ## BibTex
100 |
101 | If you find our work helpful, please cite us by adding the following BibTex.
102 |
103 | ```bibtex
104 | @inproceedings{chen2023unsupervised,
105 | title={Unsupervised Sampling Promoting for Stochastic Human Trajectory Prediction},
106 | author={Chen, Guangyi and Chen, Zhenhao and Fan, Shunxing and Zhang, Kun},
107 | booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition},
108 | pages={17874--17884},
109 | year={2023}
110 | }
111 | ```
112 |
113 | ## Acknowledgement
114 |
115 | This project was partially supported by the **National Institutes of Health (NIH)** under Contract R01HL159805, by the **NSF-Convergence Accelerator** Track-D award #2134901, by a grant from **Apple Inc.**, a grant from **KDDI Research Inc.**, and generous gifts from **Salesforce Inc.**, **Microsoft Research**, and **Amazon Research**.
116 |
117 | We would like to thank our colleague **Zunhao Zhang** from *MBZUAI* for providing computation resource for part of the experiments.
118 |
119 |
--------------------------------------------------------------------------------
/configs/pecnet/bo.yaml:
--------------------------------------------------------------------------------
1 | model_name: 'pecnet'
2 | obs_len: 8
3 | pred_len: 12
4 | skip: 1
5 | loader_num_workers: 20
6 | model_path:
7 | - 'saved_models/pecnet/eth/val_best.pth'
8 | - 'saved_models/pecnet/hotel/val_best.pth'
9 | - 'saved_models/pecnet/univ/val_best.pth'
10 | - 'saved_models/pecnet/zara1/val_best.pth'
11 | - 'saved_models/pecnet/zara2/val_best.pth'
12 | dataset_path:
13 | - 'src/datasets/eth/test'
14 | - 'src/datasets/hotel/test'
15 | - 'src/datasets/univ/test'
16 | - 'src/datasets/zara1/test'
17 | - 'src/datasets/zara2/test'
18 | datasets:
19 | - 'eth'
20 | - 'hotel'
21 | - 'univ'
22 | - 'zara1'
23 | - 'zara2'
24 | eval_times: 3
25 | batch_size:
26 | - 64
27 | - 64
28 | - 16
29 | - 32
30 | - 32
31 | gpu_idx: 0
32 | qmc: False
33 | output_log: 'pecnet_bo.log'
34 | bo:
35 | acq_type: 'UCB'
36 | bound_factor:
37 | - 5
38 | - 2.5
39 | - 3
40 | - 0.9
41 | - 1
42 | acq_factor:
43 | - 0.5
44 | - 0.5
45 | - 0.5
46 | - 0.5
47 | - 0.5
48 | max_iter:
49 | - 100
50 | - 50
51 | - 50
52 | - 30
53 | - 30
54 | lr:
55 | - 0.01
56 | - 0.03
57 | - 0.03
58 | - 0.03
59 | - 0.03
60 | num_of_warmup:
61 | - 10
62 | - 10
63 | - 10
64 | - 10
65 | - 10
66 | num_of_bo:
67 | - 10
68 | - 10
69 | - 10
70 | - 10
71 | - 10
72 | bb_function_mode: raw
73 |
--------------------------------------------------------------------------------
/configs/pecnet/mc.yaml:
--------------------------------------------------------------------------------
1 | model_name: 'pecnet'
2 | obs_len: 8
3 | pred_len: 12
4 | skip: 1
5 | loader_num_workers: 20
6 | model_path:
7 | - 'saved_models/pecnet/eth/val_best.pth'
8 | - 'saved_models/pecnet/hotel/val_best.pth'
9 | - 'saved_models/pecnet/univ/val_best.pth'
10 | - 'saved_models/pecnet/zara1/val_best.pth'
11 | - 'saved_models/pecnet/zara2/val_best.pth'
12 | dataset_path:
13 | - 'src/datasets/eth/test'
14 | - 'src/datasets/hotel/test'
15 | - 'src/datasets/univ/test'
16 | - 'src/datasets/zara1/test'
17 | - 'src/datasets/zara2/test'
18 | datasets:
19 | - 'eth'
20 | - 'hotel'
21 | - 'univ'
22 | - 'zara1'
23 | - 'zara2'
24 | eval_times: 3
25 | batch_size:
26 | - 1
27 | - 1
28 | - 1
29 | - 1
30 | - 1
31 | gpu_idx: 0
32 | qmc: False
33 | output_log: 'pecnet_bo.log'
34 | bo:
35 | acq_type: 'UCB'
36 | bound_factor:
37 | - 5
38 | - 2.5
39 | - 3
40 | - 0.9
41 | - 1
42 | acq_factor:
43 | - 0.5
44 | - 0.5
45 | - 0.5
46 | - 0.5
47 | - 0.5
48 | max_iter:
49 | - 100
50 | - 50
51 | - 50
52 | - 30
53 | - 30
54 | lr:
55 | - 0.01
56 | - 0.03
57 | - 0.03
58 | - 0.03
59 | - 0.03
60 | num_of_warmup:
61 | - 20
62 | - 20
63 | - 20
64 | - 20
65 | - 20
66 | num_of_bo:
67 | - 0
68 | - 0
69 | - 0
70 | - 0
71 | - 0
72 | bb_function_mode: raw
--------------------------------------------------------------------------------
/configs/pecnet/optimal_settings.yaml:
--------------------------------------------------------------------------------
1 | adl_reg: 1
2 | data_scale: 170
3 | dataset_type: image
4 | dec_size:
5 | - 1024
6 | - 512
7 | - 1024
8 | dist_thresh: 50
9 | enc_dest_size:
10 | - 8
11 | - 16
12 | enc_latent_size:
13 | - 8
14 | - 50
15 | enc_past_size:
16 | - 512
17 | - 256
18 | non_local_theta_size:
19 | - 256
20 | - 128
21 | - 64
22 | non_local_phi_size:
23 | - 256
24 | - 128
25 | - 64
26 | non_local_g_size:
27 | - 256
28 | - 128
29 | - 64
30 | non_local_dim: 128
31 | fdim: 16
32 | future_length: 12
33 | gpu_index: 0
34 | kld_reg: 1
35 | learning_rate: 0.0003
36 | mu: 0
37 | n_values: 20
38 | nonlocal_pools: 3
39 | normalize_type: shift_origin
40 | num_epochs: 650
41 | num_workers: 0
42 | past_length: 8
43 | predictor_hidden_size:
44 | - 1024
45 | - 512
46 | - 256
47 | sigma: 1.3
48 | test_b_size: 4096
49 | time_thresh: 0
50 | train_b_size: 512
51 | zdim: 16
52 |
--------------------------------------------------------------------------------
/configs/pecnet/qmc.yaml:
--------------------------------------------------------------------------------
1 | model_name: 'pecnet'
2 | obs_len: 8
3 | pred_len: 12
4 | skip: 1
5 | loader_num_workers: 20
6 | model_path:
7 | - 'saved_models/pecnet/eth/val_best.pth'
8 | - 'saved_models/pecnet/hotel/val_best.pth'
9 | - 'saved_models/pecnet/univ/val_best.pth'
10 | - 'saved_models/pecnet/zara1/val_best.pth'
11 | - 'saved_models/pecnet/zara2/val_best.pth'
12 | dataset_path:
13 | - 'src/datasets/eth/test'
14 | - 'src/datasets/hotel/test'
15 | - 'src/datasets/univ/test'
16 | - 'src/datasets/zara1/test'
17 | - 'src/datasets/zara2/test'
18 | datasets:
19 | - 'eth'
20 | - 'hotel'
21 | - 'univ'
22 | - 'zara1'
23 | - 'zara2'
24 | eval_times: 3
25 | batch_size:
26 | - 1
27 | - 1
28 | - 1
29 | - 1
30 | - 1
31 | gpu_idx: 0
32 | qmc: True
33 | output_log: 'pecnet_bo.log'
34 | bo:
35 | acq_type: 'UCB'
36 | bound_factor:
37 | - 5
38 | - 2.5
39 | - 3
40 | - 0.9
41 | - 1
42 | acq_factor:
43 | - 0.5
44 | - 0.5
45 | - 0.5
46 | - 0.5
47 | - 0.5
48 | max_iter:
49 | - 100
50 | - 50
51 | - 50
52 | - 30
53 | - 30
54 | lr:
55 | - 0.01
56 | - 0.03
57 | - 0.03
58 | - 0.03
59 | - 0.03
60 | num_of_warmup:
61 | - 20
62 | - 20
63 | - 20
64 | - 20
65 | - 20
66 | num_of_bo:
67 | - 0
68 | - 0
69 | - 0
70 | - 0
71 | - 0
72 | bb_function_mode: raw
--------------------------------------------------------------------------------
/evaluate_pipeline.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from tqdm import tqdm
3 | import torch
4 | from torch.utils.data import Dataset
5 | from torch.utils.data import DataLoader
6 | import argparse
7 | import os
8 | from tqdm import tqdm
9 | from torch import Tensor
10 | import yaml
11 | import random
12 |
13 | from src.models.pecnet import PECNet
14 | from src.bo.botorch_funcitons import one_step_BO
15 | from src.data.pecnet import PECNETTrajectoryDataset,pecnet_traj_collate_fn
16 | from src.utils.sample import box_muller_transform
17 | from src.utils.blackbox_function import BlackBoxFunctionPECNET
18 | from src.utils.pecnet import model_forward_post_hook
19 |
20 |
21 | class ConfigExtractor():
22 | def __init__(self,file_path) -> None:
23 | self.file_path = file_path
24 | with open(file_path, 'r') as stream:
25 | self.data = yaml.safe_load(stream)
26 |
27 | def __str__(self) -> str:
28 | return str(self.data)
29 |
30 | def __call__(self, ) -> dict:
31 | return self.data
32 |
33 | def compute_batch_metric(pred, gt):
34 | """Get ADE, FDE, TCC scores for each pedestrian"""
35 | # Calculate ADEs and FDEs
36 | temp = (pred - gt).norm(p=2, dim=-1)
37 | ADEs = temp.mean(dim=1).min(dim=0)[0]
38 | FDEs = temp[:, -1, :].min(dim=0)[0]
39 | return ADEs,FDEs
40 |
41 | def evaluate_helper(error, seq_start_end):
42 | sum_ = 0
43 | error = torch.stack(error, dim=1)
44 | for (start, end) in seq_start_end:
45 | start = start.item()
46 | end = end.item()
47 | _error = error[start:end]
48 | _error = torch.sum(_error, dim=0)
49 | _error = torch.min(_error)
50 | sum_ += _error
51 | return sum_
52 |
53 | class BayesianEvaluator():
54 | def __init__(self,model,bbfunction,dataloader,model_name='pecnet',
55 | num_of_warmup:int=5,num_of_bo:int=15,bound_factor:float=1.0,
56 | config=None,
57 | index=0,
58 | ) -> None:
59 | self.model = model
60 | self.blackbox_function = bbfunction
61 | self.num_of_warmup= num_of_warmup
62 | self.num_of_bo = num_of_bo
63 | self.dataloader = dataloader
64 | self.bound_factor = bound_factor
65 | self.model_name = model_name
66 | self.batchsize = dataloader.batch_size
67 |
68 | self.ades = []
69 | self.fdes = []
70 |
71 | self.ade_cache = []
72 | self.fde_cache = []
73 |
74 | self.config = config
75 | self.index = index
76 |
77 | # self.bb_f_x = []
78 | # self.bb_f_y = []
79 |
80 | def single_step_bo(self,train_x,train_obj,bb_function,bounds):
81 | next_to_probe = one_step_BO(train_x,train_obj,
82 | bounds=bounds,
83 | max_iter=self.config['bo']['max_iter'][self.index],
84 | acq_factor = self.config['bo']['acq_factor'][self.index],
85 | acq_type=self.config['bo']['acq_type'],
86 | lr=self.config['bo']['lr'][self.index])
87 | target = bb_function(next_to_probe)
88 | return next_to_probe,target
89 |
90 |
91 | def single_step_warmup(self,):
92 | pass
93 |
94 | def unpack_batch(self,batch,pecnet_datascale=170):
95 | if self.model_name == 'pecnet':
96 | obs_traj, pred_traj, _, _, mask, _ = [data.cuda(non_blocking=True) for data in batch]
97 |
98 |
99 | x = obs_traj.permute(0, 2, 1).clone()
100 | y = pred_traj.permute(0, 2, 1).clone()
101 |
102 | # starting pos is end of past, start of future. scaled down.
103 | initial_pos = x[:, 7, :].clone() / 1000
104 |
105 | # shift origin and scale data
106 | origin = x[:, :1, :].clone()
107 | x -= origin
108 | y -= origin
109 | x *= pecnet_datascale # hyper_params["data_scale"]
110 |
111 | # reshape the data
112 | x = x.reshape(-1, x.shape[1] * x.shape[2])
113 | x = x.to(obs_traj.device)
114 | return obs_traj, pred_traj, mask, x, y, initial_pos, pecnet_datascale
115 |
116 | else:
117 | raise NotImplementedError
118 |
119 | def generate_bounds(self,bound_factor,*args):
120 | if self.model_name == 'pecnet':
121 | sigma=1.3
122 | bounds = torch.ones((2,16)) * bound_factor
123 | bounds[0,:] = -bounds[0,:]
124 | return bounds
125 | else:
126 | raise NotImplementedError
127 |
128 | def pecnet_evaluate(self):
129 | self.model.eval()
130 | with torch.no_grad ():
131 | for batch in tqdm(self.dataloader):
132 | ade_cache = []
133 | fde_cache = []
134 |
135 | # unpack batch
136 | obs_traj, pred_traj, mask, x, y, initial_pos, pecnet_datascale = self.unpack_batch(batch,data_scale)
137 |
138 | y *= data_scale # hyper_params["data_scale"]
139 | y = y.cpu().numpy()
140 | dest = y[:, -1, :]
141 |
142 | # sampling by mc/qmc
143 | if self.config['qmc'] is True:
144 | sobol_generator = torch.quasirandom.SobolEngine(dimension=16, scramble=True)
145 | loc = box_muller_transform(sobol_generator.draw(self.num_of_warmup).cuda()).unsqueeze(dim=1).expand((self.num_of_warmup,
146 | x.size(0), 16))
147 |
148 |
149 |
150 | all_dest_recon = []
151 |
152 | # get black box function
153 | bb_function = BlackBoxFunctionPECNET(self.model,initial_pos,obs_traj.device,x)
154 | num_of_all_ped = x.shape[0]
155 | # get initial target
156 | train_x = torch.zeros((num_of_all_ped,self.num_of_warmup,16)).to(x.device) #(batch_size, num_of_warmup, 16)
157 | train_obj = torch.zeros((num_of_all_ped,self.num_of_warmup,1)).to(x.device) #(batch_size, num_of_warmup, 1)
158 |
159 | for i in range(self.num_of_warmup):
160 | if self.config['qmc'] is True:
161 | dest_recon = self.model.forward(x, initial_pos, noise=loc[i], device = obs_traj.device )
162 | bb_val = bb_function(loc[i])
163 | train_x[:,i,:] = loc[i]
164 | train_obj[:,i,0] = bb_val
165 | else:
166 | noise = torch.randn((x.shape[0],16)).to(obs_traj.device )
167 | dest_recon = self.model.forward(x, initial_pos, noise = noise,device = obs_traj.device )
168 | bb_val = bb_function(noise)
169 | train_x[:,i,:] = noise
170 | train_obj[:,i,0] = bb_val
171 |
172 | all_dest_recon.append(dest_recon)
173 |
174 | # get bounds
175 | bound = self.generate_bounds(self.bound_factor).to(x.device)
176 |
177 | # bo
178 | for i in range(self.num_of_bo):
179 | next_to_probe,target = self.single_step_bo(train_x,train_obj,bb_function,bound)
180 | next_to_probe=next_to_probe.unsqueeze(dim=1) #[num_of_all_ped,1,16]
181 | target = target.unsqueeze(dim=1).unsqueeze(dim=1) #[num_of_all_ped,1,1]
182 | train_x = torch.cat((train_x,next_to_probe),dim=1)
183 | train_obj = torch.cat((train_obj,target),dim=1)
184 | dest_recon = self.model.forward(x, initial_pos, noise = next_to_probe.squeeze(dim=1),device = obs_traj.device )
185 | all_dest_recon.append(dest_recon)
186 |
187 | ades,fdes = model_forward_post_hook(self.model,all_dest_recon,mask,x,y,initial_pos,dest,)
188 | ade_cache.append(np.array(ades))
189 | fde_cache.append(np.array(fdes))
190 | self.ades.append(ade_cache)
191 | self.fdes.append(fde_cache)
192 |
193 |
194 |
195 | def evaluate(self,):
196 | exp_ades = []
197 | exp_fdes = []
198 | self.model.eval()
199 | for i in range(self.config['eval_times']):
200 | self.ades = []
201 | self.fdes = []
202 |
203 | if self.model_name == 'pecnet':
204 | self.pecnet_evaluate()
205 | self.ades = np.concatenate(self.ades, axis=1)
206 | self.fdes = np.concatenate(self.fdes, axis=1)
207 | else:
208 | raise NotImplementedError
209 | ade = self.ades.mean()
210 | fde = self.fdes.mean()
211 | print(ade,fde)
212 |
213 | exp_ades.append(ade)
214 | exp_fdes.append(fde)
215 | exp_ades = np.array(exp_ades)
216 | exp_fdes = np.array(exp_fdes)
217 | print("[AVG] [ADE] {} [FDE] {}".format(exp_ades.mean(),exp_fdes.mean()))
218 | return exp_ades.mean(),exp_fdes.mean()
219 |
220 | def setup_seed(seed):
221 | torch.manual_seed(seed)
222 | torch.cuda.manual_seed_all(seed)
223 | np.random.seed(seed)
224 | random.seed(seed)
225 | torch.backends.cudnn.deterministic = True
226 |
227 | class LoadModel():
228 | def __init__(self,model_name,model_weight,) -> None:
229 | self.model_name = model_name
230 | self.model_weight = model_weight
231 | def load(self,):
232 | if self.model_name == 'pecnet':
233 | def get_hyperparams():
234 | global data_scale
235 | with open("./configs/pecnet/optimal_settings.yaml", 'r') as file:
236 | hyper_params = yaml.load(file, Loader=yaml.FullLoader)
237 | data_scale = hyper_params["data_scale"]
238 | return (hyper_params["enc_past_size"], hyper_params["enc_dest_size"], hyper_params["enc_latent_size"],
239 | hyper_params["dec_size"], hyper_params["predictor_hidden_size"], hyper_params['non_local_theta_size'],
240 | hyper_params['non_local_phi_size'], hyper_params['non_local_g_size'], hyper_params["fdim"],
241 | hyper_params["zdim"], hyper_params["nonlocal_pools"], hyper_params['non_local_dim'],
242 | hyper_params["sigma"], hyper_params["past_length"], hyper_params["future_length"], False)
243 | model = PECNet(*get_hyperparams())
244 | model.load_state_dict(torch.load(self.model_weight))
245 | model = model.cuda()
246 | model.eval()
247 | return model
248 | else:
249 | raise NotImplementedError
250 |
251 | def load_dataset(model_name,dataset_path,batch_size,obs_len=8,pred_len=12,skip=1,
252 | delim='\t',loader_num_workers=20,):
253 | if model_name == 'pecnet':
254 | dset_train = PECNETTrajectoryDataset(dataset_path, obs_len=obs_len, pred_len=pred_len)
255 | loader_phase = DataLoader(dset_train, batch_size,collate_fn=pecnet_traj_collate_fn, shuffle=False)
256 | return loader_phase
257 | else:
258 | raise NotImplementedError
259 |
260 |
261 | def main(config):
262 |
263 | datasets = config['datasets']
264 | model_name = config['model_name']
265 | print("Baseline model: {}".format(model_name))
266 | ade_results = []
267 | fde_results = []
268 |
269 | for i,dataset in enumerate(datasets):
270 | print("*"*100)
271 | print("Evaluating on dataset {}".format(dataset))
272 | # load model
273 | model_path = config['model_path'][i]
274 |
275 | m = LoadModel(model_name=model_name,model_weight=model_path,)
276 | model = m.load()
277 | # load data
278 | data_loader = load_dataset(model_name=model_name,dataset_path=config['dataset_path'][i],
279 | batch_size=config['batch_size'][i],
280 | obs_len=config['obs_len'],pred_len=config['pred_len'],
281 | skip=config['skip'],
282 | loader_num_workers=config['loader_num_workers'])
283 |
284 | if model_name == 'pecnet':
285 | evaluator = BayesianEvaluator(model=model,bbfunction=BlackBoxFunctionPECNET,
286 | dataloader=data_loader,model_name=model_name,
287 | num_of_warmup=config['bo']['num_of_warmup'][i],
288 | num_of_bo=config['bo']['num_of_bo'][i],
289 | bound_factor=config['bo']['bound_factor'][i],
290 | config=config,index=i)
291 | ade,fde = evaluator.evaluate()
292 | ade_results.append(ade)
293 | fde_results.append(fde)
294 | else:
295 | raise NotImplementedError
296 |
297 | print("ADE results {}".format(np.array(ade_results).mean()))
298 | print("FDE results {}".format(np.array(fde_results).mean()))
299 |
300 | pass
301 |
302 |
303 | if __name__ == "__main__":
304 |
305 | parser = argparse.ArgumentParser()
306 | parser.add_argument('--config', type=str, default='configs/pecnet/zara2.yaml')
307 | args = parser.parse_args()
308 | configs = ConfigExtractor(args.config)
309 |
310 | # exp_configs = ConfigExtractor(args.exp_config)
311 |
312 | # comment below if using evaluation script
313 | torch.cuda.set_device(configs.data['gpu_idx'])
314 |
315 | #print(configs)
316 | main(configs())
317 |
318 |
--------------------------------------------------------------------------------
/img/process.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/img/process.png
--------------------------------------------------------------------------------
/run_bo.sh:
--------------------------------------------------------------------------------
1 | python3 evaluate_pipeline.py --config configs/pecnet/bo.yaml
--------------------------------------------------------------------------------
/run_mc.sh:
--------------------------------------------------------------------------------
1 | python3 evaluate_pipeline.py --config configs/pecnet/mc.yaml
--------------------------------------------------------------------------------
/run_qmc.sh:
--------------------------------------------------------------------------------
1 | python3 evaluate_pipeline.py --config configs/pecnet/qmc.yaml
--------------------------------------------------------------------------------
/saved_models/pecnet/eth/args.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/eth/args.pkl
--------------------------------------------------------------------------------
/saved_models/pecnet/eth/val_best.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/eth/val_best.pth
--------------------------------------------------------------------------------
/saved_models/pecnet/hotel/args.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/hotel/args.pkl
--------------------------------------------------------------------------------
/saved_models/pecnet/hotel/val_best.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/hotel/val_best.pth
--------------------------------------------------------------------------------
/saved_models/pecnet/univ/args.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/univ/args.pkl
--------------------------------------------------------------------------------
/saved_models/pecnet/univ/val_best.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/univ/val_best.pth
--------------------------------------------------------------------------------
/saved_models/pecnet/zara1/args.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/zara1/args.pkl
--------------------------------------------------------------------------------
/saved_models/pecnet/zara1/val_best.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/zara1/val_best.pth
--------------------------------------------------------------------------------
/saved_models/pecnet/zara2/args.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/zara2/args.pkl
--------------------------------------------------------------------------------
/saved_models/pecnet/zara2/val_best.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/saved_models/pecnet/zara2/val_best.pth
--------------------------------------------------------------------------------
/src/bo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/src/bo/__init__.py
--------------------------------------------------------------------------------
/src/bo/botorch_fit.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Meta Platforms, Inc. and affiliates.
3 | #
4 | # This source code is licensed under the MIT license found in the
5 | # LICENSE file in the root directory of this source tree.
6 |
7 | r"""
8 | Tools for model fitting.
9 | """
10 |
11 | from __future__ import annotations
12 |
13 | import time
14 | import warnings
15 | from typing import Any, Callable, Dict, List, NamedTuple, Optional, Set, Tuple, Union
16 |
17 | import numpy as np
18 | from botorch.exceptions.warnings import OptimizationWarning
19 | from botorch.optim.numpy_converter import (
20 | module_to_array,
21 | set_params_with_array,
22 | TorchAttr,
23 | )
24 | from botorch.optim.stopping import ExpMAStoppingCriterion
25 | from botorch.optim.utils import (
26 | _filter_kwargs,
27 | _get_extra_mll_args,
28 | _scipy_objective_and_grad,
29 | )
30 | from gpytorch import settings as gpt_settings
31 | from gpytorch.mlls.marginal_log_likelihood import MarginalLogLikelihood
32 | from scipy.optimize import Bounds, minimize
33 | from torch import Tensor
34 | from torch.nn import Module
35 | from torch.optim.adam import Adam
36 | from torch.optim.optimizer import Optimizer
37 |
38 |
39 | ParameterBounds = Dict[str, Tuple[Optional[float], Optional[float]]]
40 | TScipyObjective = Callable[
41 | [np.ndarray, MarginalLogLikelihood, Dict[str, TorchAttr]], Tuple[float, np.ndarray]
42 | ]
43 | TModToArray = Callable[
44 | [Module, Optional[ParameterBounds], Optional[Set[str]]],
45 | Tuple[np.ndarray, Dict[str, TorchAttr], Optional[np.ndarray]],
46 | ]
47 | TArrayToMod = Callable[[Module, np.ndarray, Dict[str, TorchAttr]], Module]
48 |
49 |
50 | class OptimizationIteration(NamedTuple):
51 | itr: int
52 | fun: float
53 | time: float
54 |
55 |
56 | def fit_gpytorch_torch(
57 | mll: MarginalLogLikelihood,
58 | bounds: Optional[ParameterBounds] = None,
59 | optimizer_cls: Optimizer = Adam,
60 | options: Optional[Dict[str, Any]] = None,
61 | track_iterations: bool = True,
62 | approx_mll: bool = True,
63 | ) -> Tuple[MarginalLogLikelihood, Dict[str, Union[float, List[OptimizationIteration]]]]:
64 | r"""Fit a gpytorch model by maximizing MLL with a torch optimizer.
65 | The model and likelihood in mll must already be in train mode.
66 | Note: this method requires that the model has `train_inputs` and `train_targets`.
67 | Args:
68 | mll: MarginalLogLikelihood to be maximized.
69 | bounds: A ParameterBounds dictionary mapping parameter names to tuples
70 | of lower and upper bounds. Bounds specified here take precedence
71 | over bounds on the same parameters specified in the constraints
72 | registered with the module.
73 | optimizer_cls: Torch optimizer to use. Must not require a closure.
74 | options: options for model fitting. Relevant options will be passed to
75 | the `optimizer_cls`. Additionally, options can include: "disp"
76 | to specify whether to display model fitting diagnostics and "maxiter"
77 | to specify the maximum number of iterations.
78 | track_iterations: Track the function values and wall time for each
79 | iteration.
80 | approx_mll: If True, use gpytorch's approximate MLL computation (
81 | according to the gpytorch defaults based on the training at size).
82 | Unlike for the deterministic algorithms used in fit_gpytorch_scipy,
83 | this is not an issue for stochastic optimizers.
84 | Returns:
85 | 2-element tuple containing
86 | - mll with parameters optimized in-place.
87 | - Dictionary with the following key/values:
88 | "fopt": Best mll value.
89 | "wall_time": Wall time of fitting.
90 | "iterations": List of OptimizationIteration objects with information on each
91 | iteration. If track_iterations is False, will be empty.
92 | Example:
93 | >>> gp = SingleTaskGP(train_X, train_Y)
94 | >>> mll = ExactMarginalLogLikelihood(gp.likelihood, gp)
95 | >>> mll.train()
96 | >>> fit_gpytorch_torch(mll)
97 | >>> mll.eval()
98 | """
99 | optim_options = {"maxiter": 100, "disp": True, "lr": 0.05}
100 | optim_options.update(options or {})
101 | exclude = optim_options.pop("exclude", None)
102 | if exclude is not None:
103 | mll_params = [
104 | t for p_name, t in mll.named_parameters() if p_name not in exclude
105 | ]
106 | else:
107 | mll_params = list(mll.parameters())
108 | optimizer = optimizer_cls(
109 | params=[{"params": mll_params}],
110 | **_filter_kwargs(optimizer_cls, **optim_options),
111 | )
112 |
113 | # get bounds specified in model (if any)
114 | bounds_: ParameterBounds = {}
115 | if hasattr(mll, "named_parameters_and_constraints"):
116 | for param_name, _, constraint in mll.named_parameters_and_constraints():
117 | if constraint is not None and not constraint.enforced:
118 | bounds_[param_name] = constraint.lower_bound, constraint.upper_bound
119 |
120 | # update with user-supplied bounds (overwrites if already exists)
121 | if bounds is not None:
122 | bounds_.update(bounds)
123 |
124 | iterations = []
125 | t1 = time.time()
126 |
127 | param_trajectory: Dict[str, List[Tensor]] = {
128 | name: [] for name, param in mll.named_parameters()
129 | }
130 | loss_trajectory: List[float] = []
131 | i = 0
132 | stop = False
133 | stopping_criterion = ExpMAStoppingCriterion(
134 | **_filter_kwargs(ExpMAStoppingCriterion, **optim_options)
135 | )
136 | train_inputs, train_targets = mll.model.train_inputs, mll.model.train_targets
137 | while not stop:
138 | optimizer.zero_grad()
139 | with gpt_settings.fast_computations(log_prob=approx_mll):
140 | output = mll.model(*train_inputs)
141 | # we sum here to support batch mode
142 | args = [output, train_targets] + _get_extra_mll_args(mll)
143 | loss = -mll(*args).sum()
144 | loss.requires_grad_(True)
145 | loss.backward()
146 | loss_trajectory.append(loss.item())
147 | for name, param in mll.named_parameters():
148 | param_trajectory[name].append(param.detach().clone())
149 | if optim_options["disp"] and (
150 | (i + 1) % 10 == 0 or i == (optim_options["maxiter"] - 1)
151 | ):
152 | print(f"Iter {i + 1}/{optim_options['maxiter']}: {loss.item()}")
153 | if track_iterations:
154 | iterations.append(OptimizationIteration(i, loss.item(), time.time() - t1))
155 | optimizer.step()
156 | # project onto bounds:
157 | if bounds_:
158 | for pname, param in mll.named_parameters():
159 | if pname in bounds_:
160 | param.data = param.data.clamp(*bounds_[pname])
161 | i += 1
162 | stop = stopping_criterion.evaluate(fvals=loss.detach())
163 | info_dict = {
164 | "fopt": loss_trajectory[-1],
165 | "wall_time": time.time() - t1,
166 | "iterations": iterations,
167 | }
168 | return mll, info_dict
169 |
--------------------------------------------------------------------------------
/src/bo/botorch_funcitons.py:
--------------------------------------------------------------------------------
1 | from modulefinder import Module
2 | import torch
3 | from torch import Tensor
4 | import torch.nn as nn
5 | import math
6 |
7 | from src.bo.botorch_fit import fit_gpytorch_torch
8 | from botorch.models import SingleTaskGP
9 | from botorch.test_functions import Hartmann
10 | from gpytorch.mlls import ExactMarginalLogLikelihood
11 | from botorch.generation import get_best_candidates, gen_candidates_torch
12 | from botorch.optim import gen_batch_initial_conditions
13 | from botorch.acquisition import qExpectedImprovement, qUpperConfidenceBound
14 | from botorch.sampling import SobolQMCNormalSampler
15 | import os
16 | from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union
17 | from botorch.test_functions.base import BaseTestProblem
18 | from botorch.acquisition.acquisition import AcquisitionFunction
19 | from botorch.acquisition.cached_cholesky import CachedCholeskyMCAcquisitionFunction
20 | from botorch.acquisition.objective import (
21 | IdentityMCObjective,
22 | MCAcquisitionObjective,
23 | PosteriorTransform,
24 | )
25 | from botorch.acquisition.utils import prune_inferior_points
26 | from botorch.exceptions.errors import UnsupportedError
27 | from botorch.models.model import Model
28 | from botorch.posteriors.posterior import Posterior
29 | from botorch.sampling.samplers import MCSampler, SobolQMCNormalSampler
30 | from botorch.utils.transforms import (
31 | concatenate_pending_points,
32 | match_batch_shape,
33 | t_batch_mode_transform,
34 | )
35 | from botorch.acquisition.monte_carlo import MCAcquisitionFunction
36 | from torch import Tensor
37 |
38 |
39 |
40 | def clamp_col(t:torch.tensor,bounds:torch.tensor):
41 | if len(bounds.shape) == 2:
42 | dim = bounds.shape[-1]
43 | for i in range(dim):
44 | t[:,i] = torch.clamp(t[:,i],min=bounds[0][i], max = bounds[1][i] )
45 | else:
46 | assert bounds.shape[0] == t.shape[0] #batch_size bounds
47 | dim = bounds.shape[-1]
48 | for i in range(dim):
49 | for j in range(bounds.shape[0]):
50 | t[j,i] = torch.clamp(t[j,i],min=bounds[j][0][i], max = bounds[j][1][i] )
51 |
52 | return t
53 |
54 | def generate_batch_initial(bounds:torch.tensor, batch_size:int):
55 | """_summary_
56 |
57 | Args:
58 | bounds (tensor): _description_
59 | batch_size (int): _description_
60 |
61 | Returns:
62 | torch.tensor with shape (batch_size, dim)
63 | """
64 |
65 | dim = bounds.shape[-1]
66 | _initial = torch.zeros((batch_size,dim)).cuda()
67 |
68 | if len(bounds.shape) == 2:
69 | for idx in range(dim):
70 | _initial[:,idx].uniform_(bounds[0][idx],bounds[1][idx])
71 | else:
72 | bsz = bounds.shape[0]
73 | for idx in range(dim):
74 | for batch in range(bsz):
75 | _initial[batch,idx].uniform_(bounds[batch][0][idx],bounds[batch][1][idx])
76 |
77 | return _initial
78 |
79 | def generate_batch_cadidates(initial_conditions:torch.tensor,
80 | acquisition_function,
81 | bounds:torch.tensor,
82 | optimizer:torch.optim.Optimizer,
83 | options: Optional[Dict[str, Union[float, str]]] = None,
84 | ):
85 | options = options or {}
86 | candidates = initial_conditions.requires_grad_(True)
87 | _optimizer = optimizer(params=[candidates],lr=options.get("lr",0.025))
88 |
89 | max_iter = int(options.get("max_iter",1))
90 |
91 | for idx in range(max_iter):
92 | with torch.no_grad():
93 | X = clamp_col(candidates,bounds)
94 |
95 | loss = -acquisition_function(X).sum()
96 | #print('[BO] [step] {} [loss] {}'.format(idx,loss))
97 | loss.requires_grad_(True)
98 | #print('loss shape',loss.shape)
99 |
100 | grad = torch.autograd.grad(loss, X,allow_unused=True)[0]
101 |
102 | def assign_grad():
103 | _optimizer.zero_grad()
104 | candidates.grad = grad
105 | return loss
106 |
107 | _optimizer.step(assign_grad)
108 |
109 |
110 | candidates = candidates.requires_grad_(False)
111 | final_candidates = clamp_col(candidates,bounds)
112 |
113 | return final_candidates
114 |
115 |
116 | def one_step_BO(train_x,train_obj,bounds,max_iter:int=10,acq_factor:float=1.0,lr:float=0.1, acq_type:str='EI'):
117 |
118 | assert train_x.shape[0] == train_obj.shape[0]
119 |
120 | batch_size = train_x.shape[0]
121 |
122 | best_value,_ = torch.max(train_obj,dim=1)
123 | #print(best_value.shape)
124 | # train_x.retain_grad()
125 | # train_obj.retain_grad()
126 | #print('[train_obj] [requires_grad]',train_x.requires_grad,train_obj.requires_grad,train_obj.shape,train_x.shape)
127 | model = SingleTaskGP(train_X=train_x, train_Y=train_obj).cuda()
128 | mll = ExactMarginalLogLikelihood(model.likelihood, model).cuda()
129 | # fit_gpytorch_torch(mll)
130 | mll.train()
131 | fit_gpytorch_torch(mll,options={'lr':lr,'maxiter':max_iter, 'disp':False})
132 | mll.eval()
133 |
134 | resampler = SobolQMCNormalSampler(num_samples=batch_size, seed=0, resample=True)
135 | # MC_EI_resample = qExpectedImprovement(
136 | # model, best_f=best_value, sampler=resampler
137 | # )
138 | if acq_type == 'EI':
139 | MC_EI_resample = XiqExpectedImprovement(
140 | model, best_f=best_value,xi=acq_factor, sampler=resampler
141 | )
142 | elif acq_type == 'UCB':
143 | MC_EI_resample = qUpperConfidenceBound(model,beta=acq_factor,sampler=resampler)
144 | else:
145 | raise Exception
146 | #MC_EI_resample= qUpperConfidenceBound(model,beta=acq_factor,sampler=resampler)
147 |
148 | batch_initial_conditions = generate_batch_initial(bounds=bounds,
149 | batch_size=batch_size
150 | )
151 |
152 |
153 | batch_candidates = generate_batch_cadidates(
154 | initial_conditions=batch_initial_conditions,
155 | acquisition_function=MC_EI_resample,
156 | bounds=bounds,
157 | optimizer = torch.optim.Adam,
158 | options={'max_iter':max_iter, 'disp':False, 'lr':lr}
159 | )
160 |
161 | return batch_candidates
162 |
163 |
164 | class XiqExpectedImprovement(MCAcquisitionFunction):
165 | r"""MC-based batch Expected Improvement.
166 |
167 | This computes qEI by
168 | (1) sampling the joint posterior over q points
169 | (2) evaluating the improvement over the current best for each sample
170 | (3) maximizing over q
171 | (4) averaging over the samples
172 |
173 | `qEI(X) = E(max(max Y - best_f, 0)), Y ~ f(X), where X = (x_1,...,x_q)`
174 |
175 | Example:
176 | >>> model = SingleTaskGP(train_X, train_Y)
177 | >>> best_f = train_Y.max()[0]
178 | >>> sampler = SobolQMCNormalSampler(1024)
179 | >>> qEI = qExpectedImprovement(model, best_f, sampler)
180 | >>> qei = qEI(test_X)
181 | """
182 |
183 | def __init__(
184 | self,
185 | model: Model,
186 | best_f: Union[float, Tensor],
187 | xi: float=1.0,
188 | sampler: Optional[MCSampler] = None,
189 | objective: Optional[MCAcquisitionObjective] = None,
190 | posterior_transform: Optional[PosteriorTransform] = None,
191 | X_pending: Optional[Tensor] = None,
192 | **kwargs: Any,
193 | ) -> None:
194 | r"""q-Expected Improvement.
195 |
196 | Args:
197 | model: A fitted model.
198 | best_f: The best objective value observed so far (assumed noiseless). Can be
199 | a `batch_shape`-shaped tensor, which in case of a batched model
200 | specifies potentially different values for each element of the batch.
201 | sampler: The sampler used to draw base samples. Defaults to
202 | `SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`
203 | objective: The MCAcquisitionObjective under which the samples are evaluated.
204 | Defaults to `IdentityMCObjective()`.
205 | posterior_transform: A PosteriorTransform (optional).
206 | X_pending: A `m x d`-dim Tensor of `m` design points that have been
207 | submitted for function evaluation but have not yet been evaluated.
208 | Concatenated into X upon forward call. Copied and set to have no
209 | gradient.
210 | """
211 | super().__init__(
212 | model=model,
213 | sampler=sampler,
214 | objective=objective,
215 | posterior_transform=posterior_transform,
216 | X_pending=X_pending,
217 | )
218 | self.register_buffer("best_f", torch.as_tensor(best_f, dtype=float))
219 | self.xi = xi
220 |
221 | @concatenate_pending_points
222 | @t_batch_mode_transform()
223 | def forward(self, X: Tensor) -> Tensor:
224 | r"""Evaluate qExpectedImprovement on the candidate set `X`.
225 |
226 | Args:
227 | X: A `batch_shape x q x d`-dim Tensor of t-batches with `q` `d`-dim design
228 | points each.
229 |
230 | Returns:
231 | A `batch_shape'`-dim Tensor of Expected Improvement values at the given
232 | design points `X`, where `batch_shape'` is the broadcasted batch shape of
233 | model and input `X`.
234 | """
235 | posterior = self.model.posterior(
236 | X=X, posterior_transform=self.posterior_transform
237 | )
238 | samples = self.sampler(posterior)
239 | obj = self.objective(samples, X=X)
240 | obj = (obj - self.best_f.unsqueeze(-1).to(obj)-self.xi).clamp_min(0)
241 | q_ei = obj.max(dim=-1)[0].mean(dim=0)
242 | return q_ei
243 |
244 | class qUpperConfidenceBound(MCAcquisitionFunction):
245 | r"""MC-based batch Upper Confidence Bound.
246 | Uses a reparameterization to extend UCB to qUCB for q > 1 (See Appendix A
247 | of [Wilson2017reparam].)
248 | `qUCB = E(max(mu + |Y_tilde - mu|))`, where `Y_tilde ~ N(mu, beta pi/2 Sigma)`
249 | and `f(X)` has distribution `N(mu, Sigma)`.
250 | Example:
251 | >>> model = SingleTaskGP(train_X, train_Y)
252 | >>> sampler = SobolQMCNormalSampler(1024)
253 | >>> qUCB = qUpperConfidenceBound(model, 0.1, sampler)
254 | >>> qucb = qUCB(test_X)
255 | """
256 |
257 | def __init__(
258 | self,
259 | model: Model,
260 | beta: float,
261 | sampler: Optional[MCSampler] = None,
262 | objective: Optional[MCAcquisitionObjective] = None,
263 | posterior_transform: Optional[PosteriorTransform] = None,
264 | X_pending: Optional[Tensor] = None,
265 | ) -> None:
266 | r"""q-Upper Confidence Bound.
267 | Args:
268 | model: A fitted model.
269 | beta: Controls tradeoff between mean and standard deviation in UCB.
270 | sampler: The sampler used to draw base samples. Defaults to
271 | `SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`
272 | objective: The MCAcquisitionObjective under which the samples are
273 | evaluated. Defaults to `IdentityMCObjective()`.
274 | posterior_transform: A PosteriorTransform (optional).
275 | X_pending: A `batch_shape x m x d`-dim Tensor of `m` design points that have
276 | points that have been submitted for function evaluation but have not yet
277 | been evaluated. Concatenated into X upon forward call. Copied and set to
278 | have no gradient.
279 | """
280 | super().__init__(
281 | model=model,
282 | sampler=sampler,
283 | objective=objective,
284 | posterior_transform=posterior_transform,
285 | X_pending=X_pending,
286 | )
287 | self.beta_prime = math.sqrt(beta * math.pi / 2)
288 |
289 | @concatenate_pending_points
290 | @t_batch_mode_transform()
291 | def forward(self, X: Tensor) -> Tensor:
292 | r"""Evaluate qUpperConfidenceBound on the candidate set `X`.
293 | Args:
294 | X: A `batch_sahpe x q x d`-dim Tensor of t-batches with `q` `d`-dim design
295 | points each.
296 | Returns:
297 | A `batch_shape'`-dim Tensor of Upper Confidence Bound values at the given
298 | design points `X`, where `batch_shape'` is the broadcasted batch shape of
299 | model and input `X`.
300 | """
301 | posterior = self.model.posterior(
302 | X=X, posterior_transform=self.posterior_transform
303 | )
304 | samples = self.sampler(posterior)
305 | obj = self.objective(samples, X=X)
306 | mean = obj.mean(dim=0)
307 | ucb_samples = mean + self.beta_prime * (obj - mean).abs()
308 | return ucb_samples.max(dim=-1)[0].mean(dim=0)
--------------------------------------------------------------------------------
/src/data/pecnet.py:
--------------------------------------------------------------------------------
1 | import os
2 | import math
3 | import torch
4 | import numpy as np
5 | from torch.utils.data import Dataset
6 | from torch.utils.data.sampler import Sampler
7 | from torch.utils.data.dataloader import DataLoader
8 |
9 |
10 | def pecnet_traj_collate_fn(data):
11 | obs_seq_list, pred_seq_list, non_linear_ped_list, loss_mask_list, _, _ = zip(*data)
12 |
13 | _len = [len(seq) for seq in obs_seq_list]
14 | cum_start_idx = [0] + np.cumsum(_len).tolist()
15 | seq_start_end = [[start, end] for start, end in zip(cum_start_idx, cum_start_idx[1:])]
16 | seq_start_end = torch.LongTensor(seq_start_end)
17 | scene_mask = torch.zeros(sum(_len), sum(_len), dtype=torch.bool)
18 | for idx, (start, end) in enumerate(seq_start_end):
19 | scene_mask[start:end, start:end] = 1
20 |
21 | out = [torch.cat(obs_seq_list, dim=0), torch.cat(pred_seq_list, dim=0),
22 | torch.cat(non_linear_ped_list, dim=0), torch.cat(loss_mask_list, dim=0), scene_mask, seq_start_end]
23 | return tuple(out)
24 |
25 |
26 | class TrajBatchSampler(Sampler):
27 | r"""Samples batched elements by yielding a mini-batch of indices.
28 |
29 | Args:
30 | data_source (Dataset): dataset to sample from
31 | batch_size (int): Size of mini-batch.
32 | shuffle (bool, optional): set to ``True`` to have the data reshuffled
33 | at every epoch (default: ``False``).
34 | drop_last (bool): If ``True``, the sampler will drop the last batch if
35 | its size would be less than ``batch_size``
36 | generator (Generator): Generator used in sampling.
37 | """
38 |
39 | def __init__(self, data_source, batch_size=64, shuffle=False, drop_last=False, generator=None):
40 | self.data_source = data_source
41 | self.batch_size = batch_size
42 | self.shuffle = shuffle
43 | self.drop_last = drop_last
44 | self.generator = generator
45 |
46 | def __iter__(self):
47 | assert len(self.data_source) == len(self.data_source.num_peds_in_seq)
48 |
49 | if self.shuffle:
50 | if self.generator is None:
51 | generator = torch.Generator()
52 | generator.manual_seed(int(torch.empty((), dtype=torch.int64).random_().item()))
53 | else:
54 | generator = self.generator
55 | indices = torch.randperm(len(self.data_source), generator=generator).tolist()
56 | else:
57 | indices = list(range(len(self.data_source)))
58 | num_peds_indices = self.data_source.num_peds_in_seq[indices]
59 |
60 | batch = []
61 | total_num_peds = 0
62 | for idx, num_peds in zip(indices, num_peds_indices):
63 | batch.append(idx)
64 | total_num_peds += num_peds
65 | if total_num_peds >= self.batch_size:
66 | yield batch
67 | batch = []
68 | total_num_peds = 0
69 | if len(batch) > 0 and not self.drop_last:
70 | yield batch
71 |
72 | def __len__(self):
73 | # Approximated number of batches.
74 | # The order of the trajectories can be shuffled, so this number can vary from run to run.
75 | if self.drop_last:
76 | return sum(self.data_source.num_peds_in_seq) // self.batch_size
77 | else:
78 | return (sum(self.data_source.num_peds_in_seq) + self.batch_size - 1) // self.batch_size
79 |
80 |
81 | def read_file(_path, delim='\t'):
82 | data = []
83 | if delim == 'tab':
84 | delim = '\t'
85 | elif delim == 'space':
86 | delim = ' '
87 | with open(_path, 'r') as f:
88 | for line in f:
89 | line = line.strip().split(delim)
90 | line = [float(i) for i in line]
91 | data.append(line)
92 | return np.asarray(data)
93 |
94 |
95 | def poly_fit(traj, traj_len, threshold):
96 | """
97 | Input:
98 | - traj: Numpy array of shape (2, traj_len)
99 | - traj_len: Len of trajectory
100 | - threshold: Minimum error to be considered for non linear traj
101 | Output:
102 | - int: 1 -> Non Linear 0-> Linear
103 | """
104 | t = np.linspace(0, traj_len - 1, traj_len)
105 | res_x = np.polyfit(t, traj[0, -traj_len:], 2, full=True)[1]
106 | res_y = np.polyfit(t, traj[1, -traj_len:], 2, full=True)[1]
107 | if res_x + res_y >= threshold:
108 | return 1.0
109 | else:
110 | return 0.0
111 |
112 |
113 | class PECNETTrajectoryDataset(Dataset):
114 | """Dataloder for the Trajectory datasets"""
115 | def __init__(self, data_dir, obs_len=8, pred_len=12, skip=1, threshold=0.002, min_ped=1, delim='\t'):
116 | """
117 | Args:
118 | - data_dir: Directory containing dataset files in the format
119 |
120 | - obs_len: Number of time-steps in input trajectories
121 | - pred_len: Number of time-steps in output trajectories
122 | - skip: Number of frames to skip while making the dataset
123 | - threshold: Minimum error to be considered for non linear traj
124 | when using a linear predictor
125 | - min_ped: Minimum number of pedestrians that should be in a seqeunce
126 | - delim: Delimiter in the dataset files
127 | """
128 | super(PECNETTrajectoryDataset, self).__init__()
129 |
130 | self.data_dir = data_dir
131 | self.obs_len = obs_len
132 | self.pred_len = pred_len
133 | self.skip = skip
134 | self.seq_len = self.obs_len + self.pred_len
135 | self.delim = delim
136 |
137 | all_files = os.listdir(self.data_dir)
138 | all_files = [os.path.join(self.data_dir, _path) for _path in all_files]
139 | num_peds_in_seq = []
140 | seq_list = []
141 | seq_list_rel = []
142 | loss_mask_list = []
143 | non_linear_ped = []
144 | for path in all_files:
145 | data = read_file(path, delim)
146 | frames = np.unique(data[:, 0]).tolist()
147 | frame_data = []
148 | for frame in frames:
149 | frame_data.append(data[frame == data[:, 0], :])
150 | num_sequences = int(math.ceil((len(frames) - self.seq_len + 1) / skip))
151 |
152 | for idx in range(0, num_sequences * self.skip + 1, skip):
153 | curr_seq_data = np.concatenate(frame_data[idx:idx + self.seq_len], axis=0)
154 | peds_in_curr_seq = np.unique(curr_seq_data[:, 1])
155 | curr_seq_rel = np.zeros((len(peds_in_curr_seq), 2, self.seq_len))
156 | curr_seq = np.zeros((len(peds_in_curr_seq), 2, self.seq_len))
157 | curr_loss_mask = np.zeros((len(peds_in_curr_seq), self.seq_len))
158 | num_peds_considered = 0
159 | _non_linear_ped = []
160 | for _, ped_id in enumerate(peds_in_curr_seq):
161 | curr_ped_seq = curr_seq_data[curr_seq_data[:, 1] == ped_id, :]
162 | curr_ped_seq = np.around(curr_ped_seq, decimals=4)
163 | pad_front = frames.index(curr_ped_seq[0, 0]) - idx
164 | pad_end = frames.index(curr_ped_seq[-1, 0]) - idx + 1
165 | if pad_end - pad_front != self.seq_len:
166 | continue
167 | curr_ped_seq = np.transpose(curr_ped_seq[:, 2:])
168 | curr_ped_seq = curr_ped_seq
169 | # Make coordinates relative
170 | rel_curr_ped_seq = np.zeros(curr_ped_seq.shape)
171 | rel_curr_ped_seq[:, 1:] = curr_ped_seq[:, 1:] - curr_ped_seq[:, :-1]
172 | _idx = num_peds_considered
173 | curr_seq[_idx, :, pad_front:pad_end] = curr_ped_seq
174 | curr_seq_rel[_idx, :, pad_front:pad_end] = rel_curr_ped_seq
175 | # Linear vs Non-Linear Trajectory
176 | _non_linear_ped.append(poly_fit(curr_ped_seq, pred_len, threshold))
177 | curr_loss_mask[_idx, pad_front:pad_end] = 1
178 | num_peds_considered += 1
179 |
180 | if num_peds_considered > min_ped:
181 | non_linear_ped += _non_linear_ped
182 | num_peds_in_seq.append(num_peds_considered)
183 | loss_mask_list.append(curr_loss_mask[:num_peds_considered])
184 | seq_list.append(curr_seq[:num_peds_considered])
185 | seq_list_rel.append(curr_seq_rel[:num_peds_considered])
186 |
187 | self.num_seq = len(seq_list)
188 | seq_list = np.concatenate(seq_list, axis=0)
189 | seq_list_rel = np.concatenate(seq_list_rel, axis=0)
190 | loss_mask_list = np.concatenate(loss_mask_list, axis=0)
191 | non_linear_ped = np.asarray(non_linear_ped)
192 | self.num_peds_in_seq = np.array(num_peds_in_seq)
193 |
194 | # Convert numpy -> Torch Tensor
195 | self.obs_traj = torch.from_numpy(seq_list[:, :, :self.obs_len]).type(torch.float)
196 | self.pred_traj = torch.from_numpy(seq_list[:, :, self.obs_len:]).type(torch.float)
197 | self.obs_traj_rel = torch.from_numpy(seq_list_rel[:, :, :self.obs_len]).type(torch.float)
198 | self.pred_traj_rel = torch.from_numpy(seq_list_rel[:, :, self.obs_len:]).type(torch.float)
199 | self.loss_mask = torch.from_numpy(loss_mask_list).type(torch.float)
200 | self.non_linear_ped = torch.from_numpy(non_linear_ped).type(torch.float)
201 | cum_start_idx = [0] + np.cumsum(num_peds_in_seq).tolist()
202 | self.seq_start_end = [(start, end) for start, end in zip(cum_start_idx, cum_start_idx[1:])]
203 |
204 | def __len__(self):
205 | return self.num_seq
206 |
207 | def __getitem__(self, index):
208 | start, end = self.seq_start_end[index]
209 | out = [self.obs_traj[start:end, :], self.pred_traj[start:end, :],
210 | self.non_linear_ped[start:end], self.loss_mask[start:end, :], None, [[0, end - start]]]
211 | return out
212 |
213 |
214 | def calculate_loss(x, reconstructed_x, mean, log_var, criterion, future, interpolated_future):
215 | # reconstruction loss
216 | RCL_dest = criterion(x, reconstructed_x)
217 | ADL_traj = criterion(future, interpolated_future) # better with l2 loss
218 |
219 | # kl divergence loss
220 | KLD = -0.5 * torch.sum(1 + log_var - mean.pow(2) - log_var.exp())
221 |
222 | return RCL_dest, KLD, ADL_traj
--------------------------------------------------------------------------------
/src/datasets/eth/val/uni_examples_val.txt:
--------------------------------------------------------------------------------
1 | 5940.0 95.0 14.6694181409 7.59129090929
2 | 5940.0 96.0 14.6927797681 6.77030119827
3 | 5950.0 95.0 15.195580915 7.38007698654
4 | 5950.0 96.0 15.268612308 6.60085274047
5 | 5970.0 97.0 14.796328602 8.11896772645
6 | 5970.0 98.0 14.748342557 8.76859970127
7 | 5980.0 97.0 14.2503821076 8.22326205893
8 | 5980.0 98.0 14.2747960604 8.83566310499
9 | 5990.0 97.0 13.7046460784 8.3277950512
10 | 5990.0 98.0 13.8012495637 8.90272650871
11 | 6000.0 97.0 13.1595414444 8.42612288868
12 | 6000.0 98.0 13.3041309748 8.98768939741
13 | 6010.0 97.0 12.6163309965 8.51084711758
14 | 6010.0 98.0 12.751660062 9.11465641086
15 | 6020.0 97.0 12.0733310137 8.59557134648
16 | 6020.0 98.0 12.1991891492 9.2413847645
17 | 6030.0 97.0 11.5204391707 8.69771774076
18 | 6030.0 98.0 11.6502961433 9.36859043775
19 | 6040.0 97.0 10.9284008173 8.86931413675
20 | 6040.0 98.0 11.1336042992 9.4986600286
21 | 6040.0 99.0 14.9234495282 8.73160743232
22 | 6050.0 97.0 10.336572929 9.04067187295
23 | 6050.0 98.0 10.6169124551 9.62872961944
24 | 6050.0 99.0 14.2996309433 8.83375382659
25 | 6060.0 97.0 9.74453457566 9.21226826894
26 | 6060.0 98.0 10.100431076 9.75903787008
27 | 6060.0 99.0 13.6758123584 8.93613888067
28 | 6070.0 97.0 9.19290552333 9.36047600456
29 | 6070.0 98.0 9.54817062838 9.87192395535
30 | 6070.0 99.0 13.0551507501 9.02229506837
31 | 6070.0 100.0 14.6401634907 9.11465641086
32 | 6080.0 97.0 8.64148693611 9.50844508038
33 | 6080.0 98.0 8.98686018101 9.98027550442
34 | 6080.0 99.0 12.4393298393 9.08410795649
35 | 6080.0 100.0 13.9081658394 9.06286723432
36 | 6090.0 97.0 8.08964741868 9.65164096021
37 | 6090.0 98.0 8.42554973364 10.0888657133
38 | 6090.0 99.0 11.8235089285 9.14592084462
39 | 6090.0 100.0 13.1601728398 8.9986677482
40 | 6100.0 97.0 7.53612418036 9.74877549869
41 | 6100.0 98.0 7.86844858846 10.1907734478
42 | 6100.0 99.0 11.1923240648 9.23541826951
43 | 6100.0 100.0 12.3481984468 8.88506568353
44 | 6110.0 97.0 6.98281140716 9.84614869698
45 | 6110.0 98.0 7.31513581525 10.2862373676
46 | 6110.0 99.0 10.5544043175 9.33661002459
47 | 6110.0 100.0 11.5358031237 8.77862341286
48 | 6110.0 101.0 15.1280216148 8.40249556851
49 | 6120.0 97.0 6.42928816884 9.95044302945
50 | 6120.0 98.0 6.76203350716 10.3817012875
51 | 6120.0 99.0 9.91669503534 9.43780177967
52 | 6120.0 100.0 10.7173043124 8.73900588611
53 | 6120.0 101.0 14.5191460526 8.51323371558
54 | 6130.0 97.0 5.87597539564 10.0652383931
55 | 6130.0 98.0 6.20872073395 10.4771652074
56 | 6130.0 99.0 9.28761482266 9.50558116279
57 | 6130.0 100.0 9.88028457137 8.72086774133
58 | 6130.0 101.0 13.9102704905 8.62421052245
59 | 6140.0 97.0 5.32245215732 10.1802724166
60 | 6140.0 98.0 5.66277423958 10.5890966535
61 | 6140.0 99.0 8.67116251657 9.52252600857
62 | 6140.0 100.0 9.00032994803 8.75332547409
63 | 6140.0 101.0 13.3013949283 8.73494866951
64 | 6150.0 97.0 4.79565798793 10.2721564395
65 | 6150.0 98.0 5.11682774522 10.7010280996
66 | 6150.0 99.0 8.05471021048 9.53947085435
67 | 6150.0 100.0 8.12142765023 8.78649918625
68 | 6150.0 101.0 12.6474798327 8.9139435193
69 | 6160.0 97.0 4.28043939956 10.3544940704
70 | 6160.0 98.0 4.57109171596 10.8127208858
71 | 6160.0 99.0 7.44562418322 9.61560333045
72 | 6160.0 100.0 7.25431139857 8.8246847542
73 | 6160.0 101.0 11.9935647371 9.09293836908
74 | 6170.0 97.0 3.7652208112 10.4368317013
75 | 6170.0 98.0 4.01798940786 10.9392105797
76 | 6170.0 99.0 6.84137885349 9.73111467352
77 | 6170.0 100.0 6.38719514692 8.86287032216
78 | 6170.0 101.0 11.3531194085 9.26334146608
79 | 6180.0 97.0 3.22011617727 10.5647533539
80 | 6180.0 98.0 3.46320337889 11.0695188303
81 | 6180.0 99.0 6.23713352375 9.84662601658
82 | 6180.0 100.0 5.48114284998 8.88840692073
83 | 6180.0 101.0 10.7667636131 9.39985487151
84 | 6190.0 97.0 2.6676452645 10.7038920171
85 | 6190.0 98.0 2.90820688481 11.1995884212
86 | 6190.0 99.0 5.63983354264 9.95450024605
87 | 6190.0 100.0 4.54899287945 8.90535176651
88 | 6190.0 101.0 10.1804078177 9.53612961715
89 | 6200.0 97.0 2.12464528167 10.836348206
90 | 6200.0 98.0 2.3593138789 11.3456482186
91 | 6200.0 99.0 5.04905797992 10.0544987021
92 | 6200.0 100.0 3.61705337402 8.92253527209
93 | 6200.0 101.0 9.57616248796 9.67526828038
94 | 6210.0 97.0 1.5961673914 10.9583033636
95 | 6210.0 98.0 1.81610343096 11.507220903
96 | 6210.0 99.0 4.45849288231 10.1544971582
97 | 6210.0 100.0 2.68048363619 8.98673475821
98 | 6210.0 101.0 8.94476715908 9.81846416021
99 | 6220.0 97.0 1.06747903602 11.0804971811
100 | 6220.0 98.0 1.27289298302 11.6690322472
101 | 6220.0 99.0 3.83804173913 10.2413693253
102 | 6220.0 100.0 1.74307203792 9.06262857452
103 | 6220.0 101.0 8.31337183021 9.96166004004
104 | 6230.0 97.0 0.538790680641 11.2026909985
105 | 6230.0 98.0 0.752202301802 11.8263090552
106 | 6230.0 99.0 3.17297199271 10.3081940692
107 | 6230.0 100.0 0.831337183021 9.22730383632
108 | 6230.0 101.0 7.71102068646 10.1222780852
109 | 6240.0 97.0 0.0103127903717 11.3246461562
110 | 6240.0 98.0 0.264765107909 11.9769033888
111 | 6240.0 99.0 2.5079022463 10.3750188131
112 | 6240.0 100.0 -0.0631395328877 9.45092806865
113 | 6240.0 101.0 7.15181489019 10.3086713888
114 | 6250.0 98.0 -0.222672085984 12.1277363822
115 | 6250.0 99.0 1.90155226547 10.4838476818
116 | 6250.0 101.0 6.59281955902 10.4950646924
117 | 6260.0 99.0 1.30172670303 10.5972110867
118 | 6260.0 101.0 6.0260370188 10.6805033568
119 | 6270.0 99.0 0.701901140601 10.7105744915
120 | 6270.0 101.0 5.42831610746 10.8618848046
121 | 6280.0 99.0 0.102075578168 10.8239378964
122 | 6280.0 101.0 4.83059519613 11.0432662523
123 | 6290.0 102.0 15.0482553382 8.80654660943
124 | 6290.0 103.0 3.81699522817 9.61608065005
125 | 6290.0 104.0 0.542368587505 13.7658972475
126 | 6300.0 102.0 14.2617472236 8.96978991243
127 | 6300.0 103.0 4.41471613951 9.4919775542
128 | 6300.0 104.0 1.19775693888 13.6704333276
129 | 6310.0 102.0 13.469767016 9.12396414305
130 | 6310.0 103.0 5.01243705084 9.36787445835
131 | 6310.0 104.0 1.85335575536 13.5749694077
132 | 6320.0 102.0 12.6279065775 9.19556208296
133 | 6320.0 103.0 5.604054474 9.28100229126
134 | 6320.0 104.0 2.50874410674 13.4795054878
135 | 6330.0 102.0 11.7900449761 9.27360383747
136 | 6330.0 103.0 6.19504050183 9.19842600056
137 | 6330.0 104.0 3.16118594658 13.3864281659
138 | 6340.0 102.0 10.9883833735 9.4101172429
139 | 6340.0 103.0 6.79549745959 9.11036053446
140 | 6340.0 104.0 3.8066824378 13.2988400194
141 | 6350.0 102.0 10.1865113059 9.54639198854
142 | 6350.0 103.0 7.4188951143 9.00940743918
143 | 6350.0 104.0 4.45196846391 13.2114905327
144 | 6360.0 102.0 9.36464505279 9.73970642631
145 | 6360.0 103.0 8.04229276901 8.9084543439
146 | 6360.0 104.0 5.11977425675 13.1200838294
147 | 6360.0 105.0 -0.0631395328877 13.025335889
148 | 6370.0 102.0 8.5427787997 9.93278220427
149 | 6370.0 103.0 8.66021833087 8.81012650642
150 | 6370.0 104.0 5.83935446656 13.0200853734
151 | 6370.0 105.0 0.357790686363 12.9642389802
152 | 6380.0 102.0 7.75458696415 10.0301554026
153 | 6380.0 103.0 9.27603924164 8.71275330814
154 | 6380.0 104.0 6.55914514148 12.9198482575
155 | 6380.0 105.0 0.778720905614 12.9029034117
156 | 6390.0 102.0 6.97018350058 10.1170275697
157 | 6390.0 103.0 9.8918601524 8.61561876966
158 | 6390.0 104.0 7.27788349085 12.795267842
159 | 6390.0 105.0 1.19965112487 12.841806503
160 | 6400.0 102.0 6.13379515493 10.2031837573
161 | 6400.0 103.0 10.5232554813 8.37695896994
162 | 6400.0 104.0 7.99346486358 12.5725982489
163 | 6400.0 105.0 1.62058134412 12.7804709345
164 | 6410.0 102.0 5.27509750765 10.2891012852
165 | 6410.0 103.0 11.1546508102 8.13829917023
166 | 6410.0 104.0 8.7090462363 12.3499286558
167 | 6410.0 105.0 2.07602784135 12.7193740257
168 | 6420.0 102.0 4.42292427878 10.3614152046
169 | 6420.0 103.0 11.7786798602 7.89963937052
170 | 6420.0 104.0 9.40779040026 12.1050637013
171 | 6420.0 105.0 2.54010340807 12.6580384572
172 | 6430.0 102.0 3.59726965372 10.3797920091
173 | 6430.0 103.0 12.37345426 7.6609795708
174 | 6430.0 104.0 10.0391857291 11.7728492601
175 | 6430.0 105.0 3.00417897479 12.5969415485
176 | 6430.0 106.0 14.9851158053 8.75881464948
177 | 6430.0 107.0 15.2561948665 8.16813164519
178 | 6440.0 102.0 2.77161502866 10.3981688137
179 | 6440.0 103.0 12.9682286598 7.42231977109
180 | 6440.0 104.0 10.670581058 11.4403961591
181 | 6440.0 105.0 3.46825454152 12.5358446397
182 | 6440.0 106.0 14.4042321028 8.76836104147
183 | 6440.0 107.0 14.6334286072 8.22779659512
184 | 6450.0 102.0 1.94069877586 10.4463780933
185 | 6450.0 103.0 13.6198286392 7.23735842631
186 | 6450.0 104.0 11.3051333635 11.0719054283
187 | 6450.0 105.0 4.00262545486 12.4904992778
188 | 6450.0 106.0 13.8233484002 8.77790743346
189 | 6450.0 107.0 14.0108728129 8.28746154505
190 | 6460.0 102.0 1.1064150813 10.514634796
191 | 6460.0 103.0 14.2775321068 7.05836357653
192 | 6460.0 104.0 11.9447368317 10.6497162426
193 | 6460.0 105.0 4.54478357725 12.4473018541
194 | 6460.0 106.0 13.237413535 8.84688011558
195 | 6460.0 107.0 13.3876856233 8.35380896937
196 | 6470.0 102.0 0.272131386746 10.5828914987
197 | 6470.0 103.0 14.9352355744 6.87936872674
198 | 6470.0 104.0 12.5841298347 10.2275270569
199 | 6470.0 105.0 5.08694169965 12.4038657705
200 | 6470.0 106.0 12.6464275072 8.97551774762
201 | 6470.0 107.0 12.7636565732 8.43614660027
202 | 6480.0 104.0 13.3192844627 9.90008581171
203 | 6480.0 105.0 5.60679052042 12.3434848412
204 | 6480.0 106.0 12.0556519445 9.10391671987
205 | 6480.0 107.0 12.1394170581 8.51848423117
206 | 6490.0 104.0 14.078221648 9.59627188668
207 | 6490.0 105.0 6.11190678353 12.2718869013
208 | 6490.0 106.0 11.5147566127 9.28243425005
209 | 6490.0 107.0 11.5257007984 8.64545124462
210 | 6500.0 104.0 14.8371588333 9.29245796164
211 | 6500.0 105.0 6.61702304663 12.2002889614
212 | 6500.0 106.0 10.9797543041 9.46667961543
213 | 6500.0 107.0 10.9271380266 8.83948166179
214 | 6510.0 104.0 15.5960960186 8.98888269641
215 | 6510.0 105.0 7.12213930973 12.1525570014
216 | 6510.0 106.0 10.4445415303 9.65068632101
217 | 6510.0 107.0 10.3285752549 9.03327341915
218 | 6520.0 105.0 7.62725557283 12.120815248
219 | 6520.0 106.0 9.88701945488 9.81965745921
220 | 6520.0 107.0 9.71948922762 9.21370022774
221 | 6530.0 105.0 8.13237183593 12.0888348349
222 | 6530.0 106.0 9.29645435727 9.96595591643
223 | 6530.0 107.0 9.06852064354 9.34066724118
224 | 6540.0 105.0 8.61728344851 12.0074518432
225 | 6540.0 106.0 8.70588925966 10.1122543737
226 | 6540.0 107.0 8.41734159436 9.46739559483
227 | 6550.0 105.0 9.08872529407 11.8928951393
228 | 6550.0 106.0 8.1186916038 10.2392213871
229 | 6550.0 107.0 7.76616254518 9.59412394848
230 | 6560.0 105.0 9.56227179073 11.7823956521
231 | 6560.0 106.0 7.53991255233 10.3213203582
232 | 6560.0 107.0 7.17812302889 9.67837085778
233 | 6570.0 105.0 10.0558124728 11.7084111141
234 | 6570.0 106.0 6.96113350086 10.4034193293
235 | 6570.0 107.0 6.59008351259 9.76261776708
236 | 6580.0 105.0 10.5493531549 11.6341879164
237 | 6580.0 106.0 6.37709282165 10.4833703622
238 | 6580.0 107.0 6.0020439963 9.84686467638
239 | 6580.0 108.0 0.110073252334 11.8107961682
240 | 6590.0 105.0 11.0494182553 11.5640219353
241 | 6590.0 106.0 5.74569749278 10.5449445905
242 | 6590.0 107.0 5.39548355036 9.88481158453
243 | 6590.0 108.0 0.790086021534 11.7833502913
244 | 6600.0 105.0 11.5755810294 11.5088915216
245 | 6600.0 106.0 5.1143021639 10.6065188189
246 | 6600.0 107.0 4.77692659317 9.89197137852
247 | 6600.0 108.0 1.47009879073 11.7559044143
248 | 6600.0 109.0 10.1970345614 8.43662391987
249 | 6610.0 105.0 12.1017438035 11.4537611078
250 | 6610.0 106.0 4.48290683502 10.6680930472
251 | 6610.0 107.0 4.15836963598 9.89936983231
252 | 6610.0 108.0 2.13516853715 11.7096044131
253 | 6610.0 109.0 10.0497089846 8.7468816595
254 | 6620.0 105.0 12.6051763457 11.4432600767
255 | 6620.0 106.0 3.81909987926 10.7139157287
256 | 6620.0 107.0 3.52992081864 9.90223374991
257 | 6620.0 108.0 2.785716191 11.6444502878
258 | 6620.0 109.0 9.90238340788 9.05713939913
259 | 6630.0 105.0 13.0934554 11.4623528606
260 | 6630.0 106.0 3.15529292351 10.7599770701
261 | 6630.0 107.0 2.87874176946 9.89459663632
262 | 6630.0 108.0 3.42089989185 11.5630672961
263 | 6630.0 109.0 9.68034271723 9.36954507695
264 | 6640.0 105.0 13.5865751519 11.4740471908
265 | 6640.0 106.0 2.52137201331 10.816062123
266 | 6640.0 107.0 2.22756272028 9.88719818253
267 | 6640.0 108.0 4.01988359385 11.4437373963
268 | 6640.0 109.0 9.38379737776 9.68409869297
269 | 6650.0 105.0 14.1228402512 11.4203487359
270 | 6650.0 106.0 1.93206970636 10.8876600629
271 | 6650.0 107.0 1.58374994993 9.89674457452
272 | 6650.0 108.0 4.61886729584 11.3244074964
273 | 6650.0 109.0 9.06809971333 10.0031868452
274 | 6660.0 105.0 14.6593158156 11.3664116212
275 | 6660.0 106.0 1.34276739941 10.9592580028
276 | 6660.0 107.0 1.00497089846 10.0585559187
277 | 6660.0 108.0 5.2214289047 11.2377739891
278 | 6660.0 109.0 8.70715205032 10.3327760286
279 | 6670.0 105.0 15.195580915 11.3124745064
280 | 6670.0 106.0 0.766303464147 11.0470848091
281 | 6670.0 107.0 0.426191846992 10.2206059227
282 | 6670.0 108.0 5.82525330422 11.16522141
283 | 6670.0 109.0 8.34346834088 10.6544894386
284 | 6680.0 106.0 0.209202318968 11.1594935748
285 | 6680.0 107.0 -0.152587204479 10.3826559267
286 | 6680.0 108.0 6.39582421641 11.0110471794
287 | 6680.0 109.0 7.96757765509 10.9442224355
288 | 6690.0 108.0 6.91651489762 10.7346791313
289 | 6690.0 109.0 7.61883696844 11.4556703862
290 | 6700.0 108.0 7.48603348427 10.5031791256
291 | 6700.0 109.0 7.21284977198 11.6091286375
292 | 6710.0 108.0 8.10416951124 10.316785822
293 | 6710.0 109.0 6.76013932117 11.7850209099
294 | 6720.0 108.0 8.72251600332 10.1303925184
295 | 6720.0 109.0 6.27606956903 11.9759487496
296 | 6730.0 108.0 9.34170435584 9.94185127666
297 | 6730.0 109.0 5.78168702652 12.0045879256
298 | 6740.0 108.0 9.96299735945 9.74877549869
299 | 6740.0 109.0 5.2805696005 11.9251142123
300 | 6750.0 108.0 10.5842903631 9.55546106093
301 | 6750.0 109.0 4.7796626396 11.8454018392
302 | 6750.0 110.0 10.9652322115 9.40319610871
303 | 6750.0 111.0 0.778720905614 9.23613424891
304 | 6760.0 110.0 10.3338368826 9.54472136994
305 | 6760.0 111.0 1.0910511283 9.33613270499
306 | 6770.0 110.0 9.70244155374 9.68600797137
307 | 6770.0 111.0 1.40338135098 9.43636982087
308 | 6780.0 110.0 9.098196224 9.83875024319
309 | 6780.0 111.0 1.71571157367 9.53636827695
310 | 6790.0 110.0 8.55687996205 10.017745093
311 | 6790.0 111.0 2.1187522586 9.72132962173
312 | 6800.0 110.0 8.0157741652 10.1967399428
313 | 6800.0 111.0 2.5318952688 9.91583735849
314 | 6810.0 110.0 7.43720557884 10.3833719061
315 | 6810.0 111.0 2.95198362761 10.1108224149
316 | 6820.0 110.0 6.82138466807 10.5778796429
317 | 6820.0 111.0 3.40048477622 10.3079554094
318 | 6830.0 110.0 6.20556375731 10.7723873797
319 | 6830.0 111.0 3.84877545972 10.505088404
320 | 6840.0 110.0 5.60131842757 10.9750095496
321 | 6840.0 111.0 4.33221381653 10.5248971674
322 | 6850.0 110.0 5.00001960937 11.179540998
323 | 6850.0 111.0 4.81965101042 10.5248971674
324 | 6860.0 110.0 4.4298696274 11.3270327542
325 | 6860.0 111.0 5.37359517896 10.3993621127
326 | 6870.0 110.0 3.87318941244 11.4501812109
327 | 6870.0 111.0 5.92206725464 10.2714404601
328 | 6880.0 110.0 3.31650919748 11.5735683273
329 | 6880.0 111.0 6.42318468066 10.123710044
330 | 6890.0 110.0 2.71499991417 11.670464206
331 | 6890.0 111.0 6.92430210668 9.97597962802
332 | 6890.0 112.0 14.8220053454 7.43186616308
333 | 6890.0 113.0 15.0080565023 8.0774409213
334 | 6890.0 114.0 11.259883365 6.49154655221
335 | 6900.0 112.0 14.1379937391 7.5750620429
336 | 6900.0 113.0 14.3575088484 8.20774917195
337 | 6900.0 114.0 11.6808135842 6.73020635192
338 | 6910.0 112.0 13.4539821328 7.71825792273
339 | 6910.0 113.0 13.7069611946 8.33781876279
340 | 6910.0 114.0 12.1017438035 6.96886615163
341 | 6920.0 112.0 12.8078542463 7.88293318454
342 | 6920.0 113.0 13.0982960975 8.47337752903
343 | 6920.0 114.0 12.5003647211 7.00538110099
344 | 6930.0 112.0 12.1659356619 8.04999504433
345 | 6930.0 113.0 12.5073100697 8.61108423346
346 | 6930.0 114.0 12.8575240121 7.02972440056
347 | 6940.0 112.0 11.4989717295 8.20345329555
348 | 6940.0 113.0 11.9167449721 8.7478362987
349 | 6940.0 114.0 13.0583077267 7.09463986608
350 | 6950.0 112.0 10.8294822158 8.35524092817
351 | 6950.0 113.0 11.329126386 8.87718991014
352 | 6950.0 114.0 13.2593019064 7.1597939914
353 | 6960.0 112.0 10.209872933 8.50631258139
354 | 6960.0 113.0 10.7417182651 9.00654352159
355 | 6960.0 114.0 13.2955019053 7.22972131272
356 | 6970.0 112.0 9.61152062638 8.65714557481
357 | 6970.0 113.0 10.123371773 9.10988321486
358 | 6970.0 114.0 13.3319123692 7.29964863404
359 | 6980.0 112.0 8.98686018101 8.76072392788
360 | 6980.0 113.0 9.49197644411 9.20224455735
361 | 6980.0 114.0 13.3681123681 7.36957595535
362 | 6990.0 112.0 8.35546485213 8.85236929097
363 | 6990.0 113.0 8.86058111523 9.29460589984
364 | 6990.0 114.0 13.404522832 7.43950327667
365 | 7000.0 112.0 7.7141776631 8.95141310785
366 | 7000.0 113.0 8.24728578579 9.39818425292
367 | 7000.0 114.0 13.4407228309 7.50966925778
368 | 7010.0 112.0 7.03374396369 9.0786187811
369 | 7010.0 113.0 7.64598696759 9.50892239998
370 | 7010.0 114.0 13.5036518987 7.58389245549
371 | 7020.0 112.0 6.35331026427 9.20582445435
372 | 7020.0 113.0 7.04447768428 9.61966054705
373 | 7020.0 114.0 13.6734972422 7.67649245778
374 | 7030.0 112.0 5.68403121566 9.33637136479
375 | 7030.0 113.0 6.37098933347 9.77144817967
376 | 7030.0 114.0 13.8431321205 7.76885380027
377 | 7040.0 112.0 5.0595812354 9.47956724462
378 | 7040.0 113.0 5.67961144835 9.93325952387
379 | 7040.0 114.0 14.0285518821 7.84952081258
380 | 7050.0 112.0 4.43513125514 9.62276312445
381 | 7050.0 113.0 4.98802309813 10.0953095279
382 | 7050.0 114.0 14.3543518718 7.8266094718
383 | 7060.0 112.0 3.80352546115 9.75211673589
384 | 7060.0 113.0 4.38461962883 10.1907734478
385 | 7060.0 114.0 14.6803623266 7.80345947123
386 | 7070.0 112.0 3.14308594715 9.8261012738
387 | 7070.0 113.0 3.78142662464 10.2862373676
388 | 7070.0 114.0 15.0061623163 7.78030947066
389 | 7080.0 112.0 2.48285689825 9.90032447151
390 | 7080.0 113.0 3.17802315535 10.3817012875
391 | 7090.0 112.0 1.83862319769 9.94137395706
392 | 7090.0 113.0 2.59166735993 10.5862327359
393 | 7100.0 112.0 1.23290461219 9.90509766751
394 | 7100.0 113.0 2.00552202962 10.7907641842
395 | 7100.0 115.0 14.8678867393 8.4931862924
396 | 7110.0 112.0 0.626975561574 9.86906003775
397 | 7110.0 113.0 1.43053135012 11.0022167668
398 | 7110.0 115.0 14.0719076947 8.59700330528
399 | 7120.0 112.0 0.0210465109626 9.83278374819
400 | 7120.0 113.0 0.900580204088 11.2408765665
401 | 7120.0 115.0 13.275718185 8.70082031815
402 | 7130.0 113.0 0.37083952316 11.4795363662
403 | 7130.0 115.0 12.4384879789 8.75953062888
404 | 7140.0 113.0 -0.158901157767 11.7181961659
405 | 7140.0 115.0 11.5966275404 8.81346774362
406 | 7150.0 115.0 10.7547671019 8.86740485835
407 | 7160.0 115.0 9.99141014925 8.9098863027
408 | 7170.0 115.0 9.24783691695 8.94974248925
409 | 7170.0 116.0 0.0841860438502 11.0499487267
410 | 7180.0 115.0 8.50426368464 8.98959867581
411 | 7180.0 116.0 0.602351143748 10.967372436
412 | 7180.0 117.0 0.255083712866 10.3996007725
413 | 7190.0 115.0 7.72406952326 8.99055331501
414 | 7190.0 116.0 1.12030577854 10.8847961453
415 | 7190.0 117.0 0.754938348227 10.300079636
416 | 7200.0 115.0 6.93482536216 8.98220022202
417 | 7200.0 116.0 1.60963715842 10.794582741
418 | 7200.0 117.0 1.25479298359 10.2007971594
419 | 7210.0 115.0 6.14558120107 8.97360846923
420 | 7210.0 116.0 2.05624412104 10.693390986
421 | 7210.0 117.0 1.76496040932 10.1062878787
422 | 7220.0 115.0 5.30372076256 8.93685486007
423 | 7220.0 116.0 2.50264061856 10.5921992309
424 | 7220.0 117.0 2.27596969549 10.0127332372
425 | 7230.0 115.0 4.46186032406 8.90010125091
426 | 7230.0 116.0 2.95577199958 10.489575517
427 | 7230.0 117.0 2.79160921407 9.9139280801
428 | 7240.0 115.0 3.60379407212 8.92587650928
429 | 7240.0 116.0 3.47035919262 10.3745414935
430 | 7240.0 117.0 3.34723710348 9.77073220027
431 | 7250.0 115.0 2.72152433257 9.04520640914
432 | 7250.0 116.0 3.98473592054 10.2597461299
433 | 7250.0 117.0 3.9028649929 9.62753632044
434 | 7260.0 115.0 1.83904412791 9.164536309
435 | 7260.0 116.0 4.50521613664 10.149962622
436 | 7260.0 117.0 4.41050683731 9.50653580199
437 | 7270.0 115.0 0.997604619625 9.39842291272
438 | 7270.0 116.0 5.03432542224 10.0475775679
439 | 7270.0 117.0 4.88594751996 9.40057085091
440 | 7280.0 115.0 0.166267436604 9.6609486924
441 | 7280.0 116.0 5.56343470784 9.94519251386
442 | 7280.0 117.0 5.3613882026 9.29436724004
443 | 7290.0 116.0 6.09275445855 9.84304611958
444 | 7290.0 117.0 5.90438818543 9.22611053732
445 | 7290.0 118.0 15.1713774274 8.90559042631
446 | 7300.0 116.0 6.57850793157 9.7468662203
447 | 7300.0 117.0 6.45475444711 9.1616723914
448 | 7300.0 118.0 14.5555565166 8.88792960113
449 | 7310.0 116.0 7.05963117217 9.65140230041
450 | 7310.0 117.0 6.97902303518 9.09771156508
451 | 7310.0 118.0 13.9397356058 8.87026877595
452 | 7320.0 116.0 7.54075441277 9.55593838053
453 | 7320.0 117.0 7.44204627636 9.03398939855
454 | 7320.0 118.0 13.3207577184 8.86000640456
455 | 7330.0 116.0 8.0397671877 9.45665590385
456 | 7330.0 117.0 7.90506951753 8.97050589183
457 | 7330.0 118.0 12.6725251808 8.91728475649
458 | 7340.0 116.0 8.56592996176 9.35116827237
459 | 7340.0 117.0 8.38282531638 8.88220176594
460 | 7340.0 118.0 12.0242926431 8.97456310842
461 | 7350.0 116.0 9.09209273582 9.2459193007
462 | 7350.0 117.0 8.89530785832 8.73709660771
463 | 7350.0 118.0 11.4206787087 9.14067032903
464 | 7360.0 116.0 9.61825550989 9.14067032903
465 | 7360.0 117.0 9.40779040026 8.59175278968
466 | 7360.0 118.0 10.8465298897 9.37933012874
467 | 7370.0 116.0 10.1334740982 9.00893011958
468 | 7370.0 117.0 9.95352642952 8.43256670328
469 | 7370.0 118.0 10.2456520017 9.61798992845
470 | 7380.0 116.0 10.6489031517 8.87742856994
471 | 7380.0 117.0 10.4990519937 8.27361927667
472 | 7380.0 118.0 9.60415434754 9.85664972817
473 | 7390.0 116.0 11.1679101121 8.74998423689
474 | 7390.0 117.0 11.0559426737 8.12087700485
475 | 7390.0 118.0 8.98159855327 10.0666703519
476 | 7400.0 116.0 11.7203810248 8.660486812
477 | 7400.0 117.0 11.6387205623 7.98317030041
478 | 7400.0 118.0 8.4028195018 10.2098662317
479 | 7410.0 116.0 12.2728519376 8.57098938711
480 | 7410.0 117.0 12.2214984508 7.84546359598
481 | 7410.0 118.0 7.78720905614 10.3459023176
482 |
--------------------------------------------------------------------------------
/src/datasets/hotel/val/uni_examples_val.txt:
--------------------------------------------------------------------------------
1 | 5940.0 95.0 14.6694181409 7.59129090929
2 | 5940.0 96.0 14.6927797681 6.77030119827
3 | 5950.0 95.0 15.195580915 7.38007698654
4 | 5950.0 96.0 15.268612308 6.60085274047
5 | 5970.0 97.0 14.796328602 8.11896772645
6 | 5970.0 98.0 14.748342557 8.76859970127
7 | 5980.0 97.0 14.2503821076 8.22326205893
8 | 5980.0 98.0 14.2747960604 8.83566310499
9 | 5990.0 97.0 13.7046460784 8.3277950512
10 | 5990.0 98.0 13.8012495637 8.90272650871
11 | 6000.0 97.0 13.1595414444 8.42612288868
12 | 6000.0 98.0 13.3041309748 8.98768939741
13 | 6010.0 97.0 12.6163309965 8.51084711758
14 | 6010.0 98.0 12.751660062 9.11465641086
15 | 6020.0 97.0 12.0733310137 8.59557134648
16 | 6020.0 98.0 12.1991891492 9.2413847645
17 | 6030.0 97.0 11.5204391707 8.69771774076
18 | 6030.0 98.0 11.6502961433 9.36859043775
19 | 6040.0 97.0 10.9284008173 8.86931413675
20 | 6040.0 98.0 11.1336042992 9.4986600286
21 | 6040.0 99.0 14.9234495282 8.73160743232
22 | 6050.0 97.0 10.336572929 9.04067187295
23 | 6050.0 98.0 10.6169124551 9.62872961944
24 | 6050.0 99.0 14.2996309433 8.83375382659
25 | 6060.0 97.0 9.74453457566 9.21226826894
26 | 6060.0 98.0 10.100431076 9.75903787008
27 | 6060.0 99.0 13.6758123584 8.93613888067
28 | 6070.0 97.0 9.19290552333 9.36047600456
29 | 6070.0 98.0 9.54817062838 9.87192395535
30 | 6070.0 99.0 13.0551507501 9.02229506837
31 | 6070.0 100.0 14.6401634907 9.11465641086
32 | 6080.0 97.0 8.64148693611 9.50844508038
33 | 6080.0 98.0 8.98686018101 9.98027550442
34 | 6080.0 99.0 12.4393298393 9.08410795649
35 | 6080.0 100.0 13.9081658394 9.06286723432
36 | 6090.0 97.0 8.08964741868 9.65164096021
37 | 6090.0 98.0 8.42554973364 10.0888657133
38 | 6090.0 99.0 11.8235089285 9.14592084462
39 | 6090.0 100.0 13.1601728398 8.9986677482
40 | 6100.0 97.0 7.53612418036 9.74877549869
41 | 6100.0 98.0 7.86844858846 10.1907734478
42 | 6100.0 99.0 11.1923240648 9.23541826951
43 | 6100.0 100.0 12.3481984468 8.88506568353
44 | 6110.0 97.0 6.98281140716 9.84614869698
45 | 6110.0 98.0 7.31513581525 10.2862373676
46 | 6110.0 99.0 10.5544043175 9.33661002459
47 | 6110.0 100.0 11.5358031237 8.77862341286
48 | 6110.0 101.0 15.1280216148 8.40249556851
49 | 6120.0 97.0 6.42928816884 9.95044302945
50 | 6120.0 98.0 6.76203350716 10.3817012875
51 | 6120.0 99.0 9.91669503534 9.43780177967
52 | 6120.0 100.0 10.7173043124 8.73900588611
53 | 6120.0 101.0 14.5191460526 8.51323371558
54 | 6130.0 97.0 5.87597539564 10.0652383931
55 | 6130.0 98.0 6.20872073395 10.4771652074
56 | 6130.0 99.0 9.28761482266 9.50558116279
57 | 6130.0 100.0 9.88028457137 8.72086774133
58 | 6130.0 101.0 13.9102704905 8.62421052245
59 | 6140.0 97.0 5.32245215732 10.1802724166
60 | 6140.0 98.0 5.66277423958 10.5890966535
61 | 6140.0 99.0 8.67116251657 9.52252600857
62 | 6140.0 100.0 9.00032994803 8.75332547409
63 | 6140.0 101.0 13.3013949283 8.73494866951
64 | 6150.0 97.0 4.79565798793 10.2721564395
65 | 6150.0 98.0 5.11682774522 10.7010280996
66 | 6150.0 99.0 8.05471021048 9.53947085435
67 | 6150.0 100.0 8.12142765023 8.78649918625
68 | 6150.0 101.0 12.6474798327 8.9139435193
69 | 6160.0 97.0 4.28043939956 10.3544940704
70 | 6160.0 98.0 4.57109171596 10.8127208858
71 | 6160.0 99.0 7.44562418322 9.61560333045
72 | 6160.0 100.0 7.25431139857 8.8246847542
73 | 6160.0 101.0 11.9935647371 9.09293836908
74 | 6170.0 97.0 3.7652208112 10.4368317013
75 | 6170.0 98.0 4.01798940786 10.9392105797
76 | 6170.0 99.0 6.84137885349 9.73111467352
77 | 6170.0 100.0 6.38719514692 8.86287032216
78 | 6170.0 101.0 11.3531194085 9.26334146608
79 | 6180.0 97.0 3.22011617727 10.5647533539
80 | 6180.0 98.0 3.46320337889 11.0695188303
81 | 6180.0 99.0 6.23713352375 9.84662601658
82 | 6180.0 100.0 5.48114284998 8.88840692073
83 | 6180.0 101.0 10.7667636131 9.39985487151
84 | 6190.0 97.0 2.6676452645 10.7038920171
85 | 6190.0 98.0 2.90820688481 11.1995884212
86 | 6190.0 99.0 5.63983354264 9.95450024605
87 | 6190.0 100.0 4.54899287945 8.90535176651
88 | 6190.0 101.0 10.1804078177 9.53612961715
89 | 6200.0 97.0 2.12464528167 10.836348206
90 | 6200.0 98.0 2.3593138789 11.3456482186
91 | 6200.0 99.0 5.04905797992 10.0544987021
92 | 6200.0 100.0 3.61705337402 8.92253527209
93 | 6200.0 101.0 9.57616248796 9.67526828038
94 | 6210.0 97.0 1.5961673914 10.9583033636
95 | 6210.0 98.0 1.81610343096 11.507220903
96 | 6210.0 99.0 4.45849288231 10.1544971582
97 | 6210.0 100.0 2.68048363619 8.98673475821
98 | 6210.0 101.0 8.94476715908 9.81846416021
99 | 6220.0 97.0 1.06747903602 11.0804971811
100 | 6220.0 98.0 1.27289298302 11.6690322472
101 | 6220.0 99.0 3.83804173913 10.2413693253
102 | 6220.0 100.0 1.74307203792 9.06262857452
103 | 6220.0 101.0 8.31337183021 9.96166004004
104 | 6230.0 97.0 0.538790680641 11.2026909985
105 | 6230.0 98.0 0.752202301802 11.8263090552
106 | 6230.0 99.0 3.17297199271 10.3081940692
107 | 6230.0 100.0 0.831337183021 9.22730383632
108 | 6230.0 101.0 7.71102068646 10.1222780852
109 | 6240.0 97.0 0.0103127903717 11.3246461562
110 | 6240.0 98.0 0.264765107909 11.9769033888
111 | 6240.0 99.0 2.5079022463 10.3750188131
112 | 6240.0 100.0 -0.0631395328877 9.45092806865
113 | 6240.0 101.0 7.15181489019 10.3086713888
114 | 6250.0 98.0 -0.222672085984 12.1277363822
115 | 6250.0 99.0 1.90155226547 10.4838476818
116 | 6250.0 101.0 6.59281955902 10.4950646924
117 | 6260.0 99.0 1.30172670303 10.5972110867
118 | 6260.0 101.0 6.0260370188 10.6805033568
119 | 6270.0 99.0 0.701901140601 10.7105744915
120 | 6270.0 101.0 5.42831610746 10.8618848046
121 | 6280.0 99.0 0.102075578168 10.8239378964
122 | 6280.0 101.0 4.83059519613 11.0432662523
123 | 6290.0 102.0 15.0482553382 8.80654660943
124 | 6290.0 103.0 3.81699522817 9.61608065005
125 | 6290.0 104.0 0.542368587505 13.7658972475
126 | 6300.0 102.0 14.2617472236 8.96978991243
127 | 6300.0 103.0 4.41471613951 9.4919775542
128 | 6300.0 104.0 1.19775693888 13.6704333276
129 | 6310.0 102.0 13.469767016 9.12396414305
130 | 6310.0 103.0 5.01243705084 9.36787445835
131 | 6310.0 104.0 1.85335575536 13.5749694077
132 | 6320.0 102.0 12.6279065775 9.19556208296
133 | 6320.0 103.0 5.604054474 9.28100229126
134 | 6320.0 104.0 2.50874410674 13.4795054878
135 | 6330.0 102.0 11.7900449761 9.27360383747
136 | 6330.0 103.0 6.19504050183 9.19842600056
137 | 6330.0 104.0 3.16118594658 13.3864281659
138 | 6340.0 102.0 10.9883833735 9.4101172429
139 | 6340.0 103.0 6.79549745959 9.11036053446
140 | 6340.0 104.0 3.8066824378 13.2988400194
141 | 6350.0 102.0 10.1865113059 9.54639198854
142 | 6350.0 103.0 7.4188951143 9.00940743918
143 | 6350.0 104.0 4.45196846391 13.2114905327
144 | 6360.0 102.0 9.36464505279 9.73970642631
145 | 6360.0 103.0 8.04229276901 8.9084543439
146 | 6360.0 104.0 5.11977425675 13.1200838294
147 | 6360.0 105.0 -0.0631395328877 13.025335889
148 | 6370.0 102.0 8.5427787997 9.93278220427
149 | 6370.0 103.0 8.66021833087 8.81012650642
150 | 6370.0 104.0 5.83935446656 13.0200853734
151 | 6370.0 105.0 0.357790686363 12.9642389802
152 | 6380.0 102.0 7.75458696415 10.0301554026
153 | 6380.0 103.0 9.27603924164 8.71275330814
154 | 6380.0 104.0 6.55914514148 12.9198482575
155 | 6380.0 105.0 0.778720905614 12.9029034117
156 | 6390.0 102.0 6.97018350058 10.1170275697
157 | 6390.0 103.0 9.8918601524 8.61561876966
158 | 6390.0 104.0 7.27788349085 12.795267842
159 | 6390.0 105.0 1.19965112487 12.841806503
160 | 6400.0 102.0 6.13379515493 10.2031837573
161 | 6400.0 103.0 10.5232554813 8.37695896994
162 | 6400.0 104.0 7.99346486358 12.5725982489
163 | 6400.0 105.0 1.62058134412 12.7804709345
164 | 6410.0 102.0 5.27509750765 10.2891012852
165 | 6410.0 103.0 11.1546508102 8.13829917023
166 | 6410.0 104.0 8.7090462363 12.3499286558
167 | 6410.0 105.0 2.07602784135 12.7193740257
168 | 6420.0 102.0 4.42292427878 10.3614152046
169 | 6420.0 103.0 11.7786798602 7.89963937052
170 | 6420.0 104.0 9.40779040026 12.1050637013
171 | 6420.0 105.0 2.54010340807 12.6580384572
172 | 6430.0 102.0 3.59726965372 10.3797920091
173 | 6430.0 103.0 12.37345426 7.6609795708
174 | 6430.0 104.0 10.0391857291 11.7728492601
175 | 6430.0 105.0 3.00417897479 12.5969415485
176 | 6430.0 106.0 14.9851158053 8.75881464948
177 | 6430.0 107.0 15.2561948665 8.16813164519
178 | 6440.0 102.0 2.77161502866 10.3981688137
179 | 6440.0 103.0 12.9682286598 7.42231977109
180 | 6440.0 104.0 10.670581058 11.4403961591
181 | 6440.0 105.0 3.46825454152 12.5358446397
182 | 6440.0 106.0 14.4042321028 8.76836104147
183 | 6440.0 107.0 14.6334286072 8.22779659512
184 | 6450.0 102.0 1.94069877586 10.4463780933
185 | 6450.0 103.0 13.6198286392 7.23735842631
186 | 6450.0 104.0 11.3051333635 11.0719054283
187 | 6450.0 105.0 4.00262545486 12.4904992778
188 | 6450.0 106.0 13.8233484002 8.77790743346
189 | 6450.0 107.0 14.0108728129 8.28746154505
190 | 6460.0 102.0 1.1064150813 10.514634796
191 | 6460.0 103.0 14.2775321068 7.05836357653
192 | 6460.0 104.0 11.9447368317 10.6497162426
193 | 6460.0 105.0 4.54478357725 12.4473018541
194 | 6460.0 106.0 13.237413535 8.84688011558
195 | 6460.0 107.0 13.3876856233 8.35380896937
196 | 6470.0 102.0 0.272131386746 10.5828914987
197 | 6470.0 103.0 14.9352355744 6.87936872674
198 | 6470.0 104.0 12.5841298347 10.2275270569
199 | 6470.0 105.0 5.08694169965 12.4038657705
200 | 6470.0 106.0 12.6464275072 8.97551774762
201 | 6470.0 107.0 12.7636565732 8.43614660027
202 | 6480.0 104.0 13.3192844627 9.90008581171
203 | 6480.0 105.0 5.60679052042 12.3434848412
204 | 6480.0 106.0 12.0556519445 9.10391671987
205 | 6480.0 107.0 12.1394170581 8.51848423117
206 | 6490.0 104.0 14.078221648 9.59627188668
207 | 6490.0 105.0 6.11190678353 12.2718869013
208 | 6490.0 106.0 11.5147566127 9.28243425005
209 | 6490.0 107.0 11.5257007984 8.64545124462
210 | 6500.0 104.0 14.8371588333 9.29245796164
211 | 6500.0 105.0 6.61702304663 12.2002889614
212 | 6500.0 106.0 10.9797543041 9.46667961543
213 | 6500.0 107.0 10.9271380266 8.83948166179
214 | 6510.0 104.0 15.5960960186 8.98888269641
215 | 6510.0 105.0 7.12213930973 12.1525570014
216 | 6510.0 106.0 10.4445415303 9.65068632101
217 | 6510.0 107.0 10.3285752549 9.03327341915
218 | 6520.0 105.0 7.62725557283 12.120815248
219 | 6520.0 106.0 9.88701945488 9.81965745921
220 | 6520.0 107.0 9.71948922762 9.21370022774
221 | 6530.0 105.0 8.13237183593 12.0888348349
222 | 6530.0 106.0 9.29645435727 9.96595591643
223 | 6530.0 107.0 9.06852064354 9.34066724118
224 | 6540.0 105.0 8.61728344851 12.0074518432
225 | 6540.0 106.0 8.70588925966 10.1122543737
226 | 6540.0 107.0 8.41734159436 9.46739559483
227 | 6550.0 105.0 9.08872529407 11.8928951393
228 | 6550.0 106.0 8.1186916038 10.2392213871
229 | 6550.0 107.0 7.76616254518 9.59412394848
230 | 6560.0 105.0 9.56227179073 11.7823956521
231 | 6560.0 106.0 7.53991255233 10.3213203582
232 | 6560.0 107.0 7.17812302889 9.67837085778
233 | 6570.0 105.0 10.0558124728 11.7084111141
234 | 6570.0 106.0 6.96113350086 10.4034193293
235 | 6570.0 107.0 6.59008351259 9.76261776708
236 | 6580.0 105.0 10.5493531549 11.6341879164
237 | 6580.0 106.0 6.37709282165 10.4833703622
238 | 6580.0 107.0 6.0020439963 9.84686467638
239 | 6580.0 108.0 0.110073252334 11.8107961682
240 | 6590.0 105.0 11.0494182553 11.5640219353
241 | 6590.0 106.0 5.74569749278 10.5449445905
242 | 6590.0 107.0 5.39548355036 9.88481158453
243 | 6590.0 108.0 0.790086021534 11.7833502913
244 | 6600.0 105.0 11.5755810294 11.5088915216
245 | 6600.0 106.0 5.1143021639 10.6065188189
246 | 6600.0 107.0 4.77692659317 9.89197137852
247 | 6600.0 108.0 1.47009879073 11.7559044143
248 | 6600.0 109.0 10.1970345614 8.43662391987
249 | 6610.0 105.0 12.1017438035 11.4537611078
250 | 6610.0 106.0 4.48290683502 10.6680930472
251 | 6610.0 107.0 4.15836963598 9.89936983231
252 | 6610.0 108.0 2.13516853715 11.7096044131
253 | 6610.0 109.0 10.0497089846 8.7468816595
254 | 6620.0 105.0 12.6051763457 11.4432600767
255 | 6620.0 106.0 3.81909987926 10.7139157287
256 | 6620.0 107.0 3.52992081864 9.90223374991
257 | 6620.0 108.0 2.785716191 11.6444502878
258 | 6620.0 109.0 9.90238340788 9.05713939913
259 | 6630.0 105.0 13.0934554 11.4623528606
260 | 6630.0 106.0 3.15529292351 10.7599770701
261 | 6630.0 107.0 2.87874176946 9.89459663632
262 | 6630.0 108.0 3.42089989185 11.5630672961
263 | 6630.0 109.0 9.68034271723 9.36954507695
264 | 6640.0 105.0 13.5865751519 11.4740471908
265 | 6640.0 106.0 2.52137201331 10.816062123
266 | 6640.0 107.0 2.22756272028 9.88719818253
267 | 6640.0 108.0 4.01988359385 11.4437373963
268 | 6640.0 109.0 9.38379737776 9.68409869297
269 | 6650.0 105.0 14.1228402512 11.4203487359
270 | 6650.0 106.0 1.93206970636 10.8876600629
271 | 6650.0 107.0 1.58374994993 9.89674457452
272 | 6650.0 108.0 4.61886729584 11.3244074964
273 | 6650.0 109.0 9.06809971333 10.0031868452
274 | 6660.0 105.0 14.6593158156 11.3664116212
275 | 6660.0 106.0 1.34276739941 10.9592580028
276 | 6660.0 107.0 1.00497089846 10.0585559187
277 | 6660.0 108.0 5.2214289047 11.2377739891
278 | 6660.0 109.0 8.70715205032 10.3327760286
279 | 6670.0 105.0 15.195580915 11.3124745064
280 | 6670.0 106.0 0.766303464147 11.0470848091
281 | 6670.0 107.0 0.426191846992 10.2206059227
282 | 6670.0 108.0 5.82525330422 11.16522141
283 | 6670.0 109.0 8.34346834088 10.6544894386
284 | 6680.0 106.0 0.209202318968 11.1594935748
285 | 6680.0 107.0 -0.152587204479 10.3826559267
286 | 6680.0 108.0 6.39582421641 11.0110471794
287 | 6680.0 109.0 7.96757765509 10.9442224355
288 | 6690.0 108.0 6.91651489762 10.7346791313
289 | 6690.0 109.0 7.61883696844 11.4556703862
290 | 6700.0 108.0 7.48603348427 10.5031791256
291 | 6700.0 109.0 7.21284977198 11.6091286375
292 | 6710.0 108.0 8.10416951124 10.316785822
293 | 6710.0 109.0 6.76013932117 11.7850209099
294 | 6720.0 108.0 8.72251600332 10.1303925184
295 | 6720.0 109.0 6.27606956903 11.9759487496
296 | 6730.0 108.0 9.34170435584 9.94185127666
297 | 6730.0 109.0 5.78168702652 12.0045879256
298 | 6740.0 108.0 9.96299735945 9.74877549869
299 | 6740.0 109.0 5.2805696005 11.9251142123
300 | 6750.0 108.0 10.5842903631 9.55546106093
301 | 6750.0 109.0 4.7796626396 11.8454018392
302 | 6750.0 110.0 10.9652322115 9.40319610871
303 | 6750.0 111.0 0.778720905614 9.23613424891
304 | 6760.0 110.0 10.3338368826 9.54472136994
305 | 6760.0 111.0 1.0910511283 9.33613270499
306 | 6770.0 110.0 9.70244155374 9.68600797137
307 | 6770.0 111.0 1.40338135098 9.43636982087
308 | 6780.0 110.0 9.098196224 9.83875024319
309 | 6780.0 111.0 1.71571157367 9.53636827695
310 | 6790.0 110.0 8.55687996205 10.017745093
311 | 6790.0 111.0 2.1187522586 9.72132962173
312 | 6800.0 110.0 8.0157741652 10.1967399428
313 | 6800.0 111.0 2.5318952688 9.91583735849
314 | 6810.0 110.0 7.43720557884 10.3833719061
315 | 6810.0 111.0 2.95198362761 10.1108224149
316 | 6820.0 110.0 6.82138466807 10.5778796429
317 | 6820.0 111.0 3.40048477622 10.3079554094
318 | 6830.0 110.0 6.20556375731 10.7723873797
319 | 6830.0 111.0 3.84877545972 10.505088404
320 | 6840.0 110.0 5.60131842757 10.9750095496
321 | 6840.0 111.0 4.33221381653 10.5248971674
322 | 6850.0 110.0 5.00001960937 11.179540998
323 | 6850.0 111.0 4.81965101042 10.5248971674
324 | 6860.0 110.0 4.4298696274 11.3270327542
325 | 6860.0 111.0 5.37359517896 10.3993621127
326 | 6870.0 110.0 3.87318941244 11.4501812109
327 | 6870.0 111.0 5.92206725464 10.2714404601
328 | 6880.0 110.0 3.31650919748 11.5735683273
329 | 6880.0 111.0 6.42318468066 10.123710044
330 | 6890.0 110.0 2.71499991417 11.670464206
331 | 6890.0 111.0 6.92430210668 9.97597962802
332 | 6890.0 112.0 14.8220053454 7.43186616308
333 | 6890.0 113.0 15.0080565023 8.0774409213
334 | 6890.0 114.0 11.259883365 6.49154655221
335 | 6900.0 112.0 14.1379937391 7.5750620429
336 | 6900.0 113.0 14.3575088484 8.20774917195
337 | 6900.0 114.0 11.6808135842 6.73020635192
338 | 6910.0 112.0 13.4539821328 7.71825792273
339 | 6910.0 113.0 13.7069611946 8.33781876279
340 | 6910.0 114.0 12.1017438035 6.96886615163
341 | 6920.0 112.0 12.8078542463 7.88293318454
342 | 6920.0 113.0 13.0982960975 8.47337752903
343 | 6920.0 114.0 12.5003647211 7.00538110099
344 | 6930.0 112.0 12.1659356619 8.04999504433
345 | 6930.0 113.0 12.5073100697 8.61108423346
346 | 6930.0 114.0 12.8575240121 7.02972440056
347 | 6940.0 112.0 11.4989717295 8.20345329555
348 | 6940.0 113.0 11.9167449721 8.7478362987
349 | 6940.0 114.0 13.0583077267 7.09463986608
350 | 6950.0 112.0 10.8294822158 8.35524092817
351 | 6950.0 113.0 11.329126386 8.87718991014
352 | 6950.0 114.0 13.2593019064 7.1597939914
353 | 6960.0 112.0 10.209872933 8.50631258139
354 | 6960.0 113.0 10.7417182651 9.00654352159
355 | 6960.0 114.0 13.2955019053 7.22972131272
356 | 6970.0 112.0 9.61152062638 8.65714557481
357 | 6970.0 113.0 10.123371773 9.10988321486
358 | 6970.0 114.0 13.3319123692 7.29964863404
359 | 6980.0 112.0 8.98686018101 8.76072392788
360 | 6980.0 113.0 9.49197644411 9.20224455735
361 | 6980.0 114.0 13.3681123681 7.36957595535
362 | 6990.0 112.0 8.35546485213 8.85236929097
363 | 6990.0 113.0 8.86058111523 9.29460589984
364 | 6990.0 114.0 13.404522832 7.43950327667
365 | 7000.0 112.0 7.7141776631 8.95141310785
366 | 7000.0 113.0 8.24728578579 9.39818425292
367 | 7000.0 114.0 13.4407228309 7.50966925778
368 | 7010.0 112.0 7.03374396369 9.0786187811
369 | 7010.0 113.0 7.64598696759 9.50892239998
370 | 7010.0 114.0 13.5036518987 7.58389245549
371 | 7020.0 112.0 6.35331026427 9.20582445435
372 | 7020.0 113.0 7.04447768428 9.61966054705
373 | 7020.0 114.0 13.6734972422 7.67649245778
374 | 7030.0 112.0 5.68403121566 9.33637136479
375 | 7030.0 113.0 6.37098933347 9.77144817967
376 | 7030.0 114.0 13.8431321205 7.76885380027
377 | 7040.0 112.0 5.0595812354 9.47956724462
378 | 7040.0 113.0 5.67961144835 9.93325952387
379 | 7040.0 114.0 14.0285518821 7.84952081258
380 | 7050.0 112.0 4.43513125514 9.62276312445
381 | 7050.0 113.0 4.98802309813 10.0953095279
382 | 7050.0 114.0 14.3543518718 7.8266094718
383 | 7060.0 112.0 3.80352546115 9.75211673589
384 | 7060.0 113.0 4.38461962883 10.1907734478
385 | 7060.0 114.0 14.6803623266 7.80345947123
386 | 7070.0 112.0 3.14308594715 9.8261012738
387 | 7070.0 113.0 3.78142662464 10.2862373676
388 | 7070.0 114.0 15.0061623163 7.78030947066
389 | 7080.0 112.0 2.48285689825 9.90032447151
390 | 7080.0 113.0 3.17802315535 10.3817012875
391 | 7090.0 112.0 1.83862319769 9.94137395706
392 | 7090.0 113.0 2.59166735993 10.5862327359
393 | 7100.0 112.0 1.23290461219 9.90509766751
394 | 7100.0 113.0 2.00552202962 10.7907641842
395 | 7100.0 115.0 14.8678867393 8.4931862924
396 | 7110.0 112.0 0.626975561574 9.86906003775
397 | 7110.0 113.0 1.43053135012 11.0022167668
398 | 7110.0 115.0 14.0719076947 8.59700330528
399 | 7120.0 112.0 0.0210465109626 9.83278374819
400 | 7120.0 113.0 0.900580204088 11.2408765665
401 | 7120.0 115.0 13.275718185 8.70082031815
402 | 7130.0 113.0 0.37083952316 11.4795363662
403 | 7130.0 115.0 12.4384879789 8.75953062888
404 | 7140.0 113.0 -0.158901157767 11.7181961659
405 | 7140.0 115.0 11.5966275404 8.81346774362
406 | 7150.0 115.0 10.7547671019 8.86740485835
407 | 7160.0 115.0 9.99141014925 8.9098863027
408 | 7170.0 115.0 9.24783691695 8.94974248925
409 | 7170.0 116.0 0.0841860438502 11.0499487267
410 | 7180.0 115.0 8.50426368464 8.98959867581
411 | 7180.0 116.0 0.602351143748 10.967372436
412 | 7180.0 117.0 0.255083712866 10.3996007725
413 | 7190.0 115.0 7.72406952326 8.99055331501
414 | 7190.0 116.0 1.12030577854 10.8847961453
415 | 7190.0 117.0 0.754938348227 10.300079636
416 | 7200.0 115.0 6.93482536216 8.98220022202
417 | 7200.0 116.0 1.60963715842 10.794582741
418 | 7200.0 117.0 1.25479298359 10.2007971594
419 | 7210.0 115.0 6.14558120107 8.97360846923
420 | 7210.0 116.0 2.05624412104 10.693390986
421 | 7210.0 117.0 1.76496040932 10.1062878787
422 | 7220.0 115.0 5.30372076256 8.93685486007
423 | 7220.0 116.0 2.50264061856 10.5921992309
424 | 7220.0 117.0 2.27596969549 10.0127332372
425 | 7230.0 115.0 4.46186032406 8.90010125091
426 | 7230.0 116.0 2.95577199958 10.489575517
427 | 7230.0 117.0 2.79160921407 9.9139280801
428 | 7240.0 115.0 3.60379407212 8.92587650928
429 | 7240.0 116.0 3.47035919262 10.3745414935
430 | 7240.0 117.0 3.34723710348 9.77073220027
431 | 7250.0 115.0 2.72152433257 9.04520640914
432 | 7250.0 116.0 3.98473592054 10.2597461299
433 | 7250.0 117.0 3.9028649929 9.62753632044
434 | 7260.0 115.0 1.83904412791 9.164536309
435 | 7260.0 116.0 4.50521613664 10.149962622
436 | 7260.0 117.0 4.41050683731 9.50653580199
437 | 7270.0 115.0 0.997604619625 9.39842291272
438 | 7270.0 116.0 5.03432542224 10.0475775679
439 | 7270.0 117.0 4.88594751996 9.40057085091
440 | 7280.0 115.0 0.166267436604 9.6609486924
441 | 7280.0 116.0 5.56343470784 9.94519251386
442 | 7280.0 117.0 5.3613882026 9.29436724004
443 | 7290.0 116.0 6.09275445855 9.84304611958
444 | 7290.0 117.0 5.90438818543 9.22611053732
445 | 7290.0 118.0 15.1713774274 8.90559042631
446 | 7300.0 116.0 6.57850793157 9.7468662203
447 | 7300.0 117.0 6.45475444711 9.1616723914
448 | 7300.0 118.0 14.5555565166 8.88792960113
449 | 7310.0 116.0 7.05963117217 9.65140230041
450 | 7310.0 117.0 6.97902303518 9.09771156508
451 | 7310.0 118.0 13.9397356058 8.87026877595
452 | 7320.0 116.0 7.54075441277 9.55593838053
453 | 7320.0 117.0 7.44204627636 9.03398939855
454 | 7320.0 118.0 13.3207577184 8.86000640456
455 | 7330.0 116.0 8.0397671877 9.45665590385
456 | 7330.0 117.0 7.90506951753 8.97050589183
457 | 7330.0 118.0 12.6725251808 8.91728475649
458 | 7340.0 116.0 8.56592996176 9.35116827237
459 | 7340.0 117.0 8.38282531638 8.88220176594
460 | 7340.0 118.0 12.0242926431 8.97456310842
461 | 7350.0 116.0 9.09209273582 9.2459193007
462 | 7350.0 117.0 8.89530785832 8.73709660771
463 | 7350.0 118.0 11.4206787087 9.14067032903
464 | 7360.0 116.0 9.61825550989 9.14067032903
465 | 7360.0 117.0 9.40779040026 8.59175278968
466 | 7360.0 118.0 10.8465298897 9.37933012874
467 | 7370.0 116.0 10.1334740982 9.00893011958
468 | 7370.0 117.0 9.95352642952 8.43256670328
469 | 7370.0 118.0 10.2456520017 9.61798992845
470 | 7380.0 116.0 10.6489031517 8.87742856994
471 | 7380.0 117.0 10.4990519937 8.27361927667
472 | 7380.0 118.0 9.60415434754 9.85664972817
473 | 7390.0 116.0 11.1679101121 8.74998423689
474 | 7390.0 117.0 11.0559426737 8.12087700485
475 | 7390.0 118.0 8.98159855327 10.0666703519
476 | 7400.0 116.0 11.7203810248 8.660486812
477 | 7400.0 117.0 11.6387205623 7.98317030041
478 | 7400.0 118.0 8.4028195018 10.2098662317
479 | 7410.0 116.0 12.2728519376 8.57098938711
480 | 7410.0 117.0 12.2214984508 7.84546359598
481 | 7410.0 118.0 7.78720905614 10.3459023176
482 |
--------------------------------------------------------------------------------
/src/datasets/raw/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/viewsetting/Unsupervised_sampling_promoting/b59d767ac9938097ffeb30525e22f0ced91253a3/src/datasets/raw/.DS_Store
--------------------------------------------------------------------------------
/src/datasets/raw/val/uni_examples_val.txt:
--------------------------------------------------------------------------------
1 | 5940.0 95.0 14.6694181409 7.59129090929
2 | 5940.0 96.0 14.6927797681 6.77030119827
3 | 5950.0 95.0 15.195580915 7.38007698654
4 | 5950.0 96.0 15.268612308 6.60085274047
5 | 5970.0 97.0 14.796328602 8.11896772645
6 | 5970.0 98.0 14.748342557 8.76859970127
7 | 5980.0 97.0 14.2503821076 8.22326205893
8 | 5980.0 98.0 14.2747960604 8.83566310499
9 | 5990.0 97.0 13.7046460784 8.3277950512
10 | 5990.0 98.0 13.8012495637 8.90272650871
11 | 6000.0 97.0 13.1595414444 8.42612288868
12 | 6000.0 98.0 13.3041309748 8.98768939741
13 | 6010.0 97.0 12.6163309965 8.51084711758
14 | 6010.0 98.0 12.751660062 9.11465641086
15 | 6020.0 97.0 12.0733310137 8.59557134648
16 | 6020.0 98.0 12.1991891492 9.2413847645
17 | 6030.0 97.0 11.5204391707 8.69771774076
18 | 6030.0 98.0 11.6502961433 9.36859043775
19 | 6040.0 97.0 10.9284008173 8.86931413675
20 | 6040.0 98.0 11.1336042992 9.4986600286
21 | 6040.0 99.0 14.9234495282 8.73160743232
22 | 6050.0 97.0 10.336572929 9.04067187295
23 | 6050.0 98.0 10.6169124551 9.62872961944
24 | 6050.0 99.0 14.2996309433 8.83375382659
25 | 6060.0 97.0 9.74453457566 9.21226826894
26 | 6060.0 98.0 10.100431076 9.75903787008
27 | 6060.0 99.0 13.6758123584 8.93613888067
28 | 6070.0 97.0 9.19290552333 9.36047600456
29 | 6070.0 98.0 9.54817062838 9.87192395535
30 | 6070.0 99.0 13.0551507501 9.02229506837
31 | 6070.0 100.0 14.6401634907 9.11465641086
32 | 6080.0 97.0 8.64148693611 9.50844508038
33 | 6080.0 98.0 8.98686018101 9.98027550442
34 | 6080.0 99.0 12.4393298393 9.08410795649
35 | 6080.0 100.0 13.9081658394 9.06286723432
36 | 6090.0 97.0 8.08964741868 9.65164096021
37 | 6090.0 98.0 8.42554973364 10.0888657133
38 | 6090.0 99.0 11.8235089285 9.14592084462
39 | 6090.0 100.0 13.1601728398 8.9986677482
40 | 6100.0 97.0 7.53612418036 9.74877549869
41 | 6100.0 98.0 7.86844858846 10.1907734478
42 | 6100.0 99.0 11.1923240648 9.23541826951
43 | 6100.0 100.0 12.3481984468 8.88506568353
44 | 6110.0 97.0 6.98281140716 9.84614869698
45 | 6110.0 98.0 7.31513581525 10.2862373676
46 | 6110.0 99.0 10.5544043175 9.33661002459
47 | 6110.0 100.0 11.5358031237 8.77862341286
48 | 6110.0 101.0 15.1280216148 8.40249556851
49 | 6120.0 97.0 6.42928816884 9.95044302945
50 | 6120.0 98.0 6.76203350716 10.3817012875
51 | 6120.0 99.0 9.91669503534 9.43780177967
52 | 6120.0 100.0 10.7173043124 8.73900588611
53 | 6120.0 101.0 14.5191460526 8.51323371558
54 | 6130.0 97.0 5.87597539564 10.0652383931
55 | 6130.0 98.0 6.20872073395 10.4771652074
56 | 6130.0 99.0 9.28761482266 9.50558116279
57 | 6130.0 100.0 9.88028457137 8.72086774133
58 | 6130.0 101.0 13.9102704905 8.62421052245
59 | 6140.0 97.0 5.32245215732 10.1802724166
60 | 6140.0 98.0 5.66277423958 10.5890966535
61 | 6140.0 99.0 8.67116251657 9.52252600857
62 | 6140.0 100.0 9.00032994803 8.75332547409
63 | 6140.0 101.0 13.3013949283 8.73494866951
64 | 6150.0 97.0 4.79565798793 10.2721564395
65 | 6150.0 98.0 5.11682774522 10.7010280996
66 | 6150.0 99.0 8.05471021048 9.53947085435
67 | 6150.0 100.0 8.12142765023 8.78649918625
68 | 6150.0 101.0 12.6474798327 8.9139435193
69 | 6160.0 97.0 4.28043939956 10.3544940704
70 | 6160.0 98.0 4.57109171596 10.8127208858
71 | 6160.0 99.0 7.44562418322 9.61560333045
72 | 6160.0 100.0 7.25431139857 8.8246847542
73 | 6160.0 101.0 11.9935647371 9.09293836908
74 | 6170.0 97.0 3.7652208112 10.4368317013
75 | 6170.0 98.0 4.01798940786 10.9392105797
76 | 6170.0 99.0 6.84137885349 9.73111467352
77 | 6170.0 100.0 6.38719514692 8.86287032216
78 | 6170.0 101.0 11.3531194085 9.26334146608
79 | 6180.0 97.0 3.22011617727 10.5647533539
80 | 6180.0 98.0 3.46320337889 11.0695188303
81 | 6180.0 99.0 6.23713352375 9.84662601658
82 | 6180.0 100.0 5.48114284998 8.88840692073
83 | 6180.0 101.0 10.7667636131 9.39985487151
84 | 6190.0 97.0 2.6676452645 10.7038920171
85 | 6190.0 98.0 2.90820688481 11.1995884212
86 | 6190.0 99.0 5.63983354264 9.95450024605
87 | 6190.0 100.0 4.54899287945 8.90535176651
88 | 6190.0 101.0 10.1804078177 9.53612961715
89 | 6200.0 97.0 2.12464528167 10.836348206
90 | 6200.0 98.0 2.3593138789 11.3456482186
91 | 6200.0 99.0 5.04905797992 10.0544987021
92 | 6200.0 100.0 3.61705337402 8.92253527209
93 | 6200.0 101.0 9.57616248796 9.67526828038
94 | 6210.0 97.0 1.5961673914 10.9583033636
95 | 6210.0 98.0 1.81610343096 11.507220903
96 | 6210.0 99.0 4.45849288231 10.1544971582
97 | 6210.0 100.0 2.68048363619 8.98673475821
98 | 6210.0 101.0 8.94476715908 9.81846416021
99 | 6220.0 97.0 1.06747903602 11.0804971811
100 | 6220.0 98.0 1.27289298302 11.6690322472
101 | 6220.0 99.0 3.83804173913 10.2413693253
102 | 6220.0 100.0 1.74307203792 9.06262857452
103 | 6220.0 101.0 8.31337183021 9.96166004004
104 | 6230.0 97.0 0.538790680641 11.2026909985
105 | 6230.0 98.0 0.752202301802 11.8263090552
106 | 6230.0 99.0 3.17297199271 10.3081940692
107 | 6230.0 100.0 0.831337183021 9.22730383632
108 | 6230.0 101.0 7.71102068646 10.1222780852
109 | 6240.0 97.0 0.0103127903717 11.3246461562
110 | 6240.0 98.0 0.264765107909 11.9769033888
111 | 6240.0 99.0 2.5079022463 10.3750188131
112 | 6240.0 100.0 -0.0631395328877 9.45092806865
113 | 6240.0 101.0 7.15181489019 10.3086713888
114 | 6250.0 98.0 -0.222672085984 12.1277363822
115 | 6250.0 99.0 1.90155226547 10.4838476818
116 | 6250.0 101.0 6.59281955902 10.4950646924
117 | 6260.0 99.0 1.30172670303 10.5972110867
118 | 6260.0 101.0 6.0260370188 10.6805033568
119 | 6270.0 99.0 0.701901140601 10.7105744915
120 | 6270.0 101.0 5.42831610746 10.8618848046
121 | 6280.0 99.0 0.102075578168 10.8239378964
122 | 6280.0 101.0 4.83059519613 11.0432662523
123 | 6290.0 102.0 15.0482553382 8.80654660943
124 | 6290.0 103.0 3.81699522817 9.61608065005
125 | 6290.0 104.0 0.542368587505 13.7658972475
126 | 6300.0 102.0 14.2617472236 8.96978991243
127 | 6300.0 103.0 4.41471613951 9.4919775542
128 | 6300.0 104.0 1.19775693888 13.6704333276
129 | 6310.0 102.0 13.469767016 9.12396414305
130 | 6310.0 103.0 5.01243705084 9.36787445835
131 | 6310.0 104.0 1.85335575536 13.5749694077
132 | 6320.0 102.0 12.6279065775 9.19556208296
133 | 6320.0 103.0 5.604054474 9.28100229126
134 | 6320.0 104.0 2.50874410674 13.4795054878
135 | 6330.0 102.0 11.7900449761 9.27360383747
136 | 6330.0 103.0 6.19504050183 9.19842600056
137 | 6330.0 104.0 3.16118594658 13.3864281659
138 | 6340.0 102.0 10.9883833735 9.4101172429
139 | 6340.0 103.0 6.79549745959 9.11036053446
140 | 6340.0 104.0 3.8066824378 13.2988400194
141 | 6350.0 102.0 10.1865113059 9.54639198854
142 | 6350.0 103.0 7.4188951143 9.00940743918
143 | 6350.0 104.0 4.45196846391 13.2114905327
144 | 6360.0 102.0 9.36464505279 9.73970642631
145 | 6360.0 103.0 8.04229276901 8.9084543439
146 | 6360.0 104.0 5.11977425675 13.1200838294
147 | 6360.0 105.0 -0.0631395328877 13.025335889
148 | 6370.0 102.0 8.5427787997 9.93278220427
149 | 6370.0 103.0 8.66021833087 8.81012650642
150 | 6370.0 104.0 5.83935446656 13.0200853734
151 | 6370.0 105.0 0.357790686363 12.9642389802
152 | 6380.0 102.0 7.75458696415 10.0301554026
153 | 6380.0 103.0 9.27603924164 8.71275330814
154 | 6380.0 104.0 6.55914514148 12.9198482575
155 | 6380.0 105.0 0.778720905614 12.9029034117
156 | 6390.0 102.0 6.97018350058 10.1170275697
157 | 6390.0 103.0 9.8918601524 8.61561876966
158 | 6390.0 104.0 7.27788349085 12.795267842
159 | 6390.0 105.0 1.19965112487 12.841806503
160 | 6400.0 102.0 6.13379515493 10.2031837573
161 | 6400.0 103.0 10.5232554813 8.37695896994
162 | 6400.0 104.0 7.99346486358 12.5725982489
163 | 6400.0 105.0 1.62058134412 12.7804709345
164 | 6410.0 102.0 5.27509750765 10.2891012852
165 | 6410.0 103.0 11.1546508102 8.13829917023
166 | 6410.0 104.0 8.7090462363 12.3499286558
167 | 6410.0 105.0 2.07602784135 12.7193740257
168 | 6420.0 102.0 4.42292427878 10.3614152046
169 | 6420.0 103.0 11.7786798602 7.89963937052
170 | 6420.0 104.0 9.40779040026 12.1050637013
171 | 6420.0 105.0 2.54010340807 12.6580384572
172 | 6430.0 102.0 3.59726965372 10.3797920091
173 | 6430.0 103.0 12.37345426 7.6609795708
174 | 6430.0 104.0 10.0391857291 11.7728492601
175 | 6430.0 105.0 3.00417897479 12.5969415485
176 | 6430.0 106.0 14.9851158053 8.75881464948
177 | 6430.0 107.0 15.2561948665 8.16813164519
178 | 6440.0 102.0 2.77161502866 10.3981688137
179 | 6440.0 103.0 12.9682286598 7.42231977109
180 | 6440.0 104.0 10.670581058 11.4403961591
181 | 6440.0 105.0 3.46825454152 12.5358446397
182 | 6440.0 106.0 14.4042321028 8.76836104147
183 | 6440.0 107.0 14.6334286072 8.22779659512
184 | 6450.0 102.0 1.94069877586 10.4463780933
185 | 6450.0 103.0 13.6198286392 7.23735842631
186 | 6450.0 104.0 11.3051333635 11.0719054283
187 | 6450.0 105.0 4.00262545486 12.4904992778
188 | 6450.0 106.0 13.8233484002 8.77790743346
189 | 6450.0 107.0 14.0108728129 8.28746154505
190 | 6460.0 102.0 1.1064150813 10.514634796
191 | 6460.0 103.0 14.2775321068 7.05836357653
192 | 6460.0 104.0 11.9447368317 10.6497162426
193 | 6460.0 105.0 4.54478357725 12.4473018541
194 | 6460.0 106.0 13.237413535 8.84688011558
195 | 6460.0 107.0 13.3876856233 8.35380896937
196 | 6470.0 102.0 0.272131386746 10.5828914987
197 | 6470.0 103.0 14.9352355744 6.87936872674
198 | 6470.0 104.0 12.5841298347 10.2275270569
199 | 6470.0 105.0 5.08694169965 12.4038657705
200 | 6470.0 106.0 12.6464275072 8.97551774762
201 | 6470.0 107.0 12.7636565732 8.43614660027
202 | 6480.0 104.0 13.3192844627 9.90008581171
203 | 6480.0 105.0 5.60679052042 12.3434848412
204 | 6480.0 106.0 12.0556519445 9.10391671987
205 | 6480.0 107.0 12.1394170581 8.51848423117
206 | 6490.0 104.0 14.078221648 9.59627188668
207 | 6490.0 105.0 6.11190678353 12.2718869013
208 | 6490.0 106.0 11.5147566127 9.28243425005
209 | 6490.0 107.0 11.5257007984 8.64545124462
210 | 6500.0 104.0 14.8371588333 9.29245796164
211 | 6500.0 105.0 6.61702304663 12.2002889614
212 | 6500.0 106.0 10.9797543041 9.46667961543
213 | 6500.0 107.0 10.9271380266 8.83948166179
214 | 6510.0 104.0 15.5960960186 8.98888269641
215 | 6510.0 105.0 7.12213930973 12.1525570014
216 | 6510.0 106.0 10.4445415303 9.65068632101
217 | 6510.0 107.0 10.3285752549 9.03327341915
218 | 6520.0 105.0 7.62725557283 12.120815248
219 | 6520.0 106.0 9.88701945488 9.81965745921
220 | 6520.0 107.0 9.71948922762 9.21370022774
221 | 6530.0 105.0 8.13237183593 12.0888348349
222 | 6530.0 106.0 9.29645435727 9.96595591643
223 | 6530.0 107.0 9.06852064354 9.34066724118
224 | 6540.0 105.0 8.61728344851 12.0074518432
225 | 6540.0 106.0 8.70588925966 10.1122543737
226 | 6540.0 107.0 8.41734159436 9.46739559483
227 | 6550.0 105.0 9.08872529407 11.8928951393
228 | 6550.0 106.0 8.1186916038 10.2392213871
229 | 6550.0 107.0 7.76616254518 9.59412394848
230 | 6560.0 105.0 9.56227179073 11.7823956521
231 | 6560.0 106.0 7.53991255233 10.3213203582
232 | 6560.0 107.0 7.17812302889 9.67837085778
233 | 6570.0 105.0 10.0558124728 11.7084111141
234 | 6570.0 106.0 6.96113350086 10.4034193293
235 | 6570.0 107.0 6.59008351259 9.76261776708
236 | 6580.0 105.0 10.5493531549 11.6341879164
237 | 6580.0 106.0 6.37709282165 10.4833703622
238 | 6580.0 107.0 6.0020439963 9.84686467638
239 | 6580.0 108.0 0.110073252334 11.8107961682
240 | 6590.0 105.0 11.0494182553 11.5640219353
241 | 6590.0 106.0 5.74569749278 10.5449445905
242 | 6590.0 107.0 5.39548355036 9.88481158453
243 | 6590.0 108.0 0.790086021534 11.7833502913
244 | 6600.0 105.0 11.5755810294 11.5088915216
245 | 6600.0 106.0 5.1143021639 10.6065188189
246 | 6600.0 107.0 4.77692659317 9.89197137852
247 | 6600.0 108.0 1.47009879073 11.7559044143
248 | 6600.0 109.0 10.1970345614 8.43662391987
249 | 6610.0 105.0 12.1017438035 11.4537611078
250 | 6610.0 106.0 4.48290683502 10.6680930472
251 | 6610.0 107.0 4.15836963598 9.89936983231
252 | 6610.0 108.0 2.13516853715 11.7096044131
253 | 6610.0 109.0 10.0497089846 8.7468816595
254 | 6620.0 105.0 12.6051763457 11.4432600767
255 | 6620.0 106.0 3.81909987926 10.7139157287
256 | 6620.0 107.0 3.52992081864 9.90223374991
257 | 6620.0 108.0 2.785716191 11.6444502878
258 | 6620.0 109.0 9.90238340788 9.05713939913
259 | 6630.0 105.0 13.0934554 11.4623528606
260 | 6630.0 106.0 3.15529292351 10.7599770701
261 | 6630.0 107.0 2.87874176946 9.89459663632
262 | 6630.0 108.0 3.42089989185 11.5630672961
263 | 6630.0 109.0 9.68034271723 9.36954507695
264 | 6640.0 105.0 13.5865751519 11.4740471908
265 | 6640.0 106.0 2.52137201331 10.816062123
266 | 6640.0 107.0 2.22756272028 9.88719818253
267 | 6640.0 108.0 4.01988359385 11.4437373963
268 | 6640.0 109.0 9.38379737776 9.68409869297
269 | 6650.0 105.0 14.1228402512 11.4203487359
270 | 6650.0 106.0 1.93206970636 10.8876600629
271 | 6650.0 107.0 1.58374994993 9.89674457452
272 | 6650.0 108.0 4.61886729584 11.3244074964
273 | 6650.0 109.0 9.06809971333 10.0031868452
274 | 6660.0 105.0 14.6593158156 11.3664116212
275 | 6660.0 106.0 1.34276739941 10.9592580028
276 | 6660.0 107.0 1.00497089846 10.0585559187
277 | 6660.0 108.0 5.2214289047 11.2377739891
278 | 6660.0 109.0 8.70715205032 10.3327760286
279 | 6670.0 105.0 15.195580915 11.3124745064
280 | 6670.0 106.0 0.766303464147 11.0470848091
281 | 6670.0 107.0 0.426191846992 10.2206059227
282 | 6670.0 108.0 5.82525330422 11.16522141
283 | 6670.0 109.0 8.34346834088 10.6544894386
284 | 6680.0 106.0 0.209202318968 11.1594935748
285 | 6680.0 107.0 -0.152587204479 10.3826559267
286 | 6680.0 108.0 6.39582421641 11.0110471794
287 | 6680.0 109.0 7.96757765509 10.9442224355
288 | 6690.0 108.0 6.91651489762 10.7346791313
289 | 6690.0 109.0 7.61883696844 11.4556703862
290 | 6700.0 108.0 7.48603348427 10.5031791256
291 | 6700.0 109.0 7.21284977198 11.6091286375
292 | 6710.0 108.0 8.10416951124 10.316785822
293 | 6710.0 109.0 6.76013932117 11.7850209099
294 | 6720.0 108.0 8.72251600332 10.1303925184
295 | 6720.0 109.0 6.27606956903 11.9759487496
296 | 6730.0 108.0 9.34170435584 9.94185127666
297 | 6730.0 109.0 5.78168702652 12.0045879256
298 | 6740.0 108.0 9.96299735945 9.74877549869
299 | 6740.0 109.0 5.2805696005 11.9251142123
300 | 6750.0 108.0 10.5842903631 9.55546106093
301 | 6750.0 109.0 4.7796626396 11.8454018392
302 | 6750.0 110.0 10.9652322115 9.40319610871
303 | 6750.0 111.0 0.778720905614 9.23613424891
304 | 6760.0 110.0 10.3338368826 9.54472136994
305 | 6760.0 111.0 1.0910511283 9.33613270499
306 | 6770.0 110.0 9.70244155374 9.68600797137
307 | 6770.0 111.0 1.40338135098 9.43636982087
308 | 6780.0 110.0 9.098196224 9.83875024319
309 | 6780.0 111.0 1.71571157367 9.53636827695
310 | 6790.0 110.0 8.55687996205 10.017745093
311 | 6790.0 111.0 2.1187522586 9.72132962173
312 | 6800.0 110.0 8.0157741652 10.1967399428
313 | 6800.0 111.0 2.5318952688 9.91583735849
314 | 6810.0 110.0 7.43720557884 10.3833719061
315 | 6810.0 111.0 2.95198362761 10.1108224149
316 | 6820.0 110.0 6.82138466807 10.5778796429
317 | 6820.0 111.0 3.40048477622 10.3079554094
318 | 6830.0 110.0 6.20556375731 10.7723873797
319 | 6830.0 111.0 3.84877545972 10.505088404
320 | 6840.0 110.0 5.60131842757 10.9750095496
321 | 6840.0 111.0 4.33221381653 10.5248971674
322 | 6850.0 110.0 5.00001960937 11.179540998
323 | 6850.0 111.0 4.81965101042 10.5248971674
324 | 6860.0 110.0 4.4298696274 11.3270327542
325 | 6860.0 111.0 5.37359517896 10.3993621127
326 | 6870.0 110.0 3.87318941244 11.4501812109
327 | 6870.0 111.0 5.92206725464 10.2714404601
328 | 6880.0 110.0 3.31650919748 11.5735683273
329 | 6880.0 111.0 6.42318468066 10.123710044
330 | 6890.0 110.0 2.71499991417 11.670464206
331 | 6890.0 111.0 6.92430210668 9.97597962802
332 | 6890.0 112.0 14.8220053454 7.43186616308
333 | 6890.0 113.0 15.0080565023 8.0774409213
334 | 6890.0 114.0 11.259883365 6.49154655221
335 | 6900.0 112.0 14.1379937391 7.5750620429
336 | 6900.0 113.0 14.3575088484 8.20774917195
337 | 6900.0 114.0 11.6808135842 6.73020635192
338 | 6910.0 112.0 13.4539821328 7.71825792273
339 | 6910.0 113.0 13.7069611946 8.33781876279
340 | 6910.0 114.0 12.1017438035 6.96886615163
341 | 6920.0 112.0 12.8078542463 7.88293318454
342 | 6920.0 113.0 13.0982960975 8.47337752903
343 | 6920.0 114.0 12.5003647211 7.00538110099
344 | 6930.0 112.0 12.1659356619 8.04999504433
345 | 6930.0 113.0 12.5073100697 8.61108423346
346 | 6930.0 114.0 12.8575240121 7.02972440056
347 | 6940.0 112.0 11.4989717295 8.20345329555
348 | 6940.0 113.0 11.9167449721 8.7478362987
349 | 6940.0 114.0 13.0583077267 7.09463986608
350 | 6950.0 112.0 10.8294822158 8.35524092817
351 | 6950.0 113.0 11.329126386 8.87718991014
352 | 6950.0 114.0 13.2593019064 7.1597939914
353 | 6960.0 112.0 10.209872933 8.50631258139
354 | 6960.0 113.0 10.7417182651 9.00654352159
355 | 6960.0 114.0 13.2955019053 7.22972131272
356 | 6970.0 112.0 9.61152062638 8.65714557481
357 | 6970.0 113.0 10.123371773 9.10988321486
358 | 6970.0 114.0 13.3319123692 7.29964863404
359 | 6980.0 112.0 8.98686018101 8.76072392788
360 | 6980.0 113.0 9.49197644411 9.20224455735
361 | 6980.0 114.0 13.3681123681 7.36957595535
362 | 6990.0 112.0 8.35546485213 8.85236929097
363 | 6990.0 113.0 8.86058111523 9.29460589984
364 | 6990.0 114.0 13.404522832 7.43950327667
365 | 7000.0 112.0 7.7141776631 8.95141310785
366 | 7000.0 113.0 8.24728578579 9.39818425292
367 | 7000.0 114.0 13.4407228309 7.50966925778
368 | 7010.0 112.0 7.03374396369 9.0786187811
369 | 7010.0 113.0 7.64598696759 9.50892239998
370 | 7010.0 114.0 13.5036518987 7.58389245549
371 | 7020.0 112.0 6.35331026427 9.20582445435
372 | 7020.0 113.0 7.04447768428 9.61966054705
373 | 7020.0 114.0 13.6734972422 7.67649245778
374 | 7030.0 112.0 5.68403121566 9.33637136479
375 | 7030.0 113.0 6.37098933347 9.77144817967
376 | 7030.0 114.0 13.8431321205 7.76885380027
377 | 7040.0 112.0 5.0595812354 9.47956724462
378 | 7040.0 113.0 5.67961144835 9.93325952387
379 | 7040.0 114.0 14.0285518821 7.84952081258
380 | 7050.0 112.0 4.43513125514 9.62276312445
381 | 7050.0 113.0 4.98802309813 10.0953095279
382 | 7050.0 114.0 14.3543518718 7.8266094718
383 | 7060.0 112.0 3.80352546115 9.75211673589
384 | 7060.0 113.0 4.38461962883 10.1907734478
385 | 7060.0 114.0 14.6803623266 7.80345947123
386 | 7070.0 112.0 3.14308594715 9.8261012738
387 | 7070.0 113.0 3.78142662464 10.2862373676
388 | 7070.0 114.0 15.0061623163 7.78030947066
389 | 7080.0 112.0 2.48285689825 9.90032447151
390 | 7080.0 113.0 3.17802315535 10.3817012875
391 | 7090.0 112.0 1.83862319769 9.94137395706
392 | 7090.0 113.0 2.59166735993 10.5862327359
393 | 7100.0 112.0 1.23290461219 9.90509766751
394 | 7100.0 113.0 2.00552202962 10.7907641842
395 | 7100.0 115.0 14.8678867393 8.4931862924
396 | 7110.0 112.0 0.626975561574 9.86906003775
397 | 7110.0 113.0 1.43053135012 11.0022167668
398 | 7110.0 115.0 14.0719076947 8.59700330528
399 | 7120.0 112.0 0.0210465109626 9.83278374819
400 | 7120.0 113.0 0.900580204088 11.2408765665
401 | 7120.0 115.0 13.275718185 8.70082031815
402 | 7130.0 113.0 0.37083952316 11.4795363662
403 | 7130.0 115.0 12.4384879789 8.75953062888
404 | 7140.0 113.0 -0.158901157767 11.7181961659
405 | 7140.0 115.0 11.5966275404 8.81346774362
406 | 7150.0 115.0 10.7547671019 8.86740485835
407 | 7160.0 115.0 9.99141014925 8.9098863027
408 | 7170.0 115.0 9.24783691695 8.94974248925
409 | 7170.0 116.0 0.0841860438502 11.0499487267
410 | 7180.0 115.0 8.50426368464 8.98959867581
411 | 7180.0 116.0 0.602351143748 10.967372436
412 | 7180.0 117.0 0.255083712866 10.3996007725
413 | 7190.0 115.0 7.72406952326 8.99055331501
414 | 7190.0 116.0 1.12030577854 10.8847961453
415 | 7190.0 117.0 0.754938348227 10.300079636
416 | 7200.0 115.0 6.93482536216 8.98220022202
417 | 7200.0 116.0 1.60963715842 10.794582741
418 | 7200.0 117.0 1.25479298359 10.2007971594
419 | 7210.0 115.0 6.14558120107 8.97360846923
420 | 7210.0 116.0 2.05624412104 10.693390986
421 | 7210.0 117.0 1.76496040932 10.1062878787
422 | 7220.0 115.0 5.30372076256 8.93685486007
423 | 7220.0 116.0 2.50264061856 10.5921992309
424 | 7220.0 117.0 2.27596969549 10.0127332372
425 | 7230.0 115.0 4.46186032406 8.90010125091
426 | 7230.0 116.0 2.95577199958 10.489575517
427 | 7230.0 117.0 2.79160921407 9.9139280801
428 | 7240.0 115.0 3.60379407212 8.92587650928
429 | 7240.0 116.0 3.47035919262 10.3745414935
430 | 7240.0 117.0 3.34723710348 9.77073220027
431 | 7250.0 115.0 2.72152433257 9.04520640914
432 | 7250.0 116.0 3.98473592054 10.2597461299
433 | 7250.0 117.0 3.9028649929 9.62753632044
434 | 7260.0 115.0 1.83904412791 9.164536309
435 | 7260.0 116.0 4.50521613664 10.149962622
436 | 7260.0 117.0 4.41050683731 9.50653580199
437 | 7270.0 115.0 0.997604619625 9.39842291272
438 | 7270.0 116.0 5.03432542224 10.0475775679
439 | 7270.0 117.0 4.88594751996 9.40057085091
440 | 7280.0 115.0 0.166267436604 9.6609486924
441 | 7280.0 116.0 5.56343470784 9.94519251386
442 | 7280.0 117.0 5.3613882026 9.29436724004
443 | 7290.0 116.0 6.09275445855 9.84304611958
444 | 7290.0 117.0 5.90438818543 9.22611053732
445 | 7290.0 118.0 15.1713774274 8.90559042631
446 | 7300.0 116.0 6.57850793157 9.7468662203
447 | 7300.0 117.0 6.45475444711 9.1616723914
448 | 7300.0 118.0 14.5555565166 8.88792960113
449 | 7310.0 116.0 7.05963117217 9.65140230041
450 | 7310.0 117.0 6.97902303518 9.09771156508
451 | 7310.0 118.0 13.9397356058 8.87026877595
452 | 7320.0 116.0 7.54075441277 9.55593838053
453 | 7320.0 117.0 7.44204627636 9.03398939855
454 | 7320.0 118.0 13.3207577184 8.86000640456
455 | 7330.0 116.0 8.0397671877 9.45665590385
456 | 7330.0 117.0 7.90506951753 8.97050589183
457 | 7330.0 118.0 12.6725251808 8.91728475649
458 | 7340.0 116.0 8.56592996176 9.35116827237
459 | 7340.0 117.0 8.38282531638 8.88220176594
460 | 7340.0 118.0 12.0242926431 8.97456310842
461 | 7350.0 116.0 9.09209273582 9.2459193007
462 | 7350.0 117.0 8.89530785832 8.73709660771
463 | 7350.0 118.0 11.4206787087 9.14067032903
464 | 7360.0 116.0 9.61825550989 9.14067032903
465 | 7360.0 117.0 9.40779040026 8.59175278968
466 | 7360.0 118.0 10.8465298897 9.37933012874
467 | 7370.0 116.0 10.1334740982 9.00893011958
468 | 7370.0 117.0 9.95352642952 8.43256670328
469 | 7370.0 118.0 10.2456520017 9.61798992845
470 | 7380.0 116.0 10.6489031517 8.87742856994
471 | 7380.0 117.0 10.4990519937 8.27361927667
472 | 7380.0 118.0 9.60415434754 9.85664972817
473 | 7390.0 116.0 11.1679101121 8.74998423689
474 | 7390.0 117.0 11.0559426737 8.12087700485
475 | 7390.0 118.0 8.98159855327 10.0666703519
476 | 7400.0 116.0 11.7203810248 8.660486812
477 | 7400.0 117.0 11.6387205623 7.98317030041
478 | 7400.0 118.0 8.4028195018 10.2098662317
479 | 7410.0 116.0 12.2728519376 8.57098938711
480 | 7410.0 117.0 12.2214984508 7.84546359598
481 | 7410.0 118.0 7.78720905614 10.3459023176
482 |
--------------------------------------------------------------------------------
/src/datasets/univ/val/uni_examples_val.txt:
--------------------------------------------------------------------------------
1 | 5940.0 95.0 14.6694181409 7.59129090929
2 | 5940.0 96.0 14.6927797681 6.77030119827
3 | 5950.0 95.0 15.195580915 7.38007698654
4 | 5950.0 96.0 15.268612308 6.60085274047
5 | 5970.0 97.0 14.796328602 8.11896772645
6 | 5970.0 98.0 14.748342557 8.76859970127
7 | 5980.0 97.0 14.2503821076 8.22326205893
8 | 5980.0 98.0 14.2747960604 8.83566310499
9 | 5990.0 97.0 13.7046460784 8.3277950512
10 | 5990.0 98.0 13.8012495637 8.90272650871
11 | 6000.0 97.0 13.1595414444 8.42612288868
12 | 6000.0 98.0 13.3041309748 8.98768939741
13 | 6010.0 97.0 12.6163309965 8.51084711758
14 | 6010.0 98.0 12.751660062 9.11465641086
15 | 6020.0 97.0 12.0733310137 8.59557134648
16 | 6020.0 98.0 12.1991891492 9.2413847645
17 | 6030.0 97.0 11.5204391707 8.69771774076
18 | 6030.0 98.0 11.6502961433 9.36859043775
19 | 6040.0 97.0 10.9284008173 8.86931413675
20 | 6040.0 98.0 11.1336042992 9.4986600286
21 | 6040.0 99.0 14.9234495282 8.73160743232
22 | 6050.0 97.0 10.336572929 9.04067187295
23 | 6050.0 98.0 10.6169124551 9.62872961944
24 | 6050.0 99.0 14.2996309433 8.83375382659
25 | 6060.0 97.0 9.74453457566 9.21226826894
26 | 6060.0 98.0 10.100431076 9.75903787008
27 | 6060.0 99.0 13.6758123584 8.93613888067
28 | 6070.0 97.0 9.19290552333 9.36047600456
29 | 6070.0 98.0 9.54817062838 9.87192395535
30 | 6070.0 99.0 13.0551507501 9.02229506837
31 | 6070.0 100.0 14.6401634907 9.11465641086
32 | 6080.0 97.0 8.64148693611 9.50844508038
33 | 6080.0 98.0 8.98686018101 9.98027550442
34 | 6080.0 99.0 12.4393298393 9.08410795649
35 | 6080.0 100.0 13.9081658394 9.06286723432
36 | 6090.0 97.0 8.08964741868 9.65164096021
37 | 6090.0 98.0 8.42554973364 10.0888657133
38 | 6090.0 99.0 11.8235089285 9.14592084462
39 | 6090.0 100.0 13.1601728398 8.9986677482
40 | 6100.0 97.0 7.53612418036 9.74877549869
41 | 6100.0 98.0 7.86844858846 10.1907734478
42 | 6100.0 99.0 11.1923240648 9.23541826951
43 | 6100.0 100.0 12.3481984468 8.88506568353
44 | 6110.0 97.0 6.98281140716 9.84614869698
45 | 6110.0 98.0 7.31513581525 10.2862373676
46 | 6110.0 99.0 10.5544043175 9.33661002459
47 | 6110.0 100.0 11.5358031237 8.77862341286
48 | 6110.0 101.0 15.1280216148 8.40249556851
49 | 6120.0 97.0 6.42928816884 9.95044302945
50 | 6120.0 98.0 6.76203350716 10.3817012875
51 | 6120.0 99.0 9.91669503534 9.43780177967
52 | 6120.0 100.0 10.7173043124 8.73900588611
53 | 6120.0 101.0 14.5191460526 8.51323371558
54 | 6130.0 97.0 5.87597539564 10.0652383931
55 | 6130.0 98.0 6.20872073395 10.4771652074
56 | 6130.0 99.0 9.28761482266 9.50558116279
57 | 6130.0 100.0 9.88028457137 8.72086774133
58 | 6130.0 101.0 13.9102704905 8.62421052245
59 | 6140.0 97.0 5.32245215732 10.1802724166
60 | 6140.0 98.0 5.66277423958 10.5890966535
61 | 6140.0 99.0 8.67116251657 9.52252600857
62 | 6140.0 100.0 9.00032994803 8.75332547409
63 | 6140.0 101.0 13.3013949283 8.73494866951
64 | 6150.0 97.0 4.79565798793 10.2721564395
65 | 6150.0 98.0 5.11682774522 10.7010280996
66 | 6150.0 99.0 8.05471021048 9.53947085435
67 | 6150.0 100.0 8.12142765023 8.78649918625
68 | 6150.0 101.0 12.6474798327 8.9139435193
69 | 6160.0 97.0 4.28043939956 10.3544940704
70 | 6160.0 98.0 4.57109171596 10.8127208858
71 | 6160.0 99.0 7.44562418322 9.61560333045
72 | 6160.0 100.0 7.25431139857 8.8246847542
73 | 6160.0 101.0 11.9935647371 9.09293836908
74 | 6170.0 97.0 3.7652208112 10.4368317013
75 | 6170.0 98.0 4.01798940786 10.9392105797
76 | 6170.0 99.0 6.84137885349 9.73111467352
77 | 6170.0 100.0 6.38719514692 8.86287032216
78 | 6170.0 101.0 11.3531194085 9.26334146608
79 | 6180.0 97.0 3.22011617727 10.5647533539
80 | 6180.0 98.0 3.46320337889 11.0695188303
81 | 6180.0 99.0 6.23713352375 9.84662601658
82 | 6180.0 100.0 5.48114284998 8.88840692073
83 | 6180.0 101.0 10.7667636131 9.39985487151
84 | 6190.0 97.0 2.6676452645 10.7038920171
85 | 6190.0 98.0 2.90820688481 11.1995884212
86 | 6190.0 99.0 5.63983354264 9.95450024605
87 | 6190.0 100.0 4.54899287945 8.90535176651
88 | 6190.0 101.0 10.1804078177 9.53612961715
89 | 6200.0 97.0 2.12464528167 10.836348206
90 | 6200.0 98.0 2.3593138789 11.3456482186
91 | 6200.0 99.0 5.04905797992 10.0544987021
92 | 6200.0 100.0 3.61705337402 8.92253527209
93 | 6200.0 101.0 9.57616248796 9.67526828038
94 | 6210.0 97.0 1.5961673914 10.9583033636
95 | 6210.0 98.0 1.81610343096 11.507220903
96 | 6210.0 99.0 4.45849288231 10.1544971582
97 | 6210.0 100.0 2.68048363619 8.98673475821
98 | 6210.0 101.0 8.94476715908 9.81846416021
99 | 6220.0 97.0 1.06747903602 11.0804971811
100 | 6220.0 98.0 1.27289298302 11.6690322472
101 | 6220.0 99.0 3.83804173913 10.2413693253
102 | 6220.0 100.0 1.74307203792 9.06262857452
103 | 6220.0 101.0 8.31337183021 9.96166004004
104 | 6230.0 97.0 0.538790680641 11.2026909985
105 | 6230.0 98.0 0.752202301802 11.8263090552
106 | 6230.0 99.0 3.17297199271 10.3081940692
107 | 6230.0 100.0 0.831337183021 9.22730383632
108 | 6230.0 101.0 7.71102068646 10.1222780852
109 | 6240.0 97.0 0.0103127903717 11.3246461562
110 | 6240.0 98.0 0.264765107909 11.9769033888
111 | 6240.0 99.0 2.5079022463 10.3750188131
112 | 6240.0 100.0 -0.0631395328877 9.45092806865
113 | 6240.0 101.0 7.15181489019 10.3086713888
114 | 6250.0 98.0 -0.222672085984 12.1277363822
115 | 6250.0 99.0 1.90155226547 10.4838476818
116 | 6250.0 101.0 6.59281955902 10.4950646924
117 | 6260.0 99.0 1.30172670303 10.5972110867
118 | 6260.0 101.0 6.0260370188 10.6805033568
119 | 6270.0 99.0 0.701901140601 10.7105744915
120 | 6270.0 101.0 5.42831610746 10.8618848046
121 | 6280.0 99.0 0.102075578168 10.8239378964
122 | 6280.0 101.0 4.83059519613 11.0432662523
123 | 6290.0 102.0 15.0482553382 8.80654660943
124 | 6290.0 103.0 3.81699522817 9.61608065005
125 | 6290.0 104.0 0.542368587505 13.7658972475
126 | 6300.0 102.0 14.2617472236 8.96978991243
127 | 6300.0 103.0 4.41471613951 9.4919775542
128 | 6300.0 104.0 1.19775693888 13.6704333276
129 | 6310.0 102.0 13.469767016 9.12396414305
130 | 6310.0 103.0 5.01243705084 9.36787445835
131 | 6310.0 104.0 1.85335575536 13.5749694077
132 | 6320.0 102.0 12.6279065775 9.19556208296
133 | 6320.0 103.0 5.604054474 9.28100229126
134 | 6320.0 104.0 2.50874410674 13.4795054878
135 | 6330.0 102.0 11.7900449761 9.27360383747
136 | 6330.0 103.0 6.19504050183 9.19842600056
137 | 6330.0 104.0 3.16118594658 13.3864281659
138 | 6340.0 102.0 10.9883833735 9.4101172429
139 | 6340.0 103.0 6.79549745959 9.11036053446
140 | 6340.0 104.0 3.8066824378 13.2988400194
141 | 6350.0 102.0 10.1865113059 9.54639198854
142 | 6350.0 103.0 7.4188951143 9.00940743918
143 | 6350.0 104.0 4.45196846391 13.2114905327
144 | 6360.0 102.0 9.36464505279 9.73970642631
145 | 6360.0 103.0 8.04229276901 8.9084543439
146 | 6360.0 104.0 5.11977425675 13.1200838294
147 | 6360.0 105.0 -0.0631395328877 13.025335889
148 | 6370.0 102.0 8.5427787997 9.93278220427
149 | 6370.0 103.0 8.66021833087 8.81012650642
150 | 6370.0 104.0 5.83935446656 13.0200853734
151 | 6370.0 105.0 0.357790686363 12.9642389802
152 | 6380.0 102.0 7.75458696415 10.0301554026
153 | 6380.0 103.0 9.27603924164 8.71275330814
154 | 6380.0 104.0 6.55914514148 12.9198482575
155 | 6380.0 105.0 0.778720905614 12.9029034117
156 | 6390.0 102.0 6.97018350058 10.1170275697
157 | 6390.0 103.0 9.8918601524 8.61561876966
158 | 6390.0 104.0 7.27788349085 12.795267842
159 | 6390.0 105.0 1.19965112487 12.841806503
160 | 6400.0 102.0 6.13379515493 10.2031837573
161 | 6400.0 103.0 10.5232554813 8.37695896994
162 | 6400.0 104.0 7.99346486358 12.5725982489
163 | 6400.0 105.0 1.62058134412 12.7804709345
164 | 6410.0 102.0 5.27509750765 10.2891012852
165 | 6410.0 103.0 11.1546508102 8.13829917023
166 | 6410.0 104.0 8.7090462363 12.3499286558
167 | 6410.0 105.0 2.07602784135 12.7193740257
168 | 6420.0 102.0 4.42292427878 10.3614152046
169 | 6420.0 103.0 11.7786798602 7.89963937052
170 | 6420.0 104.0 9.40779040026 12.1050637013
171 | 6420.0 105.0 2.54010340807 12.6580384572
172 | 6430.0 102.0 3.59726965372 10.3797920091
173 | 6430.0 103.0 12.37345426 7.6609795708
174 | 6430.0 104.0 10.0391857291 11.7728492601
175 | 6430.0 105.0 3.00417897479 12.5969415485
176 | 6430.0 106.0 14.9851158053 8.75881464948
177 | 6430.0 107.0 15.2561948665 8.16813164519
178 | 6440.0 102.0 2.77161502866 10.3981688137
179 | 6440.0 103.0 12.9682286598 7.42231977109
180 | 6440.0 104.0 10.670581058 11.4403961591
181 | 6440.0 105.0 3.46825454152 12.5358446397
182 | 6440.0 106.0 14.4042321028 8.76836104147
183 | 6440.0 107.0 14.6334286072 8.22779659512
184 | 6450.0 102.0 1.94069877586 10.4463780933
185 | 6450.0 103.0 13.6198286392 7.23735842631
186 | 6450.0 104.0 11.3051333635 11.0719054283
187 | 6450.0 105.0 4.00262545486 12.4904992778
188 | 6450.0 106.0 13.8233484002 8.77790743346
189 | 6450.0 107.0 14.0108728129 8.28746154505
190 | 6460.0 102.0 1.1064150813 10.514634796
191 | 6460.0 103.0 14.2775321068 7.05836357653
192 | 6460.0 104.0 11.9447368317 10.6497162426
193 | 6460.0 105.0 4.54478357725 12.4473018541
194 | 6460.0 106.0 13.237413535 8.84688011558
195 | 6460.0 107.0 13.3876856233 8.35380896937
196 | 6470.0 102.0 0.272131386746 10.5828914987
197 | 6470.0 103.0 14.9352355744 6.87936872674
198 | 6470.0 104.0 12.5841298347 10.2275270569
199 | 6470.0 105.0 5.08694169965 12.4038657705
200 | 6470.0 106.0 12.6464275072 8.97551774762
201 | 6470.0 107.0 12.7636565732 8.43614660027
202 | 6480.0 104.0 13.3192844627 9.90008581171
203 | 6480.0 105.0 5.60679052042 12.3434848412
204 | 6480.0 106.0 12.0556519445 9.10391671987
205 | 6480.0 107.0 12.1394170581 8.51848423117
206 | 6490.0 104.0 14.078221648 9.59627188668
207 | 6490.0 105.0 6.11190678353 12.2718869013
208 | 6490.0 106.0 11.5147566127 9.28243425005
209 | 6490.0 107.0 11.5257007984 8.64545124462
210 | 6500.0 104.0 14.8371588333 9.29245796164
211 | 6500.0 105.0 6.61702304663 12.2002889614
212 | 6500.0 106.0 10.9797543041 9.46667961543
213 | 6500.0 107.0 10.9271380266 8.83948166179
214 | 6510.0 104.0 15.5960960186 8.98888269641
215 | 6510.0 105.0 7.12213930973 12.1525570014
216 | 6510.0 106.0 10.4445415303 9.65068632101
217 | 6510.0 107.0 10.3285752549 9.03327341915
218 | 6520.0 105.0 7.62725557283 12.120815248
219 | 6520.0 106.0 9.88701945488 9.81965745921
220 | 6520.0 107.0 9.71948922762 9.21370022774
221 | 6530.0 105.0 8.13237183593 12.0888348349
222 | 6530.0 106.0 9.29645435727 9.96595591643
223 | 6530.0 107.0 9.06852064354 9.34066724118
224 | 6540.0 105.0 8.61728344851 12.0074518432
225 | 6540.0 106.0 8.70588925966 10.1122543737
226 | 6540.0 107.0 8.41734159436 9.46739559483
227 | 6550.0 105.0 9.08872529407 11.8928951393
228 | 6550.0 106.0 8.1186916038 10.2392213871
229 | 6550.0 107.0 7.76616254518 9.59412394848
230 | 6560.0 105.0 9.56227179073 11.7823956521
231 | 6560.0 106.0 7.53991255233 10.3213203582
232 | 6560.0 107.0 7.17812302889 9.67837085778
233 | 6570.0 105.0 10.0558124728 11.7084111141
234 | 6570.0 106.0 6.96113350086 10.4034193293
235 | 6570.0 107.0 6.59008351259 9.76261776708
236 | 6580.0 105.0 10.5493531549 11.6341879164
237 | 6580.0 106.0 6.37709282165 10.4833703622
238 | 6580.0 107.0 6.0020439963 9.84686467638
239 | 6580.0 108.0 0.110073252334 11.8107961682
240 | 6590.0 105.0 11.0494182553 11.5640219353
241 | 6590.0 106.0 5.74569749278 10.5449445905
242 | 6590.0 107.0 5.39548355036 9.88481158453
243 | 6590.0 108.0 0.790086021534 11.7833502913
244 | 6600.0 105.0 11.5755810294 11.5088915216
245 | 6600.0 106.0 5.1143021639 10.6065188189
246 | 6600.0 107.0 4.77692659317 9.89197137852
247 | 6600.0 108.0 1.47009879073 11.7559044143
248 | 6600.0 109.0 10.1970345614 8.43662391987
249 | 6610.0 105.0 12.1017438035 11.4537611078
250 | 6610.0 106.0 4.48290683502 10.6680930472
251 | 6610.0 107.0 4.15836963598 9.89936983231
252 | 6610.0 108.0 2.13516853715 11.7096044131
253 | 6610.0 109.0 10.0497089846 8.7468816595
254 | 6620.0 105.0 12.6051763457 11.4432600767
255 | 6620.0 106.0 3.81909987926 10.7139157287
256 | 6620.0 107.0 3.52992081864 9.90223374991
257 | 6620.0 108.0 2.785716191 11.6444502878
258 | 6620.0 109.0 9.90238340788 9.05713939913
259 | 6630.0 105.0 13.0934554 11.4623528606
260 | 6630.0 106.0 3.15529292351 10.7599770701
261 | 6630.0 107.0 2.87874176946 9.89459663632
262 | 6630.0 108.0 3.42089989185 11.5630672961
263 | 6630.0 109.0 9.68034271723 9.36954507695
264 | 6640.0 105.0 13.5865751519 11.4740471908
265 | 6640.0 106.0 2.52137201331 10.816062123
266 | 6640.0 107.0 2.22756272028 9.88719818253
267 | 6640.0 108.0 4.01988359385 11.4437373963
268 | 6640.0 109.0 9.38379737776 9.68409869297
269 | 6650.0 105.0 14.1228402512 11.4203487359
270 | 6650.0 106.0 1.93206970636 10.8876600629
271 | 6650.0 107.0 1.58374994993 9.89674457452
272 | 6650.0 108.0 4.61886729584 11.3244074964
273 | 6650.0 109.0 9.06809971333 10.0031868452
274 | 6660.0 105.0 14.6593158156 11.3664116212
275 | 6660.0 106.0 1.34276739941 10.9592580028
276 | 6660.0 107.0 1.00497089846 10.0585559187
277 | 6660.0 108.0 5.2214289047 11.2377739891
278 | 6660.0 109.0 8.70715205032 10.3327760286
279 | 6670.0 105.0 15.195580915 11.3124745064
280 | 6670.0 106.0 0.766303464147 11.0470848091
281 | 6670.0 107.0 0.426191846992 10.2206059227
282 | 6670.0 108.0 5.82525330422 11.16522141
283 | 6670.0 109.0 8.34346834088 10.6544894386
284 | 6680.0 106.0 0.209202318968 11.1594935748
285 | 6680.0 107.0 -0.152587204479 10.3826559267
286 | 6680.0 108.0 6.39582421641 11.0110471794
287 | 6680.0 109.0 7.96757765509 10.9442224355
288 | 6690.0 108.0 6.91651489762 10.7346791313
289 | 6690.0 109.0 7.61883696844 11.4556703862
290 | 6700.0 108.0 7.48603348427 10.5031791256
291 | 6700.0 109.0 7.21284977198 11.6091286375
292 | 6710.0 108.0 8.10416951124 10.316785822
293 | 6710.0 109.0 6.76013932117 11.7850209099
294 | 6720.0 108.0 8.72251600332 10.1303925184
295 | 6720.0 109.0 6.27606956903 11.9759487496
296 | 6730.0 108.0 9.34170435584 9.94185127666
297 | 6730.0 109.0 5.78168702652 12.0045879256
298 | 6740.0 108.0 9.96299735945 9.74877549869
299 | 6740.0 109.0 5.2805696005 11.9251142123
300 | 6750.0 108.0 10.5842903631 9.55546106093
301 | 6750.0 109.0 4.7796626396 11.8454018392
302 | 6750.0 110.0 10.9652322115 9.40319610871
303 | 6750.0 111.0 0.778720905614 9.23613424891
304 | 6760.0 110.0 10.3338368826 9.54472136994
305 | 6760.0 111.0 1.0910511283 9.33613270499
306 | 6770.0 110.0 9.70244155374 9.68600797137
307 | 6770.0 111.0 1.40338135098 9.43636982087
308 | 6780.0 110.0 9.098196224 9.83875024319
309 | 6780.0 111.0 1.71571157367 9.53636827695
310 | 6790.0 110.0 8.55687996205 10.017745093
311 | 6790.0 111.0 2.1187522586 9.72132962173
312 | 6800.0 110.0 8.0157741652 10.1967399428
313 | 6800.0 111.0 2.5318952688 9.91583735849
314 | 6810.0 110.0 7.43720557884 10.3833719061
315 | 6810.0 111.0 2.95198362761 10.1108224149
316 | 6820.0 110.0 6.82138466807 10.5778796429
317 | 6820.0 111.0 3.40048477622 10.3079554094
318 | 6830.0 110.0 6.20556375731 10.7723873797
319 | 6830.0 111.0 3.84877545972 10.505088404
320 | 6840.0 110.0 5.60131842757 10.9750095496
321 | 6840.0 111.0 4.33221381653 10.5248971674
322 | 6850.0 110.0 5.00001960937 11.179540998
323 | 6850.0 111.0 4.81965101042 10.5248971674
324 | 6860.0 110.0 4.4298696274 11.3270327542
325 | 6860.0 111.0 5.37359517896 10.3993621127
326 | 6870.0 110.0 3.87318941244 11.4501812109
327 | 6870.0 111.0 5.92206725464 10.2714404601
328 | 6880.0 110.0 3.31650919748 11.5735683273
329 | 6880.0 111.0 6.42318468066 10.123710044
330 | 6890.0 110.0 2.71499991417 11.670464206
331 | 6890.0 111.0 6.92430210668 9.97597962802
332 | 6890.0 112.0 14.8220053454 7.43186616308
333 | 6890.0 113.0 15.0080565023 8.0774409213
334 | 6890.0 114.0 11.259883365 6.49154655221
335 | 6900.0 112.0 14.1379937391 7.5750620429
336 | 6900.0 113.0 14.3575088484 8.20774917195
337 | 6900.0 114.0 11.6808135842 6.73020635192
338 | 6910.0 112.0 13.4539821328 7.71825792273
339 | 6910.0 113.0 13.7069611946 8.33781876279
340 | 6910.0 114.0 12.1017438035 6.96886615163
341 | 6920.0 112.0 12.8078542463 7.88293318454
342 | 6920.0 113.0 13.0982960975 8.47337752903
343 | 6920.0 114.0 12.5003647211 7.00538110099
344 | 6930.0 112.0 12.1659356619 8.04999504433
345 | 6930.0 113.0 12.5073100697 8.61108423346
346 | 6930.0 114.0 12.8575240121 7.02972440056
347 | 6940.0 112.0 11.4989717295 8.20345329555
348 | 6940.0 113.0 11.9167449721 8.7478362987
349 | 6940.0 114.0 13.0583077267 7.09463986608
350 | 6950.0 112.0 10.8294822158 8.35524092817
351 | 6950.0 113.0 11.329126386 8.87718991014
352 | 6950.0 114.0 13.2593019064 7.1597939914
353 | 6960.0 112.0 10.209872933 8.50631258139
354 | 6960.0 113.0 10.7417182651 9.00654352159
355 | 6960.0 114.0 13.2955019053 7.22972131272
356 | 6970.0 112.0 9.61152062638 8.65714557481
357 | 6970.0 113.0 10.123371773 9.10988321486
358 | 6970.0 114.0 13.3319123692 7.29964863404
359 | 6980.0 112.0 8.98686018101 8.76072392788
360 | 6980.0 113.0 9.49197644411 9.20224455735
361 | 6980.0 114.0 13.3681123681 7.36957595535
362 | 6990.0 112.0 8.35546485213 8.85236929097
363 | 6990.0 113.0 8.86058111523 9.29460589984
364 | 6990.0 114.0 13.404522832 7.43950327667
365 | 7000.0 112.0 7.7141776631 8.95141310785
366 | 7000.0 113.0 8.24728578579 9.39818425292
367 | 7000.0 114.0 13.4407228309 7.50966925778
368 | 7010.0 112.0 7.03374396369 9.0786187811
369 | 7010.0 113.0 7.64598696759 9.50892239998
370 | 7010.0 114.0 13.5036518987 7.58389245549
371 | 7020.0 112.0 6.35331026427 9.20582445435
372 | 7020.0 113.0 7.04447768428 9.61966054705
373 | 7020.0 114.0 13.6734972422 7.67649245778
374 | 7030.0 112.0 5.68403121566 9.33637136479
375 | 7030.0 113.0 6.37098933347 9.77144817967
376 | 7030.0 114.0 13.8431321205 7.76885380027
377 | 7040.0 112.0 5.0595812354 9.47956724462
378 | 7040.0 113.0 5.67961144835 9.93325952387
379 | 7040.0 114.0 14.0285518821 7.84952081258
380 | 7050.0 112.0 4.43513125514 9.62276312445
381 | 7050.0 113.0 4.98802309813 10.0953095279
382 | 7050.0 114.0 14.3543518718 7.8266094718
383 | 7060.0 112.0 3.80352546115 9.75211673589
384 | 7060.0 113.0 4.38461962883 10.1907734478
385 | 7060.0 114.0 14.6803623266 7.80345947123
386 | 7070.0 112.0 3.14308594715 9.8261012738
387 | 7070.0 113.0 3.78142662464 10.2862373676
388 | 7070.0 114.0 15.0061623163 7.78030947066
389 | 7080.0 112.0 2.48285689825 9.90032447151
390 | 7080.0 113.0 3.17802315535 10.3817012875
391 | 7090.0 112.0 1.83862319769 9.94137395706
392 | 7090.0 113.0 2.59166735993 10.5862327359
393 | 7100.0 112.0 1.23290461219 9.90509766751
394 | 7100.0 113.0 2.00552202962 10.7907641842
395 | 7100.0 115.0 14.8678867393 8.4931862924
396 | 7110.0 112.0 0.626975561574 9.86906003775
397 | 7110.0 113.0 1.43053135012 11.0022167668
398 | 7110.0 115.0 14.0719076947 8.59700330528
399 | 7120.0 112.0 0.0210465109626 9.83278374819
400 | 7120.0 113.0 0.900580204088 11.2408765665
401 | 7120.0 115.0 13.275718185 8.70082031815
402 | 7130.0 113.0 0.37083952316 11.4795363662
403 | 7130.0 115.0 12.4384879789 8.75953062888
404 | 7140.0 113.0 -0.158901157767 11.7181961659
405 | 7140.0 115.0 11.5966275404 8.81346774362
406 | 7150.0 115.0 10.7547671019 8.86740485835
407 | 7160.0 115.0 9.99141014925 8.9098863027
408 | 7170.0 115.0 9.24783691695 8.94974248925
409 | 7170.0 116.0 0.0841860438502 11.0499487267
410 | 7180.0 115.0 8.50426368464 8.98959867581
411 | 7180.0 116.0 0.602351143748 10.967372436
412 | 7180.0 117.0 0.255083712866 10.3996007725
413 | 7190.0 115.0 7.72406952326 8.99055331501
414 | 7190.0 116.0 1.12030577854 10.8847961453
415 | 7190.0 117.0 0.754938348227 10.300079636
416 | 7200.0 115.0 6.93482536216 8.98220022202
417 | 7200.0 116.0 1.60963715842 10.794582741
418 | 7200.0 117.0 1.25479298359 10.2007971594
419 | 7210.0 115.0 6.14558120107 8.97360846923
420 | 7210.0 116.0 2.05624412104 10.693390986
421 | 7210.0 117.0 1.76496040932 10.1062878787
422 | 7220.0 115.0 5.30372076256 8.93685486007
423 | 7220.0 116.0 2.50264061856 10.5921992309
424 | 7220.0 117.0 2.27596969549 10.0127332372
425 | 7230.0 115.0 4.46186032406 8.90010125091
426 | 7230.0 116.0 2.95577199958 10.489575517
427 | 7230.0 117.0 2.79160921407 9.9139280801
428 | 7240.0 115.0 3.60379407212 8.92587650928
429 | 7240.0 116.0 3.47035919262 10.3745414935
430 | 7240.0 117.0 3.34723710348 9.77073220027
431 | 7250.0 115.0 2.72152433257 9.04520640914
432 | 7250.0 116.0 3.98473592054 10.2597461299
433 | 7250.0 117.0 3.9028649929 9.62753632044
434 | 7260.0 115.0 1.83904412791 9.164536309
435 | 7260.0 116.0 4.50521613664 10.149962622
436 | 7260.0 117.0 4.41050683731 9.50653580199
437 | 7270.0 115.0 0.997604619625 9.39842291272
438 | 7270.0 116.0 5.03432542224 10.0475775679
439 | 7270.0 117.0 4.88594751996 9.40057085091
440 | 7280.0 115.0 0.166267436604 9.6609486924
441 | 7280.0 116.0 5.56343470784 9.94519251386
442 | 7280.0 117.0 5.3613882026 9.29436724004
443 | 7290.0 116.0 6.09275445855 9.84304611958
444 | 7290.0 117.0 5.90438818543 9.22611053732
445 | 7290.0 118.0 15.1713774274 8.90559042631
446 | 7300.0 116.0 6.57850793157 9.7468662203
447 | 7300.0 117.0 6.45475444711 9.1616723914
448 | 7300.0 118.0 14.5555565166 8.88792960113
449 | 7310.0 116.0 7.05963117217 9.65140230041
450 | 7310.0 117.0 6.97902303518 9.09771156508
451 | 7310.0 118.0 13.9397356058 8.87026877595
452 | 7320.0 116.0 7.54075441277 9.55593838053
453 | 7320.0 117.0 7.44204627636 9.03398939855
454 | 7320.0 118.0 13.3207577184 8.86000640456
455 | 7330.0 116.0 8.0397671877 9.45665590385
456 | 7330.0 117.0 7.90506951753 8.97050589183
457 | 7330.0 118.0 12.6725251808 8.91728475649
458 | 7340.0 116.0 8.56592996176 9.35116827237
459 | 7340.0 117.0 8.38282531638 8.88220176594
460 | 7340.0 118.0 12.0242926431 8.97456310842
461 | 7350.0 116.0 9.09209273582 9.2459193007
462 | 7350.0 117.0 8.89530785832 8.73709660771
463 | 7350.0 118.0 11.4206787087 9.14067032903
464 | 7360.0 116.0 9.61825550989 9.14067032903
465 | 7360.0 117.0 9.40779040026 8.59175278968
466 | 7360.0 118.0 10.8465298897 9.37933012874
467 | 7370.0 116.0 10.1334740982 9.00893011958
468 | 7370.0 117.0 9.95352642952 8.43256670328
469 | 7370.0 118.0 10.2456520017 9.61798992845
470 | 7380.0 116.0 10.6489031517 8.87742856994
471 | 7380.0 117.0 10.4990519937 8.27361927667
472 | 7380.0 118.0 9.60415434754 9.85664972817
473 | 7390.0 116.0 11.1679101121 8.74998423689
474 | 7390.0 117.0 11.0559426737 8.12087700485
475 | 7390.0 118.0 8.98159855327 10.0666703519
476 | 7400.0 116.0 11.7203810248 8.660486812
477 | 7400.0 117.0 11.6387205623 7.98317030041
478 | 7400.0 118.0 8.4028195018 10.2098662317
479 | 7410.0 116.0 12.2728519376 8.57098938711
480 | 7410.0 117.0 12.2214984508 7.84546359598
481 | 7410.0 118.0 7.78720905614 10.3459023176
482 |
--------------------------------------------------------------------------------
/src/datasets/zara1/val/uni_examples_val.txt:
--------------------------------------------------------------------------------
1 | 5940.0 95.0 14.6694181409 7.59129090929
2 | 5940.0 96.0 14.6927797681 6.77030119827
3 | 5950.0 95.0 15.195580915 7.38007698654
4 | 5950.0 96.0 15.268612308 6.60085274047
5 | 5970.0 97.0 14.796328602 8.11896772645
6 | 5970.0 98.0 14.748342557 8.76859970127
7 | 5980.0 97.0 14.2503821076 8.22326205893
8 | 5980.0 98.0 14.2747960604 8.83566310499
9 | 5990.0 97.0 13.7046460784 8.3277950512
10 | 5990.0 98.0 13.8012495637 8.90272650871
11 | 6000.0 97.0 13.1595414444 8.42612288868
12 | 6000.0 98.0 13.3041309748 8.98768939741
13 | 6010.0 97.0 12.6163309965 8.51084711758
14 | 6010.0 98.0 12.751660062 9.11465641086
15 | 6020.0 97.0 12.0733310137 8.59557134648
16 | 6020.0 98.0 12.1991891492 9.2413847645
17 | 6030.0 97.0 11.5204391707 8.69771774076
18 | 6030.0 98.0 11.6502961433 9.36859043775
19 | 6040.0 97.0 10.9284008173 8.86931413675
20 | 6040.0 98.0 11.1336042992 9.4986600286
21 | 6040.0 99.0 14.9234495282 8.73160743232
22 | 6050.0 97.0 10.336572929 9.04067187295
23 | 6050.0 98.0 10.6169124551 9.62872961944
24 | 6050.0 99.0 14.2996309433 8.83375382659
25 | 6060.0 97.0 9.74453457566 9.21226826894
26 | 6060.0 98.0 10.100431076 9.75903787008
27 | 6060.0 99.0 13.6758123584 8.93613888067
28 | 6070.0 97.0 9.19290552333 9.36047600456
29 | 6070.0 98.0 9.54817062838 9.87192395535
30 | 6070.0 99.0 13.0551507501 9.02229506837
31 | 6070.0 100.0 14.6401634907 9.11465641086
32 | 6080.0 97.0 8.64148693611 9.50844508038
33 | 6080.0 98.0 8.98686018101 9.98027550442
34 | 6080.0 99.0 12.4393298393 9.08410795649
35 | 6080.0 100.0 13.9081658394 9.06286723432
36 | 6090.0 97.0 8.08964741868 9.65164096021
37 | 6090.0 98.0 8.42554973364 10.0888657133
38 | 6090.0 99.0 11.8235089285 9.14592084462
39 | 6090.0 100.0 13.1601728398 8.9986677482
40 | 6100.0 97.0 7.53612418036 9.74877549869
41 | 6100.0 98.0 7.86844858846 10.1907734478
42 | 6100.0 99.0 11.1923240648 9.23541826951
43 | 6100.0 100.0 12.3481984468 8.88506568353
44 | 6110.0 97.0 6.98281140716 9.84614869698
45 | 6110.0 98.0 7.31513581525 10.2862373676
46 | 6110.0 99.0 10.5544043175 9.33661002459
47 | 6110.0 100.0 11.5358031237 8.77862341286
48 | 6110.0 101.0 15.1280216148 8.40249556851
49 | 6120.0 97.0 6.42928816884 9.95044302945
50 | 6120.0 98.0 6.76203350716 10.3817012875
51 | 6120.0 99.0 9.91669503534 9.43780177967
52 | 6120.0 100.0 10.7173043124 8.73900588611
53 | 6120.0 101.0 14.5191460526 8.51323371558
54 | 6130.0 97.0 5.87597539564 10.0652383931
55 | 6130.0 98.0 6.20872073395 10.4771652074
56 | 6130.0 99.0 9.28761482266 9.50558116279
57 | 6130.0 100.0 9.88028457137 8.72086774133
58 | 6130.0 101.0 13.9102704905 8.62421052245
59 | 6140.0 97.0 5.32245215732 10.1802724166
60 | 6140.0 98.0 5.66277423958 10.5890966535
61 | 6140.0 99.0 8.67116251657 9.52252600857
62 | 6140.0 100.0 9.00032994803 8.75332547409
63 | 6140.0 101.0 13.3013949283 8.73494866951
64 | 6150.0 97.0 4.79565798793 10.2721564395
65 | 6150.0 98.0 5.11682774522 10.7010280996
66 | 6150.0 99.0 8.05471021048 9.53947085435
67 | 6150.0 100.0 8.12142765023 8.78649918625
68 | 6150.0 101.0 12.6474798327 8.9139435193
69 | 6160.0 97.0 4.28043939956 10.3544940704
70 | 6160.0 98.0 4.57109171596 10.8127208858
71 | 6160.0 99.0 7.44562418322 9.61560333045
72 | 6160.0 100.0 7.25431139857 8.8246847542
73 | 6160.0 101.0 11.9935647371 9.09293836908
74 | 6170.0 97.0 3.7652208112 10.4368317013
75 | 6170.0 98.0 4.01798940786 10.9392105797
76 | 6170.0 99.0 6.84137885349 9.73111467352
77 | 6170.0 100.0 6.38719514692 8.86287032216
78 | 6170.0 101.0 11.3531194085 9.26334146608
79 | 6180.0 97.0 3.22011617727 10.5647533539
80 | 6180.0 98.0 3.46320337889 11.0695188303
81 | 6180.0 99.0 6.23713352375 9.84662601658
82 | 6180.0 100.0 5.48114284998 8.88840692073
83 | 6180.0 101.0 10.7667636131 9.39985487151
84 | 6190.0 97.0 2.6676452645 10.7038920171
85 | 6190.0 98.0 2.90820688481 11.1995884212
86 | 6190.0 99.0 5.63983354264 9.95450024605
87 | 6190.0 100.0 4.54899287945 8.90535176651
88 | 6190.0 101.0 10.1804078177 9.53612961715
89 | 6200.0 97.0 2.12464528167 10.836348206
90 | 6200.0 98.0 2.3593138789 11.3456482186
91 | 6200.0 99.0 5.04905797992 10.0544987021
92 | 6200.0 100.0 3.61705337402 8.92253527209
93 | 6200.0 101.0 9.57616248796 9.67526828038
94 | 6210.0 97.0 1.5961673914 10.9583033636
95 | 6210.0 98.0 1.81610343096 11.507220903
96 | 6210.0 99.0 4.45849288231 10.1544971582
97 | 6210.0 100.0 2.68048363619 8.98673475821
98 | 6210.0 101.0 8.94476715908 9.81846416021
99 | 6220.0 97.0 1.06747903602 11.0804971811
100 | 6220.0 98.0 1.27289298302 11.6690322472
101 | 6220.0 99.0 3.83804173913 10.2413693253
102 | 6220.0 100.0 1.74307203792 9.06262857452
103 | 6220.0 101.0 8.31337183021 9.96166004004
104 | 6230.0 97.0 0.538790680641 11.2026909985
105 | 6230.0 98.0 0.752202301802 11.8263090552
106 | 6230.0 99.0 3.17297199271 10.3081940692
107 | 6230.0 100.0 0.831337183021 9.22730383632
108 | 6230.0 101.0 7.71102068646 10.1222780852
109 | 6240.0 97.0 0.0103127903717 11.3246461562
110 | 6240.0 98.0 0.264765107909 11.9769033888
111 | 6240.0 99.0 2.5079022463 10.3750188131
112 | 6240.0 100.0 -0.0631395328877 9.45092806865
113 | 6240.0 101.0 7.15181489019 10.3086713888
114 | 6250.0 98.0 -0.222672085984 12.1277363822
115 | 6250.0 99.0 1.90155226547 10.4838476818
116 | 6250.0 101.0 6.59281955902 10.4950646924
117 | 6260.0 99.0 1.30172670303 10.5972110867
118 | 6260.0 101.0 6.0260370188 10.6805033568
119 | 6270.0 99.0 0.701901140601 10.7105744915
120 | 6270.0 101.0 5.42831610746 10.8618848046
121 | 6280.0 99.0 0.102075578168 10.8239378964
122 | 6280.0 101.0 4.83059519613 11.0432662523
123 | 6290.0 102.0 15.0482553382 8.80654660943
124 | 6290.0 103.0 3.81699522817 9.61608065005
125 | 6290.0 104.0 0.542368587505 13.7658972475
126 | 6300.0 102.0 14.2617472236 8.96978991243
127 | 6300.0 103.0 4.41471613951 9.4919775542
128 | 6300.0 104.0 1.19775693888 13.6704333276
129 | 6310.0 102.0 13.469767016 9.12396414305
130 | 6310.0 103.0 5.01243705084 9.36787445835
131 | 6310.0 104.0 1.85335575536 13.5749694077
132 | 6320.0 102.0 12.6279065775 9.19556208296
133 | 6320.0 103.0 5.604054474 9.28100229126
134 | 6320.0 104.0 2.50874410674 13.4795054878
135 | 6330.0 102.0 11.7900449761 9.27360383747
136 | 6330.0 103.0 6.19504050183 9.19842600056
137 | 6330.0 104.0 3.16118594658 13.3864281659
138 | 6340.0 102.0 10.9883833735 9.4101172429
139 | 6340.0 103.0 6.79549745959 9.11036053446
140 | 6340.0 104.0 3.8066824378 13.2988400194
141 | 6350.0 102.0 10.1865113059 9.54639198854
142 | 6350.0 103.0 7.4188951143 9.00940743918
143 | 6350.0 104.0 4.45196846391 13.2114905327
144 | 6360.0 102.0 9.36464505279 9.73970642631
145 | 6360.0 103.0 8.04229276901 8.9084543439
146 | 6360.0 104.0 5.11977425675 13.1200838294
147 | 6360.0 105.0 -0.0631395328877 13.025335889
148 | 6370.0 102.0 8.5427787997 9.93278220427
149 | 6370.0 103.0 8.66021833087 8.81012650642
150 | 6370.0 104.0 5.83935446656 13.0200853734
151 | 6370.0 105.0 0.357790686363 12.9642389802
152 | 6380.0 102.0 7.75458696415 10.0301554026
153 | 6380.0 103.0 9.27603924164 8.71275330814
154 | 6380.0 104.0 6.55914514148 12.9198482575
155 | 6380.0 105.0 0.778720905614 12.9029034117
156 | 6390.0 102.0 6.97018350058 10.1170275697
157 | 6390.0 103.0 9.8918601524 8.61561876966
158 | 6390.0 104.0 7.27788349085 12.795267842
159 | 6390.0 105.0 1.19965112487 12.841806503
160 | 6400.0 102.0 6.13379515493 10.2031837573
161 | 6400.0 103.0 10.5232554813 8.37695896994
162 | 6400.0 104.0 7.99346486358 12.5725982489
163 | 6400.0 105.0 1.62058134412 12.7804709345
164 | 6410.0 102.0 5.27509750765 10.2891012852
165 | 6410.0 103.0 11.1546508102 8.13829917023
166 | 6410.0 104.0 8.7090462363 12.3499286558
167 | 6410.0 105.0 2.07602784135 12.7193740257
168 | 6420.0 102.0 4.42292427878 10.3614152046
169 | 6420.0 103.0 11.7786798602 7.89963937052
170 | 6420.0 104.0 9.40779040026 12.1050637013
171 | 6420.0 105.0 2.54010340807 12.6580384572
172 | 6430.0 102.0 3.59726965372 10.3797920091
173 | 6430.0 103.0 12.37345426 7.6609795708
174 | 6430.0 104.0 10.0391857291 11.7728492601
175 | 6430.0 105.0 3.00417897479 12.5969415485
176 | 6430.0 106.0 14.9851158053 8.75881464948
177 | 6430.0 107.0 15.2561948665 8.16813164519
178 | 6440.0 102.0 2.77161502866 10.3981688137
179 | 6440.0 103.0 12.9682286598 7.42231977109
180 | 6440.0 104.0 10.670581058 11.4403961591
181 | 6440.0 105.0 3.46825454152 12.5358446397
182 | 6440.0 106.0 14.4042321028 8.76836104147
183 | 6440.0 107.0 14.6334286072 8.22779659512
184 | 6450.0 102.0 1.94069877586 10.4463780933
185 | 6450.0 103.0 13.6198286392 7.23735842631
186 | 6450.0 104.0 11.3051333635 11.0719054283
187 | 6450.0 105.0 4.00262545486 12.4904992778
188 | 6450.0 106.0 13.8233484002 8.77790743346
189 | 6450.0 107.0 14.0108728129 8.28746154505
190 | 6460.0 102.0 1.1064150813 10.514634796
191 | 6460.0 103.0 14.2775321068 7.05836357653
192 | 6460.0 104.0 11.9447368317 10.6497162426
193 | 6460.0 105.0 4.54478357725 12.4473018541
194 | 6460.0 106.0 13.237413535 8.84688011558
195 | 6460.0 107.0 13.3876856233 8.35380896937
196 | 6470.0 102.0 0.272131386746 10.5828914987
197 | 6470.0 103.0 14.9352355744 6.87936872674
198 | 6470.0 104.0 12.5841298347 10.2275270569
199 | 6470.0 105.0 5.08694169965 12.4038657705
200 | 6470.0 106.0 12.6464275072 8.97551774762
201 | 6470.0 107.0 12.7636565732 8.43614660027
202 | 6480.0 104.0 13.3192844627 9.90008581171
203 | 6480.0 105.0 5.60679052042 12.3434848412
204 | 6480.0 106.0 12.0556519445 9.10391671987
205 | 6480.0 107.0 12.1394170581 8.51848423117
206 | 6490.0 104.0 14.078221648 9.59627188668
207 | 6490.0 105.0 6.11190678353 12.2718869013
208 | 6490.0 106.0 11.5147566127 9.28243425005
209 | 6490.0 107.0 11.5257007984 8.64545124462
210 | 6500.0 104.0 14.8371588333 9.29245796164
211 | 6500.0 105.0 6.61702304663 12.2002889614
212 | 6500.0 106.0 10.9797543041 9.46667961543
213 | 6500.0 107.0 10.9271380266 8.83948166179
214 | 6510.0 104.0 15.5960960186 8.98888269641
215 | 6510.0 105.0 7.12213930973 12.1525570014
216 | 6510.0 106.0 10.4445415303 9.65068632101
217 | 6510.0 107.0 10.3285752549 9.03327341915
218 | 6520.0 105.0 7.62725557283 12.120815248
219 | 6520.0 106.0 9.88701945488 9.81965745921
220 | 6520.0 107.0 9.71948922762 9.21370022774
221 | 6530.0 105.0 8.13237183593 12.0888348349
222 | 6530.0 106.0 9.29645435727 9.96595591643
223 | 6530.0 107.0 9.06852064354 9.34066724118
224 | 6540.0 105.0 8.61728344851 12.0074518432
225 | 6540.0 106.0 8.70588925966 10.1122543737
226 | 6540.0 107.0 8.41734159436 9.46739559483
227 | 6550.0 105.0 9.08872529407 11.8928951393
228 | 6550.0 106.0 8.1186916038 10.2392213871
229 | 6550.0 107.0 7.76616254518 9.59412394848
230 | 6560.0 105.0 9.56227179073 11.7823956521
231 | 6560.0 106.0 7.53991255233 10.3213203582
232 | 6560.0 107.0 7.17812302889 9.67837085778
233 | 6570.0 105.0 10.0558124728 11.7084111141
234 | 6570.0 106.0 6.96113350086 10.4034193293
235 | 6570.0 107.0 6.59008351259 9.76261776708
236 | 6580.0 105.0 10.5493531549 11.6341879164
237 | 6580.0 106.0 6.37709282165 10.4833703622
238 | 6580.0 107.0 6.0020439963 9.84686467638
239 | 6580.0 108.0 0.110073252334 11.8107961682
240 | 6590.0 105.0 11.0494182553 11.5640219353
241 | 6590.0 106.0 5.74569749278 10.5449445905
242 | 6590.0 107.0 5.39548355036 9.88481158453
243 | 6590.0 108.0 0.790086021534 11.7833502913
244 | 6600.0 105.0 11.5755810294 11.5088915216
245 | 6600.0 106.0 5.1143021639 10.6065188189
246 | 6600.0 107.0 4.77692659317 9.89197137852
247 | 6600.0 108.0 1.47009879073 11.7559044143
248 | 6600.0 109.0 10.1970345614 8.43662391987
249 | 6610.0 105.0 12.1017438035 11.4537611078
250 | 6610.0 106.0 4.48290683502 10.6680930472
251 | 6610.0 107.0 4.15836963598 9.89936983231
252 | 6610.0 108.0 2.13516853715 11.7096044131
253 | 6610.0 109.0 10.0497089846 8.7468816595
254 | 6620.0 105.0 12.6051763457 11.4432600767
255 | 6620.0 106.0 3.81909987926 10.7139157287
256 | 6620.0 107.0 3.52992081864 9.90223374991
257 | 6620.0 108.0 2.785716191 11.6444502878
258 | 6620.0 109.0 9.90238340788 9.05713939913
259 | 6630.0 105.0 13.0934554 11.4623528606
260 | 6630.0 106.0 3.15529292351 10.7599770701
261 | 6630.0 107.0 2.87874176946 9.89459663632
262 | 6630.0 108.0 3.42089989185 11.5630672961
263 | 6630.0 109.0 9.68034271723 9.36954507695
264 | 6640.0 105.0 13.5865751519 11.4740471908
265 | 6640.0 106.0 2.52137201331 10.816062123
266 | 6640.0 107.0 2.22756272028 9.88719818253
267 | 6640.0 108.0 4.01988359385 11.4437373963
268 | 6640.0 109.0 9.38379737776 9.68409869297
269 | 6650.0 105.0 14.1228402512 11.4203487359
270 | 6650.0 106.0 1.93206970636 10.8876600629
271 | 6650.0 107.0 1.58374994993 9.89674457452
272 | 6650.0 108.0 4.61886729584 11.3244074964
273 | 6650.0 109.0 9.06809971333 10.0031868452
274 | 6660.0 105.0 14.6593158156 11.3664116212
275 | 6660.0 106.0 1.34276739941 10.9592580028
276 | 6660.0 107.0 1.00497089846 10.0585559187
277 | 6660.0 108.0 5.2214289047 11.2377739891
278 | 6660.0 109.0 8.70715205032 10.3327760286
279 | 6670.0 105.0 15.195580915 11.3124745064
280 | 6670.0 106.0 0.766303464147 11.0470848091
281 | 6670.0 107.0 0.426191846992 10.2206059227
282 | 6670.0 108.0 5.82525330422 11.16522141
283 | 6670.0 109.0 8.34346834088 10.6544894386
284 | 6680.0 106.0 0.209202318968 11.1594935748
285 | 6680.0 107.0 -0.152587204479 10.3826559267
286 | 6680.0 108.0 6.39582421641 11.0110471794
287 | 6680.0 109.0 7.96757765509 10.9442224355
288 | 6690.0 108.0 6.91651489762 10.7346791313
289 | 6690.0 109.0 7.61883696844 11.4556703862
290 | 6700.0 108.0 7.48603348427 10.5031791256
291 | 6700.0 109.0 7.21284977198 11.6091286375
292 | 6710.0 108.0 8.10416951124 10.316785822
293 | 6710.0 109.0 6.76013932117 11.7850209099
294 | 6720.0 108.0 8.72251600332 10.1303925184
295 | 6720.0 109.0 6.27606956903 11.9759487496
296 | 6730.0 108.0 9.34170435584 9.94185127666
297 | 6730.0 109.0 5.78168702652 12.0045879256
298 | 6740.0 108.0 9.96299735945 9.74877549869
299 | 6740.0 109.0 5.2805696005 11.9251142123
300 | 6750.0 108.0 10.5842903631 9.55546106093
301 | 6750.0 109.0 4.7796626396 11.8454018392
302 | 6750.0 110.0 10.9652322115 9.40319610871
303 | 6750.0 111.0 0.778720905614 9.23613424891
304 | 6760.0 110.0 10.3338368826 9.54472136994
305 | 6760.0 111.0 1.0910511283 9.33613270499
306 | 6770.0 110.0 9.70244155374 9.68600797137
307 | 6770.0 111.0 1.40338135098 9.43636982087
308 | 6780.0 110.0 9.098196224 9.83875024319
309 | 6780.0 111.0 1.71571157367 9.53636827695
310 | 6790.0 110.0 8.55687996205 10.017745093
311 | 6790.0 111.0 2.1187522586 9.72132962173
312 | 6800.0 110.0 8.0157741652 10.1967399428
313 | 6800.0 111.0 2.5318952688 9.91583735849
314 | 6810.0 110.0 7.43720557884 10.3833719061
315 | 6810.0 111.0 2.95198362761 10.1108224149
316 | 6820.0 110.0 6.82138466807 10.5778796429
317 | 6820.0 111.0 3.40048477622 10.3079554094
318 | 6830.0 110.0 6.20556375731 10.7723873797
319 | 6830.0 111.0 3.84877545972 10.505088404
320 | 6840.0 110.0 5.60131842757 10.9750095496
321 | 6840.0 111.0 4.33221381653 10.5248971674
322 | 6850.0 110.0 5.00001960937 11.179540998
323 | 6850.0 111.0 4.81965101042 10.5248971674
324 | 6860.0 110.0 4.4298696274 11.3270327542
325 | 6860.0 111.0 5.37359517896 10.3993621127
326 | 6870.0 110.0 3.87318941244 11.4501812109
327 | 6870.0 111.0 5.92206725464 10.2714404601
328 | 6880.0 110.0 3.31650919748 11.5735683273
329 | 6880.0 111.0 6.42318468066 10.123710044
330 | 6890.0 110.0 2.71499991417 11.670464206
331 | 6890.0 111.0 6.92430210668 9.97597962802
332 | 6890.0 112.0 14.8220053454 7.43186616308
333 | 6890.0 113.0 15.0080565023 8.0774409213
334 | 6890.0 114.0 11.259883365 6.49154655221
335 | 6900.0 112.0 14.1379937391 7.5750620429
336 | 6900.0 113.0 14.3575088484 8.20774917195
337 | 6900.0 114.0 11.6808135842 6.73020635192
338 | 6910.0 112.0 13.4539821328 7.71825792273
339 | 6910.0 113.0 13.7069611946 8.33781876279
340 | 6910.0 114.0 12.1017438035 6.96886615163
341 | 6920.0 112.0 12.8078542463 7.88293318454
342 | 6920.0 113.0 13.0982960975 8.47337752903
343 | 6920.0 114.0 12.5003647211 7.00538110099
344 | 6930.0 112.0 12.1659356619 8.04999504433
345 | 6930.0 113.0 12.5073100697 8.61108423346
346 | 6930.0 114.0 12.8575240121 7.02972440056
347 | 6940.0 112.0 11.4989717295 8.20345329555
348 | 6940.0 113.0 11.9167449721 8.7478362987
349 | 6940.0 114.0 13.0583077267 7.09463986608
350 | 6950.0 112.0 10.8294822158 8.35524092817
351 | 6950.0 113.0 11.329126386 8.87718991014
352 | 6950.0 114.0 13.2593019064 7.1597939914
353 | 6960.0 112.0 10.209872933 8.50631258139
354 | 6960.0 113.0 10.7417182651 9.00654352159
355 | 6960.0 114.0 13.2955019053 7.22972131272
356 | 6970.0 112.0 9.61152062638 8.65714557481
357 | 6970.0 113.0 10.123371773 9.10988321486
358 | 6970.0 114.0 13.3319123692 7.29964863404
359 | 6980.0 112.0 8.98686018101 8.76072392788
360 | 6980.0 113.0 9.49197644411 9.20224455735
361 | 6980.0 114.0 13.3681123681 7.36957595535
362 | 6990.0 112.0 8.35546485213 8.85236929097
363 | 6990.0 113.0 8.86058111523 9.29460589984
364 | 6990.0 114.0 13.404522832 7.43950327667
365 | 7000.0 112.0 7.7141776631 8.95141310785
366 | 7000.0 113.0 8.24728578579 9.39818425292
367 | 7000.0 114.0 13.4407228309 7.50966925778
368 | 7010.0 112.0 7.03374396369 9.0786187811
369 | 7010.0 113.0 7.64598696759 9.50892239998
370 | 7010.0 114.0 13.5036518987 7.58389245549
371 | 7020.0 112.0 6.35331026427 9.20582445435
372 | 7020.0 113.0 7.04447768428 9.61966054705
373 | 7020.0 114.0 13.6734972422 7.67649245778
374 | 7030.0 112.0 5.68403121566 9.33637136479
375 | 7030.0 113.0 6.37098933347 9.77144817967
376 | 7030.0 114.0 13.8431321205 7.76885380027
377 | 7040.0 112.0 5.0595812354 9.47956724462
378 | 7040.0 113.0 5.67961144835 9.93325952387
379 | 7040.0 114.0 14.0285518821 7.84952081258
380 | 7050.0 112.0 4.43513125514 9.62276312445
381 | 7050.0 113.0 4.98802309813 10.0953095279
382 | 7050.0 114.0 14.3543518718 7.8266094718
383 | 7060.0 112.0 3.80352546115 9.75211673589
384 | 7060.0 113.0 4.38461962883 10.1907734478
385 | 7060.0 114.0 14.6803623266 7.80345947123
386 | 7070.0 112.0 3.14308594715 9.8261012738
387 | 7070.0 113.0 3.78142662464 10.2862373676
388 | 7070.0 114.0 15.0061623163 7.78030947066
389 | 7080.0 112.0 2.48285689825 9.90032447151
390 | 7080.0 113.0 3.17802315535 10.3817012875
391 | 7090.0 112.0 1.83862319769 9.94137395706
392 | 7090.0 113.0 2.59166735993 10.5862327359
393 | 7100.0 112.0 1.23290461219 9.90509766751
394 | 7100.0 113.0 2.00552202962 10.7907641842
395 | 7100.0 115.0 14.8678867393 8.4931862924
396 | 7110.0 112.0 0.626975561574 9.86906003775
397 | 7110.0 113.0 1.43053135012 11.0022167668
398 | 7110.0 115.0 14.0719076947 8.59700330528
399 | 7120.0 112.0 0.0210465109626 9.83278374819
400 | 7120.0 113.0 0.900580204088 11.2408765665
401 | 7120.0 115.0 13.275718185 8.70082031815
402 | 7130.0 113.0 0.37083952316 11.4795363662
403 | 7130.0 115.0 12.4384879789 8.75953062888
404 | 7140.0 113.0 -0.158901157767 11.7181961659
405 | 7140.0 115.0 11.5966275404 8.81346774362
406 | 7150.0 115.0 10.7547671019 8.86740485835
407 | 7160.0 115.0 9.99141014925 8.9098863027
408 | 7170.0 115.0 9.24783691695 8.94974248925
409 | 7170.0 116.0 0.0841860438502 11.0499487267
410 | 7180.0 115.0 8.50426368464 8.98959867581
411 | 7180.0 116.0 0.602351143748 10.967372436
412 | 7180.0 117.0 0.255083712866 10.3996007725
413 | 7190.0 115.0 7.72406952326 8.99055331501
414 | 7190.0 116.0 1.12030577854 10.8847961453
415 | 7190.0 117.0 0.754938348227 10.300079636
416 | 7200.0 115.0 6.93482536216 8.98220022202
417 | 7200.0 116.0 1.60963715842 10.794582741
418 | 7200.0 117.0 1.25479298359 10.2007971594
419 | 7210.0 115.0 6.14558120107 8.97360846923
420 | 7210.0 116.0 2.05624412104 10.693390986
421 | 7210.0 117.0 1.76496040932 10.1062878787
422 | 7220.0 115.0 5.30372076256 8.93685486007
423 | 7220.0 116.0 2.50264061856 10.5921992309
424 | 7220.0 117.0 2.27596969549 10.0127332372
425 | 7230.0 115.0 4.46186032406 8.90010125091
426 | 7230.0 116.0 2.95577199958 10.489575517
427 | 7230.0 117.0 2.79160921407 9.9139280801
428 | 7240.0 115.0 3.60379407212 8.92587650928
429 | 7240.0 116.0 3.47035919262 10.3745414935
430 | 7240.0 117.0 3.34723710348 9.77073220027
431 | 7250.0 115.0 2.72152433257 9.04520640914
432 | 7250.0 116.0 3.98473592054 10.2597461299
433 | 7250.0 117.0 3.9028649929 9.62753632044
434 | 7260.0 115.0 1.83904412791 9.164536309
435 | 7260.0 116.0 4.50521613664 10.149962622
436 | 7260.0 117.0 4.41050683731 9.50653580199
437 | 7270.0 115.0 0.997604619625 9.39842291272
438 | 7270.0 116.0 5.03432542224 10.0475775679
439 | 7270.0 117.0 4.88594751996 9.40057085091
440 | 7280.0 115.0 0.166267436604 9.6609486924
441 | 7280.0 116.0 5.56343470784 9.94519251386
442 | 7280.0 117.0 5.3613882026 9.29436724004
443 | 7290.0 116.0 6.09275445855 9.84304611958
444 | 7290.0 117.0 5.90438818543 9.22611053732
445 | 7290.0 118.0 15.1713774274 8.90559042631
446 | 7300.0 116.0 6.57850793157 9.7468662203
447 | 7300.0 117.0 6.45475444711 9.1616723914
448 | 7300.0 118.0 14.5555565166 8.88792960113
449 | 7310.0 116.0 7.05963117217 9.65140230041
450 | 7310.0 117.0 6.97902303518 9.09771156508
451 | 7310.0 118.0 13.9397356058 8.87026877595
452 | 7320.0 116.0 7.54075441277 9.55593838053
453 | 7320.0 117.0 7.44204627636 9.03398939855
454 | 7320.0 118.0 13.3207577184 8.86000640456
455 | 7330.0 116.0 8.0397671877 9.45665590385
456 | 7330.0 117.0 7.90506951753 8.97050589183
457 | 7330.0 118.0 12.6725251808 8.91728475649
458 | 7340.0 116.0 8.56592996176 9.35116827237
459 | 7340.0 117.0 8.38282531638 8.88220176594
460 | 7340.0 118.0 12.0242926431 8.97456310842
461 | 7350.0 116.0 9.09209273582 9.2459193007
462 | 7350.0 117.0 8.89530785832 8.73709660771
463 | 7350.0 118.0 11.4206787087 9.14067032903
464 | 7360.0 116.0 9.61825550989 9.14067032903
465 | 7360.0 117.0 9.40779040026 8.59175278968
466 | 7360.0 118.0 10.8465298897 9.37933012874
467 | 7370.0 116.0 10.1334740982 9.00893011958
468 | 7370.0 117.0 9.95352642952 8.43256670328
469 | 7370.0 118.0 10.2456520017 9.61798992845
470 | 7380.0 116.0 10.6489031517 8.87742856994
471 | 7380.0 117.0 10.4990519937 8.27361927667
472 | 7380.0 118.0 9.60415434754 9.85664972817
473 | 7390.0 116.0 11.1679101121 8.74998423689
474 | 7390.0 117.0 11.0559426737 8.12087700485
475 | 7390.0 118.0 8.98159855327 10.0666703519
476 | 7400.0 116.0 11.7203810248 8.660486812
477 | 7400.0 117.0 11.6387205623 7.98317030041
478 | 7400.0 118.0 8.4028195018 10.2098662317
479 | 7410.0 116.0 12.2728519376 8.57098938711
480 | 7410.0 117.0 12.2214984508 7.84546359598
481 | 7410.0 118.0 7.78720905614 10.3459023176
482 |
--------------------------------------------------------------------------------
/src/datasets/zara2/val/uni_examples_val.txt:
--------------------------------------------------------------------------------
1 | 5940.0 95.0 14.6694181409 7.59129090929
2 | 5940.0 96.0 14.6927797681 6.77030119827
3 | 5950.0 95.0 15.195580915 7.38007698654
4 | 5950.0 96.0 15.268612308 6.60085274047
5 | 5970.0 97.0 14.796328602 8.11896772645
6 | 5970.0 98.0 14.748342557 8.76859970127
7 | 5980.0 97.0 14.2503821076 8.22326205893
8 | 5980.0 98.0 14.2747960604 8.83566310499
9 | 5990.0 97.0 13.7046460784 8.3277950512
10 | 5990.0 98.0 13.8012495637 8.90272650871
11 | 6000.0 97.0 13.1595414444 8.42612288868
12 | 6000.0 98.0 13.3041309748 8.98768939741
13 | 6010.0 97.0 12.6163309965 8.51084711758
14 | 6010.0 98.0 12.751660062 9.11465641086
15 | 6020.0 97.0 12.0733310137 8.59557134648
16 | 6020.0 98.0 12.1991891492 9.2413847645
17 | 6030.0 97.0 11.5204391707 8.69771774076
18 | 6030.0 98.0 11.6502961433 9.36859043775
19 | 6040.0 97.0 10.9284008173 8.86931413675
20 | 6040.0 98.0 11.1336042992 9.4986600286
21 | 6040.0 99.0 14.9234495282 8.73160743232
22 | 6050.0 97.0 10.336572929 9.04067187295
23 | 6050.0 98.0 10.6169124551 9.62872961944
24 | 6050.0 99.0 14.2996309433 8.83375382659
25 | 6060.0 97.0 9.74453457566 9.21226826894
26 | 6060.0 98.0 10.100431076 9.75903787008
27 | 6060.0 99.0 13.6758123584 8.93613888067
28 | 6070.0 97.0 9.19290552333 9.36047600456
29 | 6070.0 98.0 9.54817062838 9.87192395535
30 | 6070.0 99.0 13.0551507501 9.02229506837
31 | 6070.0 100.0 14.6401634907 9.11465641086
32 | 6080.0 97.0 8.64148693611 9.50844508038
33 | 6080.0 98.0 8.98686018101 9.98027550442
34 | 6080.0 99.0 12.4393298393 9.08410795649
35 | 6080.0 100.0 13.9081658394 9.06286723432
36 | 6090.0 97.0 8.08964741868 9.65164096021
37 | 6090.0 98.0 8.42554973364 10.0888657133
38 | 6090.0 99.0 11.8235089285 9.14592084462
39 | 6090.0 100.0 13.1601728398 8.9986677482
40 | 6100.0 97.0 7.53612418036 9.74877549869
41 | 6100.0 98.0 7.86844858846 10.1907734478
42 | 6100.0 99.0 11.1923240648 9.23541826951
43 | 6100.0 100.0 12.3481984468 8.88506568353
44 | 6110.0 97.0 6.98281140716 9.84614869698
45 | 6110.0 98.0 7.31513581525 10.2862373676
46 | 6110.0 99.0 10.5544043175 9.33661002459
47 | 6110.0 100.0 11.5358031237 8.77862341286
48 | 6110.0 101.0 15.1280216148 8.40249556851
49 | 6120.0 97.0 6.42928816884 9.95044302945
50 | 6120.0 98.0 6.76203350716 10.3817012875
51 | 6120.0 99.0 9.91669503534 9.43780177967
52 | 6120.0 100.0 10.7173043124 8.73900588611
53 | 6120.0 101.0 14.5191460526 8.51323371558
54 | 6130.0 97.0 5.87597539564 10.0652383931
55 | 6130.0 98.0 6.20872073395 10.4771652074
56 | 6130.0 99.0 9.28761482266 9.50558116279
57 | 6130.0 100.0 9.88028457137 8.72086774133
58 | 6130.0 101.0 13.9102704905 8.62421052245
59 | 6140.0 97.0 5.32245215732 10.1802724166
60 | 6140.0 98.0 5.66277423958 10.5890966535
61 | 6140.0 99.0 8.67116251657 9.52252600857
62 | 6140.0 100.0 9.00032994803 8.75332547409
63 | 6140.0 101.0 13.3013949283 8.73494866951
64 | 6150.0 97.0 4.79565798793 10.2721564395
65 | 6150.0 98.0 5.11682774522 10.7010280996
66 | 6150.0 99.0 8.05471021048 9.53947085435
67 | 6150.0 100.0 8.12142765023 8.78649918625
68 | 6150.0 101.0 12.6474798327 8.9139435193
69 | 6160.0 97.0 4.28043939956 10.3544940704
70 | 6160.0 98.0 4.57109171596 10.8127208858
71 | 6160.0 99.0 7.44562418322 9.61560333045
72 | 6160.0 100.0 7.25431139857 8.8246847542
73 | 6160.0 101.0 11.9935647371 9.09293836908
74 | 6170.0 97.0 3.7652208112 10.4368317013
75 | 6170.0 98.0 4.01798940786 10.9392105797
76 | 6170.0 99.0 6.84137885349 9.73111467352
77 | 6170.0 100.0 6.38719514692 8.86287032216
78 | 6170.0 101.0 11.3531194085 9.26334146608
79 | 6180.0 97.0 3.22011617727 10.5647533539
80 | 6180.0 98.0 3.46320337889 11.0695188303
81 | 6180.0 99.0 6.23713352375 9.84662601658
82 | 6180.0 100.0 5.48114284998 8.88840692073
83 | 6180.0 101.0 10.7667636131 9.39985487151
84 | 6190.0 97.0 2.6676452645 10.7038920171
85 | 6190.0 98.0 2.90820688481 11.1995884212
86 | 6190.0 99.0 5.63983354264 9.95450024605
87 | 6190.0 100.0 4.54899287945 8.90535176651
88 | 6190.0 101.0 10.1804078177 9.53612961715
89 | 6200.0 97.0 2.12464528167 10.836348206
90 | 6200.0 98.0 2.3593138789 11.3456482186
91 | 6200.0 99.0 5.04905797992 10.0544987021
92 | 6200.0 100.0 3.61705337402 8.92253527209
93 | 6200.0 101.0 9.57616248796 9.67526828038
94 | 6210.0 97.0 1.5961673914 10.9583033636
95 | 6210.0 98.0 1.81610343096 11.507220903
96 | 6210.0 99.0 4.45849288231 10.1544971582
97 | 6210.0 100.0 2.68048363619 8.98673475821
98 | 6210.0 101.0 8.94476715908 9.81846416021
99 | 6220.0 97.0 1.06747903602 11.0804971811
100 | 6220.0 98.0 1.27289298302 11.6690322472
101 | 6220.0 99.0 3.83804173913 10.2413693253
102 | 6220.0 100.0 1.74307203792 9.06262857452
103 | 6220.0 101.0 8.31337183021 9.96166004004
104 | 6230.0 97.0 0.538790680641 11.2026909985
105 | 6230.0 98.0 0.752202301802 11.8263090552
106 | 6230.0 99.0 3.17297199271 10.3081940692
107 | 6230.0 100.0 0.831337183021 9.22730383632
108 | 6230.0 101.0 7.71102068646 10.1222780852
109 | 6240.0 97.0 0.0103127903717 11.3246461562
110 | 6240.0 98.0 0.264765107909 11.9769033888
111 | 6240.0 99.0 2.5079022463 10.3750188131
112 | 6240.0 100.0 -0.0631395328877 9.45092806865
113 | 6240.0 101.0 7.15181489019 10.3086713888
114 | 6250.0 98.0 -0.222672085984 12.1277363822
115 | 6250.0 99.0 1.90155226547 10.4838476818
116 | 6250.0 101.0 6.59281955902 10.4950646924
117 | 6260.0 99.0 1.30172670303 10.5972110867
118 | 6260.0 101.0 6.0260370188 10.6805033568
119 | 6270.0 99.0 0.701901140601 10.7105744915
120 | 6270.0 101.0 5.42831610746 10.8618848046
121 | 6280.0 99.0 0.102075578168 10.8239378964
122 | 6280.0 101.0 4.83059519613 11.0432662523
123 | 6290.0 102.0 15.0482553382 8.80654660943
124 | 6290.0 103.0 3.81699522817 9.61608065005
125 | 6290.0 104.0 0.542368587505 13.7658972475
126 | 6300.0 102.0 14.2617472236 8.96978991243
127 | 6300.0 103.0 4.41471613951 9.4919775542
128 | 6300.0 104.0 1.19775693888 13.6704333276
129 | 6310.0 102.0 13.469767016 9.12396414305
130 | 6310.0 103.0 5.01243705084 9.36787445835
131 | 6310.0 104.0 1.85335575536 13.5749694077
132 | 6320.0 102.0 12.6279065775 9.19556208296
133 | 6320.0 103.0 5.604054474 9.28100229126
134 | 6320.0 104.0 2.50874410674 13.4795054878
135 | 6330.0 102.0 11.7900449761 9.27360383747
136 | 6330.0 103.0 6.19504050183 9.19842600056
137 | 6330.0 104.0 3.16118594658 13.3864281659
138 | 6340.0 102.0 10.9883833735 9.4101172429
139 | 6340.0 103.0 6.79549745959 9.11036053446
140 | 6340.0 104.0 3.8066824378 13.2988400194
141 | 6350.0 102.0 10.1865113059 9.54639198854
142 | 6350.0 103.0 7.4188951143 9.00940743918
143 | 6350.0 104.0 4.45196846391 13.2114905327
144 | 6360.0 102.0 9.36464505279 9.73970642631
145 | 6360.0 103.0 8.04229276901 8.9084543439
146 | 6360.0 104.0 5.11977425675 13.1200838294
147 | 6360.0 105.0 -0.0631395328877 13.025335889
148 | 6370.0 102.0 8.5427787997 9.93278220427
149 | 6370.0 103.0 8.66021833087 8.81012650642
150 | 6370.0 104.0 5.83935446656 13.0200853734
151 | 6370.0 105.0 0.357790686363 12.9642389802
152 | 6380.0 102.0 7.75458696415 10.0301554026
153 | 6380.0 103.0 9.27603924164 8.71275330814
154 | 6380.0 104.0 6.55914514148 12.9198482575
155 | 6380.0 105.0 0.778720905614 12.9029034117
156 | 6390.0 102.0 6.97018350058 10.1170275697
157 | 6390.0 103.0 9.8918601524 8.61561876966
158 | 6390.0 104.0 7.27788349085 12.795267842
159 | 6390.0 105.0 1.19965112487 12.841806503
160 | 6400.0 102.0 6.13379515493 10.2031837573
161 | 6400.0 103.0 10.5232554813 8.37695896994
162 | 6400.0 104.0 7.99346486358 12.5725982489
163 | 6400.0 105.0 1.62058134412 12.7804709345
164 | 6410.0 102.0 5.27509750765 10.2891012852
165 | 6410.0 103.0 11.1546508102 8.13829917023
166 | 6410.0 104.0 8.7090462363 12.3499286558
167 | 6410.0 105.0 2.07602784135 12.7193740257
168 | 6420.0 102.0 4.42292427878 10.3614152046
169 | 6420.0 103.0 11.7786798602 7.89963937052
170 | 6420.0 104.0 9.40779040026 12.1050637013
171 | 6420.0 105.0 2.54010340807 12.6580384572
172 | 6430.0 102.0 3.59726965372 10.3797920091
173 | 6430.0 103.0 12.37345426 7.6609795708
174 | 6430.0 104.0 10.0391857291 11.7728492601
175 | 6430.0 105.0 3.00417897479 12.5969415485
176 | 6430.0 106.0 14.9851158053 8.75881464948
177 | 6430.0 107.0 15.2561948665 8.16813164519
178 | 6440.0 102.0 2.77161502866 10.3981688137
179 | 6440.0 103.0 12.9682286598 7.42231977109
180 | 6440.0 104.0 10.670581058 11.4403961591
181 | 6440.0 105.0 3.46825454152 12.5358446397
182 | 6440.0 106.0 14.4042321028 8.76836104147
183 | 6440.0 107.0 14.6334286072 8.22779659512
184 | 6450.0 102.0 1.94069877586 10.4463780933
185 | 6450.0 103.0 13.6198286392 7.23735842631
186 | 6450.0 104.0 11.3051333635 11.0719054283
187 | 6450.0 105.0 4.00262545486 12.4904992778
188 | 6450.0 106.0 13.8233484002 8.77790743346
189 | 6450.0 107.0 14.0108728129 8.28746154505
190 | 6460.0 102.0 1.1064150813 10.514634796
191 | 6460.0 103.0 14.2775321068 7.05836357653
192 | 6460.0 104.0 11.9447368317 10.6497162426
193 | 6460.0 105.0 4.54478357725 12.4473018541
194 | 6460.0 106.0 13.237413535 8.84688011558
195 | 6460.0 107.0 13.3876856233 8.35380896937
196 | 6470.0 102.0 0.272131386746 10.5828914987
197 | 6470.0 103.0 14.9352355744 6.87936872674
198 | 6470.0 104.0 12.5841298347 10.2275270569
199 | 6470.0 105.0 5.08694169965 12.4038657705
200 | 6470.0 106.0 12.6464275072 8.97551774762
201 | 6470.0 107.0 12.7636565732 8.43614660027
202 | 6480.0 104.0 13.3192844627 9.90008581171
203 | 6480.0 105.0 5.60679052042 12.3434848412
204 | 6480.0 106.0 12.0556519445 9.10391671987
205 | 6480.0 107.0 12.1394170581 8.51848423117
206 | 6490.0 104.0 14.078221648 9.59627188668
207 | 6490.0 105.0 6.11190678353 12.2718869013
208 | 6490.0 106.0 11.5147566127 9.28243425005
209 | 6490.0 107.0 11.5257007984 8.64545124462
210 | 6500.0 104.0 14.8371588333 9.29245796164
211 | 6500.0 105.0 6.61702304663 12.2002889614
212 | 6500.0 106.0 10.9797543041 9.46667961543
213 | 6500.0 107.0 10.9271380266 8.83948166179
214 | 6510.0 104.0 15.5960960186 8.98888269641
215 | 6510.0 105.0 7.12213930973 12.1525570014
216 | 6510.0 106.0 10.4445415303 9.65068632101
217 | 6510.0 107.0 10.3285752549 9.03327341915
218 | 6520.0 105.0 7.62725557283 12.120815248
219 | 6520.0 106.0 9.88701945488 9.81965745921
220 | 6520.0 107.0 9.71948922762 9.21370022774
221 | 6530.0 105.0 8.13237183593 12.0888348349
222 | 6530.0 106.0 9.29645435727 9.96595591643
223 | 6530.0 107.0 9.06852064354 9.34066724118
224 | 6540.0 105.0 8.61728344851 12.0074518432
225 | 6540.0 106.0 8.70588925966 10.1122543737
226 | 6540.0 107.0 8.41734159436 9.46739559483
227 | 6550.0 105.0 9.08872529407 11.8928951393
228 | 6550.0 106.0 8.1186916038 10.2392213871
229 | 6550.0 107.0 7.76616254518 9.59412394848
230 | 6560.0 105.0 9.56227179073 11.7823956521
231 | 6560.0 106.0 7.53991255233 10.3213203582
232 | 6560.0 107.0 7.17812302889 9.67837085778
233 | 6570.0 105.0 10.0558124728 11.7084111141
234 | 6570.0 106.0 6.96113350086 10.4034193293
235 | 6570.0 107.0 6.59008351259 9.76261776708
236 | 6580.0 105.0 10.5493531549 11.6341879164
237 | 6580.0 106.0 6.37709282165 10.4833703622
238 | 6580.0 107.0 6.0020439963 9.84686467638
239 | 6580.0 108.0 0.110073252334 11.8107961682
240 | 6590.0 105.0 11.0494182553 11.5640219353
241 | 6590.0 106.0 5.74569749278 10.5449445905
242 | 6590.0 107.0 5.39548355036 9.88481158453
243 | 6590.0 108.0 0.790086021534 11.7833502913
244 | 6600.0 105.0 11.5755810294 11.5088915216
245 | 6600.0 106.0 5.1143021639 10.6065188189
246 | 6600.0 107.0 4.77692659317 9.89197137852
247 | 6600.0 108.0 1.47009879073 11.7559044143
248 | 6600.0 109.0 10.1970345614 8.43662391987
249 | 6610.0 105.0 12.1017438035 11.4537611078
250 | 6610.0 106.0 4.48290683502 10.6680930472
251 | 6610.0 107.0 4.15836963598 9.89936983231
252 | 6610.0 108.0 2.13516853715 11.7096044131
253 | 6610.0 109.0 10.0497089846 8.7468816595
254 | 6620.0 105.0 12.6051763457 11.4432600767
255 | 6620.0 106.0 3.81909987926 10.7139157287
256 | 6620.0 107.0 3.52992081864 9.90223374991
257 | 6620.0 108.0 2.785716191 11.6444502878
258 | 6620.0 109.0 9.90238340788 9.05713939913
259 | 6630.0 105.0 13.0934554 11.4623528606
260 | 6630.0 106.0 3.15529292351 10.7599770701
261 | 6630.0 107.0 2.87874176946 9.89459663632
262 | 6630.0 108.0 3.42089989185 11.5630672961
263 | 6630.0 109.0 9.68034271723 9.36954507695
264 | 6640.0 105.0 13.5865751519 11.4740471908
265 | 6640.0 106.0 2.52137201331 10.816062123
266 | 6640.0 107.0 2.22756272028 9.88719818253
267 | 6640.0 108.0 4.01988359385 11.4437373963
268 | 6640.0 109.0 9.38379737776 9.68409869297
269 | 6650.0 105.0 14.1228402512 11.4203487359
270 | 6650.0 106.0 1.93206970636 10.8876600629
271 | 6650.0 107.0 1.58374994993 9.89674457452
272 | 6650.0 108.0 4.61886729584 11.3244074964
273 | 6650.0 109.0 9.06809971333 10.0031868452
274 | 6660.0 105.0 14.6593158156 11.3664116212
275 | 6660.0 106.0 1.34276739941 10.9592580028
276 | 6660.0 107.0 1.00497089846 10.0585559187
277 | 6660.0 108.0 5.2214289047 11.2377739891
278 | 6660.0 109.0 8.70715205032 10.3327760286
279 | 6670.0 105.0 15.195580915 11.3124745064
280 | 6670.0 106.0 0.766303464147 11.0470848091
281 | 6670.0 107.0 0.426191846992 10.2206059227
282 | 6670.0 108.0 5.82525330422 11.16522141
283 | 6670.0 109.0 8.34346834088 10.6544894386
284 | 6680.0 106.0 0.209202318968 11.1594935748
285 | 6680.0 107.0 -0.152587204479 10.3826559267
286 | 6680.0 108.0 6.39582421641 11.0110471794
287 | 6680.0 109.0 7.96757765509 10.9442224355
288 | 6690.0 108.0 6.91651489762 10.7346791313
289 | 6690.0 109.0 7.61883696844 11.4556703862
290 | 6700.0 108.0 7.48603348427 10.5031791256
291 | 6700.0 109.0 7.21284977198 11.6091286375
292 | 6710.0 108.0 8.10416951124 10.316785822
293 | 6710.0 109.0 6.76013932117 11.7850209099
294 | 6720.0 108.0 8.72251600332 10.1303925184
295 | 6720.0 109.0 6.27606956903 11.9759487496
296 | 6730.0 108.0 9.34170435584 9.94185127666
297 | 6730.0 109.0 5.78168702652 12.0045879256
298 | 6740.0 108.0 9.96299735945 9.74877549869
299 | 6740.0 109.0 5.2805696005 11.9251142123
300 | 6750.0 108.0 10.5842903631 9.55546106093
301 | 6750.0 109.0 4.7796626396 11.8454018392
302 | 6750.0 110.0 10.9652322115 9.40319610871
303 | 6750.0 111.0 0.778720905614 9.23613424891
304 | 6760.0 110.0 10.3338368826 9.54472136994
305 | 6760.0 111.0 1.0910511283 9.33613270499
306 | 6770.0 110.0 9.70244155374 9.68600797137
307 | 6770.0 111.0 1.40338135098 9.43636982087
308 | 6780.0 110.0 9.098196224 9.83875024319
309 | 6780.0 111.0 1.71571157367 9.53636827695
310 | 6790.0 110.0 8.55687996205 10.017745093
311 | 6790.0 111.0 2.1187522586 9.72132962173
312 | 6800.0 110.0 8.0157741652 10.1967399428
313 | 6800.0 111.0 2.5318952688 9.91583735849
314 | 6810.0 110.0 7.43720557884 10.3833719061
315 | 6810.0 111.0 2.95198362761 10.1108224149
316 | 6820.0 110.0 6.82138466807 10.5778796429
317 | 6820.0 111.0 3.40048477622 10.3079554094
318 | 6830.0 110.0 6.20556375731 10.7723873797
319 | 6830.0 111.0 3.84877545972 10.505088404
320 | 6840.0 110.0 5.60131842757 10.9750095496
321 | 6840.0 111.0 4.33221381653 10.5248971674
322 | 6850.0 110.0 5.00001960937 11.179540998
323 | 6850.0 111.0 4.81965101042 10.5248971674
324 | 6860.0 110.0 4.4298696274 11.3270327542
325 | 6860.0 111.0 5.37359517896 10.3993621127
326 | 6870.0 110.0 3.87318941244 11.4501812109
327 | 6870.0 111.0 5.92206725464 10.2714404601
328 | 6880.0 110.0 3.31650919748 11.5735683273
329 | 6880.0 111.0 6.42318468066 10.123710044
330 | 6890.0 110.0 2.71499991417 11.670464206
331 | 6890.0 111.0 6.92430210668 9.97597962802
332 | 6890.0 112.0 14.8220053454 7.43186616308
333 | 6890.0 113.0 15.0080565023 8.0774409213
334 | 6890.0 114.0 11.259883365 6.49154655221
335 | 6900.0 112.0 14.1379937391 7.5750620429
336 | 6900.0 113.0 14.3575088484 8.20774917195
337 | 6900.0 114.0 11.6808135842 6.73020635192
338 | 6910.0 112.0 13.4539821328 7.71825792273
339 | 6910.0 113.0 13.7069611946 8.33781876279
340 | 6910.0 114.0 12.1017438035 6.96886615163
341 | 6920.0 112.0 12.8078542463 7.88293318454
342 | 6920.0 113.0 13.0982960975 8.47337752903
343 | 6920.0 114.0 12.5003647211 7.00538110099
344 | 6930.0 112.0 12.1659356619 8.04999504433
345 | 6930.0 113.0 12.5073100697 8.61108423346
346 | 6930.0 114.0 12.8575240121 7.02972440056
347 | 6940.0 112.0 11.4989717295 8.20345329555
348 | 6940.0 113.0 11.9167449721 8.7478362987
349 | 6940.0 114.0 13.0583077267 7.09463986608
350 | 6950.0 112.0 10.8294822158 8.35524092817
351 | 6950.0 113.0 11.329126386 8.87718991014
352 | 6950.0 114.0 13.2593019064 7.1597939914
353 | 6960.0 112.0 10.209872933 8.50631258139
354 | 6960.0 113.0 10.7417182651 9.00654352159
355 | 6960.0 114.0 13.2955019053 7.22972131272
356 | 6970.0 112.0 9.61152062638 8.65714557481
357 | 6970.0 113.0 10.123371773 9.10988321486
358 | 6970.0 114.0 13.3319123692 7.29964863404
359 | 6980.0 112.0 8.98686018101 8.76072392788
360 | 6980.0 113.0 9.49197644411 9.20224455735
361 | 6980.0 114.0 13.3681123681 7.36957595535
362 | 6990.0 112.0 8.35546485213 8.85236929097
363 | 6990.0 113.0 8.86058111523 9.29460589984
364 | 6990.0 114.0 13.404522832 7.43950327667
365 | 7000.0 112.0 7.7141776631 8.95141310785
366 | 7000.0 113.0 8.24728578579 9.39818425292
367 | 7000.0 114.0 13.4407228309 7.50966925778
368 | 7010.0 112.0 7.03374396369 9.0786187811
369 | 7010.0 113.0 7.64598696759 9.50892239998
370 | 7010.0 114.0 13.5036518987 7.58389245549
371 | 7020.0 112.0 6.35331026427 9.20582445435
372 | 7020.0 113.0 7.04447768428 9.61966054705
373 | 7020.0 114.0 13.6734972422 7.67649245778
374 | 7030.0 112.0 5.68403121566 9.33637136479
375 | 7030.0 113.0 6.37098933347 9.77144817967
376 | 7030.0 114.0 13.8431321205 7.76885380027
377 | 7040.0 112.0 5.0595812354 9.47956724462
378 | 7040.0 113.0 5.67961144835 9.93325952387
379 | 7040.0 114.0 14.0285518821 7.84952081258
380 | 7050.0 112.0 4.43513125514 9.62276312445
381 | 7050.0 113.0 4.98802309813 10.0953095279
382 | 7050.0 114.0 14.3543518718 7.8266094718
383 | 7060.0 112.0 3.80352546115 9.75211673589
384 | 7060.0 113.0 4.38461962883 10.1907734478
385 | 7060.0 114.0 14.6803623266 7.80345947123
386 | 7070.0 112.0 3.14308594715 9.8261012738
387 | 7070.0 113.0 3.78142662464 10.2862373676
388 | 7070.0 114.0 15.0061623163 7.78030947066
389 | 7080.0 112.0 2.48285689825 9.90032447151
390 | 7080.0 113.0 3.17802315535 10.3817012875
391 | 7090.0 112.0 1.83862319769 9.94137395706
392 | 7090.0 113.0 2.59166735993 10.5862327359
393 | 7100.0 112.0 1.23290461219 9.90509766751
394 | 7100.0 113.0 2.00552202962 10.7907641842
395 | 7100.0 115.0 14.8678867393 8.4931862924
396 | 7110.0 112.0 0.626975561574 9.86906003775
397 | 7110.0 113.0 1.43053135012 11.0022167668
398 | 7110.0 115.0 14.0719076947 8.59700330528
399 | 7120.0 112.0 0.0210465109626 9.83278374819
400 | 7120.0 113.0 0.900580204088 11.2408765665
401 | 7120.0 115.0 13.275718185 8.70082031815
402 | 7130.0 113.0 0.37083952316 11.4795363662
403 | 7130.0 115.0 12.4384879789 8.75953062888
404 | 7140.0 113.0 -0.158901157767 11.7181961659
405 | 7140.0 115.0 11.5966275404 8.81346774362
406 | 7150.0 115.0 10.7547671019 8.86740485835
407 | 7160.0 115.0 9.99141014925 8.9098863027
408 | 7170.0 115.0 9.24783691695 8.94974248925
409 | 7170.0 116.0 0.0841860438502 11.0499487267
410 | 7180.0 115.0 8.50426368464 8.98959867581
411 | 7180.0 116.0 0.602351143748 10.967372436
412 | 7180.0 117.0 0.255083712866 10.3996007725
413 | 7190.0 115.0 7.72406952326 8.99055331501
414 | 7190.0 116.0 1.12030577854 10.8847961453
415 | 7190.0 117.0 0.754938348227 10.300079636
416 | 7200.0 115.0 6.93482536216 8.98220022202
417 | 7200.0 116.0 1.60963715842 10.794582741
418 | 7200.0 117.0 1.25479298359 10.2007971594
419 | 7210.0 115.0 6.14558120107 8.97360846923
420 | 7210.0 116.0 2.05624412104 10.693390986
421 | 7210.0 117.0 1.76496040932 10.1062878787
422 | 7220.0 115.0 5.30372076256 8.93685486007
423 | 7220.0 116.0 2.50264061856 10.5921992309
424 | 7220.0 117.0 2.27596969549 10.0127332372
425 | 7230.0 115.0 4.46186032406 8.90010125091
426 | 7230.0 116.0 2.95577199958 10.489575517
427 | 7230.0 117.0 2.79160921407 9.9139280801
428 | 7240.0 115.0 3.60379407212 8.92587650928
429 | 7240.0 116.0 3.47035919262 10.3745414935
430 | 7240.0 117.0 3.34723710348 9.77073220027
431 | 7250.0 115.0 2.72152433257 9.04520640914
432 | 7250.0 116.0 3.98473592054 10.2597461299
433 | 7250.0 117.0 3.9028649929 9.62753632044
434 | 7260.0 115.0 1.83904412791 9.164536309
435 | 7260.0 116.0 4.50521613664 10.149962622
436 | 7260.0 117.0 4.41050683731 9.50653580199
437 | 7270.0 115.0 0.997604619625 9.39842291272
438 | 7270.0 116.0 5.03432542224 10.0475775679
439 | 7270.0 117.0 4.88594751996 9.40057085091
440 | 7280.0 115.0 0.166267436604 9.6609486924
441 | 7280.0 116.0 5.56343470784 9.94519251386
442 | 7280.0 117.0 5.3613882026 9.29436724004
443 | 7290.0 116.0 6.09275445855 9.84304611958
444 | 7290.0 117.0 5.90438818543 9.22611053732
445 | 7290.0 118.0 15.1713774274 8.90559042631
446 | 7300.0 116.0 6.57850793157 9.7468662203
447 | 7300.0 117.0 6.45475444711 9.1616723914
448 | 7300.0 118.0 14.5555565166 8.88792960113
449 | 7310.0 116.0 7.05963117217 9.65140230041
450 | 7310.0 117.0 6.97902303518 9.09771156508
451 | 7310.0 118.0 13.9397356058 8.87026877595
452 | 7320.0 116.0 7.54075441277 9.55593838053
453 | 7320.0 117.0 7.44204627636 9.03398939855
454 | 7320.0 118.0 13.3207577184 8.86000640456
455 | 7330.0 116.0 8.0397671877 9.45665590385
456 | 7330.0 117.0 7.90506951753 8.97050589183
457 | 7330.0 118.0 12.6725251808 8.91728475649
458 | 7340.0 116.0 8.56592996176 9.35116827237
459 | 7340.0 117.0 8.38282531638 8.88220176594
460 | 7340.0 118.0 12.0242926431 8.97456310842
461 | 7350.0 116.0 9.09209273582 9.2459193007
462 | 7350.0 117.0 8.89530785832 8.73709660771
463 | 7350.0 118.0 11.4206787087 9.14067032903
464 | 7360.0 116.0 9.61825550989 9.14067032903
465 | 7360.0 117.0 9.40779040026 8.59175278968
466 | 7360.0 118.0 10.8465298897 9.37933012874
467 | 7370.0 116.0 10.1334740982 9.00893011958
468 | 7370.0 117.0 9.95352642952 8.43256670328
469 | 7370.0 118.0 10.2456520017 9.61798992845
470 | 7380.0 116.0 10.6489031517 8.87742856994
471 | 7380.0 117.0 10.4990519937 8.27361927667
472 | 7380.0 118.0 9.60415434754 9.85664972817
473 | 7390.0 116.0 11.1679101121 8.74998423689
474 | 7390.0 117.0 11.0559426737 8.12087700485
475 | 7390.0 118.0 8.98159855327 10.0666703519
476 | 7400.0 116.0 11.7203810248 8.660486812
477 | 7400.0 117.0 11.6387205623 7.98317030041
478 | 7400.0 118.0 8.4028195018 10.2098662317
479 | 7410.0 116.0 12.2728519376 8.57098938711
480 | 7410.0 117.0 12.2214984508 7.84546359598
481 | 7410.0 118.0 7.78720905614 10.3459023176
482 |
--------------------------------------------------------------------------------
/src/models/pecnet.py:
--------------------------------------------------------------------------------
1 | # Baseline model for "It is Not the Journey but the Destination: Endpoint Conditioned Trajectory Prediction"
2 | # Source-code directly referred from PECNet at https://github.com/HarshayuGirase/Human-Path-Prediction/blob/master/PECNet/utils/models.py
3 |
4 | import torch
5 | import torch.nn as nn
6 | from torch.autograd import Variable
7 | import random
8 | import torch.nn.functional as F
9 | from torch.nn.utils import weight_norm
10 | import pdb
11 | from torch.nn import functional as F
12 | from torch.distributions.normal import Normal
13 | import math
14 | import numpy as np
15 | import yaml
16 |
17 | '''MLP model'''
18 | class MLP(nn.Module):
19 | def __init__(self, input_dim, output_dim, hidden_size=(1024, 512), activation='relu', discrim=False, dropout=-1):
20 | super(MLP, self).__init__()
21 | dims = []
22 | dims.append(input_dim)
23 | dims.extend(hidden_size)
24 | dims.append(output_dim)
25 | self.layers = nn.ModuleList()
26 | for i in range(len(dims)-1):
27 | self.layers.append(nn.Linear(dims[i], dims[i+1]))
28 |
29 | if activation == 'relu':
30 | self.activation = nn.ReLU()
31 | elif activation == 'sigmoid':
32 | self.activation = nn.Sigmoid()
33 |
34 | self.sigmoid = nn.Sigmoid() if discrim else None
35 | self.dropout = dropout
36 |
37 | def forward(self, x):
38 | for i in range(len(self.layers)):
39 | x = self.layers[i](x)
40 | if i != len(self.layers)-1:
41 | x = self.activation(x)
42 | if self.dropout != -1:
43 | x = nn.Dropout(min(0.1, self.dropout/3) if i == 1 else self.dropout)(x)
44 | elif self.sigmoid:
45 | x = self.sigmoid(x)
46 | return x
47 |
48 | class PECNet(nn.Module):
49 |
50 | def __init__(self, enc_past_size, enc_dest_size, enc_latent_size, dec_size, predictor_size, non_local_theta_size, non_local_phi_size, non_local_g_size, fdim, zdim, nonlocal_pools, non_local_dim, sigma, past_length, future_length, verbose):
51 | '''
52 | Args:
53 | size parameters: Dimension sizes
54 | nonlocal_pools: Number of nonlocal pooling operations to be performed
55 | sigma: Standard deviation used for sampling N(0, sigma)
56 | past_length: Length of past history (number of timesteps)
57 | future_length: Length of future trajectory to be predicted
58 | '''
59 | super(PECNet, self).__init__()
60 |
61 | self.zdim = zdim
62 | self.nonlocal_pools = nonlocal_pools
63 | self.sigma = sigma
64 |
65 | # takes in the past
66 | self.encoder_past = MLP(input_dim = past_length*2, output_dim = fdim, hidden_size=enc_past_size)
67 |
68 | self.encoder_dest = MLP(input_dim = 2, output_dim = fdim, hidden_size=enc_dest_size)
69 |
70 | self.encoder_latent = MLP(input_dim = 2*fdim, output_dim = 2*zdim, hidden_size=enc_latent_size)
71 |
72 | self.decoder = MLP(input_dim = fdim + zdim, output_dim = 2, hidden_size=dec_size)
73 |
74 | self.non_local_theta = MLP(input_dim = 2*fdim + 2, output_dim = non_local_dim, hidden_size=non_local_theta_size)
75 | self.non_local_phi = MLP(input_dim = 2*fdim + 2, output_dim = non_local_dim, hidden_size=non_local_phi_size)
76 | self.non_local_g = MLP(input_dim = 2*fdim + 2, output_dim = 2*fdim + 2, hidden_size=non_local_g_size)
77 |
78 | self.predictor = MLP(input_dim = 2*fdim + 2, output_dim = 2*(future_length-1), hidden_size=predictor_size)
79 |
80 | architecture = lambda net: [l.in_features for l in net.layers] + [net.layers[-1].out_features]
81 |
82 | if verbose:
83 | print("Past Encoder architecture : {}".format(architecture(self.encoder_past)))
84 | print("Dest Encoder architecture : {}".format(architecture(self.encoder_dest)))
85 | print("Latent Encoder architecture : {}".format(architecture(self.encoder_latent)))
86 | print("Decoder architecture : {}".format(architecture(self.decoder)))
87 | print("Predictor architecture : {}".format(architecture(self.predictor)))
88 |
89 | print("Non Local Theta architecture : {}".format(architecture(self.non_local_theta)))
90 | print("Non Local Phi architecture : {}".format(architecture(self.non_local_phi)))
91 | print("Non Local g architecture : {}".format(architecture(self.non_local_g)))
92 |
93 | def non_local_social_pooling(self, feat, mask):
94 |
95 | # N,C
96 | theta_x = self.non_local_theta(feat)
97 |
98 | # C,N
99 | phi_x = self.non_local_phi(feat).transpose(1,0)
100 |
101 | # f_ij = (theta_i)^T(phi_j), (N,N)
102 | f = torch.matmul(theta_x, phi_x)
103 |
104 | # f_weights_i = exp(f_ij)/(\sum_{j=1}^N exp(f_ij))
105 | f_weights = F.softmax(f, dim = -1)
106 |
107 | # setting weights of non neighbours to zero
108 | f_weights = f_weights * mask
109 |
110 | # rescaling row weights to 1
111 | f_weights = F.normalize(f_weights, p=1, dim=1)
112 |
113 | # ith row of all_pooled_f = \sum_{j=1}^N f_weights_i_j * g_row_j
114 | pooled_f = torch.matmul(f_weights, self.non_local_g(feat))
115 |
116 | return pooled_f + feat
117 |
118 | def forward(self, x, initial_pos, dest = None, mask = None, device=torch.device('cpu'), noise=None):
119 |
120 | # provide destination iff training
121 | # assert model.training
122 | assert self.training ^ (dest is None)
123 | assert self.training ^ (mask is None)
124 |
125 | # encode
126 | ftraj = self.encoder_past(x)
127 |
128 | if not self.training:
129 | if noise is not None:
130 | z = noise * self.sigma
131 | else:
132 | z = torch.Tensor(x.size(0), self.zdim)
133 | z.normal_(0, self.sigma)
134 |
135 | else:
136 | # during training, use the destination to produce generated_dest and use it again to predict final future points
137 |
138 | # CVAE code
139 | dest_features = self.encoder_dest(dest)
140 | features = torch.cat((ftraj, dest_features), dim = 1)
141 | latent = self.encoder_latent(features)
142 |
143 | mu = latent[:, 0:self.zdim] # 2-d array
144 | logvar = latent[:, self.zdim:] # 2-d array
145 |
146 | var = logvar.mul(0.5).exp_()
147 | eps = torch.FloatTensor(var.size()).normal_()
148 | eps = eps.to(device)
149 | z = eps.mul(var).add_(mu)
150 |
151 | z = z.to(device)
152 | decoder_input = torch.cat((ftraj, z), dim = 1)
153 | generated_dest = self.decoder(decoder_input)
154 |
155 | if self.training:
156 | # prediction in training, no best selection
157 | generated_dest_features = self.encoder_dest(generated_dest)
158 |
159 | prediction_features = torch.cat((ftraj, generated_dest_features, initial_pos), dim = 1)
160 |
161 | for i in range(self.nonlocal_pools):
162 | # non local social pooling
163 | prediction_features = self.non_local_social_pooling(prediction_features, mask)
164 |
165 | pred_future = self.predictor(prediction_features)
166 | return generated_dest, mu, logvar, pred_future
167 |
168 | return generated_dest
169 |
170 | # separated for forward to let choose the best destination
171 | def predict(self, past, generated_dest, mask, initial_pos):
172 | ftraj = self.encoder_past(past)
173 | generated_dest_features = self.encoder_dest(generated_dest)
174 | prediction_features = torch.cat((ftraj, generated_dest_features, initial_pos), dim = 1)
175 |
176 | for i in range(self.nonlocal_pools):
177 | # non local social pooling
178 | prediction_features = self.non_local_social_pooling(prediction_features, mask)
179 |
180 | interpolated_future = self.predictor(prediction_features)
181 | return interpolated_future
--------------------------------------------------------------------------------
/src/utils/blackbox_function.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 |
4 | class BlackBoxFunctionPECNET():
5 | def __init__(self, model,initial_pos,device,x):
6 | self.model = model
7 | self.initial_pos = initial_pos
8 | self.device = device
9 | self.x = x
10 | self.dest_recon_label = self.model(self.x, self.initial_pos, noise=torch.zeros((x.size(0), 16)).to(device), device = self.device) # [batchsize,2]
11 |
12 |
13 | def __call__(self, noise_recon):
14 | dest_recon = self.model(self.x, self.initial_pos, noise=noise_recon, device = self.device) # [batchsize,2]
15 | return torch.norm(dest_recon - self.dest_recon_label, dim=-1)
--------------------------------------------------------------------------------
/src/utils/exp_scheduler.py:
--------------------------------------------------------------------------------
1 | from easydict import EasyDict as edict
2 | import yaml
3 | class ConfigYAML():
4 | def __init__(self,file_path) -> None:
5 | self.file_path = file_path
6 | with open(file_path, 'r') as stream:
7 | self.data = yaml.safe_load(stream)
8 | self.data = edict(self.data)
9 | def __str__(self) -> str:
10 | return str(self.data)
11 | def __call__(self, ):
12 | return self.data
13 |
14 | if __name__ == "__main__":
15 | c = ConfigYAML('configs/pecnet/eth.yaml')
16 | print(c)
17 | print(c().gpu_idx)
--------------------------------------------------------------------------------
/src/utils/pecnet.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch
3 |
4 | def model_forward_post_hook(model, all_dest_recon, mask, x, y, initial_pos, dest,data_scale=170 ):
5 | all_guesses = []
6 | all_l2_errors_dest = []
7 | for dest_recon in all_dest_recon:
8 | dest_recon = dest_recon.cpu().numpy()
9 | all_guesses.append(dest_recon)
10 |
11 | l2error_sample = np.linalg.norm(dest_recon - dest, axis=1)
12 | all_l2_errors_dest.append(l2error_sample)
13 |
14 | all_l2_errors_dest = np.array(all_l2_errors_dest)
15 | all_guesses = np.array(all_guesses)
16 |
17 | # choosing the best guess
18 | indices = np.argmin(all_l2_errors_dest, axis=0)
19 | best_guess_dest = all_guesses[indices, np.arange(x.shape[0]), :]
20 | best_guess_dest = torch.FloatTensor(best_guess_dest).to(x.device)
21 |
22 | # using the best guess for interpolation
23 | interpolated_future = model.predict(x, best_guess_dest, mask, initial_pos)
24 | interpolated_future = interpolated_future.cpu().numpy()
25 | best_guess_dest = best_guess_dest.cpu().numpy()
26 |
27 | # final overall prediction
28 | predicted_future = np.concatenate((interpolated_future, best_guess_dest), axis=1)
29 | predicted_future = np.reshape(predicted_future, (-1, 12, 2)) # making sure
30 |
31 | tcc = evaluate_tcc(predicted_future / data_scale, y / data_scale)
32 | ADEs = np.mean(np.linalg.norm(y - predicted_future, axis=2), axis=1) / data_scale
33 | FDEs = np.min(all_l2_errors_dest, axis=0) / data_scale
34 | TCCs = tcc.detach().cpu().numpy()
35 | return ADEs, FDEs,
36 |
37 | def evaluate_tcc(pred, gt):
38 | """Get ADE, FDE, TCC scores for each pedestrian"""
39 | pred, gt = torch.FloatTensor(pred).permute(1, 0, 2), torch.FloatTensor(gt).permute(1, 0, 2)
40 | pred_best = pred
41 | pred_gt_stack = torch.stack([pred_best.permute(1, 0, 2), gt.permute(1, 0, 2)], dim=0)
42 | pred_gt_stack = pred_gt_stack.permute(3, 1, 0, 2)
43 | covariance = pred_gt_stack - pred_gt_stack.mean(dim=-1, keepdim=True)
44 | factor = 1 / (covariance.shape[-1] - 1)
45 | covariance = factor * covariance @ covariance.transpose(-1, -2)
46 | variance = covariance.diagonal(offset=0, dim1=-2, dim2=-1)
47 | stddev = variance.sqrt()
48 | corrcoef = covariance / stddev.unsqueeze(-1) / stddev.unsqueeze(-2)
49 | corrcoef.clip_(-1, 1)
50 | corrcoef[torch.isnan(corrcoef)] = 0
51 | TCCs = corrcoef[:, :, 0, 1].mean(dim=0)
52 | return TCCs
--------------------------------------------------------------------------------
/src/utils/sample.py:
--------------------------------------------------------------------------------
1 | # modified from https://github.com/InhwanBae/NPSN/blob/main/npsn/sampler.py
2 | import torch
3 | import numpy as np
4 |
5 | def generate_statistics_matrices(V):
6 | r"""generate mean and covariance matrices from the network output."""
7 |
8 | mu = V[:, :, 0:2]
9 | sx = V[:, :, 2].exp()
10 | sy = V[:, :, 3].exp()
11 | corr = V[:, :, 4].tanh()
12 |
13 | cov = torch.zeros(V.size(0), V.size(1), 2, 2, device=V.device)
14 | cov[:, :, 0, 0] = sx * sx
15 | cov[:, :, 0, 1] = corr * sx * sy
16 | cov[:, :, 1, 0] = corr * sx * sy
17 | cov[:, :, 1, 1] = sy * sy
18 |
19 | return mu, cov
20 |
21 | def mc_sample_fast(mu, cov, n_sample):
22 | r_sample = torch.randn((n_sample,) + mu.shape, dtype=mu.dtype, device=mu.device)
23 | sample = mu + (torch.cholesky(cov) @ r_sample.unsqueeze(dim=-1)).squeeze(dim=-1)
24 | return sample
25 |
26 | def qmc_sample_fast(mu, cov, n_sample, rng=None):
27 | rng = torch.quasirandom.SobolEngine(dimension=2, scramble=True, seed=0)
28 | qr_seq = torch.stack([box_muller_transform(rng.draw(n_sample)) for _ in range(mu.size(0))], dim=1).unsqueeze(dim=2).type_as(mu)
29 | sample = mu + (torch.cholesky(cov) @ qr_seq.unsqueeze(dim=-1)).squeeze(dim=-1)
30 | return sample
31 |
32 | def box_muller_transform(x: torch.FloatTensor):
33 | r"""Box-Muller transform"""
34 | shape = x.shape
35 | x = x.view(shape[:-1] + (-1, 2))
36 | z = torch.zeros_like(x, device=x.device)
37 | z[..., 0] = (-2 * x[..., 0].log()).sqrt() * (2 * np.pi * x[..., 1]).cos()
38 | z[..., 1] = (-2 * x[..., 0].log()).sqrt() * (2 * np.pi * x[..., 1]).sin()
39 | return z.view(shape)
40 |
41 |
42 | def inv_box_muller_transform(z: torch.FloatTensor):
43 | r"""Inverse Box-Muller transform"""
44 | shape = z.shape
45 | z = z.view(shape[:-1] + (-1, 2))
46 | x = torch.zeros_like(z, device=z.device)
47 | x[..., 0] = z.square().sum(dim=-1).div(-2).exp()
48 | x[..., 1] = torch.atan2(z[..., 1], z[..., 0]).div(2 * np.pi).add(0.5)
49 | return x.view(shape)
50 |
--------------------------------------------------------------------------------