├── .gitignore ├── README.md ├── cite.bib ├── data ├── __init__.py ├── dems.py └── hmaps.py ├── download.py ├── eval.py ├── eval_dem.py ├── example.ipynb ├── get_dem_focals.py ├── grid_planner.cpp ├── images └── visual_abstract.png ├── maps ├── heatmap_0_f.xml ├── heatmap_0_h.xml ├── heatmap_0_k.xml ├── heatmap_1_f.xml ├── heatmap_1_h.xml ├── heatmap_1_k.xml ├── heatmap_2_f.xml ├── heatmap_2_h.xml ├── heatmap_2_k.xml ├── heatmap_3_f.xml ├── heatmap_3_h.xml ├── heatmap_3_k.xml ├── heatmap_4_f.xml ├── heatmap_4_h.xml ├── heatmap_4_k.xml ├── heatmap_5_f.xml ├── heatmap_5_h.xml ├── heatmap_5_k.xml ├── heatmap_6_f.xml ├── heatmap_6_h.xml ├── heatmap_6_k.xml ├── heatmap_7_f.xml ├── heatmap_7_h.xml ├── heatmap_7_k.xml ├── heatmap_8_f.xml ├── heatmap_8_h.xml └── heatmap_8_k.xml ├── models ├── __init__.py └── autoencoder.py ├── modules ├── __init__.py ├── attention.py ├── decoder.py ├── encoder.py ├── planners.py ├── pos_emb.py └── resblock.py ├── requirements.txt ├── train.py ├── utils ├── __init__.py ├── checkpoint.py └── metrics.py └── weights ├── cf.pth ├── dem_64.pth └── focal.pth /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_store 2 | # Byte-compiled / optimized / DLL files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | pip-wheel-metadata/ 25 | share/python-wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | MANIFEST 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .nox/ 45 | .coverage 46 | .coverage.* 47 | .cache 48 | nosetests.xml 49 | coverage.xml 50 | *.cover 51 | *.py,cover 52 | .hypothesis/ 53 | .pytest_cache/ 54 | 55 | # Translations 56 | *.mo 57 | *.pot 58 | 59 | # Django stuff: 60 | *.log 61 | local_settings.py 62 | db.sqlite3 63 | db.sqlite3-journal 64 | 65 | # Flask stuff: 66 | instance/ 67 | .webassets-cache 68 | 69 | # Scrapy stuff: 70 | .scrapy 71 | 72 | # Sphinx documentation 73 | docs/_build/ 74 | 75 | # PyBuilder 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | 132 | 133 | TransPath_data 134 | wandb -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TransPath: Learning Heuristics For Grid-Based Pathfinding via Transformers 2 | [This](https://github.com/AIRI-Institute/TransPath) is the code repository for the following paper accepted at AAAI 2023: 3 | 4 | Daniil Kirilenko, Anton Andreychuk, Aleksandr Panov, Konstantin Yakovlev, "TransPath: Learning Heuristics For Grid-Based Pathfinding via Transformers", AAAI, 2023. 5 | 6 | ![Visual abstract](images/visual_abstract.png) 7 | 8 | ## Data 9 | ### Grids 10 | Train, validation, and test maps with pre-computed values mentioned in our paper are available [here](https://disk.yandex.ru/d/xLeW_jrUpTVnCA). One can download and extract it manually or just run `download.py`. 11 | 12 | ### DEM 13 | DEM data with paired imagery used in our work are available [here](https://disk.yandex.ru/d/LIMbKd4AZPEUdA). Use `get_dem_focals.py` to generate ground-truth focal values. 14 | 15 | ## Pretrained models 16 | Directory `./weights` contains parameters for some of the pre-trained models from the paper. 17 | 18 | Use `train.py` to train a model from scratch. Argument `--mode` defines the type of the model: `cf` and `f` are the models for grid-based pathfinding that predict correction factor and focal values respectively; `dem` is the model for DEM data. 19 | 20 | Use `eval.py` and `eval_dem.py` to evaluate a model on the test set. 21 | 22 | ## Examples 23 | Check `example.ipynb` for some examples of predictions and search results of our models. There are a few examples of train and out-of-distribution maps in the directory `./maps`. 24 | -------------------------------------------------------------------------------- /cite.bib: -------------------------------------------------------------------------------- 1 | @article{Kirilenko_Andreychuk_Panov_Yakovlev_2023, 2 | title={TransPath: Learning Heuristics for Grid-Based Pathfinding via Transformers}, 3 | volume={37}, 4 | url={https://ojs.aaai.org/index.php/AAAI/article/view/26465}, 5 | DOI={10.1609/aaai.v37i10.26465}, 6 | abstractNote={Heuristic search algorithms, e.g. A*, are the commonly used tools for pathfinding on grids, i.e. graphs of regular structure that are widely employed to represent environments in robotics, video games, etc. Instance-independent heuristics for grid graphs, e.g. Manhattan distance, do not take the obstacles into account, and thus the search led by such heuristics performs poorly in obstacle-rich environments. To this end, we suggest learning the instance-dependent heuristic proxies that are supposed to notably increase the efficiency of the search. The first heuristic proxy we suggest to learn is the correction factor, i.e.
the ratio between the instance-independent cost-to-go estimate and the perfect one (computed offline at the training phase). Unlike learning the absolute values of the cost-to-go heuristic function, which was known before, learning the correction factor utilizes the knowledge of the instance-independent heuristic. The second heuristic proxy is the path probability, which indicates how likely the grid cell is lying on the shortest path. This heuristic can be employed in the Focal Search framework as the secondary heuristic, allowing us to preserve the guarantees on the bounded sub-optimality of the solution. We learn both suggested heuristics in a supervised fashion with the state-of-the-art neural networks containing attention blocks (transformers). We conduct a thorough empirical evaluation on a comprehensive dataset of planning tasks, showing that the suggested techniques i) reduce the computational effort of the A* up to a factor of 4x while producing the solutions, whose costs exceed those of the optimal solutions by less than 0.3% on average; ii) outperform the competitors, which include the conventional techniques from the heuristic search, i.e. weighted A*, as well as the state-of-the-art learnable planners. The project web-page is: https://airi-institute.github.io/TransPath/.}, 7 | number={10}, 8 | journal={Proceedings of the AAAI Conference on Artificial Intelligence}, 9 | author={Kirilenko, Daniil and Andreychuk, Anton and Panov, Aleksandr and Yakovlev, Konstantin}, 10 | year={2023}, 11 | month={Jun.}, 12 | pages={12436-12443} 13 | } 14 | -------------------------------------------------------------------------------- /data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/data/__init__.py -------------------------------------------------------------------------------- /data/dems.py: -------------------------------------------------------------------------------- 1 | from torch.utils.data import Dataset 2 | import numpy as np 3 | import torch 4 | 5 | def sg2img(start, goal, img_size=128): 6 | img = torch.zeros((2, img_size, img_size)) 7 | img[0, start[0], start[1]] = 1 8 | img[1, goal[0], goal[1]] = 1 9 | return img.float() 10 | 11 | 12 | class DemData(Dataset): 13 | def __init__(self, split='train'): 14 | data = np.load(split + '.npz') 15 | data_focal = np.load(split + '_focal.npz') 16 | self.dems = torch.tensor(data['dem']).float() 17 | self.starts = data_focal['start'] 18 | self.goals = data_focal['goal'] 19 | self.focal = torch.tensor(data_focal['focal']).float() 20 | self.rgb = torch.tensor(data['rgb']).float() / 255. 
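# Data layout note (descriptive, based on the loading code above): np.load(split + '.npz')
# supplies the 'dem' and 'rgb' arrays, while np.load(split + '_focal.npz') supplies the
# 'start', 'goal' and 'focal' arrays produced by get_dem_focals.py. Each DEM map comes with
# 10 start/goal tasks, which is why __len__ below reports len(self.dems) * 10 and
# __getitem__ splits the flat index into (map_idx, task_idx); every DEM is min-max
# normalized per map before being returned.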
21 | self.img_size = self.rgb.shape[-1] 22 | 23 | def __len__(self): 24 | return len(self.dems) * 10 25 | 26 | def __getitem__(self, idx): 27 | map_idx, task_idx = idx // 10, idx % 10 28 | dem = self.dems[map_idx] 29 | dem = dem - dem.min() 30 | dem = dem / dem.max() 31 | rgb = self.rgb[map_idx] 32 | start = self.starts[map_idx][task_idx] 33 | goal = self.goals[map_idx][task_idx] 34 | focal = self.focal[map_idx][task_idx] 35 | sg = sg2img(start, goal, img_size=self.img_size) 36 | return dem, rgb, sg, focal 37 | -------------------------------------------------------------------------------- /data/hmaps.py: -------------------------------------------------------------------------------- 1 | import os 2 | import torch 3 | import numpy as np 4 | import xml.etree.ElementTree as ET 5 | from torch.utils.data import Dataset 6 | 7 | 8 | def proc_grid(grid): 9 | rows = [] 10 | for row in grid: 11 | rows.append([float(i) for i in row.text.split()]) 12 | return np.array(rows) 13 | 14 | 15 | class PathData(Dataset): 16 | def __init__(self, xml_path, koef_path, h_path, grid_size=64, limit_k=1, clip_value=0.): 17 | self.img_size = grid_size 18 | self.xml_path = xml_path 19 | self.koef_path = koef_path 20 | self.h_path = h_path 21 | self.file_names = os.listdir(self.xml_path) 22 | self.size = len(self.file_names) // limit_k 23 | self.grid_size = grid_size 24 | self.clip_value = clip_value 25 | 26 | def __len__(self): 27 | return self.size 28 | 29 | def __getitem__(self, idx): 30 | tree = ET.parse(os.path.join(self.xml_path, self.file_names[idx])) 31 | 32 | root = tree.getroot() 33 | 34 | map_designs = ((torch.tensor(proc_grid(root[0][6])) == 1) * 1.).reshape(1, self.grid_size, self.grid_size) 35 | hm = torch.FloatTensor(proc_grid(root[1][2])).reshape(1, self.grid_size, self.grid_size) 36 | if self.clip_value != 0: 37 | hm = torch.where(hm >= self.clip_value, hm, torch.zeros_like(hm)) 38 | 39 | start_maps = torch.zeros_like(map_designs) 40 | start = (int(root[0][1].text), int(root[0][0].text)) 41 | start_maps[0][start[0], start[1]] = 1 42 | 43 | goal_maps = torch.zeros_like(map_designs) 44 | goal = (int(root[0][3].text), int(root[0][2].text)) 45 | goal_maps[0][goal[0], goal[1]] = 1 46 | 47 | koef = torch.zeros_like(hm) 48 | if self.koef_path is not None: 49 | tree = ET.parse(os.path.join(self.koef_path, self.file_names[idx])) 50 | root = tree.getroot() 51 | koef = torch.FloatTensor(proc_grid(root[1][2])).reshape(1, self.grid_size, self.grid_size) 52 | 53 | h = torch.zeros_like(hm) 54 | if self.h_path is not None: 55 | tree = ET.parse(os.path.join(self.h_path, self.file_names[idx])) 56 | root = tree.getroot() 57 | h = torch.FloatTensor(proc_grid(root[1][2])).reshape(1, self.grid_size, self.grid_size) 58 | 59 | return map_designs, start_maps, goal_maps, hm, koef, h 60 | 61 | 62 | class OODMaps(Dataset): 63 | def __init__(self, xml_path, grid_size=64, clip_value=0.): 64 | self.img_size = grid_size 65 | self.xml_path = xml_path 66 | self.file_names = os.listdir(self.xml_path) 67 | self.size = len(self.file_names) // 3 68 | self.grid_size = grid_size 69 | self.clip_value = clip_value 70 | 71 | def __len__(self): 72 | return self.size 73 | 74 | def __getitem__(self, idx): 75 | tree = ET.parse(os.path.join(self.xml_path, f'heatmap_{idx}_f.xml')) 76 | 77 | root = tree.getroot() 78 | 79 | map_designs = ((torch.tensor(proc_grid(root[0][6])) == 1) * 1.).reshape(1, self.grid_size, self.grid_size) 80 | hm = torch.FloatTensor(proc_grid(root[1][2])).reshape(1, self.grid_size, self.grid_size) 81 | if self.clip_value != 
0: 82 | hm = torch.where(hm >= self.clip_value, hm, torch.zeros_like(hm)) 83 | 84 | start_maps = torch.zeros_like(map_designs) 85 | start = (int(root[0][1].text), int(root[0][0].text)) 86 | start_maps[0][start[0], start[1]] = 1 87 | 88 | goal_maps = torch.zeros_like(map_designs) 89 | goal = (int(root[0][3].text), int(root[0][2].text)) 90 | goal_maps[0][goal[0], goal[1]] = 1 91 | 92 | koef = torch.zeros_like(hm) 93 | tree = ET.parse(os.path.join(self.xml_path, f'heatmap_{idx}_k.xml')) 94 | root = tree.getroot() 95 | koef = torch.FloatTensor(proc_grid(root[1][2])).reshape(1, self.grid_size, self.grid_size) 96 | 97 | h = torch.zeros_like(hm) 98 | tree = ET.parse(os.path.join(self.xml_path, f'heatmap_{idx}_h.xml')) 99 | root = tree.getroot() 100 | h = torch.FloatTensor(proc_grid(root[1][2])).reshape(1, self.grid_size, self.grid_size) 101 | 102 | return map_designs, start_maps, goal_maps, hm, koef, h 103 | 104 | 105 | class GridData(Dataset): 106 | """ 107 | 'mode' argument defines type of ground truth values: 108 | f - focal values 109 | h - absolute ideal heuristic values 110 | cf - correction factor values 111 | """ 112 | def __init__(self, path, mode='f', clip_value=0.95, img_size=64): 113 | self.img_size = img_size 114 | self.clip_v = clip_value 115 | self.mode = mode 116 | 117 | self.maps = np.load(os.path.join(path, 'maps.npy'), mmap_mode='c') 118 | self.goals = np.load(os.path.join(path, 'goals.npy'), mmap_mode='c') 119 | self.starts = np.load(os.path.join(path, 'starts.npy'), mmap_mode='c') 120 | 121 | file_gt = {'f' : 'focal.npy', 'h':'abs.npy', 'cf': 'cf.npy'}[mode] 122 | self.gt_values = np.load(os.path.join(path, file_gt), mmap_mode='c') 123 | 124 | 125 | def __len__(self): 126 | return len(self.gt_values) 127 | 128 | 129 | 130 | def __getitem__(self, idx): 131 | gt_ = torch.from_numpy(self.gt_values[idx].astype('float32')) 132 | if self.mode == 'f': 133 | gt_= torch.where( gt_ >= self.clip_v, gt_ , torch.zeros_like( torch.from_numpy(self.gt_values[idx]))) 134 | return (torch.from_numpy(self.maps[idx].astype('float32')), 135 | torch.from_numpy(self.starts[idx].astype('float32')), 136 | torch.from_numpy(self.goals[idx].astype('float32')), 137 | gt_ ) 138 | -------------------------------------------------------------------------------- /download.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from urllib.parse import urlencode 3 | import zipfile 4 | 5 | 6 | def main(): 7 | base_url = 'https://cloud-api.yandex.net/v1/disk/public/resources/download?' 
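# Descriptive note on the download flow: the public share link is passed to the
# Yandex Disk public API as the 'public_key' query parameter, the JSON response
# provides a direct download link in its 'href' field, and the fetched archive is
# then unpacked into ./TransPath_data by the code below.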
8 | public_key = 'https://disk.yandex.ru/d/xLeW_jrUpTVnCA' 9 | 10 | final_url = base_url + urlencode(dict(public_key=public_key)) 11 | response = requests.get(final_url) 12 | download_url = response.json()['href'] 13 | print('downloading...') 14 | download_response = requests.get(download_url) 15 | with open('TransPath_data.zip', 'wb') as f: 16 | f.write(download_response.content) 17 | print('extracting...') 18 | with zipfile.ZipFile('TransPath_data.zip', 'r') as zip_ref: 19 | zip_ref.extractall('./TransPath_data') 20 | print('done!') 21 | 22 | if __name__ == '__main__': 23 | main() 24 | -------------------------------------------------------------------------------- /eval.py: -------------------------------------------------------------------------------- 1 | from models.autoencoder import Autoencoder 2 | from data.hmaps import GridData 3 | from modules.planners import DifferentiableDiagAstar, get_diag_heuristic 4 | 5 | import pytorch_lightning as pl 6 | from torch.utils.data import DataLoader 7 | import torch 8 | from tqdm import tqdm 9 | 10 | import argparse 11 | 12 | 13 | def main(mode, state_dict_path, hardness_limit=1.05): 14 | device = 'cuda' 15 | 16 | test_data = GridData( 17 | path='./TransPath_data/test', 18 | mode=mode 19 | ) 20 | test_dataloader = DataLoader(test_data, batch_size=256, 21 | shuffle=False, num_workers=0, pin_memory=True) 22 | model = Autoencoder(mode=mode) 23 | model.load_state_dict(torch.load(state_dict_path)) 24 | model.to(device) 25 | model.eval() 26 | 27 | vanilla_planner = DifferentiableDiagAstar(mode='default', h_w=1) 28 | if mode == 'cf': 29 | learnable_planner = DifferentiableDiagAstar(mode='k') 30 | else: 31 | learnable_planner = DifferentiableDiagAstar(mode=mode, f_w=100) 32 | vanilla_planner.to(device) 33 | learnable_planner.to(device) 34 | 35 | expansions_ratio = [] 36 | cost_ratio = [] 37 | hardness = [] 38 | 39 | for batch in tqdm(test_dataloader): 40 | with torch.no_grad(): 41 | map_design, start, goal, gt_heatmap = batch 42 | inputs = torch.cat([map_design, start + goal], dim=1) if mode == 'f' else torch.cat([map_design, goal], dim=1) 43 | inputs = inputs.to(device) 44 | 45 | predictions = (model(inputs) + 1) / 2 46 | learn_outputs = learnable_planner( 47 | predictions.to(device), 48 | start.to(device), 49 | goal.to(device), 50 | ((map_design == 0)*1.).to(device) 51 | ) 52 | vanilla_outputs = vanilla_planner( 53 | ((map_design == 0)*1.).to(device), 54 | start.to(device), 55 | goal.to(device), 56 | ((map_design == 0)*1.).to(device) 57 | ) 58 | expansions_ratio.append(((learn_outputs.histories).sum((-1, -2, -3))) / ((vanilla_outputs.histories).sum((-1, -2, -3)))) 59 | learn_costs = (learn_outputs.g * goal.to(device)).sum((-1, -2, -3)) 60 | vanilla_costs = (vanilla_outputs.g * goal.to(device)).sum((-1, -2, -3)) 61 | cost_ratio.append(learn_costs / vanilla_costs) 62 | start_heur = (get_diag_heuristic(goal[:, 0].to(device)) * start[:, 0].to(device)).sum((-1, -2)) 63 | hardness.append(vanilla_costs / start_heur) 64 | 65 | expansions_ratio = torch.cat(expansions_ratio, dim=0) 66 | cost_ratio = torch.cat(cost_ratio, dim=0) 67 | hardness = torch.cat(hardness, dim=0) 68 | mask = torch.where(hardness >= hardness_limit, torch.ones_like(hardness), torch.zeros_like(hardness)) 69 | n = mask.sum() 70 | expansions_ratio = (expansions_ratio * mask).sum() / n 71 | cost_ratio = (cost_ratio * mask).sum() / n 72 | 73 | print(f'expansions_ratio: {expansions_ratio}, cost_ratio: {cost_ratio}') 74 | 75 | if __name__ == '__main__': 76 | parser = argparse.ArgumentParser() 77 
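# Usage sketch (assumes the checkpoints shipped in ./weights): --mode 'f' evaluates the
# focal-values model with the Focal Search planner (f_w=100), while --mode 'cf' evaluates
# the correction-factor model with the planner's 'k' mode; --weights_path must point to the
# matching state dict (e.g. ./weights/focal.pth or ./weights/cf.pth). The script reports
# expansions_ratio and cost_ratio against vanilla A*, averaged over tasks whose hardness is
# at least hardness_limit (1.05 by default).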
| parser.add_argument('--mode', type=str, choices=['f', 'cf'], default='f') 78 | parser.add_argument('--seed', type=int, default=39) 79 | parser.add_argument('--weights_path', type=str, default='./weights/focal.pth') 80 | 81 | args = parser.parse_args() 82 | pl.seed_everything(args.seed) 83 | 84 | main( 85 | mode=args.mode, 86 | state_dict_path=args.weights_path, 87 | ) 88 | -------------------------------------------------------------------------------- /eval_dem.py: -------------------------------------------------------------------------------- 1 | from models.autoencoder import DemAutoencoder 2 | from data.dems import DemData 3 | 4 | import cppimport.import_hook 5 | from grid_planner import grid_planner 6 | 7 | from torch.utils.data import DataLoader 8 | import torch 9 | from tqdm import tqdm 10 | import numpy as np 11 | 12 | import argparse 13 | import os 14 | 15 | 16 | def get_predictions(name='test', ckpt_path='./model.ckpt'): 17 | dataset = DemData(split=name) 18 | dataloader = DataLoader(dataset, batch_size=10, shuffle=False, num_workers=0, pin_memory=True) 19 | model = DemAutoencoder(resolution=(dataset.img_size, dataset.img_size)) 20 | model.load_state_dict(torch.load(ckpt_path, map_location='cpu')['state_dict']) 21 | model.eval() 22 | predictions_dem = [] 23 | predictions_focal = [] 24 | for batch in tqdm(dataloader): 25 | with torch.no_grad(): 26 | dem, rgb, sg, focal = batch 27 | inputs = torch.cat([rgb, sg], dim=1) 28 | predictions = (model(inputs) + 1) / 2 29 | predictions_dem.append(predictions[:, 0].numpy()) 30 | predictions_focal.append(predictions[:, 1].numpy()) 31 | predictions_dem = np.stack(predictions_dem, axis=0) 32 | predictions_focal = np.stack(predictions_focal, axis=0) 33 | np.savez(name + '_predictions.npz', dem=predictions_dem, focal=predictions_focal) 34 | print('Saved predictions to ' + name + '_predictions.npz') 35 | return predictions_dem, predictions_focal 36 | 37 | 38 | def get_metrics(name='test', ckpt_path='./model.ckpt'): 39 | source_data = np.load(name + '.npz') 40 | source_focal = np.load(name + '_focal.npz') 41 | gt_dem = source_data['dem'] 42 | starts = source_focal['start'] 43 | goals = source_focal['goal'] 44 | gt_focal = source_focal['focal'] 45 | if os.path.exists(name + '_predictions.npz'): 46 | print('loading predictions') 47 | predictions = np.load(name + '_predictions.npz') 48 | predictions_dem = predictions['dem'] 49 | predictions_focal = predictions['focal'] 50 | else: 51 | predictions_dem, predictions_focal = get_predictions(name, ckpt_path) 52 | gt_dem_num = [] 53 | pred_dem_num = [] 54 | pred_focal_num = [] 55 | for i in tqdm(range(len(gt_dem))): 56 | for j in range(10): 57 | # search with A* and gt-dem 58 | planner = grid_planner(gt_dem[i][0].tolist()) 59 | gt_dem_path = planner.find_path(starts[i][j], goals[i][j]) 60 | gt_dem_num.append(planner.get_num_expansions()) 61 | # search with A* and pred-dem 62 | planner = grid_planner((predictions_dem[i][j] * 255.).tolist()) 63 | pred_dem_path = planner.find_path(starts[i][j], goals[i][j]) 64 | pred_dem_num.append(planner.get_num_expansions()) 65 | # focal search with predicted dem and focal values 66 | planner = grid_planner((predictions_dem[i][j] * 255.)) 67 | pred_focal_path = planner.find_focal_path_reexpand(starts[i][j], goals[i][j], predictions_focal[i][j].tolist()) 68 | pred_focal_num.append(planner.get_num_expansions()) 69 | 70 | gt_dem_num = np.array(gt_dem_num) 71 | pred_dem_num = np.array(pred_dem_num) 72 | pred_focal_num = np.array(pred_focal_num) 73 | 74 | 
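# Metric note (describes the ratios computed just below): focal2pred compares the expansions
# of focal search guided by the predicted heatmap with plain A* on the predicted DEM,
# pred2gt compares A* on the predicted DEM with A* on the ground-truth DEM, and the general
# ratio divides focal-search expansions by ground-truth A* expansions; values below 1.0
# therefore mean fewer node expansions than the corresponding baseline.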
focal2pred_ratio_mean = (pred_focal_num / pred_dem_num).mean() 75 | pred2gt_ratio_mean = (pred_dem_num / gt_dem_num).mean() 76 | general_ratio_mean = (pred_focal_num / gt_dem_num).mean() 77 | 78 | print(f'Focal2pred ratio: {focal2pred_ratio_mean:.3f}') 79 | print(f'Pred2gt ratio: {pred2gt_ratio_mean:.3f}') 80 | print(f'General ratio:{general_ratio_mean:.3f}') 81 | 82 | 83 | if __name__ == '__main__': 84 | parser = argparse.ArgumentParser() 85 | parser.add_argument('--ckpt_path', type=str, default='./weights/dem_64.ckpt') 86 | 87 | args = parser.parse_args() 88 | get_metrics(ckpt_path=args.ckpt_path, name='./test') 89 | -------------------------------------------------------------------------------- /get_dem_focals.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import numpy as np 3 | from tqdm import tqdm 4 | import cppimport.import_hook 5 | from grid_planner import grid_planner 6 | 7 | def generate_tasks(num_tasks, grid_size): 8 | tasks = [] 9 | while len(tasks) < num_tasks: 10 | coords = np.random.randint(0, grid_size, 4) 11 | if abs(coords[0] - coords[2]) + abs(coords[1] - coords[3]) > grid_size: 12 | tasks.append({'start': (coords[0], coords[1]), 'goal': (coords[2], coords[3])}) 13 | return tasks 14 | 15 | def get_focal_values(dem): 16 | results = [] 17 | starts = [] 18 | goals = [] 19 | dem = dem[0] 20 | planner = grid_planner(dem.tolist()) 21 | tasks = generate_tasks(10, dem.shape[0]) 22 | for task in tasks: 23 | starts.append(np.array(task['start'])) 24 | goals.append(np.array(task['goal'])) 25 | results.append(planner.find_heatmap(task['start'], task['goal'])) 26 | return np.stack(results)[:, None, :, :], np.stack(starts), np.stack(goals) 27 | 28 | def proc_file(filename): 29 | split = filename[:-4] 30 | new_filename = split + '_focal.npz' 31 | focals, starts, goals = [], [], [] 32 | dems = np.load(filename)['dem'] 33 | for dem in tqdm(dems): 34 | focal, start, goal = get_focal_values(dem) 35 | focals.append(focal) 36 | starts.append(start) 37 | goals.append(goal) 38 | np.savez(new_filename, focal=np.stack(focals), start=np.stack(starts), goal=np.stack(goals)) 39 | 40 | 41 | def main(): 42 | parser = argparse.ArgumentParser() 43 | parser.add_argument('--filenames', nargs='+', type=str, default=['./val.npz', './train.npz', './test.npz']) 44 | args = parser.parse_args() 45 | for filename in args.filenames: 46 | proc_file(filename) 47 | 48 | 49 | if __name__ == '__main__': 50 | main() 51 | -------------------------------------------------------------------------------- /grid_planner.cpp: -------------------------------------------------------------------------------- 1 | // cppimport 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #define INF 1000000000 13 | namespace py = pybind11; 14 | 15 | struct Node { 16 | int i; 17 | int j; 18 | float g; 19 | float h; 20 | float f; 21 | std::pair parent; 22 | 23 | Node(int _i = INF, int _j = INF, float _g = INF, float _h = 0) : i(_i), j(_j), g(_g), h(_h), f(_g+_h){} 24 | bool operator<(const Node& other) const 25 | { 26 | return this->f < other.f or 27 | (this->f == other.f and (this->g < other.g or 28 | (this->g == other.g and (this->i < other.i or 29 | (this->i == other.i and this->j < other.j))))); 30 | } 31 | bool operator>(const Node& other) const 32 | { 33 | return this->f > other.f or 34 | (this->f == other.f and (this->g > other.g or 35 | (this->g == other.g and (this->i > other.i or 36 | 
(this->i == other.i and this->j > other.j))))); 37 | } 38 | bool operator==(const Node& other) const 39 | { 40 | return this->i == other.i and this->j == other.j; 41 | } 42 | bool operator==(const std::pair &other) const 43 | { 44 | return this->i == other.first and this->j == other.second; 45 | } 46 | }; 47 | 48 | struct FNode 49 | { 50 | Node* node; 51 | float h2; 52 | FNode(Node* _node, float _h2):node(_node), h2(_h2){} 53 | bool operator<(const FNode& other) const 54 | { 55 | return this->h2 > other.h2 or (this->h2 == other.h2 and this->node->h < other.node->h); 56 | } 57 | bool operator>(const FNode& other) const 58 | { 59 | return this->h2 < other.h2 or (this->h2 == other.h2 and this->node->h > other.node->h); 60 | } 61 | bool operator==(const FNode& other) const 62 | { 63 | return *this->node == *other.node; 64 | } 65 | }; 66 | 67 | class grid_planner 68 | { 69 | std::pair start; 70 | std::pair goal; 71 | std::priority_queue, std::greater> OPEN; 72 | std::priority_queue, std::greater> FOCAL; 73 | std::vector> grid; 74 | std::vector> nodes; 75 | std::list> expanded_nodes; 76 | std::vector> expanded_flags; 77 | 78 | inline float h(std::pair n) 79 | { 80 | int di = n.first - goal.first; 81 | int dj = n.second - goal.second; 82 | return std::min(di, dj)*std::sqrt(2.0) + std::abs(di - dj); 83 | } 84 | 85 | std::vector> get_neighbors(std::pair node) 86 | { 87 | std::vector> neighbors; 88 | std::vector> deltas = {{0,1},{1,0},{-1,0},{0,-1},{1,1},{1,-1},{-1,1},{-1,-1}}; 89 | for(auto d:deltas) 90 | { 91 | std::pair n(node.first + d.first, node.second + d.second); 92 | if(n.first >= 0 and n.first < int(grid.size()) and n.second >= 0 and n.second < int(grid.front().size())) 93 | neighbors.push_back(n); 94 | } 95 | return neighbors; 96 | } 97 | 98 | void compute_shortest_path() 99 | { 100 | Node current; 101 | while(!OPEN.empty() and !(current == goal)) 102 | { 103 | current = OPEN.top(); 104 | OPEN.pop(); 105 | if(nodes[current.i][current.j].g < current.g) 106 | continue; 107 | expanded_nodes.push_back({current.i, current.j}); 108 | for(auto n: get_neighbors({current.i, current.j})) { 109 | float cost = std::abs(grid[current.i][current.j] - grid[n.first][n.second]); 110 | if(current.i != n.first and current.j != n.second) 111 | cost += std::sqrt(2.0); 112 | else 113 | cost += 1.0; 114 | if(nodes[n.first][n.second].g > current.g + cost) 115 | { 116 | OPEN.push(Node(n.first, n.second, current.g + cost, h(n))); 117 | nodes[n.first][n.second].g = current.g + cost; 118 | nodes[n.first][n.second].parent = {current.i, current.j}; 119 | } 120 | } 121 | } 122 | } 123 | void reset(std::pair s, std::pair g) 124 | { 125 | start = s; 126 | goal = g; 127 | for(size_t i=0; i, std::greater>(); 132 | OPEN.push(Node(start.first, start.second, 0, h(start))); 133 | FOCAL = std::priority_queue, std::greater>(); 134 | FOCAL.push(FNode(&nodes[start.first][start.second], 1)); 135 | expanded_nodes.clear(); 136 | } 137 | public: 138 | std::list> find_focal_path(std::pair s, std::pair g, std::vector> heatmap) 139 | { 140 | reset(s, g); 141 | Node current; 142 | expanded_flags = std::vector>(heatmap.size(), std::vector(heatmap.front().size(), 0)); 143 | while(!FOCAL.empty() and !(current == goal)) 144 | { 145 | current = *FOCAL.top().node; 146 | FOCAL.pop(); 147 | if(expanded_flags[current.i][current.j] == 1) 148 | continue; 149 | expanded_flags[current.i][current.j] = 1; 150 | expanded_nodes.push_back({current.i, current.j}); 151 | for(auto n: get_neighbors({current.i, current.j})) { 152 | float cost = 
std::abs(grid[current.i][current.j] - grid[n.first][n.second]); 153 | if(current.i != n.first and current.j != n.second) 154 | cost += std::sqrt(2.0); 155 | else 156 | cost += 1.0; 157 | if(expanded_flags[n.first][n.second] == 0) 158 | { 159 | nodes[n.first][n.second].g = current.g + cost; 160 | nodes[n.first][n.second].h = h(n); 161 | nodes[n.first][n.second].f = nodes[n.first][n.second].g + nodes[n.first][n.second].h; 162 | nodes[n.first][n.second].parent = {current.i, current.j}; 163 | FOCAL.push(FNode(&nodes[n.first][n.second], heatmap[n.first][n.second])); 164 | } 165 | } 166 | } 167 | return get_path(); 168 | } 169 | 170 | std::list> find_focal_path_reexpand(std::pair s, std::pair g, std::vector> heatmap) 171 | { 172 | reset(s, g); 173 | Node current; 174 | while(!FOCAL.empty() and !(current == goal)) 175 | { 176 | current = *FOCAL.top().node; 177 | FOCAL.pop(); 178 | if(nodes[current.i][current.j].g < current.g ) 179 | continue; 180 | expanded_nodes.push_back({current.i, current.j}); 181 | for(auto n: get_neighbors({current.i, current.j})) { 182 | float cost = std::abs(grid[current.i][current.j] - grid[n.first][n.second]); 183 | if(current.i != n.first and current.j != n.second) 184 | cost += std::sqrt(2.0); 185 | else 186 | cost += 1.0; 187 | if(nodes[n.first][n.second].g > current.g + cost) 188 | { 189 | nodes[n.first][n.second].g = current.g + cost; 190 | nodes[n.first][n.second].h = h(n); 191 | nodes[n.first][n.second].f = nodes[n.first][n.second].g + nodes[n.first][n.second].h; 192 | nodes[n.first][n.second].parent = {current.i, current.j}; 193 | FOCAL.push(FNode(&nodes[n.first][n.second], heatmap[n.first][n.second])); 194 | } 195 | } 196 | } 197 | return get_path(); 198 | } 199 | 200 | int get_num_expansions() 201 | { 202 | return expanded_nodes.size(); 203 | } 204 | 205 | float get_path_cost() 206 | { 207 | return nodes[goal.first][goal.second].g; 208 | } 209 | 210 | grid_planner(std::vector> _grid):grid(_grid) 211 | { 212 | nodes = std::vector>(grid.size(), std::vector(grid.front().size(), Node())); 213 | } 214 | 215 | std::list> find_path(std::pair s, std::pair g) 216 | { 217 | reset(s, g); 218 | compute_shortest_path(); 219 | return get_path(); 220 | } 221 | 222 | std::list> get_path() 223 | { 224 | std::list> path; 225 | std::pair next_node(INF,INF); 226 | if(nodes[goal.first][goal.second].g < INF) 227 | next_node = goal; 228 | if(next_node.first < INF and (next_node.first != start.first or next_node.second != start.second)) 229 | { 230 | while (nodes[next_node.first][next_node.second].parent != start) { 231 | path.push_back(next_node); 232 | next_node = nodes[next_node.first][next_node.second].parent; 233 | } 234 | path.push_back(next_node); 235 | path.push_back(start); 236 | path.reverse(); 237 | } 238 | return path; 239 | } 240 | 241 | std::vector> find_heatmap(std::pair s, std::pair g) 242 | { 243 | reset(s, {-1,-1}); 244 | compute_shortest_path(); 245 | std::vector> heatmap(grid.size(), std::vector(grid.size(), 0)); 246 | for(size_t i = 0; i < grid.size(); i++) 247 | for(size_t j = 0; j < grid.size(); j++) 248 | heatmap[i][j] = nodes[i][j].g; 249 | reset(g, {-1,-1}); 250 | compute_shortest_path(); 251 | for(size_t i = 0; i < grid.size(); i++) 252 | for(size_t j = 0; j < grid.size(); j++) 253 | heatmap[i][j] += nodes[i][j].g; 254 | 255 | reset(s,g); 256 | compute_shortest_path(); 257 | auto path = get_path(); 258 | reset(path.front(), {-1,-1}); 259 | OPEN = std::priority_queue, std::greater>(); 260 | for(auto it = path.begin(); it != path.end(); it++) 261 | { 262 | 
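// Descriptive note: the sweeps above accumulated, for every cell, its shortest-path
// distance from the start plus its distance from the goal, and the last search extracted
// the reference start-goal path. The loop we are in now seeds every cell of that path with
// g = 0 (and pushes it into OPEN) so the next compute_shortest_path() call measures the
// distance to the path. Once the three terms are summed per cell, each entry is set to
// min_g / heatmap[i][j], so cells on the optimal path get value 1 and the value decays as
// the induced detour grows.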
nodes[it->first][it->second].g = 0; 263 | OPEN.push(Node(it->first, it->second, 0, 0)); 264 | } 265 | compute_shortest_path(); 266 | float min_g = INF; 267 | for(size_t i = 0; i < grid.size(); i++) 268 | for(size_t j = 0; j < grid.size(); j++) 269 | { 270 | heatmap[i][j] += nodes[i][j].g; 271 | min_g = std::fmin(min_g, heatmap[i][j]); 272 | } 273 | for(size_t i = 0; i < grid.size(); i++) 274 | for(size_t j = 0; j < grid.size(); j++) 275 | heatmap[i][j] = min_g/heatmap[i][j]; 276 | return heatmap; 277 | } 278 | 279 | std::vector> get_expansions() 280 | { 281 | std::vector> expansions(grid.size(), std::vector(grid.size(), 0)); 282 | for(size_t i = 0; i < grid.size(); i++) 283 | for(size_t j = 0; j < grid.size(); j++) 284 | if(nodes[i][j].g < INF) 285 | expansions[i][j] = 1; 286 | for(auto n: expanded_nodes) 287 | expansions[n.first][n.second] = 2; 288 | return expansions; 289 | } 290 | }; 291 | 292 | PYBIND11_MODULE(grid_planner, m) { 293 | py::class_(m, "grid_planner") 294 | .def(py::init>>()) 295 | .def("find_path", &grid_planner::find_path) 296 | .def("get_path", &grid_planner::get_path) 297 | .def("find_heatmap", &grid_planner::find_heatmap) 298 | .def("get_expansions", &grid_planner::get_expansions) 299 | .def("get_num_expansions", &grid_planner::get_num_expansions) 300 | .def("get_path_cost", &grid_planner::get_path_cost) 301 | .def("find_focal_path", &grid_planner::find_focal_path) 302 | .def("find_focal_path_reexpand", &grid_planner::find_focal_path_reexpand); 303 | } 304 | 305 | /* 306 | <% 307 | setup_pybind11(cfg) 308 | %> 309 | */ -------------------------------------------------------------------------------- /images/visual_abstract.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/images/visual_abstract.png -------------------------------------------------------------------------------- /maps/heatmap_0_f.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 20 5 | 55 6 | 44 7 | 33 8 | 64 9 | 64 10 | 11 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 12 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 13 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 14 | 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 15 | 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 16 | 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 1 1 1 1 0 0 0 1 0 0 0 0 1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 1 1 0 0 0 0 0 17 | 1 0 0 1 1 0 0 0 1 1 0 0 0 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 18 | 1 0 0 0 1 0 0 0 1 1 0 0 0 1 1 1 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 19 | 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20 | 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 1 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 21 | 1 1 0 0 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 1 1 1 1 1 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 22 | 1 1 0 0 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 23 | 0 1 0 0 0 1 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 0 1 1 1 24 | 0 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 0 25 | 0 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 26 | 0 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 27 | 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 0 0 0 1 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 28 | 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 29 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 1 1 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 1 1 30 | 0 0 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 1 1 0 1 0 1 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 1 0 31 | 0 0 0 0 0 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 32 | 0 0 0 0 0 1 0 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 33 | 0 1 1 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 1 1 0 0 0 34 | 0 0 0 1 0 0 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 35 | 1 1 0 1 0 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 36 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 37 | 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 38 | 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 39 | 0 0 1 1 1 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 40 | 1 1 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 41 | 0 0 0 0 0 1 0 1 0 0 0 0 0 1 1 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 42 | 0 0 0 0 0 1 1 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 0 1 1 0 0 0 0 0 0 0 0 43 | 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 1 0 1 1 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 44 | 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 1 1 0 1 1 1 1 0 0 0 1 0 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 45 | 1 1 0 1 0 0 0 0 0 1 0 1 1 1 1 1 0 0 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 46 | 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 47 | 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 1 1 1 1 0 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 48 | 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 1 1 1 0 0 0 1 1 0 0 1 0 0 0 0 1 1 0 0 0 0 0 1 49 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 1 50 | 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 51 | 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 52 | 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 53 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 54 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 55 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 56 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 57 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 58 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 0 0 0 1 0 0 1 1 59 | 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 1 0 0 0 1 0 1 1 1 60 | 1 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 61 | 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 62 | 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 63 | 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 0 0 64 | 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 1 1 1 0 0 1 0 0 0 65 | 0 0 1 1 1 0 1 1 1 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 0 0 0 0 0 0 1 66 | 0 1 1 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 0 1 67 | 0 1 1 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 68 | 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 69 | 0 0 1 1 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 70 | 0 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 71 | 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 72 | 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 73 | 0 0 0 1 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 74 | 0 0 0 1 1 1 0 1 1 1 0 1 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 75 | 76 | 77 | 
78 | 64 79 | 64 80 | 81 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 82 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 83 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 84 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 85 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 86 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 87 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 88 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 89 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 90 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 91 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 92 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 93 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 94 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 95 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 96 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 97 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 98 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 99 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 100 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 101 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 102 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 103 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0.02 0.03 0 0 0.02 0.02 0.02 0.02 0.02 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 104 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0.01 0.02 0 0 0 0.02 0.01 0.02 0.02 0 0.03 0.03 0.04 0.04 0 0.03 0.03 0.03 0.03 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 105 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0.01 0.01 0.01 0 0 0.01 0.02 0.02 0.01 0 0 0.02 0.03 0.03 0 
0.04 0.05 0.06 0.06 0 0.04 0.05 0.05 0.05 0.04 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 106 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0.01 0.01 0 0 0.01 0.01 0.02 0.02 0.02 0 0 0.03 0.04 0.05 0 0.06 0.07 0.09 0.09 0 0 0.07 0.07 0.07 0.06 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 107 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0.01 0.01 0.02 0.02 0.02 0 0 0.05 0.06 0.07 0 0 0.11 0.13 0.13 0 0 0.1 0.1 0.1 0.09 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 108 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0 0.01 0.02 0.02 0.02 0.01 0 0.07 0.09 0.1 0 0 0.15 0.2 0.2 0.2 0 0.16 0.16 0.16 0.12 0.09 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0 109 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0 0.01 0.02 0.02 0.02 0.01 0 0.1 0.13 0.16 0.19 0 0.2 0.27 0.31 0.31 0 0.24 0.24 0.21 0.16 0.12 0 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 110 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0 0.01 0.02 0.02 0.02 0.01 0 0 0.18 0.24 0.29 0 0 0.37 0.42 0.42 0 0.37 0.33 0.29 0.21 0.16 0.1 0.07 0.05 0.03 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 111 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0 0.01 0.01 0 0 0.01 0.02 0.02 0.02 0.01 0 0 0.24 0.33 0.45 0.45 0.45 0.51 0.51 0.51 0.51 0.51 0.45 0.39 0.29 0.19 0.12 0.08 0.05 0 0.02 0 0 0.01 0.01 0 0 0 0 0 0 0 112 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0.01 0.02 0.02 0.02 0.01 0 0 0.33 0.45 0.62 0.71 0.71 0.62 0.62 0.62 0.62 0.62 0.62 0.54 0 0.16 0.1 0 0 0 0 0.02 0 0 0.01 0.01 0 0 0 0 0 0 113 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0.01 0.02 0.02 0.02 0.01 0.01 0 0.45 0.62 1 1 1 1 0.62 0.62 0 0 0 0.62 0 0 0.09 0.07 0.06 0.04 0.03 0.02 0 0.01 0.01 0.01 0 0 0 0 0 0 114 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0 0.02 0.02 0.02 0.01 0.01 0 0 0.71 1 0 0 1 0 0 0 0 1 0.62 0.39 0 0.07 0.07 0.07 0.05 0.04 0 0.02 0.02 0.01 0.01 0 0 0 0 0 0 115 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.01 0.01 0 0 0.02 0.02 0.02 0.01 0.01 0 0 0.81 1 0 0 1 1 1 1 1 1 0.62 0.39 0 0.07 0.07 0.07 0.06 0.04 0 0.02 0.02 0.02 0 0 0 0 0 0 0 116 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.01 0.01 0 0 0.02 0.02 0.02 0.02 0.02 0 0 0.81 1 0.81 0 0.62 0.71 0.81 0.81 0.71 0.62 0.45 0.33 0 0.07 0.07 0.07 0.06 0 0.04 0.03 0.02 0.02 0 0 0 0 0 0 0 117 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0.02 0.03 0.03 0.03 0.03 0 0 0.81 1 0.81 0 0.45 0.51 0.58 0 0 0 0 0.24 0 0 0.1 0.1 0 0 0.05 0.04 0.03 0 0 0 0 0 0 0 0 118 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0 0 0.04 0.05 0.05 0.05 0.04 0 0.81 1 0.81 0 0 0.37 0 0 0 0.12 0.13 0.18 0 0 0.15 0.13 0 0.09 0.07 0.05 0.03 0 0 0 0 0 0 0 0 119 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0.03 0.03 0.03 0.03 0.03 0 0 0.06 0.07 0.07 0.07 0.06 0 0.81 1 0.81 0 0 0.24 0.2 0.13 0.12 0.12 0.12 0.16 0.18 0.18 0.18 0.18 0.16 0.12 0.09 0.06 0 0 0 0 0 0 0 0 0 120 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.04 0.05 0.05 0.05 0.05 0.05 0 0 0.09 0.1 0.1 0.1 0.09 0 0 1 0.81 0.67 0 0.16 0.13 0.11 0.1 0.09 0.09 0.12 0.13 0.15 0 0 0.18 0.13 0.09 0.06 0.04 0 0 0 0 0 0 0 0 121 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.06 0.06 0.07 0.07 0.07 0.07 0.07 0 0.13 0.16 0.16 0.16 0.13 0 0 1 0.81 0.67 0 0.1 0.09 0.07 0.07 0.07 0.07 0.09 0.1 0.1 0 0 0.18 0.13 0.09 0.06 0.04 0 0 0.01 0.01 0 0 0 0 122 | 0 0 0 0 0 0 0 0 0 0 0 0 
0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.07 0.08 0.09 0.1 0.11 0.11 0.11 0.11 0 0.2 0.24 0.24 0.24 0.2 0 0 1 0.81 0.67 0 0.07 0.06 0.05 0.05 0.05 0 0 0 0 0 0.18 0.16 0.12 0.09 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 0 123 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.07 0.09 0.1 0.11 0.13 0.15 0.16 0.16 0.16 0 0 0.37 0.37 0.37 0.31 0.23 0 1 0.81 0.67 0 0 0 0 0 0 0 0 0 0.18 0.18 0.16 0.14 0.1 0.08 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 0 124 | 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.07 0.09 0.12 0.14 0.15 0.17 0.2 0.22 0.25 0.25 0 0 0.51 0.58 0.58 0.42 0.27 0 1 0.81 0.58 0.42 0 0 0 0.18 0.18 0.18 0.18 0.18 0.18 0.16 0.14 0.12 0.09 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 125 | 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.07 0.09 0.12 0.16 0.18 0.21 0.23 0.27 0.3 0.34 0.39 0 0 0.71 0.81 0.81 0 0 0 1 0.71 0.51 0.37 0.27 0.18 0.18 0.18 0.18 0.18 0.18 0.18 0.16 0.14 0.12 0.11 0.08 0.06 0.05 0 0.02 0.01 0 0 0 0 0 0 126 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.03 0.04 0.05 0.07 0.09 0.12 0.16 0.22 0.25 0.28 0.32 0.36 0.41 0.47 0.54 0.62 0 1 1 1 1 1 1 1 0.62 0.45 0.33 0.24 0.18 0.18 0.18 0.18 0.18 0.18 0.16 0.14 0.12 0.11 0.1 0.07 0.05 0 0 0.01 0.01 0 0 0 0 0 0 127 | 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.05 0.07 0.08 0.12 0.16 0.22 0.3 0.34 0.38 0.44 0.5 0.57 0.65 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.54 0.39 0.29 0.21 0.18 0.18 0.18 0.18 0 0 0 0 0 0.08 0.07 0.05 0.04 0 0 0.01 0.01 0.01 0 0 0 0 0 128 | 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.06 0.08 0 0 0.18 0.25 0.34 0.38 0.44 0.5 0.57 0.65 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.47 0.34 0.25 0.19 0.18 0.18 0.18 0 0 0 0 0 0 0.05 0.05 0.04 0.03 0 0 0.01 0.01 0.01 0 0 0 0 0 129 | 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.06 0.09 0.11 0.15 0.21 0.28 0.38 0.44 0.5 0.57 0.65 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.57 0.41 0.3 0.22 0.18 0.18 0.18 0.16 0.12 0.08 0.05 0.03 0.02 0 0 0.04 0.03 0.02 0.02 0 0.01 0.01 0.01 0 0 0 0 0 130 | 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.06 0.09 0.13 0.17 0.23 0.32 0.44 0.5 0.57 0.65 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.57 0.5 0.36 0.27 0.2 0.18 0.18 0.16 0.14 0.1 0.08 0.05 0.03 0.02 0 0 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 131 | 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.03 0.04 0.06 0.09 0.13 0.2 0.27 0.36 0.5 0.57 0.65 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.57 0.5 0.44 0.32 0.23 0.18 0.18 0.16 0.14 0.12 0.09 0.07 0.05 0.03 0.02 0 0 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 132 | 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.05 0.07 0.11 0 0.3 0.41 0.57 0.65 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.57 0.5 0.44 0.38 0.28 0.21 0.18 0.16 0.14 0.12 0.11 0.08 0.06 0.05 0.03 0.02 0 0 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 133 | 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0 0 0 0 0 0.34 0.47 0.65 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.57 0.5 0.44 0.38 0.34 0.25 0.18 0.16 0.14 0.12 0.11 0.1 0.07 0.05 0.04 0.03 0.02 0 0 0.02 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 134 | 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.02 0.04 0.05 0.08 0.12 0.19 0 0.54 0.75 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.57 0.5 0.44 0.38 0.34 0.3 0.22 0.16 0.14 0.12 0.11 0.1 0.09 0.06 0.05 0.04 0.03 0 0 0 0 0.02 0.02 0.01 0 0 0 0 0 0 0 0 0 135 | 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.05 0.07 0.1 0.12 0 0.62 1 1 1 0.99 0.99 0.99 0.99 0.86 0.75 0.65 0.57 0.5 0.44 0.38 0.34 0.3 0.26 0.19 0.14 0.12 0.11 0.1 0.09 0.08 0.06 0.04 0.03 0.02 0 0 0 0 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 136 | 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.06 0.07 0.08 0 0.62 1 1 0.99 0.99 0.99 
0.99 0.86 0.75 0.65 0.57 0.5 0.44 0.38 0.34 0.3 0.26 0.23 0.17 0.13 0.11 0.1 0.09 0.08 0.07 0.05 0.04 0.03 0.02 0 0 0 0 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 137 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.03 0.04 0.05 0.05 0 0 0.71 0.81 0.81 0.81 0.81 0.71 0.62 0.54 0.47 0.41 0.36 0.32 0.28 0.25 0.22 0.19 0.17 0.13 0.09 0.08 0.07 0.06 0.06 0.05 0.04 0.03 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 138 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.02 0.03 0.03 0.04 0 0 0.51 0.58 0.67 0.67 0.58 0.51 0.45 0.39 0.34 0.3 0.27 0.23 0.21 0.18 0.16 0.14 0.13 0.09 0.07 0.06 0.05 0.05 0.04 0.04 0.03 0.02 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 139 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.02 0.02 0 0.37 0.42 0.48 0.48 0.42 0.37 0.33 0.29 0.25 0.22 0.2 0.17 0.15 0.14 0.12 0.11 0.09 0.07 0.05 0.05 0.04 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 140 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.02 0.01 0 0.27 0 0 0 0.31 0.27 0.24 0.21 0.19 0.16 0.15 0.13 0.11 0.1 0.09 0.08 0.07 0.05 0.04 0.03 0.03 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 141 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0 0.18 0.16 0.16 0.17 0.2 0.2 0.18 0.16 0.14 0.12 0.11 0.1 0.09 0.08 0.07 0.06 0.05 0.04 0.03 0.03 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 142 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0 0 0.12 0 0 0 0.13 0.13 0.12 0.1 0.09 0.08 0.07 0.06 0.06 0.05 0.05 0.04 0.03 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 143 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.03 0.04 0.07 0.08 0.07 0.07 0.07 0.09 0.09 0.09 0.08 0.07 0.06 0.05 0.05 0.04 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 144 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.04 0.04 0.05 0 0 0.05 0.06 0.06 0.06 0.06 0.05 0.05 0.04 0.04 0.03 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 145 | 146 | 147 | -------------------------------------------------------------------------------- /maps/heatmap_1_f.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 43 5 | 23 6 | 18 7 | 46 8 | 64 9 | 64 10 | 11 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 12 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 13 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 14 | 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 15 | 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 16 | 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 1 1 1 1 0 0 0 1 0 0 0 0 1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 1 1 0 0 0 0 0 17 | 1 0 0 1 1 0 0 0 1 1 0 0 0 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 18 | 1 0 0 0 1 0 0 0 1 1 0 0 0 1 1 1 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 19 | 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20 | 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 
0 1 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 21 | 1 1 0 0 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 22 | 1 1 0 0 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 23 | 0 1 0 0 0 1 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 0 1 1 1 24 | 0 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 0 25 | 0 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 26 | 0 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 27 | 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 0 0 0 1 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 28 | 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 29 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 1 1 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 1 1 30 | 0 0 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 1 1 0 1 0 1 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 1 0 31 | 0 0 0 0 0 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 32 | 0 0 0 0 0 1 0 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 33 | 0 1 1 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 1 1 0 0 0 34 | 0 0 0 1 0 0 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 35 | 1 1 0 1 0 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 36 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 37 | 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 38 | 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 39 | 0 0 1 1 1 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 40 | 1 1 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 41 | 0 0 0 0 0 1 0 1 0 0 0 0 0 1 1 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 42 | 0 0 0 0 0 1 1 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 0 1 1 0 0 0 0 0 0 0 0 43 | 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 1 0 1 1 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 44 | 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 1 1 0 1 1 1 1 0 0 0 1 0 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 45 | 1 1 0 1 0 0 0 0 0 1 0 1 1 1 1 1 0 0 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 46 | 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 
47 | 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 1 1 1 1 0 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 48 | 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 1 1 1 0 0 0 1 1 0 0 1 0 0 0 0 1 1 0 0 0 0 0 1 49 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 1 50 | 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 51 | 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 52 | 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 53 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 54 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 55 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 56 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 57 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 58 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 0 0 0 1 0 0 1 1 59 | 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 1 0 0 0 1 0 1 1 1 60 | 1 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 61 | 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 62 | 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 63 | 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 0 0 64 | 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 1 1 1 0 0 1 0 0 0 65 | 0 0 1 1 1 0 1 1 1 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 0 0 0 0 0 0 1 66 | 0 1 1 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 0 1 67 | 0 1 1 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 68 | 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 69 | 0 0 1 1 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 70 | 0 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 71 | 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 72 | 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 73 | 0 0 0 1 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 74 | 0 0 0 1 1 1 0 1 1 1 0 1 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 75 | 76 | 77 | 78 | 64 79 | 64 80 | 81 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 82 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 83 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 84 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 85 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 86 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 87 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 88 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 89 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 90 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 91 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 92 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 93 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0.01 0 0.01 0.01 0 0 0 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 94 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0.02 0.02 0.02 0.03 0.03 0.02 0.02 0 0.02 0.02 0.02 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 95 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.02 0.02 0 0 0.03 0.03 0.03 0 0 0.03 0.03 0 0.02 0.03 0.03 0 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 96 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0.01 0.01 0.02 0.02 0.02 0.02 0 0.03 0.04 0.04 0.04 0 0.04 0.04 0 0 0.04 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 97 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0.01 0.01 0 0.03 0.03 0.03 0 0.04 0.05 0.05 0.05 0 0.05 0.05 0.05 0 0.05 0.05 0.04 0.04 0.03 0.02 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 98 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.02 0.02 0.02 0.01 0.01 0 0 0.01 0.01 0 0.04 0.05 0.05 0 0.06 0.06 0.06 0.06 0 0.06 0.06 0.06 0.06 0.06 0.06 0.06 0.05 0.04 0.03 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 99 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0.02 0.02 0.02 0.02 0 0 0 0 0 0 0.06 0.06 0.06 0 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.06 0.05 0.04 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 100 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0 0.02 0 0.03 0.03 0 0 0 0 0 0 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0 0.09 0.08 0.06 0 0 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 101 | 0 0 0 0 0 0 0 
0 0 0 0 0 0 0.01 0 0 0 0 0 0.03 0 0.03 0 0 0.05 0 0 0.06 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0.07 0 0 0 0 0 0 0.13 0.1 0.07 0.05 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 102 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0 0.02 0.02 0 0.04 0.04 0.05 0 0 0.06 0.06 0.07 0.08 0.08 0.08 0.08 0.07 0.07 0.07 0.07 0.07 0 0 0 0 0 0 0 0.42 0.35 0.3 0.22 0.17 0.12 0.08 0.05 0.04 0.03 0 0.01 0 0 0 0 0 0 0 0 0 103 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0.03 0.04 0.05 0.06 0.06 0.07 0.07 0.08 0.08 0.09 0.09 0.09 0.09 0.09 0.09 0.08 0.08 0.07 0 0 0 0.05 0.06 0 0 0.53 0.64 0.53 0.4 0.3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 104 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0.01 0.02 0.02 0.03 0.03 0.04 0.06 0.06 0.07 0.07 0.08 0.08 0.09 0 0.08 0.08 0 0 0 0.09 0.08 0.08 0.07 0 0.06 0.07 0.08 0.08 0 0.73 1 0.73 0.53 0.4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 105 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.03 0.03 0.04 0.05 0.06 0.07 0 0.06 0.08 0.09 0 0 0.06 0.06 0.05 0.03 0 0 0.09 0.08 0.08 0 0.09 0.1 0.12 0.12 0 0.83 1 0.83 0.61 0.45 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 106 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0 0.04 0.05 0.07 0.07 0 0.07 0.09 0 0 0.04 0.04 0.04 0.04 0.03 0 0 0.09 0.09 0.09 0 0.13 0.15 0.18 0.18 0 0 1 0.83 0.69 0.51 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 107 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.02 0.03 0 0 0.06 0.07 0.07 0 0 0.09 0.09 0 0.03 0.03 0.03 0.03 0.03 0 0 0.09 0.11 0.13 0 0 0.22 0.26 0.26 0 0 1 0.83 0.69 0.58 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 108 | 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.02 0.02 0.03 0 0 0.06 0.07 0.07 0 0 0.09 0.09 0 0 0.03 0.03 0.03 0.03 0.03 0 0.12 0.16 0.19 0 0 0.3 0.4 0.4 0.4 0 1 0.83 0.69 0.51 0.38 0 0 0 0 0 0 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 109 | 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.02 0.02 0.03 0 0 0.06 0.07 0.07 0 0 0.09 0.09 0 0 0.03 0.03 0.03 0.03 0.03 0 0.16 0.21 0.28 0.33 0 0.4 0.53 0.61 0.61 0 1 0.83 0.61 0.45 0.33 0 0.13 0.09 0.06 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 110 | 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0 0.06 0.07 0.07 0 0 0.09 0.09 0 0 0.03 0.03 0.03 0.03 0.03 0 0 0.28 0.37 0.5 0 0 0.73 0.83 0.83 0 1 0.73 0.53 0.4 0.3 0.22 0.15 0.1 0.07 0.05 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 111 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0 0.03 0.03 0 0.06 0.07 0.07 0.06 0 0.09 0.09 0 0 0.03 0.03 0.03 0.03 0.03 0 0 0.37 0.5 0.67 0.77 0.87 0.99 0.99 1 1 1 0.64 0.47 0.35 0.26 0.2 0.15 0.1 0 0.04 0 0 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 112 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0 0.03 0.03 0 0 0.07 0.07 0.06 0 0 0.09 0.08 0 0.03 0.03 0.03 0.03 0.03 0 0 0.42 0.56 0.77 0.87 0.99 0.99 1 1 1 0.77 0.56 0.42 0 0.2 0.15 0 0 0 0 0.04 0 0 0.02 0.02 0.01 0.01 0.01 0 0 0 113 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.03 0.03 0 0 0.07 0.07 0.06 0 0 0.09 0.08 0 0.03 0.03 0.03 0.03 0.03 0.02 0 0.47 0.64 1 1 1 1 1 1 0 0 0 0.31 0 0 0.12 0.1 0.09 0.07 0.05 0.04 0 0.03 0.02 0.02 0 0 0 0 0 0 114 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.03 0.03 0 0 0.07 0.07 0.06 0 0 0.09 0.08 0 0 0.03 0.03 0.03 0.03 0.02 0 0 0.73 1 0 0 0.83 0 0 0 0 0.26 0.23 0.18 0 0.1 0.1 0.1 0.08 0.06 0 0.04 0.03 0.03 0.02 0 0 0 0 0 0 115 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.03 0.04 0 0.07 0.07 0.06 0 0 0.09 0.08 0 0 0.03 0.03 0.03 0.03 0.02 0 0 0.83 1 0 0 0.53 0.45 0.3 0.26 0.26 0.23 0.21 0.16 0 0.1 0.1 0.1 0.08 0.06 0 0.04 0.04 0.03 0 0 0 0 0 0 0 116 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.05 0.07 0.07 0.07 0.06 0.06 0 0.09 0.08 0 0 0.03 0.03 0.03 0.03 0.03 0 0 0.83 1 0.83 0 0.35 0.3 0.25 0.22 0.2 0.18 0.16 0.12 0 0.1 0.1 0.1 0.08 0 0.06 0.05 0.04 0.03 0 0 0 0 0 0 0 117 | 0 0 0 0 0 0 0 0 
0 0 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.06 0.07 0.07 0.07 0.08 0.08 0.09 0.09 0.08 0.07 0 0.03 0.03 0.04 0.04 0.04 0 0 0.83 1 0.83 0 0.23 0.2 0.17 0 0 0 0 0.09 0 0 0.1 0.1 0 0 0.06 0.06 0.04 0 0 0 0 0 0 0 0 118 | 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.06 0.08 0.08 0.09 0.09 0.09 0.09 0.09 0.09 0.09 0 0 0.05 0.06 0.06 0.06 0.05 0 0.83 1 0.83 0 0 0.14 0 0 0 0.05 0.05 0.07 0 0 0.1 0.1 0 0.06 0.06 0.06 0.04 0 0 0 0 0 0 0 0 119 | 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.03 0.04 0.06 0.08 0.1 0.11 0.12 0.14 0.14 0.14 0.14 0.14 0.14 0 0 0.07 0.08 0.08 0.08 0.07 0 0.83 1 0.83 0 0 0.09 0.08 0.05 0.05 0.05 0.05 0.06 0.07 0.07 0.09 0.1 0.1 0.08 0.06 0.05 0 0 0 0 0 0 0 0 0 120 | 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.03 0.03 0.04 0.06 0.08 0.1 0.13 0.14 0.16 0.18 0.2 0.2 0.2 0.2 0.18 0 0 0.1 0.12 0.12 0.12 0.1 0 0 1 0.83 0.69 0 0.06 0.05 0.05 0.04 0.04 0.04 0.05 0.05 0.06 0 0 0.1 0.08 0.06 0.04 0.03 0 0 0 0 0 0 0 0 121 | 0 0 0 0 0 0 0 0 0 0 0.02 0.02 0.03 0.04 0.06 0.08 0.1 0.13 0.17 0.19 0.21 0.24 0.27 0.3 0.3 0.27 0.24 0.21 0 0.15 0.18 0.18 0.18 0.15 0 0 1 0.83 0.69 0 0.04 0.04 0.03 0.03 0.03 0.03 0.04 0.04 0.04 0 0 0.1 0.08 0.06 0.04 0.03 0 0 0.01 0 0 0 0 0 122 | 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.06 0.08 0.1 0.13 0.17 0.22 0.25 0.28 0.32 0.36 0.41 0.41 0.36 0.32 0.28 0 0.22 0.26 0.26 0.26 0.22 0 0 1 0.83 0.69 0 0.03 0.03 0.02 0.02 0.02 0 0 0 0 0 0.1 0.09 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0.01 0 0 0 0 123 | 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.05 0.07 0.1 0.13 0.17 0.22 0.3 0.33 0.38 0.43 0.48 0.48 0.48 0.48 0.43 0.38 0 0 0.4 0.4 0.4 0.33 0.25 0 1 0.83 0.69 0 0 0 0 0 0 0 0 0 0.1 0.1 0.09 0.08 0.06 0.05 0.04 0.03 0.02 0.01 0.01 0.01 0 0 0 0 124 | 0 0 0 0 0 0 0 0 0 0 0.03 0.04 0.05 0.08 0.12 0.17 0.22 0.3 0.4 0.45 0.51 0.58 0.58 0.58 0.58 0.58 0.58 0.51 0 0 0.53 0.61 0.61 0.45 0.3 0 1 0.83 0.61 0.45 0 0 0 0.1 0.1 0.1 0.1 0.1 0.1 0.09 0.08 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 125 | 0 0 0 0 0 0 0 0 0 0 0.03 0.04 0.06 0.09 0.14 0.2 0.3 0.4 0.53 0.61 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0 0 0.73 0.83 0.83 0 0 0 1 0.73 0.53 0.4 0.3 0.2 0.14 0.1 0.1 0.1 0.1 0.1 0.09 0.08 0.07 0.06 0.05 0.04 0.03 0 0.01 0.01 0 0 0 0 0 0 126 | 0 0 0 0 0 0 0 0 0 0 0 0.05 0.07 0.11 0.16 0.23 0.35 0.53 0.73 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0 0.99 0.99 0.99 0.99 1 1 1 0.64 0.47 0.35 0.26 0.2 0.14 0.1 0.1 0.1 0.1 0.09 0.08 0.07 0.06 0.06 0.04 0.03 0 0 0.01 0.01 0 0 0 0 0 0 127 | 0 0 0 0 0 0 0 0 0 0 0 0.06 0.09 0.13 0.19 0.28 0.42 0.64 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0.77 0.56 0.42 0.31 0.23 0.18 0.14 0.1 0.1 0 0 0 0 0 0.05 0.04 0.03 0.03 0 0 0.01 0 0 0 0 0 0 0 128 | 0 0 0 0 0 0 0 0 0 0 0 0.05 0.07 0.11 0.16 0.23 0 0 0.73 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.83 0.73 0.64 0.56 0.42 0.31 0.23 0.18 0.14 0.1 0.08 0 0 0 0 0 0 0.03 0.03 0.03 0.02 0 0 0 0 0 0 0 0 0 0 129 | 0 0 0 0 0 0 0 0 0 0 0 0.04 0.06 0.09 0.14 0.18 0.26 0.4 0.53 0.61 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.69 0.61 0.53 0.47 0.42 0.31 0.23 0.18 0.14 0.1 0.08 0.06 0.05 0.03 0.02 0.02 0.01 0 0 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 130 | 0 0 0 0 0 0 0 0 0 0 0 0.04 0.05 0.08 0.1 0.15 0.22 0.3 0.4 0.45 0.51 0.58 0.58 0.58 0.58 0.58 0.58 0.58 0.58 0.58 0.58 0.58 0.51 0.45 0.4 0.35 0.31 0.23 0.18 0.14 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 131 | 0 0 0 0 0 0 0 0 0 0.01 0.02 0.03 0.05 0.06 0.09 0.13 0.17 0.22 0.3 0.33 0.38 0.43 0.48 0.48 0.48 0.48 0.48 0.48 0.48 0.48 0.48 0.43 0.38 0.33 0.3 0.26 0.23 0.18 0.14 
0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0 0 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 132 | 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.03 0.04 0.05 0.08 0.1 0.13 0 0.22 0.25 0.28 0.32 0.36 0.41 0.41 0.41 0.41 0.41 0.41 0.41 0.36 0.32 0.28 0.25 0.22 0.2 0.18 0.14 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 133 | 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0 0 0 0 0 0.17 0.19 0.21 0.24 0.27 0.3 0.34 0.34 0.34 0.34 0.34 0.3 0.27 0.24 0.21 0.19 0.17 0.15 0.14 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 134 | 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.02 0.03 0.03 0.05 0.07 0.1 0 0.14 0.16 0.18 0.2 0.23 0.26 0.29 0.29 0.29 0.26 0.23 0.2 0.18 0.16 0.14 0.13 0.12 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 135 | 0 0 0 0 0 0 0 0 0 0.01 0 0 0.02 0.02 0.03 0.04 0.06 0.07 0 0.11 0.12 0.14 0.15 0.17 0.19 0.22 0.24 0.22 0.19 0.17 0.15 0.14 0.12 0.11 0.1 0.09 0.08 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 136 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.02 0.03 0.04 0.05 0 0.08 0.09 0.11 0.12 0.13 0.15 0.16 0.16 0.16 0.15 0.13 0.12 0.11 0.09 0.08 0.08 0.07 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 137 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.03 0 0 0.07 0.08 0.09 0.1 0.11 0.11 0.11 0.11 0.11 0.1 0.09 0.08 0.07 0.06 0.06 0.05 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 138 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0 0 0.06 0.06 0.07 0.08 0.08 0.08 0.08 0.08 0.08 0.08 0.07 0.06 0.06 0.05 0.04 0.04 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 139 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0 0.04 0.05 0.05 0.05 0.05 0.05 0.05 0.05 0.05 0.05 0.05 0.05 0.04 0.04 0.03 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 140 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.03 0 0 0 0.04 0.04 0.04 0.04 0.04 0.04 0.04 0.04 0.03 0.03 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 141 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0.02 0.02 0.02 0.02 0.03 0.03 0.03 0.03 0.03 0.03 0.03 0.03 0.03 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 142 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0 0.02 0 0 0 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 143 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 144 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 145 | 146 | 147 | -------------------------------------------------------------------------------- /maps/heatmap_2_f.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 57 5 | 46 6 | 38 7 | 11 8 | 64 9 | 64 10 | 11 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 12 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 13 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 
1 1 1 1 1 0 0 0 14 | 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 15 | 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 16 | 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 1 1 1 1 0 0 0 1 0 0 0 0 1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 1 1 0 0 0 0 0 17 | 1 0 0 1 1 0 0 0 1 1 0 0 0 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 18 | 1 0 0 0 1 0 0 0 1 1 0 0 0 1 1 1 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 19 | 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20 | 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 1 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 21 | 1 1 0 0 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 22 | 1 1 0 0 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 23 | 0 1 0 0 0 1 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 0 1 1 1 24 | 0 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 0 25 | 0 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 26 | 0 1 1 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 27 | 0 1 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 0 0 0 1 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 28 | 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 29 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 1 1 1 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 1 1 30 | 0 0 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 1 1 0 1 0 1 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 1 0 31 | 0 0 0 0 0 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 1 0 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 32 | 0 0 0 0 0 1 0 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 33 | 0 1 1 1 0 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 1 1 0 0 0 34 | 0 0 0 1 0 0 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 35 | 1 1 0 1 0 0 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 36 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 37 | 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 0 0 1 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 38 | 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 39 | 0 0 1 1 1 1 0 0 0 0 0 1 1 0 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 40 | 1 1 1 1 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 
1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 41 | 0 0 0 0 0 1 0 1 0 0 0 0 0 1 1 0 0 1 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 42 | 0 0 0 0 0 1 1 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 0 1 1 0 0 0 0 0 0 0 0 43 | 0 0 0 0 0 1 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 1 1 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 1 1 1 0 1 1 0 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 44 | 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 1 1 0 1 1 1 1 0 0 0 1 0 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 45 | 1 1 0 1 0 0 0 0 0 1 0 1 1 1 1 1 0 0 1 0 0 0 1 1 0 0 1 1 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 46 | 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1 1 0 0 0 0 0 47 | 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 1 1 1 1 0 1 1 0 0 1 1 0 0 0 1 1 0 0 0 0 0 0 48 | 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 1 1 1 0 0 0 1 1 0 0 1 0 0 0 0 1 1 0 0 0 0 0 1 49 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 1 50 | 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 51 | 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 52 | 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 1 1 0 0 0 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 53 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 54 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 55 | 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 1 1 1 1 1 1 56 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 1 0 0 0 0 57 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 58 | 0 0 0 1 0 0 0 0 1 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 0 0 0 1 0 0 1 1 59 | 0 0 0 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 1 0 0 0 1 0 1 1 1 60 | 1 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 61 | 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 62 | 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 63 | 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 1 1 0 0 1 0 0 0 64 | 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 1 1 1 0 0 1 0 0 0 65 | 0 0 1 1 1 0 1 1 1 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 0 0 0 0 0 0 1 66 | 0 1 1 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 0 1 67 | 0 1 1 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 1 1 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 68 | 0 1 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 69 | 0 0 1 1 1 1 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 70 | 0 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 71 | 0 0 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 72 | 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 73 | 0 0 0 1 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 74 | 0 0 0 1 1 1 0 1 1 1 0 1 0 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 75 | 76 | 77 | 78 | 64 79 | 64 80 | 81 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 82 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 83 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0.03 0.03 0.03 0.03 0.03 0.03 0.03 0 0.02 0.02 0 0 0 0 0 0 0 0 0 0 0 0 0 0 84 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.02 0.02 0.03 0.03 0.04 0.04 0.04 0.04 0.04 0.04 0.04 0.03 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 85 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.02 0.03 0.03 0.04 0.05 0.05 0.06 0.06 0.06 0.06 0.06 0.05 0.05 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 86 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.05 0.06 0.07 0.08 0.09 0.09 0.09 0.09 0.09 0.08 0.06 0.05 0.03 0 0 0 0 0 0.01 0.01 0 0 0 0 0 0 0 87 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.03 0.05 0.07 0.08 0.09 0.11 0.13 0.13 0.13 0.13 0.13 0.1 0.08 0.06 0 0 0.03 0.02 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 88 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.04 0.06 0.08 0.12 0.14 0.16 0.19 0.19 0.19 0.19 0.17 0.13 0.1 0.08 0.06 0.05 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 89 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0 0 0.05 0.07 0.09 0.14 0.2 0.24 0.28 0.28 0.28 0.25 0.22 0.17 0.13 0.1 0.08 0.06 0.04 0.03 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 90 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.06 0.08 0.12 0 0.35 0.42 0.42 0.37 0.33 0.29 0.22 0.17 0.13 0.1 0.07 0.05 0.03 0.03 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 91 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.02 0.02 0.03 0.03 0 0 0 0 0 0.64 0.57 0.5 0.44 0.39 0.29 0.22 0.17 0.12 0.08 0.05 0.04 0.03 0.03 0.03 0.03 0.02 0.02 0.01 0.01 0 0 0 92 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.03 0 0 0 1 0.87 0.87 0.87 0.77 0.68 0.6 0.53 0.39 0.29 0.2 0.13 0.09 0.06 0.04 0.04 0.04 0.04 0.04 0.03 0 0 0.01 0 0 0 93 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0.01 0.02 0.02 0.03 0 0 0 0.48 0.73 1 1 0.87 0.87 0.87 0.77 0.68 0.6 0.44 0.29 0.2 
0.13 0 0 0 0 0 0 0.04 0.03 0 0 0 0 0 0 94 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0 0 0.32 0.48 0.64 1 1 1 0.87 0 0.87 0.77 0.68 0 0 0 0 0 0 0.09 0.07 0.06 0.04 0.04 0.03 0.02 0 0 0 0 0 95 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.03 0.03 0.03 0 0 0.32 0.37 0.48 0 0 1 1 0 0.87 0.87 0.77 0 0.5 0.37 0.28 0.21 0.16 0.12 0.09 0.07 0.05 0.03 0.03 0.02 0.01 0.01 0.01 0 0 96 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.04 0.05 0.06 0 0.32 0.37 0.44 0.44 0 1 1 0 0 0.87 0.87 0.77 0.68 0.5 0.37 0.28 0.21 0.16 0.12 0.08 0.06 0.04 0.03 0.02 0.01 0.01 0.01 0 0 97 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0 0.06 0.07 0.08 0 0.32 0.33 0.44 0.44 0 0.77 1 1 0 0.87 0.87 0.87 0.77 0.57 0.42 0.32 0.24 0.18 0.12 0.08 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 98 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0.08 0.11 0.12 0 0.32 0.32 0.39 0.44 0 0.68 0.77 1 1 1 1 1 0.87 0.64 0.48 0.35 0.27 0.18 0.12 0.08 0.06 0 0 0 0 0 0 0 0 99 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.11 0.14 0.18 0 0.32 0.32 0.35 0.39 0.44 0.6 0.68 0.77 0.87 0.99 1 1 1 0.73 0.54 0.4 0 0 0 0 0 0 0.02 0.01 0.01 0.01 0 0 0 100 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0 0 0 0 0 0.14 0.18 0.24 0.32 0.32 0.32 0.32 0.29 0.33 0.44 0.5 0.57 0.64 0.73 0.83 0 1 0.73 0.61 0 0 0.15 0.11 0.07 0.05 0.03 0.02 0.02 0.01 0.01 0 0 0 101 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0.04 0.06 0.09 0.12 0.15 0.2 0.27 0.32 0.32 0.32 0.28 0.22 0.25 0.33 0 0 0 0 0 0 1 0.83 0.54 0.4 0.27 0.18 0.12 0.08 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 102 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.03 0.04 0.06 0.09 0.13 0.17 0.23 0.27 0.32 0.32 0 0 0 0 0 0 0 0.4 0.54 0.73 1 1 0.73 0.54 0.35 0.27 0.18 0 0.07 0.05 0.03 0.02 0.02 0.01 0 0 0 0 103 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.03 0.04 0.06 0.09 0.13 0.19 0.23 0.27 0.32 0 0 0 0.02 0.02 0 0 0.34 0.45 0.61 0.83 1 0 0 0 0 0 0 0 0.05 0.04 0.03 0.02 0.01 0 0 0.01 0.01 0.01 104 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0 0.04 0.05 0 0 0 0.23 0.27 0.32 0.32 0 0.02 0.02 0.03 0.03 0 0.38 0.51 0.69 0.83 1 0 0 0.01 0.01 0.01 0.02 0.03 0.03 0.03 0.03 0.02 0.01 0 0.02 0.01 0.01 0.01 105 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0 0 0.03 0.04 0.03 0.02 0 0 0.27 0.32 0.32 0 0.03 0.03 0.04 0.04 0 0.43 0.58 0.69 0.83 1 0 0 0 0.01 0 0 0.02 0.02 0.02 0.02 0.01 0 0 0.02 0.02 0.01 0.01 106 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.02 0.03 0.02 0.02 0 0 0.27 0.32 0.32 0 0.04 0.05 0.06 0.06 0 0 0.58 0.69 0.83 1 0 0 0 0 0.01 0.01 0.01 0.02 0.01 0 0 0 0.04 0.03 0.02 0.02 0.01 107 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.01 0.01 0 0 0.27 0.32 0.32 0 0 0.07 0.08 0.08 0 0 0.58 0.69 0.83 1 0 0 0 0 0.01 0 0 0 0 0 0 0.07 0.05 0.04 0.03 0.02 0.02 108 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.27 0.32 0.32 0 0 0.09 0.12 0.12 0.12 0 0.58 0.69 0.83 1 0.99 0 0 0 0 0 0 0.19 0.17 0.15 0.12 0.09 0.07 0.05 0.04 0.03 0.02 109 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.24 0.32 0.32 0.32 0 0.12 0.16 0.18 0.16 0 0.51 0.69 0.83 1 0.99 0 0.57 0.57 0.5 0.44 0.33 0.25 0.22 0.2 0.15 0.12 0.09 0.07 0.05 0 0 110 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.28 0.32 0.32 0 0 0.21 0.21 0.21 0 0.45 0.61 0.83 1 0.99 0.99 
0.73 0.68 0.68 0.6 0.44 0.33 0.29 0.26 0.2 0.15 0.12 0.09 0 0 0 111 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.25 0.28 0.32 0.32 0.28 0.25 0.25 0.25 0.3 0.4 0.54 0.73 1 1 0.99 0.83 0.57 0 0.68 0 0 0.33 0.29 0.22 0.17 0.13 0.09 0.06 0.04 0.03 112 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.22 0.25 0.28 0.32 0.32 0.28 0.25 0.25 0.27 0.35 0.48 0.57 0 1 1 0 0 0 0 0.68 0 0 0.33 0.25 0.19 0.13 0.09 0.06 0.04 0.03 113 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0 0.2 0.22 0.25 0.28 0.32 0.32 0.24 0.21 0 0 0 0.57 0 0 1 0.99 0.87 0.77 0.68 0.68 0 0.37 0.33 0.28 0 0 0.08 0.05 0.04 0.03 114 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.17 0.19 0 0 0.32 0 0 0 0 0.48 0.57 0.57 0 1 0.99 0.99 0.87 0.77 0 0.68 0.5 0.37 0.28 0 0 0.05 0.05 0.03 0.02 115 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.13 0.13 0 0 0.28 0.32 0.32 0.32 0.35 0.48 0.57 0.57 0 1 0.99 0.99 0.87 0.87 0 0.68 0.57 0.42 0 0 0.03 0.04 0.03 0.03 0.02 116 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.1 0.1 0.1 0 0.25 0.28 0.32 0.32 0.32 0.42 0.57 0.57 0 1 1 0.99 0.83 0 0.87 0.77 0.57 0.48 0 0 0.02 0.03 0.02 0.02 0.02 117 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.1 0.1 0.1 0 0.25 0.25 0.24 0 0 0 0 0.57 0 0 1 0.99 0 0 0.87 0.87 0.64 0 0 0.01 0.01 0.02 0.01 0.01 0.01 118 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.1 0.1 0.1 0 0 0.25 0 0 0 0.28 0.42 0.57 0 0 1 1 0 0.73 0.87 0.87 0.73 0 0 0.01 0.01 0.01 0.01 0.01 0 119 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.1 0.1 0.1 0 0 0.22 0.25 0.25 0.25 0.28 0.37 0.5 0.57 0.64 1 1 1 0.87 0.87 0.87 0 0 0 0 0.01 0 0 0 0 120 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.1 0.1 0.1 0 0.17 0.19 0.21 0.21 0.21 0.28 0.37 0.42 0.48 0 0 1 1 0.87 0.87 0.87 0 0 0 0 0 0 0 0.07 121 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.1 0.1 0.1 0 0.13 0.14 0.16 0.18 0.18 0.21 0.28 0.32 0.32 0 0 1 1 1 0.87 0.87 0 0 0.42 0.32 0.24 0.18 0.12 0.08 122 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0.09 0.1 0.1 0 0.1 0.11 0.12 0.14 0.15 0 0 0 0 0 0.57 0.77 1 1 1 0.87 0.87 0.77 0.57 0.42 0.32 0.21 0.14 0.1 123 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.08 0.09 0.1 0 0 0 0 0 0 0 0 0 0.25 0.37 0.5 0.68 0.77 1 1 1 0.87 0.87 0.64 0.48 0.32 0.21 0.14 0.1 124 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0.07 0.08 0.09 0.1 0 0 0 0.1 0.1 0.1 0.12 0.17 0.25 0.33 0.44 0.6 0.68 0.77 1 1 1 0.87 0.73 0.48 0.32 0.21 0 0 125 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0 0.06 0.07 0.08 0.09 0.1 0.1 0.1 0.1 0.1 0.1 0.1 0.14 0.19 0.25 0.33 0.44 0.5 0.57 0.64 0 1 0.99 0 0 0 0 0 0 126 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.03 0.04 0.05 0.05 0.06 0.07 0.07 0.08 0.08 0.08 0.08 0.08 0.08 0.11 0.14 0.19 0.25 0.33 0.37 0.42 0 0 1 1 0 0 0.27 0.18 0.12 0.08 127 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.03 0.04 0.04 0.05 0.05 0.06 0.06 0.07 0.07 0.07 0 0 0 0 0 0.19 0.25 0.28 0.28 0 0 1 1 0.73 0.48 0.32 0.21 0.14 0.1 128 | 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.03 0.04 0.04 0.04 0.05 0.05 0.06 0 0 0 0 0 0 0.17 0.22 0.25 0.28 0 0 0.64 0.64 0.54 0 0.27 0.18 0 0 129 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0.03 0.03 0.03 0.04 0.04 0.04 0.04 0.03 0.02 0.01 0.01 0 0 0.2 0.22 0.25 0.28 0 0.42 0.42 0.35 0 0.18 0 0 0 130 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0.02 0.03 0.03 0.03 0.03 0.03 0.02 0.01 0.01 0.01 0 0 0.18 0.2 0.22 0.25 0.28 0.28 0.28 0.24 0 0 0.11 0.07 0.05 131 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0.13 0.15 0.17 0.19 0.19 0.19 0.19 0.16 0 0 0.07 0.06 0.04 132 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0.1 0.12 0.13 0.13 0.13 0.13 0.13 0.11 0.09 0.07 0.05 0.04 0.04 133 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0.08 0.09 0.09 0.09 0.09 0 0 0.08 0.07 0 0.04 0.03 0.03 134 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0.06 0.06 0.06 0 0 0 0.05 0.05 0 0.03 0.02 0.02 135 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0.04 0.04 0.04 0.04 0 0.03 0.04 0.03 0.03 0.02 0.02 0 136 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0.03 0.03 0.03 0.03 0.03 0.02 0.03 0.02 0 0 0.01 0 137 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0.01 138 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 139 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 140 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 141 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 142 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 143 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 144 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 145 | 146 | 147 | -------------------------------------------------------------------------------- /maps/heatmap_4_f.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 13 5 | 7 6 | 29 7 | 31 8 | 64 9 | 64 10 | 11 | 1 0 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 12 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 13 | 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 14 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 15 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 16 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 17 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 19 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 21 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 22 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 23 | 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 24 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 25 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 26 | 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 27 | 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 28 | 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 29 | 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 1 1 30 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 31 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 32 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 0 1 1 1 1 1 33 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 34 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 1 1 1 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 35 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 36 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 37 | 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 38 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 39 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 
0 0 0 0 0 0 0 0 0 0 0 0 0 40 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 41 | 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 42 | 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 43 | 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 44 | 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 45 | 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 46 | 0 0 0 0 1 1 1 1 1 1 1 1 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 47 | 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 48 | 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 49 | 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 50 | 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 51 | 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 1 1 1 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 52 | 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 53 | 0 0 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 54 | 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 55 | 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 56 | 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 57 | 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 1 1 1 0 0 0 58 | 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 59 | 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 60 | 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 1 1 0 0 61 | 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 62 | 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 63 | 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 64 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 65 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 66 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 67 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 68 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 69 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 70 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 71 | 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 72 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 73 | 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 74 | 1 1 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 1 75 | 76 | 77 | 78 | 64 79 | 64 80 | 81 | 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.02 0.02 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 82 | 0 0 0 0 0 0.01 0.01 0.02 0.02 0.02 0.03 0.04 0.04 0.05 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 83 | 0 0 0 0 0 0.01 0.01 0.02 0.03 0.04 0.05 0.06 0.07 0.08 0.08 0.08 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 84 | 0 0 0 0 0 0.01 0.01 0.02 0.04 0.06 0.07 0.09 0.11 0.13 0.13 0.13 0.11 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 85 | 0 0 0 0 0.01 0.01 0.02 0.03 0.05 0.07 0.12 0.14 0.17 0.21 0.21 0.21 0.17 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 86 | 0 0 0 0 0.01 0.01 0.02 0.04 0.06 0.09 0.14 0.23 0.28 0.34 0.34 0.34 0.24 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 87 | 0 0 0 0.01 0.01 0.02 0.03 0.04 0.07 0.11 0.17 0.28 0.46 0.58 0.58 0.5 0 0 0 0 0 0 0.01 0.01 0.02 0.03 0.03 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 88 | 0 0 0 0.01 0.01 0.02 0.03 0.05 0.08 0.13 0.21 0.34 0.58 1 1 0.72 0 0 0 0 0 0 0.01 0.02 0.03 0.04 0.05 0.06 0.05 0.04 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 89 | 0 0 0 0.01 0.01 0.02 0.03 0.05 0.08 0.13 0.21 0.34 0.58 1 1 1 0 0 0 0 0 0 0 0 0 0 0.08 0.1 0.08 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 90 | 0 0 0 0.01 0.01 0.02 0.03 0.05 0.08 0.13 0.21 0.34 0.5 0.72 1 1 0 0 0 0 0 0 0 0 0 0 0 0.16 0.13 0.09 0.07 0.05 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 91 | 0 0 0 0.01 0.01 0.02 0.03 0.05 0.08 0.13 0.21 0.3 0.43 0.62 0.72 1 1 1 0.99 0.99 0.99 0 0 0 0 0 0 0.26 0.18 0.13 0.09 0.06 0.04 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 92 | 0 0 0 0.01 0.01 0.02 0.03 0.05 0.08 0.13 0.18 0.26 0.37 0.53 0.62 0.72 1 1 1 0.99 0.99 0.99 0 0 0 0 0.43 0.37 0.26 0.18 0 0 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 93 | 0 0 0 0 0.01 0.02 0.03 0.04 0.07 0.09 0 0 0 0 0.53 0.62 0.72 1 1 1 0.99 0.99 0.99 0.99 0.85 0.72 0.62 0.53 
0.37 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 94 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.62 0.72 1 1 1 0.99 0.99 0.99 0.99 0.85 0.72 0.62 0.43 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 95 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.53 0.62 0.72 1 1 1 0.99 0.99 0.99 0.99 0.85 0.72 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 96 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.45 0.53 0.62 0.72 1 1 1 1 1 1 1 0.85 0.72 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 97 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.32 0.37 0.43 0.5 0.58 0.68 0.79 0.79 0 0 1 1 1 0.72 0.62 0.53 0.45 0.32 0.22 0 0 0 0 0 0 0 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 98 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.26 0.3 0.34 0.4 0.46 0.54 0 0 0 0 0 1 1 0.72 0.62 0.53 0.37 0.26 0.18 0.13 0.09 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 99 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.21 0.24 0.28 0.32 0.32 0 0 0 0 0.68 1 1 0.85 0.72 0.62 0.43 0.3 0.21 0.15 0.11 0.08 0.06 0.04 0.03 0.02 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 100 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.2 0.2 0 0 0 0 0 0 0.85 1 1 0.85 0.72 0.5 0.34 0.24 0.17 0.12 0.09 0.06 0.04 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 101 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.5 0.72 1 1 1 0.85 0.58 0.4 0.28 0.2 0.14 0.1 0.06 0.04 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 102 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.62 0.72 1 1 1 0.68 0.46 0.32 0.23 0.16 0.1 0.06 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 103 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0.79 0.54 0.38 0.26 0.16 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 104 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.06 0.09 0.12 0 0 0 0 0 1 0.79 0.63 0.44 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 105 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.05 0.06 0.09 0.12 0.17 0.24 0 0 0 0 1 0.79 0.63 0.51 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 106 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.04 0.06 0.09 0.12 0.17 0.24 0.34 0.5 0.72 0.85 1 1 0.79 0.63 0.51 0.35 0.21 0.13 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 107 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.04 0.06 0.09 0.14 0.2 0.28 0.4 0.58 0.85 1 1 1 0.79 0.63 0.44 0.3 0.21 0.13 0.08 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 108 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.04 0.06 0.09 0.14 0.23 0.32 0.46 0.68 1 1 1 0.99 0.79 0.54 0.38 0.26 0.19 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 109 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.04 0.06 0.09 0.14 0.23 0.38 0.54 0.79 1 1 0.99 0.99 0.68 0.46 0.32 0.23 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 110 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.03 0 0 0 0 0.38 0.63 0.79 1 0.99 0.99 0.85 0.58 0.4 0.28 0.2 0.14 0 0 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 111 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0.23 0.38 0.54 0.79 1 0.99 0.85 0.72 0.5 0.34 0.24 0.17 0.12 0.09 0.06 0.04 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 112 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.23 0.32 0.46 0.68 1 0.85 0.72 0.62 0.43 0.3 0.21 0.15 0.11 0.08 0.06 0.04 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 113 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.23 0.32 0.46 0.58 0.58 0.5 0.43 0.3 0.21 0.15 0.11 0.08 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 114 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.07 0.12 0.16 0.23 0.28 0.34 0.34 0.34 0.3 0.21 0.15 0.11 0.08 0.06 0.04 0.03 0 0 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 115 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.06 0.08 0.12 0.14 0.17 0.21 0.21 0.21 0.21 0.15 0.11 0.08 0.06 0.04 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 116 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.03 0.04 0.06 0.07 0.09 0.11 0.13 0.13 0.13 0.13 0.11 0.08 0.06 0.04 0.03 0.02 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 117 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.06 0.07 0.08 0.08 0.08 0.08 0.07 0.06 0.04 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 118 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.02 0.02 0.02 0.03 0.04 0.04 0.05 0.05 0.05 0 0.04 0.04 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 119 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 120 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 121 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 122 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 123 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 124 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 125 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 126 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 128 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 129 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 130 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 131 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 132 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 133 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 134 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 135 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 136 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 137 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 138 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 139 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 140 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 141 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 142 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 143 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 144 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 145 | 146 | 147 | -------------------------------------------------------------------------------- /maps/heatmap_5_f.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 53 5 | 50 6 | 12 7 | 6 8 | 64 9 | 64 10 | 11 | 1 0 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 12 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 13 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 0 14 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 15 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 16 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 17 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 19 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 21 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 22 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 23 | 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 24 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 25 | 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 26 | 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 27 | 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 28 | 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 
29 | 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 1 1 30 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 31 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 32 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 0 1 1 1 1 1 33 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 34 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 1 1 1 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 35 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 36 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 37 | 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 38 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 39 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 40 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 41 | 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 42 | 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 43 | 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 44 | 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 45 | 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 46 | 0 0 0 0 1 1 1 1 1 1 1 1 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 47 | 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 48 | 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 49 | 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 50 | 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 51 | 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 1 1 1 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 52 | 0 0 0 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 53 | 0 0 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 54 | 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 55 | 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 
1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 56 | 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 57 | 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 1 1 1 0 0 0 58 | 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 59 | 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 60 | 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 1 1 0 0 61 | 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 1 1 1 1 1 1 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 62 | 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 63 | 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 64 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 65 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 66 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 67 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 68 | 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 69 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 70 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 71 | 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 72 | 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 73 | 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 74 | 1 1 1 1 0 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 1 1 1 75 | 76 | 77 | 78 | 64 79 | 64 80 | 81 | 0 0.03 0 0 0.06 0.07 0.1 0.11 0.12 0.13 0.15 0.16 0.18 0 0 0 0 0 0 0 0.02 0.02 0.02 0.02 0.02 0.03 0.03 0.03 0.03 0.03 0.02 0.02 0.02 0 0 0 0 0 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 82 | 0 0 0.04 0.05 0.06 0.08 0.11 0.14 0.15 0.17 0.19 0.21 0.24 0.24 0 0 0 0 0 0 0.02 0.02 0.03 0.03 0.03 0.04 0.04 0.04 0.04 0.04 0.03 0.03 0.03 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 83 | 0.03 0.03 0.04 0.06 0.07 0.09 0.12 0.15 0.2 0.22 0.25 0.28 0.31 0.31 0.31 0.31 0 0 0 0 0 0.03 0.03 0.04 0.04 0.05 0.05 0.06 0.05 0.05 0.04 0.04 0.03 0.03 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 84 | 0.03 0.04 0.05 0.06 0.08 0.1 0.13 0.17 0.22 0.29 0.33 0.37 0.41 0.41 0.41 0.41 0.34 0 0 0 0 0 0.04 0.05 0.05 0.06 0.07 0.07 0.07 0.06 0.05 0.05 0.04 0.04 0.03 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 85 | 0.03 0.04 0.05 0.07 
0.09 0.11 0.15 0.19 0.25 0.33 0.43 0.49 0.55 0.55 0.55 0.5 0.41 0 0 0 0 0 0.06 0.06 0.07 0.08 0.08 0.09 0.08 0.08 0.07 0.06 0.06 0.05 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 86 | 0.04 0.05 0.06 0.08 0.1 0.13 0.16 0.21 0.28 0.37 0.49 0.65 0.74 0.74 0.67 0.62 0.5 0 0 0 0 0.05 0.06 0.08 0.09 0.1 0.11 0.12 0.11 0.1 0.09 0.08 0.07 0.06 0.05 0.04 0.03 0.03 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 87 | 0.04 0.05 0.07 0.08 0.11 0.14 0.18 0.24 0.31 0.41 0.55 0.74 1 1 0.83 0.76 0 0 0 0 0 0 0.07 0.09 0.11 0.13 0.14 0.16 0.14 0.13 0.11 0.1 0.08 0.07 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 88 | 0.04 0.05 0.07 0.08 0.11 0.14 0.18 0.24 0.31 0.41 0.55 0.74 1 1 1 0.83 0 0 0 0 0 0 0.08 0.1 0.13 0.16 0.18 0.2 0.18 0.16 0.15 0.12 0.1 0.08 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 89 | 0.04 0.05 0.07 0.08 0.11 0.14 0.18 0.24 0.31 0.41 0.55 0.67 0.83 1 1 1 0 0 0 0 0 0 0 0 0 0 0.24 0.27 0.24 0.21 0.18 0.15 0.12 0.09 0.07 0.06 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 90 | 0.04 0.05 0.07 0.08 0.11 0.14 0.18 0.24 0.31 0.41 0.5 0.62 0.76 0.83 1 1 0 0 0 0 0 0 0 0 0 0 0 0.35 0.31 0.26 0.21 0.18 0.14 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 91 | 0.04 0.05 0.07 0.08 0.11 0.14 0.18 0.24 0.31 0.38 0.46 0.57 0.7 0.76 0.83 1 1 1 1 1 1 0 0 0 0 0 0 0.46 0.38 0.31 0.26 0.2 0.15 0.12 0.09 0.07 0.05 0.04 0.03 0.03 0.02 0 0 0 0 0 0 0 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 92 | 0.04 0.05 0.07 0.08 0.11 0.14 0.18 0.24 0.29 0.35 0.43 0.52 0.64 0.7 0.76 0.83 0.91 0.99 0.99 0.99 1 1 0 0 0 0 0.62 0.57 0.46 0.38 0 0 0.14 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0 0 0 0 0 0 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 93 | 0 0 0 0 0.1 0.13 0.16 0.2 0.24 0.29 0 0 0 0 0.7 0.76 0.83 0.91 0.99 0.99 0.99 1 1 1 1 0.83 0.76 0.7 0.57 0 0 0 0 0 0 0 0.04 0.03 0 0 0 0 0.02 0.03 0.03 0.03 0.03 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 94 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.76 0.83 0.91 0.99 0.99 0.99 0.99 1 1 1 0.83 0.76 0.62 0 0 0 0 0 0 0 0 0 0 0 0 0 0.03 0.03 0.04 0.04 0.04 0.03 0.03 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 95 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.7 0.76 0.83 0.91 0.99 0.99 0.99 0.99 1 1 1 0.83 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.03 0.04 0.05 0.05 0.04 0.04 0.04 0.03 0 0 0 0 0 0 0 0 0 0 0 0 0 0 96 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.64 0.7 0.76 0.83 0.91 0.99 0.99 0.99 0.99 1 1 1 0.83 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.06 0.06 0.05 0.05 0.05 0.04 0.03 0.03 0 0 0 0 0 0 0 0 0 0 0 0 97 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.52 0.57 0.62 0.67 0.74 0.8 0.88 0.88 0 0 1 1 1 0.83 0.76 0.7 0.64 0.59 0.54 0 0 0 0 0 0 0 0.12 0.09 0.07 0.06 0.06 0.06 0.05 0.05 0.04 0.03 0.03 0 0 0 0 0 0 0 0 0 0 0 98 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.46 0.5 0.55 0.6 0.65 0.71 0 0 0 0 0 1 0.91 0.83 0.76 0.7 0.64 0.59 0.54 0.44 0.36 0.3 0.25 0.2 0.17 0.13 0.1 0.08 0.07 0.07 0.07 0.07 0.05 0.05 0.04 0.03 0.03 0 0 0 0 0 0 0 0 0 0 99 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.41 0.45 0.49 0.53 0.53 0 0 0 0 0.8 1 1 0.91 0.83 0.76 0.7 0.64 0.59 0.48 0.39 0.32 0.27 0.22 0.17 0.13 0.1 0 0 0 0 0.07 0.06 0.05 0.04 0.03 0.03 0.02 0.02 0 0 0 0 0 0 0 0 100 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.4 0.4 0 0 0 0 0 0 1 1 1 0.91 0.83 0.76 0.7 0.64 0.52 0.43 0.35 0.29 0.22 0.17 0 0 0 0 0 0.07 0.07 0.06 0.05 0.04 0.04 0.03 0.02 0.02 0 0 0 0 0 0 0 0 101 | 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.67 0.83 1 1 1 0.91 0.83 0.76 0.7 0.57 0.46 0.38 0.29 0.22 0 0 0 0 0 0.07 0.07 0.07 0.06 0.06 0.05 0.04 0.03 0.02 0.02 0 0 0 0 0 0 0 0 102 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.76 0.83 1 1 1 0.91 0.83 0.76 0.62 0.5 0.38 0.29 0 0 0 0 0 0.07 0.07 0.07 0.07 0.06 0.06 0.05 0 0 0 0 0 0 0 0 0 0 0 0 103 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0.99 0.91 0.83 0.67 0.5 0 0 0 0 0 0 0.06 0.07 0.07 0.07 0.07 0.06 0 0 0 0 0 0 0 0 0 0 0 0 0 0 104 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.15 0.18 0.22 0 0 0 0 0 1 0.99 0.99 0.91 0 0 0 0 0 0 0 0 0.06 0.07 0.07 0.07 0.07 0.06 0 0 0 0 0 0 0 0 0 0 0 0 0 0 105 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.11 0.14 0.18 0.22 0.26 0.32 0 0 0 0 1 0.99 0.99 0.99 0 0 0 0 0 0 0 0 0 0.07 0.07 0.07 0.07 0 0 0 0 0.01 0.01 0 0 0 0 0 0 0 0 0 106 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.07 0.09 0.12 0.15 0.2 0.26 0.32 0.38 0.47 0.58 0.71 0.88 1 0.99 0.99 0.99 0.99 0.8 0.65 0 0 0 0 0 0.11 0.1 0.08 0.07 0.07 0 0 0 0 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 107 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0.04 0.06 0.07 0.09 0.12 0.15 0.2 0.26 0.34 0.42 0.51 0.63 0.77 0.88 1 0.99 0.99 0.99 0.99 0.88 0.71 0.53 0 0 0 0 0.14 0.11 0.1 0.07 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 108 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0.04 0.06 0.07 0.09 0.12 0.15 0.2 0.26 0.34 0.45 0.56 0.68 0.77 0.88 1 0.99 0.99 0.99 0.99 0.88 0 0 0 0 0.22 0.2 0.17 0.14 0.11 0 0 0 0 0 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 109 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0.04 0.06 0.07 0.09 0.12 0.15 0.2 0.26 0.34 0.45 0.61 0.68 0.77 0.88 1 0.99 0.99 0.99 0.99 0 0 0 0 0 0.29 0.24 0.2 0.15 0.12 0 0 0 0 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 110 | 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.06 0.08 0 0 0 0 0.34 0.45 0.56 0.68 0.77 0.88 1 0.99 0.99 0.99 0.99 0.99 0 0 0.53 0.43 0.35 0.29 0.22 0.17 0.13 0.1 0 0 0 0 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 111 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0.04 0 0 0 0 0 0.26 0.34 0.42 0.51 0.63 0.77 0.88 1 0.99 0.99 0.99 0.99 0.99 0.99 0.8 0.65 0.53 0.43 0.33 0.25 0.19 0.15 0.11 0.09 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 112 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0.03 0 0 0 0 0.26 0.32 0.38 0.47 0.58 0.71 0.88 1 0.99 0.99 0.99 0.99 0.99 0.99 0.88 0.71 0.58 0.43 0.33 0.25 0.19 0.15 0.11 0.09 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 113 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0.03 0 0 0 0 0 0 0.29 0.35 0.43 0.53 0.65 0.8 1 1 0.99 0.99 0.99 0.99 0.99 0.88 0.77 0.58 0.43 0.33 0.25 0.19 0.15 0.11 0.09 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 114 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.02 0 0 0 0 0.17 0.22 0.27 0.33 0.4 0.49 0.6 0.74 1 1 1 0.99 0.99 0.99 0.99 0.88 0 0 0.38 0.29 0.22 0.17 0.13 0.1 0.08 0.06 0.05 0.04 0 0 0 0.01 0.01 0.01 0 0 0 0 0 0 115 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0 0 0 0 0.17 0.21 0.25 0.3 0.37 0.45 0.55 0.67 0.83 1 1 1 0.99 0.99 0.99 0 0 0 0 0 0.2 0.15 0.12 0.09 0.07 0.06 0 0 0 0 0 0 0.01 0.01 0 0 0 0 0 0 116 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0.1 0.13 0.16 0.19 0.23 0.28 0.34 0.41 0.5 0.62 0.76 0.83 1 1 1 0.99 0.99 0.99 0 0 0 0 0 0.14 0.11 0.08 0.06 0.05 0 0 0 0 0 0 0.01 0.01 0 0 0 0 0 0 117 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0 0 0 0 0.06 0.08 0.1 0.12 0.15 0.18 0.21 0.26 0.31 0.38 0.46 0.57 0.7 0.76 0.83 1 1 
1 0.99 0.99 0.99 0 0 0 0 0 0.1 0.07 0 0.04 0 0 0 0 0 0 0 0 0 0 0 0 0 0 118 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0.04 0.06 0.07 0.08 0.1 0.12 0.15 0.18 0.21 0.26 0.31 0.38 0.46 0 0.7 0.76 0.83 1 1 1 0.99 0.99 0.99 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 119 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.06 0.07 0.08 0.1 0.12 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0.99 0.99 0.99 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 120 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.02 0.03 0.03 0.04 0.05 0.06 0.07 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0.99 0.99 0.99 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 121 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0.02 0.03 0.03 0.04 0.05 0.06 0 0 0 0 0.01 0.01 0 0 0 0 0 0 0 0 0 0 1 1 0.99 0.99 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 122 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.02 0.03 0.03 0.04 0.04 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 1 1 1 0.99 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 123 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.02 0.02 0.03 0.03 0.03 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0.5 0.67 0.83 1 1 1 0.99 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 124 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.02 0.02 0.02 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0.38 0.5 0.62 0.76 0.83 1 1 1 0.99 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 125 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.02 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0.38 0.46 0.57 0.7 0.76 0.83 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 126 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0.35 0.43 0.52 0.64 0.7 0.76 0.83 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0.27 0.32 0.39 0.48 0.59 0.64 0.7 0.76 0.83 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 128 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0.2 0.25 0.3 0.36 0.44 0.54 0.59 0.64 0.7 0.76 0.83 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 129 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.19 0.23 0.27 0.33 0.41 0.5 0.54 0.59 0.64 0.7 0.76 0.83 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 130 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.16 0.19 0.23 0.27 0.33 0.41 0.44 0 0 0.64 0.7 0.76 0.83 1 1 0 0 0 0 0 0 0.37 0.28 0 0 0 0 0.08 0.06 131 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.16 0.19 0.23 0.27 0 0 0 0 0 0 0.7 0.76 0.83 1 1 1 1 1 0.74 0.55 0.41 0.31 0.24 0.18 0.14 0.11 0.08 0.07 132 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.57 0.62 0.67 0.74 0.8 0.88 0.88 0.8 0.65 0.49 0.37 0.28 0.21 0.16 0.13 0.1 0.08 0.06 133 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.55 0.6 0.65 0.71 0.71 0.65 0.53 0.43 0.33 0.25 0.19 0.15 0.11 0.09 0.07 0.05 134 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.45 0.49 0.53 0.53 0.53 0.53 0.43 0.35 0.29 0.22 0.17 0.13 0.1 0.08 0.06 0.05 135 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.37 0.4 0.4 0.4 0.4 0.4 0.35 0.29 0.24 0.2 0.15 0.12 0.09 0.07 0.06 0.04 136 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.19 0.25 0.3 0.3 0.3 0.3 0.3 0.3 0.27 0.24 0.2 0.17 0.14 0.11 0.08 0.06 0.05 0.04 137 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.17 0.21 0.23 0.23 0.23 0.23 0.23 0.23 0.21 0.18 0.17 0.14 0.11 0.1 0.07 0.06 0.04 0.03 138 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.14 0.16 0.18 0.18 0.18 0.18 0.18 0.18 0.16 0.14 0.13 0.11 0.1 0.08 0.07 0.05 0.04 0.03 139 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.12 0.14 0.14 0.14 0.14 0.14 0.14 0.12 0.11 0.1 0.09 0.08 0.07 0.06 0.05 0.04 0.03 140 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.1 0.1 0.1 0.1 0.1 0.1 0.09 0.08 0.08 0.07 0.06 0.06 0.05 0.04 0.03 0.03 141 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.08 0.08 0.08 0.08 0.08 0.08 0.07 0.07 0.06 0.05 0.05 0.04 0.04 0.03 0.03 0.02 142 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.04 0.06 0.06 0.06 0.06 0.06 0.06 0.06 0.06 0.05 0.05 0.04 0.04 0.03 0.03 0.03 0 0 143 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.04 0.04 0.05 0.05 0.05 0.05 0.05 0.05 0.04 0.04 0.04 0.03 0.03 0 0 0 0.02 0.01 144 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.03 0.03 0.04 0.04 0.04 0.04 0.04 0 0 0 0 0 0 0.02 0.02 0 0 0 145 | 146 | 147 | -------------------------------------------------------------------------------- /maps/heatmap_7_f.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 14 5 | 63 6 | 13 7 | 30 8 | 64 9 | 64 10 | 11 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 12 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 13 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 14 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 15 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 16 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 17 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 19 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 20 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 21 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 22 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 23 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 24 | 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 25 | 0 0 0 0 0 1 1 1 1 1 1 1 0 0 
0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 26 | 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 27 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 28 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 29 | 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 1 1 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 30 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 1 1 1 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 31 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 1 1 1 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 32 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 33 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 34 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 35 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 36 | 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 37 | 1 1 1 1 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 38 | 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 39 | 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 40 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 41 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 42 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 43 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 44 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 45 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 46 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 47 | 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 48 | 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 49 | 0 0 0 0 0 1 1 1 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 50 | 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 51 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 52 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 53 | 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 54 | 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 55 | 0 0 0 0 0 0 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 56 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 57 | 0 0 0 0 0 0 0 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 58 | 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 59 | 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 1 1 1 1 0 60 | 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 1 1 1 1 0 61 | 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 62 | 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 63 | 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 64 | 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 65 | 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 66 | 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 0 67 | 0 0 0 0 0 0 1 1 1 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 68 | 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 69 | 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 70 | 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 71 | 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 72 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 73 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 74 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 75 | 76 | 77 | 78 | 64 79 | 64 80 | 81 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 82 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 83 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 84 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 85 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 86 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 87 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 88 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 89 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 90 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 91 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 92 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 93 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 94 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 95 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 96 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 97 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 98 | 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 99 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 100 | 0 0 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0 0 0 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 101 | 0 0 0 0 0 0 0 0 0 0 0 0 0.02 0.02 0.02 0 0 0 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 102 | 0 0 0 0 0 0 0 0 0 0 0 0 0.03 0.03 0.03 0.03 0.03 0.02 0.02 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 103 | 0 0 0 0 0 0 0.01 0 0 0 0 0 0.04 0.04 0.05 0.05 0.04 0.03 0.02 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 104 | 0 0 0 0 0 0.01 0.01 0.02 0.02 0.03 0.04 0.05 0.06 0.06 0.07 0.07 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 105 | 0 0 0 0 0 0.01 0.01 0.02 0.03 0.04 0.05 0.07 0.08 0.09 0.1 0.1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 106 | 0 0 0 0 0 0.01 0.01 0.02 0.03 0.04 0.06 0.09 0.12 0.13 0.15 0.15 0 0 0 0 0 0 0 0 0 0 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 107 | 0 0 0 0 0 0 0 0.02 0 0 0 0 0.17 0.2 0.22 0.22 0.22 0.21 0.19 0.16 0 0 0 0 0 0 0.02 0.01 0.01 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 108 | 0 0 0 0 0 0 0 0 0 0 0 0 0.25 0.29 0.32 0.33 0.32 0.29 0.26 0.23 0 0 0 0 0 0 0.02 0.02 0.02 0.02 0.01 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 109 | 0.04 0.05 0.05 0 0 0 0 0 0 0 0 0 0.36 0.44 0.48 0.47 0.44 0.4 0.36 0.3 0 0 0 0 0 0 0.04 0.03 0.03 0.02 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 110 | 0.05 0.06 0.08 0.09 0.11 0.12 0.12 0 0 0 0 0 0.51 0.66 0.69 0.66 0.6 0.54 0.47 0.4 0 0 0 0 0 0.06 0.05 0.05 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 111 | 0.06 0.08 0.1 0.12 0.15 0.17 0.18 0 0 0 0 0 0.66 1 1 0.84 0.76 0.68 0.6 0.52 0 0 0 0 0 0.08 0.08 0.06 0.05 0.04 0.03 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 112 | 0.08 0.1 0.13 0.16 0.21 0.25 0.28 0 0 0 0 0 0.69 1 1 1 0.89 0.81 0.72 0.63 0 0 0 0 0 0.13 0.11 0.09 0.07 0.05 0.04 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 113 | 0.1 0.13 0.17 0.21 0.26 0.32 0.37 0.41 0.45 0.47 0.49 0.5 0.65 0.84 1 1 1 0.91 0.83 0.74 0.64 0.54 0.44 0.34 0.23 0.16 0.14 0.11 0.08 0.07 0.05 0.04 0.03 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 114 | 0.12 0.16 0.2 0.26 0.32 0.39 0.44 0.48 0.5 0.5 0.49 0.46 0.59 0.75 0.89 1 1 1 0.92 0.84 0.74 0.63 0.52 0.38 0.25 0.18 0.16 0.13 0.1 0.08 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 115 | 0.15 0.19 0.24 0.31 0.39 0.45 0.49 0.5 0.5 0.48 0.44 0.4 0.51 0.66 0.79 0.9 1 1 1 0.92 0.83 0.72 0.6 0 0 0 0.18 0.15 0.12 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 116 | 0.17 0.22 0.28 0.36 0.45 0.5 0.49 0.47 0.45 0.41 0.37 0.33 0.44 0.56 0.69 0.8 0.9 1 1 1 0.91 0.81 0.69 0 0 0 0.18 0.16 0.13 0.11 0.09 0.07 0.06 0.04 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 117 | 0.2 0.25 0.32 0.41 0.5 0 0 0 0.34 0.33 0.3 0.28 0.37 0.47 0.58 0.69 0.79 0.89 1 1 1 0.89 0.77 0 0 0 0.19 0.16 0.14 0.12 0.1 0.08 0.06 0.05 0.04 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 118 | 0.22 0.29 0.36 0.45 0.5 0 0 0 0.23 0.24 0.22 0.23 0.3 0.38 0.47 0.56 0.66 0.75 0.84 1 1 1 0.85 0.67 0.49 0.35 0.25 0.18 0.15 0.13 0.11 0.09 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 119 | 0.25 0.32 0.39 0.47 0.5 0 0 0 0.16 0.17 0.16 0 0 0 0 0 0 0 0 0 0 1 1 0.73 0.55 0.4 0.29 0.21 0.18 0.15 0.12 0.1 0.08 0.06 0.05 0.03 0.02 0.02 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 120 | 0.27 0.34 0.41 0.47 0.5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0.78 0.58 0.43 0.31 0.24 0.21 0.18 0.14 0.11 0.08 0.06 0.05 0.04 0.03 0.02 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 121 | 0.29 0.36 0.42 0.48 0.5 0.46 0.36 0 0 0 0 0 0.07 0.1 0.13 0.17 0.23 0.31 0.41 0.54 0.71 1 1 0.82 0.61 0.44 0.31 0.26 0.24 0.2 0.16 0.13 0.1 0.07 0.05 0.04 0.03 0.02 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 122 | 0.3 0.37 0.43 0.48 0.5 0.48 0.42 0 0 0 0 0 0.08 0.11 0.15 0.21 0.28 0.37 0.5 0.65 0.82 1 1 0.83 0.62 0.44 0.3 0.27 0.26 0.23 0.18 0.14 0.11 0.08 0.06 0.04 0.03 0.02 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 123 | 0.31 0.38 0.43 0.48 0.5 0.49 0.44 0.34 0.22 0 0 0 0 0.13 0.18 0.24 0.32 0.43 0.56 0.71 0.86 1 1 0.8 0.59 0.41 0.28 0.26 0.27 0.25 0.2 0.15 0.11 0.09 0.06 0.05 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 124 | 0.32 0.38 0.43 0.48 0.5 0.49 0.45 0 0 0 0 0 0 0.14 0.2 0.27 0.36 0.46 0.6 0.75 0.89 1 1 0.7 0.51 0.36 0 0 0 0.27 0.22 0.16 0.12 0.09 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 125 | 0.32 0.38 0.43 0.48 0.5 0.5 
0.46 0 0 0 0 0 0 0.16 0.21 0.29 0.39 0.5 0.63 0.77 0.9 1 0 0 0 0 0 0 0 0.27 0.23 0.17 0.13 0.1 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 126 | 0.33 0.38 0.43 0.47 0.5 0.5 0.46 0 0 0 0 0 0.12 0.17 0.23 0.31 0.41 0.52 0.66 0.79 0.91 1 0 0 0 0 0 0 0 0.27 0.23 0.18 0.13 0.1 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 | 0.33 0.38 0.43 0.47 0.5 0.5 0.47 0 0 0 0 0 0.13 0.17 0.23 0.32 0.42 0.54 0.65 0.8 0.92 1 0 0 0 0 0.23 0.24 0.27 0.26 0.22 0.18 0.14 0.1 0.07 0 0 0 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 128 | 0.33 0.38 0.43 0.47 0.5 0.5 0.47 0.4 0.29 0.21 0 0 0.13 0.18 0.24 0.32 0.42 0.54 0.68 0.81 0.92 1 1 0.71 0.54 0.41 0.3 0.26 0.27 0.25 0.21 0.17 0.13 0.1 0.07 0 0 0 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 129 | 0.32 0.38 0.43 0.47 0.5 0.5 0.47 0.41 0.33 0.24 0 0 0.13 0.18 0.24 0.32 0.42 0.54 0.68 0.81 0.92 1 1 0.82 0.64 0.49 0.36 0.27 0.26 0.24 0.21 0.17 0.13 0.1 0.07 0 0 0 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 130 | 0.32 0.37 0.42 0.47 0.49 0.5 0.47 0.41 0.33 0.25 0 0 0.13 0.17 0.23 0.31 0.41 0.53 0.67 0.81 0.92 1 1 0.86 0.71 0.55 0.41 0.3 0.26 0.23 0.19 0.16 0.12 0.1 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 131 | 0.31 0.37 0.42 0.46 0.49 0.5 0.47 0.4 0.32 0.24 0 0 0.12 0.17 0.22 0.3 0.4 0.51 0.66 0.8 0.92 1 1 0.89 0.74 0.59 0.44 0.33 0.25 0.22 0.18 0.15 0.12 0.09 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 132 | 0.3 0.36 0.41 0.46 0.49 0.5 0.46 0.39 0.3 0.22 0 0 0.11 0.16 0.21 0.29 0.38 0.5 0.63 0.78 0.91 1 1 0.9 0.76 0.61 0.46 0.34 0.25 0.2 0.17 0.14 0.11 0.08 0.06 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 133 | 0.29 0.35 0.4 0.45 0.49 0.5 0.44 0.34 0.26 0 0 0 0.1 0.14 0.19 0.26 0.35 0.47 0.6 0.74 0.9 1 1 0.91 0.78 0.62 0.47 0.35 0.25 0.19 0.16 0.13 0.1 0.08 0.06 0.04 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 134 | 0.28 0.34 0.39 0.44 0.48 0.5 0 0 0 0 0 0 0 0 0 0.24 0.32 0.42 0.55 0.71 0.86 1 1 0.92 0.78 0.62 0.47 0.34 0.25 0.18 0.14 0.11 0.09 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 135 | 0.27 0.32 0.38 0.43 0.48 0.5 0 0 0 0 0 0 0 0 0 0.2 0.28 0.37 0.49 0.64 0.81 1 1 0.92 0.76 0.6 0.45 0.33 0.24 0.17 0.13 0.1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 136 | 0.25 0.31 0.37 0.42 0.47 0.5 0 0 0 0 0 0 0 0 0 0.17 0.23 0.31 0.41 0.54 0.71 1 1 0.9 0.74 0.56 0.42 0.31 0.23 0.16 0.12 0.09 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 137 | 0.23 0.29 0.35 0.41 0.46 0.5 0 0 0 0.12 0 0 0 0 0 0 0 0 0 0 0 0 1 0.87 0.68 0.52 0.39 0.29 0.21 0.15 0.11 0.08 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 138 | 0.21 0.26 0.32 0.39 0.45 0.49 0.46 0.34 0.22 0.14 0 0 0 0 0 0 0 0 0 0 0 0 1 0.8 0.62 0.47 0.35 0.26 0.19 0.14 0.1 0.07 0.06 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 139 | 0.19 0.24 0.29 0.36 0.43 0.48 0.48 0 0 0 0 0.19 0.26 0.35 0.45 0.55 0.65 0.74 0.82 1 1 1 1 0.71 0.55 0.42 0.32 0.24 0.17 0.13 0.09 0.07 0.05 0.04 0.03 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 140 | 0.16 0.21 0.26 0.32 0.39 0.47 0.49 0 0 0 0 0.25 0.32 0.43 0.55 0.68 0.79 0.88 1 1 1 1 0.8 0.63 0.48 0.37 0.28 0.21 0.15 0.11 0.08 0.06 0.04 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 141 | 0.14 0.18 0.22 0.28 0.35 0.43 0.5 0 0 0 0 0.36 
0.37 0.51 0.67 0.81 0.91 1 1 1 1 0.83 0.69 0.54 0.42 0.32 0.24 0.18 0.13 0.1 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 142 | 0.12 0.15 0.19 0.24 0.29 0.36 0.44 0.5 0.49 0.49 0.48 0.46 0.44 0.59 0.78 1 1 1 1 0.92 0.83 0.72 0.59 0.46 0.35 0.27 0.21 0.15 0.12 0.09 0.06 0.05 0.03 0.02 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 143 | 0.1 0.12 0.16 0.2 0.24 0.3 0.37 0.44 0.47 0.49 0.5 0.5 0.49 0.66 1 1 1 1 0.88 0.8 0.7 0.6 0.49 0.38 0.29 0.23 0.17 0.13 0.1 0.07 0.05 0.04 0.03 0.02 0.02 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 144 | 0.08 0.1 0.13 0.16 0.2 0.24 0.3 0.36 0.4 0.44 0.46 0.48 0.49 0.64 1 1 1 0.83 0.75 0.67 0.58 0.48 0.39 0.31 0.24 0.19 0.14 0.11 0.08 0.06 0.05 0.03 0.03 0.02 0.01 0.01 0.01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 145 | 146 | 147 | -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/models/__init__.py -------------------------------------------------------------------------------- /models/autoencoder.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import wandb 3 | from torch import nn 4 | import pytorch_lightning as pl 5 | 6 | from modules.encoder import Encoder 7 | from modules.decoder import Decoder 8 | from modules.attention import SpatialTransformer 9 | from modules.pos_emb import PosEmbeds 10 | 11 | 12 | def base_loss(criterion, na_outputs, va_outputs): 13 | return criterion(na_outputs.histories, va_outputs.paths) 14 | 15 | 16 | def adv_loss(criterion, na_outputs, va_outputs): 17 | loss_1 = criterion( 18 | torch.clamp(na_outputs.histories - na_outputs.paths - va_outputs.paths, 0, 1), 19 | torch.zeros_like(na_outputs.histories) 20 | ) 21 | na_cost = (na_outputs.paths * na_outputs.g).sum((1, 2, 3), keepdim=True) 22 | va_cost = (va_outputs.paths * va_outputs.g).sum((1, 2, 3), keepdim=True) 23 | cost_coefs = (na_cost / va_cost - 1).view(-1, 1, 1, 1) 24 | loss_2 = criterion( 25 | (na_outputs.paths - va_outputs.paths) * cost_coefs, 26 | torch.zeros_like(na_outputs.histories) 27 | ) 28 | return loss_1 + loss_2 29 | 30 | 31 | class Autoencoder(pl.LightningModule): 32 | def __init__(self, 33 | in_channels=2, 34 | out_channels=1, 35 | hidden_channels=64, 36 | attn_blocks=4, 37 | attn_heads=4, 38 | cnn_dropout=0.15, 39 | attn_dropout=0.15, 40 | downsample_steps=3, 41 | resolution=(64, 64), 42 | mode='f', 43 | *args, 44 | **kwargs): 45 | super().__init__() 46 | heads_dim = hidden_channels // attn_heads 47 | self.encoder = Encoder(in_channels, hidden_channels, downsample_steps, cnn_dropout) 48 | self.pos = PosEmbeds( 49 | hidden_channels, 50 | (resolution[0] // 2**downsample_steps, resolution[1] // 2**downsample_steps) 51 | ) 52 | self.transformer = SpatialTransformer( 53 | hidden_channels, 54 | attn_heads, 55 | heads_dim, 56 | attn_blocks, 57 | attn_dropout 58 | ) 59 | self.decoder_pos = PosEmbeds( 60 | hidden_channels, 61 | (resolution[0] // 2**downsample_steps, resolution[1] // 2**downsample_steps) 62 | ) 63 | self.decoder = Decoder(hidden_channels, out_channels, downsample_steps, cnn_dropout) 64 | 65 | self.recon_criterion = nn.L1Loss() if mode == 'h' else nn.MSELoss() 66 | self.mode = mode 67 | self.k = 64*64 if mode == 'h' else 1 68 | 69 | self.automatic_optimization = 
False 70 | self.save_hyperparameters() 71 | 72 | def forward(self, inputs): 73 | x = self.encoder(inputs) 74 | x = self.pos(x) 75 | x = self.transformer(x) 76 | x = self.decoder_pos(x) 77 | x = self.decoder(x) 78 | return x 79 | 80 | def step(self, batch, batch_idx, regime): 81 | map_design, start, goal, gt_hmap = batch 82 | inputs = torch.cat([map_design, start + goal], dim=1) if self.mode in ('f', 'nastar') else torch.cat([map_design, goal], dim=1) 83 | predictions = self(inputs) 84 | 85 | loss = self.recon_criterion((predictions + 1) / 2 * self.k, gt_hmap) 86 | self.log(f'{regime}_recon_loss', loss, on_step=False, on_epoch=True) 87 | return loss 88 | 89 | def training_step(self, batch, batch_idx): 90 | optimizer = self.optimizers() 91 | sch = self.lr_schedulers() 92 | 93 | loss = self.step(batch, batch_idx, 'train') 94 | optimizer.zero_grad() 95 | loss.backward() 96 | optimizer.step() 97 | sch.step() 98 | 99 | self.log('train_loss', loss, on_step=False, on_epoch=True) 100 | self.log('lr', sch.get_last_lr()[0], on_step=True, on_epoch=False) 101 | return loss 102 | 103 | def validation_step(self, batch, batch_idx): 104 | loss = self.step(batch, batch_idx, 'val') 105 | self.log('val_loss', loss, on_step=False, on_epoch=True) 106 | return loss 107 | 108 | def configure_optimizers(self): 109 | optimizer = torch.optim.Adam(self.parameters(), lr=0.0004) 110 | scheduler = torch.optim.lr_scheduler.OneCycleLR( 111 | optimizer, max_lr=4e-4, total_steps=self.trainer.estimated_stepping_batches 112 | ) 113 | return [optimizer], [scheduler] 114 | 115 | 116 | class DemAutoencoder(Autoencoder): 117 | def __init__(self, 118 | in_channels=5, 119 | out_channels=2, 120 | hidden_channels=64, 121 | attn_blocks=6, 122 | attn_heads=4, 123 | cnn_dropout=0.15, 124 | attn_dropout=0.15, 125 | downsample_steps=3, 126 | resolution=(128, 128), 127 | *args, 128 | **kwargs): 129 | super().__init__(in_channels, 130 | out_channels, 131 | hidden_channels, 132 | attn_blocks, 133 | attn_heads, 134 | cnn_dropout, 135 | attn_dropout, 136 | downsample_steps, 137 | resolution) 138 | self.recon_criterion = nn.MSELoss() 139 | 140 | def step(self, batch, batch_idx, regime): 141 | dem, rgb, sg, focal = batch 142 | inputs = torch.cat([rgb, sg], dim=1) 143 | gt_outputs = torch.cat([dem, focal], dim=1) 144 | outputs = (self(inputs) + 1) / 2 145 | 146 | loss = self.recon_criterion(outputs, gt_outputs) 147 | self.log(f'{regime}_recon_loss', loss, on_step=False, on_epoch=True) 148 | return loss 149 | 150 | 151 | 152 | class PathLogger(pl.Callback): 153 | def __init__(self, val_batch, num_samples=20, mode='f'): 154 | super().__init__() 155 | map_design, start, goal, gt_hmap = val_batch[:num_samples] 156 | inputs = torch.cat([map_design, start + goal], dim=1) if mode == 'f' else torch.cat([map_design, goal], dim=1) 157 | self.val_samples = inputs[:num_samples] 158 | if mode == 'f': 159 | self.hm = gt_hmap[:num_samples] 160 | elif mode == 'h': 161 | self.hm = (gt_hmap / gt_hmap.amax(dim=(2, 3), keepdim=True))[:num_samples] 162 | else: 163 | self.hm = gt_hmap[:num_samples] 164 | 165 | 166 | def on_validation_epoch_end(self, trainer, pl_module): 167 | val_samples = self.val_samples.to(device=pl_module.device) 168 | prediction = (pl_module(val_samples) + 1) / 2 169 | if pl_module.mode == 'h': 170 | prediction = prediction * 64 * 64 171 | 172 | trainer.logger.experiment.log({ 173 | 'data': [wandb.Image(x) for x in torch.cat([self.val_samples, self.hm], dim=1)], 174 | 'predictions': [wandb.Image(x) for x in torch.cat([val_samples, prediction], 
dim=1)] 175 | }) 176 | 177 | class DemPathLogger(pl.Callback): 178 | def __init__(self, val_batch, num_samples=20): 179 | super().__init__() 180 | dem, rgb, sg, focal = val_batch[:num_samples] 181 | inputs = torch.cat([rgb, sg], dim=1) 182 | self.val_samples = inputs[:num_samples] 183 | self.hm = torch.cat([dem, focal], dim=1)[:num_samples] 184 | 185 | def on_validation_epoch_end(self, trainer, pl_module): 186 | val_samples = self.val_samples.to(device=pl_module.device) 187 | prediction = (pl_module(val_samples) + 1) / 2 188 | 189 | # log images, gt_dems, start/goals and predictions 190 | trainer.logger.experiment.log({ 191 | 'rgb': [wandb.Image(x) for x in torch.unbind(val_samples[:, :3])], 192 | 'sg': [wandb.Image(x) for x in torch.unbind(val_samples[:, 3:])], 193 | 'dem': [wandb.Image(x) for x in torch.unbind(self.hm[:, :1])], 194 | 'focal': [wandb.Image(x) for x in torch.unbind(self.hm[:, 1:])], 195 | 'pred_dem': [wandb.Image(x) for x in torch.unbind(prediction[:, :1])], 196 | 'pred_focal': [wandb.Image(x) for x in torch.unbind(prediction[:, 1:])] 197 | }) 198 | -------------------------------------------------------------------------------- /modules/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/modules/__init__.py -------------------------------------------------------------------------------- /modules/attention.py: -------------------------------------------------------------------------------- 1 | from inspect import isfunction 2 | 3 | import torch 4 | from torch import nn, einsum 5 | from torch.nn import functional as F 6 | from einops import rearrange, repeat 7 | 8 | from utils.checkpoint import checkpoint 9 | 10 | 11 | def default(val, d): 12 | if val is not None: 13 | return val 14 | return d() if isfunction(d) else d 15 | 16 | 17 | def Normalize(in_channels): 18 | return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True) 19 | 20 | 21 | class FeedForward(nn.Module): 22 | def __init__(self, dim, dim_out=None, mult=4, dropout=0.2): 23 | super().__init__() 24 | inner_dim = int(dim * mult) 25 | dim_out = default(dim_out, dim) 26 | project_in = nn.Sequential( 27 | nn.Linear(dim, inner_dim), 28 | nn.GELU() 29 | ) 30 | 31 | self.net = nn.Sequential( 32 | project_in, 33 | nn.Dropout(dropout), 34 | nn.Linear(inner_dim, dim_out) 35 | ) 36 | 37 | def forward(self, x): 38 | return self.net(x) 39 | 40 | 41 | class CrossAttention(nn.Module): 42 | def __init__(self, query_dim, context_dim=None, heads=8, dim_head=64, dropout=0.3): 43 | super().__init__() 44 | inner_dim = dim_head * heads 45 | context_dim = default(context_dim, query_dim) 46 | 47 | self.scale = dim_head ** -0.5 48 | self.heads = heads 49 | 50 | self.to_q = nn.Linear(query_dim, inner_dim, bias=False) 51 | self.to_k = nn.Linear(context_dim, inner_dim, bias=False) 52 | self.to_v = nn.Linear(context_dim, inner_dim, bias=False) 53 | 54 | self.to_out = nn.Sequential( 55 | nn.Linear(inner_dim, query_dim), 56 | nn.Dropout(dropout) 57 | ) 58 | 59 | def forward(self, x, context=None): 60 | h = self.heads 61 | 62 | q = self.to_q(x) 63 | context = default(context, x) 64 | k = self.to_k(context) 65 | v = self.to_v(context) 66 | 67 | q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q, k, v)) 68 | 69 | sim = einsum('b i d, b j d -> b i j', q, k) * self.scale 70 | 71 | attn = sim.softmax(dim=-1) 72 | 73 | out = einsum('b i j, b j d -> b i 
d', attn, v) 74 | out = rearrange(out, '(b h) n d -> b n (h d)', h=h) 75 | return self.to_out(out) 76 | 77 | 78 | class BasicTransformerBlock(nn.Module): 79 | def __init__(self, dim, n_heads, d_head, dropout=0.3, context_dim=None, checkpoint=False): 80 | super().__init__() 81 | self.attn1 = CrossAttention(query_dim=dim, heads=n_heads, dim_head=d_head, dropout=dropout) 82 | self.ff = FeedForward(dim, dropout=dropout) 83 | self.attn2 = CrossAttention(query_dim=dim, context_dim=context_dim, 84 | heads=n_heads, dim_head=d_head, dropout=dropout) 85 | self.norm1 = nn.LayerNorm(dim) 86 | self.norm2 = nn.LayerNorm(dim) 87 | self.norm3 = nn.LayerNorm(dim) 88 | self.checkpoint = checkpoint 89 | 90 | def forward(self, x, context=None): 91 | return checkpoint(self._forward, (x, context), self.parameters(), self.checkpoint) 92 | 93 | def _forward(self, x, context=None): 94 | x = self.attn1(self.norm1(x)) + x 95 | x = self.attn2(self.norm2(x), context=context) + x 96 | x = self.ff(self.norm3(x)) + x 97 | return x 98 | 99 | 100 | class SpatialTransformer(nn.Module): 101 | """ 102 | Transformer block for image-like data. 103 | First, project the input (aka embedding) 104 | and reshape to b, t, d. 105 | Then apply standard transformer action. 106 | Finally, reshape to image 107 | """ 108 | def __init__(self, in_channels, n_heads, d_head, 109 | depth=3, dropout=0.3, context_dim=None): 110 | super().__init__() 111 | self.in_channels = in_channels 112 | inner_dim = n_heads * d_head 113 | self.norm = Normalize(in_channels) 114 | 115 | self.proj_in = nn.Conv2d( 116 | in_channels, 117 | inner_dim, 118 | kernel_size=1, 119 | stride=1, 120 | padding=0 121 | ) 122 | 123 | self.transformer_blocks = nn.ModuleList( 124 | [BasicTransformerBlock(inner_dim, n_heads, d_head, dropout=dropout, context_dim=context_dim) 125 | for d in range(depth)] 126 | ) 127 | 128 | self.proj_out = nn.Conv2d( 129 | inner_dim, 130 | in_channels, 131 | kernel_size=1, 132 | stride=1, 133 | padding=0 134 | ) 135 | 136 | def forward(self, x, context=None): 137 | # note: if no context is given, cross-attention defaults to self-attention 138 | b, c, h, w = x.shape 139 | x_in = x 140 | x = self.norm(x) 141 | x = self.proj_in(x) 142 | x = rearrange(x, 'b c h w -> b (h w) c') 143 | for block in self.transformer_blocks: 144 | x = block(x, context=context) 145 | x = rearrange(x, 'b (h w) c -> b c h w', h=h, w=w) 146 | x = self.proj_out(x) 147 | return x + x_in 148 | -------------------------------------------------------------------------------- /modules/decoder.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | 4 | from .resblock import ResnetBlock 5 | 6 | 7 | def nonlinearity(x): 8 | return x*torch.sigmoid(x) 9 | 10 | 11 | def Normalize(in_channels, num_groups=32): 12 | return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True) 13 | 14 | 15 | class Upsample(nn.Module): 16 | def __init__(self, in_channels): 17 | super().__init__() 18 | self.conv = torch.nn.Conv2d( 19 | in_channels, 20 | in_channels, 21 | kernel_size=3, 22 | stride=1, 23 | padding=1 24 | ) 25 | 26 | def forward(self, x): 27 | x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") 28 | x = self.conv(x) 29 | return x 30 | 31 | 32 | class Decoder(nn.Module): 33 | def __init__(self, hidden_channels, out_channels, upsample_steps, dropout=0.1): 34 | super().__init__() 35 | self.layers = nn.ModuleList([]) 36 | for _ in range(upsample_steps): 37 | 
self.layers.append( 38 | nn.Sequential( 39 | ResnetBlock(hidden_channels, hidden_channels, dropout), 40 | Upsample(hidden_channels) 41 | ) 42 | ) 43 | self.norm = Normalize(hidden_channels) 44 | self.conv_out = nn.Conv2d( 45 | hidden_channels, 46 | out_channels, 47 | kernel_size=3, 48 | stride=1, 49 | padding=1 50 | ) 51 | 52 | def forward(self, x): 53 | for layer in self.layers: 54 | x = layer(x) 55 | x = self.norm(x) 56 | x = nonlinearity(x) 57 | x = self.conv_out(x) 58 | return torch.tanh(x) 59 | -------------------------------------------------------------------------------- /modules/encoder.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | 4 | from .resblock import ResnetBlock 5 | 6 | 7 | class Downsample(nn.Module): 8 | def __init__(self, in_channels): 9 | super().__init__() 10 | self.conv = torch.nn.Conv2d( 11 | in_channels, 12 | in_channels, 13 | kernel_size=3, 14 | stride=2, 15 | padding=0 16 | ) 17 | 18 | def forward(self, x): 19 | pad = (0,1,0,1) 20 | x = torch.nn.functional.pad(x, pad, mode="constant", value=0) 21 | x = self.conv(x) 22 | return x 23 | 24 | 25 | class Encoder(nn.Module): 26 | def __init__(self, in_channels, hidden_channels, downsample_steps, dropout=0.1): 27 | super().__init__() 28 | self.layers = nn.ModuleList([ 29 | nn.Conv2d( 30 | in_channels, 31 | hidden_channels, 32 | kernel_size=5, 33 | stride=1, 34 | padding=2 35 | ) 36 | ]) 37 | for _ in range(downsample_steps): 38 | self.layers.append( 39 | nn.Sequential( 40 | ResnetBlock(hidden_channels, hidden_channels, dropout), 41 | Downsample(hidden_channels) 42 | ) 43 | ) 44 | 45 | def forward(self, x): 46 | for layer in self.layers: 47 | x = layer(x) 48 | return x 49 | -------------------------------------------------------------------------------- /modules/planners.py: -------------------------------------------------------------------------------- 1 | """ 2 | modified code from 3 | https://github.com/omron-sinicx/neural-astar/blob/minimal/neural_astar/planner/differentiable_astar.py 4 | """ 5 | 6 | import math 7 | from dataclasses import dataclass 8 | from typing import Optional, List 9 | 10 | import torch 11 | import torch.nn as nn 12 | import torch.nn.functional as F 13 | 14 | 15 | @dataclass 16 | class AstarOutput: 17 | """ 18 | Output structure of A* search planners 19 | """ 20 | histories: torch.tensor 21 | paths: torch.tensor 22 | intermediate_results: Optional[List[dict]] = None 23 | g: Optional[torch.tensor] = None 24 | 25 | 26 | def get_diag_heuristic(goal_maps): 27 | num_samples, size = goal_maps.shape[0], goal_maps.shape[-1] 28 | grid = torch.meshgrid(torch.arange(0, size), torch.arange(0, size)) 29 | loc = torch.stack(grid, dim=0).type_as(goal_maps) 30 | loc_expand = loc.reshape(2, -1).unsqueeze(0).expand(num_samples, 2, -1) 31 | goal_loc = torch.einsum("kij, bij -> bk", loc, goal_maps) 32 | goal_loc_expand = goal_loc.unsqueeze(-1).expand(num_samples, 2, -1) 33 | 34 | #diagonal distance 35 | dxdy = torch.abs(loc_expand - goal_loc_expand) 36 | h = dxdy.min(dim=1)[0] * (2**0.5) + torch.abs(dxdy[:, 0] - dxdy[:, 1]) 37 | h = h.reshape_as(goal_maps) 38 | 39 | return h 40 | 41 | 42 | def _st_softmax_noexp(val: torch.tensor) -> torch.tensor: 43 | """ 44 | Softmax + discretized activation 45 | Used a detach() trick as done in straight-through softmax 46 | 47 | Args: 48 | val (torch.tensor): exponential of inputs. 49 | 50 | Returns: 51 | torch.tensor: one-hot matrices for input argmax. 
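(The returned value is the hard one-hot argmax in the forward pass, while gradients flow through the normalized probabilities via the (y_hard - y).detach() + y construction below.)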
52 | """ 53 | 54 | val_ = val.reshape(val.shape[0], -1) 55 | y = val_ / (val_.sum(dim=-1, keepdim=True)) 56 | _, ind = y.max(dim=-1) 57 | y_hard = torch.zeros_like(y) 58 | y_hard[range(len(y_hard)), ind] = 1 59 | y_hard = y_hard.reshape_as(val) 60 | y = y.reshape_as(val) 61 | return (y_hard - y).detach() + y 62 | 63 | 64 | def expand(x: torch.tensor, neighbor_filter: torch.tensor) -> torch.tensor: 65 | """ 66 | Expand neighboring node 67 | 68 | Args: 69 | x (torch.tensor): selected nodes 70 | neighbor_filter (torch.tensor): 3x3 filter to indicate 8 neighbors 71 | 72 | Returns: 73 | torch.tensor: neighboring nodes of x 74 | """ 75 | 76 | x = x.unsqueeze(0) 77 | num_samples = x.shape[1] 78 | y = F.conv2d(x, neighbor_filter, padding=1, groups=num_samples).squeeze() 79 | y = y.squeeze(0) 80 | return y 81 | 82 | 83 | def backtrack(start_maps: torch.tensor, goal_maps: torch.tensor, 84 | parents: torch.tensor, current_t: int) -> torch.tensor: 85 | """ 86 | Backtrack the search results to obtain paths 87 | 88 | Args: 89 | start_maps (torch.tensor): one-hot matrices for start locations 90 | goal_maps (torch.tensor): one-hot matrices for goal locations 91 | parents (torch.tensor): parent nodes 92 | current_t (int): current time step 93 | 94 | Returns: 95 | torch.tensor: solution paths 96 | """ 97 | 98 | num_samples = start_maps.shape[0] 99 | parents = parents.type(torch.long) 100 | goal_maps = goal_maps.type(torch.long) 101 | start_maps = start_maps.type(torch.long) 102 | path_maps = goal_maps.type(torch.long) 103 | num_samples = len(parents) 104 | loc = (parents * goal_maps.view(num_samples, -1)).sum(-1) 105 | for _ in range(current_t): 106 | path_maps.view(num_samples, -1)[range(num_samples), loc] = 1 107 | loc = parents[range(num_samples), loc] 108 | return path_maps 109 | 110 | 111 | class DifferentiableDiagAstar(nn.Module): 112 | def __init__(self, g_ratio: float = 0.5, Tmax: float = 0.95, h_w=1, f_w=2, mode='default'): 113 | """ 114 | Differentiable A* module 115 | 116 | Args: 117 | g_ratio (float, optional): ratio between g(v) + h(v). 118 | Tmax (float, optional): how much of the map the planner is able to potentially explore during training. 
119 | """ 120 | 121 | super().__init__() 122 | 123 | neighbor_filter = torch.ones(1, 1, 3, 3) 124 | neighbor_filter[0, 0, 1, 1] = 0 125 | self.neighbor_filter = nn.Parameter(neighbor_filter, 126 | requires_grad=False) 127 | cost_filter = torch.ones(1, 1, 3, 3) 128 | cost_filter[0, 0, 1, 1] = 0 129 | cost_filter[0, 0, 0, 0] = 2**0.5 130 | cost_filter[0, 0, 0, 2] = 2**0.5 131 | cost_filter[0, 0, 2, 0] = 2**0.5 132 | cost_filter[0, 0, 2, 2] = 2**0.5 133 | self.cost_filter = nn.Parameter(cost_filter, requires_grad=False) 134 | 135 | self.get_heuristic = get_diag_heuristic 136 | 137 | self.g_ratio = g_ratio 138 | assert (Tmax > 0) & (Tmax <= 1), "Tmax must be within (0, 1]" 139 | self.Tmax = Tmax 140 | self.mode = mode 141 | self.h_w = h_w 142 | self.f_w = f_w 143 | 144 | def forward(self, 145 | cost_maps: torch.tensor, 146 | start_maps: torch.tensor, 147 | goal_maps: torch.tensor, 148 | obstacles_maps: torch.tensor) -> AstarOutput: 149 | assert cost_maps.ndim == 4 150 | assert start_maps.ndim == 4 151 | assert goal_maps.ndim == 4 152 | assert obstacles_maps.ndim == 4 153 | 154 | cost_maps = cost_maps[:, 0] 155 | start_maps = start_maps[:, 0] 156 | goal_maps = goal_maps[:, 0] 157 | obstacles_maps = obstacles_maps[:, 0] 158 | if self.mode == 'h': 159 | heuristic = cost_maps 160 | elif self.mode == 'f': 161 | focal_map = cost_maps 162 | elif self.mode == 'k': 163 | heuristic_koef = cost_maps 164 | cost_maps = torch.zeros_like(obstacles_maps) 165 | 166 | num_samples = start_maps.shape[0] 167 | neighbor_filter = self.neighbor_filter 168 | neighbor_filter = torch.repeat_interleave(neighbor_filter, num_samples, 169 | 0) 170 | cost_filter = self.cost_filter 171 | cost_filter = torch.repeat_interleave(cost_filter, num_samples, 172 | 0) 173 | size = start_maps.shape[-1] 174 | 175 | open_maps = start_maps 176 | histories = torch.zeros_like(start_maps) 177 | intermediate_results = [] 178 | 179 | h = self.get_heuristic(goal_maps) * self.h_w 180 | if self.mode == 'h': 181 | h = heuristic 182 | elif self.mode == 'k': 183 | thr = torch.tensor(0.1).to(next(self.parameters()).device) 184 | heuristic_koef = torch.where(heuristic_koef > thr, heuristic_koef, thr) 185 | h = h / (heuristic_koef) 186 | g = torch.zeros_like(start_maps) 187 | 188 | parents = ( 189 | torch.ones_like(start_maps).reshape(num_samples, -1) * 190 | goal_maps.reshape(num_samples, -1).max(-1, keepdim=True)[-1]) 191 | 192 | size = cost_maps.shape[-1] 193 | Tmax = self.Tmax if self.training else 1. 194 | Tmax = int(Tmax * size * size) 195 | for t in range(Tmax): 196 | if self.mode != 'f': 197 | f = self.g_ratio * g + (1 - self.g_ratio + 0.001) * h 198 | f_exp = torch.exp(-1 * f / math.sqrt(size)) 199 | f_exp = f_exp * open_maps 200 | selected_node_maps = _st_softmax_noexp(f_exp) 201 | else: 202 | f = self.g_ratio * g + (1 - self.g_ratio) * h 203 | f_open = (f * open_maps + (open_maps == 0) * size**2).view(num_samples, -1) 204 | min_values, _ = f_open.min(dim=-1) 205 | new_open = torch.where((f <= min_values.view(-1, 1, 1) * self.f_w) * (open_maps == 1) , 1., 0.) 
206 | focal_exp = torch.exp(focal_map) 207 | focal_exp = focal_exp * new_open 208 | selected_node_maps = _st_softmax_noexp(focal_exp) 209 | 210 | dist_to_goal = (selected_node_maps * goal_maps).sum((1, 2), 211 | keepdim=True) 212 | is_unsolved = (dist_to_goal < 1e-8).float() 213 | 214 | histories = histories + selected_node_maps 215 | histories = torch.clamp(histories, 0, 1) 216 | open_maps = open_maps - is_unsolved * selected_node_maps 217 | open_maps = torch.clamp(open_maps, 0, 1) 218 | 219 | neighbor_nodes = expand(selected_node_maps, neighbor_filter) 220 | neighbor_nodes = neighbor_nodes * obstacles_maps 221 | 222 | g2 = (g*selected_node_maps).sum((1, 2), keepdim=True) + expand(selected_node_maps, cost_filter) 223 | idx = (1 - open_maps) * (1 - histories) + open_maps * (g > g2) 224 | idx = idx * neighbor_nodes 225 | idx = idx.detach() 226 | g = g2 * idx + g * (1 - idx) 227 | g = g.detach() 228 | 229 | # update open maps 230 | open_maps = torch.clamp(open_maps + idx, 0, 1) 231 | open_maps = open_maps.detach() 232 | 233 | # for backtracking 234 | idx = idx.reshape(num_samples, -1) 235 | snm = selected_node_maps.reshape(num_samples, -1) 236 | new_parents = snm.max(-1, keepdim=True)[1] 237 | parents = new_parents * idx + parents * (1 - idx) 238 | 239 | if torch.all(is_unsolved.flatten() == 0): 240 | break 241 | 242 | # backtracking 243 | path_maps = backtrack(start_maps, goal_maps, parents, t) 244 | 245 | return AstarOutput(histories.unsqueeze(1), path_maps.unsqueeze(1), 246 | intermediate_results, g.unsqueeze(1)) 247 | -------------------------------------------------------------------------------- /modules/pos_emb.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | import numpy as np 4 | 5 | 6 | def build_grid(resolution, max_v=1.): 7 | """ 8 | :param resolution: tuple of 2 numbers 9 | :return: grid for positional embeddings built on input resolution 10 | """ 11 | ranges = [np.linspace(0., max_v, num=res) for res in resolution] 12 | grid = np.meshgrid(*ranges, sparse=False, indexing="ij") 13 | grid = np.stack(grid, axis=-1) 14 | grid = np.reshape(grid, [resolution[0], resolution[1], -1]) 15 | grid = np.expand_dims(grid, axis=0) 16 | grid = grid.astype(np.float32) 17 | return np.concatenate([grid, max_v - grid], axis=-1) 18 | 19 | 20 | class PosEmbeds(nn.Module): 21 | def __init__(self, hidden_size, resolution): 22 | super().__init__() 23 | self.linear = nn.Linear(4, hidden_size) 24 | self.grid = nn.Parameter(torch.Tensor(build_grid(resolution)), requires_grad=False) 25 | 26 | def forward(self, inputs): 27 | pos_emb = self.linear(self.grid).moveaxis(3, 1) 28 | return inputs + pos_emb 29 | 30 | def change_resolution(self, resolution, max_v): 31 | self.grid = nn.Parameter(torch.Tensor(build_grid(resolution, max_v)), requires_grad=False) 32 | -------------------------------------------------------------------------------- /modules/resblock.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | 4 | 5 | def nonlinearity(x): 6 | return x*torch.sigmoid(x) 7 | 8 | 9 | def Normalize(in_channels, num_groups=32): 10 | return torch.nn.GroupNorm(num_groups=num_groups, num_channels=in_channels, eps=1e-6, affine=True) 11 | 12 | 13 | class Upsample(nn.Module): 14 | def __init__(self, in_channels): 15 | super().__init__() 16 | self.conv = torch.nn.Conv2d( 17 | in_channels, 18 | in_channels, 19 | kernel_size=3, 20 | stride=1, 21 | padding=1 22 | ) 23 | 
24 | def forward(self, x): 25 | x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest") 26 | x = self.conv(x) 27 | return x 28 | 29 | 30 | class Downsample(nn.Module): 31 | def __init__(self, in_channels): 32 | super().__init__() 33 | self.conv = torch.nn.Conv2d( 34 | in_channels, 35 | in_channels, 36 | kernel_size=3, 37 | stride=2, 38 | padding=0 39 | ) 40 | 41 | def forward(self, x): 42 | pad = (0,1,0,1) 43 | x = torch.nn.functional.pad(x, pad, mode="constant", value=0) 44 | x = self.conv(x) 45 | return x 46 | 47 | 48 | class ResnetBlock(nn.Module): 49 | def __init__(self, in_channels, out_channels=None, dropout=0.1): 50 | super().__init__() 51 | self.in_channels = in_channels 52 | out_channels = in_channels if out_channels is None else out_channels 53 | self.out_channels = out_channels 54 | 55 | self.norm1 = Normalize(in_channels) 56 | self.conv1 = torch.nn.Conv2d( 57 | in_channels, 58 | out_channels, 59 | kernel_size=3, 60 | stride=1, 61 | padding=1 62 | ) 63 | self.norm2 = Normalize(out_channels) 64 | self.dropout = torch.nn.Dropout(dropout) 65 | self.conv2 = torch.nn.Conv2d( 66 | out_channels, 67 | out_channels, 68 | kernel_size=3, 69 | stride=1, 70 | padding=1 71 | ) 72 | if self.in_channels != self.out_channels: 73 | self.nin_shortcut = torch.nn.Conv2d( 74 | in_channels, 75 | out_channels, 76 | kernel_size=1, 77 | stride=1, 78 | padding=0 79 | ) 80 | 81 | def forward(self, x): 82 | h = x 83 | h = self.norm1(h) 84 | h = nonlinearity(h) 85 | h = self.conv1(h) 86 | 87 | h = self.norm2(h) 88 | h = nonlinearity(h) 89 | h = self.dropout(h) 90 | h = self.conv2(h) 91 | 92 | if self.in_channels != self.out_channels: 93 | x = self.nin_shortcut(x)  # 1x1 conv to match channel counts on the residual branch 94 | return x+h 95 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | torch 3 | torchvision 4 | pytorch_lightning 5 | einops 6 | matplotlib 7 | wandb 8 | tqdm -------------------------------------------------------------------------------- /train.py: -------------------------------------------------------------------------------- 1 | from models.autoencoder import Autoencoder, PathLogger, DemAutoencoder, DemPathLogger 2 | from data.hmaps import GridData 3 | from data.dems import DemData 4 | 5 | import pytorch_lightning as pl 6 | import wandb 7 | from torch.utils.data import DataLoader 8 | from pytorch_lightning.loggers import WandbLogger 9 | import torch 10 | 11 | import argparse 12 | import multiprocessing 13 | 14 | 15 | def main(mode, run_name, proj_name, batch_size, max_epochs): 16 | train_data = GridData( 17 | path='./TransPath_data/train', 18 | mode=mode 19 | ) if mode != 'dem' else DemData(split='train') 20 | val_data = GridData( 21 | path='./TransPath_data/val', 22 | mode=mode 23 | ) if mode != 'dem' else DemData(split='val') 24 | resolution = (train_data.img_size, train_data.img_size) 25 | train_dataloader = DataLoader( train_data, 26 | batch_size=batch_size, 27 | shuffle=True, 28 | num_workers=multiprocessing.cpu_count(), 29 | pin_memory=True) 30 | val_dataloader = DataLoader( val_data, 31 | batch_size=batch_size, 32 | shuffle=False, 33 | num_workers=multiprocessing.cpu_count(), 34 | pin_memory=True) 35 | 36 | samples = next(iter(val_dataloader)) 37 | 38 | model = Autoencoder(mode=mode, resolution=resolution) if mode != 'dem' else DemAutoencoder(resolution=resolution) 39 | callback = PathLogger(samples,
mode=mode) if mode != 'dem' else DemPathLogger(samples) 40 | wandb_logger = WandbLogger(project=proj_name, name=f'{run_name}_{mode}', log_model='all') 41 | trainer = pl.Trainer( 42 | logger=wandb_logger, 43 | accelerator="auto", 44 | max_epochs=max_epochs, 45 | deterministic=False, 46 | callbacks=[callback], 47 | ) 48 | trainer.fit(model, train_dataloader, val_dataloader) 49 | wandb.finish() 50 | 51 | if __name__ == '__main__': 52 | parser = argparse.ArgumentParser() 53 | parser.add_argument('--mode', type=str, choices=['f', 'cf', 'dem'], default='dem') 54 | parser.add_argument('--run_name', type=str, default='default') 55 | parser.add_argument('--proj_name', type=str, default='TransPath_runs') 56 | parser.add_argument('--seed', type=int, default=39) 57 | parser.add_argument('--batch', type=int, default=256) 58 | parser.add_argument('--epoch', type=int, default=160) 59 | 60 | args = parser.parse_args() 61 | pl.seed_everything(args.seed) 62 | torch.set_float32_matmul_precision('high') # silence the Tensor Cores matmul-precision warning on newer GPUs 63 | main( 64 | mode=args.mode, 65 | run_name=args.run_name, 66 | proj_name=args.proj_name, 67 | batch_size=args.batch, 68 | max_epochs=args.epoch, 69 | ) 70 | -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/utils/__init__.py -------------------------------------------------------------------------------- /utils/checkpoint.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | 4 | def checkpoint(func, inputs, params, flag): 5 | """ 6 | Evaluate a function without caching intermediate activations, allowing for 7 | reduced memory at the expense of extra compute in the backward pass. 8 | :param func: the function to evaluate. 9 | :param inputs: the argument sequence to pass to `func`. 10 | :param params: a sequence of parameters `func` depends on but does not 11 | explicitly take as arguments. 12 | :param flag: if False, disable gradient checkpointing.
13 | """ 14 | if flag: 15 | args = tuple(inputs) + tuple(params) 16 | return CheckpointFunction.apply(func, len(inputs), *args) 17 | else: 18 | return func(*inputs) 19 | 20 | 21 | class CheckpointFunction(torch.autograd.Function): 22 | @staticmethod 23 | def forward(ctx, run_function, length, *args): 24 | ctx.run_function = run_function 25 | ctx.input_tensors = list(args[:length]) 26 | ctx.input_params = list(args[length:]) 27 | 28 | with torch.no_grad(): 29 | output_tensors = ctx.run_function(*ctx.input_tensors) 30 | return output_tensors 31 | 32 | @staticmethod 33 | def backward(ctx, *output_grads): 34 | ctx.input_tensors = [x.detach().requires_grad_(True) for x in ctx.input_tensors] 35 | with torch.enable_grad(): 36 | shallow_copies = [x.view_as(x) for x in ctx.input_tensors] 37 | output_tensors = ctx.run_function(*shallow_copies) 38 | input_grads = torch.autograd.grad( 39 | output_tensors, 40 | ctx.input_tensors + ctx.input_params, 41 | output_grads, 42 | allow_unused=True, 43 | ) 44 | del ctx.input_tensors 45 | del ctx.input_params 46 | del output_tensors 47 | return (None, None) + input_grads 48 | -------------------------------------------------------------------------------- /utils/metrics.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Optional, List 3 | 4 | import torch 5 | import numpy as np 6 | 7 | EPS = 1e-10 8 | 9 | 10 | @dataclass 11 | class Metrics: 12 | p_opt: float 13 | p_exp: float 14 | h_mean: float 15 | pcost_dif: float 16 | pcost_dif_list: list = None 17 | p_exp_list:list = None 18 | abs_cost: list = None 19 | abs_exp: list = None 20 | 21 | def __repr__(self): 22 | return f"optimality: {self.p_opt:0.3f}, efficiency: {self.p_exp:0.3f}, h_mean: {self.h_mean:0.3f}, cost_diff: {self.pcost_dif:0.3f}" 23 | 24 | 25 | @dataclass 26 | class AstarOutput: 27 | """ 28 | Output structure of A* search planners 29 | """ 30 | histories: torch.tensor 31 | paths: torch.tensor 32 | intermediate_results: Optional[List[dict]] = None 33 | g: Optional[torch.tensor] = None 34 | 35 | 36 | def calc_metrics(na_outputs: AstarOutput, va_outputs: AstarOutput) -> Metrics: 37 | """ 38 | Calculate opt, exp, and hmean metrics for problem instances each with a single starting point 39 | 40 | Args: 41 | na_outputs (AstarOutput): outputs from Neural A* 42 | va_outputs (AstarOutput): outputs from vanilla A* 43 | 44 | Returns: 45 | Metrics: opt, exp, and hmean values 46 | """ 47 | pathlen_astar = va_outputs.paths.sum((1, 2, 3)).detach().cpu().numpy() 48 | pathlen_na = na_outputs.paths.sum((1, 2, 3)).detach().cpu().numpy() 49 | p_opt = (pathlen_astar == pathlen_na).mean() 50 | 51 | # pathcost_astar = (va_outputs.paths * va_outputs.g).sum((1, 2, 3)).detach().cpu().numpy() 52 | # pathcost_na =(na_outputs.paths * na_outputs.g).sum((1, 2, 3)).detach().cpu().numpy() 53 | pathcost_astar = torch.amax(va_outputs.paths * va_outputs.g, dim=(1, 2, 3)).detach().cpu().numpy() 54 | pathcost_na = torch.amax(na_outputs.paths * na_outputs.g, dim=(1, 2, 3)).detach().cpu().numpy() 55 | pcost_dif_list = pathcost_na / pathcost_astar 56 | pcost_dif = pcost_dif_list.mean() 57 | pcost_dif_list = pcost_dif_list.tolist() 58 | 59 | exp_astar = va_outputs.histories.sum((1, 2, 3)).detach().cpu().numpy() 60 | exp_na = na_outputs.histories.sum((1, 2, 3)).detach().cpu().numpy() 61 | # p_exp_list = np.maximum((exp_astar - exp_na) / exp_astar, 0.) 62 | p_exp_list = (exp_astar - exp_na) / exp_astar 63 | p_exp = p_exp_list.mean() 64 | h_mean = 2. 
/ (1. / (p_opt + EPS) + 1. / (p_exp + EPS)) 65 | 66 | return Metrics(p_opt, 1- p_exp, h_mean, pcost_dif, pcost_dif_list, (1 - p_exp_list).tolist(), 67 | pathcost_na.tolist(), exp_na.tolist()) -------------------------------------------------------------------------------- /weights/cf.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/weights/cf.pth -------------------------------------------------------------------------------- /weights/dem_64.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/weights/dem_64.pth -------------------------------------------------------------------------------- /weights/focal.pth: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Cognitive-AI-Systems/TransPath/7c82b00fe2a1ce4641307e5d522759e229e11f40/weights/focal.pth --------------------------------------------------------------------------------
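
A few minimal usage sketches for the modules above follow; every size, channel count, and hyperparameter in them is illustrative rather than a value used in the paper. The first one covers `SpatialTransformer` from `modules/attention.py`: it normalizes and projects a feature map to the inner attention width, flattens it to a `(b, h*w, c)` token sequence, runs the stack of `BasicTransformerBlock`s, and folds the result back into an image-shaped residual, so the output shape matches the input.

```python
import torch
from modules.attention import SpatialTransformer

# illustrative sizes: 64 channels attended by 4 heads of width 16
block = SpatialTransformer(in_channels=64, n_heads=4, d_head=16, depth=1)

x = torch.randn(2, 64, 16, 16)     # (batch, channels, height, width)
with torch.no_grad():
    y = block(x)                   # context=None, so cross-attention acts as self-attention

print(y.shape)                     # torch.Size([2, 64, 16, 16]) -- same shape, residual added
```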
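`Encoder` halves the spatial resolution at every step (zero-pad by one on the right and bottom, then a stride-2 convolution), while `Decoder` doubles it back with nearest-neighbour upsampling and squashes the output to `[-1, 1]` with `tanh`. With equal numbers of steps the two form a symmetric hourglass, as in this sketch (channel counts and input size are illustrative):

```python
import torch
from modules.encoder import Encoder
from modules.decoder import Decoder

enc = Encoder(in_channels=2, hidden_channels=64, downsample_steps=3)
dec = Decoder(hidden_channels=64, out_channels=1, upsample_steps=3)

x = torch.randn(1, 2, 64, 64)   # e.g. an obstacle map stacked with a start/goal map
z = enc(x)                      # (1, 64, 8, 8): 64 / 2**3 along each spatial axis
y = dec(z)                      # (1, 1, 64, 64), values in [-1, 1]
print(z.shape, y.shape)
```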
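`get_diag_heuristic` computes the octile (diagonal) distance to the goal for every cell: `sqrt(2)` times the smaller of `|dx|`, `|dy|`, plus the remaining straight moves `||dx| - |dy||`. A quick check against a hand-computed value (the goal position is arbitrary):

```python
import torch
from modules.planners import get_diag_heuristic

goal = torch.zeros(1, 8, 8)
goal[0, 5, 2] = 1                      # goal at row 5, column 2

h = get_diag_heuristic(goal)
# from cell (0, 0): dx = 5, dy = 2  ->  2 * sqrt(2) + 3 ≈ 5.83
print(round(h[0, 0, 0].item(), 2))
```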
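`_st_softmax_noexp` normalizes the already-exponentiated scores, emits a hard one-hot argmax in the forward pass, and routes gradients through the soft distribution via the `(y_hard - y).detach() + y` trick. A small check that the output is one-hot while gradients still reach the input:

```python
import torch
from modules.planners import _st_softmax_noexp

scores = torch.rand(2, 4, 4, requires_grad=True)   # positive "exponentiated" scores
onehot = _st_softmax_noexp(scores)
print(onehot.sum(dim=(1, 2)))                      # ~[1., 1.]: one selected cell per sample

weights = torch.arange(16.).reshape(4, 4)
(onehot * weights).sum().backward()                # backward goes through the soft distribution
print(scores.grad.abs().sum() > 0)                 # tensor(True)
```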
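`DifferentiableDiagAstar` expects four `(B, 1, H, W)` tensors: the predicted values (`cost_maps`), one-hot start and goal maps, and an obstacle map with 1 for traversable cells and 0 for blocked ones. Judging from the forward pass, `mode='h'` treats the prediction as a learned heuristic, `mode='f'` as focal values for the focal-search variant, `mode='k'` as a correction factor that divides the diagonal heuristic, and any other mode (the `'default'` below) runs plain weighted A* with the diagonal heuristic, in which case only the shape of `cost_maps` is read. A minimal run on an empty map (sizes are illustrative):

```python
import torch
from modules.planners import DifferentiableDiagAstar

size = 16
obstacles = torch.ones(1, 1, size, size)                 # every cell traversable
start = torch.zeros_like(obstacles)
goal = torch.zeros_like(obstacles)
start[0, 0, 1, 1] = 1
goal[0, 0, size - 2, size - 2] = 1

planner = DifferentiableDiagAstar(mode='default')
planner.eval()                                            # Tmax -> 1.0: full expansion budget

with torch.no_grad():
    out = planner(torch.zeros_like(obstacles), start, goal, obstacles)

print(out.paths.shape)                                    # torch.Size([1, 1, 16, 16])
print(int(out.histories.sum().item()))                    # number of expanded nodes
```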
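`build_grid` returns a `(1, H, W, 4)` array holding the normalized row/column coordinates of every cell together with their complements, and `PosEmbeds` maps those four values to the hidden size with a single linear layer and adds them to the feature map. A shape sketch (the hidden size is illustrative):

```python
import torch
from modules.pos_emb import PosEmbeds, build_grid

print(build_grid((8, 8)).shape)    # (1, 8, 8, 4): (y, x, 1 - y, 1 - x) per cell

pos = PosEmbeds(hidden_size=64, resolution=(8, 8))
feats = torch.randn(2, 64, 8, 8)
print(pos(feats).shape)            # torch.Size([2, 64, 8, 8])
```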
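The `nonlinearity` helper shared by the encoder, decoder, and residual blocks is the SiLU (swish) activation `x * sigmoid(x)`, i.e. the same function as `torch.nn.functional.silu`:

```python
import torch
import torch.nn.functional as F
from modules.resblock import nonlinearity

x = torch.randn(5)
print(torch.allclose(nonlinearity(x), F.silu(x)))   # True
```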
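`train.py` can also be driven programmatically, since `main` just wires the datasets, model, and W&B logger together. The call below mirrors `python train.py --mode cf --batch 64 --epoch 160`; it assumes the grid dataset has already been downloaded to `./TransPath_data` and that a wandb account is configured, and the run name, batch size, and epoch count are illustrative:

```python
import pytorch_lightning as pl
from train import main

pl.seed_everything(39)                       # 39 is the script's default seed
main(mode='cf', run_name='example', proj_name='TransPath_runs',
     batch_size=64, max_epochs=160)
```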
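`utils.checkpoint.checkpoint` trades compute for memory: with `flag=True` the wrapped function runs under `no_grad` in the forward pass and is re-executed inside `backward` to rebuild the activations. A minimal sketch with a hypothetical toy module:

```python
import torch
from torch import nn
from utils.checkpoint import checkpoint

layer = nn.Linear(16, 16)                # toy module, purely illustrative
x = torch.randn(4, 16, requires_grad=True)

y = checkpoint(lambda inp: layer(inp).relu(), (x,), layer.parameters(), True)
y.sum().backward()                       # the forward pass is recomputed here instead of being cached

print(x.grad.shape)                      # torch.Size([4, 16])
print(layer.weight.grad is not None)     # True
```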
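`calc_metrics` compares a learned planner against vanilla A* on the same instances: `p_opt` is the share of instances whose path length matches A*, the second field stores the ratio of node expansions relative to A* (printed as "efficiency"), `h_mean` is the harmonic mean `2 / (1/p_opt + 1/p_exp)`, and `pcost_dif` is the ratio of path costs. A toy comparison with hand-built outputs (the maps are hypothetical):

```python
import torch
from utils.metrics import AstarOutput, calc_metrics

paths = torch.zeros(1, 1, 4, 4)
paths[0, 0, 0, :3] = 1                                   # a 3-cell path along the top row
g = torch.zeros(1, 1, 4, 4)
g[0, 0, 0, :3] = torch.tensor([0., 1., 2.])              # cumulative path cost along that path

va = AstarOutput(histories=torch.ones(1, 1, 4, 4), paths=paths, g=g)           # vanilla A*: 16 expansions
na = AstarOutput(histories=paths.clone(), paths=paths.clone(), g=g.clone())    # learned: 3 expansions

print(calc_metrics(na, va))   # optimality: 1.000, efficiency: 0.188, ...
```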