├── src
│   ├── __init__.py
│   ├── loss_fns
│   │   ├── __init__.py
│   │   └── loss_functions.py
│   ├── models
│   │   ├── __init__.py
│   │   └── VGGEncoders.py
│   ├── datasets
│   │   ├── __init__.py
│   │   └── CoronalScanPairsDataset.py
│   ├── utils
│   │   ├── __init__.py
│   │   └── misc.py
│   ├── registration
│   │   ├── __init__.py
│   │   └── registration.py
│   └── assets
│       ├── val_subjects.txt
│       └── test_subjects.txt
├── .gitignore
├── contrastive_scan_correlator.png
├── copy_to_temp.sh
├── LICENSE
├── README.md
└── train_contrastive_networks.py
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/src/loss_fns/__init__.py:
--------------------------------------------------------------------------------
1 | from .loss_functions import NCELoss
--------------------------------------------------------------------------------
/src/models/__init__.py:
--------------------------------------------------------------------------------
1 | from .VGGEncoders import VGGEncoder, VGGEncoderPoolAll
--------------------------------------------------------------------------------
/src/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | from .CoronalScanPairsDataset import CoronalScanPairsDataset
--------------------------------------------------------------------------------
/src/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .misc import load_checkpoint, save_checkpoint, get_batch_corrrelations, get_dataset_similarities
--------------------------------------------------------------------------------
/src/registration/__init__.py:
--------------------------------------------------------------------------------
1 | from .registration import get_dense_correspondances, register_scans, get_salient_point_correspondances
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
 1 | *.pt
 2 | *.png
 3 | *.jpg
 4 | *.pkl
 5 | *.pyc
 6 | .vscode
 7 | .ipynb_checkpoints
 8 | images/dev
 9 | model_weights/*
--------------------------------------------------------------------------------
/contrastive_scan_correlator.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rwindsor1/biobank-self-supervised-alignment/HEAD/contrastive_scan_correlator.png
--------------------------------------------------------------------------------
/copy_to_temp.sh:
--------------------------------------------------------------------------------
1 | mkdir -p $2
2 | echo 'Copying MRIs...'
3 | rsync -r --no-inc-recursive --info=progress2 $1'/mri-mid-corr-slices' $2
4 | echo 'Copying DXAs...'
5 | rsync -r --no-inc-recursive --info=progress2 $1'/dxas-processed' $2 6 | -------------------------------------------------------------------------------- /src/loss_fns/loss_functions.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | 4 | class NCELoss(torch.nn.Module): 5 | def __init__(self, temperature=0.1)-> None: 6 | super().__init__() 7 | self.temperature = temperature 8 | 9 | def forward(self, batch_similarities): 10 | ax1_softmaxes = F.softmax(batch_similarities/self.temperature,dim=1) 11 | ax2_softmaxes = F.softmax(batch_similarities/self.temperature,dim=0) 12 | softmax_scores = torch.cat((-ax1_softmaxes.diag().log(),-ax2_softmaxes.diag().log())) 13 | loss = softmax_scores.mean() 14 | return loss -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Rhydian Windsor 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | 
--------------------------------------------------------------------------------
/src/models/VGGEncoders.py:
--------------------------------------------------------------------------------
 1 | import sys, os, glob
 2 | import torch
 3 | import torch.nn as nn
 4 | import torch.nn.functional as F
 5 | 
 6 | class ConvSequence(nn.Module):
 7 |     def __init__(self, in_channels, out_channels, kernel_size, padding=0, stride=1):
 8 |         super(ConvSequence, self).__init__()
 9 |         self.layers = nn.Sequential(nn.Conv2d(in_channels, out_channels, kernel_size, padding=padding, stride=stride, bias=False),
10 |                                     nn.BatchNorm2d(out_channels),
11 |                                     nn.ReLU(),
12 |                                     nn.Conv2d(out_channels, out_channels, kernel_size, padding=padding, stride=1, bias=False),
13 |                                     nn.BatchNorm2d(out_channels),
14 |                                     nn.ReLU())
15 | 
16 |     def forward(self, x):
17 |         return self.layers(x)
18 | 
19 | class VGGEncoder(nn.Module):
20 |     ''' Scan encoding model '''
21 |     def __init__(self, embedding_size=512, input_modes=1):
22 |         super(VGGEncoder, self).__init__()
23 |         self.embedding_size = embedding_size
24 |         self.convs1 = ConvSequence(input_modes, 64, kernel_size=(3,3), padding=1)
25 |         self.convs2 = ConvSequence(64, 128, kernel_size=(3,3), padding=1)
26 |         self.convs3 = ConvSequence(128, 256, kernel_size=(3,3), padding=1)
27 |         self.convs4 = ConvSequence(256, embedding_size, kernel_size=(3,3), padding=1)
28 | 
29 |     def forward(self, x):
30 |         x1 = self.convs1(x)
31 |         x2 = F.max_pool2d(x1, kernel_size=2)
32 |         x2 = self.convs2(x2)
33 |         x3 = F.max_pool2d(x2, kernel_size=2)
34 |         x3 = self.convs3(x3)
35 |         x4 = F.max_pool2d(x3, kernel_size=2)
36 |         x4 = self.convs4(x4)
37 |         return x4
38 | 
39 |     def forward_with_skips(self, x):
40 |         x1 = self.convs1(x)
41 |         x2 = F.max_pool2d(x1, kernel_size=2)
42 |         x2 = self.convs2(x2)
43 |         x3 = F.max_pool2d(x2, kernel_size=2)
44 |         x3 = self.convs3(x3)
45 |         x4 = F.max_pool2d(x3, kernel_size=2)
46 |         x4 = self.convs4(x4)
47 |         return x4, x3, x2, x1
48 | 
49 | class VGGEncoderPoolAll(VGGEncoder):
50 |     ''' Baseline model that encodes the entire scan into a single vector instead of spatially. '''
51 |     def __init__(self, embedding_size=512, input_modes=1):
52 |         super().__init__(embedding_size=embedding_size, input_modes=input_modes)
53 |         # extra convolutions block after max pooling
54 |         self.convs5 = ConvSequence(embedding_size, embedding_size, kernel_size=(1,1), padding=0)  # in_channels must match convs4's output (embedding_size)
55 | 
56 |     def forward(self, x):
57 |         x1 = self.convs1(x)
58 |         x2 = F.max_pool2d(x1, kernel_size=2)
59 |         x2 = self.convs2(x2)
60 |         x3 = F.max_pool2d(x2, kernel_size=2)
61 |         x3 = self.convs3(x3)
62 |         x4 = F.max_pool2d(x3, kernel_size=2)
63 |         x4 = self.convs4(x4)
64 |         x5 = F.max_pool2d(x4, kernel_size=(x4.shape[-2], x4.shape[-1]))
65 |         x5 = self.convs5(x5)
66 |         return x5
67 | 
68 | 
69 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # Representation Learning for Whole Body Biobank Scans
 2 | 
 3 | ![](contrastive_scan_correlator.png)
 4 | 
 5 | See the [Project Page](https://www.robots.ox.ac.uk/~vgg/research/self-supervised-medical-imaging/) here.
 6 | 
 7 | This repository contains code for the experiments detailed in 'Self-Supervised Multi-Modal Alignment For Whole Body Medical Imaging' (see the arXiv paper [here](https://arxiv.org/abs/2107.06652)).
 8 | It uses data from the UK Biobank ([register here](https://www.ukbiobank.ac.uk/enable-your-research/register)).
For details on downloading and preprocessing the UK Biobank data, please see [this repository](https://github.com/rwindsor1/UKBiobankDXAMRIPreprocessing).
 9 | 
10 | You are welcome to use this code either to reproduce the results of our experiments or for your own research.
11 | However, if you do, please cite the following:
12 | 
13 |     Windsor, R., Jamaludin, A., Kadir, T., Zisserman, A. "Self-Supervised Multi-Modal Alignment For Whole Body Medical Imaging"
14 |     In: Proceedings of the 24th International Conference on Medical Image Computing and Computer Assisted Intervention (MICCAI) 2021
15 | 
16 | bibtex:
17 | ```
18 | @inproceedings{Windsor21SelfSupervisedAlignment,
19 |   author    = {Rhydian Windsor and
20 |                Amir Jamaludin and
21 |                Timor Kadir and
22 |                Andrew Zisserman},
23 |   title     = {Self-Supervised Multi-Modal Alignment for Whole Body Medical Imaging},
24 |   booktitle = {MICCAI},
25 |   year      = {2021}
26 | }
27 | ```
28 | 
29 | # To run this code
30 | 
31 | To train the contrastive networks used in these experiments, run the following command:
32 | 
33 | `python train_contrastive_networks.py with BATCH_SIZE=20`
34 | 
35 | For the dataset used in this experiment, we found training took approximately 100 epochs to converge. A minimal example showing how a scan pair is encoded and scored is included at the end of this README.
36 | 
37 | To view the results of the registration methods, please see the notebook at `notebooks/demonstrate_registrations.ipynb`.
38 | 
39 | 
40 | ## To be added
41 | 
42 | - [x] Dataset preprocessing code (see [this repo](https://github.com/rwindsor1/UKBiobankDXAMRIPreprocessing))
43 | - [x] Contrastive networks training and evaluation code
44 | - [x] Correspondence-matching registration methods
45 | - [ ] Regressor network training and evaluation code
46 | - [ ] Cross-modal segmentation
47 | - [ ] Option to train with larger models and at higher spatial embedding resolutions
48 | 
49 | 
50 | 
51 | ## Libraries Used
52 | - Code is tested using Python 3.7
53 | - The deep learning aspects of this project are implemented in vanilla PyTorch (v1.7).
54 | - Experiment logging is done using [sacred](https://sacred.readthedocs.io/en/stable/quickstart.html) (v0.8.2).
55 | - For visualisation of results, [omniboard](https://github.com/vivekratnavel/omniboard) is recommended.
56 | # Changes from paper
57 | 
58 | As we used raw DXA scans from the Biobank as opposed to those that can be downloaded, the code requires some
59 | adaptation to work with the publicly available data. These changes are itemised below:
60 | 
61 | - The size of DXA scans fed to the network is changed to (1000,300), as opposed to the (800,300) used in the paper. This slightly increases GPU memory usage per batch but ensures the entire body is shown in the image for more patients.
62 | 
63 | - The size of MRI scans is changed from (501,224) to (700,224) for similar reasons.
64 | 
65 | - We train with a batch size of 20, as this was found to slightly improve performance.
66 | 
67 | - Instead of synthesising a coronal slice which follows the plane of the spine using SpineNet, we select the coronal
68 | slice from the scan using a heuristic described in https://github.com/rwindsor1/UKBiobankDXAMRIPreprocessing.
69 | 
70 | - We add an additional augmentation during training which selects slices near the mid-coronal slice instead of
71 | the mid-coronal slice itself.
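## Minimal example: scoring a scan pair

The sketch below shows how the pieces in `src/` fit together: each modality gets its own spatial encoder, `get_batch_corrrelations` densely correlates the two embedding maps, the maximum correlation response is taken as the pair similarity, and `NCELoss` contrasts matching pairs (the diagonal of the batch similarity matrix) against non-matching ones. This is an illustrative sketch only: the random tensors stand in for real scans, and the embedding size and temperature follow the defaults in `train_contrastive_networks.py`.

```python
import torch
from src.models import VGGEncoder
from src.loss_fns import NCELoss
from src.utils import get_batch_corrrelations

# toy batch of 2 subjects: 2-channel DXA (bone/tissue) and 2-channel MRI (fat/water)
dxa_img = torch.randn(2, 2, 1000, 300)
mri_img = torch.randn(2, 2, 700, 224)

dxa_model = VGGEncoder(embedding_size=128, input_modes=2)
mri_model = VGGEncoder(embedding_size=128, input_modes=2)

dxa_ses = dxa_model(dxa_img)  # spatial embedding maps, downsampled 8x
mri_ses = mri_model(mri_img)

# densely correlate every DXA embedding map with every MRI embedding map;
# the max response over spatial offsets is the pair similarity
correlations = get_batch_corrrelations(dxa_ses, mri_ses)
similarities, _ = torch.max(correlations.flatten(start_dim=2), dim=-1)

# matching pairs lie on the diagonal of the 2x2 similarity matrix
loss = NCELoss(temperature=0.005)(similarities)
```

At test time the same similarity matrix is used for retrieval: `validate` in `train_contrastive_networks.py` ranks, for each DXA scan, all candidate MRI scans by this score.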
72 | 73 | 74 | -------------------------------------------------------------------------------- /src/utils/misc.py: -------------------------------------------------------------------------------- 1 | from torch.serialization import load 2 | from functools import cmp_to_key 3 | from pickle import load 4 | from typing import OrderedDict 5 | import torch 6 | from torch import optim 7 | import numpy as np 8 | import torch.nn as nn 9 | from tqdm import tqdm 10 | import torch.nn.functional as F 11 | import glob 12 | import os 13 | import re 14 | from sacred import SETTINGS 15 | SETTINGS.CONFIG.READ_ONLY_CONFIG = False 16 | 17 | def optimiser_to(optim : torch.optim.Optimizer, 18 | device: torch.device): 19 | for param in optim.state.values(): 20 | # Not sure there are any global tensors in the state dict 21 | if isinstance(param, torch.Tensor): 22 | param.data = param.data.to(device) 23 | if param._grad is not None: 24 | param._grad.data = param._grad.data.to(device) 25 | elif isinstance(param, dict): 26 | for subparam in param.values(): 27 | if isinstance(subparam, torch.Tensor): 28 | subparam.data = subparam.data.to(device) 29 | if subparam._grad is not None: 30 | subparam._grad.data = subparam._grad.data.to(device) 31 | 32 | 33 | def load_checkpoint(dxa_model : nn.Module, 34 | mri_model: nn.Module, 35 | optimiser : torch.optim.Optimizer, 36 | load_from_path : str, 37 | use_cuda: bool, 38 | verbose : bool=True): 39 | 40 | if not os.path.isdir(load_from_path): 41 | os.mkdir(load_from_path) 42 | # load model weights 43 | if os.path.isdir(load_from_path): 44 | list_of_pt = glob.glob(load_from_path + '/*.pt') 45 | if len(list_of_pt): 46 | dxa_model_dict = dxa_model.state_dict() 47 | mri_model_dict = mri_model.state_dict() 48 | optim_state_dict = optimiser.state_dict() 49 | latest_pt = max(list_of_pt, key=os.path.getctime) 50 | print(latest_pt) 51 | checkpoint = torch.load(latest_pt, map_location=torch.device('cpu')) 52 | # alter model weight names if they have been saved from nn.DataParallel object 53 | for model_weights in ['dxa_model_weights','mri_model_weights']: 54 | checkpoint[model_weights] = OrderedDict((re.sub('^module\.','',k) 55 | if re.search('^module.',k) else k, v) 56 | for k,v in checkpoint[model_weights].items()) 57 | 58 | dxa_model_dict.update(checkpoint['dxa_model_weights']) 59 | mri_model_dict.update(checkpoint['mri_model_weights']) 60 | 61 | dxa_model.load_state_dict(dxa_model_dict) 62 | mri_model.load_state_dict(mri_model_dict) 63 | if 'optim_state' in checkpoint: 64 | optim_state_dict.update(checkpoint['optim_state']) 65 | optimiser.load_state_dict(optim_state_dict) 66 | val_stats = checkpoint['val_stats'] 67 | epochs = checkpoint['epochs'] 68 | if verbose: 69 | print(f"==> Resuming model trained for {epochs} epochs...") 70 | else: 71 | if verbose: 72 | print("==> Training Fresh Model ") 73 | val_stats = {'mean_rank':999999999999} 74 | epochs = 0 75 | if use_cuda: 76 | dxa_model.to('cuda:0') 77 | mri_model.to('cuda:0') 78 | optimiser_to(optimiser,'cuda:0') 79 | 80 | return dxa_model, mri_model, optimiser,val_stats, epochs 81 | 82 | 83 | def save_checkpoint(dxa_model : nn.Module, mri_model : nn.Module, 84 | optimiser : torch.optim.Optimizer, 85 | val_stats : dict, 86 | epochs : int, save_weights_path : str): 87 | if isinstance(dxa_model, nn.DataParallel): dxa_model = dxa_model.module 88 | if isinstance(mri_model, nn.DataParallel): mri_model = mri_model.module 89 | print(f'==> Saving Model Weights to {save_weights_path}') 90 | state = {'dxa_model_weights': 
dxa_model.state_dict(), 91 | 'mri_model_weights': mri_model.state_dict(), 92 | 'optim_state' : optimiser.state_dict(), 93 | 'val_stats' : val_stats, 94 | 'epochs' : epochs 95 | } 96 | if not os.path.isdir(save_weights_path): 97 | os.mkdir(save_weights_path) 98 | previous_checkpoints = glob.glob(save_weights_path + '/ckpt*.pt', recursive=True) 99 | torch.save(state, save_weights_path + '/ckpt' + str(epochs) + '.pt') 100 | for previous_checkpoint in previous_checkpoints: 101 | os.remove(previous_checkpoint) 102 | return 103 | 104 | 105 | def get_batch_corrrelations(scan_embeds_1, scan_embeds_2): 106 | ''' gets correlations between scan embeddings''' 107 | batch_size, channels, h, w = scan_embeds_2.shape 108 | 109 | scan_embeds_1 = F.normalize(scan_embeds_1,dim=1) 110 | scan_embeds_2 = F.normalize(scan_embeds_2,dim=1) 111 | correlation_maps = F.conv2d(scan_embeds_1, scan_embeds_2)/(h*w) 112 | return correlation_maps 113 | 114 | def get_dataset_similarities(scan_embeds_1, scan_embeds_2, batch_size=50): 115 | ''' Gets similarities for entire dataset. 116 | Splits job into batches to reduce GPU memory''' 117 | ds_size, channels, h, w = scan_embeds_2.shape 118 | ds_similarities = torch.zeros(ds_size, ds_size) 119 | 120 | for batch_1_start_idx in tqdm(range(0, ds_size, batch_size)): 121 | for batch_2_start_idx in range(0, ds_size, batch_size): 122 | 123 | batch_1_end_idx = batch_1_start_idx + batch_size 124 | batch_2_end_idx = batch_2_start_idx + batch_size 125 | if batch_2_end_idx >= ds_size: batch_2_end_idx = ds_size 126 | if batch_1_end_idx >= ds_size: batch_1_end_idx = ds_size 127 | 128 | correlations = get_batch_corrrelations(scan_embeds_1[batch_1_start_idx:batch_1_end_idx], 129 | scan_embeds_2[batch_2_start_idx:batch_2_end_idx]) 130 | similarities,_ = torch.max(correlations.flatten(start_dim=2),dim=-1) 131 | ds_similarities[batch_1_start_idx:batch_1_end_idx,batch_2_start_idx:batch_2_end_idx] = similarities 132 | return ds_similarities -------------------------------------------------------------------------------- /src/datasets/CoronalScanPairsDataset.py: -------------------------------------------------------------------------------- 1 | from genericpath import isfile 2 | import sys 3 | import os 4 | import glob 5 | from os.path import dirname,abspath,join, basename, isfile 6 | from numpy.lib.npyio import savez_compressed 7 | import torch 8 | import pickle 9 | import random 10 | import numpy as np 11 | import pandas as pd 12 | from torch.utils.data import Dataset 13 | import torchvision.transforms as t 14 | import torch.nn.functional as F 15 | import torchvision.transforms.functional as TF 16 | 17 | def get_seqs(scan_obj, seq_names): 18 | out_img = [] 19 | if isinstance(seq_names,str): seq_names = list(seq_names) 20 | for seq_name in seq_names: 21 | out_img.append(scan_obj[seq_name]) 22 | return torch.cat(out_img,dim=1) 23 | 24 | def resample_scans(scan_obj, resolution=2,transpose=False): 25 | scaling_factors = np.array(scan_obj['pixel_spacing'])/resolution 26 | for seq_name in scan_obj.keys(): 27 | if seq_name == 'pixel_spacing': scan_obj['pixel_spacing'] = [resolution]*2 28 | else: 29 | scan = torch.Tensor(scan_obj[seq_name])[None,None] 30 | scan_obj[seq_name] = F.interpolate(scan, 31 | scale_factor=list(scaling_factors), 32 | recompute_scale_factor=False, 33 | align_corners=False, 34 | mode='bicubic') 35 | if transpose: 36 | scan_obj[seq_name] = scan_obj[seq_name].permute(0,1,3,2) 37 | return scan_obj 38 | 39 | def pad_to_size(scan_img : torch.Tensor, output_shape : tuple): 40 | ''' Pads 
or crops image to a given size'''
 41 |     if (scan_img.shape[1] != output_shape[1]) or (scan_img.shape[2] != output_shape[2]):
 42 |         diff = (output_shape[1] - scan_img.shape[1], output_shape[2] - scan_img.shape[2])
 43 |         scan_img = F.pad(scan_img,[int(np.floor(diff[1]/2)),int(np.ceil(diff[1]/2)),int(np.floor(diff[0]/2)),int(np.ceil(diff[0]/2))])
 44 |     return scan_img
 45 | 
 46 | def normalise_channels(scan_img : torch.Tensor, eps : float = 1e-5):
 47 |     scan_min = scan_img.flatten(start_dim=-2).min(dim=-1)[0][:,None,None]
 48 |     scan_max = scan_img.flatten(start_dim=-2).max(dim=-1)[0][:,None,None]
 49 |     return (scan_img-scan_min)/(scan_max-scan_min + eps)
 50 | 
 51 | class CoronalScanPairsDataset(Dataset):
 52 |     def __init__(self,
 53 |                  root : str,
 54 |                  set_type : str,
 55 |                  augment : bool = False,
 56 |                  mri_seqs : list = ['fat_scan','water_scan'],
 57 |                  dxa_seqs : list = ['bone','tissue'],
 58 |                  mri_dirname : str = 'mri-mid-corr-slices',
 59 |                  dxa_dirname : str = 'dxas-processed',
 60 |                  skip_failed_samples : bool = False,
 61 |                  pad_scans = True):
 62 |         super().__init__()
 63 |         assert set_type in ['train', 'val', 'test', 'all']
 64 |         self.set_type = set_type
 65 | 
 66 |         self.mri_root = join(root, mri_dirname)
 67 |         self.dxa_root = join(root, dxa_dirname)
 68 |         self.mri_seqs = mri_seqs
 69 |         self.dxa_seqs = dxa_seqs
 70 |         self.pad_scans = pad_scans
 71 | 
 72 |         patients = []
 73 | 
 74 |         all_mris = os.listdir(self.mri_root)
 75 |         all_dxas = os.listdir(self.dxa_root)
 76 |         mri_subjects = [basename(x).split('_')[0] for x in all_mris]
 77 |         dxa_subjects = [basename(x).split('_')[0] for x in all_dxas]
 78 |         intersection_subjects = set(mri_subjects).intersection(dxa_subjects)
 79 |         # load in the dataset split file
 80 |         if set_type != 'all':
 81 |             split_file = join(abspath(__file__+'/../../'),
 82 |                               'assets',
 83 |                               f'{self.set_type}_subjects.txt')
 84 |             split_subjects = [x.replace('\n','') for x in open(split_file,'r')]
 85 |             intersection_subjects = intersection_subjects.intersection(split_subjects)
 86 |         intersection_subjects = sorted(list(intersection_subjects))
 87 | 
 88 |         # now we have subjects, construct scan pairs
 89 |         dxa_df = pd.DataFrame(all_dxas,columns=['dxa_scan'])
 90 |         dxa_df['subject'] = dxa_df['dxa_scan'].apply(lambda x: x.split('_')[0])
 91 |         mri_df = pd.DataFrame(all_mris,columns=['mri_scan'])
 92 |         mri_df['subject'] = mri_df['mri_scan'].apply(lambda x: x.split('_')[0])
 93 |         all_subjects_df = dxa_df.merge(mri_df)
 94 |         all_subjects_df = all_subjects_df[all_subjects_df['subject'].isin(intersection_subjects)]
 95 |         self.pairs = list(map(list,zip(all_subjects_df['dxa_scan'].tolist(),all_subjects_df['mri_scan'].tolist())))
 96 | 
 97 |         self.augment = augment
 98 |         self.skip_failed_samples = skip_failed_samples
 99 | 
100 |     def __len__(self):
101 |         return len(self.pairs)
102 | 
103 |     def __getitem__(self, idx):
104 |         dxa_path, mri_filename = self.pairs[idx]
105 |         dxa_fp = os.path.join(self.dxa_root,dxa_path,f'{dxa_path}.pkl')
106 |         mri_fp = os.path.join(self.mri_root, mri_filename)
107 | 
108 |         if not(os.path.isfile(dxa_fp)):
109 |             if self.skip_failed_samples:
110 |                 return self.__getitem__(np.random.randint(self.__len__()))
111 |             else:
112 |                 raise FileNotFoundError(f'Could not find file at {dxa_fp}')
113 | 
114 |         if not(os.path.isfile(mri_fp)):
115 |             if self.skip_failed_samples:
116 |                 return self.__getitem__(np.random.randint(self.__len__()))
117 |             else:
118 |                 raise FileNotFoundError(f'Could not find file at {mri_fp}')
119 | 
120 |         with open(dxa_fp,'rb') as f:
121 |             dxa_scan = pickle.load(f)
122 |         with open(mri_fp,'rb') as f:
123 |             mri_scan
=pickle.load(f) 124 | 125 | if self.augment: 126 | mri_mid_slice = np.random.randint(mri_scan['fat_scan'].shape[1]) 127 | else: 128 | mri_mid_slice = mri_scan['fat_scan'].shape[1]//2 129 | 130 | # get mid sag from mri scan 131 | for sequence in ['fat_scan','water_scan']: 132 | mri_scan[sequence] = mri_scan[sequence][:,mri_mid_slice] 133 | mri_scan['pixel_spacing'] = np.array(mri_scan['pixel_spacing'])[[0,2]] 134 | 135 | # resample both scans to 2x2 mm 136 | mri_scan=resample_scans(mri_scan,transpose=True) 137 | dxa_scan=resample_scans(dxa_scan,transpose=False) 138 | 139 | 140 | # parameters for augmentation 141 | ROT_LOW = -10 142 | ROT_HIGH = 10 143 | TRANS_LOW = -4 144 | TRANS_HIGH = 5 145 | # no zoom 146 | CONTRAST_VAR = 0.2 147 | BRIGHTNESS_VAR = 0.2 148 | 149 | if self.augment: 150 | dxa_rot = np.random.random()*(ROT_HIGH-ROT_LOW) + ROT_LOW 151 | mri_rot = np.random.random()*(ROT_HIGH-ROT_LOW) + ROT_LOW 152 | dxa_delta_x = np.random.randint(TRANS_LOW,TRANS_HIGH) 153 | dxa_delta_y = np.random.randint(TRANS_LOW,TRANS_HIGH) 154 | mri_delta_x = np.random.randint(TRANS_LOW,TRANS_HIGH) 155 | mri_delta_y = np.random.randint(TRANS_LOW,TRANS_HIGH) 156 | dxa_brightness = 1 + 2*BRIGHTNESS_VAR*(np.random.random()-0.5) 157 | mri_brightness = 1 + 2*BRIGHTNESS_VAR*(np.random.random()-0.5) 158 | dxa_gamma = random.choice([1,1,0.5,1.5]) 159 | mri_gamma = random.choice([1,1,0.5,1.5]) 160 | 161 | else: 162 | mri_rot = 0; dxa_rot = 0; 163 | dxa_delta_x = 0; dxa_delta_y = 0; 164 | mri_delta_x = 0; mri_delta_y = 0; 165 | mri_brightness=1; dxa_brightness=1; 166 | mri_gamma=1; dxa_gamma=1 167 | 168 | 169 | mri_img = get_seqs(mri_scan, self.mri_seqs) 170 | dxa_img = get_seqs(dxa_scan, self.dxa_seqs) 171 | 172 | # augment 173 | dxa_img = TF.affine(dxa_img, dxa_rot,(dxa_delta_x,dxa_delta_y),1,(0,0))[0] 174 | mri_img = TF.affine(mri_img, mri_rot,(mri_delta_x,mri_delta_y),1,(0,0))[0] 175 | 176 | # normalise scans 177 | dxa_img = normalise_channels(dxa_img) 178 | mri_img = normalise_channels(mri_img) 179 | 180 | # adjust brightness/contrast 181 | mri_img = TF.adjust_gamma(TF.adjust_brightness(mri_img, mri_brightness), mri_gamma, gain=1) 182 | dxa_img = TF.adjust_gamma(TF.adjust_brightness(dxa_img, dxa_brightness), dxa_gamma, gain=1) 183 | 184 | output_dxa_shape = (2,1000,300) 185 | output_mri_shape = (2,700,224) 186 | 187 | # crop to correct size 188 | if self.pad_scans: 189 | mri_img = pad_to_size(mri_img, output_mri_shape) 190 | dxa_img = pad_to_size(dxa_img, output_dxa_shape) 191 | 192 | 193 | 194 | return_dict = {'dxa_img': dxa_img, 'mri_img': mri_img, 195 | 'mri_filename': mri_fp, 'dxa_filename': dxa_fp} 196 | 197 | return return_dict 198 | -------------------------------------------------------------------------------- /src/registration/registration.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Function definitions to perform automatic scan correlation using our method 3 | Rhydian Windsor 18/02/20 4 | ''' 5 | 6 | import sys, os, glob 7 | import torch 8 | import numpy as np 9 | import cv2 10 | import torch.nn.functional as F 11 | import math 12 | import torchvision.transforms.functional as TF 13 | sys.path.append('/users/rhydian/self-supervised-project') 14 | from gen_utils import * 15 | from torchgeometry.core import get_rotation_matrix2d 16 | 17 | def get_4d_corr(se1, se2): 18 | # get 4d correlation between spatial feature maps 19 | b1, c1, h1, w1 = se1.size() 20 | b2, c2, h2, w2 = se2.size() 21 | corr = 
torch.bmm(se1.permute(0,2,3,1).view(b1,h1*w1,c1),se2.view(b2, c2, h2*w2))
 22 |     corr = corr.view(b1, h1, w1, h2, w2)
 23 |     return corr
 24 | 
 25 | def fit_RANSAC(src_pts, tgt_pts, tol=5):
 26 |     src_pts = np.array(src_pts)[:,np.newaxis]
 27 |     tgt_pts = np.array(tgt_pts)[:,np.newaxis]
 28 |     M, mask = cv2.findHomography(src_pts, tgt_pts, cv2.RANSAC, tol)
 29 |     src_inliers = np.array([src_point[0] for src_idx, src_point in enumerate(src_pts) if mask.ravel()[src_idx]])
 30 |     tgt_inliers = np.array([tgt_point[0] for tgt_idx, tgt_point in enumerate(tgt_pts) if mask.ravel()[tgt_idx]])
 31 |     return src_inliers, tgt_inliers, M
 32 | 
 33 | def resample_point(src_img, tgt_img, point):
 34 |     # resample points from one image frame to another
 35 |     hs, ws = src_img.size()
 36 |     ht, wt = tgt_img.size()
 37 |     resampled_point = [None, None]
 38 |     resampled_point[0] = np.round(point[0]*ht/hs).astype(int)
 39 |     resampled_point[1] = np.round(point[1]*wt/ws).astype(int)
 40 |     return resampled_point
 41 | 
 42 | def correlate_point(corr, pt=(30,30), tgt='a', softmax=True):
 43 |     # get correlation map in other image correlating to a point `pt`
 44 |     assert tgt in ['a','b']
 45 |     if tgt == 'a':
 46 |         corr_point_map = corr[:,pt[0],pt[1],:,:]
 47 |     else:
 48 |         corr_point_map = corr[:,:,:,pt[0],pt[1]]
 49 |     temperature = 0.1
 50 |     if softmax:
 51 |         b, h, w = corr_point_map.size()
 52 |         corr_point_map = F.softmax(corr_point_map.view(b,h*w)/temperature,dim=1).view(b,h,w)
 53 | 
 54 |     return corr_point_map
 55 | 
 56 | def transform_points(src_pts, M):
 57 |     tgt_pts = []
 58 |     for src_pt in src_pts:
 59 |         tgt_pt = (M @ np.concatenate([src_pt,[1]]))[:2]
 60 |         tgt_pts.append(tgt_pt)
 61 |     return np.array(tgt_pts)
 62 | 
 63 | def register_scans(scan1, scan2, t, angle):
 64 |     scan1 = F.pad(scan1, (int(t[1]),0,int(t[0]),0))
 65 |     hdiff = scan2.shape[-2] - scan1.shape[-2]
 66 |     wdiff = scan2.shape[-1] - scan1.shape[-1]
 67 |     scan1 = F.pad(scan1, (0,wdiff,0,hdiff))
 68 |     scan1 = TF.rotate(scan1, angle, center=(0,0))
 69 |     return scan1
 70 | 
 71 | 
 72 | def find_best_rigid(src_pts, tgt_pts, allow_scaling=True):
 73 |     src_centroid = np.mean(src_pts,axis=0)
 74 |     tgt_centroid = np.mean(tgt_pts,axis=0)
 75 |     covar = (src_pts - src_centroid).T@(tgt_pts - tgt_centroid)
 76 |     U,S,V = np.linalg.svd(covar)
 77 |     R = V@U.T
 78 |     pred_exp_norm = np.linalg.norm((R@(src_pts-src_centroid).T).T,axis=1).mean()
 79 |     tgt_norm = np.linalg.norm(tgt_pts-tgt_centroid,axis=1).mean()
 80 |     if allow_scaling:
 81 |         scaling = tgt_norm/pred_exp_norm
 82 |     else:
 83 |         scaling = 1
 84 |     angle = math.atan2(R[1,0], R[0,0])
 85 |     t = tgt_centroid - scaling*R@src_centroid
 86 |     M = np.concatenate([scaling*R,t[:,None]],axis=1)
 87 |     M = np.concatenate([M,np.zeros(3)[None]])
 88 |     M[-1,-1] = 1
 89 |     angle = angle*180/np.pi
 90 | 
 91 |     return M, scaling, angle, t
 92 | 
 93 | 
 94 | def find_matching_error(src_pts, tgt_pts, M):
 95 |     est_tgt_pts = []
 96 |     for src_pt in src_pts:
 97 |         est_tgt_pt = (M @ np.concatenate([src_pt,[1]]))[:2].tolist()
 98 |         est_tgt_pts.append(est_tgt_pt)
 99 | 
100 |     errors = np.linalg.norm(np.array(est_tgt_pts) - tgt_pts, axis=-1)
101 |     avg_error = np.mean(errors)
102 |     return avg_error
103 | 
104 | 
105 | def get_most_discriminative_points(points, corr_map, src_img, tgt_img, num_points=80, use_cyclic=True):
106 |     scored_points = []
107 |     for idx, point in enumerate(points):
108 |         ses_point = resample_point(src_img[0],corr_map[:,:,0,0], point)
109 |         # get correlation map
110 |         corr_point_map = correlate_point(corr_map[None], ses_point, tgt='a', softmax=True)
111 |         # find argmax of correlation map
112 |         tgt_ses_pt = [int(corr_point_map[0].argmax() // corr_point_map.shape[2]), int(corr_point_map[0].argmax() % corr_point_map.shape[2])]
113 |         if use_cyclic:
114 |             other_corr_point_map = correlate_point(corr_map[None], tgt_ses_pt, tgt='b', softmax=True)
115 |             cyclic_score = other_corr_point_map[0,ses_point[0], ses_point[1]]
116 |             score = corr_point_map.max()*cyclic_score
117 |         else:
118 |             # get correlation at this point
119 |             score = corr_point_map.max()
120 |         # resample argmax correlation point
121 |         corres_point = resample_point(corr_map[0,0,:,:], tgt_img[0], tgt_ses_pt)
122 |         # save to array, ranked by the (optionally cyclic-weighted) score
123 |         scored_points.append([point, corres_point, score, ses_point])
124 | 
125 |     # sort and choose most discriminative points
126 |     scored_points.sort(key=lambda x: -x[2])
127 |     return [x[0] for x in scored_points[:num_points]],[x[1] for x in scored_points[:num_points]], [x[2] for x in scored_points[:num_points]]
128 | 
129 | def get_dense_correspondances(img1, img2, enc_model1, enc_model2, use_cuda, upsample_factor, rotation_range=(-2,2), tries=1):
130 |     enc_model1.eval(); enc_model2.eval()
131 |     img1 = img1[None]; img2 = img2[None]
132 |     if use_cuda:
133 |         img1 = img1.cuda()
134 |         img2 = img2.cuda()
135 |     else:
136 |         img1 = img1.cpu(); img2 = img2.cpu()
137 |         enc_model1 = enc_model1.module.cpu()
138 |         enc_model2 = enc_model2.module.cpu()
139 | 
140 |     with torch.no_grad():
141 |         ses1 = enc_model1(img1)
142 |         ses2 = enc_model2(img2)
143 | 
144 |     if upsample_factor != 1:
145 |         ses1 = F.interpolate(ses1, scale_factor=upsample_factor, mode='bicubic', align_corners=False)
146 |         ses2 = F.interpolate(ses2, scale_factor=upsample_factor, mode='bicubic', align_corners=False)
147 |     angles = []
148 |     image_indexes = []
149 |     ses1 = F.normalize(ses1, dim=1)
150 |     ses2 = F.normalize(ses2, dim=1)
151 |     best_score = -9999
152 |     best_angle = 0
153 |     best_idx = None
154 |     for angle in np.linspace(rotation_range[0],rotation_range[1],tries):
155 |         if ses1.shape[-1] > ses2.shape[-1]:
156 |             correlate_response = F.conv2d(ses1,TF.rotate(ses2,angle,center=(0,0)))
157 |             max_response, flat_idx = F.max_pool2d_with_indices(correlate_response,(correlate_response.shape[-2:]))
158 |             ses_index = [flat_idx.item()//correlate_response.shape[-1],flat_idx.item()%correlate_response.shape[-1]]
159 |         else:
160 |             correlate_response = F.conv2d(ses2,TF.rotate(ses1,angle,center=(0,0)))
161 |             max_response, flat_idx = F.max_pool2d_with_indices(correlate_response,(correlate_response.shape[-2:]))
162 |             ses_index = [flat_idx.item()//correlate_response.shape[-1],flat_idx.item()%correlate_response.shape[-1]]
163 |         img_idx = resample_point(ses2[0,0],img2[0,0],ses_index)
164 |         if max_response > best_score:
165 |             best_score = max_response
166 |             best_angle = angle
167 |             best_idx = img_idx
168 | 
169 |     radians = -best_angle*np.pi/180
170 |     M = [[np.cos(radians),np.sin(radians),best_idx[0]],
171 |          [-np.sin(radians),np.cos(radians),best_idx[1]],
172 |          [0,0,1]]
173 |     M = np.array(M)
174 |     return M, best_idx, best_angle
175 | 
176 | def get_salient_point_correspondances(img1, img2, model1, model2, threshold, use_ransac, ransac_tolerance, use_cuda):
177 |     if use_cuda:
178 |         img1 = img1.cuda()[None]
179 |         img2 = img2.cuda()[None]
180 |     else:
181 |         img1 = img1.cpu(); img2 = img2.cpu()
182 |         model1 = model1.module.cpu()
183 |         model2 = model2.module.cpu()
184 |     with torch.no_grad():
185 |         ses1 = F.normalize(model1(img1),dim=1)
186 |         ses2 = F.normalize(model2(img2),dim=1)
187 |     b1, c1, w1, h1 = ses1.size()
188 |     b2, c2, w2, h2 = ses2.size()
189 |     corr4d = 
torch.bmm(ses1.view(b1,c1,w1*h1).permute(0,2,1),ses2.view(b2,c2,w2*h2)).view(b1, w1, h1, w2, h2) 190 | corr4d_points = corr4d.view(w1*h1,w2*h2) 191 | scores, matches = torch.topk(corr4d_points,2) 192 | good = [] 193 | for idx in range(len(matches)): 194 | if threshold*scores[idx,0] > scores[idx,1]: 195 | if resample_point(ses2[0,0], img2[0,0],[matches[idx,0].item()// h2, matches[idx,0].item() % h2]) not in [x[1] for x in good]: 196 | good.append([resample_point(ses1[0,0], img1[0,0],[idx // h1, idx % h1]), 197 | resample_point(ses2[0,0], img2[0,0],[matches[idx,0].item()// h2, matches[idx,0].item() % h2])]) 198 | good = np.array(good) 199 | if use_ransac: 200 | M, mask = cv2.findHomography(good[:,0],good[:,1],cv2.RANSAC, ransac_tolerance) 201 | ransac_good = good[[idx for idx, val in enumerate(mask) if val==1]] 202 | M, scaling, angle, t = find_best_rigid(ransac_good[:,0],ransac_good[:,1],allow_scaling=False) 203 | return ransac_good, np.linalg.inv(M), angle, t 204 | 205 | def RefinementNetwork(dxa_img, mri_img, dxa_model, mri_model,threshold, ransac_tolerance, use_cuda, refinement_model): 206 | ransac_good, M, coarse_angle, coarse_t = get_salient_point_correspondances(dxa_img, mri_img,dxa_model,mri_model,threshold,True,ransac_tolerance,use_cuda) 207 | refinement_model.eval() 208 | # get matrix to warp DXA onto MRI 209 | new_M=np.linalg.inv(M); new_M[0,1],new_M[1,0]=new_M[1,0],new_M[0,1]; new_M[1,2],new_M[0,2]=new_M[0,2],new_M[1,2] 210 | warped_dxa = cv2.warpPerspective(np.array(255*dxa_img[0].permute(1,2,0)).astype('uint8'), 211 | new_M,(mri_img.shape[-1], mri_img.shape[-2])) 212 | warped_dxa = torch.Tensor(warped_dxa).permute(2,0,1).float()[None]/255 213 | 214 | refinement_input = torch.cat([warped_dxa,mri_img],dim=1) 215 | if use_cuda: 216 | refinement_input = refinement_input.cuda() 217 | with torch.no_grad(): 218 | angle, t_x, t_y = refinement_model(refinement_input)[0].tolist() 219 | warped_dxa2 = TF.affine(warped_dxa, -angle, [-t_x,-t_y],1,[0,0]) 220 | 221 | R=get_rotation_matrix2d(torch.Tensor([mri_img.shape[-2]/2, mri_img.shape[-1]/2])[None],torch.Tensor([angle]),torch.Tensor([1]))[0] 222 | R[0,2] += t_y 223 | R[1,2] += t_x 224 | # get homography matrix from these predictions 225 | # plt.subplot(121) 226 | # plt.imshow(red(warped_dxa[0,0])+grayscale(mri_img[0,0])) 227 | # for pt in src_pts: 228 | # plt.scatter(pt[1],pt[0]) 229 | # plt.subplot(122) 230 | # plt.imshow(red(warped_dxa[0,0])+grayscale(mri_img[0,0])) 231 | # for pt in src_pts: 232 | # pt = R@np.array(pt + [1]) 233 | # plt.scatter(pt[1],pt[0]) 234 | # plt.subplot(122) 235 | #plt.imshow(red(warped_dxa2[0,0])+grayscale(mri_img[0,0])) 236 | # plt.savefig('test.png') 237 | # os.system('imgcat test.png') 238 | # plt.close('all') 239 | #import pdb; pdb.set_trace() 240 | return M,R, angle, t_x, t_y, coarse_angle, coarse_t[0], coarse_t[1] 241 | 242 | 243 | 244 | 245 | #cv2.warpPerspective(np.array(255*sample['dxa_img'][0]).astype('uint8'),new_M, 246 | # (sample['mri_img'].shape[-1], sample['mri_img'].shape[-2])) 247 | 248 | -------------------------------------------------------------------------------- /train_contrastive_networks.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Train self-supervised embedding correlator 3 | Rhydian Windsor 07/02/20 4 | ''' 5 | 6 | import glob 7 | import os 8 | import pickle 9 | 10 | from tqdm.utils import SimpleTextIOWrapper 11 | from src.loss_fns import NCELoss 12 | 13 | from torch import optim 14 | from src.utils.misc import load_checkpoint, 
optimiser_to 15 | 16 | import matplotlib.pyplot as plt 17 | import numpy as np 18 | import torch 19 | import torch.nn as nn 20 | import torch.nn.functional as F 21 | import torchvision.transforms.functional as TF 22 | # from gen_utils import balanced_l1w_loss, grayscale, red 23 | # from loss_functions import SSECLoss 24 | 25 | from sacred import SETTINGS, Experiment 26 | from sacred.observers import MongoObserver 27 | from sklearn.metrics import auc 28 | from torch.optim import Adam 29 | from torch.utils.data import DataLoader 30 | from torchvision.utils import make_grid, save_image 31 | from tqdm import tqdm 32 | 33 | from src.datasets import CoronalScanPairsDataset 34 | from src.models import VGGEncoder, VGGEncoderPoolAll 35 | from src.utils import load_checkpoint, save_checkpoint, get_batch_corrrelations, get_dataset_similarities 36 | 37 | SETTINGS['CAPTURE_MODE'] = 'sys' 38 | 39 | ex = Experiment('TrainScanEncoders') 40 | ex.captured_out_filter = lambda captured_output: "output capturing turned off." 41 | ex.observers.append(MongoObserver(url='login1.triton.cluster:27017')) 42 | 43 | @ex.config 44 | def expt_config(): 45 | ADAM_BETAS = (0.9,0.999) 46 | NUM_WORKERS = 20 47 | TRAIN_NUM_WORKERS = NUM_WORKERS 48 | VAL_NUM_WORKERS = NUM_WORKERS 49 | TEST_NUM_WORKERS = NUM_WORKERS 50 | # Scan Encoder Details 51 | ENCODER_TYPE = 'VGGEncoder' # 'VGGEncoder' (ours), 'VGGEncoderPoolAll' (baseline) 52 | EMBEDDING_SIZE = 128 53 | MARGIN=0.1 54 | SOFTMAX_TEMP = 0.005 55 | # Use four modes of scans 56 | MRI_SEQS=['fat_scan','water_scan'] # 0 = Both, 1 = Bone, 2=Tissue 57 | DXA_SEQS=['bone','tissue'] # 0 = Both, 1 = Fat, 2=Water 58 | BATCH_SIZE=10 59 | TRAIN_BATCH_SIZE = BATCH_SIZE 60 | VAL_BATCH_SIZE = BATCH_SIZE 61 | TEST_BATCH_SIZE = BATCH_SIZE 62 | USE_CUDA=True 63 | TRAINING_ITERATIONS = 20000/BATCH_SIZE 64 | MARGIN = 0.1 65 | VALIDATION_ITERATIONS = 100 66 | TRAINING_AUGMENTATION = True 67 | ALLOW_ROTATIONS=False 68 | POOL_SPATIAL_MAPS=False # for baseline 69 | NOTE='' 70 | LR=0.00001 71 | COPY_TO_TEMP=False 72 | BOTH_MODELS_WEIGHTS_PATH = './model_weights/SSECEncoders' + NOTE 73 | LOAD_FROM_PATH = BOTH_MODELS_WEIGHTS_PATH 74 | SAVE_IMAGES_PATH = 'images/contrastive_examples' # the path to save responses of the network from the save_examples command 75 | SAVE_ROC_PATH = 'images/roc_curve.png' 76 | DATASET_ROOT = '/work/rhydian/UKBB_Downloads' 77 | TMP_DIR = '/tmp/rhydian/UKBB_Downloads' 78 | 79 | @ex.capture 80 | def get_dataloaders(TRAIN_BATCH_SIZE, VAL_BATCH_SIZE, TEST_BATCH_SIZE, 81 | TRAIN_NUM_WORKERS, VAL_NUM_WORKERS, TEST_NUM_WORKERS, 82 | DATASET_ROOT, COPY_TO_TEMP, TMP_DIR, 83 | TRAINING_AUGMENTATION, MRI_SEQS, DXA_SEQS): 84 | if COPY_TO_TEMP: DATASET_ROOT=TMP_DIR 85 | print(DATASET_ROOT) 86 | train_ds = CoronalScanPairsDataset(set_type='train', root=DATASET_ROOT, mri_seqs=MRI_SEQS, dxa_seqs=DXA_SEQS, augment=TRAINING_AUGMENTATION) 87 | val_ds = CoronalScanPairsDataset(set_type='val' , root=DATASET_ROOT, mri_seqs=MRI_SEQS, dxa_seqs=DXA_SEQS, augment=False) 88 | test_ds = CoronalScanPairsDataset(set_type='test' , root=DATASET_ROOT, mri_seqs=MRI_SEQS, dxa_seqs=DXA_SEQS, augment=False) 89 | 90 | train_dl = DataLoader(train_ds, batch_size=TRAIN_BATCH_SIZE, num_workers=TRAIN_NUM_WORKERS, shuffle=True, drop_last=True) 91 | val_dl = DataLoader(val_ds, batch_size=VAL_BATCH_SIZE, num_workers=VAL_NUM_WORKERS, shuffle=False, drop_last=True) 92 | test_dl = DataLoader(test_ds, batch_size=TEST_BATCH_SIZE, num_workers=TEST_NUM_WORKERS, shuffle=False, drop_last=True) 93 | return train_dl, val_dl, 
test_dl 94 | 95 | 96 | 97 | @ex.capture 98 | def load_model_and_optimisers(ENCODER_TYPE, EMBEDDING_SIZE, 99 | LOAD_FROM_PATH, USE_CUDA, 100 | MRI_SEQS, DXA_SEQS, LR, ADAM_BETAS): 101 | 102 | mri_model = eval(ENCODER_TYPE)(input_modes=len(DXA_SEQS), embedding_size=EMBEDDING_SIZE) 103 | dxa_model = eval(ENCODER_TYPE)(input_modes=len(MRI_SEQS), embedding_size=EMBEDDING_SIZE) 104 | optimiser = Adam(list(dxa_model.parameters()) + list(mri_model.parameters()), lr=LR, betas=ADAM_BETAS) 105 | print(f'Trying to load model from {LOAD_FROM_PATH}') 106 | dxa_model, mri_model, optimiser,val_stats,epochs = load_checkpoint(dxa_model, mri_model, optimiser, 107 | LOAD_FROM_PATH, USE_CUDA) 108 | dxa_model = nn.DataParallel(dxa_model) 109 | mri_model = nn.DataParallel(mri_model) 110 | return dxa_model, mri_model, optimiser, val_stats, epochs 111 | 112 | @ex.capture 113 | def validate(dxa_model, mri_model, dl, USE_CUDA, return_similarities=False): 114 | dxa_model.eval() 115 | mri_model.eval() 116 | all_mri_ses = [] 117 | all_dxa_ses = [] 118 | pbar = tqdm(dl) 119 | # begin by encoding all scans 120 | print('Encoding scans') 121 | for idx, sample in enumerate(pbar): 122 | mri_img = sample['mri_img'] 123 | dxa_img = sample['dxa_img'] 124 | if USE_CUDA: 125 | mri_img = mri_img.cuda() 126 | dxa_img = dxa_img.cuda() 127 | 128 | with torch.no_grad(): 129 | dxa_ses = dxa_model(dxa_img).cpu() 130 | mri_ses = mri_model(mri_img).cpu() 131 | all_mri_ses.append(mri_ses) 132 | all_dxa_ses.append(dxa_ses) 133 | 134 | all_mri_ses = torch.cat(all_mri_ses) 135 | num_scans,_,_,_ = all_mri_ses.size() 136 | all_dxa_ses = torch.cat(all_dxa_ses) 137 | 138 | # now correlate encodings 139 | mri_b, mri_c, mri_h, mri_w = all_mri_ses.size() 140 | if USE_CUDA: 141 | all_mri_ses = all_mri_ses.cuda() 142 | all_dxa_ses = all_dxa_ses.cuda() 143 | 144 | print('Calculating encoding similarities + statistics') 145 | similarities = get_dataset_similarities(all_dxa_ses, all_mri_ses) 146 | # corrs = (F.conv2d(all_dxa_ses, all_mri_ses)/(mri_h*mri_w)).view(num_scans,num_scans,-1) 147 | rank_stats = get_rank_statistics(similarities) 148 | if not return_similarities: 149 | return rank_stats 150 | else: 151 | return rank_stats, similarities 152 | 153 | 154 | def get_rank_statistics(similarities_matrix): 155 | sorted_similarities_values, sorted_similarities_idxs = similarities_matrix.sort(dim=1,descending=True) 156 | ranks = [] 157 | for idx, row in enumerate(tqdm(sorted_similarities_idxs)): 158 | rank = torch.where(row==idx)[0][0] 159 | ranks.append(rank.cpu()) 160 | ranks = np.array(ranks) 161 | mean_rank = np.mean(ranks) 162 | median_rank = np.median(ranks) 163 | top_10 = np.sum(ranks<10) / len(ranks) 164 | top_5 = np.sum(ranks<5) / len(ranks) 165 | top_1 = np.sum(ranks<1) / len(ranks) 166 | 167 | ranks_stats = {'mean_rank': mean_rank, 'median_rank': median_rank, 168 | 'top_10': top_10, 'top_5': top_5, 'top_1':top_1} 169 | 170 | return ranks_stats 171 | 172 | @ex.capture 173 | def run_epoch(dxa_model, mri_model, dl, optimiser, SOFTMAX_TEMP, USE_CUDA, train=False): 174 | if train: dxa_model.train(); mri_model.train() 175 | else: dxa_model.eval(); mri_model.eval() 176 | pbar = tqdm(dl) 177 | criterion = NCELoss(SOFTMAX_TEMP) 178 | epoch_losses = torch.Tensor() 179 | epoch_correct = torch.Tensor() 180 | epoch_matching_similarities = torch.Tensor() 181 | epoch_non_matching_similarities = torch.Tensor() 182 | for idx, sample in enumerate(pbar): 183 | with torch.set_grad_enabled(train): 184 | if train: optimiser.zero_grad() 185 | mri_img = 
sample['mri_img']
186 |             dxa_img = sample['dxa_img']
187 |             if USE_CUDA:
188 |                 mri_img = mri_img.cuda()
189 |                 dxa_img = dxa_img.cuda()
190 | 
191 |             dxa_ses = dxa_model(dxa_img)
192 |             mri_ses = mri_model(mri_img)
193 | 
194 |             # correlate and measure similarities
195 |             correlations = get_batch_corrrelations(dxa_ses, mri_ses)
196 |             similarities, _ = torch.max(correlations.flatten(start_dim=2),dim=-1)
197 | 
198 |             loss = criterion(similarities)
199 |             if train:
200 |                 loss.backward()
201 |                 optimiser.step()
202 | 
203 |             # compare against the diagonal (matching) indices
204 |             correct = (similarities.argmax(dim=1) == torch.arange(similarities.shape[0], device=similarities.device)).cpu()
205 |             matching_similarities = similarities.diag().cpu()
206 |             non_matching_similarities = similarities[~torch.eye(similarities.shape[0]).bool()].view(-1).cpu()
207 |             # take a sub-sample of non-matching similarities
208 |             non_matching_similarities = non_matching_similarities[torch.randperm(non_matching_similarities.shape[0])][:100]
209 | 
210 |             epoch_matching_similarities = torch.cat([epoch_matching_similarities, matching_similarities])
211 |             epoch_non_matching_similarities = torch.cat([epoch_non_matching_similarities, non_matching_similarities])
212 |             epoch_correct = torch.cat([epoch_correct,correct.float()])
213 |             epoch_losses = torch.cat([epoch_losses, loss[None].cpu()])
214 |             pbar.set_description(f"Loss:{epoch_losses[-100:].mean():.4} Correct: {epoch_correct[-100:].mean():.4}")
215 | 
216 |     mean_loss = epoch_losses.mean()
217 |     mean_correct = epoch_correct.mean()
218 |     mean_matching_similarity = epoch_matching_similarities.mean()
219 |     mean_non_matching_similarity = epoch_non_matching_similarities.mean()
220 |     train_stats = {'mean_loss':mean_loss.item(),'mean_correct':mean_correct.item(),
221 |                    'mean_matching':mean_matching_similarity.item(),
222 |                    'mean_non_matching':mean_non_matching_similarity.item()}
223 |     return train_stats
224 | 
225 | 
226 | 
227 | @ex.capture
228 | def save_models(dxa_model, mri_model, val_stats, epochs, BOTH_MODELS_WEIGHTS_PATH):
229 |     print(f'==> Saving Model Weights to {BOTH_MODELS_WEIGHTS_PATH}')
230 |     state = {'dxa_model_weights': dxa_model.state_dict(),
231 |              'mri_model_weights': mri_model.state_dict(),
232 |              'val_stats' : val_stats,
233 |              'epochs' : epochs
234 |              }
235 |     if not os.path.isdir(BOTH_MODELS_WEIGHTS_PATH):
236 |         os.mkdir(BOTH_MODELS_WEIGHTS_PATH)
237 |     previous_checkpoints = glob.glob(BOTH_MODELS_WEIGHTS_PATH + '/ckpt*.pt', recursive=True)
238 |     for previous_checkpoint in previous_checkpoints:
239 |         os.remove(previous_checkpoint)
240 |     torch.save(state, BOTH_MODELS_WEIGHTS_PATH + '/ckpt' + str(epochs) + '.pt')
241 |     return
242 | 
243 | 
244 | @ex.capture
245 | def make_roc_curve(similarities, SAVE_ROC_PATH):
246 |     roc_points = []
247 |     for threshold in np.linspace(0,1,1000):
248 |         tpr = (similarities.diag()>threshold).sum()/(similarities.diag().shape[0])
249 |         fpr = (similarities[~np.eye(similarities.shape[0],dtype=bool)]>threshold).sum()/similarities[~np.eye(similarities.shape[0],dtype=bool)].shape[0]
250 |         roc_points.append([fpr.item(), tpr.item()])
251 | 
252 |     roc_points.sort(key=lambda x:x[0])
253 |     roc_points = np.array(roc_points)
254 |     plt.figure(figsize=(10,10))
255 |     plt.plot(roc_points[:,0], roc_points[:,1], linewidth=2)
256 |     plt.xlabel("False Positive Rate")
257 |     plt.ylabel("True Positive Rate")
258 |     epsilon=0.01
259 |     plt.xlim([0-epsilon,1+epsilon])
260 |     plt.ylim([0-epsilon,1+epsilon])
261 |     plt.plot([0,1],[0,1],color='gray', linestyle='--')
262 |     plt.savefig(SAVE_ROC_PATH)
263 |     print(f"AUC: {auc(roc_points[:,0], roc_points[:,1])}")
264 | 
265 | 
@ex.command(unobserved=True) 266 | def setup(_run): 267 | print('Loading Dataloaders...') 268 | train_dl, val_dl, test_dl = get_dataloaders() 269 | print('Loading Models and Optimizers...') 270 | dxa_model, mri_model, optimiser, val_stats, epochs = load_model_and_optimisers() 271 | return train_dl, val_dl, test_dl, dxa_model, mri_model, optimiser, val_stats, epochs 272 | 273 | @ex.command(unobserved=True) 274 | def test(NOTE,USE_CUDA): 275 | name=NOTE 276 | if USE_CUDA: os.system('nvidia-smi') 277 | train_dl, val_dl, test_dl, dxa_model, mri_model, optimiser, val_stats, epochs = setup() 278 | val_stats, similarities = validate(dxa_model, mri_model, test_dl, return_similarities=True) 279 | print(val_stats) 280 | make_roc_curve(similarities) 281 | with open(f'roc_curve_statistics/{name}.pkl','wb') as f: 282 | pickle.dump([val_stats,similarities],f) 283 | 284 | @ex.automain 285 | def main(COPY_TO_TEMP, DATASET_ROOT, TMP_DIR, USE_CUDA,BOTH_MODELS_WEIGHTS_PATH, _run): 286 | if COPY_TO_TEMP and not(os.path.isdir(TMP_DIR)): print('Copying to temp directory');os.system(f'bash copy_to_temp.sh {DATASET_ROOT} {TMP_DIR}') 287 | if USE_CUDA: os.system('nvidia-smi') 288 | train_dl, val_dl, test_dl, dxa_model, mri_model, optimiser, val_stats, epochs = setup() 289 | best_rank = val_stats['mean_rank'] 290 | val_stats = validate(dxa_model, mri_model, val_dl) 291 | 292 | while True: 293 | print(f'Epoch {epochs}:\nTraining Epoch...') 294 | train_stats = run_epoch(dxa_model,mri_model, train_dl,optimiser, train=True) 295 | for key in train_stats: 296 | _run.log_scalar(f'training.{key}', train_stats[key]) 297 | print(train_stats) 298 | print(f'Epoch {epochs}:Validating Epoch...') 299 | val_stats = validate(dxa_model, mri_model, val_dl) 300 | for key in val_stats: 301 | _run.log_scalar(f'validation.{key}', val_stats[key]) 302 | print(val_stats) 303 | if val_stats['mean_rank'] < best_rank: 304 | print('Saving Model') 305 | save_checkpoint(dxa_model,mri_model,optimiser,val_stats,epochs,BOTH_MODELS_WEIGHTS_PATH) 306 | best_rank = val_stats['mean_rank'] 307 | 308 | epochs += 1 309 | 310 | -------------------------------------------------------------------------------- /src/assets/val_subjects.txt: -------------------------------------------------------------------------------- 1 | 2574095 2 | 3520272 3 | 5763216 4 | 1895500 5 | 1517792 6 | 5500219 7 | 5007270 8 | 5366794 9 | 4407921 10 | 4085022 11 | 4914779 12 | 3462202 13 | 1828759 14 | 4261068 15 | 4757992 16 | 2587195 17 | 4109487 18 | 2311311 19 | 2546058 20 | 1552693 21 | 1875039 22 | 1455231 23 | 4102485 24 | 1728992 25 | 5344336 26 | 3056322 27 | 1715798 28 | 3405882 29 | 5862135 30 | 3275039 31 | 3455958 32 | 5140577 33 | 1060042 34 | 2413451 35 | 5509153 36 | 1528770 37 | 3680240 38 | 1072809 39 | 3558242 40 | 4866314 41 | 1922058 42 | 1770438 43 | 3095379 44 | 3444558 45 | 3679144 46 | 2625024 47 | 5432500 48 | 3672453 49 | 6011960 50 | 5687988 51 | 2005103 52 | 3337449 53 | 1521628 54 | 2376270 55 | 4707373 56 | 2839725 57 | 1144970 58 | 5144057 59 | 3032810 60 | 5749216 61 | 4351567 62 | 5152391 63 | 5241091 64 | 4479907 65 | 5324330 66 | 1844017 67 | 5578404 68 | 2138023 69 | 4154496 70 | 5855779 71 | 5799133 72 | 1045220 73 | 1907683 74 | 3095628 75 | 5361297 76 | 4651121 77 | 4364219 78 | 2938678 79 | 5509392 80 | 4668117 81 | 1340279 82 | 4395158 83 | 2899200 84 | 3138070 85 | 3539102 86 | 3871787 87 | 4435304 88 | 3885158 89 | 5991108 90 | 4352930 91 | 5979339 92 | 1065830 93 | 1432104 94 | 4662558 95 | 4267039 96 | 2280488 97 | 3862076 98 | 1280848 
99 | 1523891 100 | 2259230 101 | 5609144 102 | 2798451 103 | 1202416 104 | 2830800 105 | 4546441 106 | 3460875 107 | 1068882 108 | 3400601 109 | 4751468 110 | 5490674 111 | 2790708 112 | 1013312 113 | 2747518 114 | 2700013 115 | 2432931 116 | 5391463 117 | 1460867 118 | 3941854 119 | 1005489 120 | 1770096 121 | 5235038 122 | 1289699 123 | 2406243 124 | 1490612 125 | 3023463 126 | 1616436 127 | 1623111 128 | 2916296 129 | 4744870 130 | 5810506 131 | 4059710 132 | 1633980 133 | 2482632 134 | 1492523 135 | 3798293 136 | 5640885 137 | 1335300 138 | 5410041 139 | 5106092 140 | 3005176 141 | 5629846 142 | 4991517 143 | 4517886 144 | 1806728 145 | 4422648 146 | 1911256 147 | 4755448 148 | 4772506 149 | 3369322 150 | 4388907 151 | 2821951 152 | 3965241 153 | 4205893 154 | 4179719 155 | 3265127 156 | 3881099 157 | 3341154 158 | 2205198 159 | 1314447 160 | 5073986 161 | 5848005 162 | 4036091 163 | 4601965 164 | 3013366 165 | 5904228 166 | 5759924 167 | 1206661 168 | 4472186 169 | 3933997 170 | 1787501 171 | 2261363 172 | 4934234 173 | 5805715 174 | 5634721 175 | 1920037 176 | 1715442 177 | 3110125 178 | 1611858 179 | 2956662 180 | 4242443 181 | 1688194 182 | 4255855 183 | 1750594 184 | 3580327 185 | 1636822 186 | 1886138 187 | 5742754 188 | 4446197 189 | 5045654 190 | 2226243 191 | 4680886 192 | 4780519 193 | 2207187 194 | 1942064 195 | 1966583 196 | 1574181 197 | 3511483 198 | 3667060 199 | 3932661 200 | 3234428 201 | 4005684 202 | 4674797 203 | 1730802 204 | 3818617 205 | 5173273 206 | 1129165 207 | 3441690 208 | 3708027 209 | 5417666 210 | 1326785 211 | 2864287 212 | 2400848 213 | 2321603 214 | 2419846 215 | 1502182 216 | 2549474 217 | 2906759 218 | 5874371 219 | 4481593 220 | 3105930 221 | 2384383 222 | 3223312 223 | 1313393 224 | 2728106 225 | 3179573 226 | 3464367 227 | 1087882 228 | 2639271 229 | 4052924 230 | 1771815 231 | 2018919 232 | 5051084 233 | 1072603 234 | 2506559 235 | 2633467 236 | 3087156 237 | 1260134 238 | 5484455 239 | 1917127 240 | 5479392 241 | 1868337 242 | 1514228 243 | 5776410 244 | 1781095 245 | 2054483 246 | 1355139 247 | 4811606 248 | 1418047 249 | 1227559 250 | 5133990 251 | 2998042 252 | 2554473 253 | 2846838 254 | 4321430 255 | 1562845 256 | 5866345 257 | 5830411 258 | 2780812 259 | 4838240 260 | 2369805 261 | 4351197 262 | 5898854 263 | 1778001 264 | 3159204 265 | 3953995 266 | 3485734 267 | 2622813 268 | 3924949 269 | 2205441 270 | 5244814 271 | 2763570 272 | 3037381 273 | 5705929 274 | 1284047 275 | 4555440 276 | 3855856 277 | 5087878 278 | 2069508 279 | 3782584 280 | 4055615 281 | 2049151 282 | 1324476 283 | 3205179 284 | 4191745 285 | 4538273 286 | 3242596 287 | 4253303 288 | 4482985 289 | 3369513 290 | 3528777 291 | 1760562 292 | 1180351 293 | 1684198 294 | 5357760 295 | 2330185 296 | 3446796 297 | 2100021 298 | 4130663 299 | 1144691 300 | 4667674 301 | 5784487 302 | 2892053 303 | 1079865 304 | 1655578 305 | 3028514 306 | 3065706 307 | 1098010 308 | 4194715 309 | 2955976 310 | 3974745 311 | 1270001 312 | 4603329 313 | 3672858 314 | 1359176 315 | 2781975 316 | 2939616 317 | 5790549 318 | 3161556 319 | 4277137 320 | 5757193 321 | 1726596 322 | 4071463 323 | 3207932 324 | 1648360 325 | 5796407 326 | 3985226 327 | 1722703 328 | 2566515 329 | 4786337 330 | 4215289 331 | 3325730 332 | 4872911 333 | 5891868 334 | 3459624 335 | 5308650 336 | 4611872 337 | 2124946 338 | 4564075 339 | 2322155 340 | 2053283 341 | 5970423 342 | 4222536 343 | 5685266 344 | 5739817 345 | 3593127 346 | 3749711 347 | 4577579 348 | 3919619 349 | 4898059 350 | 6008118 351 | 3999945 352 | 4176197 
353 | 1779589 354 | 4971651 355 | 4443648 356 | 3596243 357 | 4306931 358 | 1257743 359 | 1349307 360 | 5011109 361 | 5414421 362 | 4626462 363 | 3396132 364 | 2386808 365 | 5734895 366 | 5202847 367 | 1096874 368 | 2789214 369 | 2565267 370 | 3287308 371 | 4958111 372 | 5569899 373 | 4540493 374 | 2817446 375 | 4187210 376 | 4071259 377 | 5616825 378 | 3876400 379 | 1202364 380 | 4004361 381 | 4767129 382 | 2572804 383 | 3459384 384 | 1692842 385 | 4737935 386 | 3480993 387 | 2112226 388 | 3424129 389 | 1510907 390 | 3015326 391 | 2285161 392 | 4125580 393 | 3644015 394 | 3221412 395 | 5241058 396 | 3731259 397 | 3502998 398 | 1673581 399 | 5641413 400 | 4980259 401 | 5951616 402 | 5216037 403 | 3510296 404 | 4670444 405 | 1138934 406 | 3593408 407 | 4827173 408 | 1343432 409 | 1484986 410 | 2724406 411 | 1171872 412 | 3011201 413 | 1193638 414 | 3008088 415 | 3791522 416 | 1408399 417 | 3229089 418 | 1960236 419 | 3124972 420 | 4954650 421 | 1722368 422 | 2344719 423 | 4872620 424 | 2542263 425 | 4387899 426 | 3108210 427 | 3131763 428 | 1647790 429 | 2326109 430 | 2802009 431 | 3240313 432 | 1133492 433 | 5834929 434 | 3483555 435 | 3586020 436 | 5247291 437 | 4407962 438 | 4882240 439 | 1835391 440 | 1542201 441 | 5801655 442 | 3451244 443 | 1678121 444 | 5491406 445 | 4398410 446 | 5500632 447 | 3722908 448 | 4771561 449 | 4734566 450 | 2670974 451 | 3863445 452 | 5448875 453 | 2961377 454 | 3383471 455 | 3625861 456 | 1966094 457 | 1557682 458 | 3181141 459 | 4124589 460 | 1567417 461 | 2608119 462 | 5885915 463 | 1694635 464 | 1633344 465 | 2500037 466 | 3153524 467 | 2429952 468 | 1822187 469 | 1486485 470 | 4549884 471 | 2443586 472 | 5678942 473 | 5722360 474 | 4858003 475 | 1796412 476 | 2954629 477 | 1492075 478 | 2767127 479 | 1299013 480 | 1746641 481 | 1442103 482 | 1258133 483 | 1093764 484 | 5554027 485 | 5431283 486 | 5641144 487 | 2317926 488 | 5638273 489 | 2872796 490 | 2142026 491 | 5303690 492 | 5119985 493 | 2827427 494 | 3378684 495 | 4231036 496 | 4432916 497 | 5885504 498 | 4865996 499 | 5877428 500 | 5540058 501 | 5743328 502 | 2508624 503 | 5326912 504 | 4820933 505 | 2568763 506 | 2759263 507 | 1676280 508 | 1084725 509 | 1077292 510 | 3991804 511 | 5062373 512 | 3738046 513 | 1045712 514 | 3240671 515 | 3733298 516 | 5205270 517 | 5136209 518 | 3055352 519 | 2178679 520 | 4727266 521 | 1237258 522 | 5896120 523 | 4634340 524 | 5294287 525 | 2496577 526 | 4531340 527 | 1893395 528 | 5686944 529 | 1048264 530 | 1133320 531 | 4406664 532 | 5796160 533 | 4970532 534 | 1562179 535 | 5445038 536 | 2901648 537 | 4449171 538 | 2392641 539 | 2392751 540 | 5286090 541 | 4942623 542 | 3697435 543 | 4308553 544 | 5800674 545 | 1332066 546 | 2376360 547 | 4430006 548 | 3520466 549 | 2665199 550 | 3687259 551 | 5151203 552 | 3529072 553 | 2974927 554 | 3780220 555 | 1728690 556 | 2223453 557 | 5215575 558 | 5539569 559 | 2118918 560 | 5171300 561 | 4395679 562 | 3216767 563 | 2558638 564 | 3046877 565 | 5954139 566 | 5660665 567 | 5903753 568 | 3974396 569 | 2186419 570 | 5409548 571 | 4489621 572 | 3211122 573 | 4968510 574 | 5282415 575 | 2463860 576 | 3989511 577 | 2821261 578 | 3308168 579 | 2529377 580 | 3497782 581 | 1071794 582 | 3623737 583 | 1312166 584 | 3729567 585 | 3696540 586 | 3733227 587 | 1011996 588 | 2423284 589 | 5864961 590 | 4237783 591 | 4351712 592 | 3510757 593 | 3161410 594 | 3629528 595 | 4322169 596 | 5434615 597 | 1769340 598 | 5034193 599 | 4865366 600 | 4829370 601 | 4219682 602 | 1709585 603 | 2481766 604 | 5977081 605 | 2987751 606 | 
5150738 607 | 4790692 608 | 3469700 609 | 5018255 610 | 2334608 611 | 4283719 612 | 2087172 613 | 5524083 614 | 1944264 615 | 1713931 616 | 3612612 617 | 4593543 618 | 4261614 619 | 4823566 620 | 5355208 621 | 5769239 622 | 1890505 623 | 4281054 624 | 1278160 625 | 4929857 626 | 2726920 627 | 4186167 628 | 5859046 629 | 3134892 630 | 5019454 631 | 4610302 632 | 4932143 633 | 1324696 634 | 4110002 635 | 2256017 636 | 3132593 637 | 1283988 638 | 4597861 639 | 5182889 640 | 5393338 641 | 1925828 642 | 4602117 643 | 4887114 644 | 3664409 645 | 5507302 646 | 3137331 647 | 2544559 648 | 3735933 649 | 1421794 650 | 3067129 651 | 3965974 652 | 6016628 653 | 2501360 654 | 1195197 655 | 2723733 656 | 5615127 657 | 2838372 658 | 1356635 659 | 3625947 660 | 1073812 661 | 5197031 662 | 5786041 663 | 5830946 664 | 4433425 665 | 4656138 666 | 1117560 667 | 1804701 668 | 1649242 669 | 4294282 670 | 4622760 671 | 1340771 672 | 3414768 673 | 3707022 674 | 5457413 675 | 2863556 676 | 2151618 677 | 5787427 678 | 3423976 679 | 3239586 680 | 2875616 681 | 5459336 682 | 2104289 683 | 4025550 684 | 2787012 685 | 1059518 686 | 5949616 687 | 5182947 688 | 4839844 689 | 1652816 690 | 4948901 691 | 1361653 692 | 1830852 693 | 2953219 694 | 2293427 695 | 5945875 696 | 5960511 697 | 4456338 698 | 1243820 699 | 5117607 700 | 1579565 701 | 1663471 702 | 2084492 703 | 2626255 704 | 1267526 705 | 5147890 706 | 1173080 707 | 3707035 708 | 1900011 709 | 4364577 710 | 4233465 711 | 1314373 712 | 3433448 713 | 4082210 714 | 4531108 715 | 4501837 716 | 5204733 717 | 5144859 718 | 3303615 719 | 4747756 720 | 2179876 721 | 3790303 722 | 1152869 723 | 1845185 724 | 1222205 725 | 5343645 726 | 5746540 727 | 1600430 728 | 3745178 729 | 3253871 730 | 2405730 731 | 4766526 732 | 1441689 733 | 1113777 734 | 1476825 735 | 1258756 736 | 5912036 737 | 5638113 738 | 2579431 739 | 4331164 740 | 5522940 741 | 2783729 742 | 6009753 743 | 4011468 744 | 4715545 745 | 1095711 746 | 6000771 747 | 5489359 748 | 2077448 749 | 4113434 750 | 3146072 751 | 1006938 752 | 2658023 753 | 1227607 754 | 5178891 755 | 4552351 756 | 6010300 757 | 4168683 758 | 3641626 759 | 1402497 760 | 2442831 761 | 4730389 762 | 3729840 763 | 1523299 764 | 1301380 765 | 5608165 766 | 1757551 767 | 5157447 768 | 4146291 769 | 1970345 770 | 5786288 771 | 5695723 772 | 4656913 773 | 2841138 774 | 2515635 775 | 4815953 776 | 1132031 777 | 2111174 778 | 1803154 779 | 4350110 780 | 1755841 781 | 2616814 782 | 5999052 783 | 3894300 784 | 5443954 785 | 5256864 786 | 2225134 787 | 2522436 788 | 2988177 789 | 4706204 790 | 3203100 791 | 1668493 792 | 2817980 793 | 5593152 794 | 2270916 795 | 2447565 796 | 4004694 797 | 3031355 798 | 1283571 799 | 3789472 800 | 4034559 801 | 3158283 802 | 5369745 803 | 3908378 804 | 5916617 805 | 1762049 806 | 4985032 807 | 3253180 808 | 1103662 809 | 1421092 810 | 3192145 811 | 1463709 812 | 2660709 813 | 5487941 814 | 3468132 815 | 5671177 816 | 5974751 817 | 3192580 818 | 3440188 819 | 5260473 820 | 3918190 821 | 2849410 822 | 3125981 823 | 5499114 824 | 4702107 825 | 5941686 826 | 1449988 827 | 3782965 828 | 4917032 829 | 4687659 830 | 4851454 831 | 1753072 832 | 4053248 833 | 2902958 834 | 4964270 835 | 5089584 836 | 5901857 837 | 5035409 838 | 3173382 839 | 4881081 840 | 2036626 841 | 5629007 842 | 1533340 843 | 5127109 844 | 3338864 845 | 5504023 846 | 5137980 847 | 5794231 848 | 4512063 849 | 4041690 850 | 3546874 851 | 5952236 852 | 3173341 853 | 5725525 854 | 2917876 855 | 3396047 856 | 2585014 857 | 1206904 858 | 3257432 859 | 1419789 860 
| 4545669 861 | 4317708 862 | 5436558 863 | 5132691 864 | 5553920 865 | 1019858 866 | 3052917 867 | 4563696 868 | 2917263 869 | 3773730 870 | 3680015 871 | 6014177 872 | 1111264 873 | 3537385 874 | 1965172 875 | 3455021 876 | 4866724 877 | 4501146 878 | 5587976 879 | 3893504 880 | 4693751 881 | 4788166 882 | 4174310 883 | 2015381 884 | 2807336 885 | 5110074 886 | 1274199 887 | 1447512 888 | 1998386 889 | 1976192 890 | 3085320 891 | 3670233 892 | 4773348 893 | 4749013 894 | 4347417 895 | 5953310 896 | 1315752 897 | 2042762 898 | 3463530 899 | 1027428 900 | 4264331 901 | 5561263 902 | 3354549 903 | 4237832 904 | 5276121 905 | 4235237 906 | 2616641 907 | 4437688 908 | 1825556 909 | 3130368 910 | 2707302 911 | 4329711 912 | 2549524 913 | 3861932 914 | 3351827 915 | 4145693 916 | 5365909 917 | 2643719 918 | 1870588 919 | 3726719 920 | 5371886 921 | 3962088 922 | 5296625 923 | 2286428 924 | 5697042 925 | 4870281 926 | 4952740 927 | 5936608 928 | 4853608 929 | 5683217 930 | 2891652 931 | 4726235 932 | 5373262 933 | 1382794 934 | 4283592 935 | 3741308 936 | 4900269 937 | 1419761 938 | 3909861 939 | 4928676 940 | 1959515 941 | 2766958 942 | 5051942 943 | 1841954 944 | 1550583 945 | 4177887 946 | 5381847 947 | 1831046 948 | 5771645 949 | 6012760 950 | 1369955 951 | 1047834 952 | 4525358 953 | 2145834 954 | 3401498 955 | 3727240 956 | 3957693 957 | 4170817 958 | 1451022 959 | 1312748 960 | 5453322 961 | 3363187 962 | 4958903 963 | 3676586 964 | 2059226 965 | 2589539 966 | 1308789 967 | 4109928 968 | 1476667 969 | 2132560 970 | 2321778 971 | 5014308 972 | 4630999 973 | 4619213 974 | 1608802 975 | 5506241 976 | 3493552 977 | 2249229 978 | 1904096 979 | 4411291 980 | 4759904 981 | 2029555 982 | 1521129 983 | 5112155 984 | 3941038 985 | 2799193 986 | 1916513 987 | 4149851 988 | 5300228 989 | 2200057 990 | 1973205 991 | 1324922 992 | 1342003 993 | 5578861 994 | 5198626 995 | 3791907 996 | 1800999 997 | 5308900 998 | 4245914 999 | 1250802 1000 | 3386989 1001 | 1714925 1002 | 2063304 1003 | 2386723 1004 | 5440132 1005 | 5540950 1006 | 5272485 1007 | 4747353 1008 | 2803212 1009 | 2154502 1010 | 4592665 1011 | 3896696 1012 | 5566840 1013 | 5615940 1014 | 2650531 1015 | 1368186 1016 | 1153759 1017 | 4609443 1018 | 5445878 1019 | 4087796 1020 | 3920773 1021 | 4487666 1022 | 4773620 1023 | 5809114 1024 | 4273761 1025 | 2257259 1026 | 4383210 1027 | 2340898 1028 | 1104661 1029 | 5976031 1030 | 2345971 1031 | 4711914 1032 | 4674408 1033 | 5482454 1034 | 2811155 1035 | 4530058 1036 | 4544056 1037 | 2876782 1038 | 1580302 1039 | 3142406 1040 | 2816276 1041 | 3302995 1042 | 5960292 1043 | 4867148 1044 | 1647540 1045 | 4851053 1046 | 4574659 1047 | 2447693 1048 | 1598892 1049 | 1953783 1050 | 4720993 1051 | 5860789 1052 | 1577408 1053 | 5291241 1054 | 2645784 1055 | 4152120 1056 | 4308949 1057 | 3571329 1058 | 2339482 1059 | 2386952 1060 | 1718884 1061 | 6024389 1062 | 4261963 1063 | 5261379 1064 | 2607450 1065 | 3496314 1066 | 2235452 1067 | 5207054 1068 | 3934598 1069 | 4772209 1070 | 3860919 1071 | 2248484 1072 | 4816891 1073 | 3045014 1074 | 2601635 1075 | 1102732 1076 | 1967880 1077 | 5927133 1078 | 5989755 1079 | 5446793 1080 | 3459842 1081 | 1623670 1082 | 2070770 1083 | 2842025 1084 | 5737246 1085 | 3271947 1086 | 4320278 1087 | 4902023 1088 | 4292387 1089 | 1474626 1090 | 4427928 1091 | 1209284 1092 | 5305152 1093 | 3419116 1094 | 4865921 1095 | 5270503 1096 | 3358856 1097 | 1163945 1098 | 4560535 1099 | 1846848 1100 | 5699520 1101 | 3612768 1102 | 1027946 1103 | 5289033 1104 | 2537278 1105 | 2075776 1106 | 
3337341 1107 | 3581362 1108 | 4933715 1109 | 2683910 1110 | 5229637 1111 | 5472633 1112 | 3799662 1113 | 5776841 1114 | 5072662 1115 | 2983426 1116 | 2554382 1117 | 1169445 1118 | 1903811 1119 | 2269039 1120 | 1296720 1121 | 1490839 1122 | 4047139 1123 | 3938440 1124 | 3613236 1125 | 1243895 1126 | 5926818 1127 | 5318295 1128 | 4140111 1129 | 4527143 1130 | 2970160 1131 | 2149946 1132 | 5250140 1133 | 4064731 1134 | 5398940 1135 | 4167885 1136 | 2447308 1137 | 4058205 1138 | 1525932 1139 | 1363978 1140 | 2395251 1141 | 3503526 1142 | 3205225 1143 | 5492043 1144 | 3862624 1145 | 3273236 1146 | 3391470 1147 | 3422813 1148 | 5844511 1149 | 4616954 1150 | 3423563 1151 | 1325972 1152 | 4447642 1153 | 2922964 1154 | 2687785 1155 | 1602344 1156 | 4312482 1157 | 2430004 1158 | 5725487 1159 | 1080294 1160 | 5938647 1161 | 3331333 1162 | 4192355 1163 | 1663468 1164 | 4497896 1165 | 1840808 1166 | 1684803 1167 | 4488423 1168 | 5048055 1169 | 5309431 1170 | 3607449 1171 | 3268075 1172 | 5682113 1173 | 2212139 1174 | 2823115 1175 | 4709293 1176 | 4491155 1177 | 4918241 1178 | 4078022 1179 | 1645729 1180 | 5506071 1181 | 2122519 1182 | 5322244 1183 | 2378361 1184 | 2917192 1185 | 1188615 1186 | 1401067 1187 | 1954762 1188 | 1079419 1189 | 3268058 1190 | 4992315 1191 | 5085542 1192 | 3039657 1193 | 2444100 1194 | 5195996 1195 | 2557519 1196 | 2704148 1197 | 3059939 1198 | 2581293 1199 | 1413194 1200 | 1383196 1201 | 4352246 1202 | 5887474 1203 | 4787303 1204 | 1936195 1205 | 3070231 1206 | 2101458 1207 | 5288238 1208 | 5052939 1209 | 4633012 1210 | 3127783 1211 | 1566341 1212 | 2213023 1213 | 5443006 1214 | 1121246 1215 | 1547155 1216 | 3054509 1217 | 2613515 1218 | 1374355 1219 | 2813241 1220 | 1181595 1221 | 2405599 1222 | 3255450 1223 | 2886851 1224 | 2282919 1225 | 2848464 1226 | 5073703 1227 | 5652610 1228 | 1565973 1229 | 3422635 1230 | 4481215 1231 | 2274784 1232 | 5709859 1233 | 5429921 1234 | 2547821 1235 | 5575570 1236 | 3111020 1237 | 5746531 1238 | 2501345 1239 | 3062919 1240 | 1299644 1241 | 2688867 1242 | 1553218 1243 | 4866902 1244 | 5685981 1245 | 5918995 1246 | 3285136 1247 | 1075359 1248 | 5249988 1249 | 2987653 1250 | 3486052 1251 | 4279201 1252 | 4115357 1253 | 4988687 1254 | 5882763 1255 | 1761470 1256 | 4725734 1257 | 1434514 1258 | 3616324 1259 | 5258682 1260 | 4228047 1261 | 4935799 1262 | 5700478 1263 | 2452264 1264 | 4987847 1265 | 5762340 1266 | 3973345 1267 | 4732914 1268 | 4566484 1269 | 5865326 1270 | 2086713 1271 | 4915434 1272 | 5065964 1273 | 4622957 1274 | 3600891 1275 | 3777047 1276 | 4098080 1277 | 1832684 1278 | 5468157 1279 | 5829107 1280 | 2614839 1281 | 2628222 1282 | 4970526 1283 | 4827213 1284 | 2409246 1285 | 5223237 1286 | 1019965 1287 | 5351447 1288 | 2290644 1289 | 5098018 1290 | 3299329 1291 | 2881494 1292 | 4946591 1293 | 3967054 1294 | 6022397 1295 | 4299162 1296 | 4422964 1297 | 4779643 1298 | 1061811 1299 | 2264832 1300 | 4517873 1301 | 2694819 1302 | 1346547 1303 | 4560187 1304 | 2095809 1305 | 5805381 1306 | 3649654 1307 | 2834326 1308 | 2938308 1309 | 3069547 1310 | 3058469 1311 | 4220516 1312 | 2447824 1313 | 3768014 1314 | 4753508 1315 | 1332206 1316 | 1977301 1317 | 3506373 1318 | 2078836 1319 | 5921060 1320 | 1440423 1321 | 4654976 1322 | 1136958 1323 | 2192718 1324 | 2352392 1325 | 5517244 1326 | 1403566 1327 | 2555496 1328 | 1446092 1329 | 4691630 1330 | 2442085 1331 | 5601189 1332 | 2189120 1333 | 4382567 1334 | 4428453 1335 | 5793060 1336 | 5016327 1337 | 2365790 1338 | 2442271 1339 | 2528583 1340 | 4489363 1341 | 3256897 1342 | 4637926 1343 | 
5505862 1344 | 1675908 1345 | 4026690 1346 | 3381283 1347 | 5237146 1348 | 2036188 1349 | 3371420 1350 | 5870730 1351 | 2308194 1352 | 3061904 1353 | 3682228 1354 | 1624238 1355 | 4693085 1356 | 6015427 1357 | 3821377 1358 | 4004419 1359 | 1980249 1360 | 2437677 1361 | 5222852 1362 | 3885467 1363 | 2813919 1364 | 1799932 1365 | 4276715 1366 | 3551399 1367 | 2292301 1368 | 1337099 1369 | 5807087 1370 | 5174405 1371 | 2902463 1372 | 2643654 1373 | 1416511 1374 | 1870341 1375 | 5680443 1376 | 5010345 1377 | 2368700 1378 | 3587624 1379 | 3692373 1380 | 5321142 1381 | 1291881 1382 | 2265541 1383 | 4878268 1384 | 3886152 1385 | 5801791 1386 | 1091664 1387 | 3522727 1388 | 2543016 1389 | 4207117 1390 | 1768346 1391 | 3740139 1392 | 5234790 1393 | 2967162 1394 | 3230123 1395 | 1510016 1396 | 3984252 1397 | 1865213 1398 | 3332641 1399 | 2110452 1400 | 3951648 1401 | 5958207 1402 | 3991627 1403 | 3120586 1404 | 2221210 1405 | 5663452 1406 | 3258826 1407 | 1570019 1408 | 5499013 1409 | 4690946 1410 | 5523029 1411 | 1787317 1412 | 1039591 1413 | 3895928 1414 | 5013753 1415 | 4894250 1416 | 3560673 1417 | 4709807 1418 | 3451586 1419 | 5675148 1420 | 6008726 1421 | 2457906 1422 | 4128374 1423 | 4876241 1424 | 2520973 1425 | 1081831 1426 | 4786725 1427 | 4863163 1428 | 4717856 1429 | 2159134 1430 | 5862410 1431 | 4461054 1432 | 1441209 1433 | 2825497 1434 | 2413473 1435 | 5927899 1436 | 5614001 1437 | 1948216 1438 | 1362431 1439 | 4023355 1440 | 1458335 1441 | 1814009 1442 | 1919245 1443 | 3217065 1444 | 4806315 1445 | 5659892 1446 | 5134350 1447 | 3545887 1448 | 2294249 1449 | 5303992 1450 | 2487913 1451 | 5654625 1452 | 4768528 1453 | 4385503 1454 | 4879269 1455 | 5591630 1456 | 3776359 1457 | 1545026 1458 | 2447689 1459 | 5502426 1460 | 4526047 1461 | 1023156 1462 | 3372708 1463 | 5066044 1464 | 2844219 1465 | 5037427 1466 | 5978896 1467 | 1713408 1468 | 3761551 1469 | 4265811 1470 | 3876107 1471 | 3907758 1472 | 2458239 1473 | 5692873 1474 | 2319249 1475 | 4731100 1476 | 2486523 1477 | 1246327 1478 | 2107868 1479 | 5748995 1480 | 5362413 1481 | 1469034 1482 | 3376773 1483 | 3489706 1484 | 5456261 1485 | 4351831 1486 | 5552268 1487 | 3733569 1488 | 2220867 1489 | 1694742 1490 | 3453533 1491 | 1585426 1492 | 1951947 1493 | 5039746 1494 | 4496116 1495 | 1576202 1496 | 3095598 1497 | 1165364 1498 | 2148838 1499 | 5679828 1500 | 3261649 1501 | 4607680 1502 | 2765187 1503 | 3308977 1504 | 5793009 1505 | 5398437 1506 | 2881571 1507 | 2453553 1508 | 1378898 1509 | 4234214 1510 | 5022493 1511 | 3663824 1512 | 3832534 1513 | 4788947 1514 | 4660799 1515 | 1987266 1516 | 2754815 1517 | 5914035 1518 | 1605438 1519 | 1327118 1520 | 2925331 1521 | 1527966 1522 | 2919819 1523 | 3706705 1524 | 4314744 1525 | 5082002 1526 | 2158878 1527 | 4822737 1528 | 2293209 1529 | 5859022 1530 | 1381351 1531 | 5653657 1532 | 4846669 1533 | 5787297 1534 | 3371013 1535 | 2820709 1536 | 1809961 1537 | 4931389 1538 | 2576030 1539 | 2498625 1540 | 2145550 1541 | 2436258 1542 | 5928282 1543 | 2970548 1544 | 1111014 1545 | 2880852 1546 | 5575775 1547 | 2114908 1548 | 2518131 1549 | 4243111 1550 | 4323106 1551 | 1323962 1552 | 1254746 1553 | 3829331 1554 | 5372523 1555 | 1709811 1556 | 4543178 1557 | 2183582 1558 | 1796211 1559 | 6003909 1560 | 3761527 1561 | 1177485 1562 | 1875786 1563 | 5436338 1564 | 2582888 1565 | 4775929 1566 | 1329496 1567 | 2706805 1568 | 4647915 1569 | 4837551 1570 | 1566824 1571 | 3402241 1572 | 3429411 1573 | 1222390 1574 | 2963120 1575 | 1429629 1576 | 5033893 1577 | 3179439 1578 | 1830072 1579 | 5702003 1580 | 
4177584 1581 | 2089215 1582 | 4469551 1583 | 3066356 1584 | 2358939 1585 | 4109491 1586 | 5650051 1587 | 5087275 1588 | 2349528 1589 | 5041945 1590 | 3260101 1591 | 3289626 1592 | 1856123 1593 | 5108156 1594 | 1143322 1595 | 1558570 1596 | 2006335 1597 | 1391010 1598 | 3564450 1599 | 4142385 1600 | 4129791 1601 | 2408989 1602 | 5939133 1603 | 5120326 1604 | 1485514 1605 | 4992588 1606 | 5504172 1607 | 4651289 1608 | 2557866 1609 | 2979704 1610 | 3268245 1611 | 3295309 1612 | 4956071 1613 | 3673008 1614 | 2443141 1615 | 2277914 1616 | 2999295 1617 | 3014356 1618 | 5341406 1619 | 5338305 1620 | 4924482 1621 | 3418432 1622 | 4098577 1623 | 2693967 1624 | 5320057 1625 | 3027956 1626 | 5814517 1627 | 1737383 1628 | 3331488 1629 | 1095248 1630 | 4502396 1631 | 2992312 1632 | 2801751 1633 | 1136419 1634 | 5664449 1635 | 3927452 1636 | 2493251 1637 | 1046189 1638 | 4316697 1639 | 3680862 1640 | 4575753 1641 | 1612073 1642 | 1318427 1643 | 4764670 1644 | 5164815 1645 | 6005953 1646 | 4530540 1647 | 3073765 1648 | 5426145 1649 | 5937224 1650 | 2337439 1651 | 1510158 1652 | 2844585 1653 | 4392092 1654 | 4763471 1655 | 2923737 1656 | 5467696 1657 | 5534258 1658 | 4261692 1659 | 3772854 1660 | 3423983 1661 | 2299025 1662 | 4356001 1663 | 4142035 1664 | 4917412 1665 | 3785412 1666 | 1352274 1667 | 3343053 1668 | 3640392 1669 | 5508134 1670 | 5988524 1671 | 3727605 1672 | 2897290 1673 | 5029465 1674 | 1161805 1675 | 5778338 1676 | 3994312 1677 | 2119426 1678 | 5751975 1679 | 2131477 1680 | 5756970 1681 | 5581438 1682 | 2587693 1683 | 2009790 1684 | 2079372 1685 | 2951872 1686 | 3754961 1687 | 1509313 1688 | 2069414 1689 | 3971286 1690 | 4650625 1691 | 1300730 1692 | 3977206 1693 | 5853632 1694 | 1768572 1695 | 1739538 1696 | 5939540 1697 | 1339856 1698 | 1074017 1699 | 5121084 1700 | 5704477 1701 | 5206463 1702 | 2054532 1703 | 1874413 1704 | 4040188 1705 | 2536631 1706 | 1724412 1707 | 3105769 1708 | 1848799 1709 | 3284301 1710 | 5744299 1711 | 4893275 1712 | 1514514 1713 | 1010440 1714 | 3966050 1715 | 2425201 1716 | 3864370 1717 | 2208918 1718 | 2969153 1719 | 3728878 1720 | 2563637 1721 | 5193257 1722 | 4362439 1723 | 5379993 1724 | 5668482 1725 | 1281893 1726 | 2473012 1727 | 3338805 1728 | 2550215 1729 | 3803528 1730 | 1029535 1731 | 2086450 1732 | 2352530 1733 | 2400689 1734 | 1759533 1735 | 5322530 1736 | 5573401 1737 | 2619579 1738 | 2470545 1739 | 3130457 1740 | 3392301 1741 | 5613653 1742 | 3975246 1743 | 4884812 1744 | 2512638 1745 | 4863576 1746 | 2597022 1747 | 4159460 1748 | 5471236 1749 | 5515324 1750 | 3550284 1751 | 4662101 1752 | 5442143 1753 | 5432830 1754 | 6007773 1755 | 3473989 1756 | 1761236 1757 | 3055383 1758 | 4762925 1759 | 2582111 1760 | 2995289 1761 | 5312426 1762 | 1924652 1763 | 3112918 1764 | 2403248 1765 | 2198627 1766 | 3648695 1767 | 4489618 1768 | 3508619 1769 | 3293791 1770 | 1985760 1771 | 1951724 1772 | 2662361 1773 | 1251190 1774 | 5367074 1775 | 5283768 1776 | 3476157 1777 | 3376442 1778 | 2788437 1779 | 4799387 1780 | 3278319 1781 | 5867395 1782 | 2392682 1783 | 3033223 1784 | 2813132 1785 | 5832007 1786 | 2800522 1787 | 2479820 1788 | 4362365 1789 | 2282215 1790 | 1588509 1791 | 1751880 1792 | 3419003 1793 | 4929062 1794 | 1513264 1795 | 5251482 1796 | 3983847 1797 | 2652332 1798 | 1836546 1799 | 1734263 1800 | 1028883 1801 | 3873995 1802 | 2960717 1803 | 2334197 1804 | 4739084 1805 | 5352591 1806 | 3282814 1807 | 4673956 1808 | 3457364 1809 | 3735392 1810 | 3773786 1811 | 5263151 1812 | 5050630 1813 | 3809401 1814 | 4876016 1815 | 4531404 1816 | 1758843 1817 | 
4730085 1818 | 1835223 1819 | 5849996 1820 | 4557070 1821 | 1114891 1822 | 1833790 1823 | 3286612 1824 | 1529011 1825 | 2894623 1826 | 1411838 1827 | 4891162 1828 | 2139775 1829 | 2739900 1830 | 5075827 1831 | 1311627 1832 | 2706270 1833 | 1319117 1834 | 4855607 1835 | 4808166 1836 | 5302446 1837 | 4305504 1838 | 1928465 1839 | 2911920 1840 | 2446821 1841 | 3303932 1842 | 1870689 1843 | 5518716 1844 | 4254222 1845 | 1869909 1846 | 3804949 1847 | 3552236 1848 | 3916006 1849 | 3104255 1850 | 3705020 1851 | 2418043 1852 | 4888503 1853 | 5715245 1854 | 1838975 1855 | 2098076 1856 | 5674740 1857 | 3221211 1858 | 5755634 1859 | 5206216 1860 | 5993559 1861 | 5341162 1862 | 2429779 1863 | 1506479 1864 | 4498141 1865 | 5107042 1866 | 3387713 1867 | 3675329 1868 | 4728120 1869 | 3061120 1870 | 2465403 1871 | 4968148 1872 | 5765385 1873 | 4754651 1874 | 4198705 1875 | 4018049 1876 | 2484441 1877 | 1719881 1878 | 4812872 1879 | 3506351 1880 | 5404663 1881 | 1503536 1882 | 3008452 1883 | 2130646 1884 | 5336439 1885 | 3717412 1886 | 5519343 1887 | 1629731 1888 | 4372759 1889 | 2708407 1890 | 1928825 1891 | 4309483 1892 | 2951899 1893 | 2342726 1894 | 2344921 1895 | 4430113 1896 | 5592878 1897 | 4200376 1898 | 2559223 1899 | 3907060 1900 | 1597368 1901 | 5483377 1902 | 5167245 1903 | 2443959 1904 | 5406464 1905 | 2935656 1906 | 1404446 1907 | 4573971 1908 | 4555769 1909 | 1185279 1910 | 3536216 1911 | 3236919 1912 | 5655371 1913 | 5651708 1914 | 3390883 1915 | 5920571 1916 | 2489085 1917 | 1473599 1918 | 5534551 1919 | 2287527 1920 | 5687595 1921 | 3006154 1922 | 2905685 1923 | 1489309 1924 | 1043957 1925 | 3931785 1926 | 1271198 1927 | 5419741 1928 | 4539601 1929 | 3429917 1930 | 2058220 1931 | 1829728 1932 | 1809199 1933 | 1250023 1934 | 3896325 1935 | 5487402 1936 | 3216598 1937 | 1666148 1938 | 2617940 1939 | 2307627 1940 | 5122600 1941 | 3081460 1942 | 4101417 1943 | 3405285 1944 | 5504497 1945 | 3089653 1946 | 1684415 1947 | 4161591 1948 | 1625149 1949 | 5942170 1950 | 4901312 1951 | 4874307 1952 | 1927786 1953 | 5631564 1954 | 1428959 1955 | 3395454 1956 | 3860215 1957 | 2647384 1958 | 5762505 1959 | 1093442 1960 | 5738730 1961 | 5503654 1962 | 3773463 1963 | 3194178 1964 | 4368600 1965 | 1166321 1966 | 1883713 1967 | 3993295 1968 | 1591619 1969 | 1373772 1970 | 1822640 1971 | 2869989 1972 | 2560227 1973 | 2733731 1974 | 4275005 1975 | 3089730 1976 | 2580734 1977 | 6006923 1978 | 2926644 1979 | 1306477 1980 | 5969859 1981 | 1248832 1982 | 3192062 1983 | 2522158 1984 | 5956835 1985 | 1105755 1986 | 4684579 1987 | 4501086 1988 | 2648419 1989 | 4967290 1990 | 4258024 1991 | 5559937 1992 | 1707147 1993 | 4572806 1994 | 5624621 1995 | 5320993 1996 | 5748392 1997 | 5735094 1998 | 3612461 1999 | 5213450 2000 | 1539828 2001 | 1192029 2002 | 1353237 2003 | 1027445 2004 | 6015058 2005 | 2439059 2006 | 4628811 2007 | 1581654 2008 | 2727374 2009 | 3587414 2010 | 3794817 2011 | 1371874 2012 | 5850842 2013 | 2003758 2014 | 5031648 2015 | 2613289 2016 | 2099526 2017 | 5195382 2018 | 5779758 2019 | 5344432 2020 | 1471435 2021 | 1684728 2022 | 1500409 2023 | 1809662 2024 | 3157480 2025 | 4671753 2026 | 2696258 2027 | -------------------------------------------------------------------------------- /src/assets/test_subjects.txt: -------------------------------------------------------------------------------- 1 | 3348373 2 | 4348748 3 | 1794105 4 | 4249348 5 | 1026323 6 | 1464762 7 | 4859100 8 | 2728539 9 | 2524346 10 | 1669272 11 | 1615214 12 | 2476130 13 | 3750219 14 | 5821536 15 | 4970172 16 | 5970624 17 | 4162966 
18 | 4299009 19 | 2167519 20 | 1744190 21 | 5375562 22 | 3452821 23 | 2357907 24 | 1546090 25 | 3255007 26 | 4746489 27 | 3176827 28 | 4932261 29 | 3596997 30 | 4550380 31 | 5214633 32 | 1561993 33 | 1239965 34 | 1377622 35 | 3303268 36 | 2776412 37 | 1878172 38 | 2410386 39 | 4308200 40 | 4644631 41 | 1053634 42 | 4290617 43 | 3830781 44 | 5668035 45 | 5613197 46 | 1489071 47 | 4785930 48 | 1822467 49 | 2888944 50 | 3819525 51 | 2581278 52 | 3490786 53 | 5334289 54 | 2365184 55 | 2775986 56 | 1089369 57 | 1051648 58 | 3080620 59 | 1877433 60 | 4080868 61 | 2682697 62 | 2492791 63 | 5378333 64 | 4018960 65 | 2610660 66 | 4051388 67 | 3458040 68 | 5892312 69 | 5874414 70 | 5861559 71 | 3710482 72 | 1199769 73 | 3884972 74 | 1381364 75 | 5382610 76 | 1935775 77 | 5717802 78 | 1577826 79 | 5720927 80 | 2716961 81 | 1726970 82 | 2949044 83 | 5435122 84 | 5063692 85 | 4338160 86 | 3682579 87 | 2805916 88 | 5357304 89 | 5972743 90 | 4607890 91 | 2857124 92 | 2263739 93 | 1717970 94 | 3397121 95 | 1482923 96 | 1884525 97 | 5459086 98 | 4620335 99 | 4554516 100 | 3959420 101 | 3888487 102 | 3208758 103 | 4659072 104 | 2202598 105 | 2451447 106 | 5143867 107 | 2238002 108 | 2153103 109 | 6021756 110 | 4188310 111 | 3447293 112 | 3737866 113 | 2621500 114 | 2121299 115 | 1748027 116 | 5704404 117 | 4031532 118 | 3715338 119 | 4511686 120 | 3442525 121 | 4657171 122 | 3751164 123 | 4868562 124 | 5678535 125 | 4833131 126 | 1756390 127 | 3813247 128 | 1654651 129 | 3434923 130 | 4601046 131 | 3316610 132 | 5501267 133 | 2899177 134 | 3820518 135 | 3746604 136 | 3283007 137 | 3152258 138 | 3465040 139 | 3357501 140 | 5153429 141 | 1138122 142 | 4807862 143 | 1573261 144 | 3782198 145 | 1130527 146 | 4489497 147 | 3339459 148 | 1390825 149 | 4602976 150 | 3236376 151 | 4502752 152 | 5499518 153 | 5554193 154 | 5016908 155 | 3605249 156 | 3181628 157 | 5620001 158 | 3186581 159 | 1500148 160 | 3121106 161 | 5113289 162 | 2374978 163 | 2378567 164 | 2143590 165 | 1858255 166 | 1721077 167 | 3755461 168 | 1985105 169 | 2843322 170 | 5057018 171 | 5439236 172 | 1094953 173 | 3629698 174 | 3689381 175 | 5973793 176 | 3552151 177 | 3148502 178 | 1788076 179 | 2033014 180 | 2840381 181 | 2368669 182 | 3690815 183 | 3946313 184 | 2906240 185 | 4285528 186 | 1020865 187 | 1632054 188 | 1543082 189 | 5361774 190 | 4509208 191 | 2299724 192 | 5825893 193 | 3330607 194 | 3348828 195 | 3791424 196 | 5992877 197 | 1328220 198 | 5194196 199 | 5017620 200 | 1415212 201 | 2977264 202 | 2871394 203 | 1206985 204 | 1716313 205 | 5903639 206 | 1511152 207 | 2486366 208 | 2591521 209 | 2165232 210 | 3650672 211 | 2321192 212 | 2520083 213 | 2403957 214 | 3073098 215 | 5802268 216 | 4466311 217 | 2246889 218 | 4200071 219 | 1394259 220 | 4255341 221 | 3575487 222 | 4041151 223 | 5245620 224 | 5027746 225 | 5432873 226 | 1281056 227 | 2003278 228 | 3093729 229 | 2413763 230 | 2393182 231 | 2646397 232 | 4913496 233 | 4580634 234 | 3319326 235 | 2626078 236 | 5274080 237 | 1205083 238 | 2547041 239 | 5294835 240 | 2827171 241 | 3384176 242 | 1439534 243 | 1865339 244 | 5680365 245 | 1881235 246 | 4970460 247 | 4645712 248 | 5032764 249 | 2597304 250 | 1343301 251 | 4023983 252 | 4272401 253 | 6010944 254 | 3558036 255 | 1859509 256 | 3218351 257 | 2507322 258 | 1453091 259 | 5142945 260 | 4422479 261 | 2004375 262 | 3309928 263 | 1109972 264 | 4545632 265 | 1625803 266 | 4242275 267 | 1889811 268 | 5853953 269 | 5663338 270 | 1618705 271 | 1586868 272 | 5750721 273 | 5198131 274 | 3520022 275 | 5797568 276 | 1962259 277 | 
3095392 278 | 5465943 279 | 1610192 280 | 3737928 281 | 2279451 282 | 4282983 283 | 5326744 284 | 3011728 285 | 1631348 286 | 4000613 287 | 2099677 288 | 4699218 289 | 5286767 290 | 3646831 291 | 2668002 292 | 2335731 293 | 5299820 294 | 5695296 295 | 1727301 296 | 3053104 297 | 5514018 298 | 2077175 299 | 4380385 300 | 2501919 301 | 5310632 302 | 4360510 303 | 2276247 304 | 5894745 305 | 3603512 306 | 4418445 307 | 1113665 308 | 2142498 309 | 4261593 310 | 4648820 311 | 3780932 312 | 3856950 313 | 2031682 314 | 5136010 315 | 4148351 316 | 1559643 317 | 4931425 318 | 3115793 319 | 4204962 320 | 4735592 321 | 3749520 322 | 2682166 323 | 2971525 324 | 1224733 325 | 2424416 326 | 1947293 327 | 3142104 328 | 2628200 329 | 2548153 330 | 2318118 331 | 5597500 332 | 1612538 333 | 1127790 334 | 2951635 335 | 1135517 336 | 1838969 337 | 5084463 338 | 3657258 339 | 4727777 340 | 3033651 341 | 2956729 342 | 1545058 343 | 1167798 344 | 3426891 345 | 2503336 346 | 1735255 347 | 3629310 348 | 3992418 349 | 2702243 350 | 4196500 351 | 3230207 352 | 2214579 353 | 5675643 354 | 2797229 355 | 2672457 356 | 4355736 357 | 4516838 358 | 3908764 359 | 4272237 360 | 4451678 361 | 3568710 362 | 4021002 363 | 1181318 364 | 2247014 365 | 2678619 366 | 1989372 367 | 1679833 368 | 2931702 369 | 5148295 370 | 2331904 371 | 4803158 372 | 3225487 373 | 1290826 374 | 1272870 375 | 1035937 376 | 5895537 377 | 2788017 378 | 2976971 379 | 1284718 380 | 4772341 381 | 5081531 382 | 1639467 383 | 1663966 384 | 1014197 385 | 2614862 386 | 4478131 387 | 2263805 388 | 1197281 389 | 4570979 390 | 1809935 391 | 5667146 392 | 4288857 393 | 4951318 394 | 2653625 395 | 5824451 396 | 1492147 397 | 4911703 398 | 1390261 399 | 1407906 400 | 3010834 401 | 3158661 402 | 1170508 403 | 4733702 404 | 3406325 405 | 5544535 406 | 3782396 407 | 2687126 408 | 4906629 409 | 1646391 410 | 1111023 411 | 2128705 412 | 3012870 413 | 1667736 414 | 4451171 415 | 4894563 416 | 1127026 417 | 2155905 418 | 2960789 419 | 5279841 420 | 4324085 421 | 2563295 422 | 1226524 423 | 2713402 424 | 1553695 425 | 4332403 426 | 2113160 427 | 2762662 428 | 1371973 429 | 3354025 430 | 4907201 431 | 4212001 432 | 4217332 433 | 2844048 434 | 4918334 435 | 5088454 436 | 5164064 437 | 4914410 438 | 5488729 439 | 4201888 440 | 2994568 441 | 3387780 442 | 5711458 443 | 2947000 444 | 4351152 445 | 4360014 446 | 3026744 447 | 2755715 448 | 1938108 449 | 3626754 450 | 1228403 451 | 3599951 452 | 3584737 453 | 5036190 454 | 1517178 455 | 4836031 456 | 5000696 457 | 3739583 458 | 2076964 459 | 4345289 460 | 3687160 461 | 1757115 462 | 1333343 463 | 2727205 464 | 4353342 465 | 5598523 466 | 1703767 467 | 3431820 468 | 4450775 469 | 5584370 470 | 4397516 471 | 2497620 472 | 4927885 473 | 5264640 474 | 4388796 475 | 5898628 476 | 4652300 477 | 1073016 478 | 1585983 479 | 1310677 480 | 3005419 481 | 3464684 482 | 1114638 483 | 5490826 484 | 2247374 485 | 5953999 486 | 3974734 487 | 2120188 488 | 4662770 489 | 5324420 490 | 1525389 491 | 3695302 492 | 3012146 493 | 4071682 494 | 4030853 495 | 4356242 496 | 4299775 497 | 4496209 498 | 1070671 499 | 4934120 500 | 3088042 501 | 4992540 502 | 2887235 503 | 4953075 504 | 4461437 505 | 5059770 506 | 2109020 507 | 5062165 508 | 2594070 509 | 2673643 510 | 5183096 511 | 4402653 512 | 3544915 513 | 3319597 514 | 3616348 515 | 5669375 516 | 5913020 517 | 4620840 518 | 5510732 519 | 3845811 520 | 1290280 521 | 3638245 522 | 4221699 523 | 1043272 524 | 3135997 525 | 2793134 526 | 2779608 527 | 1595174 528 | 1126041 529 | 1442345 530 | 2889275 531 
| 3759846 532 | 5571989 533 | 2482281 534 | 3105680 535 | 4935331 536 | 5093473 537 | 1595422 538 | 4568022 539 | 4393819 540 | 2716363 541 | 1931741 542 | 3245118 543 | 1709312 544 | 1691903 545 | 4169389 546 | 5786181 547 | 4428061 548 | 1195317 549 | 5705644 550 | 2915233 551 | 5464083 552 | 4074159 553 | 2926351 554 | 4321588 555 | 3989716 556 | 1594445 557 | 3965270 558 | 4517631 559 | 5912055 560 | 3857265 561 | 2932586 562 | 3011318 563 | 2837745 564 | 4239075 565 | 2033558 566 | 5211162 567 | 4272589 568 | 2277339 569 | 1793907 570 | 5086685 571 | 4519765 572 | 3532526 573 | 5118491 574 | 5854615 575 | 4578849 576 | 4905739 577 | 3606279 578 | 2388729 579 | 2008399 580 | 3137462 581 | 4328138 582 | 4325519 583 | 4282204 584 | 5198669 585 | 1601038 586 | 1482728 587 | 4419395 588 | 3052497 589 | 3571260 590 | 1760852 591 | 6004959 592 | 2792920 593 | 1174958 594 | 1537195 595 | 5858391 596 | 3411746 597 | 2226410 598 | 3868127 599 | 1187624 600 | 1124458 601 | 2538694 602 | 2718375 603 | 1585616 604 | 4922852 605 | 1583013 606 | 1357400 607 | 4293931 608 | 5614225 609 | 5558208 610 | 5015512 611 | 3615317 612 | 1173114 613 | 5308961 614 | 4764247 615 | 5871403 616 | 2389034 617 | 2976676 618 | 3082915 619 | 4648497 620 | 3412006 621 | 4426281 622 | 4622304 623 | 2284354 624 | 2930338 625 | 4233479 626 | 2774931 627 | 1740564 628 | 6018036 629 | 4469443 630 | 2743078 631 | 4159569 632 | 4171495 633 | 5262375 634 | 2624598 635 | 1389801 636 | 3734962 637 | 3515185 638 | 1675876 639 | 2234960 640 | 5398752 641 | 2424398 642 | 2131933 643 | 2267948 644 | 4319126 645 | 4159696 646 | 3768180 647 | 3354477 648 | 5673767 649 | 2959339 650 | 4806498 651 | 1139719 652 | 4605142 653 | 3931830 654 | 4621538 655 | 1794533 656 | 2492710 657 | 2661601 658 | 3458989 659 | 3374223 660 | 1759000 661 | 5159440 662 | 1094256 663 | 3665174 664 | 1412987 665 | 3888469 666 | 3952806 667 | 3150113 668 | 4404100 669 | 5162513 670 | 1733111 671 | 4579106 672 | 5491775 673 | 3610245 674 | 2024299 675 | 1103741 676 | 4077563 677 | 5297786 678 | 5684041 679 | 3963118 680 | 2738601 681 | 2237719 682 | 2256875 683 | 3131366 684 | 1401140 685 | 2778038 686 | 4827625 687 | 2211109 688 | 5378508 689 | 3055311 690 | 1587038 691 | 3819250 692 | 4032691 693 | 4341065 694 | 3834593 695 | 4555347 696 | 4615262 697 | 4776255 698 | 1761760 699 | 5728375 700 | 2253348 701 | 2595311 702 | 4931773 703 | 2450697 704 | 3400508 705 | 1746548 706 | 3606174 707 | 4889217 708 | 1306137 709 | 3593093 710 | 1551033 711 | 4383736 712 | 5987222 713 | 3239496 714 | 3726449 715 | 5739780 716 | 1662472 717 | 4767846 718 | 2570977 719 | 2655814 720 | 5971069 721 | 3925133 722 | 2680286 723 | 3136621 724 | 3372249 725 | 2917658 726 | 1363993 727 | 4322397 728 | 2374532 729 | 5142049 730 | 3371596 731 | 4023146 732 | 5204768 733 | 2299081 734 | 1996443 735 | 3858439 736 | 4354985 737 | 1457118 738 | 1834057 739 | 5887694 740 | 5772231 741 | 3085842 742 | 2936235 743 | 3204817 744 | 2109907 745 | 5319611 746 | 3655250 747 | 5428491 748 | 2658374 749 | 5182025 750 | 4296519 751 | 5322226 752 | 1232718 753 | 2556398 754 | 4469418 755 | 1672427 756 | 5174962 757 | 4652152 758 | 2129210 759 | 3444249 760 | 3259422 761 | 3444148 762 | 2616694 763 | 3498048 764 | 1765850 765 | 2980070 766 | 1324452 767 | 3538444 768 | 3103640 769 | 1744037 770 | 5725262 771 | 2211501 772 | 2971064 773 | 4382682 774 | 4116067 775 | 3760855 776 | 1005955 777 | 2751139 778 | 2357266 779 | 5940452 780 | 1188111 781 | 3414446 782 | 5057641 783 | 4380957 784 | 2032414 
785 | 3375796 786 | 4569784 787 | 5361120 788 | 4203954 789 | 1747362 790 | 3932069 791 | 1767951 792 | 4080978 793 | 5283094 794 | 3625074 795 | 4116225 796 | 1120988 797 | 1367563 798 | 1419644 799 | 5343392 800 | 3091599 801 | 5438489 802 | 4018285 803 | 4575544 804 | 3496780 805 | 3585924 806 | 2617607 807 | 4199379 808 | 6024641 809 | 5290795 810 | 2998244 811 | 4747044 812 | 3020342 813 | 2157037 814 | 5616856 815 | 1503932 816 | 5273751 817 | 3352588 818 | 1542408 819 | 3633317 820 | 2620400 821 | 2322677 822 | 5959241 823 | 1497751 824 | 5314833 825 | 2550535 826 | 5617665 827 | 3198974 828 | 5543213 829 | 5792249 830 | 3786025 831 | 3770057 832 | 5339311 833 | 5124817 834 | 2001700 835 | 1735979 836 | 4496120 837 | 1758163 838 | 5338809 839 | 5501130 840 | 3836491 841 | 5539172 842 | 3209155 843 | 4605435 844 | 3258408 845 | 2491548 846 | 1747318 847 | 1388668 848 | 1401433 849 | 4675744 850 | 2554557 851 | 4308431 852 | 2909123 853 | 2289149 854 | 5674399 855 | 4117841 856 | 2624141 857 | 3785394 858 | 4300375 859 | 5801414 860 | 3178021 861 | 2950302 862 | 4857864 863 | 3672377 864 | 5290651 865 | 4826524 866 | 3281222 867 | 3679851 868 | 3580471 869 | 4164466 870 | 3420901 871 | 5615776 872 | 2810204 873 | 5607101 874 | 3551458 875 | 3745792 876 | 3340378 877 | 2734586 878 | 1225665 879 | 5982866 880 | 3246353 881 | 1186237 882 | 5984092 883 | 4552518 884 | 5176626 885 | 4132197 886 | 4260560 887 | 5097488 888 | 1268852 889 | 3837834 890 | 3240765 891 | 2285135 892 | 2567270 893 | 4704334 894 | 2089907 895 | 2551197 896 | 5407375 897 | 5322628 898 | 3109043 899 | 2347030 900 | 3495297 901 | 5562101 902 | 2773743 903 | 4114719 904 | 5822732 905 | 5662744 906 | 1717941 907 | 2212842 908 | 5714636 909 | 3230860 910 | 5173497 911 | 3621501 912 | 2934761 913 | 3926652 914 | 4010108 915 | 2708268 916 | 1353397 917 | 2095852 918 | 3114573 919 | 2416309 920 | 4386843 921 | 1112669 922 | 3230036 923 | 3915882 924 | 4272397 925 | 5395933 926 | 3129234 927 | 4629886 928 | 4484799 929 | 3066715 930 | 4141491 931 | 1854867 932 | 1220920 933 | 5716404 934 | 1081932 935 | 5325466 936 | 5026313 937 | 5636815 938 | 4845995 939 | 5896464 940 | 4224590 941 | 2863204 942 | 2747207 943 | 2010875 944 | 2026147 945 | 4256421 946 | 1825668 947 | 4059261 948 | 4222077 949 | 4324481 950 | 4652346 951 | 5473738 952 | 1679575 953 | 3568104 954 | 2603754 955 | 4062393 956 | 5549667 957 | 4303706 958 | 2879858 959 | 1042597 960 | 2101239 961 | 5340304 962 | 2287889 963 | 5340595 964 | 4834786 965 | 4087282 966 | 1589516 967 | 2515366 968 | 2555166 969 | 1080068 970 | 1131416 971 | 3787961 972 | 3062657 973 | 3670501 974 | 4216588 975 | 4368276 976 | 4405227 977 | 4190433 978 | 5311362 979 | 5879467 980 | 4945647 981 | 6022020 982 | 5050664 983 | 1562471 984 | 4030581 985 | 3662444 986 | 4167913 987 | 2245058 988 | 4765581 989 | 3883359 990 | 1517135 991 | 5831037 992 | 5184834 993 | 1657728 994 | 3265213 995 | 1016715 996 | 5930902 997 | 2597130 998 | 3140345 999 | 4979178 1000 | 5616217 1001 | 3590234 1002 | 1187121 1003 | 5045276 1004 | 3703636 1005 | 3692543 1006 | 6008787 1007 | 4316920 1008 | 6005274 1009 | 3596012 1010 | 4548060 1011 | 4534126 1012 | 4487220 1013 | 1666662 1014 | 4484767 1015 | 2861360 1016 | 5529436 1017 | 5669107 1018 | 2488081 1019 | 1840239 1020 | 5233989 1021 | 4289800 1022 | 1664958 1023 | 2919371 1024 | 5519979 1025 | 4407689 1026 | 2206582 1027 | 3770526 1028 | 1200808 1029 | 3578913 1030 | 1616270 1031 | 5398826 1032 | 4783522 1033 | 3801135 1034 | 3882708 1035 | 2409951 1036 
| 5926545 1037 | 4626959 1038 | 3704999 1039 | 4466208 1040 | 3606125 1041 | 2570693 1042 | 2849221 1043 | 5943712 1044 | 4984744 1045 | 5325046 1046 | 5923772 1047 | 5847992 1048 | 1875271 1049 | 1564584 1050 | 4711931 1051 | 5750266 1052 | 1600506 1053 | 4886135 1054 | 4269847 1055 | 2608332 1056 | 1144860 1057 | 5077914 1058 | 2007656 1059 | 4758049 1060 | 2511313 1061 | 4548890 1062 | 2822300 1063 | 1437135 1064 | 3714204 1065 | 3583353 1066 | 1192066 1067 | 5173569 1068 | 2545492 1069 | 3310413 1070 | 1405827 1071 | 5275482 1072 | 2948952 1073 | 3051398 1074 | 5036070 1075 | 1399339 1076 | 5040335 1077 | 5693828 1078 | 4935323 1079 | 2489481 1080 | 1777766 1081 | 2854246 1082 | 5624782 1083 | 5060695 1084 | 4437241 1085 | 4418900 1086 | 5620860 1087 | 3086209 1088 | 4227075 1089 | 3677900 1090 | 2345676 1091 | 5916297 1092 | 1112256 1093 | 1302810 1094 | 2068917 1095 | 2821368 1096 | 1610238 1097 | 3503961 1098 | 3218845 1099 | 3880042 1100 | 1783542 1101 | 3448346 1102 | 4296928 1103 | 4054472 1104 | 2598193 1105 | 2736584 1106 | 1682518 1107 | 1534556 1108 | 3392399 1109 | 4973580 1110 | 5502909 1111 | 4105797 1112 | 2822160 1113 | 4523918 1114 | 2305359 1115 | 2372005 1116 | 3301980 1117 | 4272294 1118 | 3511954 1119 | 3689961 1120 | 2651291 1121 | 2754380 1122 | 4321622 1123 | 4369837 1124 | 3990662 1125 | 3313212 1126 | 4932320 1127 | 2910427 1128 | 2248929 1129 | 1139297 1130 | 5202526 1131 | 2081161 1132 | 4154586 1133 | 3560913 1134 | 2893571 1135 | 3483991 1136 | 1963701 1137 | 2200167 1138 | 5302094 1139 | 1795368 1140 | 3657496 1141 | 6011909 1142 | 3494185 1143 | 3610661 1144 | 3708403 1145 | 1752873 1146 | 3183494 1147 | 2243597 1148 | 3740930 1149 | 5170623 1150 | 5247817 1151 | 2008815 1152 | 1982861 1153 | 2941048 1154 | 2986396 1155 | 3436774 1156 | 2921653 1157 | 3084046 1158 | 3737909 1159 | 4158553 1160 | 5521098 1161 | 5879546 1162 | 1018784 1163 | 5554012 1164 | 2262632 1165 | 5502432 1166 | 3103665 1167 | 4616741 1168 | 2272532 1169 | 5553379 1170 | 2139624 1171 | 1333494 1172 | 3833711 1173 | 5523649 1174 | 5747649 1175 | 2592708 1176 | 5029842 1177 | 6016724 1178 | 5227257 1179 | 5745052 1180 | 2119301 1181 | 1783983 1182 | 1261218 1183 | 4151175 1184 | 1394233 1185 | 5393685 1186 | 5742263 1187 | 2452917 1188 | 2957527 1189 | 1618712 1190 | 3681796 1191 | 2034405 1192 | 3513294 1193 | 4194833 1194 | 1428306 1195 | 4497877 1196 | 5361412 1197 | 1882220 1198 | 1840469 1199 | 1056422 1200 | 4666077 1201 | 4080483 1202 | 2410717 1203 | 1298169 1204 | 2864598 1205 | 2385295 1206 | 5838781 1207 | 4435979 1208 | 4457038 1209 | 4027300 1210 | 3392491 1211 | 4642716 1212 | 2840794 1213 | 5591818 1214 | 2291533 1215 | 3844512 1216 | 5603400 1217 | 5260523 1218 | 5156695 1219 | 1389985 1220 | 4243128 1221 | 4729262 1222 | 2562042 1223 | 3046489 1224 | 2423682 1225 | 3910758 1226 | 4708741 1227 | 5489825 1228 | 3342619 1229 | 4983082 1230 | 4546803 1231 | 3426046 1232 | 3295614 1233 | 2576122 1234 | 3381353 1235 | 4838913 1236 | 3640551 1237 | 4083932 1238 | 4080662 1239 | 2765554 1240 | 1289541 1241 | 4920296 1242 | 2714197 1243 | 2207750 1244 | 2046065 1245 | 1930647 1246 | 5653704 1247 | 5662162 1248 | 5353139 1249 | 5788071 1250 | 2766910 1251 | 3152170 1252 | 2085962 1253 | 5939337 1254 | 5117272 1255 | 2820061 1256 | 4389700 1257 | 2866000 1258 | 4254655 1259 | 1865844 1260 | 1260892 1261 | 1788680 1262 | 4213777 1263 | 5441463 1264 | 4689651 1265 | 3195356 1266 | 3977877 1267 | 4634485 1268 | 3465381 1269 | 1614319 1270 | 1799892 1271 | 2516586 1272 | 5274165 1273 
| 1021779 1274 | 4255116 1275 | 3833965 1276 | 2444098 1277 | 3266076 1278 | 4919603 1279 | 2539609 1280 | 2924486 1281 | 4532375 1282 | 5818954 1283 | 3373034 1284 | 1341750 1285 | 2412620 1286 | 5107379 1287 | 5498191 1288 | 5653517 1289 | 2849866 1290 | 2231672 1291 | 5098727 1292 | 1739372 1293 | 3163070 1294 | 5047452 1295 | 1299761 1296 | 5332022 1297 | 3780361 1298 | 1061596 1299 | 2516291 1300 | 2756446 1301 | 5046549 1302 | 3091134 1303 | 5326293 1304 | 2786447 1305 | 2190842 1306 | 2464969 1307 | 5045486 1308 | 1033073 1309 | 2402967 1310 | 5837397 1311 | 1164182 1312 | 4358173 1313 | 1890439 1314 | 5978768 1315 | 2300146 1316 | 4107805 1317 | 3008575 1318 | 2037487 1319 | 5984264 1320 | 4794147 1321 | 2448144 1322 | 4073978 1323 | 1919977 1324 | 1226840 1325 | 5617886 1326 | 3274657 1327 | 3660484 1328 | 1630018 1329 | 3305559 1330 | 5075718 1331 | 2868769 1332 | 4964974 1333 | 1868725 1334 | 1769170 1335 | 1066335 1336 | 2677073 1337 | 1495165 1338 | 4154697 1339 | 1870781 1340 | 3813694 1341 | 4929456 1342 | 1159623 1343 | 1326623 1344 | 2661775 1345 | 1853884 1346 | 4769502 1347 | 1930336 1348 | 3140812 1349 | 1843215 1350 | 1157390 1351 | 4066400 1352 | 5229666 1353 | 1299610 1354 | 1375993 1355 | 2076636 1356 | 2006004 1357 | 1032855 1358 | 1614136 1359 | 5457448 1360 | 2205046 1361 | 4317532 1362 | 2890654 1363 | 4412163 1364 | 1339423 1365 | 2165676 1366 | 3951261 1367 | 2103257 1368 | 4724908 1369 | 4543100 1370 | 5372451 1371 | 5918598 1372 | 5330425 1373 | 1051842 1374 | 5697275 1375 | 2074024 1376 | 1606542 1377 | 5228056 1378 | 2503100 1379 | 4197246 1380 | 2728080 1381 | 3849363 1382 | 1971145 1383 | 5573886 1384 | 5649172 1385 | 5072993 1386 | 5099971 1387 | 2822727 1388 | 2799410 1389 | 1262711 1390 | 2921216 1391 | 3697528 1392 | 1325862 1393 | 4168652 1394 | 1601959 1395 | 3938703 1396 | 2234668 1397 | 1809374 1398 | 1242320 1399 | 4110907 1400 | 2095669 1401 | 3777762 1402 | 4905864 1403 | 1986011 1404 | 5915951 1405 | 4630747 1406 | 1400645 1407 | 1403732 1408 | 5686077 1409 | 2446627 1410 | 3089991 1411 | 4052214 1412 | 5480943 1413 | 5429091 1414 | 5407560 1415 | 2000194 1416 | 1394682 1417 | 3615513 1418 | 3987098 1419 | 4333813 1420 | 2477463 1421 | 3039187 1422 | 5828810 1423 | 3630180 1424 | 5205047 1425 | 2209051 1426 | 4171263 1427 | 5652953 1428 | 2347581 1429 | 2449937 1430 | 2275095 1431 | 3650775 1432 | 2957232 1433 | 2101277 1434 | 2544516 1435 | 3663394 1436 | 4864672 1437 | 2762587 1438 | 2461939 1439 | 4080618 1440 | 2064665 1441 | 3963613 1442 | 3972471 1443 | 5037977 1444 | 3446692 1445 | 1877312 1446 | 3994675 1447 | 4713848 1448 | 5897720 1449 | 1926089 1450 | 4522051 1451 | 1833159 1452 | 1719726 1453 | 3268579 1454 | 1679199 1455 | 5336780 1456 | 2230396 1457 | 4832397 1458 | 1120248 1459 | 5376201 1460 | 2578088 1461 | 1440200 1462 | 2179941 1463 | 5899490 1464 | 2616176 1465 | 4053321 1466 | 4897781 1467 | 5658331 1468 | 4515940 1469 | 1910203 1470 | 2561759 1471 | 5258145 1472 | 1061131 1473 | 5078731 1474 | 3140383 1475 | 2138544 1476 | 2404132 1477 | 1659565 1478 | 1907090 1479 | 2694034 1480 | 4689839 1481 | 4395805 1482 | 1675789 1483 | 5853165 1484 | 2363552 1485 | 2389406 1486 | 4319721 1487 | 2618765 1488 | 2361911 1489 | 5698279 1490 | 3288158 1491 | 3115106 1492 | 3694377 1493 | 5799918 1494 | 5194368 1495 | 2678415 1496 | 5386370 1497 | 5146743 1498 | 1960213 1499 | 2402972 1500 | 3302893 1501 | 3045569 1502 | 5371561 1503 | 5239082 1504 | 3942223 1505 | 3418602 1506 | 1627864 1507 | 2689073 1508 | 1018862 1509 | 2263383 1510 
| 5988903 1511 | 4736258 1512 | 2577915 1513 | 3234961 1514 | 2320232 1515 | 1974592 1516 | 1000459 1517 | 5003666 1518 | 2159608 1519 | 3828378 1520 | 4277652 1521 | 1929859 1522 | 5915289 1523 | 5957476 1524 | 1045555 1525 | 5481573 1526 | 1028055 1527 | 5400695 1528 | 2075957 1529 | 5966267 1530 | 5886408 1531 | 2409825 1532 | 4239044 1533 | 2618487 1534 | 1275896 1535 | 4350925 1536 | 4770265 1537 | 3356923 1538 | 4579351 1539 | 4377876 1540 | 4944378 1541 | 3479119 1542 | 3057931 1543 | 1347580 1544 | 2316000 1545 | 5098888 1546 | 5761885 1547 | 4328174 1548 | 2745608 1549 | 2895183 1550 | 1472634 1551 | 5331976 1552 | 5380945 1553 | 4180825 1554 | 3323638 1555 | 3579321 1556 | 1087239 1557 | 4731647 1558 | 5523493 1559 | 4659691 1560 | 2550849 1561 | 4098495 1562 | 5235822 1563 | 1258977 1564 | 1512566 1565 | 4675863 1566 | 3952389 1567 | 4869467 1568 | 2913377 1569 | 4602841 1570 | 1795930 1571 | 5274754 1572 | 2489109 1573 | 2054639 1574 | 1090023 1575 | 2491932 1576 | 3464100 1577 | 5268532 1578 | 5820907 1579 | 2779050 1580 | 4325671 1581 | 3996787 1582 | 3109354 1583 | 5119498 1584 | 1656322 1585 | 2590485 1586 | 2268496 1587 | 3493795 1588 | 3110070 1589 | 3575380 1590 | 4147342 1591 | 2413427 1592 | 3660312 1593 | 1511910 1594 | 5049241 1595 | 4474744 1596 | 1103212 1597 | 2991390 1598 | 2857715 1599 | 5762496 1600 | 1002616 1601 | 3596458 1602 | 1508867 1603 | 3123196 1604 | 3583531 1605 | 2010127 1606 | 2648062 1607 | 2857476 1608 | 4980389 1609 | 1299305 1610 | 3110862 1611 | 3188205 1612 | 4735044 1613 | 5604930 1614 | 1402729 1615 | 4309708 1616 | 1785504 1617 | 4048599 1618 | 4790413 1619 | 5962184 1620 | 3178892 1621 | 1677322 1622 | 4149309 1623 | 2189592 1624 | 2771249 1625 | 1852246 1626 | 3624790 1627 | 3427216 1628 | 5068019 1629 | 2023229 1630 | 1915665 1631 | 4512042 1632 | 4455135 1633 | 3653603 1634 | 4338062 1635 | 4192960 1636 | 3033664 1637 | 4369829 1638 | 2549670 1639 | 2313243 1640 | 2814198 1641 | 3826517 1642 | 2934577 1643 | 2583527 1644 | 3112540 1645 | 1711530 1646 | 2196993 1647 | 5375761 1648 | 2268947 1649 | 1023901 1650 | 4390038 1651 | 5924262 1652 | 2981679 1653 | 2608358 1654 | 2176369 1655 | 3231396 1656 | 5175835 1657 | 4171364 1658 | 1483181 1659 | 5157953 1660 | 3447570 1661 | 1623456 1662 | 2567158 1663 | 1559409 1664 | 1874830 1665 | 4357324 1666 | 3357354 1667 | 2524840 1668 | 4887632 1669 | 3326146 1670 | 4045716 1671 | 5288123 1672 | 1730409 1673 | 2410687 1674 | 1506912 1675 | 4965865 1676 | 5578822 1677 | 4153178 1678 | 3860103 1679 | 3966160 1680 | 5529721 1681 | 2990792 1682 | 4215769 1683 | 4094097 1684 | 2584525 1685 | 4876103 1686 | 5311186 1687 | 5645644 1688 | 4073356 1689 | 5382140 1690 | 1194830 1691 | 5649199 1692 | 5082472 1693 | 4383817 1694 | 1732713 1695 | 4734553 1696 | 5992008 1697 | 3756499 1698 | 2734203 1699 | 2524050 1700 | 2570147 1701 | 4486198 1702 | 1046616 1703 | 4167493 1704 | 3740927 1705 | 5091453 1706 | 4180515 1707 | 3566734 1708 | 2291151 1709 | 3185806 1710 | 1296948 1711 | 4846717 1712 | 2314357 1713 | 1336647 1714 | 2776635 1715 | 2514076 1716 | 1539274 1717 | 2558130 1718 | 1169338 1719 | 5435221 1720 | 2429870 1721 | 4800997 1722 | 1369422 1723 | 4994968 1724 | 3582983 1725 | 1671144 1726 | 4166617 1727 | 2334109 1728 | 5214100 1729 | 1264009 1730 | 5368041 1731 | 4024645 1732 | 2323052 1733 | 1484787 1734 | 5215922 1735 | 3655218 1736 | 4429190 1737 | 1965503 1738 | 4048004 1739 | 2206465 1740 | 4223388 1741 | 1659430 1742 | 2620999 1743 | 2182575 1744 | 4250727 1745 | 3883254 1746 | 5978279 1747 
| 5729318 1748 | 1451350 1749 | 4680047 1750 | 2708629 1751 | 2770221 1752 | 5069335 1753 | 3530628 1754 | 5462931 1755 | 2404593 1756 | 1323133 1757 | 3185271 1758 | 1496571 1759 | 5376642 1760 | 1288483 1761 | 2451242 1762 | 2065096 1763 | 2826901 1764 | 1900492 1765 | 4365502 1766 | 3552441 1767 | 5879103 1768 | 1957550 1769 | 2283290 1770 | 5736777 1771 | 3578344 1772 | 5782322 1773 | 3611421 1774 | 3337195 1775 | 4376027 1776 | 1944900 1777 | 4112320 1778 | 1312947 1779 | 3124597 1780 | 1645057 1781 | 5828803 1782 | 1034443 1783 | 3910368 1784 | 3248025 1785 | 4264755 1786 | 3019129 1787 | 1747046 1788 | 4320045 1789 | 3304013 1790 | 2539586 1791 | 5927370 1792 | 2826649 1793 | 1885859 1794 | 3143342 1795 | 3314087 1796 | 1103532 1797 | 4342687 1798 | 5951193 1799 | 5944305 1800 | 2326975 1801 | 3353525 1802 | 2960687 1803 | 3960551 1804 | 5135515 1805 | 3776601 1806 | 1012907 1807 | 1715128 1808 | 1438272 1809 | 4984465 1810 | 5947241 1811 | 3739265 1812 | 1547113 1813 | 5764080 1814 | 2164506 1815 | 2885029 1816 | 3348036 1817 | 1524253 1818 | 4464338 1819 | 6006186 1820 | 2846726 1821 | 2835455 1822 | 3310864 1823 | 4013177 1824 | 2899121 1825 | 2392463 1826 | 2652231 1827 | 2371164 1828 | 3816912 1829 | 3981555 1830 | 3522004 1831 | 4640918 1832 | 1829906 1833 | 5472355 1834 | 3487777 1835 | 5576211 1836 | 2478594 1837 | 2699898 1838 | 5493748 1839 | 2033006 1840 | 4331318 1841 | 2580267 1842 | 2136122 1843 | 5442570 1844 | 3523916 1845 | 2718107 1846 | 3334475 1847 | 4670178 1848 | 5577070 1849 | 2507712 1850 | 5738128 1851 | 5623588 1852 | 3541771 1853 | 5597830 1854 | 2047063 1855 | 5191740 1856 | 2679630 1857 | 1464499 1858 | 5455267 1859 | 2794327 1860 | 5217283 1861 | 1051492 1862 | 4950036 1863 | 5415305 1864 | 4748270 1865 | 1418126 1866 | 4264817 1867 | 6004527 1868 | 2094920 1869 | 3985817 1870 | 1429751 1871 | 4851214 1872 | 2041774 1873 | 3812019 1874 | 3858701 1875 | 1712936 1876 | 1696364 1877 | 1257311 1878 | 2511100 1879 | 2992274 1880 | 4650280 1881 | 4194428 1882 | 4045358 1883 | 3746307 1884 | 4388158 1885 | 2107411 1886 | 2128463 1887 | 1576670 1888 | 5411589 1889 | 1804153 1890 | 1205949 1891 | 4430961 1892 | 3800284 1893 | 4868543 1894 | 1022494 1895 | 1808529 1896 | 4716704 1897 | 1872489 1898 | 5748767 1899 | 3697417 1900 | 5328464 1901 | 2392393 1902 | 2483158 1903 | 2583129 1904 | 1880776 1905 | 4326884 1906 | 3748240 1907 | 1060920 1908 | 2773898 1909 | 3201657 1910 | 1692217 1911 | 4151016 1912 | 2005472 1913 | 4824214 1914 | 2838348 1915 | 1029158 1916 | 1369716 1917 | 2325084 1918 | 2561082 1919 | 3679311 1920 | 1172158 1921 | 2602345 1922 | 5337227 1923 | 2662456 1924 | 2133559 1925 | 1618524 1926 | 4094541 1927 | 4502810 1928 | 5382349 1929 | 2757168 1930 | 3961860 1931 | 3823867 1932 | 1226160 1933 | 4786228 1934 | 1890450 1935 | 1165970 1936 | 5374573 1937 | 3365121 1938 | 1431197 1939 | 1054487 1940 | 3880645 1941 | 4231722 1942 | 4928025 1943 | 3121675 1944 | 4521046 1945 | 5258670 1946 | 1505000 1947 | 3089705 1948 | 5955540 1949 | 2760450 1950 | 3532251 1951 | 4591498 1952 | 2759539 1953 | 3754483 1954 | 3975624 1955 | 3972687 1956 | 3329553 1957 | 3835572 1958 | 3771493 1959 | 2348728 1960 | 4232558 1961 | 2889573 1962 | 4927326 1963 | 5143087 1964 | 4949571 1965 | 5650221 1966 | 3234816 1967 | 5980447 1968 | 4951466 1969 | 5540693 1970 | 3306431 1971 | 5119654 1972 | 4307879 1973 | 3204291 1974 | 3321357 1975 | 4060988 1976 | 2670480 1977 | 6012584 1978 | 2217296 1979 | 2364371 1980 | 4030928 1981 | 1749412 1982 | 4868550 1983 | 3303572 1984 
| 5063472 1985 | 4524737 1986 | 2020757 1987 | 3875558 1988 | 3662380 1989 | 1618393 1990 | 4569332 1991 | 4616307 1992 | 2941114 1993 | 2826138 1994 | 5257052 1995 | 3164905 1996 | 5307504 1997 | 2894546 1998 | 5426039 1999 | 4936402 2000 | 4029900 2001 | 4004062 2002 | 4782845 2003 | 4435656 2004 | 2272031 2005 | 1410022 2006 | 3086855 2007 | 5009765 2008 | 3374698 2009 | 4944615 2010 | 2913437 2011 | 1362648 2012 | 3102722 2013 | 5595625 2014 | 1040790 2015 | 2966750 2016 | 2746117 2017 | 3670749 2018 | 5421072 2019 | 1481243 2020 | 2494133 2021 | 4970741 2022 | 5761190 2023 | 5828571 2024 | 3077176 2025 | 2879914 2026 | 5887378 2027 | 5421626 2028 | --------------------------------------------------------------------------------
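Note: src/assets/val_subjects.txt and src/assets/test_subjects.txt above each hold one UK Biobank subject ID per line (2026 validation IDs and 2027 test IDs, per the line counts in the dump), fixing the held-out splits presumably consumed elsewhere in the repository. A minimal sketch of reading such a split file, assuming plain one-ID-per-line text; the load_split helper below is illustrative only and not part of the repository's code:

    def load_split(path: str) -> list[int]:
        """Read one subject ID per line, skipping blank lines."""
        with open(path) as f:
            return [int(line) for line in f if line.strip()]

    val_ids = load_split('src/assets/val_subjects.txt')    # 2026 IDs per the file above
    test_ids = load_split('src/assets/test_subjects.txt')  # 2027 IDs per the file above
    # The two splits are expected to be disjoint subject sets.
    assert not set(val_ids) & set(test_ids)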