├── .gitignore ├── LICENSE ├── README.md ├── clr.py ├── data.py ├── flops_benchmark.py ├── imagenet.py ├── logger.py ├── model.py ├── requirements.txt ├── results │   ├── mobilenet_v2_0.5_160 │   │   ├── loss.png │   │   ├── model_best.pth.tar │   │   ├── params.txt │   │   ├── results.csv │   │   ├── top1.png │   │   └── top5.png │   └── mobilenet_v2_1.0_224 │       ├── loss.png │       ├── model_best.pth.tar │       ├── params.txt │       ├── results.csv │       ├── top1.png │       └── top5.png └── run.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | 103 | # visual studio 104 | .vscode/ 105 | 106 | .idea/ 107 | results/* 108 | # models 109 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Evgenii Zheltonozhskii 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # MobileNetv2 in PyTorch 2 | 3 | An implementation of `MobileNetv2` in PyTorch. `MobileNetv2` is an efficient convolutional neural network architecture for mobile devices. For more information, check the paper: 4 | [Inverted Residuals and Linear Bottlenecks: Mobile Networks for Classification, Detection and Segmentation](https://arxiv.org/abs/1801.04381) 5 | 6 | ## Usage 7 | 8 | Clone the repo: 9 | ```bash 10 | git clone https://github.com/Randl/MobileNetV2-pytorch 11 | pip install -r requirements.txt 12 | ``` 13 | 14 | Use the model defined in `model.py` to run the ImageNet example: 15 | ```bash 16 | python imagenet.py --dataroot "/path/to/imagenet/" 17 | ``` 18 | 19 | To continue training from a checkpoint: 20 | ```bash 21 | python imagenet.py --dataroot "/path/to/imagenet/" --resume "/path/to/checkpoint/folder" 22 | ``` 23 | ## Results 24 | 25 | For the x1.0 model, I achieved 0.3% higher top-1 accuracy than claimed. 26 | 27 | |Classification Checkpoint| MACs (M) | Parameters (M)| Top-1 Accuracy| Top-5 Accuracy| Claimed top-1| Claimed top-5| 28 | |-------------------------|------------|---------------|---------------|---------------|---------------|---------------| 29 | | [mobilenet_v2_1.0_224]|300 |3.47 | 72.10| 90.48| 71.8| 91.0| 30 | | [mobilenet_v2_0.5_160]|50 |1.95 | 60.61| 82.87| 61.0| 83.2| 31 | 32 | You can test the checkpoints with: 33 | ```bash 34 | python imagenet.py --dataroot "/path/to/imagenet/" --resume "results/mobilenet_v2_1.0_224/model_best.pth.tar" -e 35 | python imagenet.py --dataroot "/path/to/imagenet/" --resume "results/mobilenet_v2_0.5_160/model_best.pth.tar" -e --scaling 0.5 --input-size 160 36 | ``` 37 | -------------------------------------------------------------------------------- /clr.py: -------------------------------------------------------------------------------- 1 | # temporary file until https://github.com/pytorch/pytorch/pull/2016 is merged (hopefully 0.5) 2 | 3 | 4 | import numpy as np 5 | from torch.optim import Optimizer 6 | 7 | 8 | class CyclicLR(object): 9 | """Sets the learning rate of each parameter group according to the 10 | cyclical learning rate policy (CLR). The policy cycles the learning 11 | rate between two boundaries with a constant frequency, as detailed in 12 | the paper `Cyclical Learning Rates for Training Neural Networks`_. 13 | The distance between the two boundaries can be scaled on a per-iteration 14 | or per-cycle basis. 15 | The cyclical learning rate policy changes the learning rate after every batch. 16 | `batch_step` should be called after a batch has been used for training. 17 | To resume training, save `last_batch_iteration` and use it to instantiate `CyclicLR`. 18 | This class has three built-in policies, as put forth in the paper: 19 | "triangular": 20 | A basic triangular cycle w/ no amplitude scaling. 21 | "triangular2": 22 | A basic triangular cycle that scales initial amplitude by half each cycle. 23 | "exp_range": 24 | A cycle that scales initial amplitude by gamma**(cycle iterations) at each 25 | cycle iteration.
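In formula form (this is what `get_lr` below computes): with iteration index i = `last_batch_iteration` and half-cycle length s = `step_size`, cycle = floor(1 + i / (2 * s)) and x = |i / s - 2 * cycle + 1|, giving lr = base_lr + (max_lr - base_lr) * max(0, 1 - x) * scale_fn(k), where k is the cycle number when `scale_mode` is 'cycle' and the batch iteration i when `scale_mode` is 'iterations'.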
26 | This implementation was adapted from the github repo: `bckenstler/CLR`_ 27 | Args: 28 | optimizer (Optimizer): Wrapped optimizer. 29 | base_lr (float or list): Initial learning rate which is the 30 | lower boundary in the cycle for each param group. 31 | Default: 0.001 32 | max_lr (float or list): Upper boundaries in the cycle for 33 | each parameter group. Functionally, 34 | it defines the cycle amplitude (max_lr - base_lr). 35 | The lr at any cycle is the sum of base_lr 36 | and some scaling of the amplitude; therefore 37 | max_lr may not actually be reached depending on 38 | scaling function. Default: 0.006 39 | step_size (int): Number of training iterations per 40 | half cycle. The authors suggest setting step_size 41 | to 2-8 times the number of training iterations per epoch. Default: 2000 42 | mode (str): One of {triangular, triangular2, exp_range}. 43 | Values correspond to policies detailed above. 44 | If scale_fn is not None, this argument is ignored. 45 | Default: 'triangular' 46 | gamma (float): Constant in 'exp_range' scaling function: 47 | gamma**(cycle iterations) 48 | Default: 1.0 49 | scale_fn (function): Custom scaling policy defined by a single 50 | argument lambda function, where 51 | 0 <= scale_fn(x) <= 1 for all x >= 0. 52 | The mode parameter is ignored. 53 | Default: None 54 | scale_mode (str): {'cycle', 'iterations'}. 55 | Defines whether scale_fn is evaluated on 56 | cycle number or cycle iterations (training 57 | iterations since start of cycle). 58 | Default: 'cycle' 59 | last_batch_iteration (int): The index of the last batch. Default: -1 60 | Example: 61 | >>> optimizer = torch.optim.SGD(model.parameters(), lr=0.1, momentum=0.9) 62 | >>> scheduler = CyclicLR(optimizer) 63 | >>> data_loader = torch.utils.data.DataLoader(...) 64 | >>> for epoch in range(10): 65 | >>> for batch in data_loader: 66 | >>> scheduler.batch_step() 67 | >>> train_batch(...) 68 | .. _Cyclical Learning Rates for Training Neural Networks: https://arxiv.org/abs/1506.01186 69 | ..
_bckenstler/CLR: https://github.com/bckenstler/CLR 70 | """ 71 | 72 | def __init__(self, optimizer, base_lr=1e-3, max_lr=6e-3, 73 | step_size=2000, mode='triangular', gamma=1., 74 | scale_fn=None, scale_mode='cycle', last_batch_iteration=-1): 75 | 76 | if not isinstance(optimizer, Optimizer): 77 | raise TypeError('{} is not an Optimizer'.format( 78 | type(optimizer).__name__)) 79 | self.optimizer = optimizer 80 | 81 | if isinstance(base_lr, list) or isinstance(base_lr, tuple): 82 | if len(base_lr) != len(optimizer.param_groups): 83 | raise ValueError("expected {} base_lr, got {}".format( 84 | len(optimizer.param_groups), len(base_lr))) 85 | self.base_lrs = list(base_lr) 86 | else: 87 | self.base_lrs = [base_lr] * len(optimizer.param_groups) 88 | 89 | if isinstance(max_lr, list) or isinstance(max_lr, tuple): 90 | if len(max_lr) != len(optimizer.param_groups): 91 | raise ValueError("expected {} max_lr, got {}".format( 92 | len(optimizer.param_groups), len(max_lr))) 93 | self.max_lrs = list(max_lr) 94 | else: 95 | self.max_lrs = [max_lr] * len(optimizer.param_groups) 96 | 97 | self.step_size = step_size 98 | 99 | if mode not in ['triangular', 'triangular2', 'exp_range'] \ 100 | and scale_fn is None: 101 | raise ValueError('mode is invalid and scale_fn is None') 102 | 103 | self.mode = mode 104 | self.gamma = gamma 105 | 106 | if scale_fn is None: 107 | if self.mode == 'triangular': 108 | self.scale_fn = self._triangular_scale_fn 109 | self.scale_mode = 'cycle' 110 | elif self.mode == 'triangular2': 111 | self.scale_fn = self._triangular2_scale_fn 112 | self.scale_mode = 'cycle' 113 | elif self.mode == 'exp_range': 114 | self.scale_fn = self._exp_range_scale_fn 115 | self.scale_mode = 'iterations' 116 | else: 117 | self.scale_fn = scale_fn 118 | self.scale_mode = scale_mode 119 | 120 | self.batch_step(last_batch_iteration + 1) 121 | self.last_batch_iteration = last_batch_iteration 122 | 123 | def batch_step(self, batch_iteration=None): 124 | if batch_iteration is None: 125 | batch_iteration = self.last_batch_iteration + 1 126 | self.last_batch_iteration = batch_iteration 127 | for param_group, lr in zip(self.optimizer.param_groups, self.get_lr()): 128 | param_group['lr'] = lr 129 | 130 | def _triangular_scale_fn(self, x): 131 | return 1. 132 | 133 | def _triangular2_scale_fn(self, x): 134 | return 1 / (2. 
** (x - 1)) 135 | 136 | def _exp_range_scale_fn(self, x): 137 | return self.gamma ** x 138 | 139 | def get_lr(self): 140 | step_size = float(self.step_size) 141 | cycle = np.floor(1 + self.last_batch_iteration / (2 * step_size)) 142 | x = np.abs(self.last_batch_iteration / step_size - 2 * cycle + 1) 143 | 144 | lrs = [] 145 | param_lrs = zip(self.optimizer.param_groups, self.base_lrs, self.max_lrs) 146 | for param_group, base_lr, max_lr in param_lrs: 147 | base_height = (max_lr - base_lr) * np.maximum(0, (1 - x)) 148 | if self.scale_mode == 'cycle': 149 | lr = base_lr + base_height * self.scale_fn(cycle) 150 | else: 151 | lr = base_lr + base_height * self.scale_fn(self.last_batch_iteration) 152 | lrs.append(lr) 153 | return lrs 154 | -------------------------------------------------------------------------------- /data.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | import torch 4 | import torch.nn.parallel 5 | import torch.optim 6 | import torch.utils.data 7 | from torchvision import datasets, transforms 8 | 9 | __imagenet_stats = {'mean': [0.485, 0.456, 0.406], 10 | 'std': [0.229, 0.224, 0.225]} 11 | 12 | 13 | def inception_preprocess(input_size, normalize=__imagenet_stats): 14 | return transforms.Compose([ 15 | transforms.RandomResizedCrop(input_size), 16 | transforms.RandomHorizontalFlip(), 17 | transforms.ToTensor(), 18 | transforms.Normalize(**normalize) 19 | ]) 20 | 21 | 22 | def scale_crop(input_size, scale_size=None, normalize=__imagenet_stats): 23 | t_list = [ 24 | transforms.CenterCrop(input_size), 25 | transforms.ToTensor(), 26 | transforms.Normalize(**normalize), 27 | ] 28 | if scale_size != input_size: 29 | t_list = [transforms.Resize(scale_size)] + t_list 30 | 31 | return transforms.Compose(t_list) 32 | 33 | 34 | def get_transform(augment=True, input_size=224): 35 | normalize = __imagenet_stats 36 | scale_size = int(input_size / 0.875) 37 | if augment: 38 | return inception_preprocess(input_size=input_size, normalize=normalize) 39 | else: 40 | return scale_crop(input_size=input_size, scale_size=scale_size, normalize=normalize) 41 | 42 | 43 | def get_loaders(dataroot, val_batch_size, train_batch_size, input_size, workers): 44 | val_data = datasets.ImageFolder(root=os.path.join(dataroot, 'val'), transform=get_transform(False, input_size)) 45 | val_loader = torch.utils.data.DataLoader(val_data, batch_size=val_batch_size, shuffle=False, num_workers=workers, 46 | pin_memory=True) 47 | 48 | train_data = datasets.ImageFolder(root=os.path.join(dataroot, 'train'), 49 | transform=get_transform(input_size=input_size)) 50 | train_loader = torch.utils.data.DataLoader(train_data, batch_size=train_batch_size, shuffle=True, 51 | num_workers=workers, pin_memory=True) 52 | return train_loader, val_loader 53 | -------------------------------------------------------------------------------- /flops_benchmark.py: -------------------------------------------------------------------------------- 1 | #### https://github.com/warmspringwinds/pytorch-segmentation-detection/blob/master/pytorch_segmentation_detection/utils/flops_benchmark.py 2 | import torch 3 | 4 | 5 | # ---- Public functions 6 | 7 | def add_flops_counting_methods(net_main_module): 8 | """Adds flops counting functions to an existing model. After that 9 | the flops count should be activated and the model should be run on an input 10 | image.
11 | 12 | Example: 13 | 14 | fcn = add_flops_counting_methods(fcn) 15 | fcn = fcn.cuda().train() 16 | fcn.start_flops_count() 17 | 18 | 19 | _ = fcn(batch) 20 | 21 | fcn.compute_average_flops_cost() / 1e9 / 2 # Result in GFLOPs per image in batch 22 | 23 | Important: dividing by 2 only works for resnet models -- see below for the details 24 | of flops computation. 25 | 26 | Attention: we are counting multiply-add as two flops in this work, because in 27 | most resnet models convolutions are bias-free (BN layers act as bias there) 28 | and it therefore makes sense to count multiply and add as separate flops. 29 | This is why in the above example we divide by 2 in order to be consistent with 30 | most modern benchmarks. For example, in "Spatially Adaptive Computation Time for Residual 31 | Networks" by Figurnov et al., multiply-add was counted as two flops. 32 | 33 | This module computes the average flops, which is necessary for dynamic networks that 34 | execute a different number of layers per input. For static networks it is enough to run the network 35 | once and get statistics (above example). 36 | 37 | Implementation: 38 | The module works by adding a batch counter (__batch_counter__) to the main module which tracks the sum 39 | of all batch sizes that were run through the network. 40 | 41 | Also each convolutional layer of the network tracks the overall number of flops 42 | performed. 43 | 44 | The counters are updated with the help of registered hook functions which 45 | are called each time the respective layer is executed. 46 | 47 | Parameters 48 | ---------- 49 | net_main_module : torch.nn.Module 50 | Main module containing network 51 | 52 | Returns 53 | ------- 54 | net_main_module : torch.nn.Module 55 | Updated main module with new methods/attributes that are used 56 | to compute flops. 57 | """ 58 | 59 | # adding additional methods to the existing module object, 60 | # this is done this way so that each function has access to the self object 61 | net_main_module.start_flops_count = start_flops_count.__get__(net_main_module) 62 | net_main_module.stop_flops_count = stop_flops_count.__get__(net_main_module) 63 | net_main_module.reset_flops_count = reset_flops_count.__get__(net_main_module) 64 | net_main_module.compute_average_flops_cost = compute_average_flops_cost.__get__(net_main_module) 65 | 66 | net_main_module.reset_flops_count() 67 | 68 | # Adding variables necessary for masked flops computation 69 | net_main_module.apply(add_flops_mask_variable_or_reset) 70 | 71 | return net_main_module 72 | 73 | 74 | def compute_average_flops_cost(self): 75 | """ 76 | A method that will be available after add_flops_counting_methods() is called 77 | on a desired net object. 78 | 79 | Returns current mean flops consumption per image. 80 | 81 | """ 82 | 83 | batches_count = self.__batch_counter__ 84 | 85 | flops_sum = 0 86 | 87 | for module in self.modules(): 88 | 89 | if isinstance(module, torch.nn.Conv2d): 90 | flops_sum += module.__flops__ 91 | 92 | return flops_sum / batches_count 93 | 94 | 95 | def start_flops_count(self): 96 | """ 97 | A method that will be available after add_flops_counting_methods() is called 98 | on a desired net object. 99 | 100 | Activates the computation of mean flops consumption per image. 101 | Call it before you run the network.
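Calling it more than once is safe: hook registration is guarded by the __batch_counter_handle__ and __flops_handle__ attributes (see the add_*_hook_function helpers below), so each module gets at most one counting hook and flops are not double-counted.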
102 | 103 | """ 104 | 105 | add_batch_counter_hook_function(self) 106 | 107 | self.apply(add_flops_counter_hook_function) 108 | 109 | 110 | def stop_flops_count(self): 111 | """ 112 | A method that will be available after add_flops_counting_methods() is called 113 | on a desired net object. 114 | 115 | Stops computing the mean flops consumption per image. 116 | Call whenever you want to pause the computation. 117 | 118 | """ 119 | 120 | remove_batch_counter_hook_function(self) 121 | 122 | self.apply(remove_flops_counter_hook_function) 123 | 124 | 125 | def reset_flops_count(self): 126 | """ 127 | A method that will be available after add_flops_counting_methods() is called 128 | on a desired net object. 129 | 130 | Resets statistics computed so far. 131 | 132 | """ 133 | 134 | add_batch_counter_variables_or_reset(self) 135 | 136 | self.apply(add_flops_counter_variable_or_reset) 137 | 138 | 139 | def add_flops_mask(module, mask): 140 | def add_flops_mask_func(module): 141 | if isinstance(module, torch.nn.Conv2d): 142 | module.__mask__ = mask 143 | 144 | module.apply(add_flops_mask_func) 145 | 146 | 147 | def remove_flops_mask(module): 148 | module.apply(add_flops_mask_variable_or_reset) 149 | 150 | 151 | # ---- Internal functions 152 | 153 | 154 | def conv_flops_counter_hook(conv_module, input, output): 155 | # Can have multiple inputs, getting the first one 156 | input = input[0] 157 | 158 | batch_size = input.shape[0] 159 | output_height, output_width = output.shape[2:] 160 | 161 | kernel_height, kernel_width = conv_module.kernel_size 162 | in_channels = conv_module.in_channels 163 | out_channels = conv_module.out_channels 164 | groups = conv_module.groups 165 | 166 | # We count multiply-add as 2 flops 167 | conv_per_position_flops = 2 * kernel_height * kernel_width * in_channels * out_channels / groups 168 | 169 | active_elements_count = batch_size * output_height * output_width 170 | 171 | if conv_module.__mask__ is not None: 172 | # (b, 1, h, w) 173 | flops_mask = conv_module.__mask__.expand(batch_size, 1, output_height, output_width) 174 | active_elements_count = flops_mask.sum() 175 | 176 | overall_conv_flops = conv_per_position_flops * active_elements_count 177 | 178 | bias_flops = 0 179 | 180 | if conv_module.bias is not None: 181 | bias_flops = out_channels * active_elements_count 182 | 183 | overall_flops = overall_conv_flops + bias_flops 184 | 185 | conv_module.__flops__ += overall_flops 186 | 187 | 188 | def batch_counter_hook(module, input, output): 189 | # Can have multiple inputs, getting the first one 190 | input = input[0] 191 | 192 | batch_size = input.shape[0] 193 | 194 | module.__batch_counter__ += batch_size 195 | 196 | 197 | def add_batch_counter_variables_or_reset(module): 198 | module.__batch_counter__ = 0 199 | 200 | 201 | def add_batch_counter_hook_function(module): 202 | if hasattr(module, '__batch_counter_handle__'): 203 | return 204 | 205 | handle = module.register_forward_hook(batch_counter_hook) 206 | module.__batch_counter_handle__ = handle 207 | 208 | 209 | def remove_batch_counter_hook_function(module): 210 | if hasattr(module, '__batch_counter_handle__'): 211 | module.__batch_counter_handle__.remove() 212 | 213 | del module.__batch_counter_handle__ 214 | 215 | 216 | def add_flops_counter_variable_or_reset(module): 217 | if isinstance(module, torch.nn.Conv2d): 218 | module.__flops__ = 0 219 | 220 | 221 | def add_flops_counter_hook_function(module): 222 | if isinstance(module, torch.nn.Conv2d): 223 | 224 | if hasattr(module, '__flops_handle__'): 225 | return 
226 | 227 | handle = module.register_forward_hook(conv_flops_counter_hook) 228 | module.__flops_handle__ = handle 229 | 230 | 231 | def remove_flops_counter_hook_function(module): 232 | if isinstance(module, torch.nn.Conv2d): 233 | 234 | if hasattr(module, '__flops_handle__'): 235 | module.__flops_handle__.remove() 236 | 237 | del module.__flops_handle__ 238 | 239 | 240 | # --- Masked flops counting 241 | 242 | 243 | # Also being run in the initialization 244 | def add_flops_mask_variable_or_reset(module): 245 | if isinstance(module, torch.nn.Conv2d): 246 | module.__mask__ = None 247 | 248 | 249 | def count_flops(model, batch_size, device, dtype, input_size, in_channels, *params): 250 | net = model(*params, input_size=input_size) 251 | # print(net) 252 | net = add_flops_counting_methods(net) 253 | 254 | net.to(device=device, dtype=dtype) 255 | net = net.train() 256 | 257 | batch = torch.randn(batch_size, in_channels, input_size, input_size).to(device=device, dtype=dtype) 258 | net.start_flops_count() 259 | 260 | _ = net(batch) 261 | return net.compute_average_flops_cost() / 2 # Result in multiply-adds (MACs), since each multiply-add was counted as two flops above 262 | -------------------------------------------------------------------------------- /imagenet.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import csv 3 | import os 4 | import random 5 | import sys 6 | from datetime import datetime 7 | 8 | import torch 9 | import torch.backends.cudnn as cudnn 10 | import torch.nn.parallel 11 | import torch.optim 12 | import torch.utils.data 13 | from torch.optim.lr_scheduler import MultiStepLR 14 | from tqdm import trange 15 | 16 | import flops_benchmark 17 | from clr import CyclicLR 18 | from data import get_loaders 19 | from logger import CsvLogger 20 | from model import MobileNet2 21 | from run import train, test, save_checkpoint, find_bounds_clr 22 | 23 | parser = argparse.ArgumentParser(description='MobileNetv2 training with PyTorch') 24 | parser.add_argument('--dataroot', required=True, metavar='PATH', 25 | help='Path to ImageNet train and val folders, preprocessed as described in ' 26 | 'https://github.com/facebook/fb.resnet.torch/blob/master/INSTALL.md#download-the-imagenet-dataset') 27 | parser.add_argument('--gpus', default=None, help='List of GPUs used for training - e.g. 0,1,3') 28 | parser.add_argument('-j', '--workers', default=4, type=int, metavar='N', 29 | help='Number of data loading workers (default: 4)') 30 | parser.add_argument('--type', default='float32', help='Type of tensor: float32, float16, float64.
Default: float32') 31 | 32 | # Optimization options 33 | parser.add_argument('--epochs', type=int, default=400, help='Number of epochs to train.') 34 | parser.add_argument('-b', '--batch-size', default=64, type=int, metavar='N', help='mini-batch size (default: 64)') 35 | parser.add_argument('--learning_rate', '-lr', type=float, default=0.01, help='The learning rate.') 36 | parser.add_argument('--momentum', '-m', type=float, default=0.9, help='Momentum.') 37 | parser.add_argument('--decay', '-d', type=float, default=4e-5, help='Weight decay (L2 penalty).') 38 | parser.add_argument('--gamma', type=float, default=0.1, help='LR is multiplied by gamma at scheduled epochs.') 39 | parser.add_argument('--schedule', type=int, nargs='+', default=[200, 300], 40 | help='Decrease learning rate at these epochs.') 41 | 42 | # CLR 43 | parser.add_argument('--clr', dest='clr', action='store_true', help='Use CLR') 44 | parser.add_argument('--min-lr', type=float, default=1e-5, help='Minimal LR for CLR.') 45 | parser.add_argument('--max-lr', type=float, default=1, help='Maximal LR for CLR.') 46 | parser.add_argument('--epochs-per-step', type=int, default=20, 47 | help='Number of epochs per step in CLR, recommended to be between 2 and 10.') 48 | parser.add_argument('--mode', default='triangular2', help='CLR mode. One of {triangular, triangular2, exp_range}') 49 | parser.add_argument('--find-clr', dest='find_clr', action='store_true', 50 | help='Run search for optimal LR in range (min_lr, max_lr)') 51 | 52 | # Checkpoints 53 | parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true', help='Just evaluate model') 54 | parser.add_argument('--save', '-s', type=str, default='', help='Folder to save checkpoints.') 55 | parser.add_argument('--results_dir', metavar='RESULTS_DIR', default='./results', help='Directory to store results') 56 | parser.add_argument('--resume', default='', type=str, metavar='PATH', help='path to latest checkpoint (default: none)') 57 | parser.add_argument('--start-epoch', default=0, type=int, metavar='N', help='manual epoch number (useful on restarts)') 58 | parser.add_argument('--log-interval', type=int, default=100, metavar='N', help='Number of batches between log messages') 59 | parser.add_argument('--seed', type=int, default=None, metavar='S', help='random seed (default: random)') 60 | 61 | # Architecture 62 | parser.add_argument('--scaling', type=float, default=1, metavar='SC', help='Scaling of MobileNet (default x1).') 63 | parser.add_argument('--input-size', type=int, default=224, metavar='I', 64 | help='Input size of MobileNet, multiple of 32 (default 224).') 65 | 66 | # https://github.com/keras-team/keras/blob/fe066966b5afa96f2f6b9f71ec0c71158b44068d/keras/applications/mobilenetv2.py#L30 67 | claimed_acc_top1 = {224: {1.4: 0.75, 1.3: 0.744, 1.0: 0.718, 0.75: 0.698, 0.5: 0.654, 0.35: 0.603}, 68 | 192: {1.0: 0.707, 0.75: 0.687, 0.5: 0.639, 0.35: 0.582}, 69 | 160: {1.0: 0.688, 0.75: 0.664, 0.5: 0.610, 0.35: 0.557}, 70 | 128: {1.0: 0.653, 0.75: 0.632, 0.5: 0.577, 0.35: 0.508}, 71 | 96: {1.0: 0.603, 0.75: 0.588, 0.5: 0.512, 0.35: 0.455}, 72 | } 73 | claimed_acc_top5 = {224: {1.4: 0.925, 1.3: 0.921, 1.0: 0.910, 0.75: 0.896, 0.5: 0.864, 0.35: 0.829}, 74 | 192: {1.0: 0.901, 0.75: 0.889, 0.5: 0.854, 0.35: 0.812}, 75 | 160: {1.0: 0.890, 0.75: 0.873, 0.5: 0.832, 0.35: 0.791}, 76 | 128: {1.0: 0.869, 0.75: 0.855, 0.5: 0.808, 0.35: 0.750}, 77 | 96: {1.0: 0.832, 0.75: 0.816, 0.5: 0.758, 0.35: 0.704}, 78 | } 79 | 80 | 81 | def main(): 82 | args = parser.parse_args() 83 | 84 | if
args.seed is None: 85 | args.seed = random.randint(1, 10000) 86 | print("Random Seed: ", args.seed) 87 | random.seed(args.seed) 88 | torch.manual_seed(args.seed) 89 | if args.gpus: 90 | torch.cuda.manual_seed_all(args.seed) 91 | 92 | time_stamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') 93 | if args.evaluate: 94 | args.results_dir = '/tmp' 95 | if args.save == '': 96 | args.save = time_stamp 97 | save_path = os.path.join(args.results_dir, args.save) 98 | if not os.path.exists(save_path): 99 | os.makedirs(save_path) 100 | 101 | if args.gpus is not None: 102 | args.gpus = [int(i) for i in args.gpus.split(',')] 103 | device = 'cuda:' + str(args.gpus[0]) 104 | cudnn.benchmark = True 105 | else: 106 | device = 'cpu' 107 | 108 | if args.type == 'float64': 109 | dtype = torch.float64 110 | elif args.type == 'float32': 111 | dtype = torch.float32 112 | elif args.type == 'float16': 113 | dtype = torch.float16 114 | else: 115 | raise ValueError('Wrong type!') # TODO int8 116 | 117 | model = MobileNet2(input_size=args.input_size, scale=args.scaling) 118 | num_parameters = sum([l.nelement() for l in model.parameters()]) 119 | print(model) 120 | print('number of parameters: {}'.format(num_parameters)) 121 | print('FLOPs: {}'.format( 122 | flops_benchmark.count_flops(MobileNet2, 123 | args.batch_size // len(args.gpus) if args.gpus is not None else args.batch_size, 124 | device, dtype, args.input_size, 3, args.scaling))) 125 | 126 | train_loader, val_loader = get_loaders(args.dataroot, args.batch_size, args.batch_size, args.input_size, 127 | args.workers) 128 | # define loss function (criterion) and optimizer 129 | criterion = torch.nn.CrossEntropyLoss() 130 | if args.gpus is not None: 131 | model = torch.nn.DataParallel(model, args.gpus) 132 | model.to(device=device, dtype=dtype) 133 | criterion.to(device=device, dtype=dtype) 134 | 135 | optimizer = torch.optim.SGD(model.parameters(), args.learning_rate, momentum=args.momentum, weight_decay=args.decay, 136 | nesterov=True) 137 | if args.find_clr: 138 | find_bounds_clr(model, train_loader, optimizer, criterion, device, dtype, min_lr=args.min_lr, 139 | max_lr=args.max_lr, step_size=args.epochs_per_step * len(train_loader), mode=args.mode, 140 | save_path=save_path) 141 | return 142 | 143 | if args.clr: 144 | scheduler = CyclicLR(optimizer, base_lr=args.min_lr, max_lr=args.max_lr, 145 | step_size=args.epochs_per_step * len(train_loader), mode=args.mode) 146 | else: 147 | scheduler = MultiStepLR(optimizer, milestones=args.schedule, gamma=args.gamma) 148 | 149 | best_test = 0 150 | 151 | # optionally resume from a checkpoint 152 | data = None 153 | if args.resume: 154 | if os.path.isfile(args.resume): 155 | print("=> loading checkpoint '{}'".format(args.resume)) 156 | checkpoint = torch.load(args.resume, map_location=device) 157 | args.start_epoch = checkpoint['epoch'] - 1 158 | best_test = checkpoint['best_prec1'] 159 | model.load_state_dict(checkpoint['state_dict']) 160 | optimizer.load_state_dict(checkpoint['optimizer']) 161 | print("=> loaded checkpoint '{}' (epoch {})" 162 | .format(args.resume, checkpoint['epoch'])) 163 | elif os.path.isdir(args.resume): 164 | checkpoint_path = os.path.join(args.resume, 'checkpoint.pth.tar') 165 | csv_path = os.path.join(args.resume, 'results.csv') 166 | print("=> loading checkpoint '{}'".format(checkpoint_path)) 167 | checkpoint = torch.load(checkpoint_path, map_location=device) 168 | args.start_epoch = checkpoint['epoch'] - 1 169 | best_test = checkpoint['best_prec1'] 170 |
model.load_state_dict(checkpoint['state_dict']) 171 | optimizer.load_state_dict(checkpoint['optimizer']) 172 | print("=> loaded checkpoint '{}' (epoch {})".format(checkpoint_path, checkpoint['epoch'])) 173 | data = [] 174 | with open(csv_path) as csvfile: 175 | reader = csv.DictReader(csvfile) 176 | for row in reader: 177 | data.append(row) 178 | else: 179 | print("=> no checkpoint found at '{}'".format(args.resume)) 180 | 181 | if args.evaluate: 182 | loss, top1, top5 = test(model, val_loader, criterion, device, dtype) # TODO 183 | return 184 | 185 | csv_logger = CsvLogger(filepath=save_path, data=data) 186 | csv_logger.save_params(sys.argv, args) 187 | 188 | claimed_acc1 = None 189 | claimed_acc5 = None 190 | if args.input_size in claimed_acc_top1: 191 | if args.scaling in claimed_acc_top1[args.input_size]: 192 | claimed_acc1 = claimed_acc_top1[args.input_size][args.scaling] 193 | claimed_acc5 = claimed_acc_top5[args.input_size][args.scaling] 194 | csv_logger.write_text( 195 | 'Claimed accuracies are: {:.2f}% top-1, {:.2f}% top-5'.format(claimed_acc1 * 100., claimed_acc5 * 100.)) 196 | train_network(args.start_epoch, args.epochs, scheduler, model, train_loader, val_loader, optimizer, criterion, 197 | device, dtype, args.batch_size, args.log_interval, csv_logger, save_path, claimed_acc1, claimed_acc5, 198 | best_test) 199 | 200 | 201 | def train_network(start_epoch, epochs, scheduler, model, train_loader, val_loader, optimizer, criterion, device, dtype, 202 | batch_size, log_interval, csv_logger, save_path, claimed_acc1, claimed_acc5, best_test): 203 | for epoch in trange(start_epoch, epochs + 1): 204 | if not isinstance(scheduler, CyclicLR): 205 | scheduler.step() 206 | train_loss, train_accuracy1, train_accuracy5, = train(model, train_loader, epoch, optimizer, criterion, device, 207 | dtype, batch_size, log_interval, scheduler) 208 | test_loss, test_accuracy1, test_accuracy5 = test(model, val_loader, criterion, device, dtype) 209 | csv_logger.write({'epoch': epoch + 1, 'val_error1': 1 - test_accuracy1, 'val_error5': 1 - test_accuracy5, 210 | 'val_loss': test_loss, 'train_error1': 1 - train_accuracy1, 211 | 'train_error5': 1 - train_accuracy5, 'train_loss': train_loss}) 212 | save_checkpoint({'epoch': epoch + 1, 'state_dict': model.state_dict(), 'best_prec1': best_test, 213 | 'optimizer': optimizer.state_dict()}, test_accuracy1 > best_test, filepath=save_path) 214 | 215 | csv_logger.plot_progress(claimed_acc1=claimed_acc1, claimed_acc5=claimed_acc5) 216 | 217 | if test_accuracy1 > best_test: 218 | best_test = test_accuracy1 219 | 220 | csv_logger.write_text('Best accuracy is {:.2f}% top-1'.format(best_test * 100.)) 221 | 222 | 223 | if __name__ == '__main__': 224 | main() 225 | -------------------------------------------------------------------------------- /logger.py: -------------------------------------------------------------------------------- 1 | import csv 2 | import os.path 3 | 4 | import matplotlib 5 | 6 | matplotlib.use('Agg') 7 | 8 | from matplotlib import pyplot as plt 9 | import numpy as np 10 | 11 | plt.switch_backend('agg') 12 | 13 | 14 | class CsvLogger: 15 | def __init__(self, filepath='./', filename='results.csv', data=None): 16 | self.log_path = filepath 17 | self.log_name = filename 18 | self.csv_path = os.path.join(self.log_path, self.log_name) 19 | self.fieldsnames = ['epoch', 'val_error1', 'val_error5', 'val_loss', 'train_error1', 'train_error5', 20 | 'train_loss'] 21 | 22 | with open(self.csv_path, 'w') as f: 23 | writer = csv.DictWriter(f, 
fieldnames=self.fieldsnames) 24 | writer.writeheader() 25 | 26 | self.data = {} 27 | for field in self.fieldsnames: 28 | self.data[field] = [] 29 | if data is not None: 30 | for d in data: 31 | d_num = {} 32 | for key in d: 33 | d_num[key] = float(d[key]) if key != 'epoch' else int(d[key]) 34 | self.write(d_num) 35 | 36 | def write(self, data): 37 | for k in self.data: 38 | self.data[k].append(data[k]) 39 | with open(self.csv_path, 'a') as f: 40 | writer = csv.DictWriter(f, fieldnames=self.fieldsnames) 41 | writer.writerow(data) 42 | 43 | def save_params(self, args, params): 44 | with open(os.path.join(self.log_path, 'params.txt'), 'w') as f: 45 | f.write('{}\n'.format(' '.join(args))) 46 | f.write('{}\n'.format(params)) 47 | 48 | def write_text(self, text, print_t=True): 49 | with open(os.path.join(self.log_path, 'params.txt'), 'a') as f: 50 | f.write('{}\n'.format(text)) 51 | if print_t: 52 | print(text) 53 | 54 | def plot_progress_errk(self, claimed_acc=None, title='MobileNetv2', k=1): 55 | tr_str = 'train_error{}'.format(k) 56 | val_str = 'val_error{}'.format(k) 57 | plt.figure(figsize=(9, 8), dpi=300) 58 | plt.plot(self.data[tr_str], label='Training error') 59 | plt.plot(self.data[val_str], label='Validation error') 60 | if claimed_acc is not None: 61 | plt.plot((0, len(self.data[tr_str])), (1 - claimed_acc, 1 - claimed_acc), 'k--', 62 | label='Claimed validation error ({:.2f}%)'.format(100. * (1 - claimed_acc))) 63 | plt.plot((0, len(self.data[tr_str])), 64 | (np.min(self.data[val_str]), np.min(self.data[val_str])), 'r--', 65 | label='Best validation error ({:.2f}%)'.format(100. * np.min(self.data[val_str]))) 66 | plt.title('Top-{} error for {}'.format(k, title)) 67 | plt.xlabel('Epoch') 68 | plt.ylabel('Error') 69 | plt.legend() 70 | plt.xlim(0, len(self.data[tr_str]) + 1) 71 | plt.savefig(os.path.join(self.log_path, 'top{}.png'.format(k))) 72 | 73 | def plot_progress_loss(self, title='MobileNetv2'): 74 | plt.figure(figsize=(9, 8), dpi=300) 75 | plt.plot(self.data['train_loss'], label='Training') 76 | plt.plot(self.data['val_loss'], label='Validation') 77 | plt.title(title) 78 | plt.xlabel('Epoch') 79 | plt.ylabel('Loss') 80 | plt.legend() 81 | plt.xlim(0, len(self.data['train_loss']) + 1) 82 | plt.savefig(os.path.join(self.log_path, 'loss.png')) 83 | 84 | def plot_progress(self, claimed_acc1=None, claimed_acc5=None, title='MobileNetv2'): 85 | self.plot_progress_errk(claimed_acc1, title, 1) 86 | self.plot_progress_errk(claimed_acc5, title, 5) 87 | self.plot_progress_loss(title) 88 | plt.close('all') 89 | -------------------------------------------------------------------------------- /model.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | 3 | import torch 4 | import torch.nn as nn 5 | import torch.nn.functional as F 6 | from torch.nn import init 7 | 8 | 9 | def _make_divisible(v, divisor, min_value=None): 10 | """ 11 | This function is taken from the original tf repo. 12 | It ensures that all layers have a channel number that is divisible by 8. 13 | It can be seen here: 14 | https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py 15 | :param v: the original (possibly scaled) channel count to round. 16 | :param divisor: the divisor the result must be a multiple of. 17 | :param min_value: lower bound for the result; defaults to divisor. 18 | :return: the rounded channel count. 19 | """ 20 | if min_value is None: 21 | min_value = divisor 22 | new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) 23 | # Make sure that round down does not go down by more than 10%.
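# For example, with divisor=8: _make_divisible(30, 8) gives max(8, 34 // 8 * 8) = 32, kept since 32 >= 0.9 * 30, while _make_divisible(17, 8) gives 16, kept since 16 >= 0.9 * 17 = 15.3, so no bump by divisor is needed in either case.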
24 | if new_v < 0.9 * v: 25 | new_v += divisor 26 | return new_v 27 | 28 | 29 | class LinearBottleneck(nn.Module): 30 | def __init__(self, inplanes, outplanes, stride=1, t=6, activation=nn.ReLU6): 31 | super(LinearBottleneck, self).__init__() 32 | self.conv1 = nn.Conv2d(inplanes, inplanes * t, kernel_size=1, bias=False) 33 | self.bn1 = nn.BatchNorm2d(inplanes * t) 34 | self.conv2 = nn.Conv2d(inplanes * t, inplanes * t, kernel_size=3, stride=stride, padding=1, bias=False, 35 | groups=inplanes * t) 36 | self.bn2 = nn.BatchNorm2d(inplanes * t) 37 | self.conv3 = nn.Conv2d(inplanes * t, outplanes, kernel_size=1, bias=False) 38 | self.bn3 = nn.BatchNorm2d(outplanes) 39 | self.activation = activation(inplace=True) 40 | self.stride = stride 41 | self.t = t 42 | self.inplanes = inplanes 43 | self.outplanes = outplanes 44 | 45 | def forward(self, x): 46 | residual = x 47 | 48 | out = self.conv1(x) 49 | out = self.bn1(out) 50 | out = self.activation(out) 51 | 52 | out = self.conv2(out) 53 | out = self.bn2(out) 54 | out = self.activation(out) 55 | 56 | out = self.conv3(out) 57 | out = self.bn3(out) 58 | 59 | if self.stride == 1 and self.inplanes == self.outplanes: 60 | out += residual 61 | 62 | return out 63 | 64 | 65 | class MobileNet2(nn.Module): 66 | """MobileNet2 implementation. 67 | """ 68 | 69 | def __init__(self, scale=1.0, input_size=224, t=6, in_channels=3, num_classes=1000, activation=nn.ReLU6): 70 | """ 71 | MobileNet2 constructor. 72 | :param in_channels: (int, optional): number of channels in the input tensor. 73 | Default is 3 for RGB image inputs. 74 | :param input_size: spatial size of the (square) input; expected to be a multiple of 32. Default is 224. 75 | :param num_classes: number of classes to predict. Default 76 | is 1000 for ImageNet. 77 | :param scale: width multiplier applied to the number of channels in each layer. Default is 1.0. 78 | :param t: expansion factor of the bottleneck blocks. Default is 6. 79 | :param activation: activation module class to use. Default is nn.ReLU6. 80 | """ 81 | 82 | super(MobileNet2, self).__init__() 83 | 84 | self.scale = scale 85 | self.t = t 86 | self.activation_type = activation 87 | self.activation = activation(inplace=True) 88 | self.num_classes = num_classes 89 | 90 | self.num_of_channels = [32, 16, 24, 32, 64, 96, 160, 320] # output channels per stage, before scaling 91 | # assert (input_size % 32 == 0) 92 | 93 | self.c = [_make_divisible(ch * self.scale, 8) for ch in self.num_of_channels] 94 | self.n = [1, 1, 2, 3, 4, 3, 3, 1] # number of repeats per stage 95 | self.s = [2, 1, 2, 2, 2, 1, 2, 1] # stride of the first block in each stage 96 | self.conv1 = nn.Conv2d(in_channels, self.c[0], kernel_size=3, bias=False, stride=self.s[0], padding=1) 97 | self.bn1 = nn.BatchNorm2d(self.c[0]) 98 | self.bottlenecks = self._make_bottlenecks() 99 | 100 | # Last convolution has 1280 output channels for scale <= 1 101 | self.last_conv_out_ch = 1280 if self.scale <= 1 else _make_divisible(1280 * self.scale, 8) 102 | self.conv_last = nn.Conv2d(self.c[-1], self.last_conv_out_ch, kernel_size=1, bias=False) 103 | self.bn_last = nn.BatchNorm2d(self.last_conv_out_ch) 104 | self.avgpool = nn.AdaptiveAvgPool2d(1) 105 | self.dropout = nn.Dropout(p=0.2, inplace=True) # confirmed by paper authors 106 | self.fc = nn.Linear(self.last_conv_out_ch, self.num_classes) 107 | self.init_params() 108 | 109 | def init_params(self): 110 | for m in self.modules(): 111 | if isinstance(m, nn.Conv2d): 112 | init.kaiming_normal_(m.weight, mode='fan_out') 113 | if m.bias is not None: 114 | init.constant_(m.bias, 0) 115 | elif isinstance(m, nn.BatchNorm2d): 116 | init.constant_(m.weight, 1) 117 | init.constant_(m.bias, 0) 118 | elif isinstance(m, nn.Linear): 119 | init.normal_(m.weight, std=0.001) 120 | if m.bias is not None: 121 | init.constant_(m.bias, 0) 122 | 123 | def _make_stage(self, inplanes, outplanes, n, stride, t, stage): 124 | modules = OrderedDict() 125 |
stage_name = "LinearBottleneck{}".format(stage) 126 | 127 | # First module is the only one utilizing stride 128 | first_module = LinearBottleneck(inplanes=inplanes, outplanes=outplanes, stride=stride, t=t, 129 | activation=self.activation_type) 130 | modules[stage_name + "_0"] = first_module 131 | 132 | # add more LinearBottleneck modules depending on the number of repeats 133 | for i in range(n - 1): 134 | name = stage_name + "_{}".format(i + 1) 135 | module = LinearBottleneck(inplanes=outplanes, outplanes=outplanes, stride=1, t=t, 136 | activation=self.activation_type) 137 | modules[name] = module 138 | 139 | return nn.Sequential(modules) 140 | 141 | def _make_bottlenecks(self): 142 | modules = OrderedDict() 143 | stage_name = "Bottlenecks" 144 | 145 | # First module is the only one with t=1 146 | bottleneck1 = self._make_stage(inplanes=self.c[0], outplanes=self.c[1], n=self.n[1], stride=self.s[1], t=1, 147 | stage=0) 148 | modules[stage_name + "_0"] = bottleneck1 149 | 150 | # add more LinearBottleneck stages depending on the number of repeats 151 | for i in range(1, len(self.c) - 1): 152 | name = stage_name + "_{}".format(i) 153 | module = self._make_stage(inplanes=self.c[i], outplanes=self.c[i + 1], n=self.n[i + 1], 154 | stride=self.s[i + 1], 155 | t=self.t, stage=i) 156 | modules[name] = module 157 | 158 | return nn.Sequential(modules) 159 | 160 | def forward(self, x): 161 | x = self.conv1(x) 162 | x = self.bn1(x) 163 | x = self.activation(x) 164 | 165 | x = self.bottlenecks(x) 166 | x = self.conv_last(x) 167 | x = self.bn_last(x) 168 | x = self.activation(x) 169 | 170 | # average pooling layer 171 | x = self.avgpool(x) 172 | x = self.dropout(x) 173 | 174 | # flatten for input to fully-connected layer 175 | x = x.view(x.size(0), -1) 176 | x = self.fc(x) 177 | return F.log_softmax(x, dim=1) # TODO: redundant for training, since nn.CrossEntropyLoss already applies log-softmax
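# A minimal usage sketch (untrained weights; mirrors the configuration of results/mobilenet_v2_0.5_160):
#
#     net = MobileNet2(scale=0.5, input_size=160)
#     log_probs = net(torch.randn(2, 3, 160, 160))  # log-probabilities, shape (2, 1000)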
178 | 179 | 180 | if __name__ == "__main__": 181 | """Testing 182 | """ 183 | model1 = MobileNet2() 184 | print(model1) 185 | model2 = MobileNet2(scale=0.35) 186 | print(model2) 187 | model3 = MobileNet2(in_channels=2, num_classes=10) 188 | print(model3) 189 | x = torch.randn(1, 2, 224, 224) 190 | print(model3(x)) 191 | model4_size = 32 * 10 192 | model4 = MobileNet2(input_size=model4_size, num_classes=10) 193 | print(model4) 194 | x2 = torch.randn(1, 3, model4_size, model4_size) 195 | print(model4(x2)) 196 | model5 = MobileNet2(input_size=196, num_classes=10) 197 | x3 = torch.randn(1, 3, 196, 196) 198 | print(model5(x3)) # fail 199 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | torch>=0.4.0 2 | torchvision>=0.1.9 3 | tqdm>=4.19.4 4 | matplotlib 5 | numpy -------------------------------------------------------------------------------- /results/mobilenet_v2_0.5_160/loss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_0.5_160/loss.png -------------------------------------------------------------------------------- /results/mobilenet_v2_0.5_160/model_best.pth.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_0.5_160/model_best.pth.tar -------------------------------------------------------------------------------- /results/mobilenet_v2_0.5_160/params.txt: -------------------------------------------------------------------------------- 1 | imagenet.py --dataroot /home/chaimb/ILSVRC/Data/CLS-LOC --seed 42 --gpus 0,1,2,3,4,5,6 -j 32 --scaling 0.5 --input-size 160 -b 2048 -lr 0.08 2 | Namespace(batch_size=2048, dataroot='/home/chaimb/ILSVRC/Data/CLS-LOC', decay=4e-05, epochs=400, evaluate=False, gamma=0.1, gpus=[0, 1, 2, 3, 4, 5, 6], input_size=160, learning_rate=0.08, log_interval=100, momentum=0.9, results_dir='./results', resume='', save='2018-06-12_20-29-41', scaling=0.5, schedule=[200, 300], seed=42, start_epoch=0, type='float32', workers=32) 3 | Claimed accuracies are: 61.00% top-1, 83.20% top-5 4 | Best accuracy is 60.68% top-1 5 | -------------------------------------------------------------------------------- /results/mobilenet_v2_0.5_160/results.csv: -------------------------------------------------------------------------------- 1 | epoch,val_error1 ,val_error5 ,val_loss ,train_error1 ,train_error5 ,train_loss 2 | 1 ,0.92694 ,0.79196 ,5.0863229942321775,0.9691515625987869 ,0.9024217763960514 ,5.194215297698975 3 | 2 ,0.84798 ,0.64942 ,4.344557933807373 ,0.8917580612051357 ,0.7312208322568408 ,4.6394782066345215 4 | 3 ,0.79518 ,0.56966 ,3.918274688720703 ,0.8360096693093094 ,0.637620232178943 ,4.290246963500977 5 | 4 ,0.7645 ,0.53264 ,3.7207171440124513,0.7973472623006993 ,0.5812154075151795 ,3.878848075866699 6 | 5 ,0.7288399999999999 ,0.48341999999999996,3.4715453433990477,0.7669827586879774 ,0.5411464703664706 ,3.8225533962249756 7 | 6 ,0.71114 ,0.45631999999999995,3.3348944759368897,0.7431380920676227 ,0.5109146582763996 ,3.610210657119751 8 | 7 ,0.67882 ,0.42196 ,3.149055213928223 ,0.7244309289889608 ,0.48710979911284014,3.518906593322754 9 | 8 ,0.66056 ,0.40468000000000004,3.046949291229248 ,0.7078585383482403 ,0.468443224029342 
,3.4073050022125244 10 | 9 ,0.6483 ,0.39134 ,2.984099130630493 ,0.6942670237369524 ,0.4520987505922335 ,3.4784841537475586 11 | 10 ,0.6424799999999999 ,0.38351999999999997,2.9332941341400147,0.6820664284983925 ,0.4388202318667278 ,3.2480106353759766 12 | 11 ,0.62024 ,0.35929999999999995,2.8101455545425416,0.671077228807798 ,0.4265845124015839 ,3.194042444229126 13 | 12 ,0.62322 ,0.36360000000000003,2.8272254133224486,0.6619051224391512 ,0.41677470618584467,3.1789815425872803 14 | 13 ,0.6067 ,0.34697999999999996,2.733345136642456 ,0.6519993100040822 ,0.40597283570369824,3.131643772125244 15 | 14 ,0.59494 ,0.33646 ,2.6742573881149294,0.6435897896214935 ,0.3979926114238035 ,3.0151114463806152 16 | 15 ,0.58558 ,0.32796000000000003,2.6289117765426635,0.6369036979566286 ,0.3902340600405724 ,2.9711194038391113 17 | 16 ,0.57946 ,0.32208000000000003,2.5959751081466673,0.6292434944078329 ,0.38312413604159334,2.9080209732055664 18 | 17 ,0.5768 ,0.32218 ,2.581822772026062 ,0.623398823104248 ,0.3769563218534352 ,2.9138689041137695 19 | 18 ,0.57328 ,0.31774 ,2.565135746002197 ,0.6174292656616975 ,0.37077758012811757,3.01961612701416 20 | 19 ,0.57454 ,0.31506 ,2.5592668724060057,0.6122051223610974 ,0.36568690888853683,2.9635941982269287 21 | 20 ,0.56064 ,0.30406 ,2.4844932556152344,0.6070629355891932 ,0.3610255337516498 ,2.742203712463379 22 | 21 ,0.55462 ,0.29882 ,2.460219621658325 ,0.6023945356069895 ,0.3565499267464741 ,2.9036617279052734 23 | 22 ,0.56726 ,0.31286 ,2.532589859962463 ,0.5985449203733784 ,0.35215627626999446,2.8002116680145264 24 | 23 ,0.5555399999999999 ,0.29988000000000004,2.464372224807739 ,0.5945415390811659 ,0.3486828805300167 ,2.6914124488830566 25 | 24 ,0.54002 ,0.28534000000000004,2.38246045589447 ,0.5908261764469425 ,0.3453093937012115 ,2.734339475631714 26 | 25 ,0.54522 ,0.28732 ,2.39137749671936 ,0.5872661409480575 ,0.3415752981461433 ,2.7598226070404053 27 | 26 ,0.5442400000000001 ,0.2893 ,2.3936502265930177,0.5835827803869441 ,0.33865530410945643,2.697506904602051 28 | 27 ,0.5400400000000001 ,0.28642 ,2.3866665363311768,0.5810608609182097 ,0.33529898912475886,2.831444501876831 29 | 28 ,0.54372 ,0.28668000000000005,2.3928034591674803,0.5775554631051221 ,0.3320800488929234 ,2.733222723007202 30 | 29 ,0.52512 ,0.27293999999999996,2.2980409622192384,0.5748009432025645 ,0.3291311749366008 ,2.5453953742980957 31 | 30 ,0.5256000000000001 ,0.27178 ,2.293926544189453 ,0.5720315930710048 ,0.32716031555605163,2.6768908500671387 32 | 31 ,0.53408 ,0.27914000000000005,2.340768675804138 ,0.5697196384234062 ,0.3248663132909293 ,2.6050772666931152 33 | 32 ,0.5232600000000001 ,0.26934 ,2.276133532524109 ,0.5677183380464842 ,0.32242556981252246,2.609675884246826 34 | 33 ,0.5186200000000001 ,0.26718 ,2.2687688207626344,0.564517350197125 ,0.31962109545437867,2.6683573722839355 35 | 34 ,0.52308 ,0.2694 ,2.2787072896957397,0.5626190808848495 ,0.3181349504006894 ,2.5079119205474854 36 | 35 ,0.51088 ,0.25882000000000005,2.2246651315689085,0.5605038219061216 ,0.31644586537118113,2.5526702404022217 37 | 36 ,0.518 ,0.26456 ,2.247015609741211 ,0.5585532565231544 ,0.3149503538570694 ,2.554319143295288 38 | 37 ,0.5058199999999999 ,0.25693999999999995,2.2028353881835936,0.5560992438924823 ,0.31225905756236305,2.564445734024048 39 | 38 ,0.50406 ,0.25258 ,2.1912978887557983,0.5555380368055063 ,0.31138095189776194,2.502481460571289 40 | 39 ,0.50596 ,0.25614000000000003,2.1945962476730347,0.5530824630980973 ,0.30937965152084 ,2.6678824424743652 41 | 40 ,0.5036 
,0.25416000000000005,2.1787475967407226,0.5515323138981881 ,0.3083610489499027 ,2.3990120887756348 42 | 41 ,0.5103599999999999 ,0.25893999999999995,2.2098016691207887,0.5502904773538501 ,0.30666181692160355,2.4297268390655518 43 | 42 ,0.5001 ,0.25207999999999997,2.1715317010879516,0.5486575910868763 ,0.3049696097386211 ,2.575899839401245 44 | 43 ,0.50396 ,0.25376 ,2.1742380094528198,0.5466289718670556 ,0.3036387918202701 ,2.5864720344543457 45 | 44 ,0.49456 ,0.24616000000000005,2.1346331596374513,0.546251191296685 ,0.30306743773450295,2.52939510345459 46 | 45 ,0.50694 ,0.25583999999999996,2.2021358823776245,0.5437979592043816 ,0.30084758661439137,2.556642770767212 47 | 46 ,0.5044 ,0.25466 ,2.1832437467575074,0.5430939135959637 ,0.2997173670567538 ,2.4849069118499756 48 | 47 ,0.50066 ,0.25242 ,2.1769688367843627,0.5416397706153843 ,0.2987338887124005 ,2.5337367057800293 49 | 48 ,0.50188 ,0.25370000000000004,2.1791861295700072,0.5407265407241991 ,0.2982819569970191 ,2.419394016265869 50 | 49 ,0.49964 ,0.25186 ,2.16300745010376 ,0.5396041265502467 ,0.29717593412880605,2.4111216068267822 51 | 50 ,0.49429999999999996,0.24434 ,2.128661942481995 ,0.5390031120064753 ,0.29618621147750446,2.4753639698028564 52 | 51 ,0.492 ,0.24497999999999998,2.121062445640564 ,0.5372960745944908 ,0.295231613052787 ,2.5697474479675293 53 | 52 ,0.49642 ,0.24594000000000005,2.1407376194000243,0.5357997825420105 ,0.2937649814583111 ,2.4552416801452637 54 | 53 ,0.49294000000000004,0.244 ,2.113852343559265 ,0.5351558383879698 ,0.29319440791091245,2.3621809482574463 55 | 54 ,0.49104000000000003,0.24382000000000004,2.1220169019699098,0.5339959583723277 ,0.2918027079998158 ,2.4501852989196777 56 | 55 ,0.49272000000000005,0.24392000000000003,2.124539685249329 ,0.5333543558333925 ,0.2916871883212727 ,2.4731321334838867 57 | 56 ,0.49350000000000005,0.24407999999999996,2.120119013786316 ,0.5316668318806214 ,0.2904188134724045 ,2.43821382522583 58 | 57 ,0.49004000000000003,0.24283999999999994,2.112200026512146 ,0.5311626040945482 ,0.28946187343258134,2.435812473297119 59 | 58 ,0.48519999999999996,0.23902 ,2.0922942209243773,0.5304468504106021 ,0.2888358816610169 ,2.483163595199585 60 | 59 ,0.48422 ,0.23765999999999998,2.0848828506469728,0.5297420242638158 ,0.2881536911269179 ,2.481032133102417 61 | 60 ,0.48973999999999995,0.24502000000000002,2.1248234605789182,0.5286328792421284 ,0.28719831216383185,2.5112578868865967 62 | 61 ,0.49244 ,0.24404000000000003,2.1255528831481936,0.5277891172657428 ,0.2872630968484202 ,2.4013137817382812 63 | 62 ,0.48995999999999995,0.24297999999999997,2.1171942234039305,0.5272513263298227 ,0.28625620235301097,2.5671191215515137 64 | 63 ,0.47948 ,0.23424 ,2.061637907028198 ,0.5262818976761031 ,0.2850752478014186 ,2.294400930404663 65 | 64 ,0.49573999999999996,0.2489 ,2.1401083993911745,0.5261741833812454 ,0.2848254755234876 ,2.460958957672119 66 | 65 ,0.49194000000000004,0.24668 ,2.1365620708465576,0.5251665083474676 ,0.2835336845235633 ,2.4623169898986816 67 | 66 ,0.47562000000000004,0.23104000000000002,2.044657425880432 ,0.5248675621523189 ,0.2840792808431688 ,2.325960874557495 68 | 67 ,0.48440000000000005,0.23787999999999998,2.0824490690231325,0.5240386304049355 ,0.2831363904939793 ,2.4309208393096924 69 | 68 ,0.47875999999999996,0.23238000000000003,2.0542493772506716,0.5231082286696426 ,0.2817798148094667 ,2.398404121398926 70 | 69 ,0.4818 ,0.23409999999999997,2.06172429561615 ,0.5232026738122353 ,0.28228794528738255,2.472740650177002 71 | 70 ,0.4787 
,0.23706000000000005,2.0659581899642943,0.5220209387222743 ,0.2811444565774798 ,2.3377909660339355 72 | 71 ,0.48462000000000005,0.23712 ,2.0814891386032106,0.5210796094498219 ,0.28028118114188083,2.435771942138672 73 | 72 ,0.47738 ,0.23214 ,2.048105254173279 ,0.5204981083652638 ,0.28011648754612006,2.4318268299102783 74 | 73 ,0.47641999999999995,0.23243999999999998,2.048414263725281 ,0.5202498971640699 ,0.27948971523618704,2.3634326457977295 75 | 74 ,0.47206000000000004,0.22694000000000003,2.0147369480133057,0.5191719736771241 ,0.278660002950435 ,2.3781402111053467 76 | 75 ,0.47682 ,0.23087999999999997,2.038754005432129 ,0.5192874933556672 ,0.2789597296839522 ,2.5077126026153564 77 | 76 ,0.48116000000000003,0.23468 ,2.061471657752991 ,0.5181650791817147 ,0.2775571022356961 ,2.3655097484588623 78 | 77 ,0.48475999999999997,0.23695999999999995,2.0815147876739504,0.5177334414639153 ,0.27771945421635114,2.4310824871063232 79 | 78 ,0.47121999999999997,0.22724 ,2.0202086973190307,0.5174688389569821 ,0.27736430925866806,2.353368043899536 80 | 79 ,0.47682 ,0.23487999999999998,2.0616024923324585,0.5164791163056807 ,0.27634492614936224,2.392667055130005 81 | 80 ,0.46965999999999997,0.22584000000000004,2.0220248746871947,0.5161817311872692 ,0.2767219261813644 ,2.2067110538482666 82 | 81 ,0.4779 ,0.23158 ,2.0549475860595705,0.5159936214404524 ,0.2761685244780735 ,2.3617541790008545 83 | 82 ,0.47631999999999997,0.23338000000000003,2.057671236991882 ,0.5150273149402069 ,0.2753926693397504 ,2.384498119354248 84 | 83 ,0.47740000000000005,0.23228000000000004,2.0585829782485963,0.5144044453221165 ,0.27466286596517087,2.2906651496887207 85 | 84 ,0.48262000000000005,0.2378 ,2.0793990230560304,0.5143755654024806 ,0.2747838494122936 ,2.429347515106201 86 | 85 ,0.47 ,0.22418000000000005,2.008458762168884 ,0.5142264825740906 ,0.2742983545470653 ,2.305520534515381 87 | 86 ,0.47882 ,0.23102 ,2.0526673555374146,0.5131134348605607 ,0.2738511060618951 ,2.2690491676330566 88 | 87 ,0.47362000000000004,0.23219999999999996,2.0431170082092285,0.5129011284243195 ,0.27334609773745344,2.3346753120422363 89 | 88 ,0.47785999999999995,0.23212 ,2.0530474519729616,0.512997134643649 ,0.2733718555036151 ,2.301201820373535 90 | 89 ,0.47312 ,0.22896000000000005,2.032184042930603 ,0.512901908962688 ,0.273222772675225 ,2.3732080459594727 91 | 90 ,0.47153999999999996,0.22719999999999996,2.0225574445724486,0.511617923346449 ,0.2721370438045938 ,2.409538507461548 92 | 91 ,0.48218000000000005,0.23750000000000004,2.0779174995422363,0.5119996066086623 ,0.27220885333449896,2.36938738822937 93 | 92 ,0.4697 ,0.22494000000000003,2.0102605962753297,0.5113002442304555 ,0.2714860748052361 ,2.2441155910491943 94 | 93 ,0.47572000000000003,0.23038000000000003,2.0420077562332155,0.5108248963640181 ,0.2707125612820187 ,2.3029773235321045 95 | 94 ,0.46684000000000003,0.22321999999999997,1.990864725112915 ,0.5102051488994019 ,0.27108175593033534,2.399219036102295 96 | 95 ,0.48022 ,0.23758 ,2.0849895334243773,0.510081043298805 ,0.2712402052191478 ,2.3251144886016846 97 | 96 ,0.48056 ,0.23373999999999995,2.0594991350173952,0.509207620864415 ,0.27034648878717604,2.418752670288086 98 | 97 ,0.46209999999999996,0.21840000000000004,1.9719325494766236,0.5097781944118136 ,0.27043703123792606,2.3093740940093994 99 | 98 ,0.46968 ,0.22653999999999996,2.013598947525024 ,0.5085714820940596 ,0.27009047220229676,2.351404905319214 100 | 99 ,0.46082 ,0.21931999999999996,1.9699711227416992,0.5082491197478549 ,0.2694395032029392 ,2.5266804695129395 101 | 100 
,0.47496000000000005,0.22834 ,2.0279460382461547,0.5070931424240556 ,0.2684505610900062 ,2.3275113105773926 102 | 101 ,0.4699 ,0.22618000000000005,2.008146357536316 ,0.5076434219738722 ,0.26921939138301254,2.411346673965454 103 | 102 ,0.46482 ,0.22082000000000002,1.9822244024276734,0.5076621548947171 ,0.26876277643741997,2.3941810131073 104 | 103 ,0.4685 ,0.22528000000000004,2.006483449935913 ,0.5073101320905081 ,0.26832099172082957,2.2784953117370605 105 | 104 ,0.4655 ,0.22360000000000002,1.993592257499695 ,0.5062727965987259 ,0.26812039336011617,2.2791807651519775 106 | 105 ,0.46284000000000003,0.22148 ,1.9812727069854736,0.5076801072771934 ,0.2683170890289869 ,2.295785903930664 107 | 106 ,0.45975999999999995,0.21877999999999997,1.9632367372512818,0.506157276920183 ,0.2672758508453621 ,2.3987462520599365 108 | 107 ,0.47448 ,0.22934 ,2.032461085319519 ,0.5057670077359158 ,0.26719311377829746,2.3139877319335938 109 | 108 ,0.4717 ,0.2268 ,2.0078865671157837,0.5054680615407672 ,0.2670362255662221 ,2.2800185680389404 110 | 109 ,0.46174000000000004,0.21975999999999996,1.9780486726760864,0.5051066722761357 ,0.2663836954901274 ,2.2969141006469727 111 | 110 ,0.46506000000000003,0.22287999999999997,1.987909927368164 ,0.5048842188411035 ,0.2666225402308988 ,2.2489843368530273 112 | 111 ,0.46682 ,0.22321999999999997,1.9967779684066773,0.5042871069891748 ,0.265886492549371 ,2.4824788570404053 113 | 112 ,0.46387999999999996,0.22102 ,1.9816057586669922,0.5044674113523062 ,0.2662869087324291 ,2.324923038482666 114 | 113 ,0.46519999999999995,0.22568 ,2.004482235908508 ,0.504179392694317 ,0.26586385693668346,2.3179800510406494 115 | 114 ,0.46120000000000005,0.21806000000000003,1.9652127122879028,0.5033122145668754 ,0.2654579769850457 ,2.2453205585479736 116 | 115 ,0.4625 ,0.21841999999999995,1.9649800777435302,0.5037516576683602 ,0.2651957160932181 ,2.24239444732666 117 | 116 ,0.4696 ,0.22663999999999995,2.018500747680664 ,0.5027525685566363 ,0.26444639925942515,2.3105602264404297 118 | 117 ,0.46258 ,0.22065999999999997,1.9819749593734741,0.5030640033656815 ,0.2644136166479467 ,2.3084001541137695 119 | 118 ,0.4647 ,0.22099999999999997,1.982657141685486 ,0.5028025230122224 ,0.2640303723089964 ,2.332547187805176 120 | 119 ,0.46192 ,0.22187999999999997,1.97360032081604 ,0.5024918687415458 ,0.2644682543337441 ,2.266057014465332 121 | 120 ,0.46992 ,0.22624 ,2.0141633129119874,0.5025597755796083 ,0.26439098103525926,2.317833423614502 122 | 121 ,0.46908000000000005,0.22663999999999995,2.0059516429901123,0.5017089887579058 ,0.26356126874950725,2.295189380645752 123 | 122 ,0.46597999999999995,0.22097999999999995,1.9907819080352782,0.5019173925023046 ,0.26372284019179393,2.1600966453552246 124 | 123 ,0.46340000000000003,0.21853999999999996,1.9691112756729126,0.5012992061144254 ,0.2637868443380137 ,2.229222536087036 125 | 124 ,0.46448 ,0.22192 ,1.9910471963882446,0.5009503054636906 ,0.26327090847641255,2.337778091430664 126 | 125 ,0.46778 ,0.22350000000000003,1.9963623523712157,0.5005093012854687 ,0.2632334426347229 ,2.172328472137451 127 | 126 ,0.45404 ,0.21318000000000004,1.9444833278656006,0.5010884607549211 ,0.26326778632293835,2.305861711502075 128 | 127 ,0.46142000000000005,0.22036 ,1.972012047767639 ,0.5004062702208221 ,0.2626511610117963 ,2.363752841949463 129 | 128 ,0.4659 ,0.22177999999999998,1.9819045782089233,0.5003695849175009 ,0.2629891341253716 ,2.294834852218628 130 | 129 ,0.45716 ,0.21509999999999996,1.9554623794555663,0.5000768830293006 ,0.2623170905900636 ,2.2594268321990967 131 | 130 ,0.47034 
,0.22550000000000003,2.015523900985718 ,0.4999418498915442 ,0.2621641050698309 ,2.359321117401123 132 | 131 ,0.45692 ,0.21509999999999996,1.9445851993560792,0.49998243788670793,0.2620243887018632 ,2.31573748588562 133 | 132 ,0.45538 ,0.21418000000000004,1.9375147485733033,0.500118251562833 ,0.26214537214898603,2.276916265487671 134 | 133 ,0.45986000000000005,0.21672000000000002,1.9545992946624755,0.4994165475695206 ,0.2614311795417772 ,2.1733193397521973 135 | 134 ,0.472 ,0.22814 ,2.0269447565078735,0.4993010278909775 ,0.26130239071096895,2.2925891876220703 136 | 135 ,0.46294 ,0.22024 ,1.9751090812683105,0.4989536883169797 ,0.2611923348010057 ,2.2863101959228516 137 | 136 ,0.45802 ,0.21719999999999995,1.9505061054229735,0.4988647069429668 ,0.26178632449946027,2.297260046005249 138 | 137 ,0.45604 ,0.21430000000000005,1.946243748664856 ,0.49839326176837206,0.26120482341490214,2.3090178966522217 139 | 138 ,0.45382 ,0.21252000000000004,1.9297368097305299,0.4982238849424002 ,0.26140620231398404,2.320775032043457 140 | 139 ,0.46094 ,0.21736 ,1.9694025325775146,0.4981364646451243 ,0.260938659831232 ,2.2200119495391846 141 | 140 ,0.46284000000000003,0.22026 ,1.9794608306884767,0.497878886983508 ,0.26032671775030114,2.3091278076171875 142 | 141 ,0.45233999999999996,0.21236 ,1.9287086009979248,0.49712644799624095,0.2598427839618098 ,2.319275379180908 143 | 142 ,0.46026 ,0.21830000000000005,1.9534587955474854,0.49696643763069137,0.2597873657376438 ,2.2945780754089355 144 | 143 ,0.45858 ,0.21645999999999999,1.955648012161255 ,0.4972045018330944 ,0.2597623885098508 ,2.294762134552002 145 | 144 ,0.45924 ,0.21706000000000003,1.9544968700408936,0.4973129966663207 ,0.25987634711165675,2.2573888301849365 146 | 145 ,0.45164000000000004,0.21426 ,1.9324300336837767,0.4967049572772324 ,0.25968199305789175,2.295177698135376 147 | 146 ,0.45555999999999996,0.21630000000000005,1.9492059659957885,0.49716937760651037,0.25952666592255347,2.220447301864624 148 | 147 ,0.46077999999999997,0.21914 ,1.9702605104446411,0.49669168812496733,0.25909815035822803,2.2406108379364014 149 | 148 ,0.45928 ,0.21855999999999998,1.964121880531311 ,0.49692506909715906,0.25973897235879473,2.3496170043945312 150 | 149 ,0.44720000000000004,0.20858 ,1.9028898668289185,0.4965566549872109 ,0.25918244850202976,2.145237684249878 151 | 150 ,0.46016 ,0.21841999999999995,1.9552607679367064,0.4954771704235279 ,0.25889364930567205,2.2462007999420166 152 | 151 ,0.45794 ,0.21518000000000004,1.947073402404785 ,0.49573787023861837,0.25878827662591997,2.0824429988861084 153 | 152 ,0.46814 ,0.22482000000000002,1.9972295951843262,0.4962655141757476 ,0.25924411103314404,2.236236810684204 154 | 153 ,0.45758 ,0.21545999999999998,1.939933753013611 ,0.4952687666791292 ,0.25797261403080163,2.2564921379089355 155 | 154 ,0.46512 ,0.22209999999999996,1.994394941329956 ,0.4958143629987347 ,0.2582309722307865 ,2.2469539642333984 156 | 155 ,0.46399999999999997,0.22016000000000002,1.9818537378311156,0.49580421599994384,0.2583176119896937 ,2.1356186866760254 157 | 156 ,0.45474000000000003,0.21692 ,1.9448313617706299,0.49531169628939864,0.257844605738362 ,2.266680955886841 158 | 157 ,0.45498000000000005,0.21626 ,1.9423119401931763,0.49506504616494185,0.25825985215042224,2.2116496562957764 159 | 158 ,0.45124 ,0.21342000000000005,1.9273758029937744,0.4952711082942348 ,0.2583878604428619 ,2.2333874702453613 160 | 159 ,0.45254000000000005,0.21494000000000002,1.9348428869247436,0.49446793431301306,0.2579944691051206 ,2.1847453117370605 161 | 160 ,0.46302 
,0.21974000000000005,1.9725937509536744,0.4937701330115434 ,0.2569625973819182 ,2.2597782611846924 162 | 161 ,0.45431999999999995,0.2157 ,1.9356817483901978,0.4942423587245066 ,0.25707109221514446,2.1958186626434326 163 | 162 ,0.45802 ,0.21536 ,1.9477687168121338,0.4938677003076102 ,0.25723110258069404,2.307528257369995 164 | 163 ,0.45802 ,0.21589999999999998,1.9475456857681275,0.4935593876520391 ,0.2568462971650066 ,2.2936882972717285 165 | 164 ,0.45216 ,0.21364000000000005,1.9342482614517211,0.49428528833477603,0.25696884168886647,2.202544927597046 166 | 165 ,0.45355999999999996,0.2127 ,1.9269879055023194,0.4935508017299852 ,0.25677526817346996,2.3081908226013184 167 | 166 ,0.45836 ,0.21565999999999996,1.951942400932312 ,0.49364602741094643,0.25667848141577176,2.2573795318603516 168 | 167 ,0.4576 ,0.21565999999999996,1.953117117881775 ,0.4937560833209098 ,0.25689156839038163,2.2772040367126465 169 | 168 ,0.46197999999999995,0.21755999999999998,1.9661316823959352,0.49355002119161673,0.25652705697227607,2.253927707672119 170 | 169 ,0.45387999999999995,0.21364000000000005,1.928521237373352 ,0.49377637731849167,0.2565660838907028 ,2.2618021965026855 171 | 170 ,0.45672 ,0.21760000000000002,1.9497074317932128,0.49300286379527414,0.25610088302305634,2.3162262439727783 172 | 171 ,0.46416 ,0.22126 ,1.9806125593185424,0.4932674663022073 ,0.2565098851281683 ,2.142352819442749 173 | 172 ,0.45828 ,0.21433999999999997,1.9505506801605224,0.49333381206353266,0.25599394926656716,2.1487417221069336 174 | 173 ,0.45036 ,0.21275999999999995,1.929549765586853 ,0.492726553212813 ,0.25614225155658865,2.1350855827331543 175 | 174 ,0.44862 ,0.21009999999999995,1.90473717212677 ,0.49284441450646166,0.2562156221632309 ,2.3177642822265625 176 | 175 ,0.44806 ,0.20904 ,1.9126193809509278,0.4922246670418454 ,0.25559665523698316,2.2237131595611572 177 | 176 ,0.44889999999999997,0.21053999999999995,1.9089170169830323,0.49289280788531087,0.2562546490816576 ,2.245448589324951 178 | 177 ,0.45230000000000004,0.21114 ,1.91820405960083 ,0.4930309631765414 ,0.2557270051445284 ,2.2425637245178223 179 | 178 ,0.45392 ,0.21423999999999999,1.933074679374695 ,0.4916431659572874 ,0.2553406386521039 ,2.215891122817993 180 | 179 ,0.4597 ,0.21616000000000002,1.9528593730926513,0.4911990396255913 ,0.25531019765573104,2.303452730178833 181 | 180 ,0.45638 ,0.21814 ,1.9603045320510863,0.4923339424134402 ,0.255286781504675 ,2.181192398071289 182 | 181 ,0.44974000000000003,0.21138 ,1.9225932168960571,0.492089633904089 ,0.25564114592398957,2.3209774494171143 183 | 182 ,0.46048 ,0.21836 ,1.9713381481170655,0.4917805407101494 ,0.2548824626297742 ,2.238272190093994 184 | 183 ,0.44892 ,0.20992 ,1.906259627342224 ,0.4920943171343002 ,0.2551259906007569 ,2.202975034713745 185 | 184 ,0.4504 ,0.20911999999999997,1.914268741607666 ,0.49146598374763006,0.25476147918265146,2.3216521739959717 186 | 185 ,0.45840000000000003,0.21608000000000005,1.9457774925231934,0.4913434392237702 ,0.25516267590407804,2.137620687484741 187 | 186 ,0.45138 ,0.20904 ,1.9165087413787842,0.4913528056841926 ,0.25485514378687557,2.189300298690796 188 | 187 ,0.45655999999999997,0.21555999999999997,1.9456792068481445,0.49156901481227666,0.2547817731802333 ,2.3459317684173584 189 | 188 ,0.4527 ,0.21462000000000003,1.9322506380081177,0.4911436214014254 ,0.2550760361451707 ,2.4034276008605957 190 | 189 ,0.44748 ,0.20938 ,1.9030444765090941,0.4907564743706324 ,0.25388805674826154,2.166001319885254 191 | 190 ,0.45204 
,0.21043999999999996,1.9181049871444702,0.48974567718338047,0.2541479760249835 ,2.3711318969726562 192 | 191 ,0.45108000000000004,0.21055999999999997,1.913484206199646 ,0.49069090914767555,0.25428300916273994,2.1708202362060547 193 | 192 ,0.45784 ,0.21543999999999996,1.9571105337142944,0.49063939361535225,0.2537139966920784 ,2.2519690990448 194 | 193 ,0.44555999999999996,0.20550000000000002,1.8882037830352782,0.4907065199150462 ,0.2539676716618521 ,2.2625527381896973 195 | 194 ,0.45433999999999997,0.21392 ,1.9320763492584228,0.49044816171506134,0.25426271516515797,2.1873345375061035 196 | 195 ,0.46065999999999996,0.22046 ,1.9690080070495606,0.4904528449452725 ,0.25418466132830464,2.370694398880005 197 | 196 ,0.4718 ,0.22552000000000005,2.018118267059326 ,0.4904263066407424 ,0.2536976053863391 ,2.292919158935547 198 | 197 ,0.45152000000000003,0.2126 ,1.9251720809936523,0.489974374925361 ,0.25335416850418413,2.288735866546631 199 | 198 ,0.45277999999999996,0.21252000000000004,1.9287763357162475,0.48971679726374473,0.2537592679174534 ,2.230243682861328 200 | 199 ,0.45028 ,0.21202 ,1.916774911880493 ,0.4900727227597963 ,0.253466566029253 ,2.178762197494507 201 | 200 ,0.44848 ,0.20845999999999998,1.9030662345886231,0.4894389256045465 ,0.2537155577688155 ,2.1086039543151855 202 | 201 ,0.41108 ,0.1834 ,1.7302805757522584,0.4625501593469079 ,0.23278854357004197,2.202009439468384 203 | 202 ,0.40990000000000004,0.1824 ,1.717132329940796 ,0.4572955750499349 ,0.22837772124945455,2.043377637863159 204 | 203 ,0.40822 ,0.18066000000000004,1.7106974148750305,0.4546331586748644 ,0.22737941267609918,2.0062530040740967 205 | 204 ,0.40762 ,0.18096 ,1.7098391270637512,0.4535107445009121 ,0.2267783981323278 ,2.0827858448028564 206 | 205 ,0.40584 ,0.17934000000000005,1.7045838809013367,0.4521003116689706 ,0.22524151808468373,2.060337781906128 207 | 206 ,0.4062 ,0.18054000000000003,1.7065866136550902,0.45171472571491456,0.22474275406719035,2.052906036376953 208 | 207 ,0.4052 ,0.1795 ,1.6985454893112182,0.451598425498003 ,0.22465299215480883,2.0993521213531494 209 | 208 ,0.40522 ,0.17889999999999995,1.6970116925239562,0.4508631583548437 ,0.2237826918738931 ,2.017688274383545 210 | 209 ,0.40536000000000005,0.17842000000000002,1.6981632494926453,0.4500888642932577 ,0.2239247498569663 ,2.014716625213623 211 | 210 ,0.4034 ,0.17859999999999998,1.6954904675483704,0.44995148954039554,0.22342442476273583,1.9772852659225464 212 | 211 ,0.4034 ,0.17889999999999995,1.6963275957107544,0.45014818520926625,0.22365234196634787,1.9209442138671875 213 | 212 ,0.40513999999999994,0.17874 ,1.702221894264221 ,0.44981489532590213,0.22359224051197069,1.933029294013977 214 | 213 ,0.40332 ,0.17756000000000005,1.6924196696281433,0.449494874594803 ,0.22269071869631363,1.9565271139144897 215 | 214 ,0.40384 ,0.17825999999999997,1.690766649246216 ,0.4491959283996544 ,0.2226056400141434 ,1.9721087217330933 216 | 215 ,0.40478000000000003,0.17800000000000005,1.6917375016212464,0.44856759501298427,0.22202882215979647,2.059985637664795 217 | 216 ,0.40408 ,0.17796 ,1.6917555904388428,0.44876741283532906,0.2227476979972166 ,2.1076395511627197 218 | 217 ,0.40378000000000003,0.17788000000000004,1.6919544887542726,0.44851764055739807,0.22253148886913254,1.8680992126464844 219 | 218 ,0.40471999999999997,0.17757999999999996,1.6955097174644471,0.44797438585289817,0.2214769815332427 ,2.0635693073272705 220 | 219 ,0.40298 ,0.17869999999999997,1.6922832942008972,0.4477573961864456 ,0.22117179103114581,1.9986435174942017 221 | 220 
,0.40534000000000003,0.17832000000000003,1.6949209451675415,0.4482608434341503 ,0.2217150457356457 ,2.1340644359588623 222 | 221 ,0.402 ,0.17678000000000005,1.6878811407089234,0.44758567774536806,0.22194218240088914,2.099299907684326 223 | 222 ,0.40244 ,0.17664000000000002,1.685697419643402 ,0.44769885580880553,0.221665871818428 ,2.0568718910217285 224 | 223 ,0.40244 ,0.17852 ,1.6883731341362 ,0.4473171725465923 ,0.22094465436590238,1.9594590663909912 225 | 224 ,0.40346000000000004,0.17779999999999996,1.6917100000381469,0.44749279367951256,0.22140361092660055,2.0066354274749756 226 | 225 ,0.40406 ,0.17789999999999995,1.6890762281417846,0.44711267149403633,0.22125843079005314,2.0282089710235596 227 | 226 ,0.40190000000000003,0.17784 ,1.68691171169281 ,0.4471821394088359 ,0.22120769579609845,2.0186710357666016 228 | 227 ,0.40318 ,0.17822000000000005,1.686385772228241 ,0.44715013733572595,0.22048101457499292,1.9889837503433228 229 | 228 ,0.4022 ,0.17759999999999998,1.6892516827583313,0.4471673091798337 ,0.22163230866858108,2.073277235031128 230 | 229 ,0.40264 ,0.17728 ,1.6884992742538452,0.4473343443907001 ,0.22116164403235483,2.0059711933135986 231 | 230 ,0.40291999999999994,0.1773 ,1.6860632634162902,0.44723599655626467,0.22114759434172127,1.943172812461853 232 | 231 ,0.40071999999999997,0.1764 ,1.6812610125541687,0.44730702554780133,0.22076981377135063,1.9418914318084717 233 | 232 ,0.40181999999999995,0.17710000000000004,1.6895540499687194,0.44655536709890276,0.22055906841184636,1.8989872932434082 234 | 233 ,0.40359999999999996,0.17772 ,1.693667221069336 ,0.44709003588134877,0.22061214502090676,1.8889678716659546 235 | 234 ,0.40254 ,0.17847999999999997,1.6897777843475341,0.4467840648408834 ,0.22085177030004677,1.9733788967132568 236 | 235 ,0.40105999999999997,0.17672 ,1.683946166038513 ,0.44660766316959455,0.2204490125018831 ,1.9942035675048828 237 | 236 ,0.40092000000000005,0.17657999999999996,1.6813623213768005,0.4463407190475559 ,0.22070502908676226,2.080871105194092 238 | 237 ,0.4022 ,0.17676000000000003,1.6842049431800843,0.44664981224149547,0.22067458809038942,1.9674619436264038 239 | 238 ,0.40380000000000005,0.1773 ,1.6893304657936097,0.4460956299998361 ,0.2207370311598722 ,2.00005841255188 240 | 239 ,0.40252 ,0.1774 ,1.682943172454834 ,0.44601211239440297,0.21984487580463752,2.045797348022461 241 | 240 ,0.40092000000000005,0.17772 ,1.6829278302192687,0.4457358018119417 ,0.21949597515390262,2.049071788787842 242 | 241 ,0.40025999999999995,0.17572 ,1.6794001698493957,0.4460815803092024 ,0.21978087165841764,1.9668012857437134 243 | 242 ,0.40134000000000003,0.17720000000000002,1.6828402185440063,0.44616900060647835,0.2205863872547451 ,1.8903841972351074 244 | 243 ,0.40049999999999997,0.17635999999999996,1.6854614686965943,0.4460862635394137 ,0.2202031429157948 ,1.9914335012435913 245 | 244 ,0.40096 ,0.17588000000000004,1.6832505989074706,0.44591610617507316,0.2201516273834715 ,2.0179049968719482 246 | 245 ,0.40049999999999997,0.17667999999999995,1.683279685974121 ,0.4462392490596464 ,0.22047555080641323,1.9234559535980225 247 | 246 ,0.40154 ,0.17728 ,1.6879426956176757,0.44581151403368957,0.22007123193151246,1.964600920677185 248 | 247 ,0.40069999999999995,0.17637999999999998,1.685914101600647 ,0.44587161548806675,0.22019221537863531,2.1582984924316406 249 | 248 ,0.40049999999999997,0.17656000000000005,1.6830172061920166,0.44542280592615946,0.2198511201115858 ,1.969218373298645 250 | 249 ,0.40049999999999997,0.17623999999999995,1.681358892917633 ,0.4457436071956271 
,0.21926805795029058,1.8629745244979858 251 | 250 ,0.4011 ,0.17625999999999997,1.6793432426452637,0.44532758024519836,0.21968408490071945,1.9971508979797363 252 | 251 ,0.40092000000000005,0.17732000000000003,1.6827560114860534,0.445696774893515 ,0.21996429817502328,2.0254971981048584 253 | 252 ,0.39942 ,0.17657999999999996,1.6819242572784423,0.445639795592612 ,0.2199955197097646 ,1.9463783502578735 254 | 253 ,0.40208 ,0.17718 ,1.6860434627532959,0.4452159632584979 ,0.21967003521008577,2.0399625301361084 255 | 254 ,0.40074 ,0.17837999999999998,1.6862781715393067,0.4454985181479073 ,0.2197059399750384 ,2.0375473499298096 256 | 255 ,0.40105999999999997,0.17625999999999997,1.6826235413551331,0.4457537541944181 ,0.2202484141411697 ,2.000822067260742 257 | 256 ,0.39946000000000004,0.17722000000000004,1.682346591949463 ,0.4460394312373016 ,0.2197106232052496 ,2.028555154800415 258 | 257 ,0.40142 ,0.17832000000000003,1.6894632053375245,0.44495136075156483,0.21946709523426688,1.9970602989196777 259 | 258 ,0.39942 ,0.17545999999999995,1.6766006994247435,0.4453135305545647 ,0.2197043788983013 ,1.9619733095169067 260 | 259 ,0.40115999999999996,0.17647999999999997,1.6809416151046752,0.44561950159503017,0.2194608509273186 ,1.9997793436050415 261 | 260 ,0.40254 ,0.17891999999999997,1.6906608724594117,0.4455000792246444 ,0.21928601033276696,1.9994895458221436 262 | 261 ,0.39970000000000006,0.17586000000000002,1.683009729385376 ,0.4454173421575798 ,0.2193117680989286 ,1.9545624256134033 263 | 262 ,0.40147999999999995,0.17823999999999995,1.6876145267486573,0.44494433590624793,0.21913692750437685,2.0828328132629395 264 | 263 ,0.40247999999999995,0.17722000000000004,1.6918319582939148,0.44558047467660344,0.21935079501735522,1.903985619544983 265 | 264 ,0.40054 ,0.17769999999999997,1.68566641330719 ,0.44494589698298503,0.220050157395562 ,1.995285153388977 266 | 265 ,0.40264 ,0.17700000000000005,1.6893892669677735,0.4456116962113448 ,0.21914473288806224,1.935260534286499 267 | 266 ,0.40132 ,0.17672 ,1.6881626749038696,0.4451527396506466 ,0.21944133746810524,1.9945030212402344 268 | 267 ,0.40122 ,0.17644000000000004,1.6822999811172485,0.44581619726390076,0.21957402899075606,1.932342290878296 269 | 268 ,0.40046000000000004,0.17623999999999995,1.6820681858062745,0.44537285147057326,0.2193187929442454 ,2.1069138050079346 270 | 269 ,0.3994 ,0.17659999999999998,1.6804382491111756,0.44479135038601525,0.2191658074240126 ,2.025639772415161 271 | 270 ,0.40169999999999995,0.17691999999999997,1.686670560836792 ,0.44572565481315085,0.21924151964576044,1.9432374238967896 272 | 271 ,0.40136000000000005,0.17552 ,1.682754533290863 ,0.44518005849354536,0.21991512425780557,2.008519172668457 273 | 272 ,0.40127999999999997,0.17637999999999998,1.6814967131614684,0.44478666715580406,0.21905106828383814,1.9794284105300903 274 | 273 ,0.39970000000000006,0.17722000000000004,1.685111210346222 ,0.44493887213766825,0.2189191572995558 ,2.009369134902954 275 | 274 ,0.40127999999999997,0.17622000000000004,1.6867558670043945,0.44484911022528684,0.21949441407716563,2.0550734996795654 276 | 275 ,0.40232 ,0.17713999999999996,1.6830100226402283,0.44478900877090966,0.2192204451098101 ,2.099720001220703 277 | 276 ,0.40166 ,0.17764000000000002,1.6869463539123535,0.44548212684216815,0.21923839749228635,1.896890640258789 278 | 277 ,0.40066 ,0.17634000000000005,1.6784739375114441,0.44493653052256266,0.21873416970621318,2.014864921569824 279 | 278 ,0.40215999999999996,0.17720000000000002,1.6877111792564392,0.44503331728026085,0.2190705817430515 
,1.920290231704712 280 | 279 ,0.40125999999999995,0.17667999999999995,1.681001992225647 ,0.4448171081521769 ,0.21902765213278208,2.085205078125 281 | 280 ,0.401 ,0.17501999999999995,1.6803583955764771,0.4448662820693945 ,0.21944680123668503,2.091466188430786 282 | 281 ,0.40124000000000004,0.17667999999999995,1.6842355775833129,0.44518474172375655,0.21966613251824318,2.030574083328247 283 | 282 ,0.40158000000000005,0.1773 ,1.6853622674942017,0.44538377900773274,0.21943431262278845,2.108177423477173 284 | 283 ,0.40188 ,0.17691999999999997,1.689446551799774 ,0.4451238597310109 ,0.2191205361986377 ,1.8677006959915161 285 | 284 ,0.40215999999999996,0.17859999999999998,1.6884005212783812,0.4453119694778276 ,0.21894803721919154,1.965569257736206 286 | 285 ,0.40047999999999995,0.17645999999999995,1.6836801314353942,0.4455032013781185 ,0.21954827122459442,1.8870484828948975 287 | 286 ,0.40025999999999995,0.17588000000000004,1.6853505229949952,0.44444010812017476,0.21929771840829493,2.0096864700317383 288 | 287 ,0.40169999999999995,0.17666000000000004,1.6857201337814331,0.4452620150222414 ,0.21937889439862246,1.948986291885376 289 | 288 ,0.4013 ,0.17635999999999996,1.6803104996681213,0.4449716547491467 ,0.21944680123668503,2.0055670738220215 290 | 289 ,0.3991 ,0.17620000000000002,1.6805700659751892,0.4450184870512587 ,0.21948348654000616,2.022188663482666 291 | 290 ,0.39944 ,0.17720000000000002,1.6852909684181214,0.4444619631944938 ,0.21871465624699982,1.9961810111999512 292 | 291 ,0.40034000000000003,0.17742000000000002,1.6878566694259645,0.4451082489636402 ,0.21931098756056 ,1.9765015840530396 293 | 292 ,0.40159999999999996,0.17662 ,1.6865052247047425,0.4451090295020087 ,0.21863816348688347,1.9659641981124878 294 | 293 ,0.39922 ,0.17723999999999995,1.679892909526825 ,0.4448108638452286 ,0.21880597923611833,2.001532793045044 295 | 294 ,0.40074 ,0.17610000000000003,1.6812583422660827,0.4443722012821123 ,0.21888091091949757,1.9776086807250977 296 | 295 ,0.40036000000000005,0.17579999999999996,1.6785633754730225,0.4454540274609009 ,0.21937265009167428,2.1129150390625 297 | 296 ,0.40064 ,0.17576000000000003,1.6827451920509338,0.44454391972318985,0.21873338916784468,2.045975923538208 298 | 297 ,0.40115999999999996,0.17664000000000002,1.6812806177139281,0.44483740214975875,0.21923761695391775,1.9696977138519287 299 | 298 ,0.40152 ,0.17672 ,1.6824861645698548,0.44522923241076295,0.21895506206450832,2.017645835876465 300 | 299 ,0.40064 ,0.17769999999999997,1.683691647052765 ,0.4446227540984118 ,0.2189628674481937 ,2.0087215900421143 301 | 300 ,0.40108 ,0.17584 ,1.6823703622817994,0.44475232346758853,0.2187209005539481 ,1.8921555280685425 302 | 301 ,0.39605999999999997,0.17269999999999996,1.658390154838562 ,0.43964213876879443,0.21524516319886478,1.9158344268798828 303 | 302 ,0.39614000000000005,0.17274 ,1.6575476598739625,0.43806623180272364,0.2143990596073736 ,2.0299806594848633 304 | 303 ,0.39476 ,0.17262 ,1.6552311182022095,0.4385829482026933 ,0.21456219212639727,1.9282954931259155 305 | 304 ,0.39522 ,0.17300000000000004,1.6549755454063415,0.43795695643112875,0.2141508484061797 ,1.923496961593628 306 | 305 ,0.39486 ,0.17206 ,1.6561623311042786,0.4373957493441526 ,0.2138612686714535 ,1.9783618450164795 307 | 306 ,0.39486 ,0.17281999999999997,1.6571214485168457,0.4372568135145535 ,0.2139611775826259 ,2.040966272354126 308 | 307 ,0.3952 ,0.17235999999999996,1.6542551636695861,0.43728022966560953,0.2137090636895893 ,2.074671506881714 309 | 308 
,0.39427999999999996,0.17164000000000001,1.6534245777130128,0.4372568135145535 ,0.21352797878808927,1.8666011095046997 310 | 309 ,0.39568000000000003,0.17262 ,1.6555373907089233,0.43771967276709434,0.21324074066846865,2.0139243602752686 311 | 310 ,0.39505999999999997,0.17306 ,1.6538658475875854,0.43722793359491774,0.21362944877599876,2.0342583656311035 312 | 311 ,0.39505999999999997,0.17237999999999998,1.6535902428627014,0.43712412199190265,0.21335938250048592,2.1127729415893555 313 | 312 ,0.39448000000000005,0.17245999999999995,1.6529887008666992,0.43648798322154725,0.21345929141165831,1.9449093341827393 314 | 313 ,0.39548000000000005,0.1724 ,1.654395773410797 ,0.4374917555634824 ,0.21361930177720778,1.9536739587783813 315 | 314 ,0.39524000000000004,0.17201999999999995,1.6517110133171082,0.43706714269099967,0.21308151084128768,1.8902829885482788 316 | 315 ,0.39561999999999997,0.17286 ,1.6543660712242128,0.43644661468801493,0.21344758333613023,2.0074074268341064 317 | 316 ,0.39552 ,0.17162 ,1.6531854486465454,0.43732940358282724,0.2133679684225398 ,1.9212276935577393 318 | 317 ,0.39474 ,0.17264000000000002,1.6532063245773316,0.43718344290791133,0.21296052739416482,2.0437259674072266 319 | 318 ,0.395 ,0.17247999999999997,1.6530010557174684,0.4373450143501979 ,0.21330942804489972,1.946571946144104 320 | 319 ,0.39474 ,0.17206 ,1.6535975217819214,0.4374472648764759 ,0.21325947358931352,2.0022027492523193 321 | 320 ,0.39449999999999996,0.17181999999999997,1.6538713812828063,0.43692196255445226,0.2134187034164945 ,1.9771579504013062 322 | 321 ,0.39476 ,0.1724 ,1.6540744233131408,0.4372115422891786 ,0.21310258537723814,1.9496607780456543 323 | 322 ,0.39559999999999995,0.1723 ,1.6526210761070252,0.4368540557163898 ,0.21322747151620358,1.9922665357589722 324 | 323 ,0.39478 ,0.17257999999999996,1.6535461473464965,0.4370078217749911 ,0.21308385245639327,2.0399112701416016 325 | 324 ,0.3954 ,0.17186 ,1.6543952536582947,0.4370046996215169 ,0.2131142934527661 ,1.9081318378448486 326 | 325 ,0.39461999999999997,0.17196 ,1.6529425692558288,0.43713270791395653,0.21307994976455058,1.94970703125 327 | 326 ,0.39402000000000004,0.17190000000000005,1.6502574920654296,0.437403554727838 ,0.21340153157238673,1.9466488361358643 328 | 327 ,0.39534 ,0.17206 ,1.6523849081993103,0.4371522213731699 ,0.21322591043946648,2.0162243843078613 329 | 328 ,0.39458000000000004,0.17245999999999995,1.652992262840271 ,0.4371108528396376 ,0.2128707654817834 ,1.9914060831069946 330 | 329 ,0.39466 ,0.17222000000000004,1.6544014310836792,0.43693913439856 ,0.21327898704852688,1.8247112035751343 331 | 330 ,0.39490000000000003,0.17269999999999996,1.6537206888198852,0.43698674723904063,0.21325244874399674,1.9547038078308105 332 | 331 ,0.39512 ,0.17290000000000005,1.654029459953308 ,0.4366097472070386 ,0.21307760814944499,1.9248343706130981 333 | 332 ,0.3941 ,0.17215999999999998,1.650585446357727 ,0.4371108528396376 ,0.21301282346485662,1.818377137184143 334 | 333 ,0.39444 ,0.17186 ,1.6535586428642273,0.43727554643539834,0.2132220077476239 ,1.96422278881073 335 | 334 ,0.39383999999999997,0.17213999999999996,1.6499545621871947,0.4367174615018963 ,0.21309087730171006,1.898654818534851 336 | 335 ,0.39346000000000003,0.17191999999999996,1.6507054328918458,0.4372427638239199 ,0.21335626034701172,1.8233815431594849 337 | 336 ,0.39424000000000003,0.17206 ,1.6517313885688782,0.43673697496110964,0.2130690222273911 ,1.8904227018356323 338 | 337 ,0.39496 ,0.17191999999999996,1.6509188747406005,0.4369648921647217 ,0.21348504917781996,1.8574035167694092 
339 | 338 ,0.39474 ,0.1723 ,1.6520080614089965,0.43668233727531225,0.21294413608842566,1.9846665859222412 340 | 339 ,0.39427999999999996,0.17247999999999997,1.652449107170105 ,0.4368439087175989 ,0.2131728338304062 ,1.927560567855835 341 | 340 ,0.39456 ,0.17171999999999998,1.6525385451316834,0.4362686519399891 ,0.21300111538932864,1.9070255756378174 342 | 341 ,0.39508 ,0.17220000000000002,1.6518986129760742,0.4364973496819696 ,0.21334143011800955,1.8965257406234741 343 | 342 ,0.39583999999999997,0.17259999999999998,1.6529682636260987,0.436847030871073 ,0.21301126238811963,2.0165328979492188 344 | 343 ,0.39503999999999995,0.17162 ,1.652568666934967 ,0.43571134754485563,0.21250625406367785,1.9413059949874878 345 | 344 ,0.3941 ,0.17122000000000004,1.6509872770309448,0.43634124200826274,0.21269670542560026,1.9120986461639404 346 | 345 ,0.39466 ,0.17264000000000002,1.6530960321426391,0.43747146156590044,0.2137035999210095 ,2.0218095779418945 347 | 346 ,0.39502000000000004,0.17201999999999995,1.652053451538086 ,0.43618825648803006,0.21319468890472515,1.9205304384231567 348 | 347 ,0.39412 ,0.17191999999999996,1.6491221165657044,0.4368501530245471 ,0.2126748503512813 ,1.830946683883667 349 | 348 ,0.39542 ,0.17159999999999997,1.652110183238983 ,0.43623196663666797,0.21235795177365635,1.9268076419830322 350 | 349 ,0.39481999999999995,0.17254000000000003,1.6526780343055725,0.4370070412366226 ,0.21307916922618209,1.8933340311050415 351 | 350 ,0.3944 ,0.17088000000000003,1.6506448841094972,0.4362233807146141 ,0.21249610706488697,2.026419162750244 352 | 351 ,0.39478 ,0.17191999999999996,1.651453685760498 ,0.435935362056625 ,0.2122845811670141 ,1.8861769437789917 353 | 352 ,0.39574 ,0.17220000000000002,1.6522731757164002,0.43667140973815277,0.21279895595187825,1.9955594539642334 354 | 353 ,0.39461999999999997,0.1725 ,1.6530088949203492,0.43649110537502134,0.21260850458995584,1.8613394498825073 355 | 354 ,0.39366 ,0.17196 ,1.6501879358291627,0.4363615360058447 ,0.21224633478695598,2.012373447418213 356 | 355 ,0.39442 ,0.17135999999999996,1.6504539728164673,0.4365527679061356 ,0.2125733803633718 ,1.973877191543579 357 | 356 ,0.39471999999999996,0.17144000000000004,1.6499564027786255,0.43648017783786186,0.2126280180491692 ,1.924246907234192 358 | 357 ,0.39408 ,0.17134000000000005,1.6495525598526002,0.4360563455037477 ,0.21266158119901624,1.7818175554275513 359 | 358 ,0.39470000000000005,0.17213999999999996,1.6505969595909118,0.4365004718354438 ,0.21256791659479213,1.93817138671875 360 | 359 ,0.39461999999999997,0.17259999999999998,1.6533386182785035,0.43639041592548045,0.21289340109447086,1.9715956449508667 361 | 360 ,0.39503999999999995,0.17208 ,1.6491358137130738,0.43639119646384894,0.2124750325289365 ,1.9580695629119873 362 | 361 ,0.39471999999999996,0.17174 ,1.6501601123809815,0.4366721902765214 ,0.21283564125519938,1.8696626424789429 363 | 362 ,0.39476 ,0.17190000000000005,1.6503692889213561,0.43630065401309903,0.21269670542560026,1.9976744651794434 364 | 363 ,0.39522 ,0.17142000000000002,1.6515040135383605,0.43624679686567014,0.21269280273375757,1.9309991598129272 365 | 364 ,0.39442 ,0.17266000000000004,1.6541628313064576,0.4364091488463253 ,0.21265689796880505,1.988036870956421 366 | 365 ,0.39324000000000003,0.17113999999999996,1.6498348879814149,0.4359751695134202 ,0.21270685242439125,2.0170071125030518 367 | 366 ,0.39474 ,0.17162 ,1.6509067988395691,0.4362585049411981 ,0.21251093729388904,1.8606528043746948 368 | 367 ,0.39458000000000004,0.17149999999999999,1.6506847906112672,0.4366214552825666 
,0.21259367436095378,1.865309238433838 369 | 368 ,0.39480000000000004,0.17159999999999997,1.651498827934265 ,0.43688293563602554,0.21290276755489335,1.9633949995040894 370 | 369 ,0.39414000000000005,0.17206 ,1.6507004618644714,0.4360298071992176 ,0.2129698938545873 ,1.8659745454788208 371 | 370 ,0.39476 ,0.17152 ,1.6499107265472412,0.43634514470010544,0.21291447563042132,1.9261772632598877 372 | 371 ,0.39446000000000003,0.17176000000000002,1.6519738841056824,0.4363755856964783 ,0.21281456671924892,1.8712085485458374 373 | 372 ,0.39354 ,0.17213999999999996,1.6522010207176208,0.43540459596602166,0.2126280180491692 ,1.8091799020767212 374 | 373 ,0.39427999999999996,0.17203999999999997,1.6496510124206543,0.435879943832459 ,0.21289027894099677,1.937243103981018 375 | 374 ,0.39464 ,0.17179999999999995,1.6503850817680359,0.435960339284418 ,0.21203012565887192,1.9576112031936646 376 | 375 ,0.39436000000000004,0.17179999999999995,1.6518455696105958,0.43564344070679306,0.21312912368176828,2.052851438522339 377 | 376 ,0.3943 ,0.17169999999999996,1.6509624361991881,0.4363068983200473 ,0.21284656879235886,2.006412982940674 378 | 377 ,0.39374 ,0.17147999999999997,1.6527552795410156,0.4362787989387801 ,0.21276227064855713,1.9653937816619873 379 | 378 ,0.39444 ,0.17198000000000002,1.651243715286255 ,0.4359283372113081 ,0.2125249869845227 ,1.9560596942901611 380 | 379 ,0.39486 ,0.17123999999999995,1.651046462059021 ,0.4358854076010388 ,0.21222604078937402,2.016141891479492 381 | 380 ,0.39326000000000005,0.17181999999999997,1.6493628907203675,0.43673307226926705,0.21293164747452908,2.084007978439331 382 | 381 ,0.39376 ,0.17196 ,1.650302906036377 ,0.43660896666867 ,0.21288481517241697,2.0103302001953125 383 | 382 ,0.39349999999999996,0.17156000000000005,1.6502563333511353,0.4359837554354741 ,0.2125835273621628 ,1.9159033298492432 384 | 383 ,0.39436000000000004,0.17079999999999995,1.6499921679496765,0.43649656914360113,0.21226272609269514,1.96381676197052 385 | 384 ,0.39392000000000005,0.17135999999999996,1.6501990723609925,0.4363740246197412 ,0.2127833451845076 ,1.8612139225006104 386 | 385 ,0.3944 ,0.17179999999999995,1.6508662629127502,0.4357199334669095 ,0.2127817841077705 ,1.960961103439331 387 | 386 ,0.39536000000000004,0.17220000000000002,1.6510561800003052,0.43589243244635556,0.2120878854981435 ,2.0422167778015137 388 | 387 ,0.39403999999999995,0.17132000000000003,1.6496809244155883,0.4363630970825817 ,0.21268890004191487,2.0444071292877197 389 | 388 ,0.39422 ,0.17200000000000004,1.6496447777748109,0.43549591895514017,0.21245005530114336,1.9546632766723633 390 | 389 ,0.39402000000000004,0.17171999999999998,1.6525780987739562,0.43629597078288784,0.21231892485522963,1.9264973402023315 391 | 390 ,0.39422 ,0.17154000000000003,1.6495667123794555,0.4364161736916421 ,0.21241180892108524,1.9351860284805298 392 | 391 ,0.39458000000000004,0.17201999999999995,1.6502821135520935,0.43598999974242236,0.21270997457786534,1.892309546470642 393 | 392 ,0.39464 ,0.17162 ,1.6506765365600586,0.43653871821550194,0.21285827686788683,1.8665776252746582 394 | 393 ,0.3941 ,0.17186 ,1.650919668674469 ,0.43628348216899127,0.21268968058028348,2.0514423847198486 395 | 394 ,0.39344 ,0.17113999999999996,1.6489854550361633,0.4363771467732154 ,0.21255230582742135,1.8857016563415527 396 | 395 ,0.39456 ,0.17168000000000005,1.65083101272583 ,0.43642788176717007,0.21264597043164557,1.890779972076416 397 | 396 ,0.39368000000000003,0.17110000000000003,1.6496319890022277,0.43565983201253233,0.2122853617053827 ,2.0174386501312256 398 | 397 
,0.39493999999999996,0.17076000000000002,1.6513740563392638,0.43613283826386406,0.21265455635369945,1.842015027999878 399 | 398 ,0.39459999999999995,0.17171999999999998,1.6493063688278198,0.43564109909168747,0.21261318782016703,1.9099395275115967 400 | 399 ,0.39352 ,0.17171999999999998,1.649168930053711 ,0.43579486515028876,0.21241571161292794,1.859471082687378 401 | 400 ,0.39364 ,0.17122000000000004,1.6497600150108338,0.4361671819520796 ,0.21226740932290633,2.020566701889038 402 | 401 ,0.39424000000000003,0.17166000000000003,1.6480052709579467,0.4360126353551098 ,0.2127294880370787 ,2.0154826641082764 403 | -------------------------------------------------------------------------------- /results/mobilenet_v2_0.5_160/top1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_0.5_160/top1.png -------------------------------------------------------------------------------- /results/mobilenet_v2_0.5_160/top5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_0.5_160/top5.png -------------------------------------------------------------------------------- /results/mobilenet_v2_1.0_224/loss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_1.0_224/loss.png -------------------------------------------------------------------------------- /results/mobilenet_v2_1.0_224/model_best.pth.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_1.0_224/model_best.pth.tar -------------------------------------------------------------------------------- /results/mobilenet_v2_1.0_224/params.txt: -------------------------------------------------------------------------------- 1 | /home/chaimb/research/MobileNet2/imagenet.py --dataroot /home/chaimb/ILSVRC/Data/CLS-LOC --seed 42 --gpus 1,2,3,4,5,6 -j 32 --scaling 1.0 -b 256 --resume results/2018-05-22_21-10-55/ 2 | Namespace(batch_size=256, dataroot='/home/chaimb/ILSVRC/Data/CLS-LOC', decay=4e-05, epochs=400, evaluate=False, gamma=0.1, gpus=[1, 2, 3, 4, 5, 6], input_size=224, learning_rate=0.01, log_interval=100, momentum=0.9, results_dir='./results', resume='results/2018-05-22_21-10-55/', save='2018-05-23_18-18-08', scaling=1.0, schedule=[200, 300], seed=42, start_epoch=57, type='float32', workers=32) 3 | Claimed accuracies are: 71.80% top-1, 91.00% top-5 4 | Best accuracy is 72.10% top-1 5 | -------------------------------------------------------------------------------- /results/mobilenet_v2_1.0_224/results.csv: -------------------------------------------------------------------------------- 1 | epoch,val_error1,val_error5,val_loss,train_error1,train_error5,train_loss 2 | 1,0.87636,0.69004,4.542515543042397,0.950774567250015,0.857963872001074,4.962518692016602 3 | 2,0.773,0.5313,3.7507492072728215,0.8489861196861923,0.6558637554666956,4.086591720581055 4 | 3,0.69996,0.44167999999999996,3.283305904086755,0.781247097372942,0.5564270700072669,3.6509053707122803 5 | 
4,0.65852,0.39783999999999997,3.0447275839289842,0.7334500498373748,0.4951774436900107,3.4058666229248047 6 | 5,0.6186400000000001,0.35392,2.809631197428217,0.6963690135634153,0.4526256139909941,3.6249983310699463 7 | 6,0.58394,0.32202,2.6313758638440348,0.668966653059281,0.4216858536006626,3.096695899963379 8 | 7,0.5683400000000001,0.30591999999999997,2.5339085836799775,0.645010369452226,0.3968093152571054,2.7435855865478516 9 | 8,0.54116,0.28459999999999996,2.4030141787869588,0.6257763429748034,0.37737390988060104,2.929133653640747 10 | 9,0.52766,0.27118,2.3276406885409844,0.6101132795334254,0.36214560631049664,2.9608848094940186 11 | 10,0.5129,0.25751999999999997,2.2429519253117696,0.5959995847535879,0.34778526140620236,2.5928611755371094 12 | 11,0.50378,0.24750000000000005,2.1929043431063087,0.5835952690008406,0.3361763142509915,2.7333974838256836 13 | 12,0.49314,0.24314000000000002,2.14561635164582,0.5728503778195972,0.325756907569427,2.7042078971862793 14 | 13,0.48109999999999997,0.23319999999999996,2.086567188099939,0.5627853355573473,0.31677915525454525,2.805424928665161 15 | 14,0.47638,0.22853999999999997,2.0634501403083605,0.5537427985578773,0.30856164731061597,2.65675687789917 16 | 15,0.46831999999999996,0.22155999999999998,2.016384353443068,0.5456025639124329,0.30115980196180514,2.6708271503448486 17 | 16,0.46074000000000004,0.21518000000000004,1.9784560176182766,0.5377932775352472,0.2943035529325997,2.493051528930664 18 | 17,0.45154000000000005,0.20952000000000004,1.94295776498561,0.5308199477507616,0.28846746755106867,2.4177756309509277 19 | 18,0.44910000000000005,0.20831999999999995,1.9305041386764876,0.5250119617504978,0.28239175689039764,2.615767478942871 20 | 19,0.44138,0.20140000000000002,1.8923455577115624,0.5186778928898419,0.2777530173661982,2.276423692703247 21 | 20,0.43844000000000005,0.19886000000000004,1.8710846566424078,0.5143256109468946,0.2734756671066302,2.806741237640381 22 | 21,0.43266000000000004,0.19413999999999998,1.8387210898253383,0.5090398051151801,0.26955033965127106,2.523489236831665 23 | 22,0.43057999999999996,0.19130000000000003,1.8257429593679857,0.50468908424897,0.26513015087026126,2.289297103881836 24 | 23,0.42276,0.18974000000000002,1.8044523627174145,0.49942903618341716,0.2614686453834668,2.399625539779663 25 | 24,0.41703999999999997,0.18438,1.773847105551739,0.4949987004036164,0.25770723098549997,2.239906072616577 26 | 25,0.42242,0.18598000000000003,1.7875437803414402,0.49102810172288236,0.2552344854339832,2.3823583126068115 27 | 26,0.41246000000000005,0.17991999999999997,1.7455372013607804,0.4886380932384303,0.25170801308494517,2.381643772125244 28 | 27,0.40891999999999995,0.17956000000000005,1.7315849400296504,0.48493599975647206,0.24940074166755777,2.645637035369873 29 | 28,0.40912000000000004,0.17720000000000002,1.7258447283992961,0.4817248649083219,0.24607096498739045,2.3543550968170166 30 | 29,0.40790000000000004,0.17618,1.7285111278903729,0.47815234079554036,0.24424528574338866,2.325364828109741 31 | 30,0.40288,0.17344000000000004,1.6978594615143172,0.47555470910505815,0.24185683833567362,2.342536449432373 32 | 31,0.40084,0.17154000000000003,1.6951073170924673,0.4727783341281816,0.23963620667719354,2.323347330093384 33 | 32,0.39908,0.17084,1.6804843186115732,0.470320418805667,0.23717516920120485,2.193652629852295 34 | 33,0.39734,0.16813999999999996,1.6652359427237997,0.46778366910793046,0.23540568871973755,2.0086278915405273 35 | 
34,0.39566,0.16591999999999996,1.6520183034697358,0.46577144119384906,0.23368069892527676,2.0371596813201904 36 | 35,0.39326000000000005,0.16676000000000002,1.652386143499491,0.46242215105446827,0.231104141770745,2.3517253398895264 37 | 36,0.39112,0.16335999999999995,1.6332066758554808,0.46136217994999873,0.22971790562822803,2.081660032272339 38 | 37,0.39344,0.16603999999999997,1.6454015015339365,0.4583844260740403,0.22798979368029304,1.8552772998809814 39 | 38,0.38917999999999997,0.16388000000000003,1.6263510600036504,0.45674061226990703,0.22666365899215324,2.2575225830078125 40 | 39,0.3873,0.16213999999999995,1.6301701233095052,0.4548876141830066,0.22536484314691219,2.257481336593628 41 | 40,0.38882000000000005,0.16447999999999996,1.6359720722753175,0.4531298417770673,0.22369995480682847,1.805450439453125 42 | 41,0.38408,0.15849999999999997,1.5992635695301756,0.45098492234033505,0.22247997333680936,2.282794952392578 43 | 42,0.38260000000000005,0.15824000000000005,1.598956935563866,0.4496814232648827,0.22115696080214364,2.001486301422119 44 | 43,0.38056,0.15537999999999996,1.5862606994959774,0.44853481240150583,0.21958183437444145,1.9899569749832153 45 | 44,0.38246,0.15561999999999998,1.5893803993049933,0.446424236652989,0.2185242048850774,2.0784833431243896 46 | 45,0.37673999999999996,0.15391999999999995,1.5783084369435603,0.4449021868343471,0.21774756920838578,1.9169375896453857 47 | 46,0.37632,0.15676,1.5810132345982961,0.4432443233395802,0.21624425231058875,1.5878392457962036 48 | 47,0.37668,0.15269999999999995,1.5664258556706565,0.4418206213553737,0.21519755035838417,1.9191981554031372 49 | 48,0.37448000000000004,0.15081999999999995,1.5576741899154625,0.4412211678883393,0.21441545091311276,2.4436161518096924 50 | 49,0.37317999999999996,0.15152,1.5522630512714386,0.43982478474703146,0.213112732376029,2.3768861293792725 51 | 50,0.36966,0.15061999999999998,1.540643642751538,0.438268391240174,0.212189355486053,1.751754879951477 52 | 51,0.37050000000000005,0.14980000000000004,1.5484796733576425,0.4377860185284198,0.21147126018700135,2.7214183807373047 53 | 52,0.37160000000000004,0.15012000000000003,1.5466054462048473,0.43589867675330385,0.21059237398403174,1.9598058462142944 54 | 53,0.36560000000000004,0.14790000000000003,1.5299230791172203,0.43491129571710796,0.20949883972971517,1.9581555128097534 55 | 54,0.37166,0.15069999999999995,1.550515380440926,0.43407768073951325,0.20915462230919157,2.1704320907592773 56 | 55,0.36684000000000005,0.14766,1.5247034284533287,0.4327874508163261,0.20783785408147415,2.221508502960205 57 | 56,0.36728000000000005,0.14754,1.52359773918074,0.4318336329299771,0.2070190693328816,1.8880579471588135 58 | 57,0.36444,0.14639999999999997,1.517334712099056,0.4304325665584581,0.20586621416255646,1.9638687372207642 59 | 58,0.36363999999999996,0.14424000000000003,1.5078425189974356,0.4297792559439948,0.20555790150698539,2.022902727127075 60 | 58,0.3628,0.14434000000000002,1.5044886357322032,0.4326321236809877,0.2080595269781379,1.9758371114730835 61 | 59,0.36178,0.14593999999999996,1.5078644716009801,0.43207091659401153,0.20753812734795696,1.8818944692611694 62 | 60,0.36075999999999997,0.14302000000000004,1.496679155041977,0.4303794899493977,0.2060449574489508,1.7651091814041138 63 | 61,0.36002,0.14176,1.4994472912987884,0.4297308625651457,0.20552589943387556,2.120110034942627 64 | 62,0.36178,0.14237999999999995,1.497396040327695,0.4289885705766695,0.20468604014933256,1.761661410331726 65 | 
63,0.36158,0.14188,1.506163362489671,0.42793250216404266,0.20416229890404602,1.7747373580932617 66 | 64,0.36051999999999995,0.14258000000000004,1.4905010197235613,0.42680072152966786,0.20317101517600755,1.8008158206939697 67 | 65,0.35966,0.14026000000000005,1.4899917544454944,0.42645338195567006,0.20258561139960674,1.9646612405776978 68 | 66,0.35594000000000003,0.13827999999999996,1.4787756531518332,0.42527945224939445,0.20256297578691929,1.9614700078964233 69 | 67,0.35895999999999995,0.14159999999999995,1.491242267647568,0.4245722844875024,0.20168174796884408,1.884845495223999 70 | 68,0.354,0.13954,1.4788883124687233,0.42335542517095737,0.20056089487162876,1.8759682178497314 71 | 69,0.35702,0.14102000000000003,1.4860102757811546,0.4224640503540912,0.20053123441362442,1.7282379865646362 72 | 70,0.35762000000000005,0.13924000000000003,1.478301769008442,0.42237819113355246,0.19996768571154266,1.9789849519729614 73 | 71,0.35546,0.14022,1.4705861118070933,0.42111684113000103,0.19888976222459676,2.078164577484131 74 | 72,0.35778,0.14129999999999998,1.487980466107933,0.42067349533667353,0.1983761679781012,1.7479149103164673 75 | 73,0.3516,0.13859999999999995,1.4618446416392619,0.4196564538424733,0.19827157583671762,2.150721311569214 76 | 74,0.3548,0.13914000000000004,1.476715939537603,0.419308333730107,0.19792501680108843,2.1877875328063965 77 | 75,0.35274000000000005,0.13724000000000003,1.4658914363505888,0.4188119113277192,0.19732478279568555,1.7758780717849731 78 | 76,0.35316000000000003,0.13658000000000003,1.458780087530613,0.4182350934733723,0.19677996701444855,1.889710545539856 79 | 77,0.35418000000000005,0.13978000000000002,1.4672029203906352,0.41816328394346713,0.19634208498970085,2.0606274604797363 80 | 78,0.3539,0.13832,1.4656188442390792,0.4166708945828296,0.19580273297704354,1.915697693824768 81 | 79,0.35044,0.13570000000000004,1.4525376824395997,0.4158661595248707,0.19533909318613418,2.050994634628296 82 | 80,0.35222,0.13593999999999995,1.4500670863353475,0.41559453217262077,0.1950331221456687,2.2546889781951904 83 | 81,0.35182,0.13507999999999998,1.4510837872417606,0.4148779979503062,0.19462255896381975,1.6206960678100586 84 | 82,0.35258,0.138,1.458730237520471,0.4147999441134528,0.19443835190884562,2.03367018699646 85 | 83,0.34975999999999996,0.13504000000000005,1.445097810911889,0.4138664202246858,0.19349311994455054,2.321037530899048 86 | 84,0.35178,0.13683999999999996,1.4599812366828626,0.41388046991531935,0.192969378699264,1.6113821268081665 87 | 85,0.34809999999999997,0.13460000000000005,1.446339165069619,0.4130226582483002,0.19295142631678774,2.0378034114837646 88 | 86,0.34819999999999995,0.13380000000000003,1.4453940805123777,0.4125683849178132,0.1922348920944732,2.0336618423461914 89 | 87,0.34904,0.13568000000000002,1.4472587893203812,0.4122311923426064,0.19225440555368656,1.7155957221984863 90 | 88,0.34807999999999995,0.13388,1.440583700276151,0.4117566250145376,0.1922348920944732,1.8109713792800903 91 | 89,0.34397999999999995,0.13427999999999995,1.434446205442049,0.41078953797592355,0.19092749032717826,1.8499956130981445 92 | 90,0.34287999999999996,0.13132,1.422138729265758,0.4102767242677965,0.19099227501176663,2.018927812576294 93 | 91,0.34516,0.13278,1.4335458006481736,0.4104617118611391,0.19052317145227748,1.7046505212783813 94 | 92,0.35014,0.13451999999999997,1.4433092005094703,0.40984430601162847,0.19035769731814822,1.6231908798217773 95 | 93,0.3467,0.13249999999999995,1.4290206484040435,0.4087047199935684,0.1896840927061031,1.4715851545333862 96 | 
94,0.34452000000000005,0.13241999999999998,1.4333686264497894,0.40867349845882694,0.19029915694050814,1.8285441398620605 97 | 95,0.34487999999999996,0.131,1.4205217431394421,0.408298059503562,0.18924855229646098,2.070307970046997 98 | 96,0.34668,0.13256,1.4240419150007015,0.40793042593198237,0.18867875928743094,1.5794494152069092 99 | 97,0.34202,0.13068000000000002,1.4144516710420043,0.4073746826135859,0.18811599112371769,1.9048686027526855 100 | 98,0.34334,0.13232,1.421217414189358,0.4060860137671357,0.18764376541075445,2.2464044094085693 101 | 99,0.34482,0.13151999999999997,1.421469394649778,0.406092258074084,0.18798173852432976,1.7740395069122314 102 | 100,0.34134,0.13168000000000002,1.417004561393845,0.40670185853990937,0.18792397868505828,1.6757746934890747 103 | 101,0.34256,0.13270000000000004,1.4224224174205138,0.4054967072988923,0.18735730782950233,1.7895927429199219 104 | 102,0.34575999999999996,0.13248000000000004,1.4236561058431256,0.4052570820197523,0.18690225396064686,1.8590813875198364 105 | 103,0.34246,0.13180000000000003,1.4165266274797672,0.4044640550373214,0.18655335330991196,1.8300749063491821 106 | 104,0.34131999999999996,0.13061999999999996,1.410558123825764,0.40474660992673084,0.18622006342654784,1.9176232814788818 107 | 105,0.34236,0.13149999999999995,1.4121536501512235,0.40437429312494,0.18637617110025473,1.9661999940872192 108 | 106,0.34065999999999996,0.13066,1.4083658771551386,0.40381933034491213,0.18644485847668568,1.807721734046936 109 | 107,0.3407,0.12919999999999998,1.4010792622456745,0.40326046487304157,0.18544186667311913,1.9673396348953247 110 | 108,0.34128000000000003,0.12883999999999995,1.4014976812260491,0.40252519772988216,0.18529044222962343,1.9356764554977417 111 | 109,0.34187999999999996,0.13022,1.4025805271705802,0.40345950215701776,0.1855964132700889,1.482975721359253 112 | 110,0.33872,0.12773999999999996,1.395684266881067,0.4013660982526087,0.18464571753721415,1.714686393737793 113 | 111,0.33530000000000004,0.12738000000000005,1.3822587748267212,0.40179695543203975,0.18452941732030248,1.885390043258667 114 | 112,0.34148,0.12885999999999997,1.3990216536485418,0.4013754647130312,0.18430774442363873,2.047757387161255 115 | 113,0.33853999999999995,0.12878,1.399475784934297,0.40160338191664313,0.18406109429918194,1.968278169631958 116 | 114,0.33716,0.12760000000000005,1.3963694143660215,0.40164553098854405,0.1847698231378111,1.8032702207565308 117 | 115,0.33818000000000004,0.12702000000000002,1.3866010443592558,0.40080176901215847,0.1832961666980183,1.8015871047973633 118 | 116,0.33999999999999997,0.12861999999999996,1.3966688872600088,0.4005043838937469,0.1837824421016152,1.9307464361190796 119 | 117,0.33802,0.12641999999999998,1.3886919134125417,0.39942255771495827,0.18331099692702046,1.8226401805877686 120 | 118,0.33974000000000004,0.12868000000000002,1.3972170015378875,0.400143775167484,0.1827505703784128,1.676957130432129 121 | 119,0.33672,0.12738000000000005,1.394286530662556,0.3994077274859562,0.18312678987204634,2.1075754165649414 122 | 120,0.34019999999999995,0.12904000000000004,1.393537344068897,0.3983251207687991,0.18213316452890216,2.0469870567321777 123 | 121,0.33472,0.12744,1.3834522812038053,0.3989550152322062,0.1827427649947274,1.6771570444107056 124 | 122,0.33908000000000005,0.12868000000000002,1.3981215088647239,0.3978427480570449,0.1817600671887427,2.0505878925323486 125 | 123,0.33855999999999997,0.12951999999999997,1.3959177372102836,0.39810578948724096,0.18167889119841518,1.7897387742996216 126 | 
124,0.3347,0.1281,1.383850973783707,0.3981893070926741,0.18179206926185265,1.8031656742095947 127 | 125,0.33626,0.12683999999999995,1.3860063463145373,0.39721753682384886,0.18115280833802305,1.8821970224380493 128 | 126,0.33504,0.12729999999999997,1.383746634028396,0.39756721801295225,0.1812815971688312,1.6352322101593018 129 | 127,0.3346,0.12580000000000002,1.3743233954419896,0.3967086258075645,0.18063218924621072,1.6323447227478027 130 | 128,0.33496000000000004,0.12594000000000005,1.3749085050456378,0.39721675628548037,0.1809412824401503,1.9771372079849243 131 | 129,0.3355,0.12568000000000001,1.3783958984576925,0.39588906052060346,0.18045188488307928,1.6234573125839233 132 | 130,0.33658,0.12661999999999995,1.3854433600695766,0.3958750108299699,0.18075083107822787,1.969896912574768 133 | 131,0.33431999999999995,0.12707999999999997,1.376688435521661,0.3955276712559721,0.1800467854698099,1.695007562637329 134 | 132,0.33364000000000005,0.12480000000000002,1.3706901762254384,0.3956783151610992,0.18000775855138318,1.955890417098999 135 | 133,0.33831999999999995,0.12851999999999997,1.3892426049830962,0.39509993623001527,0.17929981025112263,1.7791192531585693 136 | 134,0.33424,0.12548000000000004,1.3749862368009529,0.3947541577327546,0.17971739827828848,1.8333827257156372 137 | 135,0.33364000000000005,0.12407999999999997,1.3660167469357958,0.39471356973759075,0.1793130794033877,1.7552043199539185 138 | 136,0.33533999999999997,0.12538000000000005,1.380402431196096,0.39437871877748953,0.1790672098172994,1.7434742450714111 139 | 137,0.33287999999999995,0.12402000000000002,1.3665181185517992,0.39432252001495516,0.17894856798528214,1.6125397682189941 140 | 138,0.33331999999999995,0.12663999999999997,1.371100284767394,0.39359349717874403,0.17904691581971754,1.7953696250915527 141 | 139,0.33724,0.12639999999999996,1.386332471607899,0.3937542880826621,0.17859186195086196,1.512040615081787 142 | 140,0.33286000000000004,0.12363999999999997,1.3697070900578887,0.39349046611409755,0.17905628228013992,1.9823888540267944 143 | 141,0.33420000000000005,0.12402000000000002,1.3714845843461094,0.39344519488872254,0.17898291167349767,1.9781737327575684 144 | 142,0.33416,0.12597999999999998,1.3722809828361686,0.3934608056560932,0.17893685990975416,1.7926490306854248 145 | 143,0.33246,0.12341999999999997,1.3668370403501453,0.3930252652464511,0.17786361965301944,1.827878475189209 146 | 144,0.33304,0.12605999999999995,1.3713278893609435,0.39264982629118605,0.17760760306814027,1.9957245588302612 147 | 145,0.33352000000000004,0.12536000000000003,1.370246750055527,0.3921783811165913,0.1778331786566466,2.0329599380493164 148 | 146,0.33268,0.12363999999999997,1.3602561489964018,0.391897387303919,0.1772485554186144,1.5629981756210327 149 | 147,0.33198000000000005,0.12473999999999996,1.3730061472374566,0.3918841181516539,0.17758106476361002,2.063603639602661 150 | 148,0.33384,0.12412000000000001,1.3657630227354107,0.3916101491842984,0.17743198193522003,1.8590593338012695 151 | 149,0.33076000000000005,0.12258000000000002,1.3636675303687855,0.3913112029891498,0.17664129656789473,1.6879324913024902 152 | 150,0.33116,0.12424000000000002,1.3615460050653438,0.3911199710888589,0.17663193010747236,1.7845358848571777 153 | 151,0.33204,0.12356,1.3674013109839693,0.39127529822419715,0.17627756568815778,1.9301847219467163 154 | 152,0.33465999999999996,0.12502000000000002,1.3743915785940326,0.39058686338314985,0.17691994876546147,2.0214202404022217 155 | 
153,0.33255999999999997,0.12441999999999998,1.3717224064226052,0.3906750642187943,0.1763142509914789,1.6367316246032715 156 | 154,0.3319,0.12370000000000003,1.3597642867540827,0.39005765836928363,0.176123019091188,1.6672499179840088 157 | 155,0.33118000000000003,0.12226000000000004,1.3568798527121544,0.3902793312659474,0.17659758641925682,1.9957219362258911 158 | 156,0.33096000000000003,0.12168000000000001,1.3545632570677875,0.3898703291608354,0.17581002320540573,2.273993968963623 159 | 157,0.3296,0.12456,1.361733158480148,0.38950972043457255,0.1760754062507074,1.778895616531372 160 | 158,0.32918000000000003,0.12138000000000004,1.3516635429494235,0.38965412003275135,0.17583500043319877,1.4707447290420532 161 | 159,0.33033999999999997,0.12456,1.3648809007540041,0.3893473684539174,0.1757179196779186,1.4748791456222534 162 | 160,0.33055999999999996,0.12129999999999996,1.3537382580796067,0.38975715109739795,0.175680453836229,1.6903935670852661 163 | 161,0.32976000000000005,0.12244,1.3579227547256314,0.38896490465333555,0.17575226336613414,2.0552234649658203 164 | 162,0.32924,0.12190000000000001,1.3578496117677008,0.38866205576634427,0.1749397229244899,1.8416731357574463 165 | 163,0.32996000000000003,0.12224000000000002,1.356741189044349,0.3892849253844347,0.1751902757407895,1.987379789352417 166 | 164,0.33018000000000003,0.12290000000000001,1.3606619112649743,0.38808367683526035,0.1746930728000331,1.8900387287139893 167 | 165,0.33209999999999995,0.12292000000000003,1.3606193648005018,0.38783624617243495,0.17487181608642743,1.517669677734375 168 | 166,0.32986000000000004,0.12226000000000004,1.3566604069605166,0.38862146777118045,0.1748921100840094,2.0208449363708496 169 | 167,0.33136,0.12285999999999997,1.3520680486547703,0.3881500225965858,0.17436212453177458,1.3477116823196411 170 | 168,0.32858,0.12282000000000004,1.3512542938091316,0.3882522731228638,0.17462594650033914,1.821790337562561 171 | 169,0.33036,0.12426000000000004,1.3665044242326094,0.38778863333195435,0.17472039164293185,1.7848252058029175 172 | 170,0.33104,0.12212,1.364355101117066,0.38779097494705994,0.1744034930653069,1.786442756652832 173 | 171,0.33176000000000005,0.12312000000000001,1.3594070457074108,0.38719776578697396,0.17423958000791462,1.6653246879577637 174 | 172,0.33086000000000004,0.12329999999999997,1.3580500174845969,0.38715327509996744,0.17379623421458712,1.6310573816299438 175 | 173,0.32928,0.12072000000000005,1.3510150079216277,0.3872719169319847,0.17363778492577475,1.8001431226730347 176 | 174,0.32952000000000004,0.12024000000000001,1.3497700964917942,0.38674505353322397,0.17340674556868851,1.7307676076889038 177 | 175,0.33206,0.12158000000000002,1.3608363606795972,0.38631887958400424,0.17329825073546223,1.7076865434646606 178 | 176,0.32968,0.12161999999999995,1.3540899133195683,0.3866911963857952,0.17381340605869489,1.9812871217727661 179 | 177,0.32976000000000005,0.12094000000000005,1.3483448314423463,0.38620804313567236,0.17335366895962823,2.0024290084838867 180 | 178,0.32964000000000004,0.12228000000000006,1.348482334036,0.386825448985183,0.173167900827917,1.9468704462051392 181 | 179,0.32687999999999995,0.11972000000000005,1.3410669110563336,0.38553053583178465,0.1728775405548223,1.8588019609451294 182 | 180,0.3284,0.11982000000000004,1.3391122889457916,0.38596529570305826,0.17298213269620588,2.0952301025390625 183 | 181,0.32806,0.12102000000000002,1.340954922899908,0.38571552342512727,0.1730679919167446,1.3847062587738037 184 | 
182,0.32672,0.12172000000000005,1.3450850084119914,0.38502552750734287,0.17256922789925122,1.6920965909957886 185 | 183,0.32977999999999996,0.12117999999999995,1.3474974884670607,0.38558595405595053,0.17355582839707862,1.373466968536377 186 | 184,0.33006,0.12185999999999997,1.3516445512674293,0.38493342397985586,0.17255751982372325,1.7804464101791382 187 | 185,0.32842000000000005,0.12161999999999995,1.3508442185667096,0.3850996786523537,0.1722335964007815,1.7823048830032349 188 | 186,0.32794,0.12160000000000004,1.3429109527140248,0.38512621695688387,0.17229291731679008,1.5419081449508667 189 | 187,0.32892,0.12080000000000002,1.351434744742452,0.3845291051049551,0.17240375376512196,1.6427465677261353 190 | 188,0.32682,0.11929999999999996,1.339794018590937,0.38434958128019225,0.17167629200564793,1.7369297742843628 191 | 189,0.32654000000000005,0.12051999999999996,1.344602782507332,0.38419035145301117,0.17153345348420623,1.5927283763885498 192 | 190,0.32962,0.12136000000000002,1.3490819102343248,0.38493966828680415,0.1723280415433741,2.249014139175415 193 | 191,0.32748,0.11931999999999998,1.3376863943983097,0.3841770823007461,0.17147335202982905,1.6970582008361816 194 | 192,0.33094,0.12114000000000003,1.3508079924753733,0.3842652831363905,0.17197992143100782,1.9052002429962158 195 | 193,0.32628,0.11951999999999996,1.3389866517818705,0.3841341526904767,0.17186596282920186,1.9810820817947388 196 | 194,0.32742000000000004,0.12072000000000005,1.3382810415053854,0.38351830791770314,0.17150691517967598,1.8482855558395386 197 | 195,0.32611999999999997,0.12197999999999998,1.3418390495436532,0.3834379124657441,0.17080052795615253,1.760284662246704 198 | 196,0.32733999999999996,0.12105999999999995,1.3527790142261251,0.38307886481621833,0.17125870397848209,2.1829652786254883 199 | 197,0.32524,0.12031999999999998,1.3381113875277189,0.38312803873343604,0.17094414701596283,1.7939399480819702 200 | 198,0.32672,0.11972000000000005,1.3369813426112642,0.38405219616178066,0.17111742653377737,1.7356312274932861 201 | 199,0.32587999999999995,0.11912,1.3296452649697965,0.3826526908669986,0.1706686169718702,1.9302573204040527 202 | 200,0.32602,0.12036000000000002,1.3351778505103928,0.3834433762343239,0.17123372675068904,1.7754786014556885 203 | 201,0.29576,0.10182000000000002,1.2009120671724787,0.3468345656733275,0.14868709543720682,1.3623616695404053 204 | 202,0.29486,0.10152000000000005,1.1953795786414827,0.3388715132375405,0.14332869953721883,1.3536754846572876 205 | 203,0.29269999999999996,0.10089999999999999,1.185137810299591,0.33569394153923726,0.1418152356406308,1.543792963027954 206 | 204,0.29300000000000004,0.09955999999999998,1.182703121584289,0.3341149124196924,0.1401011733833294,1.6819078922271729 207 | 205,0.29086,0.09997999999999996,1.1833170456241588,0.3324804650759815,0.13933546524379725,1.2035505771636963 208 | 206,0.29059999999999997,0.09975999999999996,1.1780648886853335,0.3316882186319192,0.13888977783536416,1.6787493228912354 209 | 207,0.29103999999999997,0.09962000000000004,1.1785606378499343,0.33008733443805527,0.13768072390250452,1.5539606809616089 210 | 208,0.29035999999999995,0.09884000000000004,1.1755531206727028,0.32921078985019125,0.1373521172493516,1.5762476921081543 211 | 209,0.29046000000000005,0.09919999999999995,1.1750773305491524,0.328978189416368,0.13668631802099185,1.6327217817306519 212 | 210,0.28996,0.09870000000000001,1.1725009731492217,0.3279174377735299,0.13649118342885824,1.4756286144256592 213 | 
211,0.2885,0.09940000000000004,1.173772199421513,0.32768561787807526,0.1363655167515242,1.3316091299057007 214 | 212,0.28828,0.09872000000000003,1.1693448223629777,0.32670526168719616,0.13602988525305448,1.254042148590088 215 | 213,0.2875,0.09838000000000002,1.1713749083633325,0.3263899241863083,0.13556312330867093,1.333986759185791 216 | 214,0.2875,0.09931999999999996,1.1713830298002885,0.3261362492165346,0.13503001560296202,1.1169458627700806 217 | 215,0.28732,0.09841999999999995,1.1636850917521788,0.32552586821234075,0.1353765746385912,1.6317484378814697 218 | 216,0.28824000000000005,0.09848000000000001,1.1712204356278693,0.32533619738878694,0.13507372575159993,1.6965199708938599 219 | 217,0.28718,0.09814,1.168162159013505,0.32419739190909536,0.13412927432567334,1.4101394414901733 220 | 218,0.28724000000000005,0.09824,1.1650387311772423,0.32420597783114924,0.13422606108337165,1.3921929597854614 221 | 219,0.28779999999999994,0.09874000000000005,1.1689309004922301,0.3241700730661967,0.1340036076483394,1.5170810222625732 222 | 220,0.28688,0.09930000000000005,1.1700147206381875,0.32385551610367735,0.13378661798188685,1.3719569444656372 223 | 221,0.28776,0.09843999999999997,1.1635940359259138,0.3231342986511516,0.13339947095109383,1.3747777938842773 224 | 222,0.28776,0.09950000000000003,1.167650925109581,0.32390937325110625,0.13388184366284805,1.3441439867019653 225 | 223,0.28906,0.09802,1.1686900537841174,0.323106979808253,0.1336148995408093,1.390040636062622 226 | 224,0.28666,0.09855999999999998,1.1664045352716834,0.32218906668685654,0.13299671315293016,1.39087975025177 227 | 225,0.28618,0.09916000000000003,1.1699883554662978,0.3221765780729601,0.13299281046108746,1.3931031227111816 228 | 226,0.28632,0.09824,1.1653690452174263,0.32175040412374034,0.1325315122852837,1.518581748008728 229 | 227,0.28628,0.09748000000000001,1.1652819657204103,0.3216559589811476,0.13240896776142375,1.5165783166885376 230 | 228,0.28596,0.09828000000000003,1.1674375570550257,0.32135623224763044,0.13265717896261764,1.460974931716919 231 | 229,0.28496,0.09774000000000005,1.1621600949034399,0.3217613316608998,0.13255883112818234,1.4585870504379272 232 | 230,0.2865,0.09874000000000005,1.1650288976272758,0.32114314527302057,0.13241833422184612,1.2283737659454346 233 | 231,0.28664,0.09775999999999996,1.1667254761773713,0.3213320355582059,0.1325721002804474,1.7243760824203491 234 | 232,0.28603999999999996,0.09853999999999996,1.1647493197601668,0.3204773460446608,0.13172365507385064,1.4624059200286865 235 | 233,0.28637999999999997,0.09743999999999997,1.162021282984286,0.32076692577938704,0.13221149155418455,1.2388945817947388 236 | 234,0.2861,0.09784000000000004,1.162419495381871,0.32051247027124485,0.13197420789015013,1.3104870319366455 237 | 235,0.28544,0.0978,1.1597096726906544,0.3207411680132255,0.13195079173909408,1.482568383216858 238 | 236,0.28618,0.09743999999999997,1.1652906342428557,0.32022445161325575,0.13163155154636363,1.4615572690963745 239 | 237,0.28576,0.09730000000000005,1.162250198119757,0.3201011265510273,0.1316893113856351,1.5221716165542603 240 | 238,0.28513999999999995,0.09714,1.1622531062790327,0.32022835430509844,0.13142548941707055,1.5237981081008911 241 | 239,0.28525999999999996,0.09796000000000005,1.1609330093678163,0.3205811576476759,0.13164013746841752,1.3910331726074219 242 | 240,0.28681999999999996,0.09814,1.1646373595814317,0.31933151571965246,0.13111951837660507,1.672351598739624 243 | 
241,0.28488,0.09797999999999996,1.1626177492494485,0.3196968076761265,0.13111561568476238,1.281935453414917 244 | 242,0.2863,0.09826000000000001,1.1637172508908777,0.31929014718612014,0.13116322852524298,1.5361124277114868 245 | 243,0.28562,0.09785999999999995,1.1625163973290094,0.3192042879655814,0.13136538796269337,1.4821182489395142 246 | 244,0.28676,0.09775999999999996,1.1663778947324168,0.31928468341754046,0.13093375024489395,1.2306859493255615 247 | 245,0.28566,0.09806000000000004,1.162704632902632,0.31894592976559655,0.13091735893915468,1.2798130512237549 248 | 246,0.2863,0.09777999999999998,1.1695940007968826,0.31910203743930343,0.13050367360383153,1.5869418382644653 249 | 247,0.28708,0.09811999999999999,1.1656807256596429,0.31899198152934005,0.13077452041771287,1.4907066822052002 250 | 248,0.2851,0.09674000000000005,1.165356198743898,0.31904974136861164,0.13093375024489395,1.237041711807251 251 | 249,0.28502000000000005,0.09787999999999997,1.1628973920430457,0.318341793068351,0.13053411460020437,1.3648252487182617 252 | 250,0.28664,0.09728000000000003,1.1602703757127937,0.3187734307861504,0.1309165784007862,1.5492216348648071 253 | 251,0.28647999999999996,0.09758,1.1676027233503303,0.31835194006714196,0.1305122595258854,1.4605258703231812 254 | 252,0.28702000000000005,0.09762000000000004,1.1645129041708246,0.318536147122116,0.13024921809568935,1.474815845489502 255 | 253,0.28681999999999996,0.09782000000000002,1.1638797980790236,0.31782195451490713,0.12991046444374543,1.5682870149612427 256 | 254,0.28458000000000006,0.09760000000000002,1.1618584180060698,0.31839565021577987,0.13038971500202545,1.1924995183944702 257 | 255,0.2863,0.09772000000000003,1.1596259708915437,0.3182840332290794,0.12980431122562475,1.402076005935669 258 | 256,0.28528,0.09719999999999995,1.1607328860127195,0.3183730146030923,0.1300173982002346,1.4191279411315918 259 | 257,0.28684,0.09728000000000003,1.1626630498438466,0.31801708910704074,0.1301180876497755,1.2934874296188354 260 | 258,0.28790000000000004,0.09770000000000001,1.1655453297556664,0.31816383032032514,0.13018209179599538,1.352848768234253 261 | 259,0.28644000000000003,0.09730000000000005,1.1633181395579357,0.31786488412517655,0.13014384541593715,1.6732611656188965 262 | 260,0.28474,0.09799999999999998,1.1652604628886496,0.3175370580103921,0.1300259841222885,1.6905052661895752 263 | 261,0.28557999999999995,0.09899999999999998,1.1678213694570017,0.3178843975843899,0.12995729674585754,1.6134799718856812 264 | 262,0.28542,0.09816000000000003,1.1640953330361112,0.317602623233349,0.13002520358392,1.5045270919799805 265 | 263,0.28584,0.09836,1.1621011506233896,0.3177790249046377,0.12955453894769375,1.3974053859710693 266 | 264,0.28586,0.09809999999999997,1.164080550628049,0.31733021534273054,0.1298784623706355,1.344885230064392 267 | 265,0.28535999999999995,0.09792000000000001,1.160392901423026,0.3176049648484546,0.13000881227818073,1.3594316244125366 268 | 266,0.28564,0.09787999999999997,1.1616301760077477,0.31743090479227143,0.13004081435129067,1.4075887203216553 269 | 267,0.28459999999999996,0.09718000000000004,1.1589900437666445,0.31741685510163775,0.12932974389755592,0.995049774646759 270 | 268,0.28652,0.09767999999999999,1.1668897678657455,0.3178289793602239,0.12979806691867646,1.6664389371871948 271 | 269,0.28647999999999996,0.09736,1.162663810867436,0.3175854513892412,0.12957014971506442,1.367200255393982 272 | 270,0.28657999999999995,0.09806000000000004,1.1652297581336937,0.3172357702001378,0.12945463003652136,1.2179356813430786 273 | 
271,0.28506,0.09814,1.1616902673731044,0.31687438093550646,0.12932662174408172,1.5529403686523438 274 | 272,0.28437999999999997,0.09830000000000005,1.1637509907690846,0.3174605652502758,0.12959044371264639,1.4051215648651123 275 | 273,0.28659999999999997,0.09775999999999996,1.1677566006475566,0.3172115735107133,0.1291392925356335,1.2687708139419556 276 | 274,0.28600000000000003,0.09831999999999996,1.166702493721125,0.31677056933249137,0.12941248096462055,1.404902458190918 277 | 275,0.28547999999999996,0.09807999999999995,1.1634496036840944,0.31687281985876936,0.12930788882323696,1.4697972536087036 278 | 276,0.28624000000000005,0.09675999999999996,1.1620143559210154,0.31677759417780815,0.12920876045043306,1.3170980215072632 279 | 277,0.28674,0.09804000000000002,1.1646184723596185,0.31629209931257984,0.1293383298196098,1.3059169054031372 280 | 278,0.28635999999999995,0.09907999999999995,1.1708942181905921,0.31643259621891606,0.12886532356827796,1.4965498447418213 281 | 279,0.28637999999999997,0.09785999999999995,1.1670939060194152,0.316715931646694,0.12946399649694385,1.434260368347168 282 | 280,0.28732,0.09897999999999996,1.1656729177552827,0.31628819662073715,0.1289886486305064,1.2880463600158691 283 | 281,0.2862,0.09902,1.1672922381941153,0.3164724036757113,0.12920797991206456,1.798599362373352 284 | 282,0.28732,0.09952000000000005,1.1709075048565865,0.31588778043767907,0.12846646846195697,1.429523229598999 285 | 283,0.28732,0.09872000000000003,1.1688204898822063,0.31648255067450226,0.12902299231872194,1.4723329544067383 286 | 284,0.28642,0.09872000000000003,1.1684269611628688,0.31613677217724157,0.129046408469778,1.5173182487487793 287 | 285,0.28637999999999997,0.09830000000000005,1.1647243010146278,0.3161679937119829,0.12890981425528447,1.3678971529006958 288 | 286,0.28694,0.09770000000000001,1.1651380563877067,0.3159845671953774,0.1289917707839805,1.68368399143219 289 | 287,0.28652,0.09818000000000005,1.1672839566457027,0.31639825253070053,0.12880600265226938,1.568328857421875 290 | 288,0.28632,0.09841999999999995,1.1705666725732842,0.3155240495579421,0.12867253059125006,1.4330576658248901 291 | 289,0.28600000000000003,0.09787999999999997,1.162716093264064,0.31579099367998087,0.12847895707585355,1.4294620752334595 292 | 290,0.28632,0.09850000000000003,1.1644092282774496,0.31620233740019843,0.1287505844281034,1.4427961111068726 293 | 291,0.28656000000000004,0.09831999999999996,1.171740294263071,0.3156864015385973,0.12888483702749132,1.1945126056671143 294 | 292,0.2851,0.09758,1.1635771404419626,0.315909635511998,0.12863896744140302,1.1882871389389038 295 | 293,0.28722000000000003,0.09740000000000004,1.1668436875452801,0.31560522554826964,0.12846646846195697,1.3753578662872314 296 | 294,0.28647999999999996,0.09816000000000003,1.1679189659807148,0.3155302938648904,0.1286444312099828,1.0853772163391113 297 | 295,0.28580000000000005,0.09775999999999996,1.1687855509166816,0.31511973068304133,0.1281550336529118,1.4786341190338135 298 | 296,0.28502000000000005,0.09882000000000002,1.1694516261317291,0.3152758383567482,0.12819171895623283,1.1334468126296997 299 | 297,0.28606,0.09894000000000003,1.1699594785364307,0.31511895014467284,0.12811990942632767,1.6542750597000122 300 | 298,0.28603999999999996,0.09909999999999997,1.1666446392025267,0.31554590463226107,0.12873419312236423,1.3089770078659058 301 | 299,0.28756000000000004,0.09816000000000003,1.1697425814915676,0.31570669553617914,0.12833846016951733,1.6093177795410156 302 | 
300,0.28534000000000004,0.09819999999999995,1.165012318108763,0.31516031867820515,0.12848676245953883,1.205627679824829 303 | 301,0.28102000000000005,0.09563999999999995,1.1470822918475891,0.3075781689662628,0.12384099809002258,1.4135395288467407 304 | 302,0.27996,0.0958,1.1456861248125836,0.3055003758292244,0.1230924617945982,1.2179172039031982 305 | 303,0.28066,0.09587999999999997,1.1437748918423847,0.3053208520044616,0.12252266878556817,1.375562071800232 306 | 304,0.28078000000000003,0.09541999999999995,1.1454281300610425,0.30546759321774597,0.12277244106349916,1.1571636199951172 307 | 305,0.27988,0.09550000000000003,1.1431789538081811,0.3042749305906256,0.12234938926775352,1.6355183124542236 308 | 306,0.2803,0.09543999999999997,1.1432425238344135,0.3037246510408089,0.12172495857292609,1.4645755290985107 309 | 307,0.28059999999999996,0.09585999999999995,1.144162274745046,0.304624611779729,0.12240324641518241,1.193345546722412 310 | 308,0.28008,0.09587999999999997,1.143193772708883,0.303727773194283,0.12166329604181192,1.5162882804870605 311 | 309,0.28068000000000004,0.09563999999999995,1.140905540816638,0.30419765729214066,0.1219653643904347,1.362437129020691 312 | 310,0.28042,0.09521999999999997,1.141061536207491,0.3033453093937012,0.12180613456325362,1.4073697328567505 313 | 311,0.28071999999999997,0.09584000000000004,1.1414291428364054,0.30389715002025497,0.12165080742791534,1.4297659397125244 314 | 312,0.28098,0.09531999999999996,1.1433446340414943,0.3037433839616537,0.12145567283578174,1.1186245679855347 315 | 313,0.28034000000000003,0.09528000000000003,1.1406854804681272,0.3031564191085159,0.12160943889438303,1.2559459209442139 316 | 314,0.28015999999999996,0.0958,1.1448300895946366,0.3038807587145157,0.12152982398079248,1.1648215055465698 317 | 315,0.27998,0.09465999999999997,1.1412337007267135,0.3036903073525934,0.12144786745209646,1.526188611984253 318 | 316,0.28044,0.09541999999999995,1.1419667425204296,0.3032344729453693,0.12138152169077099,1.224618673324585 319 | 317,0.28059999999999996,0.09553999999999996,1.140627694373228,0.302886352833003,0.12113409102794559,1.3010437488555908 320 | 318,0.28084,0.09582000000000002,1.1426474409443992,0.3026037979435936,0.12116765417779263,1.3200660943984985 321 | 319,0.28025999999999995,0.09585999999999995,1.1427599174939855,0.30321339840941897,0.12149704136931405,1.2685788869857788 322 | 320,0.27959999999999996,0.09577999999999998,1.1428166678061291,0.3033531147773866,0.12120043678927106,1.5715079307556152 323 | 321,0.27980000000000005,0.09577999999999998,1.1413282329330638,0.30248125341973375,0.12150952998321063,1.1919312477111816 324 | 322,0.28056000000000003,0.09553999999999996,1.1407267483217376,0.3035412245242033,0.12144162314514817,1.3992949724197388 325 | 323,0.28008,0.09541999999999995,1.1438221902567514,0.3032414977906861,0.12136591092340032,1.7180503606796265 326 | 324,0.27936000000000005,0.09531999999999996,1.1425649000673879,0.3030042141266517,0.12134015315723867,1.1958168745040894 327 | 325,0.28084,0.09521999999999997,1.1407556048765475,0.30273648946624443,0.12149235813910286,1.2917311191558838 328 | 326,0.28078000000000003,0.09521999999999997,1.1402794533238119,0.30301592220217977,0.1213292256200792,1.327687382698059 329 | 327,0.27988,0.09545999999999999,1.1395867930687205,0.3028082989961496,0.12071181977056855,1.529713749885559 330 | 328,0.28025999999999995,0.09538000000000002,1.1415247891326339,0.30316890772241245,0.12148221114031188,1.328936219215393 331 | 
329,0.27971999999999997,0.09509999999999996,1.1387471854686737,0.30297143151517325,0.12126990470407062,1.4405319690704346 332 | 330,0.27922,0.09516000000000002,1.1420513234880505,0.30311661165172066,0.12096549474034224,1.374092936515808 333 | 331,0.2812,0.09514,1.1411895765637865,0.3030065557417573,0.12118404548353179,1.462759256362915 334 | 332,0.28037999999999996,0.09477999999999998,1.140563197129843,0.30218620991642775,0.12106618418988313,1.2137402296066284 335 | 333,0.28046000000000004,0.09518000000000004,1.1431308578775854,0.3023579283575053,0.12099437465997798,1.6493687629699707 336 | 334,0.28042,0.09538000000000002,1.1405088925848201,0.30242349358046217,0.12087339121285512,1.35067880153656 337 | 335,0.281,0.09585999999999995,1.1408935768567785,0.30227831344391476,0.12089914897901677,1.467899203300476 338 | 336,0.28046000000000004,0.09567999999999999,1.1423401628829994,0.3027076095466087,0.121518896443633,1.4779881238937378 339 | 337,0.28036000000000005,0.09558,1.143358745441145,0.3022049428372726,0.12077738499352542,1.5004267692565918 340 | 338,0.27980000000000005,0.09496000000000004,1.1423664456423448,0.3019059966421239,0.12062127731981853,1.3651176691055298 341 | 339,0.281,0.09511999999999998,1.1412852021808526,0.302204162298904,0.12114736018021066,1.2174110412597656 342 | 340,0.28086,0.09518000000000004,1.1404895711005951,0.3023602699726109,0.12090149059412236,1.6865394115447998 343 | 341,0.281,0.09508000000000005,1.1423053321789722,0.30248203395810225,0.12096081151013105,1.2280353307724 344 | 342,0.28062,0.09543999999999997,1.1404097845055619,0.3020714707762532,0.12106774526662023,1.1287691593170166 345 | 343,0.28169999999999995,0.09528000000000003,1.142279402333863,0.3016772989001434,0.1205385402527539,1.4274822473526 346 | 344,0.28080000000000005,0.09519999999999995,1.1430336565387493,0.3015711456820227,0.12048078041348242,1.1680675745010376 347 | 345,0.28046000000000004,0.09496000000000004,1.1434367087726691,0.30253120787531995,0.12086324421406425,1.298511266708374 348 | 346,0.28076,0.09504000000000001,1.1427770669058877,0.3019559510977101,0.12092256513007282,1.3935546875 349 | 347,0.28013999999999994,0.09511999999999998,1.1410120371349004,0.3024672037291001,0.12033794189204061,1.451462984085083 350 | 348,0.28102000000000005,0.09486000000000006,1.140113893364157,0.30178423265663257,0.12030047605035099,1.2168262004852295 351 | 349,0.28006,0.09521999999999997,1.1415524576999703,0.30194502356055064,0.12066186531498235,1.0374573469161987 352 | 350,0.28128,0.09563999999999995,1.1406792932931258,0.3023657337411907,0.12087963551980341,1.602816104888916 353 | 351,0.28103999999999996,0.09552000000000005,1.142614898937089,0.3017241312022555,0.12042302057421084,1.0774129629135132 354 | 352,0.28044,0.09538000000000002,1.1430609226226807,0.3021581105351605,0.12090617382433355,1.4151548147201538 355 | 353,0.28071999999999997,0.09526000000000001,1.1393566262357089,0.3015087026125399,0.12023569136576262,1.3534736633300781 356 | 354,0.27988,0.09499999999999997,1.1432971143904997,0.30212845007715616,0.12050029387269579,1.2887024879455566 357 | 355,0.28078000000000003,0.09523999999999999,1.140632791178567,0.3016296860596628,0.12052371002375173,1.680503249168396 358 | 356,0.27990000000000004,0.09540000000000004,1.140918498288612,0.3015851953726564,0.12031530627935316,1.4171557426452637 359 | 357,0.27981999999999996,0.09558,1.1398173611687155,0.3018310649587447,0.12025364374823888,1.241477608680725 360 | 
358,0.28064,0.09494000000000002,1.142515287107351,0.30184901734122094,0.12090383220922796,1.0202209949493408 361 | 359,0.28003999999999996,0.09604000000000001,1.1408015367936115,0.30121053695575983,0.1200475816189458,1.5010104179382324 362 | 360,0.27993999999999997,0.09521999999999997,1.1410729419820163,0.30174286412310025,0.12077114068657713,1.2033708095550537 363 | 361,0.27984,0.09484000000000004,1.1413273359743916,0.30163905252008516,0.12035199158267429,1.2622379064559937 364 | 362,0.28052,0.09506000000000003,1.140722780507438,0.30184901734122094,0.12040194603826038,1.5005857944488525 365 | 363,0.28002000000000005,0.09486000000000006,1.1410390252665596,0.30134947278535895,0.12012329384069365,1.1325254440307617 366 | 364,0.2804,0.09558,1.142630746169966,0.30155709599138913,0.12064547400924308,1.4544963836669922 367 | 365,0.27986,0.09574000000000005,1.141007650871666,0.30144547900468865,0.12049717171922159,1.446771502494812 368 | 366,0.28022,0.09489999999999998,1.1401442365378749,0.3019731229418179,0.12023569136576262,1.3646641969680786 369 | 367,0.28042,0.09577999999999998,1.141016648131974,0.3017225701255184,0.12019978660080999,1.3484302759170532 370 | 368,0.27984,0.09570000000000001,1.1434107258611796,0.30180062396237184,0.12025520482497598,1.6325370073318481 371 | 369,0.27990000000000004,0.09565999999999997,1.1432668679222768,0.30119492618838917,0.12026769343887256,1.2026281356811523 372 | 370,0.28146000000000004,0.09611999999999998,1.1428037986767536,0.30183262603548167,0.12018027314159674,1.050337791442871 373 | 371,0.28015999999999996,0.09553999999999996,1.1401662108849506,0.30150011669048604,0.1204401924183186,1.2930361032485962 374 | 372,0.28064,0.09521999999999997,1.1424572229081271,0.30124331956723827,0.1205416624062281,1.0166188478469849 375 | 373,0.27981999999999996,0.09555999999999998,1.1404901703401489,0.3013744500131521,0.12047375556816564,1.5369950532913208 376 | 374,0.2801,0.09552000000000005,1.1414920359241718,0.3019785867103977,0.12069074523461809,1.271062970161438 377 | 375,0.27993999999999997,0.09574000000000005,1.1423395382506507,0.3010926756621112,0.1200491426956829,1.0867700576782227 378 | 376,0.27993999999999997,0.09602,1.1416668998343604,0.30095998413946035,0.12043785080321301,1.346718430519104 379 | 377,0.27980000000000005,0.09509999999999996,1.1408493234490862,0.30086475845849914,0.12038555473252122,1.475778341293335 380 | 378,0.2803,0.09436,1.1407969560550184,0.3018162347297425,0.12061425247450175,1.357787847518921 381 | 379,0.28103999999999996,0.0947,1.1434419367994582,0.30114809388627717,0.1200951944594264,1.1523959636688232 382 | 380,0.27986,0.09541999999999995,1.140250137569953,0.30115199657811975,0.12008738907574112,1.568360686302185 383 | 381,0.27969999999999995,0.09482000000000002,1.1381175955947564,0.3011192139666413,0.12013344083948463,1.3980927467346191 384 | 382,0.2802,0.09563999999999995,1.1425677916529227,0.3006774292500509,0.11991254848118937,1.2389849424362183 385 | 383,0.2803,0.09587999999999997,1.1435957584454088,0.30077187439264363,0.12022788598207723,1.2293143272399902 386 | 384,0.27980000000000005,0.09460000000000002,1.1385352967344984,0.3008897356862923,0.11983839733617863,1.3477554321289062 387 | 385,0.27903999999999995,0.09533999999999998,1.1409126717825324,0.3015859759110249,0.12033325866182942,1.4940366744995117 388 | 386,0.28098,0.09582000000000002,1.1407093016468748,0.30075860524037845,0.11978844288059243,1.1731261014938354 389 | 
387,0.28062,0.09543999999999997,1.1422023748864933,0.30057361764703583,0.12011470791863976,1.361867904663086 390 | 388,0.27986,0.09541999999999995,1.1418426141751057,0.30120507318718015,0.12007255884673895,1.3795924186706543 391 | 389,0.28025999999999995,0.09545999999999999,1.1407681413146915,0.300496344348551,0.11967604535552356,1.2043880224227905 392 | 390,0.27928,0.09555999999999998,1.1403778523814923,0.3012698578717685,0.12014202676153851,1.5539734363555908 393 | 391,0.28118,0.09553999999999996,1.1436745436215887,0.3006524520222579,0.12006397292468507,1.1367026567459106 394 | 392,0.28015999999999996,0.09516000000000002,1.1387119878615652,0.30124878333581806,0.12038789634762681,1.4878884553909302 395 | 393,0.28081999999999996,0.09582000000000002,1.141186638450136,0.3009834002905164,0.1198547886419179,1.1946327686309814 396 | 394,0.2801,0.09474000000000005,1.139313713020208,0.3010731622028978,0.11998982177967432,1.3151112794876099 397 | 395,0.28037999999999996,0.09536,1.144912454242609,0.3004549758150187,0.11983293356759894,1.2336503267288208 398 | 396,0.28054,0.09518000000000004,1.1359161733060468,0.3008304147702836,0.11976892942137907,1.3871930837631226 399 | 397,0.28012000000000004,0.09516000000000002,1.142025271088493,0.3010193050554689,0.1199492337845105,1.4512814283370972 400 | 398,0.2804,0.09582000000000002,1.1450540599774341,0.30100525536483536,0.1199468921694049,1.4553933143615723 401 | 399,0.27980000000000005,0.09555999999999998,1.140247784707011,0.30090768806876855,0.11999138285641142,1.612149715423584 402 | 400,0.28022,0.09562000000000004,1.1398318496589759,0.300598594874829,0.11946920268786188,1.2399436235427856 403 | 401,0.279,0.09521999999999997,1.1389348706116482,0.300405801897801,0.11997108885882946,1.4132685661315918 404 | -------------------------------------------------------------------------------- /results/mobilenet_v2_1.0_224/top1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_1.0_224/top1.png -------------------------------------------------------------------------------- /results/mobilenet_v2_1.0_224/top5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Randl/MobileNetV2-pytorch/3518846c69971c10cae89b6b29497a502200da65/results/mobilenet_v2_1.0_224/top5.png -------------------------------------------------------------------------------- /run.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | import matplotlib 5 | import numpy as np 6 | import torch 7 | import torch.nn.parallel 8 | import torch.optim 9 | import torch.utils.data 10 | from tqdm import tqdm, trange 11 | 12 | matplotlib.use('Agg') 13 | 14 | from matplotlib import pyplot as plt 15 | 16 | from clr import CyclicLR 17 | 18 | 19 | def train(model, loader, epoch, optimizer, criterion, device, dtype, batch_size, log_interval, scheduler): 20 | model.train() 21 | correct1, correct5 = 0, 0 22 | 23 | for batch_idx, (data, target) in enumerate(tqdm(loader)): 24 | if isinstance(scheduler, CyclicLR): 25 | scheduler.batch_step() 26 | data, target = data.to(device=device, dtype=dtype), target.to(device=device) 27 | 28 | optimizer.zero_grad() 29 | output = model(data) 30 | 31 | loss = criterion(output, target) 32 | loss.backward() 33 | optimizer.step() 34 | 35 | corr = correct(output, target, topk=(1, 5)) 36 | 
correct1 += corr[0] 37 | correct5 += corr[1] 38 | 39 | if batch_idx % log_interval == 0: 40 | tqdm.write( 41 | 'Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}. ' 42 | 'Top-1 accuracy: {:.2f}%({:.2f}%). ' 43 | 'Top-5 accuracy: {:.2f}%({:.2f}%).'.format(epoch, batch_idx, len(loader), 44 | 100. * batch_idx / len(loader), loss.item(), 45 | 100. * corr[0] / batch_size, 46 | 100. * correct1 / (batch_size * (batch_idx + 1)), 47 | 100. * corr[1] / batch_size, 48 | 100. * correct5 / (batch_size * (batch_idx + 1)))) 49 | return loss.item(), correct1 / len(loader.dataset), correct5 / len(loader.dataset) 50 | 51 | 52 | def test(model, loader, criterion, device, dtype): 53 | model.eval() 54 | test_loss = 0 55 | correct1, correct5 = 0, 0 56 | 57 | for batch_idx, (data, target) in enumerate(tqdm(loader)): 58 | data, target = data.to(device=device, dtype=dtype), target.to(device=device) 59 | with torch.no_grad(): 60 | output = model(data) 61 | test_loss += criterion(output, target).item() # accumulate mean loss per batch 62 | corr = correct(output, target, topk=(1, 5)) 63 | correct1 += corr[0] 64 | correct5 += corr[1] 65 | 66 | test_loss /= len(loader) # average of per-batch mean losses 67 | 68 | tqdm.write( 69 | '\nTest set: Average loss: {:.4f}, Top1: {}/{} ({:.2f}%), ' 70 | 'Top5: {}/{} ({:.2f}%)'.format(test_loss, int(correct1), len(loader.dataset), 71 | 100. * correct1 / len(loader.dataset), int(correct5), 72 | len(loader.dataset), 100. * correct5 / len(loader.dataset))) 73 | return test_loss, correct1 / len(loader.dataset), correct5 / len(loader.dataset) 74 | 75 | 76 | def correct(output, target, topk=(1,)): 77 | """Computes the number of correct@k predictions for the specified values of k""" 78 | maxk = max(topk) 79 | 80 | _, pred = output.topk(maxk, 1, True, True) 81 | pred = pred.t().type_as(target) 82 | correct = pred.eq(target.view(1, -1).expand_as(pred)) 83 | 84 | res = [] 85 | for k in topk: 86 | correct_k = correct[:k].view(-1).float().sum(0).item() 87 | res.append(correct_k) 88 | return res 89 | 90 | 91 | def save_checkpoint(state, is_best, filepath='./', filename='checkpoint.pth.tar'): 92 | save_path = os.path.join(filepath, filename) 93 | best_path = os.path.join(filepath, 'model_best.pth.tar') 94 | torch.save(state, save_path) 95 | if is_best: 96 | shutil.copyfile(save_path, best_path) 97 | 98 | 99 | def find_bounds_clr(model, loader, optimizer, criterion, device, dtype, min_lr=8e-6, max_lr=8e-5, step_size=2000, 100 | mode='triangular', save_path='.'): 101 | # LR range test from the CLR paper: sweep the learning rate upwards and record per-batch accuracy 102 | model.train() 103 | scheduler = CyclicLR(optimizer, base_lr=min_lr, max_lr=max_lr, step_size=step_size, mode=mode) 104 | epoch_count = step_size // len(loader) # Assuming step_size is a multiple of batches per epoch 105 | accuracy = [] 106 | for _ in trange(epoch_count): 107 | for batch_idx, (data, target) in enumerate(tqdm(loader)): 108 | if scheduler is not None: 109 | scheduler.batch_step() 110 | data, target = data.to(device=device, dtype=dtype), target.to(device=device) 111 | 112 | optimizer.zero_grad() 113 | output = model(data) 114 | 115 | loss = criterion(output, target) 116 | loss.backward() 117 | optimizer.step() 118 | 119 | corr = correct(output, target) 120 | accuracy.append(corr[0] / data.shape[0]) 121 | 122 | lrs = np.linspace(min_lr, max_lr, len(accuracy)) # equals step_size points when the assumption above holds 123 | plt.plot(lrs, accuracy) 124 | plt.savefig(os.path.join(save_path, 'find_bounds_clr.png')) # plt.show() would be a no-op under the Agg backend selected at import 125 | np.save(os.path.join(save_path, 'acc.npy'), accuracy) 126 | --------------------------------------------------------------------------------
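The helpers in `run.py` are glued together by `imagenet.py` in this repo. As a rough, self-contained sketch of how `train`, `test`, and `save_checkpoint` compose with `CyclicLR` — the toy linear model, random tensors, and hyper-parameters below are illustrative stand-ins only, not the repo's actual ImageNet setup:

```python
import torch
from torch.utils.data import DataLoader, TensorDataset

from clr import CyclicLR
from run import train, test, save_checkpoint

# Toy stand-ins so the wiring is runnable end-to-end; imagenet.py builds the
# real MobileNet2 model and ImageNet loaders from command-line arguments.
device, dtype = torch.device('cpu'), torch.float32
model = torch.nn.Linear(16, 10)  # placeholder for the MobileNetV2 model
dataset = TensorDataset(torch.randn(64, 16), torch.randint(0, 10, (64,)))
train_loader = DataLoader(dataset, batch_size=8)
val_loader = DataLoader(dataset, batch_size=8)

criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.045, momentum=0.9, weight_decay=4e-5)
scheduler = CyclicLR(optimizer, base_lr=1e-5, max_lr=0.045, step_size=16)

best_top1 = 0.0
for epoch in range(2):  # illustrative epoch count
    # train() steps the CyclicLR scheduler once per batch internally.
    train(model, train_loader, epoch, optimizer, criterion, device, dtype,
          batch_size=8, log_interval=10, scheduler=scheduler)
    _, top1, _ = test(model, val_loader, criterion, device, dtype)
    is_best = top1 > best_top1
    best_top1 = max(top1, best_top1)
    # Writes checkpoint.pth.tar and copies it to model_best.pth.tar on improvement.
    save_checkpoint({'epoch': epoch, 'state_dict': model.state_dict(),
                     'optimizer': optimizer.state_dict()}, is_best)
```

In the same spirit, `find_bounds_clr` can be run once with the same optimizer and loader before training starts: it sweeps the learning rate from `min_lr` to `max_lr`, saves the accuracy-vs-LR plot, and the resulting curve suggests sensible `base_lr`/`max_lr` bounds for the cycle.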