├── maml ├── __init__.py ├── metalearners │ ├── __init__.py │ └── meta_sgd.py ├── utils.py ├── model.py └── datasets.py ├── figures ├── 10.png ├── 11.png ├── 12.png └── 13.png ├── .gitignore ├── models ├── 2020-09-22_164258 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-22_165037 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-22_171209 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-23_004223 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-23_004245 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-23_112843 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-24_175224 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-24_175635 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-24_181123 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-24_182934 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── 2020-09-24_220708 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json └── 2020-09-24_220801 │ ├── model.th │ ├── results.json │ ├── config.json │ ├── run.txt │ └── model_results.json ├── test.py └── README.md /maml/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /figures/10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/figures/10.png 
-------------------------------------------------------------------------------- /figures/11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/figures/11.png -------------------------------------------------------------------------------- /figures/12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/figures/12.png -------------------------------------------------------------------------------- /figures/13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/figures/13.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/* 2 | notebooks/.ipynb_checkpoints/* 3 | maml/__pycache__/* 4 | maml/metalearners/__pycache__/* 5 | -------------------------------------------------------------------------------- /models/2020-09-22_164258/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-22_164258/model.th -------------------------------------------------------------------------------- /models/2020-09-22_165037/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-22_165037/model.th -------------------------------------------------------------------------------- /models/2020-09-22_171209/model.th: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-22_171209/model.th -------------------------------------------------------------------------------- /models/2020-09-23_004223/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-23_004223/model.th -------------------------------------------------------------------------------- /models/2020-09-23_004245/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-23_004245/model.th -------------------------------------------------------------------------------- /models/2020-09-23_112843/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-23_112843/model.th -------------------------------------------------------------------------------- /models/2020-09-24_175224/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-24_175224/model.th -------------------------------------------------------------------------------- /models/2020-09-24_175635/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-24_175635/model.th -------------------------------------------------------------------------------- /models/2020-09-24_181123/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-24_181123/model.th 
-------------------------------------------------------------------------------- /models/2020-09-24_182934/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-24_182934/model.th -------------------------------------------------------------------------------- /models/2020-09-24_220708/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-24_220708/model.th -------------------------------------------------------------------------------- /models/2020-09-24_220801/model.th: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fastforwardlabs/learning-to-learn/HEAD/models/2020-09-24_220801/model.th -------------------------------------------------------------------------------- /models/2020-09-22_164258/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 0.8571084713935854, "accuracies_after": 0.6832000058889391, "precision_after": 0.6082400110363959} -------------------------------------------------------------------------------- /models/2020-09-22_165037/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 0.5033954861760139, "accuracies_after": 0.8256799912452698, "precision_after": 0.8404316002130505} -------------------------------------------------------------------------------- /models/2020-09-22_171209/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 0.44001342684030526, "accuracies_after": 0.8512239903211597, "precision_after": 0.8591483801603315} -------------------------------------------------------------------------------- 
/models/2020-09-23_004223/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 1.4830173337459567, "accuracies_after": 0.5069200065732002, "precision_after": 0.4117480009794234} -------------------------------------------------------------------------------- /models/2020-09-23_004245/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 0.9218882453441618, "accuracies_after": 0.7048479914665221, "precision_after": 0.7269254899024963} -------------------------------------------------------------------------------- /models/2020-09-23_112843/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 0.7904103028774262, "accuracies_after": 0.7491160023212435, "precision_after": 0.7620470541715618} -------------------------------------------------------------------------------- /models/2020-09-24_175224/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 1.3226493442058562, "accuracies_after": 0.5822599923610684, "precision_after": 0.6017745363712309} -------------------------------------------------------------------------------- /models/2020-09-24_175635/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 1.7205986917018892, "accuracies_after": 0.42580000072717666, "precision_after": 0.32991333752870533} -------------------------------------------------------------------------------- /models/2020-09-24_181123/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 1.2431868052482602, "accuracies_after": 0.6059700012207033, "precision_after": 0.6211167383193972} -------------------------------------------------------------------------------- /models/2020-09-24_182934/results.json: 
-------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 0.6522263237833975, "accuracies_after": 0.7695200133323669, "precision_after": 0.7851127791404725} -------------------------------------------------------------------------------- /models/2020-09-24_220708/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 1.0433219760656356, "accuracies_after": 0.5936000132560728, "precision_after": 0.5083999997377399} -------------------------------------------------------------------------------- /models/2020-09-24_220801/results.json: -------------------------------------------------------------------------------- 1 | {"mean_outer_loss": 0.6812885040044783, "accuracies_after": 0.772919998764992, "precision_after": 0.7816292428970337} -------------------------------------------------------------------------------- /maml/metalearners/__init__.py: -------------------------------------------------------------------------------- 1 | from maml.metalearners.maml import ModelAgnosticMetaLearning, MAML, FOMAML 2 | from maml.metalearners.meta_sgd import MetaSGD 3 | 4 | __all__ = ['ModelAgnosticMetaLearning', 'MAML', 'FOMAML', 'MetaSGD'] -------------------------------------------------------------------------------- /models/2020-09-22_164258/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 100, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 5, 8 | "num_shots": 1, 9 | "num_shots_test": 1, 10 | "hidden_size": 20, 11 | "batch_size": 25, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.01, 16 | "first_order": false, 17 | "meta_lr": 0.001, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": 
"/home/paperspace/learning-to-learn/models/2020-09-22_164258/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-22_165037/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 100, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 5, 8 | "num_shots": 5, 9 | "num_shots_test": 5, 10 | "hidden_size": 20, 11 | "batch_size": 25, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.01, 16 | "first_order": false, 17 | "meta_lr": 0.001, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-22_165037/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-22_171209/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 100, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 5, 8 | "num_shots": 10, 9 | "num_shots_test": 10, 10 | "hidden_size": 20, 11 | "batch_size": 25, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.01, 16 | "first_order": false, 17 | "meta_lr": 0.001, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-22_171209/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-23_004223/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 100, 5 | "random_seed": 123, 6 | 
"output_folder": "./models", 7 | "num_ways": 10, 8 | "num_shots": 1, 9 | "num_shots_test": 1, 10 | "hidden_size": 20, 11 | "batch_size": 25, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.01, 16 | "first_order": false, 17 | "meta_lr": 0.001, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-23_004223/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-23_004245/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 100, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 10, 8 | "num_shots": 5, 9 | "num_shots_test": 5, 10 | "hidden_size": 20, 11 | "batch_size": 25, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.01, 16 | "first_order": false, 17 | "meta_lr": 0.001, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-23_004245/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-23_112843/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 100, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 10, 8 | "num_shots": 10, 9 | "num_shots_test": 10, 10 | "hidden_size": 20, 11 | "batch_size": 25, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.01, 16 | "first_order": false, 17 | "meta_lr": 0.001, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": 
"/home/paperspace/learning-to-learn/models/2020-09-23_112843/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-24_175224/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 20, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 10, 8 | "num_shots": 5, 9 | "num_shots_test": 5, 10 | "hidden_size": 20, 11 | "batch_size": 10, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.005, 16 | "first_order": false, 17 | "meta_lr": 0.0005, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-24_175224/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-24_175635/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 20, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 10, 8 | "num_shots": 1, 9 | "num_shots_test": 1, 10 | "hidden_size": 20, 11 | "batch_size": 10, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.005, 16 | "first_order": false, 17 | "meta_lr": 0.0005, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-24_175635/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-24_181123/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 20, 5 | "random_seed": 123, 6 | 
"output_folder": "./models", 7 | "num_ways": 10, 8 | "num_shots": 10, 9 | "num_shots_test": 10, 10 | "hidden_size": 20, 11 | "batch_size": 10, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.005, 16 | "first_order": false, 17 | "meta_lr": 0.0005, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-24_181123/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-24_182934/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 20, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 5, 8 | "num_shots": 5, 9 | "num_shots_test": 5, 10 | "hidden_size": 20, 11 | "batch_size": 10, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.005, 16 | "first_order": false, 17 | "meta_lr": 0.0005, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-24_182934/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-24_220708/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 20, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 5, 8 | "num_shots": 1, 9 | "num_shots_test": 1, 10 | "hidden_size": 20, 11 | "batch_size": 10, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.005, 16 | "first_order": false, 17 | "meta_lr": 0.0005, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": 
"/home/paperspace/learning-to-learn/models/2020-09-24_220708/model.th" 22 | } -------------------------------------------------------------------------------- /models/2020-09-24_220801/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "folder": "/storage/smb79ck2/ndata", 3 | "dataset": "quickdraw", 4 | "num_training_samples": 20, 5 | "random_seed": 123, 6 | "output_folder": "./models", 7 | "num_ways": 5, 8 | "num_shots": 10, 9 | "num_shots_test": 10, 10 | "hidden_size": 20, 11 | "batch_size": 10, 12 | "num_steps": 5, 13 | "num_epochs": 50, 14 | "num_batches": 100, 15 | "step_size": 0.005, 16 | "first_order": false, 17 | "meta_lr": 0.0005, 18 | "num_workers": 1, 19 | "verbose": true, 20 | "use_cuda": true, 21 | "model_path": "/home/paperspace/learning-to-learn/models/2020-09-24_220801/model.th" 22 | } -------------------------------------------------------------------------------- /maml/metalearners/meta_sgd.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn.functional as F 3 | import numpy as np 4 | 5 | from maml.metalearners.maml import ModelAgnosticMetaLearning 6 | 7 | __all__ = ['MetaSGD'] 8 | 9 | 10 | class MetaSGD(ModelAgnosticMetaLearning): 11 | def __init__(self, model, optimizer=None, init_step_size=0.1, 12 | num_adaptation_steps=1, scheduler=None, 13 | loss_function=F.cross_entropy, device=None): 14 | super(MetaSGD, self).__init__(model, optimizer=optimizer, 15 | step_size=init_step_size, learn_step_size=True, 16 | per_param_step_size=True, num_adaptation_steps=num_adaptation_steps, 17 | scheduler=scheduler, loss_function=loss_function, device=device) 18 | -------------------------------------------------------------------------------- /maml/utils.py: -------------------------------------------------------------------------------- 1 | import torch 2 | 3 | from collections import OrderedDict 4 | from torchmeta.modules import 
MetaModule 5 | from sklearn.metrics import confusion_matrix, accuracy_score, f1_score, precision_score, recall_score, fbeta_score 6 | 7 | def compute_precision(logits, targets): 8 | """Compute the precision""" 9 | with torch.no_grad(): 10 | _, predictions = torch.max(logits, dim=1) 11 | precision = precision_score(targets.detach().cpu().numpy(), predictions.detach().cpu().numpy(), 12 | average='weighted', zero_division=0) 13 | return precision 14 | 15 | def compute_accuracy(logits, targets): 16 | """Compute the accuracy""" 17 | with torch.no_grad(): 18 | _, predictions = torch.max(logits, dim=1) 19 | accuracy = torch.mean(predictions.eq(targets).float()) 20 | return accuracy.item() 21 | 22 | def tensors_to_device(tensors, device=torch.device('cpu')): 23 | """Place a collection of tensors in a specific device""" 24 | if isinstance(tensors, torch.Tensor): 25 | return tensors.to(device=device) 26 | elif isinstance(tensors, (list, tuple)): 27 | return type(tensors)(tensors_to_device(tensor, device=device) 28 | for tensor in tensors) 29 | elif isinstance(tensors, (dict, OrderedDict)): 30 | return type(tensors)([(name, tensors_to_device(tensor, device=device)) 31 | for (name, tensor) in tensors.items()]) 32 | else: 33 | raise NotImplementedError() 34 | 35 | class ToTensor1D(object): 36 | """Convert a `numpy.ndarray` to tensor. Unlike `ToTensor` from torchvision, 37 | this converts numpy arrays regardless of the number of dimensions. 38 | 39 | Converts automatically the array to `float32`. 
40 | """ 41 | def __call__(self, array): 42 | return torch.from_numpy(array.astype('float32')) 43 | 44 | def __repr__(self): 45 | return self.__class__.__name__ + '()' 46 | -------------------------------------------------------------------------------- /maml/model.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | 3 | from collections import OrderedDict 4 | from torchmeta.modules import (MetaModule, MetaConv2d, MetaBatchNorm2d, 5 | MetaSequential, MetaLinear) 6 | 7 | 8 | def conv_block(in_channels, out_channels, **kwargs): 9 | return MetaSequential(OrderedDict([ 10 | ('conv', MetaConv2d(in_channels, out_channels, **kwargs)), 11 | ('norm', nn.BatchNorm2d(out_channels, momentum=1., 12 | track_running_stats=False)), 13 | ('relu', nn.ReLU()), 14 | ('pool', nn.MaxPool2d(2)) 15 | ])) 16 | 17 | class MetaConvModel(MetaModule): 18 | """4-layer Convolutional Neural Network architecture from [1]. 19 | 20 | Parameters 21 | ---------- 22 | in_channels : int 23 | Number of channels for the input images. 24 | 25 | out_features : int 26 | Number of classes (output of the model). 27 | 28 | hidden_size : int (default: 64) 29 | Number of channels in the intermediate representations. 30 | 31 | feature_size : int (default: 64) 32 | Number of features returned by the convolutional head. 33 | 34 | References 35 | ---------- 36 | .. [1] Finn C., Abbeel P., and Levine, S. (2017). Model-Agnostic Meta-Learning 37 | for Fast Adaptation of Deep Networks. 
International Conference on 38 | Machine Learning (ICML) (https://arxiv.org/abs/1703.03400) 39 | """ 40 | def __init__(self, in_channels, out_features, hidden_size=64, feature_size=64): 41 | super(MetaConvModel, self).__init__() 42 | self.in_channels = in_channels 43 | self.out_features = out_features 44 | self.hidden_size = hidden_size 45 | self.feature_size = feature_size 46 | 47 | self.features = MetaSequential(OrderedDict([ 48 | ('layer1', conv_block(in_channels, hidden_size, kernel_size=3, 49 | stride=1, padding=1, bias=True)), 50 | ('layer2', conv_block(hidden_size, hidden_size, kernel_size=3, 51 | stride=1, padding=1, bias=True)), 52 | ('layer3', conv_block(hidden_size, hidden_size, kernel_size=3, 53 | stride=1, padding=1, bias=True)), 54 | ('layer4', conv_block(hidden_size, hidden_size, kernel_size=3, 55 | stride=1, padding=1, bias=True)) 56 | ])) 57 | self.classifier = MetaLinear(feature_size, out_features, bias=True) 58 | 59 | def forward(self, inputs, params=None): 60 | features = self.features(inputs, params=self.get_subdict(params, 'features')) 61 | features = features.view((features.size(0), -1)) 62 | logits = self.classifier(features, params=self.get_subdict(params, 'classifier')) 63 | return logits 64 | 65 | 66 | def ModelConvQuickDraw(out_features, hidden_size=20): 67 | return MetaConvModel(1, out_features, hidden_size=hidden_size, 68 | feature_size=hidden_size) 69 | 70 | 71 | if __name__ == '__main__': 72 | model = ModelConvQuickDraw() 73 | -------------------------------------------------------------------------------- /maml/datasets.py: -------------------------------------------------------------------------------- 1 | import torch.nn.functional as F 2 | 3 | from collections import namedtuple 4 | from torchmeta.datasets import Omniglot, MiniImagenet, QuickDraw 5 | from torchmeta.toy import Sinusoid 6 | from torchmeta.transforms import ClassSplitter, Categorical, Rotation 7 | from torchvision.transforms import ToTensor, Resize, Compose 8 | 9 | 
from maml.model import ModelConvQuickDraw
from maml.utils import ToTensor1D

# Lightweight container bundling the three meta-splits together with the
# model to adapt and the loss used to train/evaluate it.
Benchmark = namedtuple('Benchmark', 'meta_train_dataset meta_val_dataset '
                                    'meta_test_dataset model loss_function')

def get_benchmark_by_name(name,
                          folder,
                          num_ways,
                          num_shots,
                          num_shots_test,
                          hidden_size=None,
                          random_seed=123,
                          num_training_samples=100):
    """Assemble the meta-datasets, model and loss for a named benchmark.

    Parameters
    ----------
    name : str
        Benchmark identifier; only 'quickdraw' is supported.
    folder : str
        Root directory where the dataset is stored (or downloaded to).
    num_ways : int
        Number of classes per task.
    num_shots : int
        Number of training examples per class within each task.
    num_shots_test : int
        Number of test examples per class within each task.
    hidden_size : int, optional
        Width of the convolutional model.
    random_seed : int (default: 123)
        Seed forwarded to the meta-training split.
    num_training_samples : int (default: 100)
        Number of samples forwarded to the meta-training split.

    Returns
    -------
    Benchmark
        Named tuple holding the three meta-datasets, the model and the loss.

    Raises
    ------
    NotImplementedError
        If ``name`` is not a known benchmark.
    """
    dataset_transform = ClassSplitter(shuffle=True,
                                      num_train_per_class=num_shots,
                                      num_test_per_class=num_shots_test)
    if name != 'quickdraw':
        raise NotImplementedError('Unknown dataset `{0}`.'.format(name))

    transform = Compose([Resize(28), ToTensor()])

    # Keyword arguments shared by all three meta-splits.
    split_kwargs = dict(transform=transform,
                        target_transform=Categorical(num_ways),
                        num_classes_per_task=num_ways,
                        dataset_transform=dataset_transform)

    # Only the meta-training split triggers the download and receives the
    # seeded subsampling arguments, exactly as in the original code.
    meta_train_dataset = QuickDraw(folder,
                                   meta_train=True,
                                   download=True,
                                   random_seed=random_seed,
                                   num_training_samples=num_training_samples,
                                   **split_kwargs)
    meta_val_dataset = QuickDraw(folder,
                                 meta_val=True,
                                 **split_kwargs)
    meta_test_dataset = QuickDraw(folder,
                                  meta_test=True,
                                  **split_kwargs)

    model = ModelConvQuickDraw(num_ways, hidden_size=hidden_size)
    loss_function = F.cross_entropy

    return Benchmark(meta_train_dataset=meta_train_dataset,
                     meta_val_dataset=meta_val_dataset,
                     meta_test_dataset=meta_test_dataset,
                     model=model,
                     loss_function=loss_function)
import torch
import torch.nn.functional as F
import os
import json

from torchmeta.utils.data import BatchMetaDataLoader

from maml.datasets import get_benchmark_by_name
from maml.metalearners import ModelAgnosticMetaLearning

def main(args):
    """Evaluate a trained MAML model on the meta-test split.

    Loads the configuration produced by `train.py`, restores the model
    weights, runs evaluation on the meta-test dataset, and writes the
    metrics to `results.json` next to the model checkpoint.
    """
    with open(args.config, 'r') as f:
        config = json.load(f)

    # Command-line values (when provided) override the training-time config.
    if args.folder is not None:
        config['folder'] = args.folder
    if args.num_steps > 0:
        config['num_steps'] = args.num_steps
    if args.num_batches > 0:
        config['num_batches'] = args.num_batches
    device = torch.device('cuda' if args.use_cuda
                          and torch.cuda.is_available() else 'cpu')

    benchmark = get_benchmark_by_name(config['dataset'],
                                      config['folder'],
                                      config['num_ways'],
                                      config['num_shots'],
                                      config['num_shots_test'],
                                      hidden_size=config['hidden_size'])

    # map_location lets a checkpoint trained on GPU load on a CPU-only host.
    with open(config['model_path'], 'rb') as f:
        benchmark.model.load_state_dict(torch.load(f, map_location=device))

    meta_test_dataloader = BatchMetaDataLoader(benchmark.meta_test_dataset,
                                               batch_size=config['batch_size'],
                                               shuffle=True,
                                               num_workers=args.num_workers,
                                               pin_memory=True)
    metalearner = ModelAgnosticMetaLearning(benchmark.model,
                                            first_order=config['first_order'],
                                            num_adaptation_steps=config['num_steps'],
                                            step_size=config['step_size'],
                                            loss_function=benchmark.loss_function,
                                            device=device)

    results = metalearner.evaluate(meta_test_dataloader,
                                   max_batches=config['num_batches'],
                                   verbose=args.verbose,
                                   desc='Test')

    # Save results next to the checkpoint they describe.
    dirname = os.path.dirname(config['model_path'])
    with open(os.path.join(dirname, 'results.json'), 'w') as f:
        json.dump(results, f)

if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser('MAML')
    parser.add_argument('config', type=str,
        help='Path to the configuration file returned by `train.py`.')
    # Bug fix: --folder is a filesystem path, so it must be parsed as a
    # string; the original `type=int` rejected every real path value.
    parser.add_argument('--folder', type=str, default=None,
        help='Path to the folder the data is downloaded to. '
             '(default: path defined in configuration file).')

    # Optimization
    parser.add_argument('--num-steps', type=int, default=-1,
        help='Number of fast adaptation steps, ie. gradient descent updates '
             '(default: number of steps in configuration file).')
    parser.add_argument('--num-batches', type=int, default=-1,
        help='Number of batch of tasks per epoch '
             '(default: number of batches in configuration file).')

    # Misc
    parser.add_argument('--num-workers', type=int, default=1,
        help='Number of workers to use for data-loading (default: 1).')
    parser.add_argument('--verbose', action='store_true')
    parser.add_argument('--use-cuda', action='store_true')

    args = parser.parse_args()
    main(args)
25 | ├── figures 26 | │   ├── 10.png 27 | │   ├── 11.png 28 | │   ├── 12.png 29 | │   └── 13.png 30 | ├── maml 31 | │   ├── datasets.py 32 | │   ├── __init__.py 33 | │   ├── metalearners 34 | │   │   ├── __init__.py 35 | │   │   ├── maml.py 36 | │   │   ├── meta_sgd.py 37 | │   ├── model.py 38 | │   └── utils.py 39 | ├── models 40 | │   ├── 2020-09-22_164258 41 | │   │   ├── config.json 42 | │   │   ├── model_results.json 43 | │   │   ├── model.th 44 | │   │   ├── results.json 45 | │   │   └── run.txt 46 | . 47 | . 48 | . 49 | ├── notebooks 50 | │   ├── visualize_runs_100samples.ipynb 51 | │   └── visualize_runs_20samples.ipynb 52 | ├── README.md 53 | ├── test.py 54 | └── train.py 55 | 56 | ``` 57 | 58 | ## Run experiments 59 | 60 | ### Meta-training 61 | 62 | - Run the following command to train a meta-learning model on the meta-training dataset. 63 | Note that this step will take a few hours if run for the first time. That is because it downloads the entire [Quick Draw!](https://quickdraw.withgoogle.com/data) dataset and transforms it into train/validation/test meta-datasets to be consumed during training and evaluation. 64 | 65 | ``` 66 | python train.py <> \ 67 | --use-cuda \ 68 | --num-training-samples 20 \ 69 | --num-ways 5 \ 70 | --num-shots 1 \ 71 | --num-shots-test 1 \ 72 | --num-steps 5 \ 73 | --hidden-size 20 \ 74 | --batch-size 10 \ 75 | --num-batches 100 \ 76 | --num-epochs 50 \ 77 | --step-size 0.005 \ 78 | --meta-lr 0.0005 79 | ``` 80 | - The configuration, model and result files are saved in the `./models` folder using the date-timestamp as the foldername 81 | 82 | ### Meta-inference 83 | 84 | - Test the meta learning model 85 | ``` 86 | python test.py ./models/<>/config.json 87 | ``` 88 | 89 | ### Results 90 | 91 |
92 | 93 |
5-way, 1/5/10-shot results based on 100 random sampled images
94 |
95 | 96 |
97 | 98 |
10-way, 1/5/10-shot results based on 100 random sampled images
99 |
100 | 101 |
102 | 103 |
5-way, 1/5/10-shot results based on 20 random sampled images
104 |
105 | 106 |
107 | 108 |
10-way, 1/5/10-shot results based on 20 random sampled images
109 |
110 | 111 | ## References 112 | 113 | Leveraged source code from the following repos: 114 | 115 | > Tristan Deleu, Tobias Würfl, Mandana Samiei, Joseph Paul Cohen, and Yoshua Bengio. Torchmeta: A Meta-Learning library for PyTorch, 2019 [[ArXiv](https://arxiv.org/abs/1909.06576)] 116 | 117 | ``` 118 | @misc{deleu2019torchmeta, 119 | title={{Torchmeta: A Meta-Learning library for PyTorch}}, 120 | author={Deleu, Tristan and W\"urfl, Tobias and Samiei, Mandana and Cohen, Joseph Paul and Bengio, Yoshua}, 121 | year={2019}, 122 | url={https://arxiv.org/abs/1909.06576}, 123 | note={Available at: https://github.com/tristandeleu/pytorch-meta, https://github.com/tristandeleu/pytorch-maml} 124 | } 125 | ``` 126 | 127 | > Chelsea Finn, Pieter Abbeel, and Sergey Levine. Model-agnostic meta-learning for fast adaptation of deep networks. International Conference on Machine Learning (ICML), 2017 [ArXiv] 128 | 129 | ``` 130 | @article{finn17maml, 131 | author={Chelsea Finn and Pieter Abbeel and Sergey Levine}, 132 | title={Model-{A}gnostic {M}eta-{L}earning for {F}ast {A}daptation of {D}eep {N}etworks}, 133 | journal={International Conference on Machine Learning (ICML)}, 134 | year={2017}, 135 | url={http://arxiv.org/abs/1703.03400}, 136 | note={Available at: } 137 | } 138 | ``` -------------------------------------------------------------------------------- /models/2020-09-22_165037/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-22_165037` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-22_165037/config.json` 3 | cuda 4 | Namespace(batch_size=25, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.001, num_batches=100, num_epochs=50, num_shots=5, num_shots_test=5, num_steps=5, num_training_samples=100, num_ways=5, num_workers=1, output_folder='./models', random_seed=123, step_size=0.01, 
use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 1.11743 0.58568 0.59744 0.97885 0.64528 0.65870 7 | 2 0.90701 0.67088 0.68605 0.91626 0.66811 0.68164 8 | 3 0.80338 0.71274 0.72818 0.83758 0.70037 0.71580 9 | 4 0.76404 0.73005 0.74783 0.83094 0.70520 0.72265 10 | 5 0.72360 0.74437 0.76069 0.80151 0.71782 0.73426 11 | 6 0.70843 0.75072 0.76748 0.79208 0.72040 0.73762 12 | 7 0.70538 0.75112 0.76864 0.76086 0.73230 0.74984 13 | 8 0.68149 0.76141 0.77907 0.76164 0.73261 0.75083 14 | 9 0.64836 0.77182 0.78845 0.70717 0.75299 0.77054 15 | 10 0.61649 0.78462 0.80158 0.69703 0.75758 0.77369 16 | 11 0.59859 0.79011 0.80667 0.68895 0.76178 0.77834 17 | 12 0.58651 0.79706 0.81349 0.67063 0.76760 0.78324 18 | 13 0.56531 0.80298 0.81978 0.66396 0.76906 0.78597 19 | 14 0.55306 0.80725 0.82345 0.65415 0.77374 0.78973 20 | 15 0.54937 0.80824 0.82416 0.64117 0.77851 0.79521 21 | 16 0.55782 0.80645 0.82234 0.66633 0.76819 0.78435 22 | 17 0.54173 0.81118 0.82696 0.62939 0.78334 0.79973 23 | 18 0.52420 0.81810 0.83313 0.64659 0.77517 0.79131 24 | 19 0.53196 0.81480 0.83166 0.61689 0.78605 0.80118 25 | 20 0.51556 0.82093 0.83646 0.64165 0.77853 0.79482 26 | 21 0.52300 0.82016 0.83642 0.64503 0.77694 0.79422 27 | 22 0.50851 0.82317 0.83899 0.61439 0.78742 0.80334 28 | 23 0.50029 0.82725 0.84303 0.61648 0.78741 0.80413 29 | 24 0.50771 0.82310 0.83907 0.64131 0.77826 0.79604 30 | 25 0.50020 0.82517 0.84066 0.61935 0.78646 0.80307 31 | 26 0.48718 0.82968 0.84461 0.62604 0.78418 0.80050 32 | 27 0.49142 0.82973 0.84561 0.63315 0.78053 0.79554 33 | 28 0.48007 0.83278 0.84814 0.61180 0.78909 0.80435 34 | 29 0.47031 0.83544 0.85059 0.59651 0.79453 0.81000 35 | 30 0.47385 0.83371 0.84875 0.60167 0.79182 0.80755 36 | 31 0.46145 0.83813 0.85288 0.59973 0.79245 0.80850 37 | 32 0.45668 0.84019 0.85456 0.61241 0.78986 0.80601 38 | 33 0.45700 0.83853 0.85356 0.60807 0.79043 0.80648 39 | 34 0.45947 0.84085 0.85603 0.59951 0.79379 0.80923 40 | 35 
0.45340 0.84128 0.85575 0.60140 0.79208 0.80736 41 | 36 0.44684 0.84382 0.85800 0.59058 0.79531 0.81019 42 | 37 0.44478 0.84354 0.85717 0.60094 0.79246 0.80776 43 | 38 0.43947 0.84662 0.86066 0.59852 0.79539 0.81060 44 | 39 0.43276 0.84834 0.86204 0.59225 0.79402 0.80829 45 | 40 0.43437 0.84765 0.86171 0.59557 0.79578 0.80960 46 | 41 0.42845 0.84986 0.86396 0.59536 0.79342 0.80878 47 | 42 0.43309 0.84690 0.86054 0.59095 0.79570 0.81031 48 | 43 0.42882 0.84989 0.86392 0.60261 0.79227 0.80655 49 | 44 0.42558 0.84982 0.86327 0.60234 0.79160 0.80621 50 | 45 0.42384 0.85088 0.86419 0.60343 0.79349 0.80849 51 | 46 0.42316 0.85221 0.86591 0.59169 0.79670 0.81099 52 | 47 0.42123 0.85093 0.86458 0.59405 0.79440 0.80868 53 | 48 0.42798 0.84843 0.86239 0.59114 0.79670 0.81125 54 | 49 0.42205 0.85050 0.86334 0.59858 0.79171 0.80614 55 | 50 0.41972 0.85120 0.86465 0.60124 0.79469 0.81024 56 | -------------------------------------------------------------------------------- /models/2020-09-22_171209/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-22_171209` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-22_171209/config.json` 3 | cuda 4 | Namespace(batch_size=25, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.001, num_batches=100, num_epochs=50, num_shots=10, num_shots_test=10, num_steps=5, num_training_samples=100, num_ways=5, num_workers=1, output_folder='./models', random_seed=123, step_size=0.01, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 0.99030 0.64886 0.65848 0.84338 0.70235 0.71094 7 | 2 0.73731 0.74241 0.75183 0.75939 0.73382 0.74268 8 | 3 0.68072 0.76130 0.77148 0.71592 0.74940 0.75813 9 | 4 0.64168 0.77792 0.78815 0.71955 0.74898 0.75819 10 | 5 0.62511 0.78383 0.79440 0.67986 0.76442 0.77426 11 | 6 0.58852 0.79882 0.80881 
0.64333 0.77833 0.78836 12 | 7 0.55069 0.80927 0.81916 0.62618 0.78633 0.79646 13 | 8 0.52362 0.82083 0.83048 0.59679 0.79622 0.80655 14 | 9 0.49920 0.83033 0.83973 0.56970 0.80535 0.81502 15 | 10 0.48146 0.83533 0.84445 0.56525 0.80721 0.81647 16 | 11 0.47601 0.83767 0.84687 0.55532 0.81078 0.81977 17 | 12 0.45694 0.84411 0.85262 0.54230 0.81445 0.82383 18 | 13 0.45576 0.84461 0.85357 0.54120 0.81483 0.82394 19 | 14 0.43622 0.85132 0.85989 0.53969 0.81592 0.82524 20 | 15 0.44082 0.84973 0.85772 0.55227 0.81221 0.82140 21 | 16 0.54268 0.81960 0.82955 0.57955 0.80482 0.81500 22 | 17 0.45896 0.84506 0.85432 0.54729 0.81514 0.82498 23 | 18 0.44125 0.84997 0.85917 0.53379 0.81858 0.82809 24 | 19 0.44205 0.84912 0.85823 0.53624 0.81914 0.82897 25 | 20 0.42328 0.85518 0.86352 0.53608 0.81872 0.82838 26 | 21 0.40661 0.86128 0.86944 0.52250 0.82174 0.83050 27 | 22 0.53556 0.81912 0.82970 0.62234 0.78871 0.79885 28 | 23 0.45916 0.84406 0.85333 0.54118 0.81509 0.82432 29 | 24 0.42246 0.85609 0.86435 0.52552 0.81968 0.82865 30 | 25 0.40218 0.86191 0.86991 0.51601 0.82436 0.83304 31 | 26 0.39542 0.86454 0.87275 0.51790 0.82342 0.83242 32 | 27 0.38059 0.87034 0.87818 0.51070 0.82669 0.83510 33 | 28 0.37921 0.86905 0.87668 0.50802 0.82798 0.83674 34 | 29 0.37755 0.86882 0.87668 0.51535 0.82599 0.83450 35 | 30 0.36841 0.87201 0.87935 0.51333 0.82638 0.83471 36 | 31 0.35912 0.87562 0.88280 0.51140 0.82733 0.83558 37 | 32 0.35691 0.87522 0.88214 0.50809 0.82865 0.83687 38 | 33 0.35798 0.87542 0.88278 0.51753 0.82277 0.83064 39 | 34 0.35018 0.87841 0.88540 0.49476 0.83139 0.83909 40 | 35 0.34775 0.87875 0.88539 0.52083 0.82389 0.83182 41 | 36 0.34712 0.87898 0.88584 0.51925 0.82298 0.83074 42 | 37 0.34653 0.87898 0.88575 0.50903 0.82625 0.83445 43 | 38 0.35013 0.87796 0.88494 0.51162 0.82530 0.83367 44 | 39 0.34220 0.88150 0.88820 0.51928 0.82543 0.83334 45 | 40 0.33863 0.88111 0.88773 0.50652 0.82998 0.83749 46 | 41 0.33544 0.88381 0.89051 0.51119 0.82783 0.83571 47 | 42 0.33788 
0.88234 0.88921 0.51643 0.82488 0.83294 48 | 43 0.33318 0.88322 0.89002 0.50289 0.82770 0.83518 49 | 44 0.33171 0.88445 0.89100 0.51920 0.82279 0.83061 50 | 45 0.33243 0.88400 0.89082 0.52049 0.82579 0.83355 51 | 46 0.33039 0.88499 0.89166 0.51484 0.82718 0.83513 52 | 47 0.33288 0.88295 0.88982 0.52063 0.82434 0.83159 53 | 48 0.33050 0.88457 0.89104 0.52015 0.82350 0.83139 54 | 49 0.32742 0.88562 0.89219 0.52017 0.82322 0.83126 55 | 50 0.32356 0.88706 0.89357 0.52194 0.82459 0.83229 56 | -------------------------------------------------------------------------------- /models/2020-09-23_004223/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-23_004223` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-23_004223/config.json` 3 | cuda 4 | Namespace(batch_size=25, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.001, num_batches=100, num_epochs=50, num_shots=1, num_shots_test=1, num_steps=5, num_training_samples=100, num_ways=10, num_workers=1, output_folder='./models', random_seed=123, step_size=0.01, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 2.20134 0.21044 0.13559 2.10479 0.26464 0.17926 7 | 2 2.01384 0.30872 0.21842 1.96238 0.33040 0.23704 8 | 3 1.91081 0.35320 0.25839 1.90924 0.35540 0.26020 9 | 4 1.84974 0.37152 0.27422 1.86116 0.37296 0.27568 10 | 5 1.80458 0.38696 0.29189 1.83976 0.38260 0.28594 11 | 6 1.73892 0.42016 0.32037 1.80620 0.39732 0.29905 12 | 7 1.71030 0.43136 0.33212 1.78824 0.40628 0.30919 13 | 8 1.68951 0.43628 0.33812 1.77984 0.41052 0.31328 14 | 9 1.67463 0.44132 0.34362 1.78238 0.40688 0.31027 15 | 10 1.66420 0.44452 0.34727 1.75824 0.41720 0.31946 16 | 11 1.62884 0.45760 0.36030 1.74432 0.41972 0.32280 17 | 12 1.61324 0.46192 0.36330 1.74377 0.42468 0.32800 18 | 13 1.58832 0.47504 0.37791 1.70489 
0.43912 0.34229 19 | 14 1.55976 0.48328 0.38388 1.69682 0.43856 0.34099 20 | 15 1.55394 0.48680 0.38805 1.67947 0.44744 0.34871 21 | 16 1.53724 0.48744 0.39026 1.65800 0.45404 0.35759 22 | 17 1.54123 0.49024 0.39237 1.66912 0.44640 0.34983 23 | 18 1.51477 0.49580 0.39758 1.65940 0.45432 0.35623 24 | 19 1.51515 0.50012 0.40257 1.66298 0.45480 0.35812 25 | 20 1.50563 0.50060 0.40270 1.64879 0.45912 0.36283 26 | 21 1.50065 0.50188 0.40570 1.66757 0.45240 0.35483 27 | 22 1.49225 0.50492 0.40731 1.64199 0.45624 0.36007 28 | 23 1.48902 0.50792 0.41009 1.63556 0.45984 0.36250 29 | 24 1.48061 0.51188 0.41474 1.62274 0.46960 0.37177 30 | 25 1.47104 0.51260 0.41464 1.63859 0.46020 0.36423 31 | 26 1.46840 0.51052 0.41334 1.64104 0.46188 0.36651 32 | 27 1.45585 0.51788 0.41993 1.63490 0.46676 0.37249 33 | 28 1.44266 0.52068 0.42462 1.62928 0.46984 0.37632 34 | 29 1.45279 0.52124 0.42394 1.62150 0.46776 0.37101 35 | 30 1.44363 0.52152 0.42522 1.60830 0.47312 0.37829 36 | 31 1.42937 0.52568 0.43028 1.60754 0.47604 0.37966 37 | 32 1.40752 0.53588 0.44119 1.61253 0.46960 0.37269 38 | 33 1.41201 0.53228 0.43571 1.57365 0.48620 0.39074 39 | 34 1.40807 0.53144 0.43603 1.58999 0.48140 0.38561 40 | 35 1.39019 0.54108 0.44536 1.59902 0.47736 0.38332 41 | 36 1.38771 0.54112 0.44506 1.58509 0.47936 0.38501 42 | 37 1.39003 0.54132 0.44411 1.59194 0.48020 0.38563 43 | 38 1.36108 0.55088 0.45673 1.58001 0.48384 0.39001 44 | 39 1.36597 0.54716 0.45269 1.58739 0.48864 0.39587 45 | 40 1.36544 0.54980 0.45459 1.58358 0.48544 0.39161 46 | 41 1.36627 0.54824 0.45436 1.57509 0.49092 0.39826 47 | 42 1.35644 0.54880 0.45436 1.55376 0.49508 0.40135 48 | 43 1.35301 0.55248 0.45728 1.59102 0.48400 0.38910 49 | 44 1.33225 0.56120 0.46835 1.56709 0.49052 0.39580 50 | 45 1.36268 0.54840 0.45391 1.54801 0.49568 0.40032 51 | 46 1.33460 0.56160 0.46676 1.57671 0.48816 0.39122 52 | 47 1.34911 0.55460 0.46036 1.57677 0.48972 0.39431 53 | 48 1.34384 0.55504 0.46124 1.55930 0.49488 0.40287 54 | 49 1.33174 0.55992 
0.46488 1.56200 0.49204 0.39814 55 | 50 1.32320 0.56148 0.46717 1.56158 0.49108 0.39584 56 | -------------------------------------------------------------------------------- /models/2020-09-23_004245/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-23_004245` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-23_004245/config.json` 3 | cuda 4 | Namespace(batch_size=25, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.001, num_batches=100, num_epochs=50, num_shots=5, num_shots_test=5, num_steps=5, num_training_samples=100, num_ways=10, num_workers=1, output_folder='./models', random_seed=123, step_size=0.01, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 1.97283 0.32575 0.31822 1.69638 0.45321 0.45612 7 | 2 1.49963 0.52145 0.53143 1.44051 0.53807 0.54968 8 | 3 1.29093 0.58535 0.60365 1.30018 0.58781 0.60185 9 | 4 1.18346 0.62008 0.63849 1.24870 0.60346 0.61852 10 | 5 1.09776 0.65070 0.67010 1.18570 0.62482 0.64220 11 | 6 1.04106 0.66991 0.68980 1.13266 0.64475 0.66305 12 | 7 0.99666 0.68388 0.70494 1.09537 0.65602 0.67377 13 | 8 0.96618 0.69302 0.71246 1.09222 0.65708 0.67593 14 | 9 0.94203 0.70331 0.72444 1.08697 0.65973 0.67758 15 | 10 0.97210 0.69319 0.71361 1.13130 0.64349 0.66277 16 | 11 0.94185 0.70034 0.72077 1.06927 0.66422 0.68254 17 | 12 0.90681 0.71162 0.73203 1.10544 0.65385 0.67225 18 | 13 0.89681 0.71664 0.73748 1.06477 0.66634 0.68383 19 | 14 0.88594 0.71862 0.73899 1.05005 0.67086 0.68977 20 | 15 0.85348 0.72882 0.74860 1.03753 0.67561 0.69378 21 | 16 0.83900 0.73219 0.75253 1.02125 0.68006 0.69842 22 | 17 0.85546 0.72762 0.74804 1.02231 0.67975 0.69702 23 | 18 0.88970 0.71781 0.73993 1.09059 0.65702 0.67535 24 | 19 0.85222 0.72879 0.74975 1.03357 0.67499 0.69340 25 | 20 0.82390 0.73606 0.75666 1.04206 0.67519 
0.69332 26 | 21 0.81828 0.73935 0.75946 1.03102 0.67692 0.69402 27 | 22 0.82729 0.73598 0.75632 1.03026 0.67652 0.69518 28 | 23 0.81493 0.74029 0.76056 1.02104 0.68062 0.69915 29 | 24 0.80424 0.74274 0.76298 1.02383 0.67890 0.69737 30 | 25 0.79010 0.74704 0.76666 1.01882 0.68234 0.70142 31 | 26 0.83546 0.73324 0.75373 1.04469 0.67254 0.69111 32 | 27 0.84069 0.73183 0.75251 1.04490 0.67246 0.69196 33 | 28 0.80630 0.74415 0.76468 1.03620 0.67650 0.69472 34 | 29 0.81101 0.73976 0.75974 1.03646 0.67529 0.69339 35 | 30 0.80122 0.74420 0.76318 1.03366 0.67632 0.69395 36 | 31 0.79146 0.74794 0.76751 1.01649 0.68105 0.69818 37 | 32 0.77188 0.75212 0.77151 1.01227 0.68379 0.70140 38 | 33 0.78085 0.74998 0.77018 1.05390 0.66893 0.68683 39 | 34 0.80553 0.74276 0.76285 1.02599 0.67986 0.69858 40 | 35 0.77162 0.75390 0.77375 1.02161 0.68229 0.70121 41 | 36 0.75931 0.75585 0.77496 1.02276 0.68218 0.69859 42 | 37 0.76624 0.75386 0.77317 1.03584 0.67669 0.69334 43 | 38 0.76845 0.75410 0.77429 1.02457 0.67940 0.69615 44 | 39 0.75667 0.75565 0.77572 1.13145 0.64526 0.66538 45 | 40 0.79653 0.74554 0.76628 1.02358 0.67857 0.69565 46 | 41 0.76865 0.75551 0.77562 1.03407 0.67770 0.69487 47 | 42 0.78176 0.74936 0.77012 1.01066 0.68474 0.70089 48 | 43 0.74570 0.76056 0.77944 1.01879 0.67994 0.69609 49 | 44 0.74710 0.76194 0.78118 1.03853 0.67402 0.69088 50 | 45 0.74391 0.76042 0.78006 1.01421 0.68294 0.70049 51 | 46 0.73429 0.76276 0.78202 1.01107 0.68648 0.70384 52 | 47 0.72454 0.76598 0.78492 1.02472 0.67910 0.69592 53 | 48 0.74477 0.75851 0.77822 1.05119 0.67329 0.69301 54 | 49 0.82890 0.73700 0.75960 1.04717 0.67097 0.68957 55 | 50 0.75401 0.75596 0.77483 1.02777 0.67779 0.69461 56 | -------------------------------------------------------------------------------- /models/2020-09-23_112843/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-23_112843` 2 | INFO:root:Saving configuration file in 
`/home/paperspace/learning-to-learn/models/2020-09-23_112843/config.json` 3 | cuda 4 | Namespace(batch_size=25, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.001, num_batches=100, num_epochs=50, num_shots=10, num_shots_test=10, num_steps=5, num_training_samples=100, num_ways=10, num_workers=1, output_folder='./models', random_seed=123, step_size=0.01, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 1.83773 0.38704 0.38881 1.53362 0.52109 0.52592 7 | 2 1.27224 0.60436 0.61833 1.24854 0.60988 0.62419 8 | 3 1.07649 0.66488 0.68016 1.12595 0.64985 0.66237 9 | 4 0.99270 0.69046 0.70575 1.07114 0.66636 0.67792 10 | 5 0.95171 0.70480 0.71913 1.06959 0.66811 0.68035 11 | 6 0.89183 0.72058 0.73391 1.00972 0.68381 0.69641 12 | 7 0.85825 0.73192 0.74557 1.01470 0.68528 0.69774 13 | 8 0.84111 0.73675 0.75089 0.99380 0.68883 0.70224 14 | 9 0.80266 0.74880 0.76235 0.96008 0.70154 0.71411 15 | 10 0.79084 0.75332 0.76631 0.95014 0.70329 0.71594 16 | 11 0.77013 0.75908 0.77187 0.93503 0.70790 0.71840 17 | 12 0.77508 0.75812 0.77102 0.94907 0.70344 0.71496 18 | 13 0.76222 0.76196 0.77457 0.91884 0.71292 0.72438 19 | 14 0.72339 0.77535 0.78818 0.91174 0.71681 0.72942 20 | 15 0.75481 0.76402 0.77683 0.94989 0.70357 0.71568 21 | 16 0.72151 0.77422 0.78673 0.90336 0.71938 0.73000 22 | 17 0.72758 0.77355 0.78626 0.93596 0.70984 0.72104 23 | 18 0.69474 0.78011 0.79247 0.90050 0.71875 0.72921 24 | 19 0.69161 0.78114 0.79290 0.92286 0.71286 0.72248 25 | 20 0.68119 0.78446 0.79593 0.90446 0.71864 0.72904 26 | 21 0.67238 0.78774 0.79900 0.91251 0.71640 0.72737 27 | 22 0.68286 0.78361 0.79542 0.90615 0.71885 0.72945 28 | 23 0.66534 0.78990 0.80141 0.89427 0.72138 0.73182 29 | 24 0.65586 0.79200 0.80317 0.89065 0.72379 0.73250 30 | 25 0.65480 0.79224 0.80392 0.89069 0.72104 0.73134 31 | 26 0.64099 0.79607 0.80729 0.90247 0.71985 0.73065 32 | 27 0.63508 0.79832 0.80932 0.89617 0.72097 
0.73111 33 | 28 0.64047 0.79644 0.80784 0.91738 0.71390 0.72569 34 | 29 0.67086 0.78746 0.80002 0.89190 0.72296 0.73496 35 | 30 0.62256 0.80184 0.81343 0.89312 0.72210 0.73219 36 | 31 0.62119 0.80156 0.81260 0.90626 0.71910 0.72905 37 | 32 0.65610 0.79288 0.80476 0.90557 0.71769 0.72893 38 | 33 0.63721 0.79869 0.81064 0.89674 0.72086 0.73188 39 | 34 0.69812 0.77900 0.79154 0.92456 0.71085 0.72251 40 | 35 0.66193 0.79080 0.80284 0.89948 0.72056 0.73237 41 | 36 0.62954 0.79958 0.81081 0.89138 0.72111 0.73220 42 | 37 0.62425 0.80053 0.81184 0.89953 0.71975 0.72960 43 | 38 0.62496 0.80091 0.81211 0.89404 0.72215 0.73279 44 | 39 0.62052 0.80140 0.81262 0.92626 0.71115 0.72183 45 | 40 0.61482 0.80259 0.81383 0.90034 0.72167 0.73227 46 | 41 0.67484 0.78748 0.79992 0.94778 0.70470 0.71664 47 | 42 0.67451 0.78850 0.80101 0.94502 0.70580 0.71675 48 | 43 0.63897 0.79794 0.80986 0.89730 0.72123 0.73238 49 | 44 0.61397 0.80443 0.81537 0.90618 0.71925 0.72953 50 | 45 0.62623 0.80054 0.81234 0.90894 0.71681 0.72763 51 | 46 0.61982 0.80257 0.81387 0.87820 0.72647 0.73740 52 | 47 0.59374 0.80944 0.82067 0.88513 0.72660 0.73660 53 | 48 0.59558 0.80853 0.81977 0.89674 0.72057 0.73100 54 | 49 0.60711 0.80537 0.81634 0.89657 0.72155 0.73089 55 | 50 0.61937 0.80216 0.81363 0.89517 0.72203 0.73196 56 | -------------------------------------------------------------------------------- /models/2020-09-24_175224/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-24_175224` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-24_175224/config.json` 3 | cuda 4 | Namespace(batch_size=10, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.0005, num_batches=100, num_epochs=50, num_shots=5, num_shots_test=5, num_steps=5, num_training_samples=20, num_ways=10, num_workers=1, output_folder='./models', random_seed=123, 
step_size=0.005, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 2.25284 0.16770 0.15065 2.16956 0.21560 0.20670 7 | 2 2.05237 0.28216 0.27570 1.94560 0.34102 0.33378 8 | 3 1.81313 0.39736 0.40108 1.74709 0.42626 0.42913 9 | 4 1.62882 0.46952 0.47700 1.60610 0.47402 0.47799 10 | 5 1.50197 0.51014 0.52460 1.54158 0.49958 0.50384 11 | 6 1.41479 0.53390 0.54960 1.46171 0.52604 0.53595 12 | 7 1.34532 0.56160 0.57918 1.39404 0.55214 0.56490 13 | 8 1.30660 0.57606 0.59536 1.38022 0.55452 0.56733 14 | 9 1.24658 0.59556 0.61566 1.33345 0.56956 0.58551 15 | 10 1.20131 0.61162 0.63280 1.35940 0.55872 0.57634 16 | 11 1.17779 0.61956 0.64050 1.35021 0.56776 0.58441 17 | 12 1.13270 0.63958 0.66103 1.29688 0.58628 0.60068 18 | 13 1.06553 0.65712 0.67833 1.26701 0.59512 0.61364 19 | 14 1.02987 0.66664 0.68869 1.29414 0.58658 0.60375 20 | 15 1.00868 0.67546 0.69792 1.26386 0.59704 0.61160 21 | 16 0.97869 0.68294 0.70424 1.26821 0.59108 0.60696 22 | 17 0.95648 0.69296 0.71641 1.27562 0.59290 0.61211 23 | 18 0.93728 0.69542 0.71815 1.29006 0.58872 0.60729 24 | 19 0.92075 0.70220 0.72545 1.29956 0.58666 0.60359 25 | 20 0.92053 0.70144 0.72516 1.28584 0.58792 0.60608 26 | 21 0.91788 0.70296 0.72648 1.28803 0.58886 0.60753 27 | 22 0.86233 0.71636 0.73931 1.31195 0.58320 0.60130 28 | 23 0.85721 0.72000 0.74259 1.32907 0.57564 0.59205 29 | 24 0.90943 0.70352 0.72697 1.35191 0.56360 0.57968 30 | 25 0.87562 0.71572 0.73934 1.31588 0.58250 0.59888 31 | 26 0.83069 0.72742 0.74899 1.34317 0.57546 0.59153 32 | 27 0.83969 0.72114 0.74285 1.31305 0.58460 0.60309 33 | 28 0.80977 0.73438 0.75651 1.43735 0.53630 0.55494 34 | 29 0.95004 0.69402 0.71696 1.33192 0.57844 0.59483 35 | 30 0.81444 0.73570 0.75758 1.33447 0.57752 0.59407 36 | 31 0.81311 0.73264 0.75640 1.34619 0.57036 0.58783 37 | 32 0.78236 0.74138 0.76337 1.35899 0.57668 0.59328 38 | 33 0.74932 0.75296 0.77389 1.34346 0.58206 0.59874 39 | 34 0.74671 0.75436 0.77711 1.36873 0.57428 
0.58824 40 | 35 0.72468 0.75982 0.78319 1.37206 0.57284 0.58833 41 | 36 0.71378 0.76354 0.78474 1.36834 0.57472 0.58831 42 | 37 0.69868 0.76692 0.78939 1.39146 0.56168 0.57725 43 | 38 0.73776 0.75562 0.77754 1.35370 0.57144 0.58620 44 | 39 0.70389 0.76880 0.79049 1.35539 0.57938 0.59474 45 | 40 0.84930 0.71926 0.74180 1.55633 0.49886 0.51302 46 | 41 1.07845 0.65116 0.67407 1.36626 0.56084 0.57899 47 | 42 0.87683 0.71644 0.74035 1.35882 0.56292 0.58235 48 | 43 0.77645 0.74792 0.77018 1.37335 0.56868 0.58759 49 | 44 0.70891 0.76738 0.79114 1.37505 0.57162 0.58969 50 | 45 0.72367 0.76100 0.78248 1.37393 0.57246 0.58881 51 | 46 0.67520 0.77442 0.79789 1.42210 0.56190 0.57737 52 | 47 0.66887 0.77754 0.79806 1.41311 0.56710 0.58194 53 | 48 0.68296 0.77360 0.79577 1.41960 0.56040 0.57516 54 | 49 0.64036 0.78296 0.80373 1.42530 0.56598 0.58104 55 | 50 0.65217 0.78152 0.80293 1.42890 0.55902 0.57353 56 | -------------------------------------------------------------------------------- /models/2020-09-24_175635/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-24_175635` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-24_175635/config.json` 3 | cuda 4 | Namespace(batch_size=10, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.0005, num_batches=100, num_epochs=50, num_shots=1, num_shots_test=1, num_steps=5, num_training_samples=20, num_ways=10, num_workers=1, output_folder='./models', random_seed=123, step_size=0.005, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 2.30065 0.13700 0.07756 2.27198 0.14950 0.09897 7 | 2 2.26413 0.15510 0.10110 2.25105 0.16940 0.11038 8 | 3 2.22249 0.18640 0.12251 2.19765 0.20890 0.14079 9 | 4 2.16536 0.22800 0.15569 2.14452 0.23840 0.15957 10 | 5 2.10605 0.25400 0.17205 2.09693 0.26280 0.17654 11 | 6 2.07247 
0.28020 0.19361 2.05832 0.28400 0.19351 12 | 7 2.01203 0.30810 0.21810 2.01176 0.30890 0.21777 13 | 8 1.96432 0.32890 0.23309 1.98339 0.32290 0.23279 14 | 9 1.93509 0.33700 0.24347 1.96043 0.32480 0.23261 15 | 10 1.91289 0.34060 0.24462 1.96993 0.33040 0.23653 16 | 11 1.88340 0.35690 0.26131 1.96351 0.32740 0.23596 17 | 12 1.86454 0.36320 0.26781 1.92800 0.34270 0.24862 18 | 13 1.83334 0.37410 0.27505 1.90366 0.34920 0.25310 19 | 14 1.83696 0.37480 0.27717 1.90260 0.35340 0.25763 20 | 15 1.79722 0.38780 0.29093 1.90043 0.35530 0.26068 21 | 16 1.80889 0.38750 0.28874 1.89901 0.35100 0.25577 22 | 17 1.78383 0.39300 0.29603 1.86455 0.36640 0.27211 23 | 18 1.77918 0.39280 0.29815 1.87229 0.37350 0.27767 24 | 19 1.74481 0.40980 0.30882 1.88713 0.36590 0.27043 25 | 20 1.74497 0.40530 0.30594 1.84760 0.38070 0.28803 26 | 21 1.72947 0.42010 0.32071 1.84602 0.37860 0.28443 27 | 22 1.72450 0.42080 0.32328 1.84737 0.38430 0.28973 28 | 23 1.69132 0.43090 0.32899 1.85054 0.37640 0.28282 29 | 24 1.69701 0.42820 0.33254 1.85205 0.38060 0.28761 30 | 25 1.68719 0.43010 0.33194 1.80997 0.40190 0.30482 31 | 26 1.64793 0.44860 0.34776 1.82341 0.39030 0.29634 32 | 27 1.65395 0.44570 0.34806 1.84151 0.38700 0.29437 33 | 28 1.65666 0.43940 0.34249 1.81335 0.39490 0.29928 34 | 29 1.62889 0.45370 0.35519 1.80998 0.39740 0.30062 35 | 30 1.62929 0.44970 0.35134 1.81332 0.39340 0.29727 36 | 31 1.59699 0.46520 0.36630 1.78841 0.40390 0.30961 37 | 32 1.59531 0.46430 0.36442 1.79829 0.40190 0.30851 38 | 33 1.57815 0.47020 0.37083 1.78136 0.40350 0.30702 39 | 34 1.58164 0.47030 0.37038 1.78284 0.40980 0.31543 40 | 35 1.55151 0.47840 0.37940 1.79633 0.40780 0.31097 41 | 36 1.56419 0.47220 0.37392 1.81190 0.40130 0.30562 42 | 37 1.57455 0.47550 0.37963 1.79243 0.40170 0.30756 43 | 38 1.54881 0.48290 0.38443 1.77711 0.41190 0.31578 44 | 39 1.54408 0.47460 0.37707 1.79328 0.40560 0.30923 45 | 40 1.52803 0.48620 0.38547 1.79710 0.40020 0.30698 46 | 41 1.51969 0.49020 0.39175 1.81702 0.39880 0.30479 47 
| 42 1.53506 0.48450 0.38403 1.78195 0.40330 0.30776 48 | 43 1.51303 0.49090 0.39277 1.78928 0.40350 0.30990 49 | 44 1.51474 0.49500 0.39469 1.79349 0.40660 0.31369 50 | 45 1.50772 0.49710 0.39872 1.81354 0.39880 0.30455 51 | 46 1.49820 0.49910 0.39866 1.79745 0.40090 0.30893 52 | 47 1.49830 0.49490 0.39396 1.79297 0.40710 0.31049 53 | 48 1.47912 0.50360 0.40367 1.78672 0.41250 0.31562 54 | 49 1.46118 0.51520 0.41542 1.81763 0.40440 0.30984 55 | 50 1.47469 0.49970 0.39892 1.76470 0.41380 0.31864 56 | -------------------------------------------------------------------------------- /models/2020-09-24_182934/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-24_182934` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-24_182934/config.json` 3 | cuda 4 | Namespace(batch_size=10, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.0005, num_batches=100, num_epochs=50, num_shots=5, num_shots_test=5, num_steps=5, num_training_samples=20, num_ways=5, num_workers=1, output_folder='./models', random_seed=123, step_size=0.005, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 1.40506 0.42980 0.43685 1.28350 0.49936 0.50513 7 | 2 1.17499 0.55260 0.56167 1.13042 0.57372 0.58392 8 | 3 1.05465 0.60300 0.61647 1.06727 0.60336 0.61248 9 | 4 0.98270 0.62932 0.64237 1.03539 0.61448 0.62525 10 | 5 0.94142 0.64980 0.66382 0.98793 0.63676 0.64981 11 | 6 0.90115 0.66660 0.68245 0.97970 0.63856 0.65119 12 | 7 0.87899 0.67492 0.69146 0.95085 0.65516 0.66805 13 | 8 0.83588 0.69788 0.71675 0.94317 0.65172 0.66630 14 | 9 0.80797 0.70736 0.72475 0.91723 0.66772 0.68365 15 | 10 0.81374 0.70564 0.72189 0.90365 0.67580 0.69170 16 | 11 0.79365 0.71600 0.73437 0.89101 0.67560 0.68999 17 | 12 0.79244 0.71728 0.73587 0.89566 0.67764 0.69264 18 | 13 0.77952 0.72156 
0.73901 0.86481 0.68756 0.70335 19 | 14 0.75431 0.72624 0.74379 0.86240 0.69420 0.71168 20 | 15 0.75105 0.73056 0.74647 0.85705 0.69044 0.70666 21 | 16 0.74121 0.73336 0.74937 0.83507 0.69972 0.71571 22 | 17 0.72611 0.74204 0.75993 0.85281 0.69184 0.70985 23 | 18 0.71138 0.74656 0.76532 0.84903 0.69596 0.71417 24 | 19 0.71290 0.74340 0.76123 0.80787 0.71532 0.72973 25 | 20 0.68643 0.75408 0.77282 0.81855 0.71232 0.72829 26 | 21 0.68005 0.76300 0.78010 0.81984 0.71160 0.72796 27 | 22 0.66654 0.76232 0.78066 0.79092 0.71800 0.73416 28 | 23 0.66203 0.76536 0.78491 0.77740 0.72400 0.74054 29 | 24 0.65137 0.76684 0.78540 0.77503 0.72432 0.74293 30 | 25 0.63795 0.77580 0.79316 0.77465 0.72748 0.74383 31 | 26 0.62320 0.78080 0.79912 0.78778 0.72172 0.73982 32 | 27 0.66548 0.76676 0.78672 0.82112 0.71648 0.73569 33 | 28 0.63293 0.77984 0.79608 0.76783 0.72720 0.74203 34 | 29 0.59464 0.79124 0.80830 0.76520 0.72728 0.74435 35 | 30 0.57814 0.79404 0.81217 0.73264 0.74116 0.75609 36 | 31 0.55938 0.80360 0.82101 0.76360 0.73092 0.74788 37 | 32 0.55426 0.80408 0.82122 0.73410 0.74272 0.76038 38 | 33 0.54688 0.80596 0.82362 0.75699 0.73616 0.75432 39 | 34 0.51460 0.81940 0.83539 0.72725 0.74832 0.76301 40 | 35 0.50658 0.82136 0.83676 0.72386 0.74464 0.76271 41 | 36 0.50210 0.82256 0.83798 0.72475 0.74780 0.76342 42 | 37 0.46162 0.83632 0.85177 0.72598 0.75052 0.76607 43 | 38 0.46680 0.83452 0.84980 0.71079 0.75504 0.77078 44 | 39 0.46666 0.83732 0.85320 0.71328 0.75272 0.76745 45 | 40 0.44470 0.84168 0.85729 0.72798 0.74736 0.76354 46 | 41 0.43680 0.84452 0.86000 0.72203 0.75412 0.76751 47 | 42 0.41825 0.85104 0.86486 0.73781 0.74796 0.76264 48 | 43 0.45068 0.84180 0.85763 0.73730 0.74296 0.76101 49 | 44 0.43126 0.84724 0.86123 0.72786 0.74328 0.75892 50 | 45 0.43608 0.84624 0.86060 0.69876 0.75660 0.77393 51 | 46 0.40962 0.85496 0.86976 0.72479 0.75016 0.76622 52 | 47 0.40841 0.85472 0.86987 0.72160 0.75072 0.76490 53 | 48 0.39904 0.85748 0.87226 0.72770 0.74816 0.76431 54 | 49 
0.38350 0.86332 0.87793 0.72270 0.74612 0.76116 55 | 50 0.37276 0.86760 0.88046 0.71607 0.75544 0.77183 56 | -------------------------------------------------------------------------------- /models/2020-09-24_220708/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-24_220708` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-24_220708/config.json` 3 | cuda 4 | Namespace(batch_size=10, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.0005, num_batches=100, num_epochs=50, num_shots=1, num_shots_test=1, num_steps=5, num_training_samples=20, num_ways=5, num_workers=1, output_folder='./models', random_seed=123, step_size=0.005, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 1.54804 0.31580 0.21949 1.52547 0.33320 0.24405 7 | 2 1.49343 0.36400 0.27088 1.48668 0.36880 0.27523 8 | 3 1.44901 0.39140 0.29728 1.44543 0.39760 0.30283 9 | 4 1.41746 0.41340 0.31727 1.40729 0.42700 0.33033 10 | 5 1.38532 0.42460 0.32735 1.37060 0.44260 0.34343 11 | 6 1.33943 0.44800 0.34705 1.35836 0.45080 0.34933 12 | 7 1.33711 0.46460 0.36637 1.34018 0.45060 0.35278 13 | 8 1.29725 0.47700 0.37732 1.31802 0.46640 0.36950 14 | 9 1.29145 0.48220 0.38530 1.31037 0.48000 0.38122 15 | 10 1.26545 0.49660 0.39930 1.31622 0.47260 0.37538 16 | 11 1.25381 0.50120 0.40072 1.29429 0.48860 0.39092 17 | 12 1.23944 0.51460 0.41900 1.30152 0.47580 0.38063 18 | 13 1.21442 0.52180 0.42353 1.27961 0.49520 0.40185 19 | 14 1.22395 0.51780 0.42415 1.27594 0.50060 0.40523 20 | 15 1.21182 0.52280 0.42598 1.28044 0.48960 0.39511 21 | 16 1.19398 0.53480 0.43938 1.25297 0.50460 0.40635 22 | 17 1.20292 0.52940 0.43587 1.23181 0.52000 0.42500 23 | 18 1.19350 0.52700 0.43155 1.25308 0.50480 0.41263 24 | 19 1.17469 0.54380 0.45052 1.22044 0.51860 0.42545 25 | 20 1.14732 0.55840 0.46365 
1.22801 0.51960 0.42453 26 | 21 1.16509 0.54260 0.44552 1.24204 0.50680 0.41042 27 | 22 1.14744 0.55440 0.46332 1.24093 0.50740 0.41300 28 | 23 1.12945 0.56280 0.46713 1.22083 0.52580 0.42857 29 | 24 1.09446 0.57520 0.47907 1.19823 0.54080 0.44685 30 | 25 1.11239 0.56600 0.47465 1.21334 0.53700 0.44608 31 | 26 1.08236 0.58040 0.49082 1.18138 0.54220 0.45300 32 | 27 1.09683 0.58740 0.50082 1.19860 0.53780 0.44680 33 | 28 1.08882 0.58520 0.49545 1.17093 0.55420 0.46157 34 | 29 1.06821 0.59260 0.50205 1.15487 0.55580 0.46847 35 | 30 1.04571 0.59780 0.51033 1.19200 0.53920 0.44710 36 | 31 1.02184 0.60760 0.51632 1.16940 0.55640 0.46840 37 | 32 1.04361 0.60300 0.51762 1.15988 0.55380 0.46512 38 | 33 1.01612 0.61380 0.52517 1.12350 0.57620 0.48707 39 | 34 0.99522 0.62460 0.53840 1.13639 0.56680 0.47655 40 | 35 1.00956 0.61300 0.52590 1.13370 0.56980 0.47783 41 | 36 1.01552 0.61980 0.53630 1.10910 0.58360 0.49683 42 | 37 0.94634 0.64220 0.55307 1.10409 0.58620 0.49880 43 | 38 0.98890 0.61440 0.52545 1.11215 0.57900 0.49518 44 | 39 0.98068 0.63040 0.54553 1.08471 0.59060 0.50460 45 | 40 0.96085 0.63900 0.55503 1.11902 0.57060 0.48233 46 | 41 0.97089 0.63180 0.54923 1.12898 0.58780 0.49803 47 | 42 0.95039 0.63780 0.55490 1.08918 0.59980 0.51198 48 | 43 0.94910 0.64440 0.56088 1.09976 0.58800 0.50247 49 | 44 0.96468 0.63080 0.54778 1.08039 0.58400 0.49277 50 | 45 0.97756 0.63900 0.55240 1.10253 0.57380 0.48443 51 | 46 0.96297 0.63600 0.54937 1.10792 0.58240 0.49590 52 | 47 0.92932 0.64800 0.56543 1.10893 0.58500 0.49980 53 | 48 0.95206 0.63680 0.55093 1.09730 0.58540 0.49967 54 | 49 0.91934 0.65400 0.56907 1.09468 0.58780 0.50205 55 | 50 0.91694 0.66020 0.57808 1.06168 0.59860 0.51305 56 | -------------------------------------------------------------------------------- /models/2020-09-24_220801/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-24_220801` 2 | INFO:root:Saving 
configuration file in `/home/paperspace/learning-to-learn/models/2020-09-24_220801/config.json` 3 | cuda 4 | Namespace(batch_size=10, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.0005, num_batches=100, num_epochs=50, num_shots=10, num_shots_test=10, num_steps=5, num_training_samples=20, num_ways=5, num_workers=1, output_folder='./models', random_seed=123, step_size=0.005, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 1.31541 0.48180 0.48725 1.13849 0.57836 0.58065 7 | 2 1.02484 0.62696 0.63431 0.98288 0.64014 0.64650 8 | 3 0.89400 0.67408 0.68393 0.93114 0.65912 0.66703 9 | 4 0.86402 0.68490 0.69346 0.89747 0.67498 0.68312 10 | 5 0.80686 0.70894 0.71948 0.87769 0.68258 0.69157 11 | 6 0.76717 0.72214 0.73113 0.84183 0.69812 0.70598 12 | 7 0.75316 0.72694 0.73715 0.83137 0.70444 0.71357 13 | 8 0.72644 0.74216 0.75172 0.80769 0.71490 0.72364 14 | 9 0.71134 0.74978 0.76079 0.76856 0.73026 0.74017 15 | 10 0.67314 0.76290 0.77362 0.77749 0.72894 0.73943 16 | 11 0.77026 0.72916 0.73880 0.82870 0.70748 0.71700 17 | 12 0.69024 0.75780 0.76855 0.76737 0.72866 0.73758 18 | 13 0.65611 0.76860 0.77703 0.74644 0.73842 0.74828 19 | 14 0.64289 0.77218 0.78223 0.76148 0.73174 0.74081 20 | 15 0.64161 0.77386 0.78383 0.71430 0.75286 0.76285 21 | 16 0.69326 0.75546 0.76655 0.76632 0.73396 0.74329 22 | 17 0.64490 0.77260 0.78144 0.74482 0.73852 0.74818 23 | 18 0.61638 0.78236 0.79309 0.72920 0.74680 0.75565 24 | 19 0.60985 0.78704 0.79728 0.71608 0.75238 0.76268 25 | 20 0.56702 0.80270 0.81282 0.71740 0.75054 0.76093 26 | 21 0.57212 0.80092 0.81141 0.71478 0.75024 0.75992 27 | 22 0.55658 0.80496 0.81498 0.71623 0.75140 0.76113 28 | 23 0.52604 0.81762 0.82743 0.69581 0.75786 0.76798 29 | 24 0.52751 0.81710 0.82649 0.71713 0.74956 0.76004 30 | 25 0.54986 0.81148 0.82250 0.71203 0.75126 0.76072 31 | 26 0.49431 0.83094 0.84071 0.71910 0.74614 0.75689 32 | 27 0.47450 0.83616 
0.84576 0.72152 0.74618 0.75515 33 | 28 0.49999 0.82680 0.83695 0.71449 0.75182 0.76478 34 | 29 0.46327 0.83954 0.84949 0.71206 0.75286 0.76309 35 | 30 0.51995 0.82058 0.83146 0.70272 0.75454 0.76548 36 | 31 0.54461 0.81158 0.82246 0.72145 0.75024 0.76085 37 | 32 0.45552 0.84238 0.85196 0.70616 0.75634 0.76683 38 | 33 0.41817 0.85532 0.86381 0.70399 0.75692 0.76711 39 | 34 0.39793 0.86192 0.86982 0.72272 0.75122 0.76009 40 | 35 0.38496 0.86518 0.87352 0.71094 0.75606 0.76530 41 | 36 0.40119 0.86064 0.86909 0.71770 0.75632 0.76596 42 | 37 0.40959 0.85722 0.86617 0.70178 0.75856 0.76828 43 | 38 0.35923 0.87208 0.88060 0.70900 0.76002 0.76923 44 | 39 0.35759 0.87570 0.88307 0.71872 0.75562 0.76596 45 | 40 0.35000 0.87708 0.88476 0.71145 0.75842 0.76763 46 | 41 0.32834 0.88280 0.89021 0.71397 0.75656 0.76674 47 | 42 0.30522 0.89268 0.89977 0.72637 0.75824 0.76704 48 | 43 0.30888 0.88934 0.89682 0.71941 0.75790 0.76749 49 | 44 0.30092 0.89342 0.90040 0.73393 0.75116 0.76064 50 | 45 0.31948 0.88950 0.89739 0.73018 0.75840 0.76775 51 | 46 0.35518 0.87506 0.88268 0.72287 0.75500 0.76411 52 | 47 0.30570 0.89166 0.89848 0.74065 0.75282 0.76202 53 | 48 0.31553 0.89122 0.89856 0.72289 0.75546 0.76403 54 | 49 0.27288 0.90492 0.91154 0.73867 0.75430 0.76349 55 | 50 0.27333 0.90368 0.91027 0.77121 0.74714 0.75571 56 | -------------------------------------------------------------------------------- /models/2020-09-24_181123/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-24_181123` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-24_181123/config.json` 3 | cuda 4 | Namespace(batch_size=10, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.0005, num_batches=100, num_epochs=50, num_shots=10, num_shots_test=10, num_steps=5, num_training_samples=20, num_ways=10, num_workers=1, 
output_folder='./models', random_seed=123, step_size=0.005, use_cuda=True, verbose=True) 5 | epoch train loss train acc train prec val loss val acc val prec 6 | 1 2.21670 0.18830 0.17845 2.09227 0.26671 0.26321 7 | 2 1.90070 0.36185 0.36171 1.76134 0.42953 0.42894 8 | 3 1.61254 0.48093 0.48991 1.55545 0.50399 0.50970 9 | 4 1.45039 0.53997 0.55261 1.44512 0.53863 0.54847 10 | 5 1.32141 0.58249 0.59789 1.34732 0.57708 0.58727 11 | 6 1.21462 0.61683 0.63253 1.26815 0.59705 0.61374 12 | 7 1.12133 0.64629 0.66335 1.26631 0.59797 0.61068 13 | 8 1.07132 0.66359 0.67859 1.18511 0.62646 0.63890 14 | 9 1.01891 0.67879 0.69387 1.17282 0.62897 0.64089 15 | 10 0.95250 0.69943 0.71599 1.12048 0.64593 0.65843 16 | 11 0.98044 0.69268 0.70793 1.18015 0.62578 0.63928 17 | 12 0.92576 0.70500 0.72078 1.14571 0.63526 0.64858 18 | 13 0.90284 0.71339 0.72986 1.24078 0.60548 0.62157 19 | 14 0.93934 0.70454 0.72240 1.21061 0.61352 0.62772 20 | 15 0.87427 0.72102 0.73721 1.19284 0.61800 0.63057 21 | 16 0.83880 0.73244 0.74904 1.18263 0.62227 0.63257 22 | 17 0.81602 0.74057 0.75740 1.21323 0.61361 0.62463 23 | 18 0.79183 0.74839 0.76458 1.21557 0.61316 0.62467 24 | 19 0.77759 0.75113 0.76637 1.19905 0.61759 0.63034 25 | 20 0.73910 0.76362 0.77909 1.22865 0.60748 0.62014 26 | 21 0.74678 0.75942 0.77529 1.23100 0.60748 0.61817 27 | 22 0.71901 0.77090 0.78537 1.23707 0.60558 0.61756 28 | 23 0.71448 0.77150 0.78676 1.26018 0.59809 0.61234 29 | 24 0.71825 0.77104 0.78664 1.25524 0.60001 0.61618 30 | 25 0.72720 0.76802 0.78387 1.23980 0.60334 0.61871 31 | 26 0.70772 0.77643 0.79106 1.24580 0.60336 0.61799 32 | 27 0.71051 0.77198 0.78776 1.32257 0.57397 0.58564 33 | 28 0.75325 0.75994 0.77642 1.27240 0.59420 0.61055 34 | 29 0.69438 0.78002 0.79659 1.23693 0.60785 0.62320 35 | 30 0.63234 0.79526 0.80992 1.24842 0.60437 0.61672 36 | 31 0.62771 0.80251 0.81641 1.26612 0.60662 0.62083 37 | 32 0.57072 0.81415 0.82678 1.29221 0.60110 0.61307 38 | 33 0.57612 0.81355 0.82685 1.28667 0.60072 0.61331 39 | 34 
0.62352 0.79876 0.81235 1.27268 0.60068 0.61503 40 | 35 0.57317 0.81634 0.82941 1.30731 0.59235 0.60454 41 | 36 0.71150 0.76816 0.78326 1.26816 0.59479 0.60665 42 | 37 0.63034 0.79779 0.81128 1.29344 0.59255 0.60689 43 | 38 0.56204 0.81861 0.83208 1.32906 0.58593 0.59968 44 | 39 0.55911 0.81985 0.83329 1.28347 0.60415 0.61662 45 | 40 0.64687 0.79253 0.80730 1.28120 0.59749 0.61147 46 | 41 0.56468 0.81870 0.83249 1.29691 0.59163 0.60695 47 | 42 0.53984 0.82541 0.83805 1.31599 0.59513 0.61178 48 | 43 0.53956 0.82648 0.83880 1.32141 0.59442 0.60593 49 | 44 0.51689 0.83130 0.84371 1.34051 0.59326 0.60608 50 | 45 0.54231 0.82095 0.83310 1.37322 0.56817 0.58292 51 | 46 0.61965 0.79915 0.81317 1.31508 0.59459 0.60792 52 | 47 0.54352 0.82468 0.83783 1.32309 0.59380 0.60740 53 | 48 0.50282 0.83729 0.84917 1.29680 0.59495 0.60860 54 | 49 0.54454 0.82719 0.83929 1.27873 0.59461 0.60619 55 | 50 0.53562 0.82865 0.84077 1.34717 0.58874 0.59982 56 | -------------------------------------------------------------------------------- /models/2020-09-22_164258/run.txt: -------------------------------------------------------------------------------- 1 | DEBUG:root:Creating folder `./models/2020-09-22_164258` 2 | INFO:root:Saving configuration file in `/home/paperspace/learning-to-learn/models/2020-09-22_164258/config.json` 3 | cuda 4 | Namespace(batch_size=25, dataset='quickdraw', first_order=False, folder='/storage/smb79ck2/ndata/', hidden_size=20, meta_lr=0.001, num_batches=100, num_epochs=50, num_shots=1, num_shots_test=1, num_steps=5, num_training_samples=100, num_ways=5, num_workers=1, output_folder='./models', random_seed=123, step_size=0.01, use_cuda=True, verbose=True) 5 | MetaConvModel( 6 | (features): MetaSequential( 7 | (layer1): MetaSequential( 8 | (conv): MetaConv2d(1, 20, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) 9 | (norm): BatchNorm2d(20, eps=1e-05, momentum=1.0, affine=True, track_running_stats=False) 10 | (relu): ReLU() 11 | (pool): MaxPool2d(kernel_size=2, 
stride=2, padding=0, dilation=1, ceil_mode=False) 12 | ) 13 | (layer2): MetaSequential( 14 | (conv): MetaConv2d(20, 20, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) 15 | (norm): BatchNorm2d(20, eps=1e-05, momentum=1.0, affine=True, track_running_stats=False) 16 | (relu): ReLU() 17 | (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False) 18 | ) 19 | (layer3): MetaSequential( 20 | (conv): MetaConv2d(20, 20, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) 21 | (norm): BatchNorm2d(20, eps=1e-05, momentum=1.0, affine=True, track_running_stats=False) 22 | (relu): ReLU() 23 | (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False) 24 | ) 25 | (layer4): MetaSequential( 26 | (conv): MetaConv2d(20, 20, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)) 27 | (norm): BatchNorm2d(20, eps=1e-05, momentum=1.0, affine=True, track_running_stats=False) 28 | (relu): ReLU() 29 | (pool): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False) 30 | ) 31 | ) 32 | (classifier): MetaLinear(in_features=20, out_features=5, bias=True) 33 | ) 34 | epoch train loss train acc train prec val loss val acc val prec 35 | 1 1.43258 0.41000 0.30794 1.34697 0.46200 0.36286 36 | 2 1.27732 0.49824 0.40205 1.27291 0.49056 0.39125 37 | 3 1.21528 0.52520 0.42943 1.24274 0.50664 0.41299 38 | 4 1.18446 0.54248 0.44713 1.21986 0.52344 0.42933 39 | 5 1.16230 0.55352 0.46089 1.20545 0.53320 0.44253 40 | 6 1.12908 0.56496 0.47144 1.18460 0.54176 0.45179 41 | 7 1.10608 0.57680 0.48703 1.15630 0.55056 0.46065 42 | 8 1.08517 0.58888 0.49931 1.14657 0.56168 0.47015 43 | 9 1.06485 0.59904 0.51055 1.13285 0.56504 0.47594 44 | 10 1.06584 0.59344 0.50635 1.15150 0.56048 0.47311 45 | 11 1.03341 0.61160 0.52754 1.10767 0.57736 0.49070 46 | 12 1.03014 0.60952 0.52385 1.11119 0.57080 0.48429 47 | 13 1.02316 0.60744 0.52292 1.11651 0.57824 0.49289 48 | 14 1.00536 0.61944 0.53717 1.06608 0.60304 0.52180 49 | 15 0.98760 0.62624 0.54156 1.08593 
0.58672 0.50081 50 | 16 1.00209 0.62608 0.54125 1.06412 0.59392 0.50824 51 | 17 0.98773 0.62672 0.54491 1.06586 0.59752 0.51188 52 | 18 0.95390 0.64568 0.56524 1.04173 0.60608 0.52205 53 | 19 0.95373 0.64192 0.55932 1.06629 0.59664 0.51292 54 | 20 0.94982 0.64432 0.56549 1.04962 0.60240 0.51925 55 | 21 0.96630 0.64104 0.56019 1.05901 0.60240 0.51893 56 | 22 0.95844 0.63360 0.55091 1.06405 0.59864 0.51595 57 | 23 0.94965 0.64312 0.56180 1.05830 0.59944 0.51612 58 | 24 0.92860 0.65616 0.57479 1.06499 0.60064 0.51708 59 | 25 0.92924 0.65088 0.57200 1.03842 0.60888 0.52429 60 | 26 0.91816 0.65440 0.57545 1.02428 0.61312 0.53297 61 | 27 0.92433 0.65160 0.57477 1.05353 0.60496 0.52317 62 | 28 0.92169 0.65512 0.57841 1.00611 0.62320 0.54132 63 | 29 0.90502 0.66168 0.58333 1.02839 0.61320 0.53233 64 | 30 0.90046 0.66264 0.58479 1.00423 0.62128 0.54068 65 | 31 0.88652 0.67152 0.59379 0.99726 0.62152 0.54511 66 | 32 0.88753 0.67112 0.59683 0.99392 0.63104 0.55195 67 | 33 0.86202 0.67576 0.60213 0.98030 0.63528 0.55886 68 | 34 0.88066 0.67056 0.59445 0.98431 0.62496 0.54501 69 | 35 0.87158 0.67088 0.59423 0.95904 0.64880 0.57223 70 | 36 0.84659 0.68288 0.60839 0.98200 0.63240 0.55367 71 | 37 0.84283 0.68528 0.61055 0.99188 0.63536 0.55887 72 | 38 0.85007 0.68448 0.60845 0.95659 0.64680 0.56832 73 | 39 0.84692 0.68536 0.61317 0.98955 0.63752 0.56008 74 | 40 0.85023 0.68304 0.60741 0.96548 0.64160 0.56612 75 | 41 0.83905 0.68680 0.61304 0.96598 0.63920 0.56155 76 | 42 0.82323 0.69152 0.62059 0.97056 0.64336 0.56805 77 | 43 0.84417 0.69168 0.61886 0.96475 0.63648 0.56037 78 | 44 0.85217 0.68024 0.60508 0.94662 0.64936 0.57192 79 | 45 0.81862 0.69448 0.62307 0.95371 0.64336 0.56560 80 | 46 0.83087 0.69096 0.62005 0.94937 0.64600 0.56808 81 | 47 0.82711 0.69008 0.61553 0.95410 0.64784 0.57008 82 | 48 0.80454 0.69824 0.62419 0.97184 0.64136 0.56522 83 | 49 0.80730 0.69432 0.62164 0.95347 0.64352 0.56769 84 | 50 0.83465 0.68840 0.61553 0.92987 0.65312 0.57544 85 | 
-------------------------------------------------------------------------------- /models/2020-09-22_165037/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 1.1174318635463714, "train_acc": 0.5856799879670144, "train_prec": 0.5974414840340617, "val_loss": 0.9788511574268338, "val_acc": 0.6452799820899963, "val_prec": 0.6587047219276427}, {"epoch": 2, "train_loss": 0.9070145517587661, "train_acc": 0.6708799827098846, "train_prec": 0.6860543644428254, "val_loss": 0.9162632828950878, "val_acc": 0.6681119805574416, "val_prec": 0.6816402620077135}, {"epoch": 3, "train_loss": 0.8033794438838958, "train_acc": 0.7127359724044796, "train_prec": 0.7281833875179289, "val_loss": 0.8375817167758942, "val_acc": 0.7003679627180102, "val_prec": 0.7157961148023606}, {"epoch": 4, "train_loss": 0.7640438151359559, "train_acc": 0.7300479698181153, "train_prec": 0.7478310871124265, "val_loss": 0.8309402322769166, "val_acc": 0.7051999664306641, "val_prec": 0.7226480716466904}, {"epoch": 5, "train_loss": 0.7235986661911008, "train_acc": 0.7443679714202879, "train_prec": 0.7606946289539336, "val_loss": 0.8015123653411867, "val_acc": 0.7178239655494691, "val_prec": 0.7342559522390364}, {"epoch": 6, "train_loss": 0.7084317201375964, "train_acc": 0.7507199674844741, "train_prec": 0.767477856874466, "val_loss": 0.7920805931091307, "val_acc": 0.7203999626636507, "val_prec": 0.7376162636280057}, {"epoch": 7, "train_loss": 0.7053783553838732, "train_acc": 0.7511199754476547, "train_prec": 0.768643326163292, "val_loss": 0.7608615821599957, "val_acc": 0.7323039615154268, "val_prec": 0.749841529726982}, {"epoch": 8, "train_loss": 0.6814947551488876, "train_acc": 0.7614079689979553, "train_prec": 0.7790688276290895, "val_loss": 0.76163525223732, "val_acc": 0.7326079589128494, "val_prec": 0.7508297991752623}, {"epoch": 9, "train_loss": 0.6483605748414992, "train_acc": 0.7718239706754685, "train_prec": 
0.7884527409076695, "val_loss": 0.7071695101261138, "val_acc": 0.7529919731616974, "val_prec": 0.7705396443605422}, {"epoch": 10, "train_loss": 0.6164935940504072, "train_acc": 0.7846239787340161, "train_prec": 0.8015791600942611, "val_loss": 0.6970281207561492, "val_acc": 0.7575839698314665, "val_prec": 0.7736925399303436}, {"epoch": 11, "train_loss": 0.5985865098237992, "train_acc": 0.790111978650093, "train_prec": 0.8066679280996324, "val_loss": 0.6889461773633954, "val_acc": 0.761775969862938, "val_prec": 0.778341872692108}, {"epoch": 12, "train_loss": 0.5865057399868967, "train_acc": 0.797055974006653, "train_prec": 0.8134855324029925, "val_loss": 0.6706309437751768, "val_acc": 0.7675999754667283, "val_prec": 0.783244482278824}, {"epoch": 13, "train_loss": 0.5653117620944978, "train_acc": 0.8029759711027146, "train_prec": 0.819780222773552, "val_loss": 0.6639613741636277, "val_acc": 0.7690559715032578, "val_prec": 0.7859745365381243}, {"epoch": 14, "train_loss": 0.5530585691332814, "train_acc": 0.807247977256775, "train_prec": 0.823450071811676, "val_loss": 0.6541480416059496, "val_acc": 0.7737439715862274, "val_prec": 0.7897339153289796}, {"epoch": 15, "train_loss": 0.5493743106722829, "train_acc": 0.8082399803400043, "train_prec": 0.8241627317667007, "val_loss": 0.6411738902330396, "val_acc": 0.7785119777917863, "val_prec": 0.7952133977413178}, {"epoch": 16, "train_loss": 0.5578201761841775, "train_acc": 0.8064479726552964, "train_prec": 0.8223449617624281, "val_loss": 0.6663291579484938, "val_acc": 0.7681919699907301, "val_prec": 0.7843450486660002}, {"epoch": 17, "train_loss": 0.5417287144064905, "train_acc": 0.8111839801073076, "train_prec": 0.8269578880071637, "val_loss": 0.6293922603130342, "val_acc": 0.7833439779281614, "val_prec": 0.7997347825765612}, {"epoch": 18, "train_loss": 0.5242030882835389, "train_acc": 0.818095975518227, "train_prec": 0.8331326949596404, "val_loss": 0.6465851527452469, "val_acc": 0.775167970657349, "val_prec": 
0.7913132268190382}, {"epoch": 19, "train_loss": 0.53195502191782, "train_acc": 0.8147999745607375, "train_prec": 0.8316587793827058, "val_loss": 0.6168914806842802, "val_acc": 0.7860479718446728, "val_prec": 0.8011816310882569}, {"epoch": 20, "train_loss": 0.5155640074610712, "train_acc": 0.8209279739856719, "train_prec": 0.8364618015289307, "val_loss": 0.6416532754898069, "val_acc": 0.778527979850769, "val_prec": 0.7948160701990128}, {"epoch": 21, "train_loss": 0.5229974532127379, "train_acc": 0.8201599699258805, "train_prec": 0.8364210903644562, "val_loss": 0.6450294390320777, "val_acc": 0.7769439649581911, "val_prec": 0.7942221689224245}, {"epoch": 22, "train_loss": 0.5085125207900996, "train_acc": 0.8231679743528366, "train_prec": 0.8389878195524215, "val_loss": 0.61439311593771, "val_acc": 0.7874239712953568, "val_prec": 0.803338545560837}, {"epoch": 23, "train_loss": 0.5002868643403052, "train_acc": 0.827247977256775, "train_prec": 0.8430283898115156, "val_loss": 0.6164802080392837, "val_acc": 0.7874079763889313, "val_prec": 0.804130850434303}, {"epoch": 24, "train_loss": 0.5077125498652457, "train_acc": 0.8231039690971373, "train_prec": 0.8390725159645079, "val_loss": 0.6413125795125961, "val_acc": 0.7782559746503828, "val_prec": 0.7960376030206682}, {"epoch": 25, "train_loss": 0.5002046954631804, "train_acc": 0.8251679724454876, "train_prec": 0.8406594568490979, "val_loss": 0.6193484777212143, "val_acc": 0.7864639806747437, "val_prec": 0.8030675321817398}, {"epoch": 26, "train_loss": 0.4871800535917283, "train_acc": 0.8296799713373185, "train_prec": 0.8446068459749223, "val_loss": 0.626044153571129, "val_acc": 0.7841759693622592, "val_prec": 0.800503343939781}, {"epoch": 27, "train_loss": 0.4914166614413261, "train_acc": 0.8297279697656632, "train_prec": 0.8456077194213867, "val_loss": 0.6331527465581895, "val_acc": 0.7805279773473741, "val_prec": 0.7955446785688399}, {"epoch": 28, "train_loss": 0.480070493221283, "train_acc": 0.8327839750051497, 
"train_prec": 0.8481401693820952, "val_loss": 0.6118014696240426, "val_acc": 0.7890879756212232, "val_prec": 0.8043533724546432}, {"epoch": 29, "train_loss": 0.4703092247247698, "train_acc": 0.835439971089363, "train_prec": 0.8505934596061704, "val_loss": 0.5965093213319775, "val_acc": 0.794527979493141, "val_prec": 0.8099969804286955}, {"epoch": 30, "train_loss": 0.47384844869375226, "train_acc": 0.8337119603157044, "train_prec": 0.8487456035614016, "val_loss": 0.6016733920574189, "val_acc": 0.791823978424072, "val_prec": 0.8075460100173951}, {"epoch": 31, "train_loss": 0.46145181447267525, "train_acc": 0.8381279677152633, "train_prec": 0.8528833013772963, "val_loss": 0.5997274905443192, "val_acc": 0.7924479740858075, "val_prec": 0.8085000145435333}, {"epoch": 32, "train_loss": 0.4566760078072548, "train_acc": 0.8401919680833814, "train_prec": 0.8545577996969222, "val_loss": 0.6124146217107772, "val_acc": 0.7898559796810151, "val_prec": 0.8060140603780748}, {"epoch": 33, "train_loss": 0.4570046254992485, "train_acc": 0.8385279667377473, "train_prec": 0.8535563772916795, "val_loss": 0.6080713868141174, "val_acc": 0.7904319798946382, "val_prec": 0.8064796584844591}, {"epoch": 34, "train_loss": 0.4594721069931983, "train_acc": 0.8408479666709897, "train_prec": 0.8560281676054, "val_loss": 0.599514936208725, "val_acc": 0.7937919706106189, "val_prec": 0.809234010577202}, {"epoch": 35, "train_loss": 0.4533955818414687, "train_acc": 0.841279969215393, "train_prec": 0.8557501804828648, "val_loss": 0.6013991653919217, "val_acc": 0.7920799827575679, "val_prec": 0.8073575425148013}, {"epoch": 36, "train_loss": 0.44683992028236397, "train_acc": 0.8438239675760268, "train_prec": 0.8580043882131575, "val_loss": 0.5905808901786802, "val_acc": 0.7953119832277301, "val_prec": 0.8101857548952103}, {"epoch": 37, "train_loss": 0.4447757944464683, "train_acc": 0.8435359656810761, "train_prec": 0.8571686673164368, "val_loss": 0.600944826602936, "val_acc": 0.7924639791250228, 
"val_prec": 0.8077600312232971}, {"epoch": 38, "train_loss": 0.43946882069110865, "train_acc": 0.8466239696741106, "train_prec": 0.8606646752357481, "val_loss": 0.5985150057077406, "val_acc": 0.7953919863700868, "val_prec": 0.8105984079837799}, {"epoch": 39, "train_loss": 0.4327558439970016, "train_acc": 0.848335964679718, "train_prec": 0.8620394378900531, "val_loss": 0.5922472175955773, "val_acc": 0.7940159726142882, "val_prec": 0.8082892382144927}, {"epoch": 40, "train_loss": 0.43436533957719814, "train_acc": 0.8476479637622836, "train_prec": 0.861710319519043, "val_loss": 0.5955748695135119, "val_acc": 0.7957759779691699, "val_prec": 0.8095985615253446}, {"epoch": 41, "train_loss": 0.42845013260841364, "train_acc": 0.8498559683561326, "train_prec": 0.8639562594890595, "val_loss": 0.5953555122017857, "val_acc": 0.7934239715337754, "val_prec": 0.808783383965492}, {"epoch": 42, "train_loss": 0.4330865895748139, "train_acc": 0.8468959641456604, "train_prec": 0.8605391192436216, "val_loss": 0.5909539127349857, "val_acc": 0.7956959801912308, "val_prec": 0.8103143179416656}, {"epoch": 43, "train_loss": 0.42881637632846825, "train_acc": 0.8498879712820054, "train_prec": 0.8639243477582933, "val_loss": 0.6026123067736626, "val_acc": 0.7922719782590867, "val_prec": 0.806552473902702}, {"epoch": 44, "train_loss": 0.42558187872171394, "train_acc": 0.8498239678144456, "train_prec": 0.8632663154602054, "val_loss": 0.6023366647958754, "val_acc": 0.791599977016449, "val_prec": 0.8062134838104247}, {"epoch": 45, "train_loss": 0.423841605782509, "train_acc": 0.8508799642324446, "train_prec": 0.8641870421171187, "val_loss": 0.6034347766637802, "val_acc": 0.793487978577614, "val_prec": 0.8084927731752397}, {"epoch": 46, "train_loss": 0.42315999478101735, "train_acc": 0.8522079634666438, "train_prec": 0.8659115827083591, "val_loss": 0.5916930985450743, "val_acc": 0.7967039757966995, "val_prec": 0.8109911382198336}, {"epoch": 47, "train_loss": 0.4212264382839205, "train_acc": 
0.8509279638528826, "train_prec": 0.8645780277252195, "val_loss": 0.594048038125038, "val_acc": 0.7943999826908114, "val_prec": 0.8086849302053452}, {"epoch": 48, "train_loss": 0.4279768300056456, "train_acc": 0.8484319692850115, "train_prec": 0.8623901867866518, "val_loss": 0.5911350440979003, "val_acc": 0.7967039799690245, "val_prec": 0.8112468522787095}, {"epoch": 49, "train_loss": 0.4220471030473708, "train_acc": 0.8504959702491759, "train_prec": 0.8633405667543409, "val_loss": 0.5985773536562918, "val_acc": 0.791711975336075, "val_prec": 0.8061399090290068}, {"epoch": 50, "train_loss": 0.4197248461842537, "train_acc": 0.8511999642848973, "train_prec": 0.8646534603834152, "val_loss": 0.6012388819456099, "val_acc": 0.7946879756450653, "val_prec": 0.8102384489774704}] -------------------------------------------------------------------------------- /models/2020-09-23_112843/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 1.837729442119598, "train_acc": 0.3870439915359019, "train_prec": 0.3888050927966833, "val_loss": 1.5336221051216128, "val_acc": 0.5210919865965841, "val_prec": 0.5259170824289321}, {"epoch": 2, "train_loss": 1.2722350096702575, "train_acc": 0.6043599796295164, "train_prec": 0.61833387196064, "val_loss": 1.2485357081890105, "val_acc": 0.6098839896917344, "val_prec": 0.6241893738508226}, {"epoch": 3, "train_loss": 1.0764940381050105, "train_acc": 0.6648799860477447, "train_prec": 0.6801563698053362, "val_loss": 1.1259526288509365, "val_acc": 0.6498519825935364, "val_prec": 0.6623674720525741}, {"epoch": 4, "train_loss": 0.9926963567733765, "train_acc": 0.6904559880495068, "train_prec": 0.7057548034191133, "val_loss": 1.0711390686035152, "val_acc": 0.666363993883133, "val_prec": 0.6779244232177736}, {"epoch": 5, "train_loss": 0.9517059552669525, "train_acc": 0.7048039799928666, "train_prec": 0.719133288860321, "val_loss": 1.0695942378044128, "val_acc": 
0.668107990026474, "val_prec": 0.680347791314125}, {"epoch": 6, "train_loss": 0.8918332391977309, "train_acc": 0.7205839806795117, "train_prec": 0.7339074391126634, "val_loss": 1.009721340537071, "val_acc": 0.6838119906187058, "val_prec": 0.6964146262407305}, {"epoch": 7, "train_loss": 0.8582476484775542, "train_acc": 0.7319239854812624, "train_prec": 0.745571271777153, "val_loss": 1.0146978628635401, "val_acc": 0.6852799862623217, "val_prec": 0.6977446210384366}, {"epoch": 8, "train_loss": 0.8411091846227646, "train_acc": 0.7367519849538802, "train_prec": 0.7508868831396102, "val_loss": 0.9938007670640948, "val_acc": 0.6888279813528062, "val_prec": 0.7022389858961103}, {"epoch": 9, "train_loss": 0.8026572787761687, "train_acc": 0.7487999838590622, "train_prec": 0.7623503357172012, "val_loss": 0.9600849670171735, "val_acc": 0.7015359747409823, "val_prec": 0.7141124701499938}, {"epoch": 10, "train_loss": 0.7908393281698226, "train_acc": 0.7533239817619322, "train_prec": 0.7663136982917785, "val_loss": 0.950135452747345, "val_acc": 0.7032919853925705, "val_prec": 0.715941413640976}, {"epoch": 11, "train_loss": 0.7701292777061467, "train_acc": 0.7590799796581268, "train_prec": 0.7718666321039203, "val_loss": 0.935033552646637, "val_acc": 0.7079039794206619, "val_prec": 0.7183999359607698}, {"epoch": 12, "train_loss": 0.7750782483816147, "train_acc": 0.7581239861249925, "train_prec": 0.7710159271955491, "val_loss": 0.9490733838081359, "val_acc": 0.7034439933300015, "val_prec": 0.7149553203582762}, {"epoch": 13, "train_loss": 0.7622238802909846, "train_acc": 0.761959976553917, "train_prec": 0.774568688869476, "val_loss": 0.9188381212949753, "val_acc": 0.7129239875078199, "val_prec": 0.7243841683864594}, {"epoch": 14, "train_loss": 0.72339453458786, "train_acc": 0.7753479826450348, "train_prec": 0.7881812369823457, "val_loss": 0.9117382168769838, "val_acc": 0.7168119776248932, "val_prec": 0.7294240349531174}, {"epoch": 15, "train_loss": 0.7548109817504881, "train_acc": 
0.7640199744701384, "train_prec": 0.7768291926383974, "val_loss": 0.9498909425735477, "val_acc": 0.7035719811916354, "val_prec": 0.7156782823801037}, {"epoch": 16, "train_loss": 0.7215060198307035, "train_acc": 0.774219980239868, "train_prec": 0.7867345130443572, "val_loss": 0.9033594787120818, "val_acc": 0.7193759882450106, "val_prec": 0.7299953132867817}, {"epoch": 17, "train_loss": 0.72758487701416, "train_acc": 0.7735479760169978, "train_prec": 0.7862624043226242, "val_loss": 0.9359609979391099, "val_acc": 0.7098359721899035, "val_prec": 0.7210436898469923}, {"epoch": 18, "train_loss": 0.6947447443008422, "train_acc": 0.7801119810342793, "train_prec": 0.7924737477302553, "val_loss": 0.9005001956224442, "val_acc": 0.718751984834671, "val_prec": 0.7292080998420716}, {"epoch": 19, "train_loss": 0.6916106158494949, "train_acc": 0.7811439758539199, "train_prec": 0.792896748185158, "val_loss": 0.9228589123487471, "val_acc": 0.7128559809923174, "val_prec": 0.7224813508987424}, {"epoch": 20, "train_loss": 0.6811869513988499, "train_acc": 0.7844639754295346, "train_prec": 0.7959338605403901, "val_loss": 0.9044613200426105, "val_acc": 0.7186439907550813, "val_prec": 0.7290367782115937}, {"epoch": 21, "train_loss": 0.6723784905672078, "train_acc": 0.7877399766445158, "train_prec": 0.7990021663904193, "val_loss": 0.9125131118297576, "val_acc": 0.7163959842920304, "val_prec": 0.7273663097620007}, {"epoch": 22, "train_loss": 0.6828552842140194, "train_acc": 0.783607979416847, "train_prec": 0.7954171609878538, "val_loss": 0.9061501264572145, "val_acc": 0.718847982883453, "val_prec": 0.7294549077749254}, {"epoch": 23, "train_loss": 0.6653421080112458, "train_acc": 0.7899039906263348, "train_prec": 0.8014062994718552, "val_loss": 0.8942743629217147, "val_acc": 0.7213799810409546, "val_prec": 0.7318193280696872}, {"epoch": 24, "train_loss": 0.6558612722158431, "train_acc": 0.7919959777593615, "train_prec": 0.8031711858510971, "val_loss": 0.8906518238782878, "val_acc": 
0.7237879878282546, "val_prec": 0.7324993193149565}, {"epoch": 25, "train_loss": 0.6548033481836317, "train_acc": 0.7922359842061997, "train_prec": 0.8039244514703752, "val_loss": 0.8906921607255937, "val_acc": 0.7210359853506085, "val_prec": 0.7313364851474763}, {"epoch": 26, "train_loss": 0.640985333919525, "train_acc": 0.7960679841041568, "train_prec": 0.8072899365425112, "val_loss": 0.9024674010276795, "val_acc": 0.7198519796133043, "val_prec": 0.7306460028886798}, {"epoch": 27, "train_loss": 0.6350839608907698, "train_acc": 0.7983199757337572, "train_prec": 0.8093151819705963, "val_loss": 0.8961657613515852, "val_acc": 0.7209679859876634, "val_prec": 0.7311135649681091}, {"epoch": 28, "train_loss": 0.6404676198959351, "train_acc": 0.7964399760961536, "train_prec": 0.8078374874591827, "val_loss": 0.9173755007982257, "val_acc": 0.7138999879360197, "val_prec": 0.7256871294975281}, {"epoch": 29, "train_loss": 0.6708600860834122, "train_acc": 0.7874639892578127, "train_prec": 0.8000196528434752, "val_loss": 0.8918977439403534, "val_acc": 0.7229599791765213, "val_prec": 0.7349608409404752}, {"epoch": 30, "train_loss": 0.622559751868248, "train_acc": 0.8018359792232509, "train_prec": 0.8134338611364367, "val_loss": 0.8931233000755306, "val_acc": 0.722099986076355, "val_prec": 0.7321945834159854}, {"epoch": 31, "train_loss": 0.6211858707666399, "train_acc": 0.80156398832798, "train_prec": 0.8126027196645733, "val_loss": 0.9062614428997041, "val_acc": 0.7190999794006349, "val_prec": 0.7290548759698872}, {"epoch": 32, "train_loss": 0.6560962653160093, "train_acc": 0.7928799766302107, "train_prec": 0.804764040708542, "val_loss": 0.9055724102258681, "val_acc": 0.7176919740438461, "val_prec": 0.7289305239915848}, {"epoch": 33, "train_loss": 0.6372148382663725, "train_acc": 0.798691982626915, "train_prec": 0.8106400299072265, "val_loss": 0.8967388385534288, "val_acc": 0.7208559894561768, "val_prec": 0.7318841016292575}, {"epoch": 34, "train_loss": 0.6981246113777162, 
"train_acc": 0.7789959824085239, "train_prec": 0.7915390324592588, "val_loss": 0.9245616000890733, "val_acc": 0.7108519876003268, "val_prec": 0.7225063711404801}, {"epoch": 35, "train_loss": 0.6619340789318087, "train_acc": 0.7907999783754348, "train_prec": 0.8028394001722334, "val_loss": 0.8994772791862489, "val_acc": 0.7205559861660001, "val_prec": 0.7323708927631378}, {"epoch": 36, "train_loss": 0.6295415407419206, "train_acc": 0.7995799839496608, "train_prec": 0.8108117395639418, "val_loss": 0.891381733417511, "val_acc": 0.7211079883575437, "val_prec": 0.7321987169981005}, {"epoch": 37, "train_loss": 0.6242511963844296, "train_acc": 0.8005319821834564, "train_prec": 0.8118370610475539, "val_loss": 0.8995271384716035, "val_acc": 0.7197479826211928, "val_prec": 0.7295971840620036}, {"epoch": 38, "train_loss": 0.6249633580446244, "train_acc": 0.8009119862318039, "train_prec": 0.8121117359399799, "val_loss": 0.8940429192781446, "val_acc": 0.7221479862928393, "val_prec": 0.7327886486053469}, {"epoch": 39, "train_loss": 0.6205199420452117, "train_acc": 0.8013999772071838, "train_prec": 0.81261732339859, "val_loss": 0.9262643229961395, "val_acc": 0.711147971749306, "val_prec": 0.7218321663141254}, {"epoch": 40, "train_loss": 0.6148243600130081, "train_acc": 0.8025919771194457, "train_prec": 0.8138325846195222, "val_loss": 0.9003377199172973, "val_acc": 0.7216719865798951, "val_prec": 0.7322735643386841}, {"epoch": 41, "train_loss": 0.6748398602008818, "train_acc": 0.7874839782714842, "train_prec": 0.7999205589294434, "val_loss": 0.9477770882844924, "val_acc": 0.7047039747238162, "val_prec": 0.7166373550891877}, {"epoch": 42, "train_loss": 0.6745110422372818, "train_acc": 0.7885039782524108, "train_prec": 0.8010109329223631, "val_loss": 0.9450198554992676, "val_acc": 0.7057999777793884, "val_prec": 0.7167509365081786}, {"epoch": 43, "train_loss": 0.6389703857898712, "train_acc": 0.7979399836063384, "train_prec": 0.8098639470338821, "val_loss": 0.8972990989685058, 
"val_acc": 0.7212279832363132, "val_prec": 0.7323848623037335}, {"epoch": 44, "train_loss": 0.6139744436740879, "train_acc": 0.8044279891252516, "train_prec": 0.8153691649436952, "val_loss": 0.9061795324087143, "val_acc": 0.719247980117798, "val_prec": 0.7295252668857575}, {"epoch": 45, "train_loss": 0.6262306869029997, "train_acc": 0.8005439853668214, "train_prec": 0.8123430925607676, "val_loss": 0.9089382505416871, "val_acc": 0.7168079769611362, "val_prec": 0.7276279401779173}, {"epoch": 46, "train_loss": 0.6198192763328554, "train_acc": 0.8025719863176346, "train_prec": 0.8138747817277906, "val_loss": 0.8781958967447281, "val_acc": 0.7264719891548156, "val_prec": 0.7373987793922424}, {"epoch": 47, "train_loss": 0.5937358617782592, "train_acc": 0.8094399797916412, "train_prec": 0.8206734538078307, "val_loss": 0.8851297444105147, "val_acc": 0.7265999847650529, "val_prec": 0.73659559071064}, {"epoch": 48, "train_loss": 0.5955776447057723, "train_acc": 0.808531984090805, "train_prec": 0.8197664064168929, "val_loss": 0.8967425179481505, "val_acc": 0.7205679863691328, "val_prec": 0.7310015958547592}, {"epoch": 49, "train_loss": 0.6071104496717455, "train_acc": 0.8053679740428922, "train_prec": 0.8163449388742449, "val_loss": 0.8965736830234526, "val_acc": 0.7215519815683364, "val_prec": 0.7308944314718246}, {"epoch": 50, "train_loss": 0.619367762207985, "train_acc": 0.8021559727191924, "train_prec": 0.8136311626434327, "val_loss": 0.8951659339666366, "val_acc": 0.7220279890298843, "val_prec": 0.7319636088609696}] -------------------------------------------------------------------------------- /models/2020-09-24_175224/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 2.252838449478149, "train_acc": 0.16769999474287023, "train_prec": 0.1506528915092349, "val_loss": 2.1695634412765497, "val_acc": 0.2155999937653541, "val_prec": 0.20670039713382718}, {"epoch": 2, "train_loss": 
2.052367020845414, "train_acc": 0.2821599921584129, "train_prec": 0.2756986126303674, "val_loss": 1.9456037580966947, "val_acc": 0.34101998776197434, "val_prec": 0.33378462344408055}, {"epoch": 3, "train_loss": 1.8131281375885009, "train_acc": 0.39735999017953877, "train_prec": 0.4010809156298637, "val_loss": 1.7470901572704318, "val_acc": 0.4262599918246271, "val_prec": 0.4291304168105124}, {"epoch": 4, "train_loss": 1.628822088241577, "train_acc": 0.46951998800039285, "train_prec": 0.47700254499912265, "val_loss": 1.6061009609699248, "val_acc": 0.4740199875831604, "val_prec": 0.47799169957637777}, {"epoch": 5, "train_loss": 1.5019722926616668, "train_acc": 0.51013998568058, "train_prec": 0.5246027335524558, "val_loss": 1.541578319072724, "val_acc": 0.49957998245954527, "val_prec": 0.5038367334008216}, {"epoch": 6, "train_loss": 1.4147890603542324, "train_acc": 0.5338999795913699, "train_prec": 0.5495980125665668, "val_loss": 1.461706359386444, "val_acc": 0.5260399848222734, "val_prec": 0.5359534075856209}, {"epoch": 7, "train_loss": 1.3453168535232545, "train_acc": 0.5615999746322632, "train_prec": 0.5791793575882908, "val_loss": 1.3940356576442718, "val_acc": 0.5521399766206738, "val_prec": 0.564897908270359}, {"epoch": 8, "train_loss": 1.3065978109836573, "train_acc": 0.5760599696636199, "train_prec": 0.5953631323575974, "val_loss": 1.3802175080776222, "val_acc": 0.5545199760794639, "val_prec": 0.5673261168599126}, {"epoch": 9, "train_loss": 1.2465759813785549, "train_acc": 0.5955599677562716, "train_prec": 0.6156571441888808, "val_loss": 1.333454498052597, "val_acc": 0.5695599749684334, "val_prec": 0.5855085912346841}, {"epoch": 10, "train_loss": 1.2013100612163545, "train_acc": 0.6116199684143069, "train_prec": 0.6328032875061035, "val_loss": 1.3594020724296572, "val_acc": 0.5587199774384501, "val_prec": 0.5763421085476879}, {"epoch": 11, "train_loss": 1.1777920424938197, "train_acc": 0.6195599746704101, "train_prec": 0.6404979848861695, "val_loss": 
1.3502086269855504, "val_acc": 0.567759981751442, "val_prec": 0.5844054812192918}, {"epoch": 12, "train_loss": 1.1327003073692319, "train_acc": 0.6395799666643144, "train_prec": 0.6610318440198898, "val_loss": 1.2968762493133543, "val_acc": 0.5862799727916718, "val_prec": 0.6006780940294266}, {"epoch": 13, "train_loss": 1.0655316144227984, "train_acc": 0.6571199661493301, "train_prec": 0.6783258503675461, "val_loss": 1.267009853124618, "val_acc": 0.5951199775934218, "val_prec": 0.613644403219223}, {"epoch": 14, "train_loss": 1.0298709148168566, "train_acc": 0.6666399705410001, "train_prec": 0.6886942636966705, "val_loss": 1.2941381609439848, "val_acc": 0.5865799790620805, "val_prec": 0.6037467926740646}, {"epoch": 15, "train_loss": 1.0086766088008878, "train_acc": 0.6754599732160567, "train_prec": 0.6979177832603455, "val_loss": 1.263861980438232, "val_acc": 0.5970399743318557, "val_prec": 0.611595317721367}, {"epoch": 16, "train_loss": 0.9786947077512742, "train_acc": 0.682939971089363, "train_prec": 0.7042366397380826, "val_loss": 1.26820718050003, "val_acc": 0.591079974770546, "val_prec": 0.6069601953029633}, {"epoch": 17, "train_loss": 0.9564782553911211, "train_acc": 0.6929599708318712, "train_prec": 0.716405344605446, "val_loss": 1.2756248235702508, "val_acc": 0.5928999686241153, "val_prec": 0.6121092718839647}, {"epoch": 18, "train_loss": 0.9372757923603054, "train_acc": 0.6954199677705765, "train_prec": 0.7181492710113526, "val_loss": 1.2900593304634098, "val_acc": 0.5887199783325193, "val_prec": 0.6072949856519695}, {"epoch": 19, "train_loss": 0.9207458227872852, "train_acc": 0.7021999806165693, "train_prec": 0.7254477161169052, "val_loss": 1.2995612359046937, "val_acc": 0.5866599774360656, "val_prec": 0.6035929065942764}, {"epoch": 20, "train_loss": 0.9205304425954819, "train_acc": 0.7014399677515027, "train_prec": 0.7251622211933139, "val_loss": 1.285842435359955, "val_acc": 0.5879199755191805, "val_prec": 0.6060847994685173}, {"epoch": 21, "train_loss": 
0.9178842943906786, "train_acc": 0.7029599756002426, "train_prec": 0.7264788681268691, "val_loss": 1.2880313372611996, "val_acc": 0.58885997235775, "val_prec": 0.6075334417819973}, {"epoch": 22, "train_loss": 0.8623279881477357, "train_acc": 0.7163599669933317, "train_prec": 0.7393087881803512, "val_loss": 1.3119527232646941, "val_acc": 0.5831999754905698, "val_prec": 0.6012987279891965}, {"epoch": 23, "train_loss": 0.8572061944007873, "train_acc": 0.7199999779462816, "train_prec": 0.7425928658246996, "val_loss": 1.3290675067901607, "val_acc": 0.5756399732828144, "val_prec": 0.5920544427633286}, {"epoch": 24, "train_loss": 0.9094286155700684, "train_acc": 0.7035199785232545, "train_prec": 0.7269707500934603, "val_loss": 1.3519127643108366, "val_acc": 0.5635999754071237, "val_prec": 0.5796782350540163}, {"epoch": 25, "train_loss": 0.8756196743249892, "train_acc": 0.7157199734449388, "train_prec": 0.7393371921777723, "val_loss": 1.3158756494522101, "val_acc": 0.5824999749660494, "val_prec": 0.5988827943801883}, {"epoch": 26, "train_loss": 0.8306871294975283, "train_acc": 0.7274199718236923, "train_prec": 0.7489914655685422, "val_loss": 1.3431731927394872, "val_acc": 0.5754599738121031, "val_prec": 0.5915305495262146}, {"epoch": 27, "train_loss": 0.8396945637464523, "train_acc": 0.7211399787664414, "train_prec": 0.7428525638580321, "val_loss": 1.313051364421845, "val_acc": 0.5845999717712406, "val_prec": 0.6030915069580078}, {"epoch": 28, "train_loss": 0.8097671228647231, "train_acc": 0.734379978179932, "train_prec": 0.756510316133499, "val_loss": 1.4373523247241973, "val_acc": 0.5362999773025511, "val_prec": 0.5549391409754753}, {"epoch": 29, "train_loss": 0.9500411915779113, "train_acc": 0.694019976258278, "train_prec": 0.7169598674774172, "val_loss": 1.3319216299057002, "val_acc": 0.5784399768710136, "val_prec": 0.5948265826702122}, {"epoch": 30, "train_loss": 0.8144351023435594, "train_acc": 0.7356999826431273, "train_prec": 0.7575841987133026, "val_loss": 
1.334465037584305, "val_acc": 0.5775199759006501, "val_prec": 0.5940713715553284}, {"epoch": 31, "train_loss": 0.813107968568802, "train_acc": 0.7326399743556977, "train_prec": 0.7564043509960173, "val_loss": 1.3461892724037172, "val_acc": 0.5703599745035173, "val_prec": 0.5878344726562501}, {"epoch": 32, "train_loss": 0.7823578315973281, "train_acc": 0.741379982829094, "train_prec": 0.7633678221702579, "val_loss": 1.358994520902634, "val_acc": 0.5766799747943879, "val_prec": 0.5932755774259567}, {"epoch": 33, "train_loss": 0.7493152469396592, "train_acc": 0.752959983944893, "train_prec": 0.7738928240537641, "val_loss": 1.34346158862114, "val_acc": 0.5820599704980849, "val_prec": 0.5987449446320532}, {"epoch": 34, "train_loss": 0.7467066752910614, "train_acc": 0.7543599838018415, "train_prec": 0.7771054762601852, "val_loss": 1.3687268531322478, "val_acc": 0.5742799735069278, "val_prec": 0.5882428687810899}, {"epoch": 35, "train_loss": 0.7246802592277528, "train_acc": 0.7598199754953384, "train_prec": 0.7831872671842575, "val_loss": 1.3720559811592101, "val_acc": 0.5728399688005449, "val_prec": 0.5883310812711716}, {"epoch": 36, "train_loss": 0.7137826722860341, "train_acc": 0.7635399770736693, "train_prec": 0.7847385412454606, "val_loss": 1.3683416986465449, "val_acc": 0.5747199743986131, "val_prec": 0.5883068150281907}, {"epoch": 37, "train_loss": 0.6986781042814253, "train_acc": 0.7669199812412265, "train_prec": 0.7893908911943437, "val_loss": 1.3914601838588707, "val_acc": 0.5616799727082253, "val_prec": 0.5772533991932868}, {"epoch": 38, "train_loss": 0.7377603411674499, "train_acc": 0.755619986653328, "train_prec": 0.7775359421968461, "val_loss": 1.35369899392128, "val_acc": 0.5714399746060369, "val_prec": 0.5862014517188073}, {"epoch": 39, "train_loss": 0.7038901460170749, "train_acc": 0.7687999808788298, "train_prec": 0.790486869812012, "val_loss": 1.3553852427005768, "val_acc": 0.5793799686431886, "val_prec": 0.5947361093759537}, {"epoch": 40, "train_loss": 
0.8492987364530565, "train_acc": 0.7192599782347678, "train_prec": 0.7418037873506544, "val_loss": 1.5563311100006103, "val_acc": 0.4988599857687949, "val_prec": 0.5130154207348823}, {"epoch": 41, "train_loss": 1.078452450633049, "train_acc": 0.6511599719524385, "train_prec": 0.6740743541717527, "val_loss": 1.3662560498714442, "val_acc": 0.5608399790525436, "val_prec": 0.5789895206689833}, {"epoch": 42, "train_loss": 0.8768309140205385, "train_acc": 0.7164399814605713, "train_prec": 0.7403499943017957, "val_loss": 1.3588173186779027, "val_acc": 0.5629199796915053, "val_prec": 0.5823460510373116}, {"epoch": 43, "train_loss": 0.776446767449379, "train_acc": 0.7479199790954587, "train_prec": 0.7701791644096375, "val_loss": 1.373345011472702, "val_acc": 0.5686799740791318, "val_prec": 0.5875896126031875}, {"epoch": 44, "train_loss": 0.7089075928926465, "train_acc": 0.7673799765110013, "train_prec": 0.7911437594890594, "val_loss": 1.3750489413738252, "val_acc": 0.5716199785470962, "val_prec": 0.5896905088424683}, {"epoch": 45, "train_loss": 0.7236656421422958, "train_acc": 0.7609999734163284, "train_prec": 0.7824799078702926, "val_loss": 1.3739315116405486, "val_acc": 0.5724599713087081, "val_prec": 0.588813087940216}, {"epoch": 46, "train_loss": 0.6752007192373277, "train_acc": 0.7744199734926226, "train_prec": 0.7978881448507308, "val_loss": 1.4221049296855925, "val_acc": 0.5618999761343001, "val_prec": 0.5773731300234796}, {"epoch": 47, "train_loss": 0.66886834025383, "train_acc": 0.7775399923324582, "train_prec": 0.7980574524402618, "val_loss": 1.4131059682369238, "val_acc": 0.5670999711751936, "val_prec": 0.5819387286901472}, {"epoch": 48, "train_loss": 0.6829634606838224, "train_acc": 0.7735999727249147, "train_prec": 0.7957728910446167, "val_loss": 1.419599621295929, "val_acc": 0.5603999754786491, "val_prec": 0.5751579374074938}, {"epoch": 49, "train_loss": 0.6403639948368071, "train_acc": 0.7829599779844282, "train_prec": 0.8037270206212997, "val_loss": 
1.4252987670898438, "val_acc": 0.5659799748659134, "val_prec": 0.5810436087846754}, {"epoch": 50, "train_loss": 0.6521690702438356, "train_acc": 0.7815199840068816, "train_prec": 0.802932870388031, "val_loss": 1.4289024925231935, "val_acc": 0.5590199774503707, "val_prec": 0.5735268208384513}] -------------------------------------------------------------------------------- /models/2020-09-24_181123/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 2.2166969823837266, "train_acc": 0.1882999945431947, "train_prec": 0.17845303196460008, "val_loss": 2.0922728848457335, "val_acc": 0.2667099905014037, "val_prec": 0.2632149904966354}, {"epoch": 2, "train_loss": 1.900700335502624, "train_acc": 0.36184998750686653, "train_prec": 0.36170785531401617, "val_loss": 1.7613352334499355, "val_acc": 0.42952998518943764, "val_prec": 0.4289359486103059}, {"epoch": 3, "train_loss": 1.6125436878204347, "train_acc": 0.4809299859404564, "train_prec": 0.4899080458283424, "val_loss": 1.5554496920108793, "val_acc": 0.5039899832010272, "val_prec": 0.5097022873163223}, {"epoch": 4, "train_loss": 1.4503931629657743, "train_acc": 0.539969986379147, "train_prec": 0.5526113861799242, "val_loss": 1.4451245999336246, "val_acc": 0.5386299887299536, "val_prec": 0.5484705442190168}, {"epoch": 5, "train_loss": 1.3214096176624301, "train_acc": 0.5824899888038636, "train_prec": 0.5978941535949707, "val_loss": 1.3473157548904422, "val_acc": 0.5770799827575684, "val_prec": 0.5872683525085448}, {"epoch": 6, "train_loss": 1.2146248853206632, "train_acc": 0.6168299871683125, "train_prec": 0.632527142763138, "val_loss": 1.2681507074832914, "val_acc": 0.5970499885082247, "val_prec": 0.6137389057874678}, {"epoch": 7, "train_loss": 1.1213293677568434, "train_acc": 0.6462899833917618, "train_prec": 0.6633536463975908, "val_loss": 1.266308770179749, "val_acc": 0.5979699867963792, "val_prec": 0.6106753230094908}, {"epoch": 8, 
"train_loss": 1.071324408054352, "train_acc": 0.6635899800062179, "train_prec": 0.6785899412631988, "val_loss": 1.1851075279712684, "val_acc": 0.6264599883556364, "val_prec": 0.6389009821414949}, {"epoch": 9, "train_loss": 1.0189149689674375, "train_acc": 0.6787899804115296, "train_prec": 0.6938708633184432, "val_loss": 1.1728151810169216, "val_acc": 0.628969980478287, "val_prec": 0.640893357396126}, {"epoch": 10, "train_loss": 0.9525032484531399, "train_acc": 0.6994299733638764, "train_prec": 0.7159942656755448, "val_loss": 1.1204814445972444, "val_acc": 0.6459299820661546, "val_prec": 0.6584265387058258}, {"epoch": 11, "train_loss": 0.9804448109865188, "train_acc": 0.692679980993271, "train_prec": 0.7079257231950761, "val_loss": 1.1801468050479886, "val_acc": 0.6257799869775773, "val_prec": 0.6392752814292906}, {"epoch": 12, "train_loss": 0.9257571804523468, "train_acc": 0.7049999868869783, "train_prec": 0.7207753056287761, "val_loss": 1.1457103544473644, "val_acc": 0.6352599853277204, "val_prec": 0.6485802602767946}, {"epoch": 13, "train_loss": 0.9028393226861953, "train_acc": 0.7133899772167206, "train_prec": 0.7298595106601717, "val_loss": 1.2407751524448392, "val_acc": 0.6054799938201902, "val_prec": 0.6215678048133849}, {"epoch": 14, "train_loss": 0.9393442779779428, "train_acc": 0.7045399808883666, "train_prec": 0.722401278614998, "val_loss": 1.210613217353821, "val_acc": 0.6135199880599977, "val_prec": 0.627721029520035}, {"epoch": 15, "train_loss": 0.8742744064331056, "train_acc": 0.7210199809074402, "train_prec": 0.7372067314386369, "val_loss": 1.192841616868973, "val_acc": 0.6179999852180479, "val_prec": 0.6305732953548433}, {"epoch": 16, "train_loss": 0.8387978875637053, "train_acc": 0.7324399799108504, "train_prec": 0.7490351259708405, "val_loss": 1.18263313293457, "val_acc": 0.6222699862718584, "val_prec": 0.6325710767507553}, {"epoch": 17, "train_loss": 0.816023004055023, "train_acc": 0.7405699861049652, "train_prec": 0.7573978090286253, "val_loss": 
1.2132331192493433, "val_acc": 0.6136099815368654, "val_prec": 0.6246263462305071}, {"epoch": 18, "train_loss": 0.791825411915779, "train_acc": 0.7483899843692781, "train_prec": 0.7645789188146591, "val_loss": 1.2155687141418463, "val_acc": 0.6131599885225294, "val_prec": 0.6246709394454957}, {"epoch": 19, "train_loss": 0.7775904405117035, "train_acc": 0.751129979491234, "train_prec": 0.7663741189241408, "val_loss": 1.199048173427582, "val_acc": 0.6175899863243103, "val_prec": 0.6303419685363775}, {"epoch": 20, "train_loss": 0.7391044652462008, "train_acc": 0.7636199760437009, "train_prec": 0.7790866780281068, "val_loss": 1.2286482071876528, "val_acc": 0.6074799883365632, "val_prec": 0.6201358455419542}, {"epoch": 21, "train_loss": 0.7467830371856687, "train_acc": 0.7594199788570404, "train_prec": 0.7752907812595369, "val_loss": 1.2309952235221864, "val_acc": 0.6074799865484237, "val_prec": 0.6181728172302248}, {"epoch": 22, "train_loss": 0.7190111589431764, "train_acc": 0.7708999806642534, "train_prec": 0.7853652799129485, "val_loss": 1.2370697331428528, "val_acc": 0.6055799889564516, "val_prec": 0.6175637066364289}, {"epoch": 23, "train_loss": 0.7144810146093369, "train_acc": 0.7714999836683272, "train_prec": 0.7867601567506793, "val_loss": 1.2601806962490087, "val_acc": 0.5980899846553801, "val_prec": 0.6123409813642503}, {"epoch": 24, "train_loss": 0.7182484120130542, "train_acc": 0.771039991378784, "train_prec": 0.7866360485553742, "val_loss": 1.255240197181702, "val_acc": 0.6000099879503251, "val_prec": 0.6161822658777237}, {"epoch": 25, "train_loss": 0.7272011131048203, "train_acc": 0.7680199825763702, "train_prec": 0.7838653433322906, "val_loss": 1.2398046481609344, "val_acc": 0.6033399933576582, "val_prec": 0.6187077474594116}, {"epoch": 26, "train_loss": 0.707720316648483, "train_acc": 0.7764299887418746, "train_prec": 0.7910624480247498, "val_loss": 1.245798035860062, "val_acc": 0.6033599823713304, "val_prec": 0.6179938644170757}, {"epoch": 27, 
"train_loss": 0.7105082315206527, "train_acc": 0.7719799751043319, "train_prec": 0.7877629685401919, "val_loss": 1.3225712978839876, "val_acc": 0.5739699858427048, "val_prec": 0.5856419277191162}, {"epoch": 28, "train_loss": 0.7532527542114257, "train_acc": 0.7599399811029433, "train_prec": 0.7764218109846115, "val_loss": 1.2723962652683256, "val_acc": 0.5941999864578248, "val_prec": 0.6105485683679585}, {"epoch": 29, "train_loss": 0.694380440711975, "train_acc": 0.7800199824571609, "train_prec": 0.7965935081243517, "val_loss": 1.2369264674186706, "val_acc": 0.6078499811887742, "val_prec": 0.6231976789236069}, {"epoch": 30, "train_loss": 0.6323411089181901, "train_acc": 0.7952599859237672, "train_prec": 0.8099234282970426, "val_loss": 1.2484197330474847, "val_acc": 0.6043699854612349, "val_prec": 0.6167211937904356}, {"epoch": 31, "train_loss": 0.627711738348007, "train_acc": 0.8025099760293961, "train_prec": 0.816410816311836, "val_loss": 1.2661165893077853, "val_acc": 0.6066199910640717, "val_prec": 0.620832182765007}, {"epoch": 32, "train_loss": 0.5707218551635743, "train_acc": 0.8141499876976012, "train_prec": 0.8267829418182373, "val_loss": 1.2922059500217435, "val_acc": 0.6010999858379363, "val_prec": 0.6130731230974197}, {"epoch": 33, "train_loss": 0.5761187514662746, "train_acc": 0.8135499864816665, "train_prec": 0.8268465185165405, "val_loss": 1.2866664683818816, "val_acc": 0.6007199913263319, "val_prec": 0.6133084714412689}, {"epoch": 34, "train_loss": 0.6235150974988941, "train_acc": 0.7987599831819533, "train_prec": 0.8123467975854874, "val_loss": 1.2726834940910339, "val_acc": 0.6006799846887586, "val_prec": 0.6150304681062697}, {"epoch": 35, "train_loss": 0.5731698101758957, "train_acc": 0.8163399821519853, "train_prec": 0.8294059264659878, "val_loss": 1.30730709195137, "val_acc": 0.592349990606308, "val_prec": 0.6045382881164552}, {"epoch": 36, "train_loss": 0.7115020301938058, "train_acc": 0.7681599807739258, "train_prec": 0.7832599669694903, 
"val_loss": 1.2681592786312097, "val_acc": 0.5947899866104125, "val_prec": 0.6066483563184738}, {"epoch": 37, "train_loss": 0.6303417050838471, "train_acc": 0.7977899771928787, "train_prec": 0.8112757289409641, "val_loss": 1.293437011241913, "val_acc": 0.5925499898195269, "val_prec": 0.606892485022545}, {"epoch": 38, "train_loss": 0.5620350062847138, "train_acc": 0.8186099833250047, "train_prec": 0.8320807629823687, "val_loss": 1.3290630972385404, "val_acc": 0.5859299886226654, "val_prec": 0.5996816867589951}, {"epoch": 39, "train_loss": 0.5591115728020668, "train_acc": 0.819849982857704, "train_prec": 0.8332901948690417, "val_loss": 1.2834656083583829, "val_acc": 0.6041499906778336, "val_prec": 0.6166184294223787}, {"epoch": 40, "train_loss": 0.6468747270107271, "train_acc": 0.792529970407486, "train_prec": 0.8073015737533569, "val_loss": 1.2811957252025608, "val_acc": 0.5974899911880498, "val_prec": 0.611474596261978}, {"epoch": 41, "train_loss": 0.5646808683872222, "train_acc": 0.8186999851465224, "train_prec": 0.8324933081865312, "val_loss": 1.2969131088256833, "val_acc": 0.5916299921274187, "val_prec": 0.6069542121887208}, {"epoch": 42, "train_loss": 0.539842362999916, "train_acc": 0.8254099828004839, "train_prec": 0.8380486369132998, "val_loss": 1.3159943962097167, "val_acc": 0.5951299864053728, "val_prec": 0.6117792147397996}, {"epoch": 43, "train_loss": 0.5395570808649063, "train_acc": 0.8264799821376798, "train_prec": 0.8388049858808517, "val_loss": 1.3214120078086848, "val_acc": 0.5944199860095978, "val_prec": 0.6059263384342196}, {"epoch": 44, "train_loss": 0.5168934500217438, "train_acc": 0.8312999820709231, "train_prec": 0.8437095659971234, "val_loss": 1.340514987707138, "val_acc": 0.593259990811348, "val_prec": 0.6060788422822956}, {"epoch": 45, "train_loss": 0.5423104453086851, "train_acc": 0.8209499824047088, "train_prec": 0.8330966401100157, "val_loss": 1.373218876123428, "val_acc": 0.5681699907779696, "val_prec": 0.5829232424497605}, {"epoch": 46, 
"train_loss": 0.6196532604098323, "train_acc": 0.7991499781608585, "train_prec": 0.8131660681962968, "val_loss": 1.3150757133960722, "val_acc": 0.5945899921655656, "val_prec": 0.6079198372364042}, {"epoch": 47, "train_loss": 0.5435176414251329, "train_acc": 0.8246799844503402, "train_prec": 0.8378344696760176, "val_loss": 1.3230914819240571, "val_acc": 0.5937999832630158, "val_prec": 0.6073965001106265}, {"epoch": 48, "train_loss": 0.5028242173790931, "train_acc": 0.8372899925708769, "train_prec": 0.8491651570796966, "val_loss": 1.2967957079410553, "val_acc": 0.5949499887228015, "val_prec": 0.608596567511559}, {"epoch": 49, "train_loss": 0.5445363625884059, "train_acc": 0.8271899890899659, "train_prec": 0.8392948609590534, "val_loss": 1.278725360631943, "val_acc": 0.5946099847555163, "val_prec": 0.606190210580826}, {"epoch": 50, "train_loss": 0.5356240147352218, "train_acc": 0.8286499786376954, "train_prec": 0.8407684981822967, "val_loss": 1.3471678245067595, "val_acc": 0.5887399852275845, "val_prec": 0.5998238754272461}] -------------------------------------------------------------------------------- /models/2020-09-24_220801/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 1.3154082870483401, "train_acc": 0.4817999821901321, "train_prec": 0.4872522516548634, "val_loss": 1.1384859585762017, "val_acc": 0.5783599752187728, "val_prec": 0.580650478601456}, {"epoch": 2, "train_loss": 1.0248434054851536, "train_acc": 0.6269599759578705, "train_prec": 0.6343052875995638, "val_loss": 0.9828801167011258, "val_acc": 0.6401399731636049, "val_prec": 0.6465035307407379}, {"epoch": 3, "train_loss": 0.8939951181411743, "train_acc": 0.6740799736976625, "train_prec": 0.6839299541711807, "val_loss": 0.9311447685956952, "val_acc": 0.6591199731826782, "val_prec": 0.6670330333709715}, {"epoch": 4, "train_loss": 0.8640159428119658, "train_acc": 0.6848999726772307, "train_prec": 0.6934626871347427, 
"val_loss": 0.8974728232622143, "val_acc": 0.674979968070984, "val_prec": 0.6831197649240497}, {"epoch": 5, "train_loss": 0.8068580609560012, "train_acc": 0.7089399683475491, "train_prec": 0.7194775772094728, "val_loss": 0.8776917266845702, "val_acc": 0.6825799643993375, "val_prec": 0.6915654629468917}, {"epoch": 6, "train_loss": 0.76716824233532, "train_acc": 0.7221399790048602, "train_prec": 0.7311292713880536, "val_loss": 0.8418311727046967, "val_acc": 0.6981199741363526, "val_prec": 0.705981622338295}, {"epoch": 7, "train_loss": 0.7531627786159516, "train_acc": 0.7269399827718737, "train_prec": 0.7371544760465624, "val_loss": 0.8313687288761136, "val_acc": 0.7044399774074553, "val_prec": 0.713569536805153}, {"epoch": 8, "train_loss": 0.7264365935325624, "train_acc": 0.7421599841117861, "train_prec": 0.7517228251695633, "val_loss": 0.8076892554759979, "val_acc": 0.7148999744653703, "val_prec": 0.7236419230699539}, {"epoch": 9, "train_loss": 0.7113370966911317, "train_acc": 0.7497799777984622, "train_prec": 0.7607886606454849, "val_loss": 0.7685601240396498, "val_acc": 0.7302599775791166, "val_prec": 0.7401655077934263}, {"epoch": 10, "train_loss": 0.6731372261047364, "train_acc": 0.7628999912738799, "train_prec": 0.7736237317323682, "val_loss": 0.7774925911426546, "val_acc": 0.7289399707317349, "val_prec": 0.7394344228506087}, {"epoch": 11, "train_loss": 0.7702608495950698, "train_acc": 0.7291599708795546, "train_prec": 0.7387961196899414, "val_loss": 0.8286973285675048, "val_acc": 0.7074799734354018, "val_prec": 0.7169960337877269}, {"epoch": 12, "train_loss": 0.690235145688057, "train_acc": 0.7577999764680862, "train_prec": 0.7685458099842071, "val_loss": 0.7673737287521363, "val_acc": 0.7286599719524386, "val_prec": 0.737575805783272}, {"epoch": 13, "train_loss": 0.656106671690941, "train_acc": 0.7685999822616578, "train_prec": 0.7770327860116957, "val_loss": 0.7464372527599334, "val_acc": 0.7384199774265288, "val_prec": 0.7482829278707506}, {"epoch": 14, 
"train_loss": 0.6428913876414298, "train_acc": 0.772179981470108, "train_prec": 0.7822302937507627, "val_loss": 0.7614768493175504, "val_acc": 0.73173997759819, "val_prec": 0.7408112710714337}, {"epoch": 15, "train_loss": 0.6416050389409064, "train_acc": 0.7738599848747253, "train_prec": 0.7838261914253236, "val_loss": 0.7142957675456998, "val_acc": 0.7528599810600282, "val_prec": 0.7628514695167541}, {"epoch": 16, "train_loss": 0.6932601749897006, "train_acc": 0.755459970831871, "train_prec": 0.7665528374910355, "val_loss": 0.7663169473409654, "val_acc": 0.7339599829912183, "val_prec": 0.7432942098379135}, {"epoch": 17, "train_loss": 0.644900156259537, "train_acc": 0.7725999772548677, "train_prec": 0.7814429414272309, "val_loss": 0.7448224824666975, "val_acc": 0.738519980311394, "val_prec": 0.74818023622036}, {"epoch": 18, "train_loss": 0.6163819095492366, "train_acc": 0.7823599797487257, "train_prec": 0.7930880516767503, "val_loss": 0.7291986083984375, "val_acc": 0.7467999863624575, "val_prec": 0.7556538045406348}, {"epoch": 19, "train_loss": 0.6098508432507517, "train_acc": 0.7870399785041806, "train_prec": 0.7972841453552247, "val_loss": 0.7160803747177126, "val_acc": 0.7523799753189089, "val_prec": 0.7626793050765992}, {"epoch": 20, "train_loss": 0.5670204719901085, "train_acc": 0.8026999813318251, "train_prec": 0.8128221732378007, "val_loss": 0.7174017602205276, "val_acc": 0.750539979338646, "val_prec": 0.7609341615438462}, {"epoch": 21, "train_loss": 0.5721167054772376, "train_acc": 0.8009199762344359, "train_prec": 0.811408874988556, "val_loss": 0.7147784113883973, "val_acc": 0.7502399766445159, "val_prec": 0.7599161887168884}, {"epoch": 22, "train_loss": 0.5565840521454812, "train_acc": 0.8049599778652191, "train_prec": 0.8149836844205858, "val_loss": 0.7162337040901184, "val_acc": 0.7513999783992766, "val_prec": 0.7611323744058608}, {"epoch": 23, "train_loss": 0.5260350671410559, "train_acc": 0.8176199805736541, "train_prec": 0.827430967092514, 
"val_loss": 0.6958083361387254, "val_acc": 0.7578599798679353, "val_prec": 0.76798448741436}, {"epoch": 24, "train_loss": 0.5275110462307928, "train_acc": 0.8170999789237976, "train_prec": 0.8264937955141068, "val_loss": 0.717134047150612, "val_acc": 0.7495599770545959, "val_prec": 0.7600438559055328}, {"epoch": 25, "train_loss": 0.5498597544431687, "train_acc": 0.8114799779653551, "train_prec": 0.8225022357702253, "val_loss": 0.7120341128110889, "val_acc": 0.7512599802017212, "val_prec": 0.7607167291641234}, {"epoch": 26, "train_loss": 0.4943070977926254, "train_acc": 0.8309399777650833, "train_prec": 0.8407144784927368, "val_loss": 0.7191004216670991, "val_acc": 0.7461399829387664, "val_prec": 0.7568891763687136}, {"epoch": 27, "train_loss": 0.4744992044568064, "train_acc": 0.8361599880456925, "train_prec": 0.8457605284452435, "val_loss": 0.7215171468257904, "val_acc": 0.7461799824237828, "val_prec": 0.7551503509283067}, {"epoch": 28, "train_loss": 0.4999916005134583, "train_acc": 0.8267999744415285, "train_prec": 0.8369454729557038, "val_loss": 0.714485804438591, "val_acc": 0.7518199890851975, "val_prec": 0.7647755718231202}, {"epoch": 29, "train_loss": 0.46326758056879047, "train_acc": 0.839539973139763, "train_prec": 0.8494915807247162, "val_loss": 0.7120579576492307, "val_acc": 0.7528599774837494, "val_prec": 0.7630881077051164}, {"epoch": 30, "train_loss": 0.5199487623572349, "train_acc": 0.8205799669027328, "train_prec": 0.8314604383707048, "val_loss": 0.702720713019371, "val_acc": 0.7545399779081345, "val_prec": 0.7654761683940886}, {"epoch": 31, "train_loss": 0.544611468911171, "train_acc": 0.8115799790620807, "train_prec": 0.8224620056152342, "val_loss": 0.7214455753564835, "val_acc": 0.7502399760484693, "val_prec": 0.76085025370121}, {"epoch": 32, "train_loss": 0.455515955388546, "train_acc": 0.8423799794912341, "train_prec": 0.8519593900442123, "val_loss": 0.7061560070514681, "val_acc": 0.7563399749994276, "val_prec": 0.7668271070718764}, {"epoch": 33, 
"train_loss": 0.4181705504655839, "train_acc": 0.8553199768066406, "train_prec": 0.8638129580020908, "val_loss": 0.7039949399232869, "val_acc": 0.7569199824333193, "val_prec": 0.767112758755684}, {"epoch": 34, "train_loss": 0.39792736470699314, "train_acc": 0.8619199836254117, "train_prec": 0.8698160612583159, "val_loss": 0.7227171635627747, "val_acc": 0.7512199831008916, "val_prec": 0.7600918972492218}, {"epoch": 35, "train_loss": 0.3849626001715662, "train_acc": 0.8651799774169917, "train_prec": 0.8735229831933976, "val_loss": 0.7109422230720522, "val_acc": 0.7560599786043167, "val_prec": 0.7653034955263137}, {"epoch": 36, "train_loss": 0.4011904630064965, "train_acc": 0.8606399780511854, "train_prec": 0.8690852004289624, "val_loss": 0.7176981931924818, "val_acc": 0.7563199847936628, "val_prec": 0.7659614229202272}, {"epoch": 37, "train_loss": 0.4095932424068451, "train_acc": 0.8572199857234953, "train_prec": 0.8661720132827758, "val_loss": 0.7017793822288515, "val_acc": 0.758559980392456, "val_prec": 0.7682777768373489}, {"epoch": 38, "train_loss": 0.3592332828044892, "train_acc": 0.8720799756050107, "train_prec": 0.8805969810485841, "val_loss": 0.7090010043978688, "val_acc": 0.7600199806690212, "val_prec": 0.769234290122986}, {"epoch": 39, "train_loss": 0.35758514046669015, "train_acc": 0.8756999790668489, "train_prec": 0.8830735313892366, "val_loss": 0.718720741868019, "val_acc": 0.7556199812889098, "val_prec": 0.7659639984369279}, {"epoch": 40, "train_loss": 0.3500005473196506, "train_acc": 0.8770799756050109, "train_prec": 0.8847636157274246, "val_loss": 0.7114540696144102, "val_acc": 0.7584199744462965, "val_prec": 0.7676289892196655}, {"epoch": 41, "train_loss": 0.3283409409224987, "train_acc": 0.8827999806404115, "train_prec": 0.890210076570511, "val_loss": 0.713974276185036, "val_acc": 0.7565599870681764, "val_prec": 0.7667412710189818}, {"epoch": 42, "train_loss": 0.3052236208319664, "train_acc": 0.8926799631118774, "train_prec": 0.899774826169014, 
"val_loss": 0.7263717275857924, "val_acc": 0.7582399833202361, "val_prec": 0.7670392000675204}, {"epoch": 43, "train_loss": 0.3088780747354031, "train_acc": 0.8893399643898008, "train_prec": 0.8968157947063443, "val_loss": 0.7194139677286148, "val_acc": 0.7578999823331838, "val_prec": 0.7674858403205871}, {"epoch": 44, "train_loss": 0.300919253975153, "train_acc": 0.8934199601411821, "train_prec": 0.9004045325517652, "val_loss": 0.7339319053292278, "val_acc": 0.7511599791049957, "val_prec": 0.7606399381160736}, {"epoch": 45, "train_loss": 0.3194838719069957, "train_acc": 0.8894999718666071, "train_prec": 0.8973948061466214, "val_loss": 0.730180557668209, "val_acc": 0.7583999824523925, "val_prec": 0.7677506518363952}, {"epoch": 46, "train_loss": 0.35517937362194063, "train_acc": 0.875059980750084, "train_prec": 0.8826822263002396, "val_loss": 0.7228685134649274, "val_acc": 0.7549999791383742, "val_prec": 0.7641144257783888}, {"epoch": 47, "train_loss": 0.3057018488645554, "train_acc": 0.8916599726676938, "train_prec": 0.8984758222103122, "val_loss": 0.7406522804498675, "val_acc": 0.7528199833631515, "val_prec": 0.762018730044365}, {"epoch": 48, "train_loss": 0.31553102940320965, "train_acc": 0.8912199598550794, "train_prec": 0.8985586023330691, "val_loss": 0.7228934127092361, "val_acc": 0.7554599833488465, "val_prec": 0.7640307146310807}, {"epoch": 49, "train_loss": 0.2728763288259505, "train_acc": 0.9049199533462523, "train_prec": 0.9115372896194458, "val_loss": 0.7386689829826354, "val_acc": 0.7542999750375744, "val_prec": 0.7634867769479748}, {"epoch": 50, "train_loss": 0.2733266896009445, "train_acc": 0.9036799561977391, "train_prec": 0.910268543958664, "val_loss": 0.7712080425024034, "val_acc": 0.7471399807929991, "val_prec": 0.7557066929340366}] -------------------------------------------------------------------------------- /models/2020-09-24_182934/model_results.json: -------------------------------------------------------------------------------- 1 | 
[{"epoch": 1, "train_loss": 1.4050552248954773, "train_acc": 0.42979999020695697, "train_prec": 0.4368456469476223, "val_loss": 1.2835045754909515, "val_acc": 0.49935998380184177, "val_prec": 0.5051347509026528}, {"epoch": 2, "train_loss": 1.1749920588731761, "train_acc": 0.5525999706983565, "train_prec": 0.5616728046536444, "val_loss": 1.1304177451133726, "val_acc": 0.5737199783325195, "val_prec": 0.5839154139161112}, {"epoch": 3, "train_loss": 1.0546518218517311, "train_acc": 0.6029999732971196, "train_prec": 0.616466352343559, "val_loss": 1.0672698831558227, "val_acc": 0.6033599781990052, "val_prec": 0.6124762451648712}, {"epoch": 4, "train_loss": 0.9827031916379929, "train_acc": 0.6293199753761288, "train_prec": 0.6423687440156933, "val_loss": 1.0353948813676834, "val_acc": 0.6144799786806106, "val_prec": 0.6252452754974362}, {"epoch": 5, "train_loss": 0.9414234316349029, "train_acc": 0.6497999703884126, "train_prec": 0.6638156563043592, "val_loss": 0.9879324036836625, "val_acc": 0.6367599773406983, "val_prec": 0.6498115307092668}, {"epoch": 6, "train_loss": 0.9011464482545852, "train_acc": 0.6665999680757522, "train_prec": 0.6824519711732865, "val_loss": 0.9796951276063917, "val_acc": 0.6385599732398984, "val_prec": 0.651194198131561}, {"epoch": 7, "train_loss": 0.8789881974458698, "train_acc": 0.6749199682474137, "train_prec": 0.6914626920223237, "val_loss": 0.950852748155594, "val_acc": 0.6551599735021588, "val_prec": 0.6680511605739594}, {"epoch": 8, "train_loss": 0.8358849543333052, "train_acc": 0.6978799742460249, "train_prec": 0.7167492038011551, "val_loss": 0.9431724530458451, "val_acc": 0.6517199748754502, "val_prec": 0.6663001894950865}, {"epoch": 9, "train_loss": 0.8079701203107832, "train_acc": 0.7073599791526791, "train_prec": 0.7247548878192902, "val_loss": 0.9172266811132435, "val_acc": 0.6677199691534045, "val_prec": 0.6836534136533735}, {"epoch": 10, "train_loss": 0.8137411302328111, "train_acc": 0.7056399726867675, "train_prec": 
0.7218947499990463, "val_loss": 0.9036535602807999, "val_acc": 0.6757999718189236, "val_prec": 0.6917045545578001}, {"epoch": 11, "train_loss": 0.7936496502161027, "train_acc": 0.7159999781847, "train_prec": 0.7343740046024325, "val_loss": 0.8910063886642459, "val_acc": 0.6755999785661698, "val_prec": 0.6899944585561751}, {"epoch": 12, "train_loss": 0.7924365496635437, "train_acc": 0.7172799831628799, "train_prec": 0.7358734452724455, "val_loss": 0.8956568276882173, "val_acc": 0.6776399761438365, "val_prec": 0.6926411819458009}, {"epoch": 13, "train_loss": 0.7795248574018478, "train_acc": 0.7215599811077118, "train_prec": 0.7390091288089748, "val_loss": 0.864805569052696, "val_acc": 0.6875599741935731, "val_prec": 0.7033459967374803}, {"epoch": 14, "train_loss": 0.7543122035264965, "train_acc": 0.726239985823631, "train_prec": 0.7437854832410813, "val_loss": 0.8623955178260804, "val_acc": 0.6941999775171277, "val_prec": 0.7116834169626236}, {"epoch": 15, "train_loss": 0.7510536777973174, "train_acc": 0.730559982061386, "train_prec": 0.7464714485406878, "val_loss": 0.8570504212379455, "val_acc": 0.6904399669170377, "val_prec": 0.7066621387004853}, {"epoch": 16, "train_loss": 0.7412149387598039, "train_acc": 0.7333599746227265, "train_prec": 0.7493678057193758, "val_loss": 0.8350716954469682, "val_acc": 0.6997199720144274, "val_prec": 0.7157055068016053}, {"epoch": 17, "train_loss": 0.7261148291826244, "train_acc": 0.742039983868599, "train_prec": 0.7599253034591674, "val_loss": 0.8528078430891038, "val_acc": 0.6918399727344512, "val_prec": 0.7098454499244685}, {"epoch": 18, "train_loss": 0.71138476729393, "train_acc": 0.746559979915619, "train_prec": 0.765315904021263, "val_loss": 0.8490276604890825, "val_acc": 0.6959599775075914, "val_prec": 0.7141678684949873}, {"epoch": 19, "train_loss": 0.7128977370262145, "train_acc": 0.7433999764919278, "train_prec": 0.7612257468700409, "val_loss": 0.8078671759366989, "val_acc": 0.7153199738264087, "val_prec": 
0.7297253018617631}, {"epoch": 20, "train_loss": 0.6864338034391404, "train_acc": 0.7540799856185911, "train_prec": 0.7728213274478917, "val_loss": 0.8185456234216693, "val_acc": 0.7123199778795245, "val_prec": 0.7282874017953872}, {"epoch": 21, "train_loss": 0.6800500470399857, "train_acc": 0.7629999876022339, "train_prec": 0.780096513032913, "val_loss": 0.8198396950960162, "val_acc": 0.7115999728441238, "val_prec": 0.7279580295085907}, {"epoch": 22, "train_loss": 0.6665360778570175, "train_acc": 0.7623199802637098, "train_prec": 0.7806569826602937, "val_loss": 0.790915312170983, "val_acc": 0.7179999804496765, "val_prec": 0.7341582310199739}, {"epoch": 23, "train_loss": 0.6620331674814227, "train_acc": 0.7653599882125852, "train_prec": 0.7849110168218613, "val_loss": 0.7773974233865741, "val_acc": 0.7239999735355375, "val_prec": 0.7405413496494293}, {"epoch": 24, "train_loss": 0.6513699081540109, "train_acc": 0.7668399852514262, "train_prec": 0.7854015249013905, "val_loss": 0.7750326842069623, "val_acc": 0.7243199843168258, "val_prec": 0.7429328131675721}, {"epoch": 25, "train_loss": 0.6379456120729448, "train_acc": 0.7757999825477604, "train_prec": 0.7931571465730666, "val_loss": 0.774650807976723, "val_acc": 0.7274799859523774, "val_prec": 0.7438287603855135}, {"epoch": 26, "train_loss": 0.623195051252842, "train_acc": 0.7807999831438064, "train_prec": 0.7991160702705384, "val_loss": 0.7877795946598051, "val_acc": 0.72171997487545, "val_prec": 0.7398155856132507}, {"epoch": 27, "train_loss": 0.6654837289452552, "train_acc": 0.7667599844932557, "train_prec": 0.7867247956991199, "val_loss": 0.8211203497648236, "val_acc": 0.7164799797534944, "val_prec": 0.7356896495819092}, {"epoch": 28, "train_loss": 0.6329267334938049, "train_acc": 0.7798399871587752, "train_prec": 0.7960822582244874, "val_loss": 0.7678331911563873, "val_acc": 0.7271999835968019, "val_prec": 0.7420283299684529}, {"epoch": 29, "train_loss": 0.5946444025635718, "train_acc": 0.7912399864196775, 
"train_prec": 0.8083026587963106, "val_loss": 0.7652000689506534, "val_acc": 0.7272799813747406, "val_prec": 0.7443503385782237}, {"epoch": 30, "train_loss": 0.5781415128707887, "train_acc": 0.7940399813652037, "train_prec": 0.812173887491226, "val_loss": 0.7326393711566922, "val_acc": 0.7411599814891816, "val_prec": 0.7560919040441513}, {"epoch": 31, "train_loss": 0.5593811991810801, "train_acc": 0.8035999810695647, "train_prec": 0.821012636423111, "val_loss": 0.763602088689804, "val_acc": 0.730919979214668, "val_prec": 0.7478819048404695}, {"epoch": 32, "train_loss": 0.5542616236209867, "train_acc": 0.8040799838304518, "train_prec": 0.8212158715724943, "val_loss": 0.7340987169742584, "val_acc": 0.7427199828624726, "val_prec": 0.760375400185585}, {"epoch": 33, "train_loss": 0.546882818043232, "train_acc": 0.8059599840641023, "train_prec": 0.8236212188005447, "val_loss": 0.7569942003488539, "val_acc": 0.7361599797010423, "val_prec": 0.7543194860219955}, {"epoch": 34, "train_loss": 0.514598584473133, "train_acc": 0.8193999844789505, "train_prec": 0.8353895539045334, "val_loss": 0.7272539722919463, "val_acc": 0.7483199828863143, "val_prec": 0.7630108726024628}, {"epoch": 35, "train_loss": 0.5065781682729722, "train_acc": 0.8213599890470504, "train_prec": 0.8367599254846569, "val_loss": 0.7238623860478405, "val_acc": 0.7446399813890457, "val_prec": 0.762705191373825}, {"epoch": 36, "train_loss": 0.5020992264151574, "train_acc": 0.8225599837303165, "train_prec": 0.8379787635803222, "val_loss": 0.7247524765133857, "val_acc": 0.7477999830245972, "val_prec": 0.7634207135438921}, {"epoch": 37, "train_loss": 0.4616243097186089, "train_acc": 0.8363199937343596, "train_prec": 0.8517710250616075, "val_loss": 0.7259755194187165, "val_acc": 0.7505199772119521, "val_prec": 0.7660659623146058}, {"epoch": 38, "train_loss": 0.4668039381504058, "train_acc": 0.8345199978351592, "train_prec": 0.8497984111309047, "val_loss": 0.7107876047492027, "val_acc": 0.7550399827957155, "val_prec": 
0.7707787311077118}, {"epoch": 39, "train_loss": 0.4666645941138269, "train_acc": 0.8373199898004531, "train_prec": 0.8532004684209825, "val_loss": 0.7132843664288521, "val_acc": 0.7527199834585189, "val_prec": 0.7674548810720444}, {"epoch": 40, "train_loss": 0.4447044402360915, "train_acc": 0.8416799861192701, "train_prec": 0.8572906237840653, "val_loss": 0.7279847693443299, "val_acc": 0.7473599857091904, "val_prec": 0.763543018102646}, {"epoch": 41, "train_loss": 0.4367961537837983, "train_acc": 0.8445199918746952, "train_prec": 0.8599977731704712, "val_loss": 0.7220338633656502, "val_acc": 0.7541199856996535, "val_prec": 0.7675092881917954}, {"epoch": 42, "train_loss": 0.41824514955282194, "train_acc": 0.8510399925708774, "train_prec": 0.8648580104112624, "val_loss": 0.7378065669536591, "val_acc": 0.7479599940776823, "val_prec": 0.762643804550171}, {"epoch": 43, "train_loss": 0.45068127900362004, "train_acc": 0.8417999833822251, "train_prec": 0.857628918290138, "val_loss": 0.73729720056057, "val_acc": 0.7429599857330326, "val_prec": 0.7610118120908737}, {"epoch": 44, "train_loss": 0.43125742107629783, "train_acc": 0.8472399979829786, "train_prec": 0.8612327736616135, "val_loss": 0.7278633427619938, "val_acc": 0.7432799834012982, "val_prec": 0.7589177632331848}, {"epoch": 45, "train_loss": 0.4360764262080194, "train_acc": 0.8462399905920026, "train_prec": 0.8605958580970761, "val_loss": 0.6987554997205729, "val_acc": 0.7565999835729602, "val_prec": 0.7739319902658461}, {"epoch": 46, "train_loss": 0.4096176478266715, "train_acc": 0.8549599903821944, "train_prec": 0.869761263728142, "val_loss": 0.7247921460866926, "val_acc": 0.7501599782705305, "val_prec": 0.7662192869186405}, {"epoch": 47, "train_loss": 0.4084139943122863, "train_acc": 0.8547199928760532, "train_prec": 0.8698664987087255, "val_loss": 0.7215953201055528, "val_acc": 0.7507199811935424, "val_prec": 0.7649012207984925}, {"epoch": 48, "train_loss": 0.3990401557087898, "train_acc": 0.8574799883365631, 
"train_prec": 0.8722553026676179, "val_loss": 0.7276958471536634, "val_acc": 0.7481599849462509, "val_prec": 0.764310591816902}, {"epoch": 49, "train_loss": 0.3835043624043464, "train_acc": 0.8633200007677077, "train_prec": 0.8779260957241056, "val_loss": 0.7227001488208772, "val_acc": 0.7461199826002121, "val_prec": 0.7611614584922792}, {"epoch": 50, "train_loss": 0.3727562059462069, "train_acc": 0.8675999957323075, "train_prec": 0.88045529961586, "val_loss": 0.7160706382989882, "val_acc": 0.7554399794340133, "val_prec": 0.7718309456110003}] -------------------------------------------------------------------------------- /models/2020-09-22_164258/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 1.4325762498378751, "train_acc": 0.41000000178813945, "train_prec": 0.3079386709630489, "val_loss": 1.3469718682765957, "val_acc": 0.4620000079274178, "val_prec": 0.36286000072956087}, {"epoch": 2, "train_loss": 1.277319979667663, "train_acc": 0.4982400089502334, "train_prec": 0.4020533370971681, "val_loss": 1.2729087495803837, "val_acc": 0.49056001067161553, "val_prec": 0.3912466698884963}, {"epoch": 3, "train_loss": 1.2152833783626549, "train_acc": 0.525200008749962, "train_prec": 0.4294266682863237, "val_loss": 1.2427418231964114, "val_acc": 0.5066400095820426, "val_prec": 0.41298666715621923}, {"epoch": 4, "train_loss": 1.1844554221630104, "train_acc": 0.5424800130724908, "train_prec": 0.4471266674995422, "val_loss": 1.2198567175865178, "val_acc": 0.5234400099515915, "val_prec": 0.4293266689777374}, {"epoch": 5, "train_loss": 1.1623015260696408, "train_acc": 0.5535200080275535, "train_prec": 0.4608866661787034, "val_loss": 1.2054544520378114, "val_acc": 0.5332000097632411, "val_prec": 0.4425266686081886}, {"epoch": 6, "train_loss": 1.1290760636329653, "train_acc": 0.5649600106477739, "train_prec": 0.4714400058984756, "val_loss": 1.1846018248796466, "val_acc": 0.5417600080370906, 
"val_prec": 0.45179333448410036}, {"epoch": 7, "train_loss": 1.1060804700851443, "train_acc": 0.5768000069260597, "train_prec": 0.48702666729688654, "val_loss": 1.156299984455108, "val_acc": 0.5505600053071974, "val_prec": 0.46064666658639897}, {"epoch": 8, "train_loss": 1.0851660585403449, "train_acc": 0.5888800138235092, "train_prec": 0.4993133306503296, "val_loss": 1.1465697252750398, "val_acc": 0.5616800040006639, "val_prec": 0.47014666855335235}, {"epoch": 9, "train_loss": 1.0648492342233657, "train_acc": 0.5990400063991546, "train_prec": 0.5105533337593077, "val_loss": 1.1328476852178575, "val_acc": 0.5650400087237356, "val_prec": 0.47594000369310374}, {"epoch": 10, "train_loss": 1.0658352082967755, "train_acc": 0.5934400105476378, "train_prec": 0.5063466665148731, "val_loss": 1.151495976448059, "val_acc": 0.560480005741119, "val_prec": 0.4731133311986923}, {"epoch": 11, "train_loss": 1.0334090352058414, "train_acc": 0.6116000026464459, "train_prec": 0.5275400066375732, "val_loss": 1.107668511271477, "val_acc": 0.577360011637211, "val_prec": 0.4906999990344048}, {"epoch": 12, "train_loss": 1.0301395314931876, "train_acc": 0.6095200085639955, "train_prec": 0.5238466656208037, "val_loss": 1.1111869961023326, "val_acc": 0.570800005495548, "val_prec": 0.48428666710853574}, {"epoch": 13, "train_loss": 1.023163477182388, "train_acc": 0.6074400067329407, "train_prec": 0.5229199984669683, "val_loss": 1.1165091425180436, "val_acc": 0.5782400086522099, "val_prec": 0.4928933361172676}, {"epoch": 14, "train_loss": 1.0053579396009442, "train_acc": 0.6194400128722192, "train_prec": 0.5371733349561691, "val_loss": 1.0660837560892107, "val_acc": 0.6030400061607362, "val_prec": 0.5218000066280366}, {"epoch": 15, "train_loss": 0.9875981205701829, "train_acc": 0.626240004301071, "train_prec": 0.5415599986910822, "val_loss": 1.0859326332807544, "val_acc": 0.58672001093626, "val_prec": 0.5008066686987876}, {"epoch": 16, "train_loss": 1.0020873379707338, "train_acc": 
0.6260800039768217, "train_prec": 0.5412533372640612, "val_loss": 1.0641237628459936, "val_acc": 0.5939200067520145, "val_prec": 0.5082399973273274}, {"epoch": 17, "train_loss": 0.9877267688512799, "train_acc": 0.6267200046777724, "train_prec": 0.5449133375287056, "val_loss": 1.065859721302986, "val_acc": 0.5975200110673907, "val_prec": 0.5118799990415571}, {"epoch": 18, "train_loss": 0.9539020019769667, "train_acc": 0.6456800049543381, "train_prec": 0.565239999592304, "val_loss": 1.0417309725284574, "val_acc": 0.6060800072550775, "val_prec": 0.5220533347129823}, {"epoch": 19, "train_loss": 0.9537293505668641, "train_acc": 0.6419200116395951, "train_prec": 0.5593199998140337, "val_loss": 1.0662927329540257, "val_acc": 0.5966400098800655, "val_prec": 0.5129199978709222}, {"epoch": 20, "train_loss": 0.9498166656494138, "train_acc": 0.6443200039863586, "train_prec": 0.5654933404922485, "val_loss": 1.0496212643384937, "val_acc": 0.60240001142025, "val_prec": 0.5192533320188523}, {"epoch": 21, "train_loss": 0.96630233168602, "train_acc": 0.6410400009155275, "train_prec": 0.5601933357119561, "val_loss": 1.059010350704193, "val_acc": 0.6024000084400177, "val_prec": 0.5189333385229112}, {"epoch": 22, "train_loss": 0.9584412837028505, "train_acc": 0.6336000084877014, "train_prec": 0.5509066656231881, "val_loss": 1.0640463459491727, "val_acc": 0.5986400079727171, "val_prec": 0.5159466654062271}, {"epoch": 23, "train_loss": 0.949653570652008, "train_acc": 0.643120008111, "train_prec": 0.5618000039458274, "val_loss": 1.0583042007684704, "val_acc": 0.5994400104880332, "val_prec": 0.5161200007796289}, {"epoch": 24, "train_loss": 0.9286003386974334, "train_acc": 0.6561600065231326, "train_prec": 0.5747866687178609, "val_loss": 1.064986595511437, "val_acc": 0.6006400060653686, "val_prec": 0.5170799961686133}, {"epoch": 25, "train_loss": 0.9292438799142833, "train_acc": 0.6508800095319747, "train_prec": 0.5720000076293941, "val_loss": 1.0384239786863332, "val_acc": 
0.6088800075650214, "val_prec": 0.5242866680026054}, {"epoch": 26, "train_loss": 0.9181583386659623, "train_acc": 0.6544000029563908, "train_prec": 0.5754533359408378, "val_loss": 1.0242801088094706, "val_acc": 0.6131200075149538, "val_prec": 0.5329733347892759}, {"epoch": 27, "train_loss": 0.9243269807100298, "train_acc": 0.6516000050306321, "train_prec": 0.5747733312845229, "val_loss": 1.053532037138939, "val_acc": 0.6049600067734718, "val_prec": 0.5231733369827273}, {"epoch": 28, "train_loss": 0.9216891157627107, "train_acc": 0.655120005011559, "train_prec": 0.578413337171078, "val_loss": 1.0061104083061219, "val_acc": 0.6232000088691712, "val_prec": 0.5413199993968008}, {"epoch": 29, "train_loss": 0.9050189054012298, "train_acc": 0.6616800069808959, "train_prec": 0.5833266696333883, "val_loss": 1.0283850228786466, "val_acc": 0.6132000073790549, "val_prec": 0.5323266682028772}, {"epoch": 30, "train_loss": 0.9004579931497575, "train_acc": 0.6626400023698805, "train_prec": 0.5847866734862326, "val_loss": 1.0042273759841915, "val_acc": 0.6212800112366674, "val_prec": 0.5406800034642218}, {"epoch": 31, "train_loss": 0.886519719958305, "train_acc": 0.6715200054645537, "train_prec": 0.5937866741418839, "val_loss": 0.9972615408897401, "val_acc": 0.6215200012922286, "val_prec": 0.5451133358478547}, {"epoch": 32, "train_loss": 0.8875252383947372, "train_acc": 0.6711200070381166, "train_prec": 0.5968333351612091, "val_loss": 0.9939192974567416, "val_acc": 0.6310400047898294, "val_prec": 0.5519466665387153}, {"epoch": 33, "train_loss": 0.8620206326246262, "train_acc": 0.6757600015401842, "train_prec": 0.6021333342790605, "val_loss": 0.9802996331453323, "val_acc": 0.6352800035476686, "val_prec": 0.5588600039482112}, {"epoch": 34, "train_loss": 0.8806593769788739, "train_acc": 0.6705600076913835, "train_prec": 0.5944466736912722, "val_loss": 0.9843089032173155, "val_acc": 0.6249600127339364, "val_prec": 0.5450133335590364}, {"epoch": 35, "train_loss": 0.8715845322608948, 
"train_acc": 0.6708800077438354, "train_prec": 0.5942266663908956, "val_loss": 0.9590445488691328, "val_acc": 0.6488000071048737, "val_prec": 0.5722266694903372}, {"epoch": 36, "train_loss": 0.8465874814987181, "train_acc": 0.6828800034523008, "train_prec": 0.6083866703510286, "val_loss": 0.9820033359527586, "val_acc": 0.6324000078439712, "val_prec": 0.5536666697263711}, {"epoch": 37, "train_loss": 0.8428265839815139, "train_acc": 0.6852800005674364, "train_prec": 0.6105533364415167, "val_loss": 0.9918826150894166, "val_acc": 0.635360006093979, "val_prec": 0.5588666692376135}, {"epoch": 38, "train_loss": 0.8500686001777649, "train_acc": 0.6844799977540966, "train_prec": 0.6084466698765753, "val_loss": 0.956585420370102, "val_acc": 0.6468000036478041, "val_prec": 0.5683200013637543}, {"epoch": 39, "train_loss": 0.8469164371490481, "train_acc": 0.6853600049018858, "train_prec": 0.6131733387708661, "val_loss": 0.9895469117164611, "val_acc": 0.637520005106926, "val_prec": 0.560080001652241}, {"epoch": 40, "train_loss": 0.8502310425043105, "train_acc": 0.6830400019884113, "train_prec": 0.6074133390188218, "val_loss": 0.9654778504371643, "val_acc": 0.6416000109910963, "val_prec": 0.5661200037598612}, {"epoch": 41, "train_loss": 0.8390482497215268, "train_acc": 0.6867999979853634, "train_prec": 0.6130400049686433, "val_loss": 0.9659836781024932, "val_acc": 0.6392000037431718, "val_prec": 0.5615466740727424}, {"epoch": 42, "train_loss": 0.8232296389341357, "train_acc": 0.6915200072526934, "train_prec": 0.6205866709351541, "val_loss": 0.9705560606718064, "val_acc": 0.6433600020408629, "val_prec": 0.5680533361434935}, {"epoch": 43, "train_loss": 0.8441742151975632, "train_acc": 0.6916800051927565, "train_prec": 0.6188600039482114, "val_loss": 0.9647520208358764, "val_acc": 0.6364800044894218, "val_prec": 0.5603733322024347}, {"epoch": 44, "train_loss": 0.8521667963266369, "train_acc": 0.6802400052547456, "train_prec": 0.6050800007581711, "val_loss": 0.9466189789772038, 
"val_acc": 0.6493600076436994, "val_prec": 0.5719200032949449}, {"epoch": 45, "train_loss": 0.818618735074997, "train_acc": 0.6944800007343291, "train_prec": 0.6230666723847391, "val_loss": 0.9537053227424621, "val_acc": 0.6433600032329558, "val_prec": 0.5656000024080277}, {"epoch": 46, "train_loss": 0.8308690625429153, "train_acc": 0.6909600061178208, "train_prec": 0.6200533401966093, "val_loss": 0.9493724924325941, "val_acc": 0.6460000079870222, "val_prec": 0.568079999089241}, {"epoch": 47, "train_loss": 0.8271123605966566, "train_acc": 0.6900799989700321, "train_prec": 0.6155333414673805, "val_loss": 0.9541021364927295, "val_acc": 0.6478400027751919, "val_prec": 0.5700800055265428}, {"epoch": 48, "train_loss": 0.8045426124334335, "train_acc": 0.6982400137186052, "train_prec": 0.6241866749525067, "val_loss": 0.97183674633503, "val_acc": 0.6413600033521651, "val_prec": 0.565220002233982}, {"epoch": 49, "train_loss": 0.8073032730817795, "train_acc": 0.6943200033903123, "train_prec": 0.6216400051116949, "val_loss": 0.9534719204902651, "val_acc": 0.6435200053453444, "val_prec": 0.5676933386921882}, {"epoch": 50, "train_loss": 0.8346536362171169, "train_acc": 0.6884000110626219, "train_prec": 0.6155333384871481, "val_loss": 0.929871670603752, "val_acc": 0.6531200003623964, "val_prec": 0.5754400017857552}] -------------------------------------------------------------------------------- /models/2020-09-23_004245/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 1.9728326046466829, "train_acc": 0.32575198873877514, "train_prec": 0.31822306655347354, "val_loss": 1.6963836109638213, "val_acc": 0.45320798397064205, "val_prec": 0.4561181920766831}, {"epoch": 2, "train_loss": 1.499625157117843, "train_acc": 0.5214479872584343, "train_prec": 0.5314260283112524, "val_loss": 1.4405085372924809, "val_acc": 0.5380719825625419, "val_prec": 0.5496755048632624}, {"epoch": 3, "train_loss": 
1.2909336626529693, "train_acc": 0.5853519839048383, "train_prec": 0.6036539113521575, "val_loss": 1.30018131017685, "val_acc": 0.5878079897165296, "val_prec": 0.6018509382009507}, {"epoch": 4, "train_loss": 1.1834617960453029, "train_acc": 0.6200799858570096, "train_prec": 0.6384901672601697, "val_loss": 1.248702585697174, "val_acc": 0.6034639889001846, "val_prec": 0.6185230261087415}, {"epoch": 5, "train_loss": 1.0977587848901746, "train_acc": 0.650695988535881, "train_prec": 0.6700976693630221, "val_loss": 1.1857040619850157, "val_acc": 0.6248159879446029, "val_prec": 0.6421977829933168}, {"epoch": 6, "train_loss": 1.0410634332895277, "train_acc": 0.6699119865894315, "train_prec": 0.6898040634393695, "val_loss": 1.132655682563782, "val_acc": 0.6447519844770432, "val_prec": 0.6630499792099}, {"epoch": 7, "train_loss": 0.9966586452722549, "train_acc": 0.6838799923658373, "train_prec": 0.7049359488487245, "val_loss": 1.0953712344169615, "val_acc": 0.6560239928960802, "val_prec": 0.673769527673721}, {"epoch": 8, "train_loss": 0.9661781901121137, "train_acc": 0.6930159813165666, "train_prec": 0.7124602800607681, "val_loss": 1.0922237098217016, "val_acc": 0.6570799887180329, "val_prec": 0.6759321451187135}, {"epoch": 9, "train_loss": 0.9420299875736236, "train_acc": 0.7033119785785671, "train_prec": 0.7244375103712082, "val_loss": 1.0869740802049639, "val_acc": 0.6597279882431031, "val_prec": 0.677577006816864}, {"epoch": 10, "train_loss": 0.9721045446395872, "train_acc": 0.6931919795274734, "train_prec": 0.7136072069406507, "val_loss": 1.1312975943088535, "val_acc": 0.6434879916906356, "val_prec": 0.6627676904201506}, {"epoch": 11, "train_loss": 0.9418459415435791, "train_acc": 0.7003439843654635, "train_prec": 0.7207694792747499, "val_loss": 1.0692703634500502, "val_acc": 0.6642159950733186, "val_prec": 0.6825372862815856}, {"epoch": 12, "train_loss": 0.9068096011877064, "train_acc": 0.7116239815950394, "train_prec": 0.7320320636034015, "val_loss": 
1.105440361499786, "val_acc": 0.6538479834795001, "val_prec": 0.6722495323419571}, {"epoch": 13, "train_loss": 0.8968131434917448, "train_acc": 0.7166399842500686, "train_prec": 0.737482545375824, "val_loss": 1.064770380854607, "val_acc": 0.6663439863920211, "val_prec": 0.683833700418472}, {"epoch": 14, "train_loss": 0.8859411066770554, "train_acc": 0.7186159843206409, "train_prec": 0.7389922231435777, "val_loss": 1.0500524252653123, "val_acc": 0.6708559924364093, "val_prec": 0.6897689795494079}, {"epoch": 15, "train_loss": 0.8534770518541337, "train_acc": 0.7288159716129301, "train_prec": 0.7486007714271544, "val_loss": 1.0375267708301537, "val_acc": 0.6756079936027528, "val_prec": 0.693779907822609}, {"epoch": 16, "train_loss": 0.8389956456422808, "train_acc": 0.7321919775009155, "train_prec": 0.7525337946414948, "val_loss": 1.0212490755319599, "val_acc": 0.6800559961795805, "val_prec": 0.6984249871969221}, {"epoch": 17, "train_loss": 0.855463479757309, "train_acc": 0.7276159852743145, "train_prec": 0.7480412912368775, "val_loss": 1.0223137819766999, "val_acc": 0.6797519826889037, "val_prec": 0.6970163047313693}, {"epoch": 18, "train_loss": 0.8896973448991774, "train_acc": 0.7178079807758331, "train_prec": 0.7399313539266587, "val_loss": 1.0905907267332067, "val_acc": 0.6570239865779878, "val_prec": 0.6753527504205707}, {"epoch": 19, "train_loss": 0.8522163790464399, "train_acc": 0.7287919837236405, "train_prec": 0.7497522389888761, "val_loss": 1.0335731244087212, "val_acc": 0.6749919962882994, "val_prec": 0.6933972245454791}, {"epoch": 20, "train_loss": 0.8239048117399213, "train_acc": 0.7360639858245849, "train_prec": 0.7566585528850555, "val_loss": 1.0420613420009615, "val_acc": 0.6751919806003571, "val_prec": 0.6933221495151523}, {"epoch": 21, "train_loss": 0.8182825785875323, "train_acc": 0.7393519788980486, "train_prec": 0.7594584101438523, "val_loss": 1.0310210055112836, "val_acc": 0.6769199943542479, "val_prec": 0.694017077088356}, {"epoch": 22, 
"train_loss": 0.8272908782958986, "train_acc": 0.7359759789705279, "train_prec": 0.7563235503435134, "val_loss": 1.0302603107690813, "val_acc": 0.6765199851989743, "val_prec": 0.6951820790767671}, {"epoch": 23, "train_loss": 0.8149255192279816, "train_acc": 0.7402879852056505, "train_prec": 0.7605631101131438, "val_loss": 1.0210409176349637, "val_acc": 0.6806159883737564, "val_prec": 0.6991478031873705}, {"epoch": 24, "train_loss": 0.8042396110296247, "train_acc": 0.7427359777688982, "train_prec": 0.7629808443784711, "val_loss": 1.023834362626076, "val_acc": 0.6789039868116379, "val_prec": 0.6973717069625858}, {"epoch": 25, "train_loss": 0.7901008427143099, "train_acc": 0.7470399844646454, "train_prec": 0.7666625565290452, "val_loss": 1.0188184356689454, "val_acc": 0.682343988418579, "val_prec": 0.7014199614524841}, {"epoch": 26, "train_loss": 0.8354630154371259, "train_acc": 0.7332399821281432, "train_prec": 0.7537290072441103, "val_loss": 1.0446905255317676, "val_acc": 0.6725359839200977, "val_prec": 0.6911116325855254}, {"epoch": 27, "train_loss": 0.8406923168897629, "train_acc": 0.7318319815397262, "train_prec": 0.7525067001581189, "val_loss": 1.0448952740430832, "val_acc": 0.6724639892578124, "val_prec": 0.6919636863470079}, {"epoch": 28, "train_loss": 0.8062977933883668, "train_acc": 0.7441519773006436, "train_prec": 0.7646819233894346, "val_loss": 1.0362032425403593, "val_acc": 0.6765039896965026, "val_prec": 0.6947171491384506}, {"epoch": 29, "train_loss": 0.8110136234760282, "train_acc": 0.7397599822282791, "train_prec": 0.7597387534379961, "val_loss": 1.0364571392536166, "val_acc": 0.675287984609604, "val_prec": 0.6933915591239932}, {"epoch": 30, "train_loss": 0.8012198144197463, "train_acc": 0.7441999816894533, "train_prec": 0.7631837207078931, "val_loss": 1.0336602103710173, "val_acc": 0.6763199841976167, "val_prec": 0.6939477378129959}, {"epoch": 31, "train_loss": 0.7914568454027178, "train_acc": 0.7479439789056778, "train_prec": 0.7675126802921293, 
"val_loss": 1.016493408083916, "val_acc": 0.6810479867458342, "val_prec": 0.698177787065506}, {"epoch": 32, "train_loss": 0.7718811672925953, "train_acc": 0.7521199834346769, "train_prec": 0.7715055030584337, "val_loss": 1.0122655844688417, "val_acc": 0.6837919872999192, "val_prec": 0.7014006888866426}, {"epoch": 33, "train_loss": 0.7808473390340805, "train_acc": 0.7499839705228807, "train_prec": 0.7701760417222974, "val_loss": 1.053896716833115, "val_acc": 0.6689279866218572, "val_prec": 0.6868326765298844}, {"epoch": 34, "train_loss": 0.8055335325002668, "train_acc": 0.7427599847316745, "train_prec": 0.7628480929136278, "val_loss": 1.0259887093305589, "val_acc": 0.6798639857769012, "val_prec": 0.6985782855749129}, {"epoch": 35, "train_loss": 0.7716230666637421, "train_acc": 0.753895977139473, "train_prec": 0.7737512481212615, "val_loss": 1.0216137140989303, "val_acc": 0.6822879797220234, "val_prec": 0.7012148189544677}, {"epoch": 36, "train_loss": 0.7593057847023011, "train_acc": 0.7558479791879652, "train_prec": 0.7749556064605718, "val_loss": 1.0227641564607617, "val_acc": 0.682175990343094, "val_prec": 0.6985937243700027}, {"epoch": 37, "train_loss": 0.7662419945001602, "train_acc": 0.7538559740781787, "train_prec": 0.7731657195091247, "val_loss": 1.0358408641815187, "val_acc": 0.6766879880428316, "val_prec": 0.6933444398641586}, {"epoch": 38, "train_loss": 0.7684490126371384, "train_acc": 0.7541039794683456, "train_prec": 0.7742867904901503, "val_loss": 1.0245736980438238, "val_acc": 0.6793999880552292, "val_prec": 0.6961476910114289}, {"epoch": 39, "train_loss": 0.7566710388660431, "train_acc": 0.7556479817628862, "train_prec": 0.7757221227884292, "val_loss": 1.131450783610344, "val_acc": 0.6452559840679166, "val_prec": 0.6653777992725369}, {"epoch": 40, "train_loss": 0.7965342462062834, "train_acc": 0.7455359733104706, "train_prec": 0.7662786281108855, "val_loss": 1.0235798710584638, "val_acc": 0.6785679936408997, "val_prec": 0.6956517565250396}, {"epoch": 
41, "train_loss": 0.7686463302373886, "train_acc": 0.7555119842290876, "train_prec": 0.775622881054878, "val_loss": 1.0340698969364166, "val_acc": 0.6777039927244187, "val_prec": 0.6948725861310961}, {"epoch": 42, "train_loss": 0.7817636477947235, "train_acc": 0.7493599861860276, "train_prec": 0.7701224333047867, "val_loss": 1.0106595361232757, "val_acc": 0.684743984937668, "val_prec": 0.7008879494667054}, {"epoch": 43, "train_loss": 0.7457030421495439, "train_acc": 0.7605599731206897, "train_prec": 0.7794405168294907, "val_loss": 1.0187864392995833, "val_acc": 0.6799439829587934, "val_prec": 0.6960863286256793}, {"epoch": 44, "train_loss": 0.7470970171689988, "train_acc": 0.7619359868764878, "train_prec": 0.7811837762594223, "val_loss": 1.0385328441858288, "val_acc": 0.6740159904956817, "val_prec": 0.6908808547258378}, {"epoch": 45, "train_loss": 0.743912025690079, "train_acc": 0.7604159766435623, "train_prec": 0.7800613135099408, "val_loss": 1.014207464456558, "val_acc": 0.6829439878463747, "val_prec": 0.7004889136552812}, {"epoch": 46, "train_loss": 0.7342916417121887, "train_acc": 0.7627599751949308, "train_prec": 0.782019454240799, "val_loss": 1.0110663002729414, "val_acc": 0.6864799886941909, "val_prec": 0.7038409984111786}, {"epoch": 47, "train_loss": 0.7245367217063902, "train_acc": 0.7659839731454848, "train_prec": 0.7849150085449216, "val_loss": 1.0247193819284441, "val_acc": 0.6791039884090426, "val_prec": 0.6959215831756594}, {"epoch": 48, "train_loss": 0.7447661614418026, "train_acc": 0.7585119807720182, "train_prec": 0.7782155442237851, "val_loss": 1.0511870831251147, "val_acc": 0.6732879889011382, "val_prec": 0.6930071806907653}, {"epoch": 49, "train_loss": 0.8288962960243225, "train_acc": 0.7369999849796293, "train_prec": 0.7596005767583849, "val_loss": 1.0471739983558652, "val_acc": 0.6709679919481274, "val_prec": 0.689572339653969}, {"epoch": 50, "train_loss": 0.7540131300687792, "train_acc": 0.7559599763154985, "train_prec": 0.7748254096508027, 
"val_loss": 1.0277682662010195, "val_acc": 0.6777919840812682, "val_prec": 0.6946132719516754}] -------------------------------------------------------------------------------- /models/2020-09-22_171209/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 0.990300028324127, "train_acc": 0.6488559785485266, "train_prec": 0.6584834283590317, "val_loss": 0.8433761131763458, "val_acc": 0.7023519831895829, "val_prec": 0.7109386199712755}, {"epoch": 2, "train_loss": 0.7373145675659181, "train_acc": 0.7424079757928849, "train_prec": 0.7518334317207337, "val_loss": 0.7593908047676086, "val_acc": 0.7338239812850952, "val_prec": 0.74267580807209}, {"epoch": 3, "train_loss": 0.6807151472568512, "train_acc": 0.761303975582123, "train_prec": 0.7714842444658281, "val_loss": 0.7159196120500563, "val_acc": 0.7493999779224395, "val_prec": 0.7581304055452346}, {"epoch": 4, "train_loss": 0.6416766381263737, "train_acc": 0.7779199838638304, "train_prec": 0.7881541633605956, "val_loss": 0.7195500719547271, "val_acc": 0.7489759773015976, "val_prec": 0.7581865262985228}, {"epoch": 5, "train_loss": 0.6251133102178575, "train_acc": 0.7838319742679593, "train_prec": 0.7943957126140597, "val_loss": 0.6798626911640167, "val_acc": 0.7644159722328189, "val_prec": 0.7742574435472488}, {"epoch": 6, "train_loss": 0.5885234922170638, "train_acc": 0.7988239765167234, "train_prec": 0.8088133442401888, "val_loss": 0.6433255404233934, "val_acc": 0.7783279848098753, "val_prec": 0.7883628875017166}, {"epoch": 7, "train_loss": 0.55068959236145, "train_acc": 0.8092719787359238, "train_prec": 0.819160084724426, "val_loss": 0.626183398962021, "val_acc": 0.786327982544899, "val_prec": 0.7964568984508514}, {"epoch": 8, "train_loss": 0.5236247342824938, "train_acc": 0.8208319759368897, "train_prec": 0.8304827445745466, "val_loss": 0.5967852425575255, "val_acc": 0.7962239772081376, "val_prec": 0.8065490198135377}, {"epoch": 9, 
"train_loss": 0.4992018321156499, "train_acc": 0.8303279763460159, "train_prec": 0.8397287327051164, "val_loss": 0.5697041231393811, "val_acc": 0.8053519767522814, "val_prec": 0.8150186300277706}, {"epoch": 10, "train_loss": 0.48146041214466107, "train_acc": 0.835327968597412, "train_prec": 0.8444522124528885, "val_loss": 0.5652450522780417, "val_acc": 0.8072079783678053, "val_prec": 0.8164730960130692}, {"epoch": 11, "train_loss": 0.47600845247507095, "train_acc": 0.83767196893692, "train_prec": 0.8468652611970903, "val_loss": 0.5553170740604403, "val_acc": 0.810783984065056, "val_prec": 0.8197699761390688}, {"epoch": 12, "train_loss": 0.4569365853071213, "train_acc": 0.8441119790077213, "train_prec": 0.8526248466968536, "val_loss": 0.5423040658235551, "val_acc": 0.8144479757547378, "val_prec": 0.8238279020786287}, {"epoch": 13, "train_loss": 0.4557616743445398, "train_acc": 0.8446079695224761, "train_prec": 0.8535700082778931, "val_loss": 0.5411956775188449, "val_acc": 0.8148319852352142, "val_prec": 0.8239356559515}, {"epoch": 14, "train_loss": 0.4362233799695969, "train_acc": 0.8513199710845945, "train_prec": 0.8598888188600543, "val_loss": 0.539693603217602, "val_acc": 0.8159199774265289, "val_prec": 0.8252380412816998}, {"epoch": 15, "train_loss": 0.44081905037164676, "train_acc": 0.8497279626131056, "train_prec": 0.85771759390831, "val_loss": 0.5522679930925367, "val_acc": 0.812207972407341, "val_prec": 0.8214015674591065}, {"epoch": 16, "train_loss": 0.5426825734972952, "train_acc": 0.8195999735593797, "train_prec": 0.8295498538017274, "val_loss": 0.5795465260744096, "val_acc": 0.8048159801959992, "val_prec": 0.8150006759166718}, {"epoch": 17, "train_loss": 0.45895769119262697, "train_acc": 0.8450639641284944, "train_prec": 0.8543180459737776, "val_loss": 0.5472912514209748, "val_acc": 0.8151359808444977, "val_prec": 0.8249759751558301}, {"epoch": 18, "train_loss": 0.4412469625473024, "train_acc": 0.8499679630994795, "train_prec": 0.8591689383983612, 
"val_loss": 0.5337914064526558, "val_acc": 0.8185759758949279, "val_prec": 0.8280939877033231}, {"epoch": 19, "train_loss": 0.44205163180828105, "train_acc": 0.8491199761629107, "train_prec": 0.8582306259870529, "val_loss": 0.5362377232313155, "val_acc": 0.8191439855098726, "val_prec": 0.8289662408828734}, {"epoch": 20, "train_loss": 0.42327534705400466, "train_acc": 0.8551839768886565, "train_prec": 0.863516670465469, "val_loss": 0.5360849416255951, "val_acc": 0.818719983100891, "val_prec": 0.8283834403753282}, {"epoch": 21, "train_loss": 0.40661114573478696, "train_acc": 0.8612799650430683, "train_prec": 0.8694434452056884, "val_loss": 0.5224995818734169, "val_acc": 0.8217439746856691, "val_prec": 0.8305009800195693}, {"epoch": 22, "train_loss": 0.5355608984827992, "train_acc": 0.8191199755668642, "train_prec": 0.8296981620788574, "val_loss": 0.6223387360572817, "val_acc": 0.7887119764089584, "val_prec": 0.7988537842035295}, {"epoch": 23, "train_loss": 0.4591644129157067, "train_acc": 0.8440559768676759, "train_prec": 0.8533314764499665, "val_loss": 0.5411795672774311, "val_acc": 0.8150879764556885, "val_prec": 0.8243160498142241}, {"epoch": 24, "train_loss": 0.4224553495645523, "train_acc": 0.8560879778862001, "train_prec": 0.8643499237298966, "val_loss": 0.5255212861299514, "val_acc": 0.8196799826622009, "val_prec": 0.8286504513025279}, {"epoch": 25, "train_loss": 0.40217674404382703, "train_acc": 0.8619119662046433, "train_prec": 0.8699077504873272, "val_loss": 0.5160117191076279, "val_acc": 0.8243599760532375, "val_prec": 0.8330408638715745}, {"epoch": 26, "train_loss": 0.39542454868555066, "train_acc": 0.8645359712839127, "train_prec": 0.8727480530738834, "val_loss": 0.5179047185182573, "val_acc": 0.8234239721298215, "val_prec": 0.832421066761017}, {"epoch": 27, "train_loss": 0.3805914464592933, "train_acc": 0.8703359746932982, "train_prec": 0.8781799203157425, "val_loss": 0.5106959453225134, "val_acc": 0.8266879713535308, "val_prec": 0.8350976788997652}, 
{"epoch": 28, "train_loss": 0.379205667078495, "train_acc": 0.8690479785203936, "train_prec": 0.8766763770580293, "val_loss": 0.5080220288038255, "val_acc": 0.8279839754104611, "val_prec": 0.8367386567592623}, {"epoch": 29, "train_loss": 0.3775455671548843, "train_acc": 0.8688159763813015, "train_prec": 0.8766803550720214, "val_loss": 0.5153549581766129, "val_acc": 0.8259919703006741, "val_prec": 0.83450246155262}, {"epoch": 30, "train_loss": 0.3684132984280587, "train_acc": 0.8720079708099365, "train_prec": 0.8793512016534802, "val_loss": 0.5133274018764501, "val_acc": 0.8263839739561083, "val_prec": 0.8347107577323913}, {"epoch": 31, "train_loss": 0.3591170713305474, "train_acc": 0.8756239700317381, "train_prec": 0.8827980214357373, "val_loss": 0.5114043766260147, "val_acc": 0.827327973842621, "val_prec": 0.8355770313739774}, {"epoch": 32, "train_loss": 0.3569085946679115, "train_acc": 0.8752239775657656, "train_prec": 0.8821355748176577, "val_loss": 0.5080931821465494, "val_acc": 0.8286479717493058, "val_prec": 0.836865885257721}, {"epoch": 33, "train_loss": 0.3579825580120087, "train_acc": 0.8754159724712371, "train_prec": 0.882781271338463, "val_loss": 0.517532984614372, "val_acc": 0.8227679699659346, "val_prec": 0.8306443661451343}, {"epoch": 34, "train_loss": 0.3501792216300964, "train_acc": 0.8784079670906066, "train_prec": 0.8854026383161543, "val_loss": 0.49476369053125374, "val_acc": 0.8313919699192046, "val_prec": 0.8390858453512192}, {"epoch": 35, "train_loss": 0.347747040987015, "train_acc": 0.8787519693374632, "train_prec": 0.8853910034894945, "val_loss": 0.5208290445804595, "val_acc": 0.8238879793882372, "val_prec": 0.83182220518589}, {"epoch": 36, "train_loss": 0.34711520642042165, "train_acc": 0.878975963592529, "train_prec": 0.8858379656076432, "val_loss": 0.5192544382810594, "val_acc": 0.822983976006508, "val_prec": 0.8307366091012957}, {"epoch": 37, "train_loss": 0.3465311029553413, "train_acc": 0.8789839661121366, "train_prec": 
0.8857464116811755, "val_loss": 0.5090281650424006, "val_acc": 0.8262479722499849, "val_prec": 0.8344502186775202}, {"epoch": 38, "train_loss": 0.35013315826654434, "train_acc": 0.8779599636793134, "train_prec": 0.8849358600378036, "val_loss": 0.5116226232051849, "val_acc": 0.8253039717674254, "val_prec": 0.8336730670928958}, {"epoch": 39, "train_loss": 0.34219930678606036, "train_acc": 0.88150395989418, "train_prec": 0.8882028603553771, "val_loss": 0.5192811280488968, "val_acc": 0.8254319757223129, "val_prec": 0.8333383291959762}, {"epoch": 40, "train_loss": 0.3386273276805876, "train_acc": 0.8811119693517684, "train_prec": 0.8877341681718824, "val_loss": 0.5065223044157033, "val_acc": 0.8299759739637376, "val_prec": 0.8374856740236281}, {"epoch": 41, "train_loss": 0.33544057101011293, "train_acc": 0.8838079667091372, "train_prec": 0.8905071634054187, "val_loss": 0.511192305982113, "val_acc": 0.8278319776058198, "val_prec": 0.8357055455446243}, {"epoch": 42, "train_loss": 0.3378760847449303, "train_acc": 0.8823359680175781, "train_prec": 0.8892135679721832, "val_loss": 0.5164300855994225, "val_acc": 0.8248799735307697, "val_prec": 0.8329390394687654}, {"epoch": 43, "train_loss": 0.33317898675799357, "train_acc": 0.8832239681482312, "train_prec": 0.8900185358524322, "val_loss": 0.5028942081332206, "val_acc": 0.8276959776878359, "val_prec": 0.8351801335811614}, {"epoch": 44, "train_loss": 0.3317114378511905, "train_acc": 0.8844479721784588, "train_prec": 0.8909974861145021, "val_loss": 0.5191986390948298, "val_acc": 0.8227919667959214, "val_prec": 0.8306124693155289}, {"epoch": 45, "train_loss": 0.332431573867798, "train_acc": 0.883999965786934, "train_prec": 0.8908221858739854, "val_loss": 0.5204880219697954, "val_acc": 0.8257919788360596, "val_prec": 0.8335478693246843}, {"epoch": 46, "train_loss": 0.33039422631263726, "train_acc": 0.8849919575452805, "train_prec": 0.8916619884967808, "val_loss": 0.5148388627171517, "val_acc": 0.8271759778261187, "val_prec": 
0.8351306807994842}, {"epoch": 47, "train_loss": 0.3328833296895029, "train_acc": 0.8829519623517991, "train_prec": 0.889819096326828, "val_loss": 0.5206260144710542, "val_acc": 0.8243359720706936, "val_prec": 0.8315929007530212}, {"epoch": 48, "train_loss": 0.3304998382925987, "train_acc": 0.8845679682493213, "train_prec": 0.8910441684722901, "val_loss": 0.5201475524902344, "val_acc": 0.8235039800405499, "val_prec": 0.8313850647211074}, {"epoch": 49, "train_loss": 0.32742479950189585, "train_acc": 0.885623960494995, "train_prec": 0.8921878713369368, "val_loss": 0.5201727464795114, "val_acc": 0.8232159805297852, "val_prec": 0.8312631475925446}, {"epoch": 50, "train_loss": 0.3235572212934493, "train_acc": 0.8870639550685882, "train_prec": 0.8935663694143294, "val_loss": 0.5219363924860952, "val_acc": 0.8245919740200042, "val_prec": 0.8322865700721739}] -------------------------------------------------------------------------------- /models/2020-09-24_220708/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 1.548035700321198, "train_acc": 0.31580001249909395, "train_prec": 0.21948666960000987, "val_loss": 1.525468569993973, "val_acc": 0.33320000886917106, "val_prec": 0.24405000321567058}, {"epoch": 2, "train_loss": 1.49343337893486, "train_acc": 0.3640000087022782, "train_prec": 0.27088333770632744, "val_loss": 1.486681180000305, "val_acc": 0.36880001142621044, "val_prec": 0.2752333371341229}, {"epoch": 3, "train_loss": 1.4490105760097505, "train_acc": 0.39140000671148306, "train_prec": 0.29728333696722975, "val_loss": 1.4454304862022405, "val_acc": 0.3976000070571899, "val_prec": 0.30283333778381344}, {"epoch": 4, "train_loss": 1.4174588549137113, "train_acc": 0.4134000045061112, "train_prec": 0.3172666671872141, "val_loss": 1.407286273241043, "val_acc": 0.4270000085234642, "val_prec": 0.33033333569765094}, {"epoch": 5, "train_loss": 1.3853238523006444, "train_acc": 
0.4246000051498414, "train_prec": 0.3273500053584576, "val_loss": 1.3706016337871545, "val_acc": 0.44260000497102725, "val_prec": 0.3434333339333535}, {"epoch": 6, "train_loss": 1.339432947635651, "train_acc": 0.44800000607967383, "train_prec": 0.347050003707409, "val_loss": 1.358361994028092, "val_acc": 0.4508000072836877, "val_prec": 0.3493333384394645}, {"epoch": 7, "train_loss": 1.3371131205558775, "train_acc": 0.46460000544786456, "train_prec": 0.36636667013168334, "val_loss": 1.3401762592792505, "val_acc": 0.4506000041961669, "val_prec": 0.35278333589434624}, {"epoch": 8, "train_loss": 1.2972483527660374, "train_acc": 0.47700000584125524, "train_prec": 0.37731666937470454, "val_loss": 1.318019446134567, "val_acc": 0.46640000790357594, "val_prec": 0.36950000345706946}, {"epoch": 9, "train_loss": 1.2914458143711098, "train_acc": 0.4822000056505203, "train_prec": 0.38530000358819966, "val_loss": 1.3103686916828157, "val_acc": 0.480000005364418, "val_prec": 0.38121667265892034}, {"epoch": 10, "train_loss": 1.2654485499858854, "train_acc": 0.49660001039505003, "train_prec": 0.3992999988794326, "val_loss": 1.3162231838703156, "val_acc": 0.47260000497102744, "val_prec": 0.3753833344578742}, {"epoch": 11, "train_loss": 1.2538113033771512, "train_acc": 0.5012000101804736, "train_prec": 0.4007166677713393, "val_loss": 1.2942929065227506, "val_acc": 0.488600010573864, "val_prec": 0.3909166625142096}, {"epoch": 12, "train_loss": 1.2394357961416242, "train_acc": 0.5146000027656552, "train_prec": 0.41900000184774405, "val_loss": 1.3015229338407515, "val_acc": 0.4758000054955483, "val_prec": 0.3806333336234092}, {"epoch": 13, "train_loss": 1.2144193857908248, "train_acc": 0.5218000081181526, "train_prec": 0.4235333302617072, "val_loss": 1.2796146285533907, "val_acc": 0.49520001173019407, "val_prec": 0.40185000658035264}, {"epoch": 14, "train_loss": 1.2239495736360546, "train_acc": 0.5178000095486639, "train_prec": 0.42415000081062315, "val_loss": 1.275937528610229, 
"val_acc": 0.5006000116467474, "val_prec": 0.40523333519697186}, {"epoch": 15, "train_loss": 1.211818354725838, "train_acc": 0.5228000092506406, "train_prec": 0.425983339548111, "val_loss": 1.2804424321651455, "val_acc": 0.4896000054478645, "val_prec": 0.395106672346592}, {"epoch": 16, "train_loss": 1.1939777237176894, "train_acc": 0.5348000133037566, "train_prec": 0.43938333690166465, "val_loss": 1.2529684740304947, "val_acc": 0.504600011110306, "val_prec": 0.40635000079870215}, {"epoch": 17, "train_loss": 1.2029240584373475, "train_acc": 0.529400010704994, "train_prec": 0.4358666688203812, "val_loss": 1.2318064153194428, "val_acc": 0.5200000110268591, "val_prec": 0.4250000014901161}, {"epoch": 18, "train_loss": 1.1934989303350447, "train_acc": 0.527000013589859, "train_prec": 0.43154999941587446, "val_loss": 1.2530813133716585, "val_acc": 0.5048000082373616, "val_prec": 0.41263333231210714}, {"epoch": 19, "train_loss": 1.1746938508749005, "train_acc": 0.5438000166416168, "train_prec": 0.4505166676640512, "val_loss": 1.2204395473003387, "val_acc": 0.5186000090837481, "val_prec": 0.4254499974846839}, {"epoch": 20, "train_loss": 1.1473168057203291, "train_acc": 0.5584000098705288, "train_prec": 0.4636499965190888, "val_loss": 1.2280127966403955, "val_acc": 0.519600012898445, "val_prec": 0.424533334672451}, {"epoch": 21, "train_loss": 1.165086647868157, "train_acc": 0.5426000171899796, "train_prec": 0.4455166667699814, "val_loss": 1.242037518024444, "val_acc": 0.5068000108003616, "val_prec": 0.41041666626930234}, {"epoch": 22, "train_loss": 1.147443958520889, "train_acc": 0.5544000139832495, "train_prec": 0.46331666916608827, "val_loss": 1.2409343361854557, "val_acc": 0.5074000048637386, "val_prec": 0.41300000071525583}, {"epoch": 23, "train_loss": 1.129449819922447, "train_acc": 0.5628000095486639, "train_prec": 0.46713333457708356, "val_loss": 1.2208286678791047, "val_acc": 0.5258000186085702, "val_prec": 0.42856666728854176}, {"epoch": 24, "train_loss": 
1.094458042383194, "train_acc": 0.5752000176906585, "train_prec": 0.4790666615962983, "val_loss": 1.1982301807403561, "val_acc": 0.5408000084757802, "val_prec": 0.4468499997258186}, {"epoch": 25, "train_loss": 1.1123936659097677, "train_acc": 0.5660000100731849, "train_prec": 0.4746499964594842, "val_loss": 1.213341708779335, "val_acc": 0.5370000120997425, "val_prec": 0.4460833314061165}, {"epoch": 26, "train_loss": 1.0823603343963621, "train_acc": 0.5804000097513199, "train_prec": 0.49081666827201836, "val_loss": 1.1813781601190567, "val_acc": 0.5422000107169154, "val_prec": 0.45300000041723254}, {"epoch": 27, "train_loss": 1.0968278044462199, "train_acc": 0.5874000155925749, "train_prec": 0.5008166667819024, "val_loss": 1.1985983306169508, "val_acc": 0.5378000113368033, "val_prec": 0.44679999977350227}, {"epoch": 28, "train_loss": 1.0888183933496483, "train_acc": 0.585200015306473, "train_prec": 0.49545000255107896, "val_loss": 1.1709304589033123, "val_acc": 0.5542000102996824, "val_prec": 0.46156666725873946}, {"epoch": 29, "train_loss": 1.0682064038515091, "train_acc": 0.5926000127196311, "train_prec": 0.502049995958805, "val_loss": 1.1548665910959248, "val_acc": 0.5558000123500825, "val_prec": 0.46846667051315305}, {"epoch": 30, "train_loss": 1.0457147538661953, "train_acc": 0.5978000143170354, "train_prec": 0.5103333282470705, "val_loss": 1.1919988852739336, "val_acc": 0.5392000138759611, "val_prec": 0.44709999769926073}, {"epoch": 31, "train_loss": 1.0218365508317957, "train_acc": 0.607600014805794, "train_prec": 0.516316671669483, "val_loss": 1.1693974220752723, "val_acc": 0.556400010883808, "val_prec": 0.46839999765157697}, {"epoch": 32, "train_loss": 1.0436139327287675, "train_acc": 0.6030000159144401, "train_prec": 0.5176166665554047, "val_loss": 1.1598804426193234, "val_acc": 0.5538000094890589, "val_prec": 0.4651166674494744}, {"epoch": 33, "train_loss": 1.0161234891414639, "train_acc": 0.6138000160455706, "train_prec": 0.5251666611433031, "val_loss": 
1.1234965604543685, "val_acc": 0.5762000137567521, "val_prec": 0.4870666654407978}, {"epoch": 34, "train_loss": 0.9952230173349381, "train_acc": 0.6246000149846075, "train_prec": 0.5383999985456467, "val_loss": 1.13639033794403, "val_acc": 0.5668000116944312, "val_prec": 0.47654999941587445}, {"epoch": 35, "train_loss": 1.0095609825849532, "train_acc": 0.6130000144243241, "train_prec": 0.5259000000357629, "val_loss": 1.1336975914239884, "val_acc": 0.5698000156879424, "val_prec": 0.47783333241939546}, {"epoch": 36, "train_loss": 1.0155237466096878, "train_acc": 0.6198000150918961, "train_prec": 0.5362999960780145, "val_loss": 1.1090972965955739, "val_acc": 0.5836000138521192, "val_prec": 0.4968333338201045}, {"epoch": 37, "train_loss": 0.9463390469551085, "train_acc": 0.6422000092267993, "train_prec": 0.5530666673183443, "val_loss": 1.104088000059128, "val_acc": 0.5862000152468682, "val_prec": 0.4987999978661537}, {"epoch": 38, "train_loss": 0.9888993722200392, "train_acc": 0.6144000142812728, "train_prec": 0.5254499971866607, "val_loss": 1.1121503269672395, "val_acc": 0.5790000101923943, "val_prec": 0.49518333375453943}, {"epoch": 39, "train_loss": 0.9806794697046278, "train_acc": 0.630400012135506, "train_prec": 0.545533336400986, "val_loss": 1.0847051548957825, "val_acc": 0.59060001373291, "val_prec": 0.5046000024676328}, {"epoch": 40, "train_loss": 0.9608481961488725, "train_acc": 0.639000014066696, "train_prec": 0.555033332705498, "val_loss": 1.11901649236679, "val_acc": 0.5706000068783758, "val_prec": 0.4823333311080933}, {"epoch": 41, "train_loss": 0.970893940925598, "train_acc": 0.6318000093102456, "train_prec": 0.5492333343625071, "val_loss": 1.128979561328888, "val_acc": 0.5878000128269193, "val_prec": 0.4980333271622657}, {"epoch": 42, "train_loss": 0.9503899258375168, "train_acc": 0.6378000098466874, "train_prec": 0.5548999997973444, "val_loss": 1.0891772305965421, "val_acc": 0.5998000076413157, "val_prec": 0.5119833347201349}, {"epoch": 43, 
"train_loss": 0.9491039556264879, "train_acc": 0.6444000208377837, "train_prec": 0.5608833283185958, "val_loss": 1.0997618728876117, "val_acc": 0.5880000153183939, "val_prec": 0.5024666672945023}, {"epoch": 44, "train_loss": 0.964681804180145, "train_acc": 0.6308000153303147, "train_prec": 0.5477833288908003, "val_loss": 1.080387797355651, "val_acc": 0.5840000125765803, "val_prec": 0.4927666667103768}, {"epoch": 45, "train_loss": 0.9775589138269423, "train_acc": 0.6390000182390215, "train_prec": 0.5523999994993213, "val_loss": 1.1025304281711574, "val_acc": 0.5738000187277794, "val_prec": 0.48443332940340034}, {"epoch": 46, "train_loss": 0.9629743158817289, "train_acc": 0.6360000127553942, "train_prec": 0.5493666657805446, "val_loss": 1.107923645377159, "val_acc": 0.582400015592575, "val_prec": 0.4959000006318092}, {"epoch": 47, "train_loss": 0.9293153607845309, "train_acc": 0.6480000135302545, "train_prec": 0.5654333317279814, "val_loss": 1.1089279764890665, "val_acc": 0.5850000146031381, "val_prec": 0.49980000257492063}, {"epoch": 48, "train_loss": 0.952064675092697, "train_acc": 0.6368000102043149, "train_prec": 0.5509333375096318, "val_loss": 1.0973035645484919, "val_acc": 0.5854000183939936, "val_prec": 0.4996666705608369}, {"epoch": 49, "train_loss": 0.9193438357114792, "train_acc": 0.6540000116825107, "train_prec": 0.5690666648745537, "val_loss": 1.0946840226650234, "val_acc": 0.5878000193834306, "val_prec": 0.5020499980449677}, {"epoch": 50, "train_loss": 0.9169359111785887, "train_acc": 0.6602000117301942, "train_prec": 0.5780833294987678, "val_loss": 1.0616761183738714, "val_acc": 0.59860001295805, "val_prec": 0.5130499982833859}] -------------------------------------------------------------------------------- /models/2020-09-23_004223/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 2.2013394880294808, "train_acc": 0.21044000431895263, "train_prec": 0.13559049405157564, 
"val_loss": 2.1047941446304326, "val_acc": 0.2646400061249732, "val_prec": 0.17925800368189815}, {"epoch": 2, "train_loss": 2.013835191726684, "train_acc": 0.3087200048565866, "train_prec": 0.21842000216245652, "val_loss": 1.962383533716202, "val_acc": 0.3304000034928321, "val_prec": 0.2370380023121834}, {"epoch": 3, "train_loss": 1.9108132147788996, "train_acc": 0.35320000290870657, "train_prec": 0.25839133650064483, "val_loss": 1.9092359924316409, "val_acc": 0.35540000349283196, "val_prec": 0.2602000005543231}, {"epoch": 4, "train_loss": 1.8497356855869294, "train_acc": 0.3715200039744377, "train_prec": 0.27422333329916, "val_loss": 1.8611567676067353, "val_acc": 0.3729600086808206, "val_prec": 0.275675999224186}, {"epoch": 5, "train_loss": 1.8045753812789915, "train_acc": 0.386960006058216, "train_prec": 0.29189266920089724, "val_loss": 1.8397554183006284, "val_acc": 0.38260000497102736, "val_prec": 0.28594066649675365}, {"epoch": 6, "train_loss": 1.7389223551750184, "train_acc": 0.42016000390052805, "train_prec": 0.3203733336925506, "val_loss": 1.8061982846260072, "val_acc": 0.39731999874115, "val_prec": 0.2990546682476998}, {"epoch": 7, "train_loss": 1.7102956593036647, "train_acc": 0.4313600033521651, "train_prec": 0.3321193373203277, "val_loss": 1.7882431292533874, "val_acc": 0.4062800058722497, "val_prec": 0.3091900016367436}, {"epoch": 8, "train_loss": 1.6895096564292904, "train_acc": 0.4362800040841101, "train_prec": 0.3381180050969122, "val_loss": 1.7798411965370182, "val_acc": 0.4105200037360192, "val_prec": 0.31327533349394804}, {"epoch": 9, "train_loss": 1.6746349370479587, "train_acc": 0.44132000535726573, "train_prec": 0.34361933767795566, "val_loss": 1.7823796224594117, "val_acc": 0.4068800044059754, "val_prec": 0.310270001143217}, {"epoch": 10, "train_loss": 1.664201763868331, "train_acc": 0.4445200040936471, "train_prec": 0.347270002067089, "val_loss": 1.758236298561096, "val_acc": 0.4172000056505202, "val_prec": 0.3194560009241103}, {"epoch": 
11, "train_loss": 1.6288426530361173, "train_acc": 0.45760000497102743, "train_prec": 0.3603033357858659, "val_loss": 1.7443221962451931, "val_acc": 0.4197200033068657, "val_prec": 0.32280466765165333}, {"epoch": 12, "train_loss": 1.613242493867874, "train_acc": 0.46192001014947887, "train_prec": 0.36330467104911796, "val_loss": 1.743767058849335, "val_acc": 0.42468000262975686, "val_prec": 0.32799933403730386}, {"epoch": 13, "train_loss": 1.5883236062526702, "train_acc": 0.4750400054454803, "train_prec": 0.3779146710038184, "val_loss": 1.7048893141746517, "val_acc": 0.4391200038790703, "val_prec": 0.34229333400726314}, {"epoch": 14, "train_loss": 1.5597595477104185, "train_acc": 0.4832800075411795, "train_prec": 0.3838760030269623, "val_loss": 1.696821974515915, "val_acc": 0.43856000185012817, "val_prec": 0.34098800420761105}, {"epoch": 15, "train_loss": 1.5539360451698303, "train_acc": 0.4868000051379204, "train_prec": 0.38805466890335083, "val_loss": 1.6794712233543403, "val_acc": 0.44744000285863883, "val_prec": 0.348707335293293}, {"epoch": 16, "train_loss": 1.5372382390499115, "train_acc": 0.4874400064349173, "train_prec": 0.3902626678347588, "val_loss": 1.658001055717468, "val_acc": 0.45404000103473674, "val_prec": 0.357585333585739}, {"epoch": 17, "train_loss": 1.5412349367141722, "train_acc": 0.4902400046586991, "train_prec": 0.39236600250005726, "val_loss": 1.669124457836151, "val_acc": 0.4464000073075295, "val_prec": 0.3498333358764647}, {"epoch": 18, "train_loss": 1.514766932725907, "train_acc": 0.4958000043034553, "train_prec": 0.39757733523845673, "val_loss": 1.6594024229049684, "val_acc": 0.4543200039863587, "val_prec": 0.35622599959373474}, {"epoch": 19, "train_loss": 1.5151534533500668, "train_acc": 0.5001200056076052, "train_prec": 0.40257066875696174, "val_loss": 1.6629812848567969, "val_acc": 0.45480000466108317, "val_prec": 0.3581186687946319}, {"epoch": 20, "train_loss": 1.5056339967250825, "train_acc": 0.5006000062823296, "train_prec": 
0.40269600331783306, "val_loss": 1.6487855112552638, "val_acc": 0.4591200044751166, "val_prec": 0.36283333390951156}, {"epoch": 21, "train_loss": 1.500652970075608, "train_acc": 0.5018800064921377, "train_prec": 0.4056979972124099, "val_loss": 1.667568930387497, "val_acc": 0.4524000036716462, "val_prec": 0.3548253348469733}, {"epoch": 22, "train_loss": 1.4922503113746641, "train_acc": 0.5049200078845024, "train_prec": 0.40731267184019093, "val_loss": 1.641991719007492, "val_acc": 0.45624000459909425, "val_prec": 0.3600726687908172}, {"epoch": 23, "train_loss": 1.4890205371379848, "train_acc": 0.5079200085997584, "train_prec": 0.41008733332157127, "val_loss": 1.6355600869655604, "val_acc": 0.45984000504016886, "val_prec": 0.362498004734516}, {"epoch": 24, "train_loss": 1.4806120097637179, "train_acc": 0.5118800041079522, "train_prec": 0.41474266678094857, "val_loss": 1.6227408480644228, "val_acc": 0.4696000036597252, "val_prec": 0.3717660039663317}, {"epoch": 25, "train_loss": 1.4710409975051886, "train_acc": 0.5126000094413761, "train_prec": 0.4146426701545715, "val_loss": 1.6385863387584683, "val_acc": 0.4602000069618225, "val_prec": 0.3642293372750282}, {"epoch": 26, "train_loss": 1.468399274349213, "train_acc": 0.5105200079083441, "train_prec": 0.41334399998188015, "val_loss": 1.6410379755496973, "val_acc": 0.4618800076842308, "val_prec": 0.3665146684646607}, {"epoch": 27, "train_loss": 1.4558462703228, "train_acc": 0.5178800067305567, "train_prec": 0.41992533415555955, "val_loss": 1.6349030101299287, "val_acc": 0.46676000624895103, "val_prec": 0.3724873366951942}, {"epoch": 28, "train_loss": 1.4426559114456174, "train_acc": 0.520680001974106, "train_prec": 0.4246199998259545, "val_loss": 1.6292848134040832, "val_acc": 0.46984000384807595, "val_prec": 0.37632199823856344}, {"epoch": 29, "train_loss": 1.4527926588058473, "train_acc": 0.5212400048971177, "train_prec": 0.42393933743238454, "val_loss": 1.6214969134330748, "val_acc": 0.46776000946760155, "val_prec": 
0.3710073342919349}, {"epoch": 30, "train_loss": 1.4436347126960751, "train_acc": 0.521520009040833, "train_prec": 0.4252226632833482, "val_loss": 1.6082977581024167, "val_acc": 0.4731200036406516, "val_prec": 0.3782926687598228}, {"epoch": 31, "train_loss": 1.4293667578697205, "train_acc": 0.5256800052523616, "train_prec": 0.4302806660532952, "val_loss": 1.607538499832153, "val_acc": 0.4760400047898294, "val_prec": 0.3796613362431526}, {"epoch": 32, "train_loss": 1.4075157201290132, "train_acc": 0.5358800062537198, "train_prec": 0.44119000375270834, "val_loss": 1.612525110244751, "val_acc": 0.46960000693798076, "val_prec": 0.3726933330297469}, {"epoch": 33, "train_loss": 1.4120084524154664, "train_acc": 0.532280005514622, "train_prec": 0.43570999890565865, "val_loss": 1.5736454439163206, "val_acc": 0.4862000027298928, "val_prec": 0.3907400012016298}, {"epoch": 34, "train_loss": 1.4080660259723665, "train_acc": 0.5314400082826614, "train_prec": 0.4360293307900429, "val_loss": 1.5899921476840977, "val_acc": 0.48140000909566866, "val_prec": 0.38561000108718874}, {"epoch": 35, "train_loss": 1.3901891636848445, "train_acc": 0.5410800024867057, "train_prec": 0.44536266654729856, "val_loss": 1.5990192854404452, "val_acc": 0.4773600080609324, "val_prec": 0.3833166640996934}, {"epoch": 36, "train_loss": 1.3877139103412632, "train_acc": 0.5411200013756752, "train_prec": 0.44506000280380237, "val_loss": 1.5850938975811, "val_acc": 0.47936000227928155, "val_prec": 0.3850133356451989}, {"epoch": 37, "train_loss": 1.3900349533557887, "train_acc": 0.5413200074434277, "train_prec": 0.4441133341193199, "val_loss": 1.5919446587562565, "val_acc": 0.4802000033855439, "val_prec": 0.38562600255012514}, {"epoch": 38, "train_loss": 1.361080836057663, "train_acc": 0.5508800050616266, "train_prec": 0.45672999829053873, "val_loss": 1.5800068426132199, "val_acc": 0.4838400030136109, "val_prec": 0.390007337331772}, {"epoch": 39, "train_loss": 1.3659708368778223, "train_acc": 
0.5471600079536442, "train_prec": 0.452686671614647, "val_loss": 1.587386211156845, "val_acc": 0.48864000886678693, "val_prec": 0.3958713349699974}, {"epoch": 40, "train_loss": 1.3654448926448826, "train_acc": 0.549800008237362, "train_prec": 0.4545899981260301, "val_loss": 1.5835803556442256, "val_acc": 0.4854400059580802, "val_prec": 0.3916066697239876}, {"epoch": 41, "train_loss": 1.3662698209285735, "train_acc": 0.5482400062680243, "train_prec": 0.45435666918754575, "val_loss": 1.5750894963741298, "val_acc": 0.49092000693082816, "val_prec": 0.3982633340358734}, {"epoch": 42, "train_loss": 1.3564435517787938, "train_acc": 0.5488000100851058, "train_prec": 0.45436067163944255, "val_loss": 1.5537641048431396, "val_acc": 0.4950800049304962, "val_prec": 0.4013466671109198}, {"epoch": 43, "train_loss": 1.3530120742321012, "train_acc": 0.5524800059199331, "train_prec": 0.4572766664624213, "val_loss": 1.5910179483890534, "val_acc": 0.4840000104904174, "val_prec": 0.38910333216190346}, {"epoch": 44, "train_loss": 1.332252120971679, "train_acc": 0.5612000080943108, "train_prec": 0.4683500003814698, "val_loss": 1.5670860922336576, "val_acc": 0.4905200049281121, "val_prec": 0.39579933375120163}, {"epoch": 45, "train_loss": 1.3626828956604005, "train_acc": 0.5484000074863433, "train_prec": 0.45390666753053677, "val_loss": 1.5480085086822515, "val_acc": 0.495680003464222, "val_prec": 0.4003173372149467}, {"epoch": 46, "train_loss": 1.3345988070964816, "train_acc": 0.5616000041365625, "train_prec": 0.46676466763019564, "val_loss": 1.5767065048217772, "val_acc": 0.48816000431776063, "val_prec": 0.3912180057168006}, {"epoch": 47, "train_loss": 1.349110052585602, "train_acc": 0.5546000051498412, "train_prec": 0.46035667091608035, "val_loss": 1.576768794059754, "val_acc": 0.4897200033068658, "val_prec": 0.3943133392930031}, {"epoch": 48, "train_loss": 1.3438351786136633, "train_acc": 0.5550400072336193, "train_prec": 0.4612400019168853, "val_loss": 1.5593031990528106, "val_acc": 
0.49488000512123104, "val_prec": 0.4028733345866204}, {"epoch": 49, "train_loss": 1.3317411673068997, "train_acc": 0.5599200081825255, "train_prec": 0.4648833376169206, "val_loss": 1.5620008349418641, "val_acc": 0.49204000264406206, "val_prec": 0.39814000159502033}, {"epoch": 50, "train_loss": 1.3231967413425445, "train_acc": 0.561480004787445, "train_prec": 0.4671726682782173, "val_loss": 1.561577708721161, "val_acc": 0.4910800057649613, "val_prec": 0.39583800405263914}] -------------------------------------------------------------------------------- /models/2020-09-24_175635/model_results.json: -------------------------------------------------------------------------------- 1 | [{"epoch": 1, "train_loss": 2.3006541728973393, "train_acc": 0.13700000531971457, "train_prec": 0.07755900915712119, "val_loss": 2.2719828939437856, "val_acc": 0.14950000658631324, "val_prec": 0.09897428749129177}, {"epoch": 2, "train_loss": 2.2641300129890443, "train_acc": 0.1551000048220158, "train_prec": 0.10109833642840389, "val_loss": 2.2510451078414913, "val_acc": 0.16940000392496582, "val_prec": 0.110381668433547}, {"epoch": 3, "train_loss": 2.2224850940704353, "train_acc": 0.18640000373125076, "train_prec": 0.12251000095158815, "val_loss": 2.197645134925842, "val_acc": 0.20890000462532043, "val_prec": 0.14078666858375075}, {"epoch": 4, "train_loss": 2.1653594708442667, "train_acc": 0.22800000190734862, "train_prec": 0.15569333508610725, "val_loss": 2.14451977968216, "val_acc": 0.23840000271797188, "val_prec": 0.159565002694726}, {"epoch": 5, "train_loss": 2.106053891181945, "train_acc": 0.25400000363588326, "train_prec": 0.17205000154674047, "val_loss": 2.096926736831665, "val_acc": 0.2628000020980836, "val_prec": 0.17654166646301742}, {"epoch": 6, "train_loss": 2.0724728333950044, "train_acc": 0.2802000062167645, "train_prec": 0.1936050007492304, "val_loss": 2.0583220648765557, "val_acc": 0.28400000348687165, "val_prec": 0.19350500188767902}, {"epoch": 7, "train_loss": 
2.0120339953899387, "train_acc": 0.30810000345110894, "train_prec": 0.21809500113129618, "val_loss": 2.0117573416233068, "val_acc": 0.30890000969171516, "val_prec": 0.21776500180363656}, {"epoch": 8, "train_loss": 1.9643163645267483, "train_acc": 0.3289000068604945, "train_prec": 0.233085002452135, "val_loss": 1.9833893334865573, "val_acc": 0.32290000826120374, "val_prec": 0.23278666749596594}, {"epoch": 9, "train_loss": 1.935094916820526, "train_acc": 0.3370000010728837, "train_prec": 0.24347000256180773, "val_loss": 1.9604254531860352, "val_acc": 0.3248000088334083, "val_prec": 0.23260666832327842}, {"epoch": 10, "train_loss": 1.9128879356384276, "train_acc": 0.34060000509023675, "train_prec": 0.24461833387613288, "val_loss": 1.9699258363246916, "val_acc": 0.3304000069200992, "val_prec": 0.23653000175952912}, {"epoch": 11, "train_loss": 1.8834039640426636, "train_acc": 0.35690000414848333, "train_prec": 0.26130500167608256, "val_loss": 1.9635104167461392, "val_acc": 0.327400004118681, "val_prec": 0.23596166871488097}, {"epoch": 12, "train_loss": 1.8645381140708923, "train_acc": 0.36320000588893897, "train_prec": 0.26781333535909646, "val_loss": 1.927998756170273, "val_acc": 0.3427000033855438, "val_prec": 0.24861833602190017}, {"epoch": 13, "train_loss": 1.8333408331871033, "train_acc": 0.37410000592470166, "train_prec": 0.2750483326613902, "val_loss": 1.9036609029769898, "val_acc": 0.34920000821352004, "val_prec": 0.25310333445668226}, {"epoch": 14, "train_loss": 1.8369647479057305, "train_acc": 0.3748000058531762, "train_prec": 0.2771650019288064, "val_loss": 1.9025964033603664, "val_acc": 0.353400005698204, "val_prec": 0.2576266677677631}, {"epoch": 15, "train_loss": 1.7972249710559849, "train_acc": 0.38780000418424615, "train_prec": 0.29093000099062916, "val_loss": 1.900426850318909, "val_acc": 0.3553000053763388, "val_prec": 0.26067666858434674}, {"epoch": 16, "train_loss": 1.8088883996009828, "train_acc": 0.3875000044703484, "train_prec": 
0.2887350016832353, "val_loss": 1.8990055787563322, "val_acc": 0.3510000026226044, "val_prec": 0.25576500102877603}, {"epoch": 17, "train_loss": 1.7838301169872286, "train_acc": 0.39300000190734874, "train_prec": 0.29603166908025735, "val_loss": 1.8645518577098847, "val_acc": 0.3664000065624715, "val_prec": 0.2721133352816104}, {"epoch": 18, "train_loss": 1.779184337854385, "train_acc": 0.3928000029921531, "train_prec": 0.29814500048756615, "val_loss": 1.8722860503196714, "val_acc": 0.37350000739097605, "val_prec": 0.27767000168561917}, {"epoch": 19, "train_loss": 1.7448138034343719, "train_acc": 0.4098000025749206, "train_prec": 0.3088183341920376, "val_loss": 1.887132687568664, "val_acc": 0.365900006890297, "val_prec": 0.2704250045120715}, {"epoch": 20, "train_loss": 1.7449721670150755, "train_acc": 0.4053000029921531, "train_prec": 0.30594166576862347, "val_loss": 1.847603453397752, "val_acc": 0.38070000171661367, "val_prec": 0.2880299998819829}, {"epoch": 21, "train_loss": 1.7294664943218236, "train_acc": 0.42010000377893464, "train_prec": 0.32071333497762666, "val_loss": 1.8460189712047574, "val_acc": 0.37860000282526013, "val_prec": 0.2844316677749157}, {"epoch": 22, "train_loss": 1.724504134654999, "train_acc": 0.4208000013232231, "train_prec": 0.3232833318412303, "val_loss": 1.847365502119065, "val_acc": 0.38430000424385086, "val_prec": 0.28973167032003394}, {"epoch": 23, "train_loss": 1.6913246166706077, "train_acc": 0.4309000015258788, "train_prec": 0.32899333611130716, "val_loss": 1.8505395507812505, "val_acc": 0.3764000064134598, "val_prec": 0.2828200018405914}, {"epoch": 24, "train_loss": 1.6970137155055998, "train_acc": 0.42820000499486915, "train_prec": 0.332539998739958, "val_loss": 1.8520524227619166, "val_acc": 0.3806000074744225, "val_prec": 0.2876066651940346}, {"epoch": 25, "train_loss": 1.687191712856293, "train_acc": 0.43010000169277196, "train_prec": 0.3319350014626979, "val_loss": 1.809969359636307, "val_acc": 0.4019000044465065, 
"val_prec": 0.3048233361542224}, {"epoch": 26, "train_loss": 1.6479310762882229, "train_acc": 0.448600001037121, "train_prec": 0.3477566662430762, "val_loss": 1.8234130311012273, "val_acc": 0.3903000038862228, "val_prec": 0.2963416656851767}, {"epoch": 27, "train_loss": 1.6539493596553803, "train_acc": 0.4457000058889389, "train_prec": 0.3480616688728334, "val_loss": 1.84150906920433, "val_acc": 0.38700000286102293, "val_prec": 0.29436833515763283}, {"epoch": 28, "train_loss": 1.6566621077060697, "train_acc": 0.4393999999761582, "train_prec": 0.34248666644096376, "val_loss": 1.8133540272712707, "val_acc": 0.3949000024795531, "val_prec": 0.29927666723728186}, {"epoch": 29, "train_loss": 1.6288926470279694, "train_acc": 0.45370000094175345, "train_prec": 0.35519262313842775, "val_loss": 1.8099831843376162, "val_acc": 0.39740000277757637, "val_prec": 0.3006200030446051}, {"epoch": 30, "train_loss": 1.6292867052555084, "train_acc": 0.44970000237226493, "train_prec": 0.35133666530251495, "val_loss": 1.813319506645203, "val_acc": 0.3934000039100647, "val_prec": 0.29727333471179007}, {"epoch": 31, "train_loss": 1.5969907927513127, "train_acc": 0.4652000114321709, "train_prec": 0.36630000472068786, "val_loss": 1.7884107398986817, "val_acc": 0.4039000016450883, "val_prec": 0.3096066699922085}, {"epoch": 32, "train_loss": 1.59531410574913, "train_acc": 0.46430000185966486, "train_prec": 0.3644199979305268, "val_loss": 1.7982918894290925, "val_acc": 0.4019000020623207, "val_prec": 0.3085100030899049}, {"epoch": 33, "train_loss": 1.578153797388077, "train_acc": 0.47020000427961367, "train_prec": 0.370828334093094, "val_loss": 1.7813649737834931, "val_acc": 0.4035000005364418, "val_prec": 0.30701500162482254}, {"epoch": 34, "train_loss": 1.5816438925266272, "train_acc": 0.4703000041842461, "train_prec": 0.37037833154201505, "val_loss": 1.7828439366817475, "val_acc": 0.4098000055551528, "val_prec": 0.315431669652462}, {"epoch": 35, "train_loss": 1.5515113055706022, "train_acc": 
0.47839999973773956, "train_prec": 0.37940166920423507, "val_loss": 1.796329233646393, "val_acc": 0.40780000030994434, "val_prec": 0.31096500381827347}, {"epoch": 36, "train_loss": 1.5641868996620174, "train_acc": 0.4721999999880792, "train_prec": 0.3739200022816659, "val_loss": 1.8118958747386926, "val_acc": 0.40130000621080386, "val_prec": 0.3056249980628491}, {"epoch": 37, "train_loss": 1.5745545470714566, "train_acc": 0.475499997138977, "train_prec": 0.37962666690349556, "val_loss": 1.7924254167079932, "val_acc": 0.40170000314712534, "val_prec": 0.3075583356618881}, {"epoch": 38, "train_loss": 1.548807293176651, "train_acc": 0.48290000498294827, "train_prec": 0.3844266641139985, "val_loss": 1.7771063351631164, "val_acc": 0.41190000265836707, "val_prec": 0.3157816667854787}, {"epoch": 39, "train_loss": 1.544079921245575, "train_acc": 0.47460000544786457, "train_prec": 0.3770650008320809, "val_loss": 1.793283921480179, "val_acc": 0.40560000687837594, "val_prec": 0.30922500118613244}, {"epoch": 40, "train_loss": 1.5280344498157492, "train_acc": 0.48620000243186956, "train_prec": 0.38547333419322966, "val_loss": 1.7970995521545408, "val_acc": 0.4002000015974046, "val_prec": 0.30697500139474876}, {"epoch": 41, "train_loss": 1.5196930563449857, "train_acc": 0.49020000159740457, "train_prec": 0.391750001013279, "val_loss": 1.8170241975784303, "val_acc": 0.3988000023365023, "val_prec": 0.30478666767477963}, {"epoch": 42, "train_loss": 1.535062592029572, "train_acc": 0.48450000137090693, "train_prec": 0.38402999609708777, "val_loss": 1.781952008008957, "val_acc": 0.40330000340938565, "val_prec": 0.3077583339810372}, {"epoch": 43, "train_loss": 1.5130258357524866, "train_acc": 0.49090000540018086, "train_prec": 0.3927699986100199, "val_loss": 1.7892804539203646, "val_acc": 0.40350000470876696, "val_prec": 0.3098999992012976}, {"epoch": 44, "train_loss": 1.5147447001934051, "train_acc": 0.49500000357627855, "train_prec": 0.39469166874885564, "val_loss": 1.793489049673081, 
"val_acc": 0.40660000443458544, "val_prec": 0.3136866655945778}, {"epoch": 45, "train_loss": 1.5077183377742764, "train_acc": 0.4971000066399573, "train_prec": 0.3987183317542074, "val_loss": 1.8135356569290157, "val_acc": 0.3988000047206879, "val_prec": 0.30454500153660785}, {"epoch": 46, "train_loss": 1.4982023799419397, "train_acc": 0.4991000023484232, "train_prec": 0.39865666955709467, "val_loss": 1.797445766925812, "val_acc": 0.4009000068902969, "val_prec": 0.3089316692948342}, {"epoch": 47, "train_loss": 1.498297749757766, "train_acc": 0.4949000048637388, "train_prec": 0.39395999938249576, "val_loss": 1.7929652702808387, "val_acc": 0.4071000057458877, "val_prec": 0.31048666849732387}, {"epoch": 48, "train_loss": 1.479121466875076, "train_acc": 0.5036000090837478, "train_prec": 0.40366833567619326, "val_loss": 1.7867204058170312, "val_acc": 0.4125000056624412, "val_prec": 0.3156150025129318}, {"epoch": 49, "train_loss": 1.4611806440353394, "train_acc": 0.5152000078558918, "train_prec": 0.4154166659712793, "val_loss": 1.817631156444549, "val_acc": 0.40440000563859957, "val_prec": 0.3098350028693677}, {"epoch": 50, "train_loss": 1.4746904444694517, "train_acc": 0.4997000083327293, "train_prec": 0.3989233350753784, "val_loss": 1.764700692892074, "val_acc": 0.4138000026345254, "val_prec": 0.3186350028216837}] --------------------------------------------------------------------------------