├── plots
│   ├── conf_matrix_valid.png
│   ├── lr_comparison_lr.png
│   ├── arch_comparison_lr.png
│   ├── batch_comparison_lr.png
│   ├── fianl_comparison_lr.png
│   ├── lr_comparison2_loss.png
│   ├── lr_comparison_loss.png
│   ├── arch_comparison2_loss.png
│   ├── arch_comparison_loss.png
│   ├── batch_comparison2_loss.png
│   ├── batch_comparison_loss.png
│   ├── final_comparison2_loss.png
│   ├── final_comparison_loss.png
│   ├── lr_comparison_accuracy.png
│   ├── lr_comparison_val_loss.png
│   ├── optim_comparison_loss.png
│   ├── vgg_val_val_accuracy.png
│   ├── arch_comparison_accuracy.png
│   ├── arch_comparison_val_loss.png
│   ├── lr_comparison2_accuracy.png
│   ├── lr_comparison2_val_loss.png
│   ├── resnet_val_val_accuracy.png
│   ├── arch_comparison2_accuracy.png
│   ├── arch_comparison2_val_loss.png
│   ├── batch_comparison2_accuracy.png
│   ├── batch_comparison2_val_loss.png
│   ├── batch_comparison_accuracy.png
│   ├── batch_comparison_val_loss.png
│   ├── final_comparison2_accuracy.png
│   ├── final_comparison2_val_loss.png
│   ├── final_comparison_accuracy.png
│   ├── final_comparison_val_loss.png
│   ├── lr_comparison2_val_accuracy.png
│   ├── lr_comparison_val_accuracy.png
│   ├── optim_comparison_accuracy.png
│   ├── optim_comparison_val_loss.png
│   ├── arch_comparison2_val_accuracy.png
│   ├── arch_comparison_val_accuracy.png
│   ├── batch_comparison_val_accuracy.png
│   ├── final_comparison_val_accuracy.png
│   ├── optim_comparison_val_accuracy.png
│   ├── batch_comparison2_val_accuracy.png
│   └── final_comparison2_val_accuracy.png
├── test_results
│   ├── final_model.csv
│   ├── lr_comparison.csv
│   ├── arch_comparison.csv
│   ├── batch_comparison.csv
│   └── optim_comparison.csv
├── prepare_data.py
├── LICENSE
├── .gitignore
├── cnn_test.py
├── DR2.py
├── DR1.py
├── util.py
├── additional_model.py
├── optimizers_test.py
├── WB1.py
├── prediction.py
├── final_test.py
├── transfer_learning.py
├── lr_test.py
├── arch_test.py
├── batch_test.py
├── WB2.py
└── histories
    ├── resnet_1_history.csv
    ├── resnet_2_history.csv
    ├── resnet_0_history.csv
    ├── vgg_2_history.csv
    ├── vgg_0_history.csv
    └── vgg_1_history.csv

--------------------------------------------------------------------------------
/plots/*.png:
--------------------------------------------------------------------------------
[Binary image files. Each plot listed in the tree above is available at
https://raw.githubusercontent.com/DominikRafacz/weepnet/master/plots/<filename>]

--------------------------------------------------------------------------------
/test_results/final_model.csv:
--------------------------------------------------------------------------------
,loss,acc
0,0.39671429991722107,0.8988000154495239
1,0.39921054244041443,0.8960999846458435
2,0.39933517575263977,0.8996999859809875

--------------------------------------------------------------------------------
/test_results/lr_comparison.csv:
--------------------------------------------------------------------------------
,loss,acc
0,0.7796329855918884,0.8180000185966492
1,0.7655537724494934,0.8269000053405762
2,0.807333767414093,0.8094000220298767
3,0.5778830051422119,0.8654999732971191
4,0.6569296717643738,0.8555999994277954
5,0.5997810363769531,0.8565999865531921

--------------------------------------------------------------------------------
/test_results/arch_comparison.csv:
--------------------------------------------------------------------------------
,loss,acc
0,0.5259724259376526,0.8733000159263611
1,0.5090545415878296,0.8752999901771545
2,0.5591164231300354,0.8664000034332275
3,0.6619057059288025,0.8395000100135803
4,0.6264746785163879,0.8564000129699707
5,0.827524721622467,0.8073999881744385

--------------------------------------------------------------------------------
/test_results/batch_comparison.csv:
--------------------------------------------------------------------------------
,loss,acc
0,0.641349732875824,0.843500018119812
1,0.6848436594009399,0.8356999754905701
2,0.6000865697860718,0.8533999919891357
3,0.8307650685310364,0.7912999987602234
4,0.6138059496879578,0.8531000018119812
5,0.5000298619270325,0.8755999803543091
6,0.5290393829345703,0.8677999973297119
7,0.5488694310188293,0.8668000102043152
8,0.5938078165054321,0.8504999876022339

--------------------------------------------------------------------------------
/test_results/optim_comparison.csv:
--------------------------------------------------------------------------------
,loss,acc
0,0.5063427686691284,0.8543999791145325
1,0.5442593693733215,0.8447999954223633
2,0.5417337417602539,0.8396999835968018
3,0.5378807187080383,0.8450000286102295
4,0.5108172297477722,0.8551999926567078
5,0.5375484228134155,0.8464999794960022
6,0.7617402076721191,0.7502999901771545
7,0.7247334122657776,0.7580000162124634
8,0.8686668872833252,0.7179999947547913
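
Each CSV above holds one row per training run; the scripts train three seeded
runs per configuration and average losses[:3], [3:6], [6:]. A minimal pandas
sketch for summarizing one of the files (not part of the repository):

    import pandas as pd

    # runs 0-2, 3-5, 6-8 each share one configuration
    df = pd.read_csv("test_results/optim_comparison.csv", index_col=0)
    df["config"] = df.index // 3
    print(df.groupby("config")[["loss", "acc"]].agg(["mean", "std"]))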
--------------------------------------------------------------------------------
/prepare_data.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd
import os

img_size = (32, 32)
batch_size = 100

# Build the directory structure expected by ImageDataGenerator.flow_from_directory:
# move the 50,000 Kaggle CIFAR-10 training images into per-class subfolders,
# 40,000 under data/train/<class>/ and 10,000 under data/validation/<class>/.
img_dir = "data/train"
labels = pd.read_csv("data/trainLabels.csv").loc[:, 'label']
np.random.seed(420)
perm = np.random.permutation(50000)
inds_train = perm[0:40000]
inds_val = perm[40000:50000]
for ind in inds_train:
    cls = labels[ind]
    if not os.path.exists("data/train/{}".format(cls)):
        os.makedirs("data/train/{}".format(cls))
    if os.path.exists("data/train/{}.png".format(ind + 1)):
        os.rename("data/train/{}.png".format(ind + 1), "data/train/{0}/{1}.png".format(cls, ind + 1))
for ind in inds_val:
    cls = labels[ind]
    if not os.path.exists("data/validation/{}".format(cls)):
        os.makedirs("data/validation/{}".format(cls))
    if os.path.exists("data/train/{}.png".format(ind + 1)):
        os.rename("data/train/{}.png".format(ind + 1), "data/validation/{0}/{1}.png".format(cls, ind + 1))
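
A quick sanity check after running prepare_data.py (not part of the repository):
both splits should contain one folder per class and 40,000 + 10,000 images in
total, assuming every referenced PNG was present.

    import os

    for split in ("data/train", "data/validation"):
        counts = {cls: len(os.listdir(os.path.join(split, cls)))
                  for cls in sorted(os.listdir(split))}
        print(split, sum(counts.values()), counts)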
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
The MIT License (MIT)

Copyright (c) 2016 Paras Dahal

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IPython Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# dotenv
.env

# virtualenv
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject

*.sublime*
MNIST_data/
data/
models/
.idea/

venv2/
--------------------------------------------------------------------------------
/cnn_test.py:
--------------------------------------------------------------------------------
'''
https://www.kaggle.com/kedarsai/cifar-10-88-accuracy-using-keras
'''
# unused imports and a duplicate Conv2D from the original import list removed
from PIL import Image
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPool2D, Dense, Flatten, Dropout, BatchNormalization
from tensorflow.keras.preprocessing.image import ImageDataGenerator

np.random.seed(420)
perm = np.random.permutation(50000)
inds_train = perm[0:40000]

# load the 40,000 permuted training images into memory as a single array
X_train = np.concatenate(
    [np.array(Image.open('data/train/' + str(i + 1) + '.png')).reshape((1, 32, 32, 3)) for i in inds_train])
X_train = X_train / 255.0

labels = pd.get_dummies(pd.read_csv('data/trainLabels.csv')['label'])

y_train = np.array(labels.iloc[inds_train, :])
x_train, x_val, y_train, y_val = train_test_split(X_train, y_train, test_size=.3)

model6 = Sequential()
model6.add(
    Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same', input_shape=(32, 32, 3)))
model6.add(BatchNormalization())
model6.add(Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'))
model6.add(BatchNormalization())
model6.add(MaxPool2D((2, 2)))
model6.add(Dropout(0.2))
model6.add(Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'))
model6.add(BatchNormalization())
model6.add(Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'))
model6.add(BatchNormalization())
model6.add(MaxPool2D((2, 2)))
model6.add(Dropout(0.3))
model6.add(Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'))
model6.add(BatchNormalization())
model6.add(Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'))
model6.add(BatchNormalization())
model6.add(MaxPool2D((2, 2)))
model6.add(Dropout(0.4))
model6.add(Flatten())
model6.add(Dense(128, activation='relu', kernel_initializer='he_uniform'))
model6.add(BatchNormalization())
model6.add(Dropout(0.5))
model6.add(Dense(10, activation='softmax'))
# compile model
# opt = SGD(lr=0.001, momentum=0.9)
model6.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# data augmentation: shift the image across width and height and flip it horizontally
datagen = ImageDataGenerator(width_shift_range=0.1, height_shift_range=0.1, horizontal_flip=True, rotation_range=20)
it_train = datagen.flow(x_train, y_train, batch_size=64)  # batch size matched to the steps computation below
steps = int(x_train.shape[0] / 64)
# model.fit accepts generators directly; fit_generator is deprecated
history6 = model6.fit(it_train, epochs=200, steps_per_epoch=steps, validation_data=(x_val, y_val))

evaluation = model6.evaluate(x_val, y_val)
print('Validation accuracy: {}'.format(evaluation[1]))
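
The repo's util.visualize helper accepts a list of Keras History objects, so the
baseline's curves can be saved the same way the comparison scripts do. A minimal
sketch (not in the original script; the output name "cnn_test" is arbitrary):

    from util import visualize

    # writes plots/cnn_test_val_accuracy.png from the run above
    visualize([history6], labels=["baseline CNN"], type="val_accuracy", filename="cnn_test")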
--------------------------------------------------------------------------------
/DR2.py:
--------------------------------------------------------------------------------
from tensorflow.keras.applications import VGG19
from tensorflow.keras.applications import vgg19
from tensorflow.keras.optimizers import SGD  # tf.keras used consistently (the original mixed keras.* and tensorflow.keras.* imports)
import tensorflow as tf
import numpy as np
import random
import os
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import ReduceLROnPlateau
from util import visualize
import csv

os.environ['TF_DETERMINISTIC_OPS'] = '1'
SEED = 666
random.seed(SEED)
np.random.seed(SEED)
tf.random.set_seed(SEED)

train_datagen = ImageDataGenerator(
    width_shift_range=0.2,
    height_shift_range=0.2,
    horizontal_flip=True,
    rotation_range=20,
    shear_range=0.2,
    preprocessing_function=vgg19.preprocess_input)

test_datagen = ImageDataGenerator(preprocessing_function=vgg19.preprocess_input)

img_size = (32, 32)
batch_size = 128

train_generator = train_datagen.flow_from_directory(
    'data/train',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='sparse')

validation_generator = test_datagen.flow_from_directory(
    'data/validation',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='sparse')

# learning rate annealer
lrr = ReduceLROnPlateau(
    monitor='val_accuracy',  # metric to be monitored
    factor=.01,              # factor by which the learning rate is reduced
    patience=3,              # epochs without improvement in val_accuracy before the learning rate is reduced
    min_lr=1e-5,             # lower bound on the learning rate
    verbose=1)

lr = .001

# VGG19 backbone with a dense classification head; the whole network is trainable
mdl2 = Sequential()
mdl2.add(VGG19(include_top=False, weights='imagenet', input_shape=(32, 32, 3), classes=10))
mdl2.add(Flatten())
mdl2.add(Dense(1024, activation='relu', input_dim=512))
mdl2.add(Dense(512, activation='relu'))
mdl2.add(Dense(256, activation='relu'))
mdl2.add(Dense(128, activation='relu'))
mdl2.add(Dense(10, activation='softmax'))

sgd = SGD(learning_rate=lr, momentum=.9, nesterov=False)

results = [None] * 3

# note: the same model instance is reused, so runs 2 and 3 continue training
# from the previous run's weights instead of starting fresh
for i in range(3):
    mdl2.compile(optimizer=sgd, loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    results[i] = mdl2.fit(train_generator,
                          epochs=100,
                          validation_data=validation_generator,
                          callbacks=[lrr], verbose=1)

visualize(results, filename='vgg_val')

for i, history in enumerate(results):
    with open('histories/vgg_' + str(i) + '_history.csv', 'x') as f:
        wrtr = csv.DictWriter(f, ['loss', 'val_loss', 'accuracy', 'val_accuracy'])
        wrtr.writeheader()
        wrtr.writerows([
            {'loss': history.history['loss'][j],
             'val_loss': history.history['val_loss'][j],
             'accuracy': history.history['accuracy'][j],
             'val_accuracy': history.history['val_accuracy'][j]
             } for j in range(len(history.history['loss']))  # inner index renamed to j to stop shadowing the file index
        ])
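
DR2.py fine-tunes the entire VGG19 backbone. A common alternative, not used in
this repository, is to freeze the pre-trained convolutional base and train only
a new dense head; the head sizes below are illustrative. A minimal sketch:

    from tensorflow.keras.applications import VGG19
    from tensorflow.keras.models import Sequential
    from tensorflow.keras.layers import Dense, Flatten

    base = VGG19(include_top=False, weights='imagenet', input_shape=(32, 32, 3))
    base.trainable = False  # keep the ImageNet convolutional weights fixed

    frozen = Sequential([base,
                         Flatten(),
                         Dense(256, activation='relu'),
                         Dense(10, activation='softmax')])
    frozen.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])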
--------------------------------------------------------------------------------
/DR1.py:
--------------------------------------------------------------------------------
'''
testing other transfer learning architectures

https://www.kaggle.com/adi160/cifar-10-keras-transfer-learning
'''

import tensorflow as tf
import numpy as np
import random
import os
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Dropout
from tensorflow.keras.applications import ResNet50V2
from tensorflow.keras.applications.resnet_v2 import preprocess_input
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.optimizers import Adam  # tf.keras used consistently (the original mixed keras.* and tensorflow.keras.* imports)
from tensorflow.keras.callbacks import ReduceLROnPlateau
from util import visualize
import csv

os.environ['TF_DETERMINISTIC_OPS'] = '1'
SEED = 666
random.seed(SEED)
np.random.seed(SEED)
tf.random.set_seed(SEED)

# data augmentation
train_datagen = ImageDataGenerator(
    width_shift_range=0.2,
    height_shift_range=0.2,
    horizontal_flip=True,
    rotation_range=20,
    shear_range=0.2,
    preprocessing_function=preprocess_input)

test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)

img_size = (32, 32)
batch_size = 128

train_generator = train_datagen.flow_from_directory(
    'data/train',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='sparse')

validation_generator = test_datagen.flow_from_directory(
    'data/validation',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='sparse')

# learning rate annealer
lrr = ReduceLROnPlateau(
    monitor='val_accuracy',  # metric to be monitored
    factor=.01,              # factor by which the learning rate is reduced
    patience=3,              # epochs without improvement in val_accuracy before the learning rate is reduced
    min_lr=1e-5,             # lower bound on the learning rate
    verbose=1)

cifar10_dims = (32, 32, 3)
# resnet model

mdl = Sequential()
mdl.add(ResNet50V2(include_top=False, weights='imagenet', input_shape=cifar10_dims, classes=10))
mdl.add(Flatten())
mdl.add(Dense(4000, activation='relu', input_dim=512))
mdl.add(Dense(2000, activation='relu'))
mdl.add(Dropout(.4))
mdl.add(Dense(1000, activation='relu'))
mdl.add(Dropout(.3))
mdl.add(Dense(500, activation='relu'))
mdl.add(Dropout(.2))
mdl.add(Dense(10, activation='softmax'))

adam = Adam(learning_rate=.001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0, amsgrad=False)

results = [None] * 3

# note: the same model instance is reused, so runs 2 and 3 continue training
# from the previous run's weights instead of starting fresh
for i in range(3):
    mdl.compile(optimizer=adam, loss='sparse_categorical_crossentropy', metrics=['accuracy'])
    results[i] = mdl.fit(train_generator,
                         epochs=100,
                         validation_data=validation_generator,
                         callbacks=[lrr],
                         verbose=1)

visualize(results, filename='resnet_val')

for i, history in enumerate(results):
    with open('histories/resnet_' + str(i) + '_history.csv', 'x') as f:
        wrtr = csv.DictWriter(f, ['loss', 'val_loss', 'accuracy', 'val_accuracy'])
        wrtr.writeheader()
        wrtr.writerows([
            {'loss': history.history['loss'][j],
             'val_loss': history.history['val_loss'][j],
             'accuracy': history.history['accuracy'][j],
             'val_accuracy': history.history['val_accuracy'][j]
             } for j in range(len(history.history['loss']))  # inner index renamed to j to stop shadowing the file index
        ])
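
The per-run histories written by DR1.py and DR2.py are committed under
histories/ (see the tree), so the curves can be redrawn without retraining.
util.visualize2 accepts plain dicts of lists; the output name below is
arbitrary. A minimal sketch (not a repository file):

    import pandas as pd
    from util import visualize2

    histories = [pd.read_csv('histories/vgg_{}_history.csv'.format(i)).to_dict('list')
                 for i in range(3)]
    visualize2(histories, labels=['run 1', 'run 2', 'run 3'],
               type='val_accuracy', filename='vgg_val_reloaded')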
--------------------------------------------------------------------------------
/util.py:
--------------------------------------------------------------------------------
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix


def visualize(histories, labels=None, type="val_accuracy", filename=None, start_from=0, title=None):
    """Plot one metric curve per Keras History object."""
    fig = plt.figure()
    if not title:
        title = type
    if not labels:
        labels = ["Model {0}".format(j) for j in range(1, len(histories) + 1)]
    for hist, label in zip(histories, labels):
        y = np.array(hist.history[type])
        plt.plot(range(start_from + 1, len(y) + 1), y[start_from:], label=label)
    plt.title(title)
    plt.xlabel("epoch")
    plt.ylabel(type)
    plt.legend()
    if filename:
        plt.savefig("plots/{0}_{1}".format(filename, type))
        plt.close(fig)
    else:
        plt.show()


def visualize2(histories, labels=None, type="val_accuracy", filename=None, start_from=0, title=None):
    """Like visualize, but takes plain history dicts and cycles through distinct colors."""
    fig = plt.figure()
    ax = fig.add_subplot(111)
    cm = plt.get_cmap('gist_rainbow')
    NUM_COLORS = len(histories)
    ax.set_prop_cycle(color=[cm(1. * i / NUM_COLORS) for i in range(NUM_COLORS)])
    if not title:
        title = type
    if not labels:
        labels = ["Model {0}".format(j) for j in range(1, len(histories) + 1)]
    for hist, label in zip(histories, labels):
        y = np.array(hist[type])
        plt.plot(range(start_from + 1, len(y) + 1), y[start_from:], label=label)
    plt.title(title)
    plt.xlabel("epoch")
    plt.ylabel(type)
    plt.legend()
    if filename:
        plt.savefig("plots/{0}_{1}".format(filename, type))
        plt.close(fig)
    else:
        plt.show()


def plot_confusion_matrix(y_true, y_pred, classes,
                          normalize=False,
                          title=None,
                          cmap=plt.cm.Blues,
                          filename=None):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    if not title:
        if normalize:
            title = 'Normalized confusion matrix'
        else:
            title = 'Confusion matrix, without normalization'

    # Compute confusion matrix
    cm = confusion_matrix(y_true, y_pred)
    if normalize:
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')

    # print(cm)

    fig, ax = plt.subplots(figsize=(7, 7))
    im = ax.imshow(cm, interpolation='nearest', cmap=cmap)
    ax.figure.colorbar(im, ax=ax)
    # We want to show all ticks...
    ax.set(xticks=np.arange(cm.shape[1]),
           yticks=np.arange(cm.shape[0]),
           # ... and label them with the respective list entries
           xticklabels=classes, yticklabels=classes,
           title=title,
           ylabel='True label',
           xlabel='Predicted label')

    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
             rotation_mode="anchor")
    # Loop over data dimensions and create text annotations.
    fmt = '.2f' if normalize else 'd'
    thresh = cm.max() / 2.
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            ax.text(j, i, format(cm[i, j], fmt),
                    ha="center", va="center",
                    color="white" if cm[i, j] > thresh else "black")
    fig.tight_layout()
    if filename:
        plt.savefig("plots/{0}".format(filename))
        plt.close(fig)
    return ax
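
A small synthetic-data demonstration of the confusion-matrix helper above
(not a repository file; assumes the plots/ directory exists):

    import numpy as np
    from util import plot_confusion_matrix

    rng = np.random.default_rng(0)
    y_true = rng.integers(0, 3, size=200)
    # corrupt ~20% of the labels to fake an imperfect classifier
    y_pred = np.where(rng.random(200) < 0.8, y_true, rng.integers(0, 3, size=200))
    plot_confusion_matrix(y_true, y_pred, classes=['cat', 'dog', 'frog'],
                          normalize=True, filename='conf_matrix_demo')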
--------------------------------------------------------------------------------
/additional_model.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd
import tensorflow as tf
import os
import random
import pickle
import matplotlib.pyplot as plt
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation
from tensorflow.keras.optimizers import SGD, Adam, RMSprop  # tf.keras used consistently (the original mixed keras.* and tensorflow.keras.* imports)
from tensorflow.keras import regularizers
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, History
import time

# determinism
os.environ['TF_DETERMINISTIC_OPS'] = '1'


def reset_random_seeds(seed):
    os.environ['PYTHONHASHSEED'] = str(seed)
    tf.random.set_seed(seed)
    np.random.seed(seed)
    random.seed(seed)


# generators
train_datagen2 = ImageDataGenerator(
    rescale=1. / 255,
    featurewise_center=False,
    samplewise_center=False,
    featurewise_std_normalization=False,
    samplewise_std_normalization=False,
    zca_whitening=False,
    rotation_range=15,
    width_shift_range=0.1,
    height_shift_range=0.1,
    horizontal_flip=True,
    vertical_flip=False
)

test_datagen = ImageDataGenerator(rescale=1. / 255)

# params
img_size = (32, 32)
num_classes = 10
baseMapNum = 32
weight_decay = 1e-4


# A three-class specialist (cat/dog/frog) with the same architecture as the
# final model; prediction.py loads it to overrule the 10-class model on these
# frequently confused classes.
def create_model(seed, epochs, batch_size):
    train_generator2 = train_datagen2.flow_from_directory(
        'data/train',
        target_size=img_size,
        batch_size=batch_size,
        classes=['cat', 'dog', 'frog'],
        class_mode='categorical',
        seed=seed)
    validation_generator2 = test_datagen.flow_from_directory(
        'data/validation',
        target_size=img_size,
        batch_size=batch_size,
        classes=['cat', 'dog', 'frog'],
        class_mode='categorical',
        seed=seed)

    reset_random_seeds(seed)
    model = Sequential([
        Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay),
               input_shape=(32, 32, 3)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.2),

        Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.3),

        Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.4),

        Flatten(),
        Dense(128, activation='relu'),
        BatchNormalization(),
        Dropout(0.4),
        Dense(3, activation='softmax')  # three classes only
    ])

    lrr = ReduceLROnPlateau(
        monitor='val_accuracy',
        factor=.5,
        patience=10,
        min_lr=1e-4,
        verbose=1)
    es = EarlyStopping(monitor='val_accuracy', patience=20, restore_best_weights=True)
    opt_adam = Adam(learning_rate=0.002, beta_1=0.9, beta_2=0.999)
    model.compile(loss='categorical_crossentropy',
                  optimizer=opt_adam,
                  metrics=['accuracy'])
    history = model.fit(train_generator2, epochs=epochs, validation_data=validation_generator2, callbacks=[lrr, es])
    loss, acc = model.evaluate(validation_generator2)
    return model, history, loss, acc


t3 = time.time()
model_add, hist_add, loss_add, acc_add = create_model(84, 200, 64)
t4 = time.time()
print("Time: {0:.3f}".format(t4 - t3))

model_add.save("models/add_model2")
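
prediction.py later hard-codes the mapping {0: "cat", 1: "dog", 2: "frog"} for
this specialist. flow_from_directory assigns indices in the order of the
classes list, which can be confirmed directly (quick check, not in the repo):

    from tensorflow.keras.preprocessing.image import ImageDataGenerator

    gen = ImageDataGenerator(rescale=1. / 255).flow_from_directory(
        'data/validation', target_size=(32, 32),
        classes=['cat', 'dog', 'frog'], class_mode='categorical')
    print(gen.class_indices)  # expected: {'cat': 0, 'dog': 1, 'frog': 2}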
--------------------------------------------------------------------------------
/optimizers_test.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd
import tensorflow as tf
import os
import random
import pickle
import matplotlib.pyplot as plt
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation
from tensorflow.keras.optimizers import SGD, Adam, RMSprop  # tf.keras used consistently (the original mixed keras.* and tensorflow.keras.* imports)
from tensorflow.keras import regularizers
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, History
from util import visualize

# determinism
os.environ['TF_DETERMINISTIC_OPS'] = '1'


def reset_random_seeds(seed):
    os.environ['PYTHONHASHSEED'] = str(seed)
    tf.random.set_seed(seed)
    np.random.seed(seed)
    random.seed(seed)


# generators
train_datagen2 = ImageDataGenerator(
    rescale=1. / 255,
    featurewise_center=False,
    samplewise_center=False,
    featurewise_std_normalization=False,
    samplewise_std_normalization=False,
    zca_whitening=False,
    rotation_range=15,
    width_shift_range=0.1,
    height_shift_range=0.1,
    horizontal_flip=True,
    vertical_flip=False
)

test_datagen = ImageDataGenerator(rescale=1. / 255)

# params
img_size = (32, 32)
batch_size = 64
num_classes = 10
baseMapNum = 32
weight_decay = 1e-4


def create_model(seed, optim, epochs):
    train_generator2 = train_datagen2.flow_from_directory(
        'data/train',
        target_size=img_size,
        batch_size=batch_size,
        class_mode='categorical',
        seed=seed)
    validation_generator2 = test_datagen.flow_from_directory(
        'data/validation',
        target_size=img_size,
        batch_size=batch_size,
        class_mode='categorical',
        seed=seed)

    reset_random_seeds(seed)
    model = Sequential([
        Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay),
               input_shape=(32, 32, 3)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.2),

        Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.3),

        Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.4),

        Flatten(),
        Dense(num_classes, activation='softmax')
    ])
    model.compile(loss='categorical_crossentropy',
                  optimizer=optim,
                  metrics=['accuracy'])
    history = model.fit(train_generator2, epochs=epochs, validation_data=validation_generator2)
    loss, acc = model.evaluate(validation_generator2)
    return model, history, loss, acc


opt_rms = RMSprop(learning_rate=0.0003, decay=1e-6)
opt_adam = Adam(learning_rate=0.0003, beta_1=0.9, beta_2=0.999)
opt_sgd = SGD(learning_rate=0.0003, momentum=0.9)

models = []
histories = []
losses = []
accs = []
# note: each optimizer instance is shared by its three seeded runs
for opt in [opt_rms, opt_adam, opt_sgd]:
    for seed in [420, 42, 402]:
        print("{0}, seed {1}".format(opt._name, seed))
        model, hist, loss, acc = create_model(seed, opt, 50)
        models.append(model)
        histories.append(hist)
        losses.append(loss)
        accs.append(acc)

print("RMSProp avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[:3]), np.mean(accs[:3])))
print("Adam avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[3:6]), np.mean(accs[3:6])))
print("SGD avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[6:]), np.mean(accs[6:])))
df = pd.DataFrame({"loss": losses, "acc": accs})
df.to_csv("optim_comparison.csv")  # committed in the repo under test_results/
labels = list(np.array([[opt._name + str(i) for i in range(1, 4)] for opt in [opt_rms, opt_adam, opt_sgd]]).flatten())
visualize(histories, labels=labels, type="accuracy", filename="optim_comparison", title="Comparison of accuracy on training set")
visualize(histories, labels=labels, type="loss", filename="optim_comparison", title="Comparison of loss on training set")
visualize(histories, labels=labels, type="val_accuracy", filename="optim_comparison", title="Comparison of accuracy on validation set")
visualize(histories, labels=labels, type="val_loss", filename="optim_comparison", title="Comparison of loss on validation set")

histories2 = [histories[i].history for i in range(len(histories))]
with open("optim_hist.pickle", "wb") as f:
    pickle.dump(histories2, f)
--------------------------------------------------------------------------------
/WB1.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation
from tensorflow.keras.optimizers import SGD, Adam, RMSprop  # tf.keras used consistently (the original mixed keras.* and tensorflow.keras.* imports)
from tensorflow.keras import regularizers
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping

# params
img_size = (32, 32)
batch_size = 32
num_classes = 10
tf.random.set_seed(420)

# creating generators
train_datagen = ImageDataGenerator(
    rescale=1. / 255,
    width_shift_range=0.1,
    height_shift_range=0.1,
    horizontal_flip=True,
    rotation_range=20)

test_datagen = ImageDataGenerator(rescale=1. / 255)

train_generator = train_datagen.flow_from_directory(
    'data/train',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='categorical',
    seed=420)
validation_generator = test_datagen.flow_from_directory(
    'data/validation',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='categorical',
    seed=420)

# original model https://www.kaggle.com/kedarsai/cifar-10-88-accuracy-using-keras
model = Sequential([
    Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same', input_shape=(32, 32, 3)),
    BatchNormalization(),
    Conv2D(32, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D((2, 2)),
    Dropout(0.2),
    Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(64, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'),
    MaxPool2D((2, 2)),  # note: no BatchNormalization here, unlike the Kaggle original
    Dropout(0.3),
    # redundant input_shape removed from this layer (only the first layer needs it)
    Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(128, (3, 3), activation='relu', kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D((2, 2)),
    Dropout(0.4),
    Flatten(),
    Dense(128, activation='relu', kernel_initializer='he_uniform'),
    BatchNormalization(),
    Dropout(0.5),
    Dense(num_classes, activation='softmax')
])
model.summary()

model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

steps = int(train_generator.n / 64)  # note: with batch_size=32 this covers only half the training set per epoch
history = model.fit(train_generator, epochs=200, steps_per_epoch=steps, validation_data=validation_generator)

model.save("models/wb_cnn_kaggle")
model = load_model("models/wb_cnn_kaggle")
model.evaluate(validation_generator)
# loss - 0.45, accuracy - 0.856

# second model from https://www.kaggle.com/c/cifar-10/discussion/40237
baseMapNum = 32
weight_decay = 1e-4
batch_size = 64

train_datagen2 = ImageDataGenerator(
    rescale=1. / 255,
    featurewise_center=False,
    samplewise_center=False,
    featurewise_std_normalization=False,
    samplewise_std_normalization=False,
    zca_whitening=False,
    rotation_range=15,
    width_shift_range=0.1,
    height_shift_range=0.1,
    horizontal_flip=True,
    vertical_flip=False
)

train_generator2 = train_datagen2.flow_from_directory(
    'data/train',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='categorical',
    seed=420)
validation_generator2 = test_datagen.flow_from_directory(
    'data/validation',
    target_size=img_size,
    batch_size=batch_size,
    class_mode='categorical',
    seed=420)

model2 = Sequential()
model2.add(Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay), input_shape=(32, 32, 3)))
model2.add(Activation('relu'))
model2.add(BatchNormalization())
model2.add(Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)))
model2.add(Activation('relu'))
model2.add(BatchNormalization())
model2.add(MaxPool2D(pool_size=(2, 2)))
model2.add(Dropout(0.2))

model2.add(Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)))
model2.add(Activation('relu'))
model2.add(BatchNormalization())
model2.add(Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)))
model2.add(Activation('relu'))
model2.add(BatchNormalization())
model2.add(MaxPool2D(pool_size=(2, 2)))
model2.add(Dropout(0.3))

model2.add(Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)))
model2.add(Activation('relu'))
model2.add(BatchNormalization())
model2.add(Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)))
model2.add(Activation('relu'))
model2.add(BatchNormalization())
model2.add(MaxPool2D(pool_size=(2, 2)))
model2.add(Dropout(0.4))

model2.add(Flatten())
model2.add(Dense(num_classes, activation='softmax'))

model2.summary()

opt_rms = RMSprop(learning_rate=0.0003, decay=1e-6)
model2.compile(loss='categorical_crossentropy',
               optimizer=opt_rms,
               metrics=['accuracy'])
history2 = model2.fit(train_generator2, epochs=125, verbose=1, validation_data=validation_generator2)
model2.evaluate(validation_generator2)
# loss - 0.49, accuracy - 0.866
pd.DataFrame(history2.history['accuracy']).plot()
model2.save('models/wb_cnn_kaggle2')
--------------------------------------------------------------------------------
/prediction.py:
--------------------------------------------------------------------------------
import numpy as np
import pandas as pd
import tensorflow as tf
import os
import random
import pickle
import matplotlib.pyplot as plt
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential, load_model
from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation
from tensorflow.keras.optimizers import SGD, Adam, RMSprop  # tf.keras used consistently (the original mixed keras.* and tensorflow.keras.* imports)
from tensorflow.keras import regularizers
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping, History
from util import plot_confusion_matrix  # missing in the original, but called at the bottom
import time

# determinism
os.environ['TF_DETERMINISTIC_OPS'] = '1'


def reset_random_seeds(seed):
    os.environ['PYTHONHASHSEED'] = str(seed)
    tf.random.set_seed(seed)
    np.random.seed(seed)
    random.seed(seed)


# generators
train_datagen2 = ImageDataGenerator(
    rescale=1. / 255,
    featurewise_center=False,
    samplewise_center=False,
    featurewise_std_normalization=False,
    samplewise_std_normalization=False,
    zca_whitening=False,
    rotation_range=15,
    width_shift_range=0.1,
    height_shift_range=0.1,
    horizontal_flip=True,
    vertical_flip=False
)

test_datagen = ImageDataGenerator(rescale=1. / 255)

# params
img_size = (32, 32)
num_classes = 10
baseMapNum = 32
weight_decay = 1e-4


def create_model(seed, epochs, batch_size):
    train_generator2 = train_datagen2.flow_from_directory(
        'data/train',
        target_size=img_size,
        batch_size=batch_size,
        class_mode='categorical',
        seed=seed)
    validation_generator2 = test_datagen.flow_from_directory(
        'data/validation',
        target_size=img_size,
        batch_size=batch_size,
        class_mode='categorical',
        seed=seed)

    reset_random_seeds(seed)
    model = Sequential([
        Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay),
               input_shape=(32, 32, 3)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.2),

        Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.3),

        Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
        Activation('relu'),
        BatchNormalization(),
        MaxPool2D(pool_size=(2, 2)),
        Dropout(0.4),
        Flatten(),
        Dense(128, activation='relu'),
        BatchNormalization(),
        Dropout(0.4),
        Dense(num_classes, activation='softmax')
    ])

    lrr = ReduceLROnPlateau(
        monitor='val_accuracy',
        factor=.5,
        patience=10,
        min_lr=1e-4,
        verbose=1)
    es = EarlyStopping(monitor='val_accuracy', patience=20, restore_best_weights=True)
    opt_adam = Adam(learning_rate=0.002, beta_1=0.9, beta_2=0.999)
    model.compile(loss='categorical_crossentropy',
                  optimizer=opt_adam,
                  metrics=['accuracy'])
    history = model.fit(train_generator2, epochs=epochs, validation_data=validation_generator2, callbacks=[lrr, es])
    loss, acc = model.evaluate(validation_generator2)
    return model, history, loss, acc


t1 = time.time()
model, hist, loss, acc = create_model(84, 200, 64)
t2 = time.time()
print("Time: {0:.3f}".format(t2 - t1))

model.save("models/final_model")

test_generator = test_datagen.flow_from_directory(
    'data/',
    target_size=img_size,
    classes=['test'],
    batch_size=1,
    class_mode=None,
    shuffle=False)

pred = model.predict(test_generator, verbose=1)

classes = np.argmax(pred, axis=1)
# flow_from_directory assigns class indices alphabetically, so sort the listing
# (plain os.listdir order is OS-dependent)
classes_dict = dict(list(zip(range(10), sorted(os.listdir("data/train")))))
classes_names = [classes_dict[c] for c in classes]
pred_df = pd.DataFrame(classes_names, columns=["label"])

filenames = list(test_generator.filenames)
ids = [f.split(".")[0][5:] for f in filenames]  # strip the "test/" prefix and the ".png" suffix
pred_df.index = ids
pred_df.to_csv("kaggle_pred1.csv", index_label="id")

# overrule the 10-class model with the cat/dog/frog specialist (additional_model.py)
model_add = load_model("models/add_model2")
pred_add = model_add.predict(test_generator, verbose=1)

classes_add = np.argmax(pred_add, axis=1)
classes_add_names = [{0: "cat", 1: "dog", 2: "frog"}[c] for c in classes_add]
pred_df_add = pd.DataFrame(classes_add_names, columns=["label"])
pred_df_add.index = ids

# wherever the 10-class model predicted cat, dog or frog, substitute the specialist's prediction
pred_df_new = pred_df.copy()
pred_df_new[pred_df_new.label.isin(["dog", "cat", "frog"])] = pred_df_add[pred_df_new.label.isin(["dog", "cat", "frog"])]
pred_df_new.to_csv("kaggle_pred2.csv", index_label="id")

# confusion matrix on the validation set
validation_generator_pred = test_datagen.flow_from_directory(
    'data/validation',
    target_size=img_size,
    batch_size=1,
    class_mode="categorical",
    shuffle=False)

pred_valid = model.predict(validation_generator_pred, verbose=1)

pred_valid_classes = np.argmax(pred_valid, axis=1)

plot_confusion_matrix(validation_generator_pred.classes, pred_valid_classes, sorted(os.listdir("data/train")),
                      normalize=True, filename="conf_matrix_valid")
43 | test_datagen = ImageDataGenerator(rescale=1. / 255)
44 | 
45 | # params
46 | img_size = (32, 32)
47 | num_classes = 10
48 | baseMapNum = 32
49 | weight_decay = 1e-4
50 | 
51 | 
52 | def create_model(seed, epochs, batch_size):
53 |     train_generator2 = train_datagen2.flow_from_directory(
54 |         'data/train',
55 |         target_size=img_size,
56 |         batch_size=batch_size,
57 |         class_mode='categorical',
58 |         seed=seed)
59 |     validation_generator2 = test_datagen.flow_from_directory(
60 |         'data/validation',
61 |         target_size=img_size,
62 |         batch_size=batch_size,
63 |         class_mode='categorical',
64 |         seed=seed)
65 | 
66 |     reset_random_seeds(seed)
67 |     model = Sequential([
68 |         Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay),
69 |                input_shape=(32, 32, 3)),
70 |         Activation('relu'),
71 |         BatchNormalization(),
72 |         Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
73 |         Activation('relu'),
74 |         BatchNormalization(),
75 |         MaxPool2D(pool_size=(2, 2)),
76 |         Dropout(0.2),
77 | 
78 |         Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
79 |         Activation('relu'),
80 |         BatchNormalization(),
81 |         Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
82 |         Activation('relu'),
83 |         BatchNormalization(),
84 |         MaxPool2D(pool_size=(2, 2)),
85 |         Dropout(0.3),
86 | 
87 |         Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
88 |         Activation('relu'),
89 |         BatchNormalization(),
90 |         Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)),
91 |         Activation('relu'),
92 |         BatchNormalization(),
93 |         MaxPool2D(pool_size=(2, 2)),
94 |         Dropout(0.4),
95 |         Flatten(),
96 |         Dense(128, activation='relu'),
97 |         BatchNormalization(),
98 |         Dropout(0.4),
99 |         Dense(num_classes, activation='softmax')
100 |     ])
101 | 
102 |     lrr = ReduceLROnPlateau(
103 |         monitor='val_accuracy',
104 |         factor=.5,
105 |         patience=10,
106 |         min_lr=1e-4,
107 |         verbose=1)
108 |     opt_adam = Adam(learning_rate=0.002, beta_1=0.9, beta_2=0.999)
109 |     model.compile(loss='categorical_crossentropy',
110 |                   optimizer=opt_adam,
111 |                   metrics=['accuracy'])
112 |     history = model.fit(train_generator2, epochs=epochs, validation_data=validation_generator2, callbacks=[lrr])
113 |     loss, acc = model.evaluate(validation_generator2)
114 |     return model, history, loss, acc
115 | 
116 | 
117 | models = []
118 | histories = []
119 | losses = []
120 | accs = []
121 | for batch in [64]:
122 |     for seed in [420, 42, 402]:
123 |         print("Batch size {0}, seed {1}".format(batch, seed))
124 |         model, hist, loss, acc = create_model(seed, 200, batch)
125 |         models.append(model)
126 |         histories.append(hist)
127 |         losses.append(loss)
128 |         accs.append(acc)
129 | 
130 | print("Final model avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[:3]), np.mean(accs[:3])))
131 | 
132 | 
133 | df = pd.DataFrame({"loss": losses, "acc": accs})
134 | df.to_csv("final_model.csv")
135 | 
136 | 
137 | 
138 | histories_final = [histories[i].history for i in range(len(histories))]
139 | with open("final_hist.pickle", "wb") as f:
140 |     pickle.dump(histories_final, f)
141 | 
142 | 
143 | labels = ["Batch_size=64 " + str(i) for i in range(1, 4)]  # three final-model runs, one per seed, all with batch size 64
144 | visualize2(histories_final, labels=labels, type="accuracy", filename="final_comparison",
145 |            title="Comparison of accuracy on training set")
146 | visualize2(histories_final, labels=labels, type="loss", filename="final_comparison",
147 |            title="Comparison of loss on training set")
148 | visualize2(histories_final, labels=labels, type="val_accuracy", filename="final_comparison",
149 |            title="Comparison of accuracy on validation set")
150 | visualize2(histories_final, labels=labels, type="val_loss", filename="final_comparison",
151 |            title="Comparison of loss on validation set")
152 | 
153 | visualize2(histories_final, labels=labels, type="accuracy", filename="final_comparison2",
154 |            title="Comparison of accuracy on training set", start_from=100)
155 | visualize2(histories_final, labels=labels, type="loss", filename="final_comparison2",
156 |            title="Comparison of loss on training set", start_from=100)
157 | visualize2(histories_final, labels=labels, type="val_accuracy", filename="final_comparison2",
158 |            title="Comparison of accuracy on validation set", start_from=100)
159 | visualize2(histories_final, labels=labels, type="val_loss", filename="final_comparison2",
160 |            title="Comparison of loss on validation set", start_from=100)
161 | 
162 | visualize2(histories_final, labels=labels, type="lr", title="Changes of learning rate by callback", filename="fianl_comparison")
--------------------------------------------------------------------------------
/transfer_learning.py:
--------------------------------------------------------------------------------
1 | '''
2 | https://www.kaggle.com/adi160/cifar-10-keras-transfer-learning
3 | '''
4 | 
5 | from PIL import Image
6 | import numpy as np
7 | import pandas as pd
8 | import matplotlib.pyplot as plt
9 | import tensorflow as tf
10 | from tensorflow.keras.models import Sequential
11 | from tensorflow.keras.layers import Dense, Flatten, GlobalAveragePooling2D, Dropout, GlobalMaxPooling2D
12 | from tensorflow.keras.applications import ResNet50V2, VGG19
13 | from tensorflow.keras.applications.resnet_v2 import preprocess_input
14 | from tensorflow.keras.applications import vgg19
15 | from keras.preprocessing.image import ImageDataGenerator
16 | from keras.optimizers import SGD, Adam
17 | from sklearn.model_selection import train_test_split
18 | from sklearn.metrics import confusion_matrix
19 | from keras.callbacks import ReduceLROnPlateau
20 | 
21 | # data augmentation
22 | train_datagen = ImageDataGenerator(
23 |     width_shift_range=0.2,
24 |     height_shift_range=0.2,
25 |     horizontal_flip=True,
26 |     rotation_range=20,
27 |     shear_range=0.2,
28 |     preprocessing_function=preprocess_input)
29 | 
30 | test_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)
31 | 
32 | img_size = (32, 32)
33 | batch_size = 100
34 | 
35 | train_generator = train_datagen.flow_from_directory(
36 |     'data/train',
37 |     target_size=img_size,
38 |     batch_size=batch_size,
39 |     class_mode='sparse')
40 | 
41 | validation_generator = test_datagen.flow_from_directory(
42 |     'data/validation',
43 |     target_size=img_size,
44 |     batch_size=batch_size,
45 |     class_mode='sparse')
46 | 
47 | # x, y = train_generator.next()
48 | # i = 7
49 | # plt.figure()
50 | # plt.imshow(x[i])
51 | # plt.colorbar()
52 | # plt.grid(False)
53 | # plt.show()
54 | # print(y[i])
55 | # learning rate annealer
56 | lrr = ReduceLROnPlateau(
57 |     monitor='val_accuracy',  # metric to be measured
58 |     factor=.01,  # factor by which the learning rate will be reduced
59 |     patience=3,  # number of epochs with no improvement in val_accuracy before the learning rate is reduced
60 |     min_lr=1e-5,  # the minimum learning rate
61 |     verbose=1)
62 | 
63 | cifar10_dims = (32, 32, 3)
64 | 
65 | # resnet model
66 | 
67 | mdl = Sequential()
68 | mdl.add(ResNet50V2(include_top=False, weights='imagenet', input_shape=cifar10_dims, classes=10))
69 | mdl.add(Flatten())
70 | mdl.add(Dense(4000, activation='relu', input_dim=512))
71 | mdl.add(Dense(2000, activation='relu'))
72 | mdl.add(Dropout(.4))
73 | mdl.add(Dense(1000, activation='relu'))
74 | mdl.add(Dropout(.3))
75 | mdl.add(Dense(500, activation='relu'))
76 | mdl.add(Dropout(.2))
77 | mdl.add(Dense(10, activation='softmax'))
78 | 
79 | lr = .001
80 | 
81 | adam = Adam(learning_rate=lr, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.0, amsgrad=False)
82 | mdl.compile(optimizer=adam, loss='sparse_categorical_crossentropy', metrics=['accuracy'])
83 | hist = mdl.fit(train_generator,
84 |                epochs=100,
85 |                validation_data=validation_generator,
86 |                callbacks=[lrr],
87 |                verbose=1)
88 | 
89 | # mdl.summary()
90 | # # plots of loss and accuracy
91 | # f, ax = plt.subplots(2, 1)  # creates 2 subplots under 1 column
92 | #
93 | # # Assign the first subplot to graph training loss and validation loss
94 | # ax[0].plot(hist.history['loss'], color='b', label='Training Loss')
95 | # ax[0].plot(hist.history['val_loss'], color='r', label='Validation Loss')
96 | #
97 | # # Next lets plot the training accuracy and validation accuracy
98 | # ax[1].plot(hist.history['accuracy'], color='b', label='Training Accuracy')
99 | # ax[1].plot(hist.history['val_accuracy'], color='r', label='Validation Accuracy')
100 | 
101 | # validation
102 | # mdl.evaluate(validation_generator)
103 | #
104 | #
105 | # test_generator = test_datagen.flow_from_directory(
106 | #     'data/validation',
107 | #     target_size=img_size,
108 | #     batch_size=1,
109 | #     class_mode='sparse',
110 | #     shuffle=False)
111 | #
112 | # y_pred = np.argmax(mdl.predict(test_generator), axis=-1)
113 | #
114 | # y_true = pd.get_dummies(pd.Series(test_generator.classes)).idxmax(axis=1)
115 | # print(np.mean(y_pred == y_true))
116 | #
117 | # cm = confusion_matrix(y_true, y_pred)
118 | 
119 | # saving model
120 | 
121 | mdl.save("models/transfer_learning_resnet50")
122 | mdl.save_weights("models/transfer_learning_resnet50_weights")
123 | 
124 | # VGG model
125 | 
126 | train_datagen = ImageDataGenerator(
127 |     width_shift_range=0.2,
128 |     height_shift_range=0.2,
129 |     horizontal_flip=True,
130 |     rotation_range=20,
131 |     shear_range=0.2,
132 |     preprocessing_function=vgg19.preprocess_input)
133 | 
134 | test_datagen = ImageDataGenerator(preprocessing_function=vgg19.preprocess_input)
135 | 
136 | img_size = (32, 32)
137 | batch_size = 100
138 | 
139 | train_generator = train_datagen.flow_from_directory(
140 |     'data/train',
141 |     target_size=img_size,
142 |     batch_size=batch_size,
143 |     class_mode='sparse')
144 | 
145 | validation_generator = test_datagen.flow_from_directory(
146 |     'data/validation',
147 |     target_size=img_size,
148 |     batch_size=batch_size,
149 |     class_mode='sparse')
150 | 
151 | lr = .001
152 | 
153 | mdl2 = Sequential()
154 | mdl2.add(VGG19(include_top=False, weights='imagenet', input_shape=(32, 32, 3), classes=10))
155 | mdl2.add(Flatten())
156 | mdl2.add(Dense(1024, activation='relu', input_dim=512))
157 | mdl2.add(Dense(512, activation='relu'))
158 | mdl2.add(Dense(256, activation='relu'))
159 | mdl2.add(Dense(128, activation='relu'))
160 | mdl2.add(Dense(10, activation='softmax'))
161 | 
162 | sgd = SGD(learning_rate=lr,
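# classical momentum, no Nesterov look-ahead; the fit below reuses the same `lrr` annealer defined for the ResNet run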
momentum=.9, nesterov=False) 163 | mdl2.compile(optimizer=sgd, loss='sparse_categorical_crossentropy', metrics=['accuracy']) 164 | hist2 = mdl2.fit(train_generator, 165 | epochs=40, 166 | validation_data=validation_generator, 167 | callbacks=[lrr], verbose=1) 168 | 169 | # f, ax = plt.subplots(2, 1) # Creates 2 subplots under 1 column 170 | # 171 | # # Assign the first subplot to graph training loss and validation loss 172 | # ax[0].plot(hist2.history['loss'], color='b', label='Training Loss') 173 | # ax[0].plot(hist2.history['val_loss'], color='r', label='Validation Loss') 174 | # 175 | # # Next lets plot the training accuracy and validation accuracy 176 | # ax[1].plot(hist2.history['accuracy'], color='b', label='Training Accuracy') 177 | # ax[1].plot(hist2.history['val_accuracy'], color='r', label='Validation Accuracy') 178 | 179 | mdl2.save("models/transfer_learning_vgg19") 180 | mdl2.save_weights("models/transfer_learning_vgg19_weights") 181 | -------------------------------------------------------------------------------- /lr_test.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import tensorflow as tf 4 | import os 5 | import random 6 | import pickle 7 | import matplotlib.pyplot as plt 8 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 9 | from tensorflow.keras.utils import to_categorical 10 | from tensorflow.keras.models import Sequential, load_model 11 | from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation 12 | from keras.optimizers import SGD, Adam, RMSprop 13 | from keras import regularizers 14 | from keras.callbacks import ReduceLROnPlateau, EarlyStopping, History 15 | from util import visualize2, visualize 16 | 17 | # determinism 18 | os.environ['TF_DETERMINISTIC_OPS'] = '1' 19 | 20 | 21 | def reset_random_seeds(seed): 22 | os.environ['PYTHONHASHSEED'] = str(seed) 23 | tf.random.set_seed(seed) 24 | np.random.seed(seed) 25 | random.seed(seed) 26 | 27 | 28 | # generators 29 | train_datagen2 = ImageDataGenerator( 30 | rescale=1. / 255, 31 | featurewise_center=False, 32 | samplewise_center=False, 33 | featurewise_std_normalization=False, 34 | samplewise_std_normalization=False, 35 | zca_whitening=False, 36 | rotation_range=15, 37 | width_shift_range=0.1, 38 | height_shift_range=0.1, 39 | horizontal_flip=True, 40 | vertical_flip=False 41 | ) 42 | 43 | test_datagen = ImageDataGenerator(rescale=1. 
/ 255) 44 | 45 | # params 46 | img_size = (32, 32) 47 | batch_size = 64 48 | num_classes = 10 49 | baseMapNum = 32 50 | weight_decay = 1e-4 51 | 52 | 53 | def create_model(seed, epochs, lr, callbacks): 54 | train_generator2 = train_datagen2.flow_from_directory( 55 | 'data/train', 56 | target_size=img_size, 57 | batch_size=batch_size, 58 | class_mode='categorical', 59 | seed=seed) 60 | validation_generator2 = test_datagen.flow_from_directory( 61 | 'data/validation', 62 | target_size=img_size, 63 | batch_size=batch_size, 64 | class_mode='categorical', 65 | seed=seed) 66 | 67 | reset_random_seeds(seed) 68 | model = Sequential([ 69 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay), 70 | input_shape=(32, 32, 3)), 71 | Activation('relu'), 72 | BatchNormalization(), 73 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 74 | Activation('relu'), 75 | BatchNormalization(), 76 | MaxPool2D(pool_size=(2, 2)), 77 | Dropout(0.2), 78 | 79 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 80 | Activation('relu'), 81 | BatchNormalization(), 82 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 83 | Activation('relu'), 84 | BatchNormalization(), 85 | MaxPool2D(pool_size=(2, 2)), 86 | Dropout(0.3), 87 | 88 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 89 | Activation('relu'), 90 | BatchNormalization(), 91 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 92 | Activation('relu'), 93 | BatchNormalization(), 94 | MaxPool2D(pool_size=(2, 2)), 95 | Dropout(0.4), 96 | 97 | Flatten(), 98 | Dense(num_classes, activation='softmax') 99 | ]) 100 | opt_adam = Adam(learning_rate=lr, beta_1=0.9, beta_2=0.999) 101 | model.compile(loss='categorical_crossentropy', 102 | optimizer=opt_adam, 103 | metrics=['accuracy']) 104 | history = model.fit(train_generator2, epochs=epochs, validation_data=validation_generator2, callbacks=callbacks) 105 | loss, acc = model.evaluate(validation_generator2) 106 | return model, history, loss, acc 107 | 108 | 109 | lrr = ReduceLROnPlateau( 110 | monitor='val_accuracy', 111 | factor=.5, 112 | patience=8, 113 | min_lr=1e-4, 114 | verbose=1) 115 | 116 | models = [] 117 | histories = [] 118 | losses = [] 119 | accs = [] 120 | callbacks = [[], [lrr]] 121 | lr = [0.002, 0.002] 122 | 123 | for l, call in zip(lr, callbacks): 124 | for seed in [420, 42, 402]: 125 | print("{0}, seed {1}".format(l, seed)) 126 | model, hist, loss, acc = create_model(seed, 50, l, call) 127 | models.append(model) 128 | histories.append(hist) 129 | losses.append(loss) 130 | accs.append(acc) 131 | 132 | print("No callback, lr=0.002 avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[:3]), np.mean(accs[:3]))) 133 | print("Reduce on plateau, lr=0.002 avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[3:6]), np.mean(accs[3:6]))) 134 | 135 | df = pd.DataFrame({"loss": losses, "acc": accs}) 136 | df.to_csv("lr_comparison.csv") 137 | 138 | with open("optim_hist.pickle", "rb") as f: 139 | hist_adam = pickle.load(f)[3:6] 140 | 141 | histories_lr = [histories[i].history for i in range(len(histories))] 142 | with open("lr_hist.pickle", "wb") as f: 143 | pickle.dump(histories_lr, f) 144 | histories3 = hist_adam + histories_lr 145 | 146 | labels = list(np.array([[name + " " +str(i) for i in range(1, 4)] for name in ["lr=0.0003", 
"lr=0.002", "lr=0.002 + callback" ]]).flatten()) 147 | visualize2(histories3, labels=labels, type="accuracy", filename="lr_comparison", 148 | title="Comparison of accuracy on training set") 149 | visualize2(histories3, labels=labels, type="loss", filename="lr_comparison", 150 | title="Comparison of loss on training set") 151 | visualize2(histories3, labels=labels, type="val_accuracy", filename="lr_comparison", 152 | title="Comparison of accuracy on validation set") 153 | visualize2(histories3, labels=labels, type="val_loss", filename="lr_comparison", 154 | title="Comparison of loss on validation set") 155 | 156 | visualize2(histories3, labels=labels, type="accuracy", filename="lr_comparison2", 157 | title="Comparison of accuracy on training set", start_from=20) 158 | visualize2(histories3, labels=labels, type="loss", filename="lr_comparison2", 159 | title="Comparison of loss on training set", start_from=20) 160 | visualize2(histories3, labels=labels, type="val_loss", filename="lr_comparison2", 161 | title="Comparison of loss on validation set", start_from=20) 162 | visualize2(histories3, labels=labels, type="val_accuracy", filename="lr_comparison2", 163 | title="Comparison of accuracy on validation set", start_from=20) 164 | 165 | visualize(histories[3:], type="lr", title="Changes of learning rate by callback",filename="lr_comparison") 166 | -------------------------------------------------------------------------------- /arch_test.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import tensorflow as tf 4 | import os 5 | import random 6 | import pickle 7 | import matplotlib.pyplot as plt 8 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 9 | from tensorflow.keras.utils import to_categorical 10 | from tensorflow.keras.models import Sequential, load_model 11 | from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation 12 | from keras.optimizers import SGD, Adam, RMSprop 13 | from keras import regularizers 14 | from keras.callbacks import ReduceLROnPlateau, EarlyStopping, History 15 | from util import visualize, visualize2 16 | 17 | # determinism 18 | os.environ['TF_DETERMINISTIC_OPS'] = '1' 19 | 20 | 21 | def reset_random_seeds(seed): 22 | os.environ['PYTHONHASHSEED'] = str(seed) 23 | tf.random.set_seed(seed) 24 | np.random.seed(seed) 25 | random.seed(seed) 26 | 27 | 28 | # generators 29 | train_datagen2 = ImageDataGenerator( 30 | rescale=1. / 255, 31 | featurewise_center=False, 32 | samplewise_center=False, 33 | featurewise_std_normalization=False, 34 | samplewise_std_normalization=False, 35 | zca_whitening=False, 36 | rotation_range=15, 37 | width_shift_range=0.1, 38 | height_shift_range=0.1, 39 | horizontal_flip=True, 40 | vertical_flip=False 41 | ) 42 | 43 | test_datagen = ImageDataGenerator(rescale=1. 
/ 255) 44 | 45 | # params 46 | img_size = (32, 32) 47 | batch_size = 64 48 | num_classes = 10 49 | baseMapNum = 32 50 | weight_decay = 1e-4 51 | 52 | 53 | def create_model(seed, epochs, dense): 54 | train_generator2 = train_datagen2.flow_from_directory( 55 | 'data/train', 56 | target_size=img_size, 57 | batch_size=batch_size, 58 | class_mode='categorical', 59 | seed=seed) 60 | validation_generator2 = test_datagen.flow_from_directory( 61 | 'data/validation', 62 | target_size=img_size, 63 | batch_size=batch_size, 64 | class_mode='categorical', 65 | seed=seed) 66 | 67 | reset_random_seeds(seed) 68 | model = Sequential([ 69 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay), 70 | input_shape=(32, 32, 3)), 71 | Activation('relu'), 72 | BatchNormalization(), 73 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 74 | Activation('relu'), 75 | BatchNormalization(), 76 | MaxPool2D(pool_size=(2, 2)), 77 | Dropout(0.2), 78 | 79 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 80 | Activation('relu'), 81 | BatchNormalization(), 82 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 83 | Activation('relu'), 84 | BatchNormalization(), 85 | MaxPool2D(pool_size=(2, 2)), 86 | Dropout(0.3), 87 | 88 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 89 | Activation('relu'), 90 | BatchNormalization(), 91 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 92 | Activation('relu'), 93 | BatchNormalization(), 94 | MaxPool2D(pool_size=(2, 2)), 95 | Dropout(0.4), 96 | 97 | Flatten() 98 | ]) 99 | for i in range(dense): 100 | model.add(Dense(128//(i+1), activation='relu')) 101 | model.add(BatchNormalization()) 102 | model.add(Dropout(0.4)) 103 | 104 | model.add(Dense(num_classes, activation='softmax')) 105 | lrr = ReduceLROnPlateau( 106 | monitor='val_accuracy', 107 | factor=.5, 108 | patience=8, 109 | min_lr=1e-4, 110 | verbose=1) 111 | opt_adam = Adam(learning_rate=0.002, beta_1=0.9, beta_2=0.999) 112 | model.compile(loss='categorical_crossentropy', 113 | optimizer=opt_adam, 114 | metrics=['accuracy']) 115 | history = model.fit(train_generator2, epochs=epochs, validation_data=validation_generator2, callbacks=[lrr]) 116 | loss, acc = model.evaluate(validation_generator2) 117 | return model, history, loss, acc 118 | 119 | 120 | models = [] 121 | histories = [] 122 | losses = [] 123 | accs = [] 124 | for d in [1, 2]: 125 | for seed in [420, 42, 402]: 126 | print("Added dense {0}, seed {1}".format(d, seed)) 127 | model, hist, loss, acc = create_model(seed, 50, d) 128 | models.append(model) 129 | histories.append(hist) 130 | losses.append(loss) 131 | accs.append(acc) 132 | 133 | print("1 dense layer avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[:3]), np.mean(accs[:3]))) 134 | print("2 dense layers avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[3:6]), np.mean(accs[3:6]))) 135 | 136 | df = pd.DataFrame({"loss": losses, "acc": accs}) 137 | df.to_csv("arch_comparison.csv") 138 | 139 | with open("lr_hist.pickle", "rb") as f: 140 | hist_lr = pickle.load(f)[3:] 141 | 142 | histories_arch = [histories[i].history for i in range(len(histories))] 143 | with open("arch_hist.pickle", "wb") as f: 144 | pickle.dump(histories_arch, f) 145 | histories3 = hist_lr + histories_arch 146 | 147 | labels = list(np.array([[name + " 
" +str(i) for i in range(1, 4)] for name in ["0 dense layers", "1 dense layer", "2 dense layers" ]]).flatten()) 148 | visualize2(histories3, labels=labels, type="accuracy", filename="arch_comparison", 149 | title="Comparison of accuracy on training set") 150 | visualize2(histories3, labels=labels, type="loss", filename="arch_comparison", 151 | title="Comparison of loss on training set") 152 | visualize2(histories3, labels=labels, type="val_accuracy", filename="arch_comparison", 153 | title="Comparison of accuracy on validation set") 154 | visualize2(histories3, labels=labels, type="val_loss", filename="arch_comparison", 155 | title="Comparison of loss on validation set") 156 | 157 | visualize2(histories3, labels=labels, type="accuracy", filename="arch_comparison2", 158 | title="Comparison of accuracy on training set", start_from=20) 159 | visualize2(histories3, labels=labels, type="loss", filename="arch_comparison2", 160 | title="Comparison of loss on training set", start_from=20) 161 | visualize2(histories3, labels=labels, type="val_accuracy", filename="arch_comparison2", 162 | title="Comparison of accuracy on validation set", start_from=20) 163 | visualize2(histories3, labels=labels, type="val_loss", filename="arch_comparison2", 164 | title="Comparison of loss on validation set", start_from=20) 165 | 166 | visualize2(histories3,labels=labels, type="lr", title="Changes of learning rate by callback",filename="arch_comparison") 167 | -------------------------------------------------------------------------------- /batch_test.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import tensorflow as tf 4 | import os 5 | import random 6 | import pickle 7 | import matplotlib.pyplot as plt 8 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 9 | from tensorflow.keras.utils import to_categorical 10 | from tensorflow.keras.models import Sequential, load_model 11 | from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation 12 | from keras.optimizers import SGD, Adam, RMSprop 13 | from keras import regularizers 14 | from keras.callbacks import ReduceLROnPlateau, EarlyStopping, History 15 | from util import visualize, visualize2 16 | 17 | # determinism 18 | os.environ['TF_DETERMINISTIC_OPS'] = '1' 19 | 20 | 21 | def reset_random_seeds(seed): 22 | os.environ['PYTHONHASHSEED'] = str(seed) 23 | tf.random.set_seed(seed) 24 | np.random.seed(seed) 25 | random.seed(seed) 26 | 27 | 28 | # generators 29 | train_datagen2 = ImageDataGenerator( 30 | rescale=1. / 255, 31 | featurewise_center=False, 32 | samplewise_center=False, 33 | featurewise_std_normalization=False, 34 | samplewise_std_normalization=False, 35 | zca_whitening=False, 36 | rotation_range=15, 37 | width_shift_range=0.1, 38 | height_shift_range=0.1, 39 | horizontal_flip=True, 40 | vertical_flip=False 41 | ) 42 | 43 | test_datagen = ImageDataGenerator(rescale=1. 
/ 255) 44 | 45 | # params 46 | img_size = (32, 32) 47 | num_classes = 10 48 | baseMapNum = 32 49 | weight_decay = 1e-4 50 | 51 | 52 | def create_model(seed, epochs, batch_size): 53 | train_generator2 = train_datagen2.flow_from_directory( 54 | 'data/train', 55 | target_size=img_size, 56 | batch_size=batch_size, 57 | class_mode='categorical', 58 | seed=seed) 59 | validation_generator2 = test_datagen.flow_from_directory( 60 | 'data/validation', 61 | target_size=img_size, 62 | batch_size=batch_size, 63 | class_mode='categorical', 64 | seed=seed) 65 | 66 | reset_random_seeds(seed) 67 | model = Sequential([ 68 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay), 69 | input_shape=(32, 32, 3)), 70 | Activation('relu'), 71 | BatchNormalization(), 72 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 73 | Activation('relu'), 74 | BatchNormalization(), 75 | MaxPool2D(pool_size=(2, 2)), 76 | Dropout(0.2), 77 | 78 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 79 | Activation('relu'), 80 | BatchNormalization(), 81 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 82 | Activation('relu'), 83 | BatchNormalization(), 84 | MaxPool2D(pool_size=(2, 2)), 85 | Dropout(0.3), 86 | 87 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 88 | Activation('relu'), 89 | BatchNormalization(), 90 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 91 | Activation('relu'), 92 | BatchNormalization(), 93 | MaxPool2D(pool_size=(2, 2)), 94 | Dropout(0.4), 95 | Flatten(), 96 | Dense(128, activation='relu'), 97 | BatchNormalization(), 98 | Dropout(0.4), 99 | Dense(num_classes, activation='softmax') 100 | ]) 101 | 102 | lrr = ReduceLROnPlateau( 103 | monitor='val_accuracy', 104 | factor=.5, 105 | patience=8, 106 | min_lr=1e-4, 107 | verbose=1) 108 | opt_adam = Adam(learning_rate=0.002, beta_1=0.9, beta_2=0.999) 109 | model.compile(loss='categorical_crossentropy', 110 | optimizer=opt_adam, 111 | metrics=['accuracy']) 112 | history = model.fit(train_generator2, epochs=epochs, validation_data=validation_generator2, callbacks=[lrr]) 113 | loss, acc = model.evaluate(validation_generator2) 114 | return model, history, loss, acc 115 | 116 | 117 | models = [] 118 | histories = [] 119 | losses = [] 120 | accs = [] 121 | for batch in [32, 128, 256]: 122 | for seed in [420, 42, 402]: 123 | print("Batch size {0}, seed {1}".format(batch, seed)) 124 | model, hist, loss, acc = create_model(seed, 50, batch) 125 | models.append(model) 126 | histories.append(hist) 127 | losses.append(loss) 128 | accs.append(acc) 129 | 130 | print("Batch size 32 avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[:3]), np.mean(accs[:3]))) 131 | print("Batch size 128 avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[3:6]), np.mean(accs[3:6]))) 132 | print("Batch size 256 avg loss: {0:.4f}, avg accuracy: {1:.4f}".format(np.mean(losses[6:]), np.mean(accs[6:]))) 133 | 134 | df = pd.DataFrame({"loss": losses, "acc": accs}) 135 | df.to_csv("batch_comparison.csv") 136 | 137 | with open("arch_hist.pickle", "rb") as f: 138 | hist_arch = pickle.load(f)[3:] 139 | 140 | histories_batch = [histories[i].history for i in range(len(histories))] 141 | with open("batch_hist.pickle", "wb") as f: 142 | pickle.dump(histories_batch, f) 143 | histories3 = hist_arch + 
histories_batch 144 | 145 | labels = list(np.array([[name + " " +str(i) for i in range(1, 4)] for name in ["Batch_size=64", "Batch_size=32", "Batch_size=128", "Batch_size=256"]]).flatten()) 146 | visualize2(histories3, labels=labels, type="accuracy", filename="batch_comparison", 147 | title="Comparison of accuracy on training set") 148 | visualize2(histories3, labels=labels, type="loss", filename="batch_comparison", 149 | title="Comparison of loss on training set") 150 | visualize2(histories3, labels=labels, type="val_accuracy", filename="batch_comparison", 151 | title="Comparison of accuracy on validation set") 152 | visualize2(histories3, labels=labels, type="val_loss", filename="batch_comparison", 153 | title="Comparison of loss on validation set") 154 | 155 | visualize2(histories3, labels=labels, type="accuracy", filename="batch_comparison2", 156 | title="Comparison of accuracy on training set", start_from=20) 157 | visualize2(histories3, labels=labels, type="loss", filename="batch_comparison2", 158 | title="Comparison of loss on training set", start_from=20) 159 | visualize2(histories3, labels=labels, type="val_accuracy", filename="batch_comparison2", 160 | title="Comparison of accuracy on validation set", start_from=20) 161 | visualize2(histories3, labels=labels, type="val_loss", filename="batch_comparison2", 162 | title="Comparison of loss on validation set", start_from=20) 163 | 164 | visualize2(histories3,labels=labels, type="lr", title="Changes of learning rate by callback",filename="batch_comparison") 165 | -------------------------------------------------------------------------------- /WB2.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import tensorflow as tf 4 | import os 5 | import random 6 | import matplotlib.pyplot as plt 7 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 8 | from tensorflow.keras.utils import to_categorical 9 | from tensorflow.keras.models import Sequential, load_model 10 | from tensorflow.keras.layers import MaxPool2D, Dense, Flatten, Dropout, Conv2D, BatchNormalization, Activation 11 | from keras.optimizers import SGD, Adam, RMSprop 12 | from keras import regularizers 13 | from keras.callbacks import ReduceLROnPlateau, EarlyStopping, History 14 | from util import visualize 15 | 16 | os.environ['TF_DETERMINISTIC_OPS'] = '1' 17 | # 18 | def reset_random_seeds(seed): 19 | os.environ['PYTHONHASHSEED'] = str(seed) 20 | tf.random.set_seed(seed) 21 | np.random.seed(seed) 22 | random.seed(seed) 23 | 24 | 25 | # params 26 | img_size = (32, 32) 27 | batch_size = 64 28 | num_classes = 10 29 | baseMapNum = 32 30 | weight_decay = 1e-4 31 | 32 | 33 | 34 | train_datagen2 = ImageDataGenerator( 35 | rescale=1. / 255, 36 | featurewise_center=False, 37 | samplewise_center=False, 38 | featurewise_std_normalization=False, 39 | samplewise_std_normalization=False, 40 | zca_whitening=False, 41 | rotation_range=15, 42 | width_shift_range=0.1, 43 | height_shift_range=0.1, 44 | horizontal_flip=True, 45 | vertical_flip=False 46 | ) 47 | 48 | test_datagen = ImageDataGenerator(rescale=1. 
/ 255) 49 | 50 | train_generator2 = train_datagen2.flow_from_directory( 51 | 'data/train', 52 | target_size=img_size, 53 | batch_size=batch_size, 54 | class_mode='categorical', 55 | seed=420) 56 | validation_generator2 = test_datagen.flow_from_directory( 57 | 'data/validation', 58 | target_size=img_size, 59 | batch_size=batch_size, 60 | class_mode='categorical', 61 | seed=420) 62 | 63 | reset_random_seeds(420) 64 | 65 | model3 = Sequential([ 66 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay), 67 | input_shape=(32, 32, 3)), 68 | Activation('relu'), 69 | BatchNormalization(), 70 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 71 | Activation('relu'), 72 | BatchNormalization(), 73 | MaxPool2D(pool_size=(2, 2)), 74 | Dropout(0.2), 75 | 76 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 77 | Activation('relu'), 78 | BatchNormalization(), 79 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 80 | Activation('relu'), 81 | BatchNormalization(), 82 | MaxPool2D(pool_size=(2, 2)), 83 | Dropout(0.3), 84 | 85 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 86 | Activation('relu'), 87 | BatchNormalization(), 88 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 89 | Activation('relu'), 90 | BatchNormalization(), 91 | MaxPool2D(pool_size=(2, 2)), 92 | Dropout(0.4), 93 | 94 | Flatten(), 95 | Dense(num_classes, activation='softmax') 96 | ]) 97 | 98 | # model3.summary() 99 | 100 | lrr = ReduceLROnPlateau( 101 | monitor='val_accuracy', # Metric to be measured 102 | factor=.5, # Factor by which learning rate will be reduced 103 | patience=10, # No. 
of epochs after which if there is no improvement in the val_acc, the learning rate is reduced 104 | min_lr=1e-4, 105 | verbose=1) 106 | 107 | opt_adam = Adam(learning_rate=0.0005, beta_1=0.9, beta_2=0.999) 108 | model3.compile(loss='categorical_crossentropy', 109 | optimizer=opt_adam, 110 | metrics=['accuracy']) 111 | history3 = model3.fit(train_generator2, epochs=200, validation_data=validation_generator2, callbacks=[lrr]) 112 | model3.evaluate(validation_generator2) 113 | # Adam: 114 | # loss - 0.57, accuracy - 0.872 115 | # with lrr: loss - 0.464, accuracy - 0.889 116 | # on 200 epochs: loss - 0.462, accuracy - 0.887 117 | pd.DataFrame(history3.history).plot() 118 | visualize([history3]) 119 | visualize([history3], type="accuracy") 120 | model3.save('models/wb_cnn_kaggle2_adam_with_lrr') 121 | model3.save('models/wb_cnn_kaggle2_adam_with_lrr_200') 122 | ################# 123 | reset_random_seeds(420) 124 | train_generator2 = train_datagen2.flow_from_directory( 125 | 'data/train', 126 | target_size=img_size, 127 | batch_size=batch_size, 128 | class_mode='categorical', 129 | seed=420) 130 | validation_generator2 = test_datagen.flow_from_directory( 131 | 'data/validation', 132 | target_size=img_size, 133 | batch_size=batch_size, 134 | class_mode='categorical', 135 | seed=420) 136 | 137 | model4 = Sequential([ 138 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay), 139 | input_shape=(32, 32, 3)), 140 | Activation('relu'), 141 | BatchNormalization(), 142 | Conv2D(baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 143 | Activation('relu'), 144 | BatchNormalization(), 145 | MaxPool2D(pool_size=(2, 2)), 146 | Dropout(0.2), 147 | 148 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 149 | Activation('relu'), 150 | BatchNormalization(), 151 | Conv2D(2 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 152 | Activation('relu'), 153 | BatchNormalization(), 154 | MaxPool2D(pool_size=(2, 2)), 155 | Dropout(0.3), 156 | 157 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 158 | Activation('relu'), 159 | BatchNormalization(), 160 | Conv2D(4 * baseMapNum, (3, 3), padding='same', kernel_regularizer=regularizers.l2(weight_decay)), 161 | Activation('relu'), 162 | BatchNormalization(), 163 | MaxPool2D(pool_size=(2, 2)), 164 | Dropout(0.4), 165 | 166 | Flatten(), 167 | Dense(num_classes, activation='softmax') 168 | ]) 169 | 170 | # model4.summary() 171 | 172 | lrr2 = ReduceLROnPlateau( 173 | monitor='val_accuracy', # Metric to be measured 174 | factor=.5, # Factor by which learning rate will be reduced 175 | patience=10, # No. 
of epochs after which if there is no improvement in the val_acc, the learning rate is reduced
176 |     min_lr=1e-4,
177 |     verbose=1)
178 | 
179 | opt_rms = RMSprop(learning_rate=0.0005, decay=1e-6)
180 | model4.compile(loss='categorical_crossentropy',
181 |                optimizer=opt_rms,
182 |                metrics=['accuracy'])
183 | history4 = model4.fit(train_generator2, epochs=125, validation_data=validation_generator2, callbacks=[lrr2])
184 | model4.evaluate(validation_generator2)
185 | # loss - 0.457, accuracy - 0.865
186 | pd.DataFrame(history4.history).plot()
187 | pd.DataFrame(history4.history["lr"]).plot()
188 | visualize([history3, history4], labels=["Adam", "RMSProp"], type="accuracy", filename="adam_vs_rmsp")
189 | visualize([history3, history4], labels=["Adam", "RMSProp"], type="val_accuracy", filename="adam_vs_rmsp")
190 | visualize([history3, history4], labels=["Adam", "RMSProp"], type="loss", filename="adam_vs_rmsp")
191 | model4.save('models/wb_cnn_kaggle2_rmsp_with_lrr')
192 | import pickle
193 | with open('hist3.pickle', 'wb') as file_pi:
194 |     pickle.dump(history3.history, file_pi)
195 | with open('hist4.pickle', 'wb') as file_pi:
196 |     pickle.dump(history4.history, file_pi)
197 | ####
198 | 
199 | sgd_opt = SGD(learning_rate=0.0005, momentum=0.9)
--------------------------------------------------------------------------------
/histories/resnet_1_history.csv:
--------------------------------------------------------------------------------
1 | loss,val_loss,accuracy,val_accuracy
2 | 1.2101426124572754,1.1396641731262207,0.5632749795913696,0.5924000144004822
3 | 1.2204368114471436,1.1441025733947754,0.5607249736785889,0.5916000008583069
4 | 1.2087291479110718,1.1393272876739502,0.5627750158309937,0.5942000150680542
5 | 1.2128491401672363,1.1434528827667236,0.5625249743461609,0.589900016784668
6 | 1.2066842317581177,1.1329272985458374,0.5690500140190125,0.5943999886512756
7 | 1.1966477632522583,1.1352174282073975,0.5689499974250793,0.5946999788284302
8 | 1.1884251832962036,1.1217656135559082,0.5697000026702881,0.6001999974250793
9 | 1.1916723251342773,1.1472185850143433,0.5734249949455261,0.5917999744415283
10 | 1.1872973442077637,1.1139570474624634,0.5731499791145325,0.6028000116348267
11 | 1.1791155338287354,1.1344513893127441,0.5745499730110168,0.5946000218391418
12 | 1.1744908094406128,1.0945817232131958,0.5780249834060669,0.6085000038146973
13 | 1.1731398105621338,1.1011306047439575,0.5777249932289124,0.6067000031471252
14 | 1.1757023334503174,1.1245145797729492,0.5780500173568726,0.6019999980926514
15 | 1.1704483032226562,1.1171629428863525,0.5798500180244446,0.6003000140190125
16 | 1.16738760471344,1.1118322610855103,0.5809749960899353,0.6065000295639038
17 | 1.1613373756408691,1.0915511846542358,0.5840749740600586,0.613099992275238
18 | 1.1549354791641235,1.092820167541504,0.5874000191688538,0.6129000186920166
19 | 1.1525076627731323,1.1079587936401367,0.5868499875068665,0.6110000014305115
20 | 1.1547670364379883,1.102347493171692,0.5878250002861023,0.6072999835014343
21 | 1.1477712392807007,1.0949194431304932,0.5878499746322632,0.6114000082015991
22 | 1.14706552028656,1.0841096639633179,0.5860999822616577,0.6172000169754028
23 | 1.1380999088287354,1.0664551258087158,0.5919250249862671,0.6252999901771545
24 | 1.1376031637191772,1.0743604898452759,0.5933250188827515,0.6205000281333923
25 | 1.1342538595199585,1.0835541486740112,0.5946750044822693,0.6141999959945679
26 | 1.1466283798217773,1.079634666442871,0.59232497215271,0.6162999868392944
27 | 
1.1303529739379883,1.064818024635315,0.595300018787384,0.6207000017166138 28 | 1.1220868825912476,1.061537742614746,0.6018249988555908,0.621999979019165 29 | 1.1188610792160034,1.0697742700576782,0.600849986076355,0.6211000084877014 30 | 1.1235359907150269,1.067443609237671,0.6018999814987183,0.6169000267982483 31 | 1.1140339374542236,1.0606913566589355,0.6039749979972839,0.621399998664856 32 | 1.109968662261963,1.0596739053726196,0.602774977684021,0.6234999895095825 33 | 1.1108633279800415,1.0403423309326172,0.6065999865531921,0.6304000020027161 34 | 1.1053166389465332,1.0458699464797974,0.6064000129699707,0.6259999871253967 35 | 1.1036417484283447,1.0356078147888184,0.6087250113487244,0.6320000290870667 36 | 1.1085351705551147,1.0457537174224854,0.6069999933242798,0.6290000081062317 37 | 1.1014715433120728,1.0562946796417236,0.6075000166893005,0.6263999938964844 38 | 1.0895096063613892,1.0676649808883667,0.6131250262260437,0.6216999888420105 39 | 1.0937232971191406,1.0553829669952393,0.6094250082969666,0.6258999705314636 40 | 1.0889365673065186,1.0369471311569214,0.6133000254631042,0.6287000179290771 41 | 1.0884867906570435,1.0347501039505005,0.615024983882904,0.6333000063896179 42 | 1.085413932800293,1.0678141117095947,0.6171500086784363,0.6202999949455261 43 | 1.0824452638626099,1.0336247682571411,0.617900013923645,0.6355999708175659 44 | 1.0832935571670532,1.04380464553833,0.6182000041007996,0.6302000284194946 45 | 1.0735809803009033,1.023335337638855,0.6176999807357788,0.635200023651123 46 | 1.0816400051116943,1.006738305091858,0.6159250140190125,0.6442999839782715 47 | 1.0704565048217773,1.0483449697494507,0.6221749782562256,0.6276000142097473 48 | 1.0699642896652222,1.0279021263122559,0.6200500130653381,0.633400022983551 49 | 1.061873197555542,1.0153567790985107,0.6223750114440918,0.6421999931335449 50 | 1.0642962455749512,1.0485893487930298,0.6241999864578247,0.6273999810218811 51 | 1.0612703561782837,1.0209345817565918,0.6254500150680542,0.6383000016212463 52 | 1.0545384883880615,1.028199315071106,0.62642502784729,0.6335999965667725 53 | 1.0525450706481934,1.0354427099227905,0.6259750127792358,0.6337000131607056 54 | 1.0487842559814453,1.0059455633163452,0.6300749778747559,0.6442999839782715 55 | 1.0530248880386353,1.0242211818695068,0.6279249787330627,0.6396999955177307 56 | 1.0490648746490479,1.0092581510543823,0.6279500126838684,0.6442999839782715 57 | 1.0479753017425537,1.0103751420974731,0.6332749724388123,0.6402000188827515 58 | 1.0372555255889893,0.9961321353912354,0.6344749927520752,0.6438999772071838 59 | 1.0385489463806152,1.015338659286499,0.6338250041007996,0.6427000164985657 60 | 1.032884955406189,1.0160343647003174,0.6364750266075134,0.6384000182151794 61 | 1.0327707529067993,1.0213007926940918,0.6346499919891357,0.6419000029563904 62 | 1.029114842414856,0.9970958232879639,0.63714998960495,0.6474000215530396 63 | 1.0246995687484741,0.993915319442749,0.63919997215271,0.6502000093460083 64 | 1.0235055685043335,0.9762275218963623,0.6407750248908997,0.6550999879837036 65 | 1.026240348815918,0.9783048629760742,0.6363999843597412,0.6523000001907349 66 | 1.0182030200958252,0.9892728328704834,0.6427000164985657,0.649399995803833 67 | 1.0153168439865112,1.005276083946228,0.6394500136375427,0.6498000025749207 68 | 1.01540207862854,0.9851669073104858,0.6432750225067139,0.6523000001907349 69 | 1.0112873315811157,0.9927542209625244,0.6435750126838684,0.6545000076293945 70 | 1.0122674703598022,0.9845278263092041,0.6451249718666077,0.6524999737739563 71 | 
1.013592004776001,0.9835063219070435,0.6447749733924866,0.6541000008583069 72 | 1.0062215328216553,0.979404866695404,0.6484000086784363,0.6567000150680542 73 | 1.0040218830108643,0.9974605441093445,0.6467999815940857,0.6496999859809875 74 | 1.0003690719604492,0.9897616505622864,0.6504250168800354,0.6528000235557556 75 | 1.0059959888458252,0.9958304762840271,0.645675003528595,0.652400016784668 76 | 0.9907951951026917,0.9731442332267761,0.6522250175476074,0.6572999954223633 77 | 1.0024077892303467,0.979875385761261,0.6496750116348267,0.6536999940872192 78 | 0.9916750192642212,0.9535403251647949,0.6538500189781189,0.6661999821662903 79 | 0.9925220012664795,0.9787374138832092,0.652400016784668,0.6585999727249146 80 | 0.9871850609779358,0.9763573408126831,0.6519749760627747,0.6571999788284302 81 | 0.9803065657615662,0.9607155323028564,0.654574990272522,0.6629999876022339 82 | 0.9846062660217285,0.9516174793243408,0.6536250114440918,0.6647999882698059 83 | 0.9849585890769958,0.9743540287017822,0.6541249752044678,0.656499981880188 84 | 0.982707142829895,0.9632132649421692,0.6530500054359436,0.6611999869346619 85 | 0.9778487086296082,0.9432426691055298,0.6576250195503235,0.66839998960495 86 | 0.9739691615104675,0.9738360643386841,0.656000018119812,0.6592000126838684 87 | 0.9728808403015137,0.9486117959022522,0.6583499908447266,0.6680999994277954 88 | 0.97186678647995,0.9602599740028381,0.6625249981880188,0.6639000177383423 89 | 0.9668326377868652,0.9587755799293518,0.659850001335144,0.666700005531311 90 | 0.9703467488288879,0.9476243257522583,0.6588749885559082,0.6704999804496765 91 | 0.9649512767791748,0.9526920914649963,0.6602749824523926,0.6662999987602234 92 | 0.9562476277351379,0.9643974900245667,0.6655499935150146,0.6664999723434448 93 | 0.9592558741569519,0.9385236501693726,0.661549985408783,0.6744999885559082 94 | 0.9559096693992615,0.9288598895072937,0.6650999784469604,0.6751000285148621 95 | 0.9469059705734253,0.9300585985183716,0.6672750115394592,0.6784999966621399 96 | 0.9517883062362671,0.9430265426635742,0.6672750115394592,0.6747999787330627 97 | 0.9485652446746826,0.917212724685669,0.6658999919891357,0.6825000047683716 98 | 0.9482266306877136,0.951310932636261,0.668524980545044,0.6693000197410583 99 | 0.9450908303260803,0.9260925054550171,0.6707000136375427,0.6769999861717224 100 | 0.9429183602333069,0.9261360168457031,0.6715750098228455,0.6758000254631042 101 | 0.9404078125953674,0.924347460269928,0.6704249978065491,0.676800012588501 102 | -------------------------------------------------------------------------------- /histories/resnet_2_history.csv: -------------------------------------------------------------------------------- 1 | loss,val_loss,accuracy,val_accuracy 2 | 0.9330421686172485,0.9238068461418152,0.6711999773979187,0.680400013923645 3 | 0.9348883032798767,0.9319629073143005,0.6715250015258789,0.6769999861717224 4 | 0.931929886341095,0.9147624969482422,0.6746500134468079,0.6804999709129333 5 | 0.9352202415466309,0.9238564372062683,0.6743999719619751,0.6812000274658203 6 | 0.9372321367263794,0.930320143699646,0.6728000044822693,0.676800012588501 7 | 0.9280036091804504,0.9228304624557495,0.6737250089645386,0.6793000102043152 8 | 0.9285340905189514,0.9289296865463257,0.6767749786376953,0.6790000200271606 9 | 0.9263155460357666,0.9410143494606018,0.6757500171661377,0.6714000105857849 10 | 0.921616792678833,0.9160926938056946,0.6777750253677368,0.6823999881744385 11 | 0.9261131882667542,0.9263017773628235,0.6776999831199646,0.6794999837875366 12 | 
0.9175222516059875,0.9215880632400513,0.6791499853134155,0.6816999912261963 13 | 0.9212654232978821,0.9348101615905762,0.6767749786376953,0.678600013256073 14 | 0.923642098903656,0.9190251231193542,0.6789000034332275,0.6830000281333923 15 | 0.9215266704559326,0.9224401116371155,0.6791499853134155,0.6826000213623047 16 | 0.9173492193222046,0.9349305629730225,0.6808000206947327,0.679099977016449 17 | 0.9145882725715637,0.9079980254173279,0.6800249814987183,0.6888999938964844 18 | 0.9144461750984192,0.9058870077133179,0.6827750205993652,0.6881999969482422 19 | 0.9001711010932922,0.9034927487373352,0.6869750022888184,0.6891000270843506 20 | 0.9048855304718018,0.8942238092422485,0.6838750243186951,0.6895999908447266 21 | 0.9047791957855225,0.9062064290046692,0.6830000281333923,0.6854000091552734 22 | 0.898794949054718,0.9048668146133423,0.6879249811172485,0.6891999840736389 23 | 0.9026979207992554,0.9024352431297302,0.6863750219345093,0.6906999945640564 24 | 0.8914088010787964,0.9232890605926514,0.6887249946594238,0.685699999332428 25 | 0.8957116007804871,0.8975625038146973,0.6877750158309937,0.6916999816894531 26 | 0.886786937713623,0.9152808785438538,0.6902750134468079,0.6866000294685364 27 | 0.8908108472824097,0.8971341848373413,0.6890249848365784,0.6919999718666077 28 | 0.8859034180641174,0.9032403230667114,0.6895250082015991,0.6901000142097473 29 | 0.8893080353736877,0.8857943415641785,0.6916000247001648,0.6977999806404114 30 | 0.8795841932296753,0.8902367353439331,0.6924499869346619,0.6958000063896179 31 | 0.8864378929138184,0.8963705897331238,0.6916999816894531,0.6909999847412109 32 | 0.8822692632675171,0.894480288028717,0.694350004196167,0.694599986076355 33 | 0.8849475383758545,0.8862073421478271,0.690850019454956,0.6940000057220459 34 | 0.8829877972602844,0.8818804621696472,0.694225013256073,0.6967999935150146 35 | 0.8781960010528564,0.8863861560821533,0.6938750147819519,0.6980999708175659 36 | 0.8753093481063843,0.8990858197212219,0.6948249936103821,0.6922000050544739 37 | 0.8743244409561157,0.8626548051834106,0.6961749792098999,0.7019000053405762 38 | 0.8755132555961609,0.874384880065918,0.694225013256073,0.7001000046730042 39 | 0.8677115440368652,0.879676878452301,0.6964499950408936,0.6988000273704529 40 | 0.870231568813324,0.8804434537887573,0.6958000063896179,0.6955999732017517 41 | 0.8675612211227417,0.8753660917282104,0.6958500146865845,0.699400007724762 42 | 0.8688375949859619,0.8861280083656311,0.6955999732017517,0.6919000148773193 43 | 0.8622246980667114,0.904915452003479,0.696524977684021,0.6894999742507935 44 | 0.8556557893753052,0.8754419088363647,0.7017999887466431,0.7020000219345093 45 | 0.8611271381378174,0.8630355596542358,0.7013000249862671,0.7028999924659729 46 | 0.8565011024475098,0.8644095659255981,0.7022250294685364,0.7031000256538391 47 | 0.8553670644760132,0.871501088142395,0.7024000287055969,0.7016000151634216 48 | 0.8503556847572327,0.880577564239502,0.7039499878883362,0.6955000162124634 49 | 0.8481478691101074,0.856661319732666,0.7054250240325928,0.7052000164985657 50 | 0.8455957770347595,0.8606154322624207,0.7032250165939331,0.7042999863624573 51 | 0.8475669026374817,0.8823304772377014,0.7029749751091003,0.698199987411499 52 | 0.8483281135559082,0.8730161190032959,0.706250011920929,0.6980999708175659 53 | 0.8411851525306702,0.8881877064704895,0.7066749930381775,0.6959999799728394 54 | 0.8388422131538391,0.8515902161598206,0.7075250148773193,0.7080000042915344 55 | 0.8434527516365051,0.851087212562561,0.7055500149726868,0.7032999992370605 56 | 
0.8457953929901123,0.8667382597923279,0.7064499855041504,0.7021999955177307 57 | 0.8344652056694031,0.8592128753662109,0.7088000178337097,0.7049999833106995 58 | 0.840194046497345,0.862873911857605,0.7099999785423279,0.7031999826431274 59 | 0.8319299817085266,0.8604199290275574,0.7109749913215637,0.704800009727478 60 | 0.8303574919700623,0.8757827281951904,0.7128999829292297,0.7009999752044678 61 | 0.8288384675979614,0.8466062545776367,0.7086250185966492,0.7074999809265137 62 | 0.8376484513282776,0.8651379942893982,0.7119250297546387,0.7003999948501587 63 | 0.8311750888824463,0.8681360483169556,0.7133499979972839,0.7013000249862671 64 | 0.8314615488052368,0.8536320328712463,0.7111250162124634,0.7057999968528748 65 | 0.8203608393669128,0.8558923006057739,0.7148749828338623,0.7078999876976013 66 | 0.8289450407028198,0.8612234592437744,0.7125999927520752,0.7042999863624573 67 | 0.8172776103019714,0.8460860252380371,0.7149749994277954,0.708899974822998 68 | 0.8204391598701477,0.876792311668396,0.7153499722480774,0.7027999758720398 69 | 0.8213480710983276,0.878839373588562,0.712975025177002,0.6995000243186951 70 | 0.8237664103507996,0.8453223705291748,0.7136250138282776,0.7107999920845032 71 | 0.820655345916748,0.8421511650085449,0.7153000235557556,0.7135999798774719 72 | 0.8112720251083374,0.8335950374603271,0.7196999788284302,0.7142000198364258 73 | 0.8110024929046631,0.8433663845062256,0.7170249819755554,0.7142000198364258 74 | 0.8029771447181702,0.8640860319137573,0.7204499840736389,0.7038999795913696 75 | 0.8074374794960022,0.8447147607803345,0.7188000082969666,0.7078999876976013 76 | 0.8187257647514343,0.845083475112915,0.7174000144004822,0.7110999822616577 77 | 0.8115153312683105,0.8359788060188293,0.7200000286102295,0.7146000266075134 78 | 0.8036967515945435,0.8245985507965088,0.7200999855995178,0.7149999737739563 79 | 0.8060058355331421,0.8492203950881958,0.7210999727249146,0.7095000147819519 80 | 0.8087990283966064,0.8347921967506409,0.7210249900817871,0.7156000137329102 81 | 0.7979586720466614,0.848019540309906,0.722000002861023,0.7123000025749207 82 | 0.7993918657302856,0.8326030373573303,0.7197499871253967,0.7124999761581421 83 | 0.7925154566764832,0.8565630912780762,0.723924994468689,0.7103999853134155 84 | 0.7932737469673157,0.8418372273445129,0.725350022315979,0.7159000039100647 85 | 0.7955723404884338,0.8380597829818726,0.723550021648407,0.715399980545044 86 | 0.7901369333267212,0.8192576169967651,0.7246249914169312,0.7193999886512756 87 | 0.7921425104141235,0.8208034038543701,0.7243499755859375,0.7196000218391418 88 | 0.7863297462463379,0.8329820036888123,0.7275750041007996,0.7179999947547913 89 | 0.787583589553833,0.829748809337616,0.7289999723434448,0.7149999737739563 90 | 0.7834802269935608,0.8483622074127197,0.7288749814033508,0.7143999934196472 91 | 0.7904915809631348,0.8342254161834717,0.7276750206947327,0.7129999995231628 92 | 0.7802822589874268,0.8314283490180969,0.7280499935150146,0.7156999707221985 93 | 0.7784367799758911,0.8313007950782776,0.7287999987602234,0.71670001745224 94 | 0.7813178300857544,0.8405784964561462,0.7308499813079834,0.7103999853134155 95 | 0.7722175121307373,0.8283582925796509,0.7309499979019165,0.7175999879837036 96 | 0.7760322690010071,0.8444297909736633,0.732200026512146,0.7120000123977661 97 | 0.7784382104873657,0.8211104273796082,0.7301999926567078,0.7208999991416931 98 | 0.7707101106643677,0.8175442218780518,0.7328749895095825,0.7246000170707703 99 | 0.7680756449699402,0.8217161893844604,0.7329999804496765,0.7235000133514404 100 | 
0.7654728293418884,0.8118424415588379,0.734375,0.7250000238418579 101 | 0.7642310857772827,0.8135232925415039,0.7363250255584717,0.7249000072479248 102 | -------------------------------------------------------------------------------- /histories/resnet_0_history.csv: -------------------------------------------------------------------------------- 1 | loss,val_loss,accuracy,val_accuracy 2 | 2.0303540229797363,27.35186195373535,0.21562500298023224,0.13689999282360077 3 | 1.9006454944610596,1.7385737895965576,0.2404250055551529,0.29420000314712524 4 | 1.774049162864685,1.8496809005737305,0.27512499690055847,0.2694999873638153 5 | 1.707608938217163,2.116150379180908,0.31472501158714294,0.2727000117301941 6 | 1.8001149892807007,255.91107177734375,0.2918500006198883,0.13189999759197235 7 | 1.9224823713302612,1.8037898540496826,0.25482499599456787,0.27300000190734863 8 | 1.8618717193603516,1.7657538652420044,0.26637500524520874,0.2833999991416931 9 | 1.8359158039093018,1.7364171743392944,0.27322500944137573,0.299699991941452 10 | 1.8175586462020874,1.7229384183883667,0.2798500061035156,0.304500013589859 11 | 1.8045353889465332,1.7116200923919678,0.28459998965263367,0.30889999866485596 12 | 1.7818429470062256,1.6992884874343872,0.28927499055862427,0.3111000061035156 13 | 1.7735674381256104,1.6847609281539917,0.2934750020503998,0.32120001316070557 14 | 1.7638741731643677,1.6775462627410889,0.296999990940094,0.32429999113082886 15 | 1.749773383140564,1.6684222221374512,0.30434998869895935,0.32899999618530273 16 | 1.7402390241622925,1.6558641195297241,0.30855000019073486,0.334199994802475 17 | 1.7226537466049194,1.6567094326019287,0.3160499930381775,0.3370000123977661 18 | 1.7187060117721558,1.6371105909347534,0.3192000091075897,0.3452000021934509 19 | 1.7120310068130493,1.6292327642440796,0.32177498936653137,0.3499000072479248 20 | 1.697586178779602,1.6227210760116577,0.32737499475479126,0.3555000126361847 21 | 1.6854952573776245,1.614610195159912,0.33489999175071716,0.36250001192092896 22 | 1.6767419576644897,1.6083248853683472,0.3402999937534332,0.36570000648498535 23 | 1.6659284830093384,1.5922666788101196,0.3429250121116638,0.37450000643730164 24 | 1.6614571809768677,1.5857876539230347,0.34552499651908875,0.3804999887943268 25 | 1.654497504234314,1.5866514444351196,0.35272499918937683,0.3799000084400177 26 | 1.650259017944336,1.577663779258728,0.35705000162124634,0.383899986743927 27 | 1.637202262878418,1.5646045207977295,0.3640500009059906,0.38350000977516174 28 | 1.6328035593032837,1.56236732006073,0.36570000648498535,0.38850000500679016 29 | 1.6233140230178833,1.5501772165298462,0.3702000081539154,0.396699994802475 30 | 1.6107045412063599,1.5449440479278564,0.3774999976158142,0.39890000224113464 31 | 1.6108777523040771,1.539954662322998,0.38032498955726624,0.40149998664855957 32 | 1.6041702032089233,1.5308856964111328,0.3827250003814697,0.40950000286102295 33 | 1.5939372777938843,1.5214117765426636,0.3874500095844269,0.4174000024795532 34 | 1.5876470804214478,1.5161207914352417,0.39054998755455017,0.41690000891685486 35 | 1.5792421102523804,1.5025763511657715,0.39457499980926514,0.42660000920295715 36 | 1.5732866525650024,1.5014315843582153,0.3979499936103821,0.4253000020980835 37 | 1.562103033065796,1.4855999946594238,0.40560001134872437,0.4320000112056732 38 | 1.5584263801574707,1.4843182563781738,0.4054250121116638,0.43470001220703125 39 | 1.5477126836776733,1.487095832824707,0.4150499999523163,0.43149998784065247 40 | 
1.543570637702942,1.4689348936080933,0.4162999987602234,0.4406999945640564
1.5370869636535645,1.4621248245239258,0.42067500948905945,0.44130000472068787
1.5266153812408447,1.4469798803329468,0.42457500100135803,0.45159998536109924
1.5209757089614868,1.4299105405807495,0.4281249940395355,0.459199994802475
1.5120432376861572,1.429327368736267,0.43209999799728394,0.4577000141143799
1.507556676864624,1.4310227632522583,0.43412500619888306,0.45410001277923584
1.4976950883865356,1.4163472652435303,0.4374749958515167,0.4643999934196472
1.4980814456939697,1.4206433296203613,0.4401249885559082,0.46380001306533813
1.4862549304962158,1.3960119485855103,0.44487500190734863,0.4758000075817108
1.4744288921356201,1.4023648500442505,0.45087501406669617,0.47209998965263367
1.4744420051574707,1.3977504968643188,0.45022499561309814,0.47540000081062317
1.4647715091705322,1.3828599452972412,0.4530999958515167,0.477400004863739
1.4606963396072388,1.376021146774292,0.45557498931884766,0.48170000314712524
1.451230525970459,1.3858646154403687,0.45570001006126404,0.47769999504089355
1.4436655044555664,1.3767287731170654,0.462799996137619,0.4860999882221222
1.4387669563293457,1.362397313117981,0.4634000062942505,0.4887999892234802
1.4298090934753418,1.3494869470596313,0.46939998865127563,0.49630001187324524
1.4336819648742676,1.350818395614624,0.46334999799728394,0.49729999899864197
1.4226762056350708,1.341893196105957,0.4719499945640564,0.4966999888420105
1.4153931140899658,1.3552489280700684,0.4730750024318695,0.4968999922275543
1.412455439567566,1.3180522918701172,0.47657498717308044,0.5077999830245972
1.4061129093170166,1.329653024673462,0.4794749915599823,0.505299985408783
1.4010097980499268,1.3220291137695312,0.48385000228881836,0.5095000267028809
1.3886501789093018,1.3154301643371582,0.48820000886917114,0.5110999941825867
1.387098789215088,1.3070298433303833,0.4878000020980835,0.5146999955177307
1.384861707687378,1.308907151222229,0.4882749915122986,0.5134999752044678
1.3803173303604126,1.312949776649475,0.4923250079154968,0.5084999799728394
1.3776003122329712,1.3026795387268066,0.49047499895095825,0.5144000053405762
1.3697798252105713,1.2935744524002075,0.4928250014781952,0.5236999988555908
1.364833116531372,1.2790416479110718,0.4984999895095825,0.5271000266075134
1.3602079153060913,1.2790616750717163,0.49822500348091125,0.526199996471405
1.3464646339416504,1.2733910083770752,0.5025500059127808,0.5325999855995178
1.3428618907928467,1.2664358615875244,0.5072500109672546,0.53329998254776
1.3462741374969482,1.2852650880813599,0.5048499703407288,0.5300999879837036
1.3344076871871948,1.2610111236572266,0.5088499784469604,0.538100004196167
1.33028244972229,1.2658567428588867,0.5127500295639038,0.5365999937057495
1.3281949758529663,1.2385616302490234,0.5109750032424927,0.5458999872207642
1.3258885145187378,1.2535220384597778,0.5143250226974487,0.5454999804496765
1.3188997507095337,1.2543275356292725,0.5157999992370605,0.5444999933242798
1.3158031702041626,1.2452341318130493,0.518625020980835,0.5526000261306763
1.3090698719024658,1.2426273822784424,0.5247750282287598,0.5532000064849854
1.3077551126480103,1.2422932386398315,0.5247750282287598,0.5504999756813049
1.3013277053833008,1.223591685295105,0.5243750214576721,0.5595999956130981
1.2957168817520142,1.222917914390564,0.5274749994277954,0.5565999746322632
1.291335105895996,1.2042770385742188,0.5302000045776367,0.5665000081062317
1.290436029434204,1.2142671346664429,0.530174970626831,0.560699999332428
1.2783589363098145,1.1985760927200317,0.5354999899864197,0.5703999996185303
1.2752753496170044,1.203628659248352,0.5351499915122986,0.5679000020027161
1.2735247611999512,1.1959987878799438,0.5384500026702881,0.572700023651123
1.2673660516738892,1.1961181163787842,0.5412999987602234,0.5723999738693237
1.2618855237960815,1.1909922361373901,0.541100025177002,0.5690000057220459
1.2607331275939941,1.1937576532363892,0.5447999835014343,0.5680999755859375
1.250360131263733,1.1733676195144653,0.5476499795913696,0.5770999789237976
1.2519021034240723,1.183740258216858,0.545799970626831,0.57669997215271
1.2506630420684814,1.1699124574661255,0.5503000020980835,0.5752000212669373
1.2437212467193604,1.1595937013626099,0.5518500208854675,0.5828999876976013
1.2450882196426392,1.162843108177185,0.5524749755859375,0.5841000080108643
1.234008550643921,1.1432416439056396,0.5518249869346619,0.5924000144004822
1.2346025705337524,1.1626665592193604,0.5562750101089478,0.5837000012397766
1.2323757410049438,1.1437773704528809,0.5552250146865845,0.5892000198364258
1.2298004627227783,1.1693394184112549,0.5584750175476074,0.5788000226020813
1.2247025966644287,1.1277118921279907,0.5605000257492065,0.59579998254776
--------------------------------------------------------------------------------
/histories/vgg_2_history.csv:
--------------------------------------------------------------------------------
loss,val_loss,accuracy,val_accuracy
0.3487599194049835,0.4328099489212036,0.8782250285148621,0.8597000241279602
0.3463471531867981,0.435744047164917,0.8776500225067139,0.8590999841690063
0.35154256224632263,0.43252915143966675,0.8782250285148621,0.8597000241279602
0.34881746768951416,0.4345245659351349,0.8759499788284302,0.859499990940094
0.3506227135658264,0.4316312372684479,0.8766999840736389,0.8598999977111816
0.3504045009613037,0.43125849962234497,0.87642502784729,0.8616999983787537
0.35006654262542725,0.430935263633728,0.8763499855995178,0.8608999848365784
0.34725990891456604,0.43154820799827576,0.8780750036239624,0.8618000149726868
0.34812718629837036,0.4309324324131012,0.8782749772071838,0.8605999946594238
0.3492105007171631,0.43405386805534363,0.87704998254776,0.859499990940094
0.3448878228664398,0.43615591526031494,0.8794000148773193,0.8604000210762024
0.3468034267425537,0.4308224022388458,0.878000020980835,0.8618000149726868
0.34344106912612915,0.4365176856517792,0.880050003528595,0.8600999712944031
0.34554779529571533,0.4316975474357605,0.878250002861023,0.8611999750137329
0.3469247817993164,0.4293690025806427,0.8765000104904175,0.8611999750137329
0.3477802872657776,0.43135496973991394,0.8774999976158142,0.8608999848365784
0.3466441333293915,0.4292874038219452,0.8786749839782715,0.8618000149726868
0.3403412401676178,0.43615972995758057,0.880774974822998,0.8607000112533569
0.34556567668914795,0.42804229259490967,0.8796250224113464,0.8622000217437744
0.347243070602417,0.4345337748527527,0.8766250014305115,0.8604999780654907
0.3403061032295227,0.4326479434967041,0.8794999718666077,0.8610000014305115
0.3438318967819214,0.43678104877471924,0.878849983215332,0.8593000173568726
0.3372225761413574,0.4355672001838684,0.8807250261306763,0.8607000112533569
0.34436067938804626,0.436497300863266,0.8791499733924866,0.8593000173568726
0.3411710262298584,0.4341549873352051,0.8794749975204468,0.8615000247955322
0.343059241771698,0.43140068650245667,0.8802250027656555,0.8611999750137329
0.3453708291053772,0.43482378125190735,0.8796749711036682,0.8607000112533569
0.34461426734924316,0.4295651912689209,0.8796749711036682,0.8616999983787537
0.341713547706604,0.4319664537906647,0.8796749711036682,0.8615999817848206
0.3416454493999481,0.4304140508174896,0.8797749876976013,0.8611000180244446
0.3403301537036896,0.42958301305770874,0.8794749975204468,0.8633000254631042
0.34122925996780396,0.43094921112060547,0.8786249756813049,0.8611999750137329
0.34286078810691833,0.4342610836029053,0.8792999982833862,0.8604999780654907
0.343135267496109,0.4325360357761383,0.8803250193595886,0.8611000180244446
0.3411981463432312,0.43021854758262634,0.8812749981880188,0.8616999983787537
0.34940558671951294,0.4330263137817383,0.8779249787330627,0.8601999878883362
0.3380047082901001,0.4326179325580597,0.8795250058174133,0.8611999750137329
0.3426736891269684,0.43149039149284363,0.8794999718666077,0.861299991607666
0.34261125326156616,0.43145614862442017,0.87847501039505,0.8605999946594238
0.3419612944126129,0.4320261776447296,0.879925012588501,0.8603000044822693
0.3415948450565338,0.4311341941356659,0.8797000050544739,0.8608999848365784
0.3422967493534088,0.43396690487861633,0.8801000118255615,0.8610000014305115
0.3375178873538971,0.43194580078125,0.8823249936103821,0.8607000112533569
0.3343975841999054,0.43102574348449707,0.8833249807357788,0.8622000217437744
0.3391934931278229,0.42937833070755005,0.8809000253677368,0.8615999817848206
0.33723416924476624,0.43052220344543457,0.8809750080108643,0.8615999817848206
0.3371462821960449,0.4342532157897949,0.8826749920845032,0.8608999848365784
0.33785316348075867,0.4354120194911957,0.8834999799728394,0.8604000210762024
0.3374647796154022,0.43045535683631897,0.880774974822998,0.861299991607666
0.33998721837997437,0.4313664138317108,0.8799750208854675,0.8616999983787537
0.33876273036003113,0.432559072971344,0.8813250064849854,0.86080002784729
0.3426930010318756,0.43294161558151245,0.8799999952316284,0.8615000247955322
0.33863261342048645,0.4290676712989807,0.8806750178337097,0.8618000149726868
0.33693206310272217,0.4313330054283142,0.8818249702453613,0.8611000180244446
0.3329131007194519,0.43049824237823486,0.882224977016449,0.8619999885559082
0.33322739601135254,0.43204912543296814,0.8827999830245972,0.8611000180244446
0.3361699879169464,0.43198058009147644,0.8830749988555908,0.8616999983787537
0.3390246033668518,0.426205039024353,0.8814250230789185,0.8636999726295471
0.3321557343006134,0.43356192111968994,0.8824999928474426,0.8604000210762024
0.3348837196826935,0.43467748165130615,0.883525013923645,0.8608999848365784
0.33621495962142944,0.4352492094039917,0.8822000026702881,0.8610000014305115
0.3323192894458771,0.428637832403183,0.8823999762535095,0.8608999848365784
0.335280179977417,0.43181705474853516,0.8815500140190125,0.862500011920929
0.33285120129585266,0.4255794286727905,0.8828499913215637,0.8632000088691711
0.33609503507614136,0.432475209236145,0.8821250200271606,0.8621000051498413
0.33467864990234375,0.4266688823699951,0.8807250261306763,0.8626000285148621
0.3350279927253723,0.4338037669658661,0.8808249831199646,0.8615000247955322
0.3341355323791504,0.435808926820755,0.8814499974250793,0.8601999878883362
0.3313731253147125,0.42928504943847656,0.8848000168800354,0.8622999787330627
0.33486154675483704,0.4259217381477356,0.8817999958992004,0.8623999953269958
0.3302859961986542,0.428112655878067,0.884024977684021,0.862500011920929
0.33240941166877747,0.43307825922966003,0.8826749920845032,0.8622000217437744
0.3331948518753052,0.4322581887245178,0.8830249905586243,0.8618999719619751
0.3297020494937897,0.428037166595459,0.8852249979972839,0.8626999855041504
0.33001723885536194,0.4372085928916931,0.8840500116348267,0.8619999885559082
0.3326079249382019,0.42869916558265686,0.8817999958992004,0.8634999990463257
0.33076193928718567,0.42746278643608093,0.8836249709129333,0.8636999726295471
0.3280826210975647,0.43089085817337036,0.883650004863739,0.8629000186920166
0.32743218541145325,0.43189537525177,0.8847249746322632,0.8632000088691711
0.3258691132068634,0.43036386370658875,0.8844749927520752,0.8626000285148621
0.3331778049468994,0.4311006963253021,0.8839750289916992,0.8629000186920166
0.3279581665992737,0.43506455421447754,0.8845499753952026,0.8626999855041504
0.3273901045322418,0.4297363758087158,0.8846750259399414,0.8633999824523926
0.33017757534980774,0.4319859445095062,0.8844249844551086,0.8629999756813049
0.3284282982349396,0.43141263723373413,0.884975016117096,0.862500011920929
0.32938894629478455,0.4216129779815674,0.8854749798774719,0.8677999973297119
0.33016717433929443,0.4330494701862335,0.8830500245094299,0.8618999719619751
0.32595008611679077,0.4306662976741791,0.884024977684021,0.863099992275238
0.3275040090084076,0.43369820713996887,0.8844000101089478,0.8623999953269958
0.32911285758018494,0.42749908566474915,0.8851000070571899,0.8636999726295471
0.32827383279800415,0.42864540219306946,0.8852999806404114,0.8636999726295471
0.3245316445827484,0.4283447861671448,0.8859750032424927,0.8646000027656555
0.327090859413147,0.42838090658187866,0.8852750062942505,0.8626000285148621
0.322091668844223,0.4319312572479248,0.8866000175476074,0.8628000020980835
0.3241378664970398,0.42945316433906555,0.8851249814033508,0.8633000254631042
0.32687318325042725,0.4315292537212372,0.8841750025749207,0.8626999855041504
0.32764002680778503,0.4258285164833069,0.8850749731063843,0.8655999898910522
0.32764336466789246,0.4280397891998291,0.8856250047683716,0.8647000193595886
0.32727620005607605,0.4300200641155243,0.8833500146865845,0.8634999990463257
0.3255157768726349,0.426129013299942,0.885450005531311,0.8652999997138977
--------------------------------------------------------------------------------
/histories/vgg_0_history.csv:
--------------------------------------------------------------------------------
loss,val_loss,accuracy,val_accuracy
2.0123894214630127,1.346420168876648,0.2632249891757965,0.48919999599456787
1.1736177206039429,0.81804358959198,0.58160001039505,0.722000002861023
0.9049549698829651,0.7021971940994263,0.6887249946594238,0.7616999745368958
0.7844120860099792,0.6909930109977722,0.733299970626831,0.7641000151634216
0.7166926264762878,0.5971537828445435,0.7538250088691711,0.7990000247955322
0.6624449491500854,0.5832552909851074,0.7728750109672546,0.7986999750137329
0.6236968040466309,0.6018316745758057,0.7853249907493591,0.8034999966621399
0.5979167222976685,0.5320319533348083,0.7938500046730042,0.8224999904632568
0.5623876452445984,0.6450626850128174,0.8059499859809875,0.7915999889373779
0.5368125438690186,0.5261845588684082,0.815850019454956,0.8208000063896179
0.5093357563018799,0.5553314089775085,0.8234750032424927,0.8217999935150146
0.5555510520935059,0.4766421318054199,0.8140000104904175,0.8385999798774719
0.45389580726623535,0.4703858494758606,0.8447750210762024,0.8421000242233276
0.4447934627532959,0.46662425994873047,0.8446750044822693,0.8435999751091003
0.4401317238807678,0.4577596187591553,0.847350001335144,0.8474000096321106
0.43235063552856445,0.457843154668808,0.8490250110626221,0.8485999703407288
0.4281435012817383,0.4546654224395752,0.8507750034332275,0.8496000170707703
0.42912057042121887,0.45348140597343445,0.8515750169754028,0.8496999740600586
0.4238818287849426,0.45138299465179443,0.8520500063896179,0.8500999808311462
0.41972047090530396,0.4525184631347656,0.8543750047683716,0.8499000072479248
0.42328667640686035,0.45058295130729675,0.8525500297546387,0.850600004196167
0.41924336552619934,0.4515259265899658,0.8547499775886536,0.8507000207901001
0.41830411553382874,0.45352786779403687,0.8540999889373779,0.8503000140190125
0.4192233383655548,0.4529915153980255,0.8536750078201294,0.8500999808311462
0.4172050356864929,0.45010650157928467,0.8559250235557556,0.8503000140190125
0.40997764468193054,0.4486948251724243,0.8569250106811523,0.8507000207901001
0.41277867555618286,0.4496476948261261,0.8579000234603882,0.8513000011444092
0.4076225161552429,0.45195356011390686,0.858299970626831,0.8511000275611877
0.408838152885437,0.44984564185142517,0.8581749796867371,0.8508999943733215
0.410201758146286,0.4488033056259155,0.8562250137329102,0.8503999710083008
0.4087977409362793,0.44758450984954834,0.8579000234603882,0.8510000109672546
0.4104864299297333,0.44677701592445374,0.8575500249862671,0.8507000207901001
0.40853145718574524,0.4493117332458496,0.8579249978065491,0.8503999710083008
0.4066947400569916,0.4495375454425812,0.8574000000953674,0.8507000207901001
0.41254252195358276,0.4458679258823395,0.8540499806404114,0.8526999950408936
0.40886685252189636,0.443386048078537,0.8576499819755554,0.8532999753952026
0.40592023730278015,0.44468533992767334,0.8587999939918518,0.8517000079154968
0.40983980894088745,0.447723925113678,0.8560749888420105,0.8521999716758728
0.40490642189979553,0.446574866771698,0.8568999767303467,0.8518999814987183
0.4090573787689209,0.4485350549221039,0.8571249842643738,0.852400004863739
0.406319797039032,0.4438488781452179,0.8577749729156494,0.8528000116348267
0.402798593044281,0.4446852505207062,0.8575500249862671,0.8521999716758728
0.4029969871044159,0.44510766863822937,0.8597750067710876,0.8519999980926514
0.4014189541339874,0.4455316364765167,0.8598750233650208,0.8528000116348267
0.4027453064918518,0.44722285866737366,0.8585249781608582,0.8518999814987183
0.4046024680137634,0.44295138120651245,0.8588250279426575,0.8526999950408936
0.40026339888572693,0.4439351558685303,0.8600999712944031,0.8525999784469604
0.401944637298584,0.44394707679748535,0.8611500263214111,0.8535000085830688
0.4003002643585205,0.44443798065185547,0.8616750240325928,0.8531000018119812
0.3988563120365143,0.4473503530025482,0.8598750233650208,0.8529000282287598
0.39948514103889465,0.44379153847694397,0.8607500195503235,0.8531000018119812
0.3983980119228363,0.44182655215263367,0.8593500256538391,0.8532999753952026
0.39640572667121887,0.44475454092025757,0.8636999726295471,0.8529000282287598
0.39866364002227783,0.446429580450058,0.8617249727249146,0.8531000018119812
0.3922394812107086,0.44310760498046875,0.8630250096321106,0.8536999821662903
0.39274364709854126,0.44176411628723145,0.8600999712944031,0.8539000153541565
0.3976094424724579,0.4413151741027832,0.8622999787330627,0.8540999889373779
0.39684030413627625,0.441517174243927,0.8616999983787537,0.8553000092506409
0.3943806290626526,0.44009169936180115,0.863099992275238,0.8543999791145325
0.3972143530845642,0.4448864161968231,0.8600749969482422,0.8539000153541565
0.3915275037288666,0.44118788838386536,0.8616499900817871,0.8553000092506409
0.3958324193954468,0.4421936869621277,0.8613250255584717,0.8550999760627747
0.3965901732444763,0.4428464472293854,0.8632749915122986,0.855400025844574
0.3964681029319763,0.44145962595939636,0.8611249923706055,0.8543000221252441
0.38856273889541626,0.44537705183029175,0.8662750124931335,0.8543000221252441
0.39116889238357544,0.4393374025821686,0.8626999855041504,0.8553000092506409
0.3916560411453247,0.44308212399482727,0.8622750043869019,0.8543999791145325
0.3916166424751282,0.4392767548561096,0.8634999990463257,0.8547000288963318
0.38934609293937683,0.4377216696739197,0.8642249703407288,0.8560000061988831
0.39000260829925537,0.4386807680130005,0.8632500171661377,0.8562999963760376
0.3873240351676941,0.4411095678806305,0.8651250004768372,0.8564000129699707
0.3893658220767975,0.4428689479827881,0.8638499975204468,0.8555999994277954
0.38988879323005676,0.44171684980392456,0.8657500147819519,0.8553000092506409
0.3875708281993866,0.4406910538673401,0.8658000230789185,0.8555999994277954
0.3897504508495331,0.440227746963501,0.8634750247001648,0.8564000129699707
0.38853952288627625,0.44063568115234375,0.863224983215332,0.8560000061988831
0.3863672912120819,0.44267645478248596,0.8639249801635742,0.8551999926567078
0.3915402293205261,0.4392447769641876,0.864175021648407,0.8565999865531921
0.38890722393989563,0.44061586260795593,0.8641999959945679,0.8560000061988831
0.384794145822525,0.4407763183116913,0.8649749755859375,0.8561999797821045
0.3854992091655731,0.4430354833602905,0.8646500110626221,0.8557000160217285
0.39018359780311584,0.4402695894241333,0.8637750148773193,0.8565999865531921
0.3868090510368347,0.4388076066970825,0.8640249967575073,0.8561000227928162
0.38954439759254456,0.4407997131347656,0.8636500239372253,0.8555999994277954
0.38826027512550354,0.4379749596118927,0.8653749823570251,0.8560000061988831
0.38570594787597656,0.44017744064331055,0.8647750020027161,0.8564000129699707
0.3828895390033722,0.43516063690185547,0.8653749823570251,0.8562999963760376
0.3828815221786499,0.4407748878002167,0.8669499754905701,0.8560000061988831
0.38270318508148193,0.43895062804222107,0.8649500012397766,0.8561000227928162
0.38622045516967773,0.4378497898578644,0.8655750155448914,0.857200026512146
0.38636070489883423,0.4393852651119232,0.8663250207901001,0.8560000061988831
0.38578176498413086,0.4392833113670349,0.8648499846458435,0.8565000295639038
0.38094520568847656,0.44060683250427246,0.8661999702453613,0.8574000000953674
0.3837727904319763,0.4381526708602905,0.8650749921798706,0.8560000061988831
0.38108158111572266,0.44204825162887573,0.8672749996185303,0.8564000129699707
0.37953153252601624,0.4373597800731659,0.8673750162124634,0.8574000000953674
0.381308913230896,0.43936410546302795,0.8658249974250793,0.8585000038146973
0.38348597288131714,0.4389726519584656,0.8657249808311462,0.8568000197410583
0.3803487718105316,0.43985113501548767,0.8672249913215637,0.8574000000953674
0.3807951807975769,0.43626460433006287,0.8679749965667725,0.8575999736785889
--------------------------------------------------------------------------------
/histories/vgg_1_history.csv:
--------------------------------------------------------------------------------
loss,val_loss,accuracy,val_accuracy
0.3783921003341675,0.4412308633327484,0.8678249716758728,0.8567000031471252
0.381144642829895,0.4357834458351135,0.8682000041007996,0.857699990272522
0.37629804015159607,0.4383547902107239,0.8687250018119812,0.8574000000953674
0.3816626965999603,0.43722838163375854,0.8668749928474426,0.8580999970436096
0.3775562644004822,0.4364493787288666,0.8658499717712402,0.8585000038146973
0.37880030274391174,0.44154423475265503,0.8687000274658203,0.8575000166893005
0.37952056527137756,0.43762093782424927,0.8670750260353088,0.8578000068664551
0.3787648677825928,0.439072847366333,0.8672249913215637,0.858299970626831
0.3806030750274658,0.43552759289741516,0.8668249845504761,0.8585000038146973
0.3771918714046478,0.4393637776374817,0.8687999844551086,0.8565999865531921
0.3753899037837982,0.43518126010894775,0.8692499995231628,0.8589000105857849
0.3754112422466278,0.44010600447654724,0.8680999875068665,0.8580999970436096
0.3783348500728607,0.43726709485054016,0.8661500215530396,0.8587999939918518
0.37428387999534607,0.4352739751338959,0.8682000041007996,0.8589000105857849
0.37444356083869934,0.4423637092113495,0.8693749904632568,0.8578000068664551
0.3755384385585785,0.4386765658855438,0.8683750033378601,0.8569999933242798
0.37703442573547363,0.44039395451545715,0.8679749965667725,0.858299970626831
0.37363049387931824,0.43819478154182434,0.8690249919891357,0.8572999835014343
0.37411606311798096,0.43724575638771057,0.8700249791145325,0.8590999841690063
0.3743285834789276,0.4380851984024048,0.8694750070571899,0.8582000136375427
0.37085193395614624,0.4405499994754791,0.8701500296592712,0.8589000105857849
0.370938777923584,0.4355282187461853,0.8705999851226807,0.8575999736785889
0.37423521280288696,0.4314177334308624,0.8683500289916992,0.8603000044822693
0.3715704679489136,0.43441927433013916,0.869225025177002,0.859499990940094
0.3728063702583313,0.4339815378189087,0.8690999746322632,0.8600000143051147
0.370563805103302,0.4382908344268799,0.8700249791145325,0.8593999743461609
0.373179167509079,0.4337553083896637,0.8698999881744385,0.8600000143051147
0.36919575929641724,0.44265976548194885,0.87152498960495,0.8575999736785889
0.3718104660511017,0.43440771102905273,0.870199978351593,0.8600000143051147
0.36592578887939453,0.4366765022277832,0.8699250221252441,0.8597999811172485
0.36990055441856384,0.4381929636001587,0.8695999979972839,0.8593999743461609
0.3681570291519165,0.43822020292282104,0.8708249926567078,0.859499990940094
0.36864688992500305,0.43230780959129333,0.8703250288963318,0.86080002784729
0.3690699338912964,0.43444663286209106,0.8703250288963318,0.8604000210762024
0.368574321269989,0.4351419508457184,0.870074987411499,0.8593999743461609
0.36936384439468384,0.4375709891319275,0.8706499934196472,0.8586000204086304
0.36727195978164673,0.431255578994751,0.8707749843597412,0.8603000044822693
0.36939653754234314,0.4372176229953766,0.8705000281333923,0.8586999773979187
0.3707650303840637,0.43594488501548767,0.8701249957084656,0.8607000112533569
0.36937740445137024,0.43763163685798645,0.8702250123023987,0.859000027179718
0.36806657910346985,0.4377841353416443,0.8713750243186951,0.8597000241279602
0.3641957938671112,0.4356801211833954,0.873075008392334,0.8610000014305115
0.36711937189102173,0.43872714042663574,0.8708999752998352,0.8600000143051147
0.3645825684070587,0.4365193247795105,0.8713750243186951,0.8605999946594238
0.36488646268844604,0.43453124165534973,0.871874988079071,0.8603000044822693
0.363696813583374,0.43491992354393005,0.8726000189781189,0.8605999946594238
0.36802351474761963,0.43494361639022827,0.8708500266075134,0.8619999885559082
0.3612458407878876,0.43366333842277527,0.8738999962806702,0.86080002784729
0.3677927553653717,0.4320959448814392,0.8702250123023987,0.8605999946594238
0.3665728271007538,0.4346997141838074,0.8724250197410583,0.8600000143051147
0.36547476053237915,0.4355473220348358,0.8730499744415283,0.8611000180244446
0.3622393310070038,0.43705785274505615,0.8737000226974487,0.8603000044822693
0.3631836771965027,0.4348948299884796,0.8725249767303467,0.8587999939918518
0.36167582869529724,0.43779146671295166,0.8730250000953674,0.8593999743461609
0.36118370294570923,0.4336394965648651,0.8745750188827515,0.8600999712944031
0.3641562759876251,0.4300994575023651,0.8714749813079834,0.8615999817848206
0.3618348240852356,0.43183159828186035,0.8721749782562256,0.8603000044822693
0.3632376194000244,0.42982953786849976,0.8719499707221985,0.8604000210762024
0.3620557487010956,0.43106386065483093,0.8734750151634216,0.8604000210762024
0.36197617650032043,0.43379512429237366,0.8733999729156494,0.8593000173568726
0.3671884834766388,0.43419700860977173,0.8708750009536743,0.8596000075340271
0.3599982261657715,0.43403950333595276,0.8743749856948853,0.8589000105857849
0.36038443446159363,0.4368361830711365,0.8734999895095825,0.8593999743461609
0.36050596833229065,0.4315691292285919,0.8734750151634216,0.8607000112533569
0.36126676201820374,0.433429092168808,0.8719499707221985,0.86080002784729
0.35617733001708984,0.4365326762199402,0.8737999796867371,0.8593999743461609
0.3626789450645447,0.43530067801475525,0.8720499873161316,0.8596000075340271
0.35585328936576843,0.43214648962020874,0.8725749850273132,0.8597000241279602
0.35983312129974365,0.43284425139427185,0.8740749955177307,0.8593000173568726
0.360555499792099,0.4338282346725464,0.87357497215271,0.8605999946594238
0.35705894231796265,0.4330846071243286,0.8748499751091003,0.8592000007629395
0.36271631717681885,0.4394460916519165,0.8709999918937683,0.8579000234603882
0.3613359034061432,0.43564996123313904,0.873075008392334,0.8601999878883362
0.35595452785491943,0.43667587637901306,0.8737999796867371,0.8586000204086304
0.3549181818962097,0.43595123291015625,0.8754749894142151,0.859000027179718
0.35995548963546753,0.43308213353157043,0.8731250166893005,0.8598999977111816
0.35295844078063965,0.4355950653553009,0.8760250210762024,0.8587999939918518
0.35312265157699585,0.4313778281211853,0.875124990940094,0.8601999878883362
0.36139971017837524,0.4329187870025635,0.8723499774932861,0.8601999878883362
0.35504069924354553,0.4335509240627289,0.875374972820282,0.8608999848365784
0.36076024174690247,0.43076103925704956,0.8723499774932861,0.8600999712944031
0.35302889347076416,0.4349392056465149,0.8766499757766724,0.859499990940094
0.35510241985321045,0.4318089783191681,0.8749499917030334,0.8611999750137329
0.3542971611022949,0.4349600076675415,0.8749250173568726,0.8600000143051147
0.35548001527786255,0.43232977390289307,0.8768500089645386,0.8600000143051147
0.3584282100200653,0.43486151099205017,0.8737750053405762,0.8600000143051147
0.35191792249679565,0.43213412165641785,0.8766499757766724,0.8618999719619751
0.3551742434501648,0.43445467948913574,0.8744249939918518,0.86080002784729
0.35172685980796814,0.43568047881126404,0.8763750195503235,0.86080002784729
0.34985318779945374,0.4331132769584656,0.8776999711990356,0.8601999878883362
0.35410889983177185,0.44068270921707153,0.876075029373169,0.8597999811172485
0.35499176383018494,0.4298037588596344,0.8755000233650208,0.861299991607666
0.3520319163799286,0.4320061504840851,0.8774999976158142,0.8603000044822693
0.3478444516658783,0.4318123459815979,0.8789499998092651,0.8604999780654907
0.3492227792739868,0.43408867716789246,0.875374972820282,0.8604000210762024
0.352295845746994,0.428852915763855,0.8773999810218811,0.8615000247955322
0.3506666123867035,0.4337020516395569,0.8755750060081482,0.8598999977111816
0.35258084535598755,0.4306526780128479,0.8763750195503235,0.86080002784729
0.3517856299877167,0.4337373971939087,0.8758500218391418,0.859499990940094
0.3494594097137451,0.43220698833465576,0.8766999840736389,0.8611000180244446
--------------------------------------------------------------------------------
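All of the history files above share the same four-column schema (loss, val_loss, accuracy, val_accuracy; one row per epoch), and the numbered runs appear to be consecutive stages of the same training: vgg_1 starts almost exactly where vgg_0 leaves off, while resnet_0 starts from scratch. The repo's own plotting code (util.py and the *_test.py scripts) is not reproduced here, so the following is only a minimal sketch, assuming nothing beyond the CSV layout shown above, of how such histories could be loaded and compared; the output filename is hypothetical and merely mirrors the naming used in plots/.

import matplotlib.pyplot as plt
import pandas as pd
from pathlib import Path

# Each *_history.csv in histories/ holds one Keras-style training log,
# one row per epoch, with columns loss,val_loss,accuracy,val_accuracy.
for csv_path in sorted(Path("histories").glob("*_history.csv")):
    history = pd.read_csv(csv_path)
    run = csv_path.stem.replace("_history", "")  # e.g. "vgg_0"
    # The row index doubles as the (0-based) epoch number within that run.
    plt.plot(history["val_accuracy"], label=run)

plt.xlabel("epoch")
plt.ylabel("validation accuracy")
plt.legend()
plt.savefig("val_accuracy_comparison.png")  # hypothetical output name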