├── imgs
│   └── Transweather.png
├── .gitignore
├── data
│   ├── test
│   │   ├── allfilter.txt
│   │   ├── allweather.txt
│   │   ├── raindroptesta.txt
│   │   ├── allfilter_many.txt
│   │   └── test1.txt
│   └── train
│       ├── raindroptesta.txt
│       └── test1.txt
├── LICENSE
├── perceptual.py
├── utils
│   ├── move_files.py
│   ├── generate_name.py
│   └── visualizer.py
├── environment.yml
├── training_log
│   ├── warmcosLR1_log.txt
│   ├── allfilter2_log.txt
│   ├── expLR1_log.txt
│   ├── allfilter1_log.txt
│   ├── cosLR1_log.txt
│   └── 1977amaro1_log.txt
├── val_data_functions.py
├── test_raindrop.py
├── test_test1.py
├── test_1977.py
├── test_snow100k.py
├── test_raindropa.py
├── test_all.py
├── cosine_annealing.py
├── README.md
├── utils.py
├── base_networks.py
├── train_data_functions.py
├── train-individual.py
├── train.py
└── train_with_visdom.py
/imgs/Transweather.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jel-lambda/new-Transweather/HEAD/imgs/Transweather.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # image files for input and ground truth
2 | data/*/*/gt
3 | data/*/*/input
4 |
5 | # cache
6 | __pycache__
7 |
8 | # weights
9 | weights
10 |
11 | # results
12 | results
13 |
14 | # vscode setting
15 | .vscode
16 |
--------------------------------------------------------------------------------
/data/test/allfilter.txt:
--------------------------------------------------------------------------------
1 | allfilter/input/0_Perpetua.jpg
2 | allfilter/input/0_Lo-Fi.jpg
3 | allfilter/input/0_Sutro.jpg
4 | allfilter/input/0_Clarendon.jpg
5 | allfilter/input/0_Valencia.jpg
6 | allfilter/input/0_Mayfair.jpg
7 | allfilter/input/0_Gingham.jpg
8 | allfilter/input/0_Toaster.jpg
9 | allfilter/input/0_Hudson.jpg
10 | allfilter/input/0_Nashville.jpg
11 | allfilter/input/0_Amaro.jpg
12 | allfilter/input/0_Brannan.jpg
13 | allfilter/input/0_He-Fe.jpg
14 | allfilter/input/0_X-ProII.jpg
15 | allfilter/input/0_1977.jpg
16 |
--------------------------------------------------------------------------------
/data/test/allweather.txt:
--------------------------------------------------------------------------------
1 | Amaro/input/496_Amaro.jpg
2 | Amaro/input/493_Amaro.jpg
3 | Amaro/input/495_Amaro.jpg
4 | Amaro/input/494_Amaro.jpg
5 | Amaro/input/492_Amaro.jpg
6 | Amaro/input/497_Amaro.jpg
7 | Amaro/input/499_Amaro.jpg
8 | Amaro/input/491_Amaro.jpg
9 | Amaro/input/490_Amaro.jpg
10 | Amaro/input/498_Amaro.jpg
11 | Amaro/input/489_Amaro.jpg
12 | 1977/input/496_1977.jpg
13 | 1977/input/493_1977.jpg
14 | 1977/input/495_1977.jpg
15 | 1977/input/494_1977.jpg
16 | 1977/input/492_1977.jpg
17 | 1977/input/497_1977.jpg
18 | 1977/input/499_1977.jpg
19 | 1977/input/491_1977.jpg
20 | 1977/input/490_1977.jpg
21 | 1977/input/498_1977.jpg
22 | 1977/input/489_1977.jpg
23 |
24 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Jeya Maria Jose
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/perceptual.py:
--------------------------------------------------------------------------------
1 |
2 | # --- Imports --- #
3 | import torch
4 | import torch.nn.functional as F
5 |
6 |
7 | # --- Perceptual loss network --- #
8 | class LossNetwork(torch.nn.Module):
9 | def __init__(self, vgg_model):
10 | super(LossNetwork, self).__init__()
11 | self.vgg_layers = vgg_model
12 | self.layer_name_mapping = {
13 | '3': "relu1_2",
14 | '8': "relu2_2",
15 | '15': "relu3_3"
16 | }
17 |
18 | def output_features(self, x):
19 | output = {}
20 | for name, module in self.vgg_layers._modules.items():
21 | x = module(x)
22 | if name in self.layer_name_mapping:
23 | output[self.layer_name_mapping[name]] = x
24 | return list(output.values())
25 |
26 | def forward(self, pred_im, gt):
27 | loss = []
28 | pred_im_features = self.output_features(pred_im)
29 | gt_features = self.output_features(gt)
30 | for pred_im_feature, gt_feature in zip(pred_im_features, gt_features):
31 | loss.append(F.mse_loss(pred_im_feature, gt_feature))
32 |
33 | return sum(loss)/len(loss)
34 |
35 |
--------------------------------------------------------------------------------
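A minimal usage sketch (not part of the repository files above): `LossNetwork` expects a VGG feature extractor whose sub-module names include `'3'`, `'8'`, and `'15'`, i.e. the first 16 layers of torchvision's VGG16. The frozen-VGG setup and the random tensors below are assumptions for illustration, since the training script is not shown in this section.

```python
# Sketch: build the VGG16 feature extractor that LossNetwork expects and
# compute a perceptual loss between two image batches (illustrative tensors).
import torch
from torchvision.models import vgg16
from perceptual import LossNetwork

vgg_model = vgg16(pretrained=True).features[:16]   # layers '0'..'15', up to relu3_3
for param in vgg_model.parameters():
    param.requires_grad = False                    # keep the VGG weights frozen

loss_network = LossNetwork(vgg_model).eval()

pred = torch.rand(2, 3, 64, 64)                    # e.g. restored images
gt = torch.rand(2, 3, 64, 64)                      # e.g. ground-truth images
perceptual_loss = loss_network(pred, gt)           # mean MSE over relu1_2/2_2/3_3 features
print(perceptual_loss.item())
```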
/utils/move_files.py:
--------------------------------------------------------------------------------
1 | import os.path
2 | import shutil
3 |
4 | """ data 폴더 내 파일 이동 함수,
5 | filter_name: 상위 폴더 이름대로 지정 """
6 |
7 | def move_file(path_before, path_after, filter_name):
8 | filelist = os.listdir(path_before)
9 | for file in filelist:
10 | # temp_list = file.split("_")
11 | # from train to test
12 | # if temp_list[0] == '0':
13 | # shutil.move(path_before+file, path_after+"/allfilter/gt/"+file)
14 | # from test to train
15 | # shutil.move(path_before+file, path_after+filter_name+"/input/"+file)
16 | # from test to train gt
17 | shutil.move(path_before+file, path_after+filter_name+"/gt/"+file)
18 |
19 | def in_all_dir(dir_path, target_path):
20 | dir_list = os.listdir(dir_path)
21 | for directory in dir_list:
22 | if directory.split('.')[-1] != 'txt' and directory !='rain' and directory !='allfilter':
23 | file_path = os.path.join(dir_path, directory + '/gt/')
24 | move_file(file_path, target_path, directory)
25 |
26 | def main():
27 | dir_path = r"/home/cvmlserver/Seohyeon/TransWeather/data/test/"
28 | target_path = r"/home/cvmlserver/Seohyeon/TransWeather/data/train/"
29 | in_all_dir(dir_path, target_path)
30 |
31 | if __name__ == '__main__':
32 | main()
33 |
--------------------------------------------------------------------------------
/data/test/raindroptesta.txt:
--------------------------------------------------------------------------------
1 | rain/input/801_rain.png
2 | rain/input/802_rain.png
3 | rain/input/803_rain.png
4 | rain/input/804_rain.png
5 | rain/input/805_rain.png
6 | rain/input/806_rain.png
7 | rain/input/807_rain.png
8 | rain/input/808_rain.png
9 | rain/input/809_rain.png
10 | rain/input/811_rain.png
11 | rain/input/812_rain.png
12 | rain/input/813_rain.png
13 | rain/input/814_rain.png
14 | rain/input/815_rain.png
15 | rain/input/816_rain.png
16 | rain/input/817_rain.png
17 | rain/input/818_rain.png
18 | rain/input/820_rain.png
19 | rain/input/821_rain.png
20 | rain/input/822_rain.png
21 | rain/input/823_rain.png
22 | rain/input/824_rain.png
23 | rain/input/825_rain.png
24 | rain/input/826_rain.png
25 | rain/input/827_rain.png
26 | rain/input/828_rain.png
27 | rain/input/829_rain.png
28 | rain/input/830_rain.png
29 | rain/input/831_rain.png
30 | rain/input/832_rain.png
31 | rain/input/833_rain.png
32 | rain/input/834_rain.png
33 | rain/input/837_rain.png
34 | rain/input/839_rain.png
35 | rain/input/840_rain.png
36 | rain/input/841_rain.png
37 | rain/input/842_rain.png
38 | rain/input/843_rain.png
39 | rain/input/844_rain.png
40 | rain/input/845_rain.png
41 | rain/input/846_rain.png
42 | rain/input/847_rain.png
43 | rain/input/848_rain.png
44 | rain/input/849_rain.png
45 | rain/input/850_rain.png
46 | rain/input/851_rain.png
47 | rain/input/852_rain.png
48 | rain/input/853_rain.png
49 | rain/input/854_rain.png
50 | rain/input/855_rain.png
51 | rain/input/856_rain.png
52 | rain/input/857_rain.png
53 | rain/input/858_rain.png
54 | rain/input/859_rain.png
55 | rain/input/860_rain.png
56 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: transweather
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - _libgcc_mutex=0.1=main
7 | - _openmp_mutex=4.5=1_gnu
8 | - ca-certificates=2021.10.26=h06a4308_2
9 | - certifi=2021.5.30=py36h06a4308_0
10 | - ld_impl_linux-64=2.35.1=h7274673_9
11 | - libffi=3.3=he6710b0_2
12 | - libgcc-ng=9.3.0=h5101ec6_17
13 | - libgomp=9.3.0=h5101ec6_17
14 | - libstdcxx-ng=9.3.0=hd4cf53a_17
15 | - ncurses=6.3=h7f8727e_2
16 | - openssl=1.1.1l=h7f8727e_0
17 | - pip=21.2.2=py36h06a4308_0
18 | - python=3.6.13=h12debd9_1
19 | - readline=8.1=h27cfd23_0
20 | - setuptools=58.0.4=py36h06a4308_0
21 | - sqlite=3.36.0=hc218d9a_0
22 | - tk=8.6.11=h1ccaba5_0
23 | - wheel=0.37.0=pyhd3eb1b0_1
24 | - xz=5.2.5=h7b6447c_0
25 | - zlib=1.2.11=h7b6447c_3
26 | - pip:
27 | - addict==2.4.0
28 | - charset-normalizer==2.0.12
29 | - cycler==0.11.0
30 | - dataclasses==0.8
31 | - decorator==4.4.2
32 | - idna==3.3
33 | - imageio==2.15.0
34 | - jsonpatch==1.32
35 | - jsonpointer==2.3
36 | - kiwisolver==1.3.1
37 | - matplotlib==3.3.4
38 | - mmcv-full==1.2.7
39 | - networkx==2.5.1
40 | - numpy==1.19.5
41 | - opencv-python==4.5.1.48
42 | - perceptual==0.1
43 | - pillow==8.4.0
44 | - pyparsing==3.0.9
45 | - python-dateutil==2.8.2
46 | - pywavelets==1.1.1
47 | - pyyaml==6.0
48 | - pyzmq==23.2.0
49 | - requests==2.27.1
50 | - scikit-image==0.17.2
51 | - scipy==1.5.4
52 | - six==1.16.0
53 | - tifffile==2020.9.3
54 | - timm==0.3.2
55 | - torch==1.7.1
56 | - torchfile==0.1.0
57 | - torchvision==0.8.2
58 | - tornado==6.1
59 | - typing-extensions==4.0.0
60 | - urllib3==1.26.10
61 | - utils==1.0.1
62 | - visdom==0.1.8.9
63 | - websocket-client==1.3.1
64 | - yapf==0.31.0
65 | prefix: /home/cvmlserver/anaconda3/envs/transweather
66 |
--------------------------------------------------------------------------------
/training_log/warmcosLR1_log.txt:
--------------------------------------------------------------------------------
1 | Date: 2022-07-22 13:45:03s, Time_Cost: 119s, Epoch: [1/200], Train_PSNR: 0.05, Val_PSNR: 8.10, Val_SSIM: 0.0745
2 | Date: 2022-07-22 13:47:02s, Time_Cost: 119s, Epoch: [2/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
3 | Date: 2022-07-22 13:49:00s, Time_Cost: 118s, Epoch: [3/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
4 | Date: 2022-07-22 13:50:58s, Time_Cost: 118s, Epoch: [4/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
5 | Date: 2022-07-22 13:52:57s, Time_Cost: 118s, Epoch: [5/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
6 | Date: 2022-07-22 13:54:55s, Time_Cost: 118s, Epoch: [6/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
7 | Date: 2022-07-22 13:56:53s, Time_Cost: 118s, Epoch: [7/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
8 | Date: 2022-07-22 13:58:51s, Time_Cost: 118s, Epoch: [8/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
9 | Date: 2022-07-22 14:00:49s, Time_Cost: 118s, Epoch: [9/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
10 | Date: 2022-07-22 14:02:47s, Time_Cost: 118s, Epoch: [10/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
11 | Date: 2022-07-22 14:04:45s, Time_Cost: 118s, Epoch: [11/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
12 | Date: 2022-07-22 14:06:43s, Time_Cost: 118s, Epoch: [12/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
13 | Date: 2022-07-22 14:08:41s, Time_Cost: 118s, Epoch: [13/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
14 | Date: 2022-07-22 14:10:39s, Time_Cost: 118s, Epoch: [14/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
15 | Date: 2022-07-22 14:12:38s, Time_Cost: 118s, Epoch: [15/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
16 | Date: 2022-07-22 14:14:36s, Time_Cost: 118s, Epoch: [16/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
17 | Date: 2022-07-22 14:16:34s, Time_Cost: 118s, Epoch: [17/200], Train_PSNR: 0.03, Val_PSNR: 8.10, Val_SSIM: 0.0745
18 |
--------------------------------------------------------------------------------
/val_data_functions.py:
--------------------------------------------------------------------------------
1 | import torch.utils.data as data
2 | from PIL import Image
3 | from torchvision.transforms import Compose, ToTensor, Normalize
4 | import numpy as np
5 |
6 | # --- Validation/test dataset --- #
7 | class ValData(data.Dataset):
8 | def __init__(self, val_data_dir,val_filename):
9 | super().__init__()
10 | val_list = val_data_dir + val_filename
11 | with open(val_list) as f:
12 | contents = f.readlines()
13 | input_names = [i.strip() for i in contents]
14 | gt_names = [i.strip().replace('input','gt') for i in input_names]
15 |
16 | self.input_names = input_names
17 | self.gt_names = gt_names
18 | self.val_data_dir = val_data_dir
19 |
20 | def get_images(self, index):
21 | input_name = self.input_names[index]
22 | gt_name = self.gt_names[index]
23 | input_img = Image.open(self.val_data_dir + input_name)
24 | gt_img = Image.open(self.val_data_dir + gt_name)
25 |
26 |         # --- Resize so each side is a multiple of 16 (long side capped at 1024) --- #
27 | wd_new,ht_new = input_img.size
28 | if ht_new>wd_new and ht_new>1024:
29 | wd_new = int(np.ceil(wd_new*1024/ht_new))
30 | ht_new = 1024
31 | elif ht_new<=wd_new and wd_new>1024:
32 | ht_new = int(np.ceil(ht_new*1024/wd_new))
33 | wd_new = 1024
34 | wd_new = int(16*np.ceil(wd_new/16.0))
35 | ht_new = int(16*np.ceil(ht_new/16.0))
36 | input_img = input_img.resize((wd_new,ht_new), Image.ANTIALIAS)
37 | gt_img = gt_img.resize((wd_new, ht_new), Image.ANTIALIAS)
38 |
39 | # --- Transform to tensor --- #
40 | transform_input = Compose([ToTensor(), Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
41 | transform_gt = Compose([ToTensor()])
42 | input_im = transform_input(input_img)
43 | gt = transform_gt(gt_img)
44 |
45 | return input_im, gt, input_name
46 |
47 | def __getitem__(self, index):
48 | res = self.get_images(index)
49 | return res
50 |
51 | def __len__(self):
52 | return len(self.input_names)
53 |
--------------------------------------------------------------------------------
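For reference, the test scripts further down pair `ValData` with a `DataLoader`; a minimal sketch of that pattern is below (the list file name is just one of the examples under `data/test/`).

```python
# Sketch: load one of the test lists with ValData, mirroring the test scripts.
from torch.utils.data import DataLoader
from val_data_functions import ValData

val_data_dir = './data/test/'      # folder holding the .txt lists and image subfolders
val_filename = 'allfilter.txt'     # any list file under data/test/
val_loader = DataLoader(ValData(val_data_dir, val_filename),
                        batch_size=1, shuffle=False, num_workers=8)

for input_im, gt, name in val_loader:
    pass                           # feed input_im to the network and compare with gt
```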
/utils/generate_name.py:
--------------------------------------------------------------------------------
1 | import os.path
2 |
3 | """ data 폴더 내 파일 이름을 바꿔주는 함수,
4 | filter_name: 상위 폴더 이름대로 지정 """
5 | def rename_files(file_path, filter_name):
6 | files = os.listdir(file_path)
7 | for name in files:
8 | src = os.path.join(file_path, name)
9 | new_name = name.split('_')[0] + '_' + filter_name +'.jpg'
10 | dst = os.path.join(file_path, new_name)
11 | os.rename(src, dst)
12 |
13 | def make_textfile(file_path, filter_name):
14 | f = open("/home/cvmlserver/Seohyeon/TransWeather/data/train/allfilter.txt", 'a')
15 | files = os.listdir(file_path)
16 | for name in files:
17 | f.write(filter_name + "/input/"+ name + '\n')
18 |     f.close()
19 |
20 | def rename_and_textfile(file_path, filter_name):
21 | f = open("/home/cvmlserver/Seohyeon/TransWeather/data/train/allfilter.txt", 'a')
22 | files = os.listdir(file_path)
23 | for name in files:
24 | src = os.path.join(file_path, name)
25 | new_name = name.split('_')[0] + '_' + filter_name +'.jpg'
26 | dst = os.path.join(file_path, new_name)
27 | f.write(filter_name + "/input/"+ new_name + '\n')
28 | os.rename(src, dst)
29 |     f.close()
30 |
31 | def in_all_dir(dir_path,func):
32 | dir_list = os.listdir(dir_path)
33 | if func == 'rename_files':
34 | for directory in dir_list:
35 | if directory.split('.')[-1] != 'txt':
36 | file_path = os.path.join(dir_path, directory + '/gt/')
37 | rename_files(file_path, directory)
38 | else:
39 | f = open("/home/cvmlserver/Seohyeon/TransWeather/data/train/allfilter.txt", 'w')
40 |         f.close()
41 | for directory in dir_list:
42 | if directory.split('.')[-1] != 'txt':
43 | file_path = os.path.join(dir_path, directory + '/gt/')
44 | if func == "make_textfile":
45 | make_textfile(file_path,directory)
46 | else:
47 | rename_and_textfile(file_path, directory)
48 |
49 | def main():
50 | dir_path = r"/home/cvmlserver/Seohyeon/TransWeather/data/train/"
51 | in_all_dir(dir_path, 'make_textfile')
52 |
53 | if __name__ == '__main__':
54 | main()
55 |
--------------------------------------------------------------------------------
/test_raindrop.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | from torch.utils.data import DataLoader
6 | from val_data_functions import ValData
7 | from utils import validation, validation_val
8 | import os
9 | import numpy as np
10 | import random
11 | from transweather_model import Transweather
12 |
13 | # --- Parse hyper-parameters --- #
14 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
15 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
16 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
17 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
18 | args = parser.parse_args()
19 |
20 | val_batch_size = args.val_batch_size
21 | exp_name = args.exp_name
22 |
23 | #set seed
24 | seed = args.seed
25 | if seed is not None:
26 | np.random.seed(seed)
27 | torch.manual_seed(seed)
28 | torch.cuda.manual_seed(seed)
29 | random.seed(seed)
30 | print('Seed:\t{}'.format(seed))
31 |
32 | # --- Set category-specific hyper-parameters --- #
33 | val_data_dir = './data/test/'
34 |
35 | # --- Gpu device --- #
36 | device_ids = [Id for Id in range(torch.cuda.device_count())]
37 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
38 |
39 |
40 | # --- Validation data loader --- #
41 |
42 | val_filename = 'raindroptest1a.txt' ## This text file should contain all the names of the images and must be placed in ./data/test/ directory
43 |
44 | val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
45 |
46 | # --- Define the network --- #
47 |
48 | net = Transweather().cuda()
49 |
50 |
51 | net = nn.DataParallel(net, device_ids=device_ids)
52 |
53 |
54 | # --- Load the network weight --- #
55 | net.load_state_dict(torch.load('./{}/best'.format(exp_name)))
56 | 
57 | # --- Use the evaluation model in testing --- #
58 | net.eval()
59 | category = "raindrop"
60 | 
61 | if os.path.exists('./results/{}/{}/'.format(category,exp_name))==False:
62 |     os.makedirs('./results/{}/{}/'.format(category,exp_name))
63 | 
64 | print('--- Testing starts! ---')
65 | start_time = time.time()
66 | val_psnr, val_ssim = validation_val(net, val_data_loader, device, exp_name, category, save_tag=True)
67 | end_time = time.time() - start_time
68 | print('val_psnr: {0:.2f}, val_ssim: {1:.4f}'.format(val_psnr, val_ssim))
69 | print('validation time is {0:.4f}'.format(end_time))
--------------------------------------------------------------------------------
/test_test1.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | from torch.utils.data import DataLoader
6 | from val_data_functions import ValData
7 | from utils import validation, validation_val
8 | import os
9 | import numpy as np
10 | import random
11 | from transweather_model import Transweather
12 |
13 | # --- Parse hyper-parameters --- #
14 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
15 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
16 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
17 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
18 | args = parser.parse_args()
19 |
20 | val_batch_size = args.val_batch_size
21 | exp_name = args.exp_name
22 |
23 | #set seed
24 | seed = args.seed
25 | if seed is not None:
26 | np.random.seed(seed)
27 | torch.manual_seed(seed)
28 | torch.cuda.manual_seed(seed)
29 | random.seed(seed)
30 | print('Seed:\t{}'.format(seed))
31 |
32 | # --- Set category-specific hyper-parameters --- #
33 | val_data_dir = './data/test/'
34 |
35 | # --- Gpu device --- #
36 | device_ids = [Id for Id in range(torch.cuda.device_count())]
37 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
38 | print(device)
39 |
40 | # --- Validation data loader --- #
41 |
42 | val_filename = 'test1.txt' ## This text file should contain all the names of the images and must be placed in ./data/test/ directory
43 |
44 | val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
45 |
46 | # --- Define the network --- #
47 |
48 | net = Transweather().cuda()
49 |
50 |
51 | net = nn.DataParallel(net, device_ids=device_ids)
52 |
53 |
54 | # --- Load the network weight --- #
55 | net.load_state_dict(torch.load('./{}/best'.format(exp_name)))
56 |
57 | # --- Use the evaluation model in testing --- #
58 | net.eval()
59 | category = "test1"
60 |
61 | if os.path.exists('./results/{}/{}/'.format(category,exp_name))==False:
62 | os.makedirs('./results/{}/{}/'.format(category,exp_name))
63 |
64 |
65 | print('--- Testing starts! ---')
66 | start_time = time.time()
67 | val_psnr, val_ssim = validation_val(net, val_data_loader, device, exp_name,category, save_tag=True)
68 | end_time = time.time() - start_time
69 | print('val_psnr: {0:.2f}, val_ssim: {1:.4f}'.format(val_psnr, val_ssim))
70 | print('validation time is {0:.4f}'.format(end_time))
71 |
--------------------------------------------------------------------------------
/test_1977.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | from torch.utils.data import DataLoader
6 | from val_data_functions import ValData
7 | from utils import validation, validation_val
8 | import os
9 | import numpy as np
10 | import random
11 | from transweather_model import Transweather_base
12 |
13 | # --- Parse hyper-parameters --- #
14 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
15 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
16 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
17 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
18 | args = parser.parse_args()
19 |
20 | val_batch_size = args.val_batch_size
21 | exp_name = args.exp_name
22 |
23 | #set seed
24 | seed = args.seed
25 | if seed is not None:
26 | np.random.seed(seed)
27 | torch.manual_seed(seed)
28 | torch.cuda.manual_seed(seed)
29 | random.seed(seed)
30 | print('Seed:\t{}'.format(seed))
31 |
32 | # --- Set category-specific hyper-parameters --- #
33 | val_data_dir = './data/test/'
34 |
35 | # --- Gpu device --- #
36 | device_ids = [Id for Id in range(torch.cuda.device_count())]
37 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
38 | print(device)
39 |
40 | # --- Validation data loader --- #
41 |
42 | val_filename = '1977.txt' ## This text file should contain all the names of the images and must be placed in ./data/test/ directory
43 |
44 | val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
45 |
46 | # --- Define the network --- #
47 |
48 | net = Transweather_base().cuda()
49 |
50 |
51 | net = nn.DataParallel(net, device_ids=device_ids)
52 |
53 |
54 | # --- Load the network weight --- #
55 | net.load_state_dict(torch.load('./{}/best'.format(exp_name)))
56 |
57 | # --- Use the evaluation model in testing --- #
58 | net.eval()
59 | category = "1977"
60 |
61 | if os.path.exists('./results/{}/{}/'.format(category,exp_name))==False:
62 | os.makedirs('./results/{}/{}/'.format(category,exp_name))
63 |
64 |
65 | print('--- Testing starts! ---')
66 | start_time = time.time()
67 | val_psnr, val_ssim = validation_val(net, val_data_loader, device, exp_name,category, save_tag=True)
68 | end_time = time.time() - start_time
69 | print('val_psnr: {0:.2f}, val_ssim: {1:.4f}'.format(val_psnr, val_ssim))
70 | print('validation time is {0:.4f}'.format(end_time))
71 |
--------------------------------------------------------------------------------
/test_snow100k.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | from torch.utils.data import DataLoader
6 | from val_data_functions import ValData
7 | from utils import validation, validation_val
8 | import os
9 | import numpy as np
10 | import random
11 | from transweather_model import Transweather
12 |
13 | # --- Parse hyper-parameters --- #
14 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
15 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
16 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
17 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
18 | args = parser.parse_args()
19 |
20 | val_batch_size = args.val_batch_size
21 | exp_name = args.exp_name
22 |
23 | #set seed
24 | seed = args.seed
25 | if seed is not None:
26 | np.random.seed(seed)
27 | torch.manual_seed(seed)
28 | torch.cuda.manual_seed(seed)
29 | random.seed(seed)
30 | print('Seed:\t{}'.format(seed))
31 |
32 | # --- Set category-specific hyper-parameters --- #
33 | val_data_dir = './data/test/'
34 |
35 | # --- Gpu device --- #
36 | device_ids = [Id for Id in range(torch.cuda.device_count())]
37 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
38 | print(device)
39 |
40 | # --- Validation data loader --- #
41 |
42 | val_filename = 'snowtest100k_L.txt' ## This text file should contain all the names of the images and must be placed in ./data/test/ directory
43 |
44 | val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
45 |
46 | # --- Define the network --- #
47 |
48 | net = Transweather().cuda()
49 |
50 |
51 | net = nn.DataParallel(net, device_ids=device_ids)
52 |
53 |
54 | # --- Load the network weight --- #
55 | net.load_state_dict(torch.load('./{}/best'.format(exp_name)))
56 |
57 | # --- Use the evaluation model in testing --- #
58 | net.eval()
59 | category = "snowtest100k"
60 |
61 | if os.path.exists('./results/{}/{}/'.format(category,exp_name))==False:
62 | os.makedirs('./results/{}/{}/'.format(category,exp_name))
63 |
64 |
65 | print('--- Testing starts! ---')
66 | start_time = time.time()
67 | val_psnr, val_ssim = validation_val(net, val_data_loader, device, exp_name,category, save_tag=True)
68 | end_time = time.time() - start_time
69 | print('val_psnr: {0:.2f}, val_ssim: {1:.4f}'.format(val_psnr, val_ssim))
70 | print('validation time is {0:.4f}'.format(end_time))
71 |
--------------------------------------------------------------------------------
/test_raindropa.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | from torch.utils.data import DataLoader
6 | from val_data_functions import ValData
7 | from utils import validation, validation_val
8 | import os
9 | import numpy as np
10 | import random
11 | from transweather_model import Transweather
12 |
13 | # --- Parse hyper-parameters --- #
14 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
15 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
16 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
17 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
18 | args = parser.parse_args()
19 |
20 | val_batch_size = args.val_batch_size
21 | exp_name = args.exp_name
22 |
23 | #set seed
24 | seed = args.seed
25 | if seed is not None:
26 | np.random.seed(seed)
27 | torch.manual_seed(seed)
28 | torch.cuda.manual_seed(seed)
29 | random.seed(seed)
30 | print('Seed:\t{}'.format(seed))
31 |
32 | # --- Set category-specific hyper-parameters --- #
33 | val_data_dir = './data/test/'
34 |
35 | # --- Gpu device --- #
36 | device_ids = [Id for Id in range(torch.cuda.device_count())]
37 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
38 | print(device)
39 |
40 | # --- Validation data loader --- #
41 |
42 | val_filename = 'raindroptesta.txt' ## This text file should contain all the names of the images and must be placed in ./data/test/ directory
43 |
44 | val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
45 |
46 | # --- Define the network --- #
47 |
48 | net = Transweather().cuda()
49 |
50 |
51 | net = nn.DataParallel(net, device_ids=device_ids)
52 |
53 |
54 | # --- Load the network weight --- #
55 | net.load_state_dict(torch.load('./{}/best'.format(exp_name)))
56 |
57 | # --- Use the evaluation model in testing --- #
58 | net.eval()
59 | category = "raindroptest"
60 |
61 | if os.path.exists('./results/{}/{}/'.format(category,exp_name))==False:
62 | os.makedirs('./results/{}/{}/'.format(category,exp_name))
63 |
64 |
65 | print('--- Testing starts! ---')
66 | start_time = time.time()
67 | val_psnr, val_ssim = validation_val(net, val_data_loader, device, exp_name,category, save_tag=True)
68 | end_time = time.time() - start_time
69 | print('val_psnr: {0:.2f}, val_ssim: {1:.4f}'.format(val_psnr, val_ssim))
70 | print('validation time is {0:.4f}'.format(end_time))
71 |
--------------------------------------------------------------------------------
/test_all.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | from torch.utils.data import DataLoader
6 | from val_data_functions import ValData
7 | from utils import validation, validation_val
8 | import os
9 | import numpy as np
10 | import random
11 | from transweather_model import Transweather
12 |
13 | # --- Parse hyper-parameters --- #
14 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
15 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
16 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', default='1977amaro1', type=str)
17 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
18 | args = parser.parse_args()
19 |
20 | val_batch_size = args.val_batch_size
21 | exp_name = args.exp_name
22 |
23 | #set seed
24 | seed = args.seed
25 | if seed is not None:
26 | np.random.seed(seed)
27 | torch.manual_seed(seed)
28 | torch.cuda.manual_seed(seed)
29 | random.seed(seed)
30 | print('Seed:\t{}'.format(seed))
31 |
32 | # --- Set category-specific hyper-parameters --- #
33 | val_data_dir = './data/test/'
34 |
35 | # --- Gpu device --- #
36 | device_ids = [Id for Id in range(torch.cuda.device_count())]
37 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
38 | print(device)
39 |
40 | # --- Validation data loader --- #
41 |
42 | val_filename = 'allfilter.txt' ## This text file should contain all the names of the images and must be placed in ./data/test/ directory
43 |
44 | val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
45 |
46 | # --- Define the network --- #
47 |
48 | net = Transweather().cuda()
49 |
50 |
51 | net = nn.DataParallel(net, device_ids=device_ids)
52 |
53 |
54 | # --- Load the network weight --- #
55 | net.load_state_dict(torch.load('./weights/{}/best'.format(exp_name)))
56 |
57 | # --- Use the evaluation model in testing --- #
58 | net.eval()
59 | category = "allfilter"
60 |
61 | if os.path.exists('./results/{}/{}/'.format(category,exp_name))==False:
62 | os.makedirs('./results/{}/{}/'.format(category,exp_name))
63 |
64 |
65 | print('--- Testing starts! ---')
66 | start_time = time.time()
67 | val_psnr, val_ssim = validation_val(net, val_data_loader, device, exp_name,category, save_tag=True)
68 | end_time = time.time() - start_time
69 | print('val_psnr: {0:.2f}, val_ssim: {1:.4f}'.format(val_psnr, val_ssim))
70 | print('validation time is {0:.4f}'.format(end_time))
71 |
--------------------------------------------------------------------------------
/utils/visualizer.py:
--------------------------------------------------------------------------------
1 | import visdom
2 |
3 | from visdom import Visdom
4 | import numpy as np
5 | import argparse
6 |
7 | DEFAULT_PORT = 8097
8 | DEFAULT_HOSTNAME = "http://165.194.104.141"
9 |
10 |
11 | class Visualizer():
12 | def __init__(self):
13 | parser = argparse.ArgumentParser(description='Image display arguments')
14 | parser.add_argument('-port', metavar='port', type=int, default=DEFAULT_PORT,
15 | help='port the visdom server is running on.')
16 | parser.add_argument('-server', metavar='server', type=str,
17 | default=DEFAULT_HOSTNAME,
18 | help='Server address of the target to run the demo on.')
19 |
20 | FLAGS = parser.parse_args()
21 |
22 | self.viz = Visdom(port=FLAGS.port, server=FLAGS.server)
23 |
24 | def image_display(self, visuals, iter):
25 | idx = 1
26 | for label, image in visuals.items():
27 | self.viz.image(image.transpose([2, 0, 1]), opts=dict(title=label, width=240, height=240), win=idx, update='replace')
28 | idx += 1
29 | # if iter % 500 == 0:
30 | # save_image(image_numpy, 'D:\CODE\pytorch_code\WAE_base/train_results/' + str(iter) + '_' + label + '.png')
31 |
32 |
33 | def plot_current_errors(self, epoch, errors):
34 | if not hasattr(self, 'plot_data'):
35 | self.plot_data = {'X':[],'Y':[], 'legend':list(errors.keys())}
36 | self.plot_data['X'].append(epoch)
37 | self.plot_data['Y'].append([errors[k] for k in self.plot_data['legend']])
38 | self.viz.line(
39 | X=np.stack([np.array(self.plot_data['X'])]*len(self.plot_data['legend']), 1),
40 | Y=np.array(self.plot_data['Y']),
41 | opts={'title': 'Training' + ' loss over time',
42 | 'legend': self.plot_data['legend'],
43 | 'xlabel': 'epoch',
44 | 'ylabel': 'loss'},
45 | win=41, update='append')
46 |
47 | def plot_psnr(self, loss_step, loss_dict):
48 | if not hasattr(self, 'plot_data'):
49 | self.plot_data = {'X':[],'Y':[], 'legend':list(loss_dict.keys())}
50 | self.plot_data['X'].append(loss_step)
51 |         self.plot_data['Y'].append([loss_dict[k] for k in self.plot_data['legend']])
52 |         self.viz.line(
53 |             X=np.array(self.plot_data['X']),
54 |             Y=np.array(self.plot_data['Y']),
55 |             win=50,
56 |             opts=dict(xlabel='epoch',
57 |                       ylabel='PSNR',
58 |                       title='IFFI PSNR',
59 |                       legend=self.plot_data['legend']),
60 |             update='append')
61 |
--------------------------------------------------------------------------------
/cosine_annealing.py:
--------------------------------------------------------------------------------
1 | import math
2 | from torch.optim.lr_scheduler import _LRScheduler
3 |
4 | class CosineAnnealingWarmUpRestarts(_LRScheduler):
5 | def __init__(self, optimizer, T_0, T_mult=1, eta_max=0.1, T_up=0, gamma=1., last_epoch=-1):
6 | if T_0 <= 0 or not isinstance(T_0, int):
7 | raise ValueError("Expected positive integer T_0, but got {}".format(T_0))
8 | if T_mult < 1 or not isinstance(T_mult, int):
9 | raise ValueError("Expected integer T_mult >= 1, but got {}".format(T_mult))
10 | if T_up < 0 or not isinstance(T_up, int):
11 |             raise ValueError("Expected non-negative integer T_up, but got {}".format(T_up))
12 | self.T_0 = T_0
13 | self.T_mult = T_mult
14 | self.base_eta_max = eta_max
15 | self.eta_max = eta_max
16 | self.T_up = T_up
17 | self.T_i = T_0
18 | self.gamma = gamma
19 | self.cycle = 0
20 | self.T_cur = last_epoch
21 | super(CosineAnnealingWarmUpRestarts, self).__init__(optimizer, last_epoch)
22 |
23 | def get_lr(self):
24 | if self.T_cur == -1:
25 | return self.base_lrs
26 | elif self.T_cur < self.T_up:
27 | return [(self.eta_max - base_lr)*self.T_cur / self.T_up + base_lr for base_lr in self.base_lrs]
28 | else:
29 | return [base_lr + (self.eta_max - base_lr) * (1 + math.cos(math.pi * (self.T_cur-self.T_up) / (self.T_i - self.T_up))) / 2
30 | for base_lr in self.base_lrs]
31 |
32 | def step(self, epoch=None):
33 | if epoch is None:
34 | epoch = self.last_epoch + 1
35 | self.T_cur = self.T_cur + 1
36 | if self.T_cur >= self.T_i:
37 | self.cycle += 1
38 | self.T_cur = self.T_cur - self.T_i
39 | self.T_i = (self.T_i - self.T_up) * self.T_mult + self.T_up
40 | else:
41 | if epoch >= self.T_0:
42 | if self.T_mult == 1:
43 | self.T_cur = epoch % self.T_0
44 | self.cycle = epoch // self.T_0
45 | else:
46 | n = int(math.log((epoch / self.T_0 * (self.T_mult - 1) + 1), self.T_mult))
47 | self.cycle = n
48 | self.T_cur = epoch - self.T_0 * (self.T_mult ** n - 1) / (self.T_mult - 1)
49 | self.T_i = self.T_0 * self.T_mult ** (n)
50 | else:
51 | self.T_i = self.T_0
52 | self.T_cur = epoch
53 |
54 | self.eta_max = self.base_eta_max * (self.gamma**self.cycle)
55 | self.last_epoch = math.floor(epoch)
56 | for param_group, lr in zip(self.optimizer.param_groups, self.get_lr()):
57 | param_group['lr'] = lr
58 |
--------------------------------------------------------------------------------
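A minimal usage sketch (not taken from this repo's training scripts): with this warm-up variant the optimizer is typically created with a small base learning rate, and `eta_max` acts as the peak reached after `T_up` warm-up epochs. All hyper-parameter values below are placeholders, not the repository's actual settings.

```python
# Sketch: attach CosineAnnealingWarmUpRestarts to an optimizer and step it once
# per epoch. Numbers here are illustrative only.
import torch
from cosine_annealing import CosineAnnealingWarmUpRestarts

model = torch.nn.Linear(8, 8)                                  # placeholder model
optimizer = torch.optim.Adam(model.parameters(), lr=1e-8)      # small base lr
scheduler = CosineAnnealingWarmUpRestarts(optimizer, T_0=50, T_mult=2,
                                          eta_max=2e-4, T_up=5, gamma=0.5)

for epoch in range(200):
    # ... one training epoch: forward, loss.backward(), optimizer.step() ...
    scheduler.step()   # linear warm-up for T_up epochs, then cosine decay with restarts
```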
/README.md:
--------------------------------------------------------------------------------
1 | # TransWeather
2 |
3 |
4 |
5 | This is not an official implementation of the paper [TransWeather: Transformer-based Restoration of Images Degraded by Adverse Weather Conditions](https://arxiv.org/abs/2111.14813).
6 | 
7 | The official code is available [here](https://github.com/jeya-maria-jose/TransWeather).
8 |
9 | ### About this repo:
10 |
11 |
12 |
13 | ## Using the code:
14 |
15 | The code is stable with Python 3.6.13 and CUDA >= 10.1.
16 |
17 | - Clone this repository:
18 | ```bash
19 | git clone https://github.com/jeya-maria-jose/TransWeather
20 | cd TransWeather
21 | ```
22 |
23 | To install all the dependencies using conda:
24 |
25 | ```bash
26 | conda env create -f environment.yml
27 | conda activate transweather
28 | ```
29 |
30 | If you prefer pip, install the following versions:
31 |
32 | ```bash
33 | timm==0.3.2
34 | mmcv-full==1.2.7
35 | torch==1.7.1
36 | torchvision==0.8.2
37 | opencv-python==4.5.1.48
38 | ```
39 |
40 |
41 | ## Datasets:
42 |
43 | ### Train Data:
44 |
45 | TransWeather is trained on a combination of images sampled from the Outdoor-Rain, Snow100K, and Raindrop datasets (similar to [All-in-One (CVPR 2020)](https://openaccess.thecvf.com/content_CVPR_2020/papers/Li_All_in_One_Bad_Weather_Removal_Using_Architectural_Search_CVPR_2020_paper.pdf)), dubbed "All-Weather", containing 18,069 images. It can be downloaded from this [link](https://drive.google.com/file/d/1tfeBnjZX1wIhIFPl6HOzzOKOyo0GdGHl/view?usp=sharing).
46 |
47 |
48 | ### Dataset format:
49 |
50 | Download the datasets and arrange them in the following format.
51 |
52 | ```
53 | TransWeather
54 | ├── data
55 | |   ├── train                       # Training
56 | |   |   ├── <dataset_name>
57 | |   |   |   ├── input               # rain images
58 | |   |   |   └── gt                  # clean images
59 | |   |   └── dataset_filename.txt
60 | |   └── test                        # Testing
61 | |       ├── <dataset_name>
62 | |       |   ├── input               # rain images
63 | |       |   └── gt                  # clean images
64 | |       └── dataset_filename.txt
65 | ```
66 | - Format that will be applied later:
67 |
68 | ```
69 | TransWeather
70 | ├── data
71 | |   ├── train                       # Training
72 | |   |   ├── <dataset_name>
73 | |   |   |   ├── input               # rain images
74 | |   |   |   └── gt                  # clean images
75 | |   |   └── dataset_filename.txt
76 | |   └── test                        # Testing
77 | |       ├── <dataset_name>
78 | |       |   ├── input               # rain images
79 | |       |   └── gt                  # clean images
80 | |       └── dataset_filename.txt
81 | ```
82 |
83 |
84 | ### Text Files:
85 |
86 | [Link](https://drive.google.com/file/d/1UsazX-P3sPcDGw3kxkyFWqUyNfhYN_AM/view?usp=sharing)
87 |
88 | Check that the paths inside these text files match your local directory layout.
89 |
90 | ## Evaluation Code:
91 |
92 |
93 | To run the evaluation for specific test datasets, run the following commands:
94 |
95 | ```
96 | python test_snow100k.py -exp_name TransWeather_weights
97 | ```
98 |
99 | ```
100 | python test_test1.py -exp_name TransWeather_weights
101 | ```
102 |
103 | ```
104 | python test_raindropa.py -exp_name TransWeather_weights
105 | ```
106 |
107 | These scripts will calculate the performance metrics as well as save the predictions in the results folder.
108 |
109 |
110 | ## Training the network:
111 |
112 | To train the network on All-weather dataset, run the following command:
113 |
114 | ```
115 | python train.py -train_batch_size 32 -exp_name Transweather -epoch_start 0 -num_epochs 250
116 | ```
117 |
118 | ## Extensions:
119 |
120 | Note that TransWeather is built to solve all adverse weather problems with a single model. Additionally, TransWeather can be easily modified (by removing the transformer decoder) to focus on an individual restoration task. To train just the TransWeather encoder on other datasets (such as Rain-800), organize the dataset in the same way as All-Weather and run the following command:
121 |
122 | ```
123 | python train-individual.py -train_batch_size 32 -exp_name Transweather-finetune -epoch_start 0 -num_epochs 250
124 | ```
125 |
126 | Update ```train-individual.py``` with the necessary details of the data to be trained on. Note that the network used there is a sub-section of the original TransWeather architecture, without the weather queries.
127 |
128 | ### Acknowledgements:
129 |
130 | This code-base uses certain code-blocks and helper functions from [Syn2Real](https://github.com/rajeevyasarla/Syn2Real), [Segformer](https://github.com/NVlabs/SegFormer), and [ViT](https://github.com/lucidrains/vit-pytorch).
131 |
132 | ### Citation:
133 |
134 | ```
135 | @misc{valanarasu2021transweather,
136 | title={TransWeather: Transformer-based Restoration of Images Degraded by Adverse Weather Conditions},
137 | author={Jeya Maria Jose Valanarasu and Rajeev Yasarla and Vishal M. Patel},
138 | year={2021},
139 | eprint={2111.14813},
140 | archivePrefix={arXiv},
141 | primaryClass={cs.CV}
142 | }
143 | ```
144 |
--------------------------------------------------------------------------------
/data/test/allfilter_many.txt:
--------------------------------------------------------------------------------
1 | Mayfair/input/495_Mayfair.jpg
2 | Mayfair/input/499_Mayfair.jpg
3 | Mayfair/input/497_Mayfair.jpg
4 | Mayfair/input/490_Mayfair.jpg
5 | Mayfair/input/494_Mayfair.jpg
6 | Mayfair/input/489_Mayfair.jpg
7 | Mayfair/input/491_Mayfair.jpg
8 | Mayfair/input/496_Mayfair.jpg
9 | Mayfair/input/498_Mayfair.jpg
10 | Mayfair/input/493_Mayfair.jpg
11 | Mayfair/input/492_Mayfair.jpg
12 | Brannan/input/491_Brannan.jpg
13 | Brannan/input/493_Brannan.jpg
14 | Brannan/input/490_Brannan.jpg
15 | Brannan/input/496_Brannan.jpg
16 | Brannan/input/499_Brannan.jpg
17 | Brannan/input/494_Brannan.jpg
18 | Brannan/input/495_Brannan.jpg
19 | Brannan/input/489_Brannan.jpg
20 | Brannan/input/498_Brannan.jpg
21 | Brannan/input/492_Brannan.jpg
22 | Brannan/input/497_Brannan.jpg
23 | He-Fe/input/491_He-Fe.jpg
24 | He-Fe/input/497_He-Fe.jpg
25 | He-Fe/input/498_He-Fe.jpg
26 | He-Fe/input/495_He-Fe.jpg
27 | He-Fe/input/499_He-Fe.jpg
28 | He-Fe/input/489_He-Fe.jpg
29 | He-Fe/input/490_He-Fe.jpg
30 | He-Fe/input/496_He-Fe.jpg
31 | He-Fe/input/494_He-Fe.jpg
32 | He-Fe/input/492_He-Fe.jpg
33 | He-Fe/input/493_He-Fe.jpg
34 | X-ProII/input/496_X-ProII.jpg
35 | X-ProII/input/497_X-ProII.jpg
36 | X-ProII/input/494_X-ProII.jpg
37 | X-ProII/input/492_X-ProII.jpg
38 | X-ProII/input/493_X-ProII.jpg
39 | X-ProII/input/498_X-ProII.jpg
40 | X-ProII/input/490_X-ProII.jpg
41 | X-ProII/input/495_X-ProII.jpg
42 | X-ProII/input/491_X-ProII.jpg
43 | X-ProII/input/499_X-ProII.jpg
44 | X-ProII/input/489_X-ProII.jpg
45 | Nashville/input/499_Nashville.jpg
46 | Nashville/input/494_Nashville.jpg
47 | Nashville/input/492_Nashville.jpg
48 | Nashville/input/496_Nashville.jpg
49 | Nashville/input/497_Nashville.jpg
50 | Nashville/input/490_Nashville.jpg
51 | Nashville/input/489_Nashville.jpg
52 | Nashville/input/498_Nashville.jpg
53 | Nashville/input/495_Nashville.jpg
54 | Nashville/input/491_Nashville.jpg
55 | Nashville/input/493_Nashville.jpg
56 | Hudson/input/498_Hudson.jpg
57 | Hudson/input/494_Hudson.jpg
58 | Hudson/input/490_Hudson.jpg
59 | Hudson/input/499_Hudson.jpg
60 | Hudson/input/491_Hudson.jpg
61 | Hudson/input/489_Hudson.jpg
62 | Hudson/input/493_Hudson.jpg
63 | Hudson/input/495_Hudson.jpg
64 | Hudson/input/496_Hudson.jpg
65 | Hudson/input/497_Hudson.jpg
66 | Hudson/input/492_Hudson.jpg
67 | Valencia/input/492_Valencia.jpg
68 | Valencia/input/494_Valencia.jpg
69 | Valencia/input/491_Valencia.jpg
70 | Valencia/input/497_Valencia.jpg
71 | Valencia/input/489_Valencia.jpg
72 | Valencia/input/495_Valencia.jpg
73 | Valencia/input/493_Valencia.jpg
74 | Valencia/input/496_Valencia.jpg
75 | Valencia/input/498_Valencia.jpg
76 | Valencia/input/499_Valencia.jpg
77 | Valencia/input/490_Valencia.jpg
78 | Lo-Fi/input/499_Lo-Fi.jpg
79 | Lo-Fi/input/492_Lo-Fi.jpg
80 | Lo-Fi/input/494_Lo-Fi.jpg
81 | Lo-Fi/input/498_Lo-Fi.jpg
82 | Lo-Fi/input/491_Lo-Fi.jpg
83 | Lo-Fi/input/490_Lo-Fi.jpg
84 | Lo-Fi/input/496_Lo-Fi.jpg
85 | Lo-Fi/input/493_Lo-Fi.jpg
86 | Lo-Fi/input/489_Lo-Fi.jpg
87 | Lo-Fi/input/495_Lo-Fi.jpg
88 | Lo-Fi/input/497_Lo-Fi.jpg
89 | Toaster/input/494_Toaster.jpg
90 | Toaster/input/493_Toaster.jpg
91 | Toaster/input/499_Toaster.jpg
92 | Toaster/input/492_Toaster.jpg
93 | Toaster/input/495_Toaster.jpg
94 | Toaster/input/497_Toaster.jpg
95 | Toaster/input/490_Toaster.jpg
96 | Toaster/input/491_Toaster.jpg
97 | Toaster/input/489_Toaster.jpg
98 | Toaster/input/496_Toaster.jpg
99 | Toaster/input/498_Toaster.jpg
100 | Clarendon/input/494_Clarendon.jpg
101 | Clarendon/input/498_Clarendon.jpg
102 | Clarendon/input/493_Clarendon.jpg
103 | Clarendon/input/491_Clarendon.jpg
104 | Clarendon/input/499_Clarendon.jpg
105 | Clarendon/input/495_Clarendon.jpg
106 | Clarendon/input/490_Clarendon.jpg
107 | Clarendon/input/492_Clarendon.jpg
108 | Clarendon/input/489_Clarendon.jpg
109 | Clarendon/input/496_Clarendon.jpg
110 | Clarendon/input/497_Clarendon.jpg
111 | Sutro/input/496_Sutro.jpg
112 | Sutro/input/489_Sutro.jpg
113 | Sutro/input/494_Sutro.jpg
114 | Sutro/input/499_Sutro.jpg
115 | Sutro/input/495_Sutro.jpg
116 | Sutro/input/490_Sutro.jpg
117 | Sutro/input/491_Sutro.jpg
118 | Sutro/input/492_Sutro.jpg
119 | Sutro/input/497_Sutro.jpg
120 | Sutro/input/498_Sutro.jpg
121 | Sutro/input/493_Sutro.jpg
122 | Amaro/input/496_Amaro.jpg
123 | Amaro/input/489_Amaro.jpg
124 | Amaro/input/497_Amaro.jpg
125 | Amaro/input/494_Amaro.jpg
126 | Amaro/input/492_Amaro.jpg
127 | Amaro/input/493_Amaro.jpg
128 | Amaro/input/495_Amaro.jpg
129 | Amaro/input/491_Amaro.jpg
130 | Amaro/input/499_Amaro.jpg
131 | Amaro/input/498_Amaro.jpg
132 | Amaro/input/490_Amaro.jpg
133 | 1977/input/496_1977.jpg
134 | 1977/input/493_1977.jpg
135 | 1977/input/495_1977.jpg
136 | 1977/input/494_1977.jpg
137 | 1977/input/492_1977.jpg
138 | 1977/input/497_1977.jpg
139 | 1977/input/499_1977.jpg
140 | 1977/input/491_1977.jpg
141 | 1977/input/490_1977.jpg
142 | 1977/input/498_1977.jpg
143 | 1977/input/489_1977.jpg
144 | Perpetua/input/497_Perpetua.jpg
145 | Perpetua/input/499_Perpetua.jpg
146 | Perpetua/input/489_Perpetua.jpg
147 | Perpetua/input/492_Perpetua.jpg
148 | Perpetua/input/494_Perpetua.jpg
149 | Perpetua/input/493_Perpetua.jpg
150 | Perpetua/input/498_Perpetua.jpg
151 | Perpetua/input/496_Perpetua.jpg
152 | Perpetua/input/490_Perpetua.jpg
153 | Perpetua/input/491_Perpetua.jpg
154 | Perpetua/input/495_Perpetua.jpg
155 | Gingham/input/491_Gingham.jpg
156 | Gingham/input/495_Gingham.jpg
157 | Gingham/input/493_Gingham.jpg
158 | Gingham/input/490_Gingham.jpg
159 | Gingham/input/494_Gingham.jpg
160 | Gingham/input/496_Gingham.jpg
161 | Gingham/input/499_Gingham.jpg
162 | Gingham/input/498_Gingham.jpg
163 | Gingham/input/492_Gingham.jpg
164 | Gingham/input/489_Gingham.jpg
165 | Gingham/input/497_Gingham.jpg
166 |
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import torch.nn.functional as F
4 | import torchvision.utils as utils
5 | from math import log10
6 | from skimage import measure
7 | import cv2
8 | import skimage
9 | from skimage.measure import compare_psnr, compare_ssim
10 | import pdb
11 | 
12 | 
13 | def calc_psnr(im1, im2):
14 |
15 | im1 = im1[0].view(im1.shape[2],im1.shape[3],3).detach().cpu().numpy()
16 | im2 = im2[0].view(im2.shape[2],im2.shape[3],3).detach().cpu().numpy()
17 |
18 |
19 | im1_y = cv2.cvtColor(im1, cv2.COLOR_BGR2YCR_CB)[:, :, 0]
20 | im2_y = cv2.cvtColor(im2, cv2.COLOR_BGR2YCR_CB)[:, :, 0]
21 | ans = [compare_psnr(im1_y, im2_y)]
22 | return ans
23 |
24 | def calc_ssim(im1, im2):
25 | im1 = im1[0].view(im1.shape[2],im1.shape[3],3).detach().cpu().numpy()
26 | im2 = im2[0].view(im2.shape[2],im2.shape[3],3).detach().cpu().numpy()
27 |
28 | im1_y = cv2.cvtColor(im1, cv2.COLOR_BGR2YCR_CB)[:, :, 0]
29 | im2_y = cv2.cvtColor(im2, cv2.COLOR_BGR2YCR_CB)[:, :, 0]
30 | ans = [compare_ssim(im1_y, im2_y)]
31 | return ans
32 |
33 | def to_psnr(pred_image, gt):
34 | mse = F.mse_loss(pred_image, gt, reduction='none')
35 | mse_split = torch.split(mse, 1, dim=0)
36 | mse_list = [torch.mean(torch.squeeze(mse_split[ind])).item() for ind in range(len(mse_split))]
37 |
38 | intensity_max = 1.0
39 | psnr_list = [10.0 * log10(intensity_max / mse) for mse in mse_list]
40 | return psnr_list
41 |
42 |
43 | def to_ssim_skimage(pred_image, gt):
44 | pred_image_list = torch.split(pred_image, 1, dim=0)
45 | gt_list = torch.split(gt, 1, dim=0)
46 |
47 | pred_image_list_np = [pred_image_list[ind].permute(0, 2, 3, 1).data.cpu().numpy().squeeze() for ind in range(len(pred_image_list))]
48 | gt_list_np = [gt_list[ind].permute(0, 2, 3, 1).data.cpu().numpy().squeeze() for ind in range(len(pred_image_list))]
49 | ssim_list = [measure.compare_ssim(pred_image_list_np[ind], gt_list_np[ind], data_range=1, multichannel=True) for ind in range(len(pred_image_list))]
50 |
51 | return ssim_list
52 |
53 |
54 | def validation(net, val_data_loader, device, exp_name, save_tag=False):
55 |
56 | psnr_list = []
57 | ssim_list = []
58 |
59 | for batch_id, val_data in enumerate(val_data_loader):
60 |
61 | with torch.no_grad():
62 | input_im, gt, imgid = val_data
63 | input_im = input_im.to(device)
64 | gt = gt.to(device)
65 | pred_image = net(input_im)
66 |
67 | # --- Calculate the average PSNR --- #
68 | psnr_list.extend(calc_psnr(pred_image, gt))
69 |
70 | # --- Calculate the average SSIM --- #
71 | ssim_list.extend(calc_ssim(pred_image, gt))
72 |
73 | # --- Save image --- #
74 | if save_tag:
75 | # print()
76 | save_image(pred_image, imgid, exp_name)
77 |
78 | avr_psnr = sum(psnr_list) / len(psnr_list)
79 | avr_ssim = sum(ssim_list) / len(ssim_list)
80 | return avr_psnr, avr_ssim
81 |
82 |
83 | def validation_val(net, val_data_loader, device, exp_name, category, save_tag=False):
84 |
85 | psnr_list = []
86 | ssim_list = []
87 |
88 | for batch_id, val_data in enumerate(val_data_loader):
89 |
90 | with torch.no_grad():
91 | input_im, gt, imgid = val_data
92 | input_im = input_im.to(device)
93 | gt = gt.to(device)
94 | pred_image = net(input_im)
95 |
96 | # --- Calculate the average PSNR --- #
97 | psnr_list.extend(calc_psnr(pred_image, gt))
98 |
99 | # --- Calculate the average SSIM --- #
100 | ssim_list.extend(calc_ssim(pred_image, gt))
101 |
102 | # --- Save image --- #
103 | if save_tag:
104 | # print()
105 | save_image(pred_image, imgid, exp_name,category)
106 |
107 | avr_psnr = sum(psnr_list) / len(psnr_list)
108 | avr_ssim = sum(ssim_list) / len(ssim_list)
109 | return avr_psnr, avr_ssim
110 |
111 | def save_image(pred_image, image_name, exp_name, category):
112 | pred_image_images = torch.split(pred_image, 1, dim=0)
113 | batch_num = len(pred_image_images)
114 |
115 | for ind in range(batch_num):
116 | image_name_1 = image_name[ind].split('/')[-1]
117 | print(image_name_1)
118 | utils.save_image(pred_image_images[ind], './results/{}/{}/{}'.format(category,exp_name,image_name_1))
119 |
120 |
121 | def print_log(epoch, num_epochs, one_epoch_time, train_psnr, val_psnr, val_ssim, exp_name):
122 | print('({0:.0f}s) Epoch [{1}/{2}], Train_PSNR:{3:.2f}, Val_PSNR:{4:.2f}, Val_SSIM:{5:.4f}'
123 | .format(one_epoch_time, epoch, num_epochs, train_psnr, val_psnr, val_ssim))
124 |
125 | # --- Write the training log --- #
126 | with open('./training_log/{}_log.txt'.format( exp_name), 'a') as f:
127 | print('Date: {0}s, Time_Cost: {1:.0f}s, Epoch: [{2}/{3}], Train_PSNR: {4:.2f}, Val_PSNR: {5:.2f}, Val_SSIM: {6:.4f}'
128 | .format(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
129 | one_epoch_time, epoch, num_epochs, train_psnr, val_psnr, val_ssim), file=f)
130 |
131 |
132 |
133 | def adjust_learning_rate(optimizer, epoch, lr_decay=0.5):
134 |
135 | # --- Decay learning rate --- #
136 |     step = 100
137 | 
138 |     # decay the learning rate by `lr_decay` every `step` epochs
139 |
140 | if not epoch % step and epoch > 0:
141 | for param_group in optimizer.param_groups:
142 | param_group['lr'] *= lr_decay
143 |             print('Learning rate set to {}.'.format(param_group['lr']))
144 | else:
145 | for param_group in optimizer.param_groups:
146 |             print('Learning rate set to {}.'.format(param_group['lr']))
147 |
--------------------------------------------------------------------------------
/base_networks.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.nn.functional as F
4 | import math
5 | import torch
6 |
7 | from torch import nn
8 | from torch.nn import init
9 | from torch.nn import functional as F
10 | from torch.autograd import Function
11 |
12 | from math import sqrt
13 |
14 | import random
15 |
16 | class ConvBlock(torch.nn.Module):
17 | def __init__(self, input_size, output_size, kernel_size=3, stride=1, padding=1, bias=True, activation='prelu', norm=None):
18 | super(ConvBlock, self).__init__()
19 | self.conv = torch.nn.Conv2d(input_size, output_size, kernel_size, stride, padding, bias=bias)
20 |
21 | self.norm = norm
22 | if self.norm =='batch':
23 | self.bn = torch.nn.BatchNorm2d(output_size)
24 | elif self.norm == 'instance':
25 | self.bn = torch.nn.InstanceNorm2d(output_size)
26 |
27 | self.activation = activation
28 | if self.activation == 'relu':
29 | self.act = torch.nn.ReLU(True)
30 | elif self.activation == 'prelu':
31 | self.act = torch.nn.PReLU()
32 | elif self.activation == 'lrelu':
33 | self.act = torch.nn.LeakyReLU(0.2, True)
34 | elif self.activation == 'tanh':
35 | self.act = torch.nn.Tanh()
36 | elif self.activation == 'sigmoid':
37 | self.act = torch.nn.Sigmoid()
38 |
39 | def forward(self, x):
40 | if self.norm is not None:
41 | out = self.bn(self.conv(x))
42 | else:
43 | out = self.conv(x)
44 |
45 | if self.activation != 'no':
46 | return self.act(out)
47 | else:
48 | return out
49 |
50 | class DeconvBlock(torch.nn.Module):
51 | def __init__(self, input_size, output_size, kernel_size=4, stride=2, padding=1, bias=True, activation='prelu', norm=None):
52 | super(DeconvBlock, self).__init__()
53 | self.deconv = torch.nn.ConvTranspose2d(input_size, output_size, kernel_size, stride, padding, bias=bias)
54 |
55 | self.norm = norm
56 | if self.norm == 'batch':
57 | self.bn = torch.nn.BatchNorm2d(output_size)
58 | elif self.norm == 'instance':
59 | self.bn = torch.nn.InstanceNorm2d(output_size)
60 |
61 | self.activation = activation
62 | if self.activation == 'relu':
63 | self.act = torch.nn.ReLU(True)
64 | elif self.activation == 'prelu':
65 | self.act = torch.nn.PReLU()
66 | elif self.activation == 'lrelu':
67 | self.act = torch.nn.LeakyReLU(0.2, True)
68 | elif self.activation == 'tanh':
69 | self.act = torch.nn.Tanh()
70 | elif self.activation == 'sigmoid':
71 | self.act = torch.nn.Sigmoid()
72 |
73 | def forward(self, x):
74 | if self.norm is not None:
75 | out = self.bn(self.deconv(x))
76 | else:
77 | out = self.deconv(x)
78 |
79 | if self.activation is not None:
80 | return self.act(out)
81 | else:
82 | return out
83 |
84 |
85 | class ConvLayer(nn.Module):
86 | def __init__(self, in_channels, out_channels, kernel_size, stride, padding):
87 | super(ConvLayer, self).__init__()
88 | # reflection_padding = kernel_size // 2
89 | # self.reflection_pad = nn.ReflectionPad2d(reflection_padding)
90 | self.conv2d = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding)
91 |
92 | def forward(self, x):
93 | # out = self.reflection_pad(x)
94 | out = self.conv2d(x)
95 | return out
96 |
97 |
98 | class UpsampleConvLayer(torch.nn.Module):
99 | def __init__(self, in_channels, out_channels, kernel_size, stride):
100 | super(UpsampleConvLayer, self).__init__()
101 | self.conv2d = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride=stride, padding=1)
102 |
103 | def forward(self, x):
104 | out = self.conv2d(x)
105 | return out
106 |
107 |
108 | class ResidualBlock(torch.nn.Module):
109 | def __init__(self, channels):
110 | super(ResidualBlock, self).__init__()
111 | self.conv1 = ConvLayer(channels, channels, kernel_size=3, stride=1, padding=1)
112 | self.conv2 = ConvLayer(channels, channels, kernel_size=3, stride=1, padding=1)
113 | self.relu = nn.ReLU()
114 |
115 | def forward(self, x):
116 | residual = x
117 | out = self.relu(self.conv1(x))
118 | out = self.conv2(out) * 0.1
119 | out = torch.add(out, residual)
120 | return out
121 |
122 |
123 |
124 |
125 |
126 | def init_linear(linear):
127 |     init.xavier_normal_(linear.weight)
128 | linear.bias.data.zero_()
129 |
130 |
131 | def init_conv(conv, glu=True):
132 |     init.kaiming_normal_(conv.weight)
133 | if conv.bias is not None:
134 | conv.bias.data.zero_()
135 |
136 |
137 | class EqualLR:
138 | def __init__(self, name):
139 | self.name = name
140 |
141 | def compute_weight(self, module):
142 | weight = getattr(module, self.name + '_orig')
143 | fan_in = weight.data.size(1) * weight.data[0][0].numel()
144 |
145 | return weight * sqrt(2 / fan_in)
146 |
147 | @staticmethod
148 | def apply(module, name):
149 | fn = EqualLR(name)
150 |
151 | weight = getattr(module, name)
152 | del module._parameters[name]
153 | module.register_parameter(name + '_orig', nn.Parameter(weight.data))
154 | module.register_forward_pre_hook(fn)
155 |
156 | return fn
157 |
158 | def __call__(self, module, input):
159 | weight = self.compute_weight(module)
160 | setattr(module, self.name, weight)
161 |
162 |
163 | def equal_lr(module, name='weight'):
164 | EqualLR.apply(module, name)
165 |
166 | return module
--------------------------------------------------------------------------------
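ConvBlock and DeconvBlock wrap a convolution (or transposed convolution) with optional batch/instance normalization and a selectable activation, and equal_lr applies the equalized-learning-rate trick: the raw parameter is stored as weight_orig and rescaled by sqrt(2 / fan_in) in a forward pre-hook. A minimal usage sketch (the layer sizes and input shape are arbitrary, not taken from the repo):

    import torch
    import torch.nn as nn
    from base_networks import ConvBlock, equal_lr

    block = ConvBlock(3, 64, kernel_size=3, stride=1, padding=1, norm='batch')
    x = torch.randn(2, 3, 64, 64)
    y = block(x)                                   # -> [2, 64, 64, 64], BatchNorm2d + PReLU applied

    conv = equal_lr(nn.Conv2d(64, 64, kernel_size=3, padding=1))
    print('weight_orig' in dict(conv.named_parameters()))   # True: weight is recomputed from weight_orig each forward pass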
/train_data_functions.py:
--------------------------------------------------------------------------------
1 | import torch.utils.data as data
2 | from PIL import Image
3 | from random import randrange
4 | from torchvision.transforms import Compose, ToTensor, Normalize
5 | import re
6 | from PIL import ImageFile
7 | from os import path
8 | import numpy as np
9 | import torch
10 | ImageFile.LOAD_TRUNCATED_IMAGES = True
11 |
12 | # --- Training dataset --- #
13 | class TrainData(data.Dataset):
14 | def __init__(self, crop_size, train_data_dir,train_filename):
15 | super().__init__()
16 | train_list = train_data_dir + train_filename
17 | with open(train_list) as f:
18 | contents = f.readlines()
19 | input_names = [i.strip() for i in contents]
20 | gt_names = [i.strip().replace('input','gt') for i in input_names]
21 |
22 | self.input_names = input_names
23 | self.gt_names = gt_names
24 | self.crop_size = crop_size
25 | self.train_data_dir = train_data_dir
26 |
27 | def get_images(self, index):
28 | crop_width, crop_height = self.crop_size
29 | input_name = self.input_names[index]
30 | gt_name = self.gt_names[index]
31 |
32 | img_id = re.split('/',input_name)[-1][:-4]
33 |
34 | input_img = Image.open(self.train_data_dir + input_name)
35 |
36 |
37 | try:
38 | gt_img = Image.open(self.train_data_dir + gt_name)
39 | except:
40 | gt_img = Image.open(self.train_data_dir + gt_name).convert('RGB')
41 |
42 | width, height = input_img.size
43 |
44 | if width < crop_width and height < crop_height :
45 | input_img = input_img.resize((crop_width,crop_height), Image.ANTIALIAS)
46 | gt_img = gt_img.resize((crop_width, crop_height), Image.ANTIALIAS)
47 | elif width < crop_width :
48 | input_img = input_img.resize((crop_width,height), Image.ANTIALIAS)
49 | gt_img = gt_img.resize((crop_width,height), Image.ANTIALIAS)
50 | elif height < crop_height :
51 | input_img = input_img.resize((width,crop_height), Image.ANTIALIAS)
52 | gt_img = gt_img.resize((width, crop_height), Image.ANTIALIAS)
53 |
54 | width, height = input_img.size
55 |
56 | x, y = randrange(0, width - crop_width + 1), randrange(0, height - crop_height + 1)
57 | input_crop_img = input_img.crop((x, y, x + crop_width, y + crop_height))
58 | gt_crop_img = gt_img.crop((x, y, x + crop_width, y + crop_height))
59 |
60 | # --- Transform to tensor --- #
61 | transform_input = Compose([ToTensor(), Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
62 | transform_gt = Compose([ToTensor()])
63 | input_im = transform_input(input_crop_img)
64 | gt = transform_gt(gt_crop_img)
65 |
66 | # --- Check the channel is 3 or not --- #
67 |         if input_im.shape[0] != 3 or gt.shape[0] != 3:
68 | raise Exception('Bad image channel: {}'.format(gt_name))
69 |
70 | return input_im, gt, img_id
71 |
72 | def __getitem__(self, index):
73 | res = self.get_images(index)
74 | return res
75 |
76 | def __len__(self):
77 | return len(self.input_names)
78 |
79 | class TrainData_new(data.Dataset):
80 | def __init__(self, crop_size, train_data_dir,train_filename):
81 | super().__init__()
82 | train_list = train_data_dir + train_filename
83 | with open(train_list) as f:
84 | contents = f.readlines()
85 | input_names = [i.strip() for i in contents]
86 | gt_names = [i.strip().replace('input','gt') for i in input_names]
87 |
88 | self.input_names = input_names
89 | self.gt_names = gt_names
90 | self.crop_size = crop_size
91 | self.train_data_dir = train_data_dir
92 |
93 | def get_images(self, index):
94 | crop_width, crop_height = self.crop_size
95 | input_name = self.input_names[index]
96 | gt_name = self.gt_names[index]
97 | img_id = re.split('/',input_name)[-1][:-4]
98 |
99 | input_img = Image.open(self.train_data_dir + input_name)
100 |
101 |
102 | try:
103 | gt_img = Image.open(self.train_data_dir + gt_name)
104 | except:
105 | gt_img = Image.open(self.train_data_dir + gt_name).convert('RGB')
106 |
107 | width, height = input_img.size
108 | tmp_ch = 0
109 |
110 | if width < crop_width and height < crop_height :
111 | input_img = input_img.resize((crop_width,crop_height), Image.ANTIALIAS)
112 | gt_img = gt_img.resize((crop_width, crop_height), Image.ANTIALIAS)
113 | elif width < crop_width :
114 | input_img = input_img.resize((crop_width,height), Image.ANTIALIAS)
115 | gt_img = gt_img.resize((crop_width,height), Image.ANTIALIAS)
116 | elif height < crop_height :
117 | input_img = input_img.resize((width,crop_height), Image.ANTIALIAS)
118 | gt_img = gt_img.resize((width, crop_height), Image.ANTIALIAS)
119 |
120 | width, height = input_img.size
121 | # --- x,y coordinate of left-top corner --- #
122 | x, y = randrange(0, width - crop_width + 1), randrange(0, height - crop_height + 1)
123 | input_crop_img = input_img.crop((x, y, x + crop_width, y + crop_height))
124 |         gt_crop_img = gt_img.crop((x, y, x + crop_width, y + crop_height))
125 | # --- Transform to tensor --- #
126 | transform_input = Compose([ToTensor(), Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
127 | transform_gt = Compose([ToTensor()])
128 |
129 | input_im = transform_input(input_crop_img)
130 | gt = transform_gt(gt_crop_img)
131 |
132 |
133 | # --- Check the channel is 3 or not --- #
134 | # print(input_im.shape)
135 |         if input_im.shape[0] != 3 or gt.shape[0] != 3:
136 | raise Exception('Bad image channel: {}'.format(gt_name))
137 |
138 |
139 |         return input_im, gt, img_id
140 |
141 | def __getitem__(self, index):
142 | res = self.get_images(index)
143 | return res
144 |
145 | def __len__(self):
146 | return len(self.input_names)
--------------------------------------------------------------------------------
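TrainData reads the list file, derives each ground-truth path by replacing 'input' with 'gt', random-crops both images to crop_size, and normalizes the input to [-1, 1] (ToTensor followed by Normalize with mean/std 0.5) while the ground truth stays in [0, 1]. A minimal usage sketch, assuming the list file and the input/gt folders are laid out under ./data/train/ as described in train.py:

    from torch.utils.data import DataLoader
    from train_data_functions import TrainData

    train_set = TrainData([256, 256], './data/train/', 'allfilter.txt')
    loader = DataLoader(train_set, batch_size=18, shuffle=True, num_workers=8)

    input_im, gt, img_id = next(iter(loader))
    # input_im, gt: [18, 3, 256, 256]; input_im lies in [-1, 1], gt in [0, 1]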
/train-individual.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | import torch.nn.functional as F
6 | import matplotlib.pyplot as plt
7 | from torch.utils.data import DataLoader
8 | from train_data_functions import TrainData
9 | from val_data_functions import ValData
10 | from utils import to_psnr, print_log, validation, adjust_learning_rate
11 | from torchvision.models import vgg16
12 | from perceptual import LossNetwork
13 | import os
14 | import numpy as np
15 | import random
16 |
17 | from transweather_model import Transweather,Transweather_base
18 |
19 | plt.switch_backend('agg')
20 |
21 | # --- Parse hyper-parameters --- #
22 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
23 | parser.add_argument('-learning_rate', help='Set the learning rate', default=2e-4, type=float)
24 | parser.add_argument('-crop_size', help='Set the crop_size', default=[256, 256], nargs='+', type=int)
25 | parser.add_argument('-train_batch_size', help='Set the training batch size', default=18, type=int)
26 | parser.add_argument('-epoch_start', help='Starting epoch number of the training', default=0, type=int)
27 | parser.add_argument('-lambda_loss', help='Set the lambda in loss function', default=0.04, type=float)
28 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
29 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
30 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
31 | parser.add_argument('-num_epochs', help='number of epochs', default=200, type=int)
32 |
33 | args = parser.parse_args()
34 |
35 | learning_rate = args.learning_rate
36 | crop_size = args.crop_size
37 | train_batch_size = args.train_batch_size
38 | epoch_start = args.epoch_start
39 | lambda_loss = args.lambda_loss
40 | val_batch_size = args.val_batch_size
41 | exp_name = args.exp_name
42 | num_epochs = args.num_epochs
43 |
44 |
45 | #set seed
46 | seed = args.seed
47 | if seed is not None:
48 | np.random.seed(seed)
49 | torch.manual_seed(seed)
50 | torch.cuda.manual_seed(seed)
51 | random.seed(seed)
52 | print('Seed:\t{}'.format(seed))
53 |
54 | print('--- Hyper-parameters for training ---')
55 | print('learning_rate: {}\ncrop_size: {}\ntrain_batch_size: {}\nval_batch_size: {}\nlambda_loss: {}'.format(learning_rate, crop_size,
56 | train_batch_size, val_batch_size, lambda_loss))
57 |
58 |
59 | train_data_dir = './data/train/'
60 | val_data_dir = './data/test/'
61 |
62 | # --- Gpu device --- #
63 | device_ids = [Id for Id in range(torch.cuda.device_count())]
64 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
65 |
66 |
67 | # --- Define the network --- #
68 | net = Transweather_base()
69 |
70 |
71 | # --- Build optimizer --- #
72 | optimizer = torch.optim.Adam(net.parameters(), lr=learning_rate)
73 |
74 |
75 | # --- Multi-GPU --- #
76 | net = net.to(device)
77 | net = nn.DataParallel(net, device_ids=device_ids)
78 |
79 |
80 | # --- Define the perceptual loss network --- #
81 | vgg_model = vgg16(pretrained=True).features[:16]
82 | vgg_model = vgg_model.to(device)
83 | # vgg_model = nn.DataParallel(vgg_model, device_ids=device_ids)
84 | for param in vgg_model.parameters():
85 | param.requires_grad = False
86 |
87 | # --- Load the network weight --- #
88 | if os.path.exists('./{}/'.format(exp_name))==False:
89 | os.mkdir('./{}/'.format(exp_name))
90 | try:
91 | net.load_state_dict(torch.load('./{}/best'.format(exp_name)))
92 | print('--- weight loaded ---')
93 | except:
94 | print('--- no weight loaded ---')
95 |
96 |
97 | loss_network = LossNetwork(vgg_model)
98 | loss_network.eval()
99 |
100 | # --- Load training data and validation/test data --- #
101 |
102 | ### The following file should be placed inside the directory "./data/train/"
103 |
104 | labeled_name = '1977.txt' # Change this based on the dataset you choose to train on
105 |
106 | ### The following files should be placed inside the directory "./data/test/"
107 |
108 | val_filename1 = '1977.txt' # Change this based on the dataset you choose to test on
109 |
110 | # --- Load training data and validation/test data --- #
111 | lbl_train_data_loader = DataLoader(TrainData(crop_size, train_data_dir,labeled_name), batch_size=train_batch_size, shuffle=True, num_workers=8)
112 | val_data_loader1 = DataLoader(ValData(val_data_dir,val_filename1), batch_size=val_batch_size, shuffle=False, num_workers=8)
113 |
114 | # --- Previous PSNR and SSIM in testing --- #
115 | net.eval()
116 |
117 |
118 | old_val_psnr1, old_val_ssim1 = validation(net, val_data_loader1, device, exp_name)
119 |
120 | print('1977 filter old_val_psnr: {0:.2f}, old_val_ssim: {1:.4f}'.format(old_val_psnr1, old_val_ssim1))
121 |
122 | net.train()
123 |
124 | for epoch in range(epoch_start,num_epochs):
125 | psnr_list = []
126 | start_time = time.time()
127 | adjust_learning_rate(optimizer, epoch)
128 | #-------------------------------------------------------------------------------------------------------------
129 | for batch_id, train_data in enumerate(lbl_train_data_loader):
130 |
131 | input_image, gt, imgid = train_data
132 | input_image = input_image.to(device)
133 | gt = gt.to(device)
134 |
135 | # --- Zero the parameter gradients --- #
136 | optimizer.zero_grad()
137 |
138 | # --- Forward + Backward + Optimize --- #
139 | net.train()
140 | pred_image = net(input_image)
141 |
142 | smooth_loss = F.smooth_l1_loss(pred_image, gt)
143 | perceptual_loss = loss_network(pred_image, gt)
144 |
145 | loss = smooth_loss + lambda_loss*perceptual_loss
146 |
147 | loss.backward()
148 | optimizer.step()
149 |
150 | # --- To calculate average PSNR --- #
151 | psnr_list.extend(to_psnr(pred_image, gt))
152 |
153 | if not (batch_id % 100):
154 | print('Epoch: {0}, Iteration: {1}'.format(epoch, batch_id))
155 |
156 | # --- Calculate the average training PSNR in one epoch --- #
157 | train_psnr = sum(psnr_list) / len(psnr_list)
158 |
159 | # --- Save the network parameters --- #
160 | torch.save(net.state_dict(), './{}/latest'.format(exp_name))
161 |
162 | # --- Use the evaluation model in testing --- #
163 | net.eval()
164 |
165 | val_psnr1, val_ssim1 = validation(net, val_data_loader1, device, exp_name)
166 |
167 | one_epoch_time = time.time() - start_time
168 |
169 | print("1977 filter")
170 | print_log(epoch+1, num_epochs, one_epoch_time, train_psnr, val_psnr1, val_ssim1, exp_name)
171 |
172 | # --- update the network weight --- #
173 | if val_psnr1 >= old_val_psnr1:
174 | torch.save(net.state_dict(), './{}/best'.format(exp_name))
175 | print('model saved')
176 | old_val_psnr1 = val_psnr1
177 |
--------------------------------------------------------------------------------
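train-individual.py optimizes a composite objective: a smooth L1 pixel loss plus a VGG16-feature perceptual loss weighted by lambda_loss (0.04 by default). A minimal sketch of that objective in isolation, reusing the repo's LossNetwork; restoration_loss is only an illustrative name, and pred and gt are assumed to be [N, 3, H, W] tensors on the same device:

    import torch.nn.functional as F
    from torchvision.models import vgg16
    from perceptual import LossNetwork

    vgg_features = vgg16(pretrained=True).features[:16].eval()
    for p in vgg_features.parameters():
        p.requires_grad = False
    loss_network = LossNetwork(vgg_features)

    def restoration_loss(pred, gt, lambda_loss=0.04):
        # pixel term plus weighted VGG-feature (perceptual) term, as in the training loop above
        return F.smooth_l1_loss(pred, gt) + lambda_loss * loss_network(pred, gt)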
/train.py:
--------------------------------------------------------------------------------
1 | import time
2 | from cosine_annealing import CosineAnnealingWarmUpRestarts
3 | import torch
4 | import argparse
5 | import torch.nn as nn
6 | import torch.nn.functional as F
7 | import matplotlib.pyplot as plt
8 | from torch.utils.data import DataLoader
9 | from train_data_functions import TrainData
10 | from val_data_functions import ValData
11 | from utils import to_psnr, print_log, validation, adjust_learning_rate
12 | from torchvision.models import vgg16
13 | from perceptual import LossNetwork
14 | import os
15 | import numpy as np
16 | import random
17 |
18 | from transweather_model import Transweather
19 |
20 | plt.switch_backend('agg')
21 |
22 | # --- Parse hyper-parameters --- #
23 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
24 | parser.add_argument('-learning_rate', help='Set the learning rate', default=2e-4, type=float)
25 | parser.add_argument('-crop_size', help='Set the crop_size', default=[256, 256], nargs='+', type=int)
26 | parser.add_argument('-train_batch_size', help='Set the training batch size', default=18, type=int)
27 | parser.add_argument('-epoch_start', help='Starting epoch number of the training', default=0, type=int)
28 | parser.add_argument('-lambda_loss', help='Set the lambda in loss function', default=0.04, type=float)
29 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
30 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
31 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
32 | parser.add_argument('-num_epochs', help='number of epochs', default=200, type=int)
33 |
34 | args = parser.parse_args()
35 |
36 | learning_rate = args.learning_rate
37 | crop_size = args.crop_size
38 | train_batch_size = args.train_batch_size
39 | epoch_start = args.epoch_start
40 | lambda_loss = args.lambda_loss
41 | val_batch_size = args.val_batch_size
42 | exp_name = args.exp_name
43 | num_epochs = args.num_epochs
44 |
45 |
46 | #set seed
47 | seed = args.seed
48 | if seed is not None:
49 | np.random.seed(seed)
50 | torch.manual_seed(seed)
51 | torch.cuda.manual_seed(seed)
52 | random.seed(seed)
53 | print('Seed:\t{}'.format(seed))
54 |
55 | print('--- Hyper-parameters for training ---')
56 | print('learning_rate: {}\ncrop_size: {}\ntrain_batch_size: {}\nval_batch_size: {}\nlambda_loss: {}'.format(learning_rate, crop_size,
57 | train_batch_size, val_batch_size, lambda_loss))
58 |
59 |
60 | train_data_dir = './data/train/'
61 | val_data_dir = './data/test/'
62 |
63 | # --- Gpu device --- #
64 | device_ids = [Id for Id in range(torch.cuda.device_count())]
65 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
66 |
67 |
68 | # --- Define the network --- #
69 | net = Transweather()
70 |
71 |
72 | # --- Build optimizer --- #
73 | optimizer = torch.optim.Adam(net.parameters(), lr=learning_rate)
74 |
75 |
76 | # --- Multi-GPU --- #
77 | net = net.to(device)
78 | net = nn.DataParallel(net, device_ids=device_ids)
79 |
80 |
81 | # --- Define the perceptual loss network --- #
82 | vgg_model = vgg16(pretrained=True).features[:16]
83 | vgg_model = vgg_model.to(device)
84 | # vgg_model = nn.DataParallel(vgg_model, device_ids=device_ids)
85 | for param in vgg_model.parameters():
86 | param.requires_grad = False
87 |
88 | # --- Load the network weight --- #
89 | if not os.path.exists('./weights/{}/'.format(exp_name)):
90 |     os.makedirs('./weights/{}/'.format(exp_name))
91 | try:
92 | net.load_state_dict(torch.load('./weights/{}/best'.format(exp_name)))
93 | print('--- weight loaded ---')
94 | except:
95 | print('--- no weight loaded ---')
96 |
97 |
98 | # pytorch_total_params = sum(p.numel() for p in net.parameters() if p.requires_grad)
99 | # print("Total_params: {}".format(pytorch_total_params))
100 | loss_network = LossNetwork(vgg_model)
101 | loss_network.eval()
102 |
103 | # --- Load training data and validation/test data --- #
104 |
105 | ### The following file should be placed inside the directory "./data/train/"
106 |
107 | labeled_name = 'allfilter.txt'
108 |
109 | ### The following files should be placed inside the directory "./data/test/"
110 |
111 | # val_filename = 'val_list_rain800.txt'
112 | val_filename1 = 'allfilter.txt'
113 |
114 | # --- Load training data and validation/test data --- #
115 | lbl_train_data_loader = DataLoader(TrainData(crop_size, train_data_dir,labeled_name), batch_size=train_batch_size, shuffle=True, num_workers=8)
116 |
117 | ## Uncomment the other validation data loader to keep an eye on performance
118 | ## but note that validating while training significantly increases the train time
119 |
120 | # val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
121 | val_data_loader1 = DataLoader(ValData(val_data_dir,val_filename1), batch_size=val_batch_size, shuffle=False, num_workers=8)
122 |
123 |
124 | # --- Previous PSNR and SSIM in testing --- #
125 | net.eval()
126 |
127 | ################ Note########################
128 |
129 | ## Uncomment the other validation data loader to keep an eye on performance
130 | ## but note that validating while training significantly increases the test time
131 |
132 | # old_val_psnr, old_val_ssim = validation(net, val_data_loader, device, exp_name)
133 | old_val_psnr1, old_val_ssim1 = validation(net, val_data_loader1, device, exp_name)
134 |
135 | # print('Rain 800 old_val_psnr: {0:.2f}, old_val_ssim: {1:.4f}'.format(old_val_psnr, old_val_ssim))
136 | print('old_val_psnr: {0:.2f}, old_val_ssim: {1:.4f}'.format(old_val_psnr1, old_val_ssim1))
137 |
138 | net.train()
139 | # scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.95)
140 | scheduler = CosineAnnealingWarmUpRestarts(optimizer, T_0=100, T_mult=1, eta_max=0.1, T_up=10, gamma=0.5)
141 |
142 | for epoch in range(epoch_start,num_epochs):
143 | psnr_list = []
144 | start_time = time.time()
145 | # adjust_learning_rate(optimizer, epoch)
146 | scheduler.step()
147 | print('Epoch-{0} lr: {1}'.format(epoch, optimizer.param_groups[0]['lr']))
148 | #-------------------------------------------------------------------------------------------------------------
149 | for batch_id, train_data in enumerate(lbl_train_data_loader):
150 |
151 | input_image, gt, imgid = train_data
152 | input_image = input_image.to(device)
153 | gt = gt.to(device)
154 |
155 | # --- Zero the parameter gradients --- #
156 | optimizer.zero_grad()
157 |
158 | # --- Forward + Backward + Optimize --- #
159 | net.train()
160 | pred_image = net(input_image)
161 |
162 | smooth_loss = F.smooth_l1_loss(pred_image, gt)
163 | perceptual_loss = loss_network(pred_image, gt)
164 |
165 | loss = smooth_loss + lambda_loss*perceptual_loss
166 |
167 | loss.backward()
168 | optimizer.step()
169 |
170 | # --- To calculate average PSNR --- #
171 | psnr_list.extend(to_psnr(pred_image, gt))
172 |
173 | if not (batch_id % 100):
174 | print('Epoch: {0}, Iteration: {1}'.format(epoch, batch_id))
175 |
176 | # --- Calculate the average training PSNR in one epoch --- #
177 | train_psnr = sum(psnr_list) / len(psnr_list)
178 |
179 | # --- Save the network parameters --- #
180 | torch.save(net.state_dict(), './weights/{}/latest'.format(exp_name))
181 |
182 | # --- Use the evaluation model in testing --- #
183 | net.eval()
184 |
185 | # val_psnr, val_ssim = validation(net, val_data_loader, device, exp_name)
186 | val_psnr1, val_ssim1 = validation(net, val_data_loader1, device, exp_name)
187 |
188 | one_epoch_time = time.time() - start_time
189 | # print("Rain 800")
190 | # print_log(epoch+1, num_epochs, one_epoch_time, train_psnr, val_psnr, val_ssim, exp_name)
191 | print("allfilter")
192 | print_log(epoch+1, num_epochs, one_epoch_time, train_psnr, val_psnr1, val_ssim1, exp_name)
193 |
194 | # --- update the network weight --- #
195 | if (val_psnr1) >= (old_val_psnr1):
196 | torch.save(net.state_dict(), './weights/{}/best'.format(exp_name))
197 | print('model saved')
198 | old_val_psnr1 = val_psnr1
199 |
200 | # Note that the best model is selected based on the allfilter validation set.
--------------------------------------------------------------------------------
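train.py drives the learning rate with CosineAnnealingWarmUpRestarts from cosine_annealing.py: a linear warm-up over T_up epochs towards eta_max, a cosine decay over the remainder of each T_0-epoch cycle, and a peak scaled by gamma after every restart. A sketch for inspecting the resulting schedule offline, assuming cosine_annealing.py exposes the same constructor signature used above; the single dummy parameter exists only so the optimizer has something to hold:

    import torch
    from cosine_annealing import CosineAnnealingWarmUpRestarts

    param = torch.nn.Parameter(torch.zeros(1))         # dummy parameter, only the lr matters here
    optimizer = torch.optim.Adam([param], lr=2e-4)
    scheduler = CosineAnnealingWarmUpRestarts(optimizer, T_0=100, T_mult=1,
                                              eta_max=0.1, T_up=10, gamma=0.5)

    lrs = []
    for epoch in range(200):
        scheduler.step()
        lrs.append(optimizer.param_groups[0]['lr'])
    # lrs traces the warm-up / cosine / restart shape; plot it to sanity-check T_0, T_up, eta_max and gamma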
/train_with_visdom.py:
--------------------------------------------------------------------------------
1 | import time
2 | import torch
3 | import argparse
4 | import torch.nn as nn
5 | import torch.nn.functional as F
6 | import matplotlib.pyplot as plt
7 | from torch.utils.data import DataLoader
8 | from train_data_functions import TrainData
9 | from utils.visualizer import Visualizer  # Visualizer is defined in utils/visualizer.py
10 | from val_data_functions import ValData
11 | from utils import to_psnr, print_log, validation, adjust_learning_rate
12 | from torchvision.models import vgg16
13 | from perceptual import LossNetwork
14 | import os
15 | import numpy as np
16 | import random
17 |
18 | from transweather_model import Transweather
19 |
20 | plt.switch_backend('agg')
21 | viz = Visualizer()
22 |
23 | # --- Parse hyper-parameters --- #
24 | parser = argparse.ArgumentParser(description='Hyper-parameters for network')
25 | parser.add_argument('-learning_rate', help='Set the learning rate', default=2e-4, type=float)
26 | parser.add_argument('-crop_size', help='Set the crop_size', default=[256, 256], nargs='+', type=int)
27 | parser.add_argument('-train_batch_size', help='Set the training batch size', default=18, type=int)
28 | parser.add_argument('-epoch_start', help='Starting epoch number of the training', default=0, type=int)
29 | parser.add_argument('-lambda_loss', help='Set the lambda in loss function', default=0.04, type=float)
30 | parser.add_argument('-val_batch_size', help='Set the validation/test batch size', default=1, type=int)
31 | parser.add_argument('-exp_name', help='directory for saving the networks of the experiment', type=str)
32 | parser.add_argument('-seed', help='set random seed', default=19, type=int)
33 | parser.add_argument('-num_epochs', help='number of epochs', default=200, type=int)
34 |
35 | args = parser.parse_args()
36 |
37 | learning_rate = args.learning_rate
38 | crop_size = args.crop_size
39 | train_batch_size = args.train_batch_size
40 | epoch_start = args.epoch_start
41 | lambda_loss = args.lambda_loss
42 | val_batch_size = args.val_batch_size
43 | exp_name = args.exp_name
44 | num_epochs = args.num_epochs
45 |
46 |
47 | #set seed
48 | seed = args.seed
49 | if seed is not None:
50 | np.random.seed(seed)
51 | torch.manual_seed(seed)
52 | torch.cuda.manual_seed(seed)
53 | random.seed(seed)
54 | print('Seed:\t{}'.format(seed))
55 |
56 | print('--- Hyper-parameters for training ---')
57 | print('learning_rate: {}\ncrop_size: {}\ntrain_batch_size: {}\nval_batch_size: {}\nlambda_loss: {}'.format(learning_rate, crop_size,
58 | train_batch_size, val_batch_size, lambda_loss))
59 |
60 |
61 | train_data_dir = './data/train/'
62 | val_data_dir = './data/test/'
63 |
64 | # --- Gpu device --- #
65 | device_ids = [Id for Id in range(torch.cuda.device_count())]
66 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
67 |
68 |
69 | # --- Define the network --- #
70 | net = Transweather()
71 |
72 |
73 | # --- Build optimizer --- #
74 | optimizer = torch.optim.Adam(net.parameters(), lr=learning_rate)
75 |
76 |
77 | # --- Multi-GPU --- #
78 | net = net.to(device)
79 | net = nn.DataParallel(net, device_ids=device_ids)
80 |
81 |
82 | # --- Define the perceptual loss network --- #
83 | vgg_model = vgg16(pretrained=True).features[:16]
84 | vgg_model = vgg_model.to(device)
85 | # vgg_model = nn.DataParallel(vgg_model, device_ids=device_ids)
86 | for param in vgg_model.parameters():
87 | param.requires_grad = False
88 |
89 | # --- Load the network weight --- #
90 | if not os.path.exists('./weights/{}/'.format(exp_name)):
91 |     os.makedirs('./weights/{}/'.format(exp_name))
92 | try:
93 | net.load_state_dict(torch.load('./weights/{}/best'.format(exp_name)))
94 | print('--- weight loaded ---')
95 | except:
96 | print('--- no weight loaded ---')
97 |
98 |
99 | # pytorch_total_params = sum(p.numel() for p in net.parameters() if p.requires_grad)
100 | # print("Total_params: {}".format(pytorch_total_params))
101 | loss_network = LossNetwork(vgg_model)
102 | loss_network.eval()
103 |
104 | # --- Load training data and validation/test data --- #
105 |
106 | ### The following file should be placed inside the directory "./data/train/"
107 |
108 | labeled_name = 'allfilter.txt'
109 |
110 | ### The following files should be placed inside the directory "./data/test/"
111 |
112 | # val_filename = 'val_list_rain800.txt'
113 | val_filename1 = 'allfilter.txt'
114 |
115 | # --- Load training data and validation/test data --- #
116 | lbl_train_data_loader = DataLoader(TrainData(crop_size, train_data_dir,labeled_name), batch_size=train_batch_size, shuffle=True, num_workers=8)
117 |
118 | ## Uncomment the other validation data loader to keep an eye on performance
119 | ## but note that validating while training significantly increases the train time
120 |
121 | # val_data_loader = DataLoader(ValData(val_data_dir,val_filename), batch_size=val_batch_size, shuffle=False, num_workers=8)
122 | val_data_loader1 = DataLoader(ValData(val_data_dir,val_filename1), batch_size=val_batch_size, shuffle=False, num_workers=8)
123 |
124 |
125 | # --- Previous PSNR and SSIM in testing --- #
126 | net.eval()
127 |
128 | ################ Note########################
129 |
130 | ## Uncomment the other validation data loader to keep an eye on performance
131 | ## but note that validating while training significantly increases the test time
132 |
133 | # old_val_psnr, old_val_ssim = validation(net, val_data_loader, device, exp_name)
134 | old_val_psnr1, old_val_ssim1 = validation(net, val_data_loader1, device, exp_name)
135 |
136 | # print('Rain 800 old_val_psnr: {0:.2f}, old_val_ssim: {1:.4f}'.format(old_val_psnr, old_val_ssim))
137 | print('old_val_psnr: {0:.2f}, old_val_ssim: {1:.4f}'.format(old_val_psnr1, old_val_ssim1))
138 |
139 | net.train()
140 |
141 | for epoch in range(epoch_start,num_epochs):
142 | psnr_list = []
143 | start_time = time.time()
144 | adjust_learning_rate(optimizer, epoch)
145 | #-------------------------------------------------------------------------------------------------------------
146 | iter_num = 0
147 | for batch_id, train_data in enumerate(lbl_train_data_loader):
148 | image_list = dict()
149 | input_image, gt, imgid = train_data
150 |         image_list[imgid[0] + "_input"] = input_image   # imgid is a batch of image ids; use the first as the display key
151 |         image_list[imgid[0] + "_gt"] = gt
152 | input_image = input_image.to(device)
153 | gt = gt.to(device)
154 |
155 | # --- Zero the parameter gradients --- #
156 | optimizer.zero_grad()
157 |
158 | # --- Forward + Backward + Optimize --- #
159 | net.train()
160 | pred_image = net(input_image)
161 |
162 |         image_list[imgid[0] + "_pred"] = pred_image
163 |
164 | smooth_loss = F.smooth_l1_loss(pred_image, gt)
165 | perceptual_loss = loss_network(pred_image, gt)
166 |
167 | viz.image_display(image_list, iter_num)
168 |
169 | loss = smooth_loss + lambda_loss*perceptual_loss
170 |
171 | loss.backward()
172 | optimizer.step()
173 |
174 | # --- To calculate average PSNR --- #
175 | psnr_list.extend(to_psnr(pred_image, gt))
176 | iter_num += 1
177 |
178 | if not (batch_id % 100):
179 | print('Epoch: {0}, Iteration: {1}'.format(epoch, batch_id))
180 |
181 | # --- Calculate the average training PSNR in one epoch --- #
182 | train_psnr = sum(psnr_list) / len(psnr_list)
183 |
184 | # --- Save the network parameters --- #
185 | torch.save(net.state_dict(), './weights/{}/latest'.format(exp_name))
186 |
187 | # --- Use the evaluation model in testing --- #
188 | net.eval()
189 |
190 | # val_psnr, val_ssim = validation(net, val_data_loader, device, exp_name)
191 | val_psnr1, val_ssim1 = validation(net, val_data_loader1, device, exp_name)
192 |
193 | one_epoch_time = time.time() - start_time
194 | # print("Rain 800")
195 | # print_log(epoch+1, num_epochs, one_epoch_time, train_psnr, val_psnr, val_ssim, exp_name)
196 | print("allfilter")
197 | print_log(epoch+1, num_epochs, one_epoch_time, train_psnr, val_psnr1, val_ssim1, exp_name)
198 |
199 | # --- update the network weight --- #
200 | if (val_psnr1) >= (old_val_psnr1):
201 | torch.save(net.state_dict(), './weights/{}/best'.format(exp_name))
202 | print('model saved')
203 | old_val_psnr1 = val_psnr1
204 |
205 | # Note that the best model is selected based on the allfilter validation set.
--------------------------------------------------------------------------------
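train_with_visdom.py differs from train.py mainly in that it pushes the current input / ground-truth / prediction batch to a visdom dashboard through viz.image_display(image_list, iter_num). The actual utils/visualizer.py is not reproduced in this listing; the following is only a hedged sketch of a wrapper with the same interface (SimpleVisualizer and its env name are illustrative, not the repo's class), assuming a visdom server started with python -m visdom.server:

    import visdom

    class SimpleVisualizer:
        def __init__(self, env='transweather'):
            self.vis = visdom.Visdom(env=env)

        def image_display(self, image_list, step):
            # image_list maps a display name to a [N, 3, H, W] tensor
            for name, tensor in image_list.items():
                imgs = tensor.detach().cpu().clamp(0, 1)   # assumes values roughly in [0, 1]
                self.vis.images(imgs, win=name,
                                opts={'title': '{} (iter {})'.format(name, step)})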
/training_log/allfilter2_log.txt:
--------------------------------------------------------------------------------
1 | Date: 2022-07-14 15:04:59s, Time_Cost: 106s, Epoch: [1/100], Train_PSNR: 15.60, Val_PSNR: 23.38, Val_SSIM: 0.8337
2 | Date: 2022-07-14 15:06:39s, Time_Cost: 100s, Epoch: [2/100], Train_PSNR: 19.22, Val_PSNR: 26.54, Val_SSIM: 0.9024
3 | Date: 2022-07-14 15:08:21s, Time_Cost: 101s, Epoch: [3/100], Train_PSNR: 20.92, Val_PSNR: 26.01, Val_SSIM: 0.9170
4 | Date: 2022-07-14 15:10:02s, Time_Cost: 102s, Epoch: [4/100], Train_PSNR: 21.75, Val_PSNR: 27.82, Val_SSIM: 0.9169
5 | Date: 2022-07-14 15:11:44s, Time_Cost: 102s, Epoch: [5/100], Train_PSNR: 22.40, Val_PSNR: 27.15, Val_SSIM: 0.9247
6 | Date: 2022-07-14 15:13:26s, Time_Cost: 102s, Epoch: [6/100], Train_PSNR: 22.77, Val_PSNR: 28.54, Val_SSIM: 0.9373
7 | Date: 2022-07-14 15:15:07s, Time_Cost: 101s, Epoch: [7/100], Train_PSNR: 23.25, Val_PSNR: 28.24, Val_SSIM: 0.9344
8 | Date: 2022-07-14 15:16:49s, Time_Cost: 102s, Epoch: [8/100], Train_PSNR: 23.54, Val_PSNR: 27.53, Val_SSIM: 0.9348
9 | Date: 2022-07-14 15:18:31s, Time_Cost: 102s, Epoch: [9/100], Train_PSNR: 23.85, Val_PSNR: 27.10, Val_SSIM: 0.9420
10 | Date: 2022-07-14 15:20:13s, Time_Cost: 102s, Epoch: [10/100], Train_PSNR: 24.11, Val_PSNR: 28.03, Val_SSIM: 0.9356
11 | Date: 2022-07-14 15:21:55s, Time_Cost: 102s, Epoch: [11/100], Train_PSNR: 24.46, Val_PSNR: 28.17, Val_SSIM: 0.9424
12 | Date: 2022-07-14 15:23:36s, Time_Cost: 102s, Epoch: [12/100], Train_PSNR: 24.60, Val_PSNR: 25.89, Val_SSIM: 0.9311
13 | Date: 2022-07-14 15:25:18s, Time_Cost: 102s, Epoch: [13/100], Train_PSNR: 24.86, Val_PSNR: 26.31, Val_SSIM: 0.9397
14 | Date: 2022-07-14 15:27:00s, Time_Cost: 102s, Epoch: [14/100], Train_PSNR: 24.89, Val_PSNR: 27.05, Val_SSIM: 0.9425
15 | Date: 2022-07-14 15:28:42s, Time_Cost: 102s, Epoch: [15/100], Train_PSNR: 25.10, Val_PSNR: 27.96, Val_SSIM: 0.9441
16 | Date: 2022-07-14 15:30:25s, Time_Cost: 102s, Epoch: [16/100], Train_PSNR: 25.32, Val_PSNR: 27.17, Val_SSIM: 0.9403
17 | Date: 2022-07-14 15:32:07s, Time_Cost: 102s, Epoch: [17/100], Train_PSNR: 25.35, Val_PSNR: 27.68, Val_SSIM: 0.9314
18 | Date: 2022-07-14 15:33:49s, Time_Cost: 102s, Epoch: [18/100], Train_PSNR: 25.58, Val_PSNR: 26.25, Val_SSIM: 0.9389
19 | Date: 2022-07-14 15:35:31s, Time_Cost: 102s, Epoch: [19/100], Train_PSNR: 25.67, Val_PSNR: 26.53, Val_SSIM: 0.9284
20 | Date: 2022-07-14 15:37:13s, Time_Cost: 102s, Epoch: [20/100], Train_PSNR: 25.89, Val_PSNR: 27.82, Val_SSIM: 0.9421
21 | Date: 2022-07-14 15:38:55s, Time_Cost: 102s, Epoch: [21/100], Train_PSNR: 26.10, Val_PSNR: 26.80, Val_SSIM: 0.9409
22 | Date: 2022-07-14 15:40:37s, Time_Cost: 102s, Epoch: [22/100], Train_PSNR: 26.26, Val_PSNR: 26.54, Val_SSIM: 0.9401
23 | Date: 2022-07-14 15:42:20s, Time_Cost: 102s, Epoch: [23/100], Train_PSNR: 26.68, Val_PSNR: 29.07, Val_SSIM: 0.9501
24 | Date: 2022-07-14 15:44:02s, Time_Cost: 102s, Epoch: [24/100], Train_PSNR: 26.94, Val_PSNR: 28.65, Val_SSIM: 0.9476
25 | Date: 2022-07-14 15:45:44s, Time_Cost: 102s, Epoch: [25/100], Train_PSNR: 27.27, Val_PSNR: 27.07, Val_SSIM: 0.9387
26 | Date: 2022-07-14 15:47:26s, Time_Cost: 102s, Epoch: [26/100], Train_PSNR: 27.43, Val_PSNR: 28.22, Val_SSIM: 0.9440
27 | Date: 2022-07-14 15:49:09s, Time_Cost: 102s, Epoch: [27/100], Train_PSNR: 27.77, Val_PSNR: 28.15, Val_SSIM: 0.9456
28 | Date: 2022-07-14 15:50:51s, Time_Cost: 102s, Epoch: [28/100], Train_PSNR: 27.89, Val_PSNR: 28.46, Val_SSIM: 0.9482
29 | Date: 2022-07-14 15:52:33s, Time_Cost: 102s, Epoch: [29/100], Train_PSNR: 27.87, Val_PSNR: 26.41, Val_SSIM: 0.9364
30 | Date: 2022-07-14 15:54:15s, Time_Cost: 102s, Epoch: [30/100], Train_PSNR: 28.00, Val_PSNR: 25.04, Val_SSIM: 0.9318
31 | Date: 2022-07-14 15:55:57s, Time_Cost: 102s, Epoch: [31/100], Train_PSNR: 27.87, Val_PSNR: 26.40, Val_SSIM: 0.9348
32 | Date: 2022-07-14 15:57:40s, Time_Cost: 102s, Epoch: [32/100], Train_PSNR: 28.20, Val_PSNR: 27.04, Val_SSIM: 0.9426
33 | Date: 2022-07-14 15:59:22s, Time_Cost: 102s, Epoch: [33/100], Train_PSNR: 28.53, Val_PSNR: 27.68, Val_SSIM: 0.9484
34 | Date: 2022-07-14 16:01:04s, Time_Cost: 102s, Epoch: [34/100], Train_PSNR: 28.62, Val_PSNR: 27.61, Val_SSIM: 0.9448
35 | Date: 2022-07-14 16:02:47s, Time_Cost: 103s, Epoch: [35/100], Train_PSNR: 28.79, Val_PSNR: 27.92, Val_SSIM: 0.9475
36 | Date: 2022-07-14 16:04:29s, Time_Cost: 102s, Epoch: [36/100], Train_PSNR: 28.92, Val_PSNR: 27.15, Val_SSIM: 0.9452
37 | Date: 2022-07-14 16:06:12s, Time_Cost: 103s, Epoch: [37/100], Train_PSNR: 29.03, Val_PSNR: 27.94, Val_SSIM: 0.9422
38 | Date: 2022-07-14 16:07:54s, Time_Cost: 102s, Epoch: [38/100], Train_PSNR: 28.86, Val_PSNR: 28.06, Val_SSIM: 0.9446
39 | Date: 2022-07-14 16:09:37s, Time_Cost: 103s, Epoch: [39/100], Train_PSNR: 29.06, Val_PSNR: 27.86, Val_SSIM: 0.9460
40 | Date: 2022-07-14 16:11:19s, Time_Cost: 102s, Epoch: [40/100], Train_PSNR: 29.14, Val_PSNR: 27.55, Val_SSIM: 0.9455
41 | Date: 2022-07-14 16:13:01s, Time_Cost: 102s, Epoch: [41/100], Train_PSNR: 29.02, Val_PSNR: 27.65, Val_SSIM: 0.9471
42 | Date: 2022-07-14 16:14:44s, Time_Cost: 102s, Epoch: [42/100], Train_PSNR: 28.21, Val_PSNR: 27.60, Val_SSIM: 0.9473
43 | Date: 2022-07-14 16:16:26s, Time_Cost: 102s, Epoch: [43/100], Train_PSNR: 29.19, Val_PSNR: 27.66, Val_SSIM: 0.9481
44 | Date: 2022-07-14 16:18:09s, Time_Cost: 103s, Epoch: [44/100], Train_PSNR: 29.48, Val_PSNR: 27.39, Val_SSIM: 0.9473
45 | Date: 2022-07-14 16:19:51s, Time_Cost: 102s, Epoch: [45/100], Train_PSNR: 29.69, Val_PSNR: 27.76, Val_SSIM: 0.9434
46 | Date: 2022-07-14 16:21:34s, Time_Cost: 103s, Epoch: [46/100], Train_PSNR: 29.76, Val_PSNR: 27.85, Val_SSIM: 0.9466
47 | Date: 2022-07-14 16:23:16s, Time_Cost: 103s, Epoch: [47/100], Train_PSNR: 29.92, Val_PSNR: 27.89, Val_SSIM: 0.9471
48 | Date: 2022-07-14 16:24:59s, Time_Cost: 102s, Epoch: [48/100], Train_PSNR: 29.95, Val_PSNR: 27.93, Val_SSIM: 0.9493
49 | Date: 2022-07-14 16:26:41s, Time_Cost: 102s, Epoch: [49/100], Train_PSNR: 29.85, Val_PSNR: 27.59, Val_SSIM: 0.9474
50 | Date: 2022-07-14 16:28:23s, Time_Cost: 102s, Epoch: [50/100], Train_PSNR: 29.74, Val_PSNR: 28.34, Val_SSIM: 0.9428
51 | Date: 2022-07-14 16:30:06s, Time_Cost: 103s, Epoch: [51/100], Train_PSNR: 29.39, Val_PSNR: 28.03, Val_SSIM: 0.9507
52 | Date: 2022-07-14 16:31:48s, Time_Cost: 102s, Epoch: [52/100], Train_PSNR: 30.10, Val_PSNR: 27.04, Val_SSIM: 0.9462
53 | Date: 2022-07-14 16:33:31s, Time_Cost: 102s, Epoch: [53/100], Train_PSNR: 30.29, Val_PSNR: 27.52, Val_SSIM: 0.9493
54 | Date: 2022-07-14 16:35:13s, Time_Cost: 103s, Epoch: [54/100], Train_PSNR: 29.98, Val_PSNR: 27.71, Val_SSIM: 0.9444
55 | Date: 2022-07-14 16:36:56s, Time_Cost: 103s, Epoch: [55/100], Train_PSNR: 29.57, Val_PSNR: 27.60, Val_SSIM: 0.9503
56 | Date: 2022-07-14 16:38:38s, Time_Cost: 102s, Epoch: [56/100], Train_PSNR: 30.35, Val_PSNR: 27.25, Val_SSIM: 0.9482
57 | Date: 2022-07-14 16:40:20s, Time_Cost: 102s, Epoch: [57/100], Train_PSNR: 30.50, Val_PSNR: 26.90, Val_SSIM: 0.9435
58 | Date: 2022-07-14 16:42:03s, Time_Cost: 102s, Epoch: [58/100], Train_PSNR: 30.48, Val_PSNR: 26.81, Val_SSIM: 0.9460
59 | Date: 2022-07-14 16:43:45s, Time_Cost: 102s, Epoch: [59/100], Train_PSNR: 30.50, Val_PSNR: 27.45, Val_SSIM: 0.9490
60 | Date: 2022-07-14 16:45:28s, Time_Cost: 103s, Epoch: [60/100], Train_PSNR: 30.54, Val_PSNR: 26.74, Val_SSIM: 0.9448
61 | Date: 2022-07-14 16:47:10s, Time_Cost: 102s, Epoch: [61/100], Train_PSNR: 30.62, Val_PSNR: 27.07, Val_SSIM: 0.9475
62 | Date: 2022-07-14 16:48:52s, Time_Cost: 102s, Epoch: [62/100], Train_PSNR: 30.65, Val_PSNR: 26.29, Val_SSIM: 0.9462
63 | Date: 2022-07-14 16:50:35s, Time_Cost: 102s, Epoch: [63/100], Train_PSNR: 27.99, Val_PSNR: 23.94, Val_SSIM: 0.9362
64 | Date: 2022-07-14 16:52:17s, Time_Cost: 102s, Epoch: [64/100], Train_PSNR: 27.89, Val_PSNR: 26.90, Val_SSIM: 0.9470
65 | Date: 2022-07-14 16:53:59s, Time_Cost: 103s, Epoch: [65/100], Train_PSNR: 30.10, Val_PSNR: 26.36, Val_SSIM: 0.9445
66 | Date: 2022-07-14 16:55:42s, Time_Cost: 103s, Epoch: [66/100], Train_PSNR: 30.62, Val_PSNR: 26.30, Val_SSIM: 0.9434
67 | Date: 2022-07-14 16:57:24s, Time_Cost: 102s, Epoch: [67/100], Train_PSNR: 30.81, Val_PSNR: 26.60, Val_SSIM: 0.9451
68 | Date: 2022-07-14 16:59:07s, Time_Cost: 102s, Epoch: [68/100], Train_PSNR: 30.89, Val_PSNR: 26.99, Val_SSIM: 0.9448
69 | Date: 2022-07-14 17:00:49s, Time_Cost: 102s, Epoch: [69/100], Train_PSNR: 30.94, Val_PSNR: 26.06, Val_SSIM: 0.9439
70 | Date: 2022-07-14 17:02:32s, Time_Cost: 103s, Epoch: [70/100], Train_PSNR: 30.96, Val_PSNR: 26.62, Val_SSIM: 0.9456
71 | Date: 2022-07-14 17:04:15s, Time_Cost: 103s, Epoch: [71/100], Train_PSNR: 31.01, Val_PSNR: 26.21, Val_SSIM: 0.9433
72 | Date: 2022-07-14 17:05:57s, Time_Cost: 103s, Epoch: [72/100], Train_PSNR: 31.04, Val_PSNR: 26.98, Val_SSIM: 0.9465
73 | Date: 2022-07-14 17:07:40s, Time_Cost: 103s, Epoch: [73/100], Train_PSNR: 31.02, Val_PSNR: 26.67, Val_SSIM: 0.9460
74 | Date: 2022-07-14 17:09:23s, Time_Cost: 103s, Epoch: [74/100], Train_PSNR: 31.02, Val_PSNR: 26.77, Val_SSIM: 0.9455
75 | Date: 2022-07-14 17:11:05s, Time_Cost: 103s, Epoch: [75/100], Train_PSNR: 30.87, Val_PSNR: 26.46, Val_SSIM: 0.9469
76 | Date: 2022-07-14 17:12:48s, Time_Cost: 102s, Epoch: [76/100], Train_PSNR: 31.07, Val_PSNR: 27.02, Val_SSIM: 0.9477
77 | Date: 2022-07-14 17:14:30s, Time_Cost: 102s, Epoch: [77/100], Train_PSNR: 30.42, Val_PSNR: 26.44, Val_SSIM: 0.9462
78 | Date: 2022-07-14 17:16:13s, Time_Cost: 103s, Epoch: [78/100], Train_PSNR: 30.16, Val_PSNR: 26.96, Val_SSIM: 0.9486
79 | Date: 2022-07-14 17:17:56s, Time_Cost: 103s, Epoch: [79/100], Train_PSNR: 30.88, Val_PSNR: 26.97, Val_SSIM: 0.9485
80 | Date: 2022-07-14 17:19:38s, Time_Cost: 103s, Epoch: [80/100], Train_PSNR: 31.21, Val_PSNR: 26.63, Val_SSIM: 0.9468
81 | Date: 2022-07-14 17:21:21s, Time_Cost: 103s, Epoch: [81/100], Train_PSNR: 31.27, Val_PSNR: 26.89, Val_SSIM: 0.9467
82 | Date: 2022-07-14 17:23:04s, Time_Cost: 103s, Epoch: [82/100], Train_PSNR: 31.21, Val_PSNR: 26.44, Val_SSIM: 0.9468
83 | Date: 2022-07-14 17:24:46s, Time_Cost: 103s, Epoch: [83/100], Train_PSNR: 31.20, Val_PSNR: 26.59, Val_SSIM: 0.9425
84 | Date: 2022-07-14 17:26:30s, Time_Cost: 104s, Epoch: [84/100], Train_PSNR: 31.33, Val_PSNR: 25.84, Val_SSIM: 0.9428
85 | Date: 2022-07-14 17:28:12s, Time_Cost: 102s, Epoch: [85/100], Train_PSNR: 31.24, Val_PSNR: 27.23, Val_SSIM: 0.9466
86 | Date: 2022-07-14 17:29:55s, Time_Cost: 103s, Epoch: [86/100], Train_PSNR: 31.17, Val_PSNR: 25.71, Val_SSIM: 0.9415
87 | Date: 2022-07-14 17:31:38s, Time_Cost: 103s, Epoch: [87/100], Train_PSNR: 31.24, Val_PSNR: 26.86, Val_SSIM: 0.9482
88 |
--------------------------------------------------------------------------------
/training_log/expLR1_log.txt:
--------------------------------------------------------------------------------
1 | Date: 2022-07-20 13:44:50s, Time_Cost: 119s, Epoch: [1/250], Train_PSNR: 15.53, Val_PSNR: 24.15, Val_SSIM: 0.8322
2 | Date: 2022-07-20 13:46:52s, Time_Cost: 122s, Epoch: [2/250], Train_PSNR: 19.14, Val_PSNR: 25.83, Val_SSIM: 0.8940
3 | Date: 2022-07-20 13:48:55s, Time_Cost: 123s, Epoch: [3/250], Train_PSNR: 20.81, Val_PSNR: 25.49, Val_SSIM: 0.9130
4 | Date: 2022-07-20 13:50:58s, Time_Cost: 123s, Epoch: [4/250], Train_PSNR: 21.64, Val_PSNR: 28.24, Val_SSIM: 0.9175
5 | Date: 2022-07-20 13:53:00s, Time_Cost: 122s, Epoch: [5/250], Train_PSNR: 22.35, Val_PSNR: 27.35, Val_SSIM: 0.9236
6 | Date: 2022-07-20 13:55:04s, Time_Cost: 123s, Epoch: [6/250], Train_PSNR: 22.81, Val_PSNR: 27.14, Val_SSIM: 0.9328
7 | Date: 2022-07-20 13:57:06s, Time_Cost: 123s, Epoch: [7/250], Train_PSNR: 23.24, Val_PSNR: 27.00, Val_SSIM: 0.9330
8 | Date: 2022-07-20 13:59:10s, Time_Cost: 124s, Epoch: [8/250], Train_PSNR: 23.58, Val_PSNR: 24.32, Val_SSIM: 0.9253
9 | Date: 2022-07-20 14:01:13s, Time_Cost: 123s, Epoch: [9/250], Train_PSNR: 23.96, Val_PSNR: 27.58, Val_SSIM: 0.9366
10 | Date: 2022-07-20 14:03:15s, Time_Cost: 122s, Epoch: [10/250], Train_PSNR: 24.26, Val_PSNR: 26.79, Val_SSIM: 0.9322
11 | Date: 2022-07-20 14:05:17s, Time_Cost: 122s, Epoch: [11/250], Train_PSNR: 24.52, Val_PSNR: 27.87, Val_SSIM: 0.9349
12 | Date: 2022-07-20 14:07:21s, Time_Cost: 123s, Epoch: [12/250], Train_PSNR: 24.70, Val_PSNR: 28.63, Val_SSIM: 0.9401
13 | Date: 2022-07-20 14:09:24s, Time_Cost: 123s, Epoch: [13/250], Train_PSNR: 24.92, Val_PSNR: 27.11, Val_SSIM: 0.9343
14 | Date: 2022-07-20 14:11:27s, Time_Cost: 123s, Epoch: [14/250], Train_PSNR: 25.15, Val_PSNR: 27.07, Val_SSIM: 0.9383
15 | Date: 2022-07-20 14:13:28s, Time_Cost: 121s, Epoch: [15/250], Train_PSNR: 25.35, Val_PSNR: 28.82, Val_SSIM: 0.9441
16 | Date: 2022-07-20 14:15:33s, Time_Cost: 124s, Epoch: [16/250], Train_PSNR: 25.38, Val_PSNR: 27.71, Val_SSIM: 0.9393
17 | Date: 2022-07-20 14:17:36s, Time_Cost: 123s, Epoch: [17/250], Train_PSNR: 25.61, Val_PSNR: 28.61, Val_SSIM: 0.9447
18 | Date: 2022-07-20 14:19:38s, Time_Cost: 122s, Epoch: [18/250], Train_PSNR: 25.72, Val_PSNR: 27.61, Val_SSIM: 0.9430
19 | Date: 2022-07-20 14:21:42s, Time_Cost: 124s, Epoch: [19/250], Train_PSNR: 26.09, Val_PSNR: 28.30, Val_SSIM: 0.9432
20 | Date: 2022-07-20 14:23:45s, Time_Cost: 124s, Epoch: [20/250], Train_PSNR: 26.36, Val_PSNR: 27.79, Val_SSIM: 0.9407
21 | Date: 2022-07-20 14:25:49s, Time_Cost: 123s, Epoch: [21/250], Train_PSNR: 26.60, Val_PSNR: 27.01, Val_SSIM: 0.9399
22 | Date: 2022-07-20 14:27:52s, Time_Cost: 123s, Epoch: [22/250], Train_PSNR: 26.73, Val_PSNR: 27.64, Val_SSIM: 0.9411
23 | Date: 2022-07-20 14:29:54s, Time_Cost: 122s, Epoch: [23/250], Train_PSNR: 26.93, Val_PSNR: 28.27, Val_SSIM: 0.9431
24 | Date: 2022-07-20 14:31:57s, Time_Cost: 122s, Epoch: [24/250], Train_PSNR: 27.03, Val_PSNR: 28.09, Val_SSIM: 0.9428
25 | Date: 2022-07-20 14:34:00s, Time_Cost: 123s, Epoch: [25/250], Train_PSNR: 27.18, Val_PSNR: 27.84, Val_SSIM: 0.9431
26 | Date: 2022-07-20 14:36:02s, Time_Cost: 123s, Epoch: [26/250], Train_PSNR: 27.32, Val_PSNR: 27.59, Val_SSIM: 0.9398
27 | Date: 2022-07-20 14:38:07s, Time_Cost: 124s, Epoch: [27/250], Train_PSNR: 27.44, Val_PSNR: 28.33, Val_SSIM: 0.9432
28 | Date: 2022-07-20 14:40:09s, Time_Cost: 123s, Epoch: [28/250], Train_PSNR: 27.52, Val_PSNR: 27.43, Val_SSIM: 0.9405
29 | Date: 2022-07-20 14:42:13s, Time_Cost: 124s, Epoch: [29/250], Train_PSNR: 27.63, Val_PSNR: 27.92, Val_SSIM: 0.9394
30 | Date: 2022-07-20 14:44:16s, Time_Cost: 123s, Epoch: [30/250], Train_PSNR: 27.72, Val_PSNR: 26.32, Val_SSIM: 0.9345
31 | Date: 2022-07-20 14:46:19s, Time_Cost: 123s, Epoch: [31/250], Train_PSNR: 27.78, Val_PSNR: 28.09, Val_SSIM: 0.9421
32 | Date: 2022-07-20 14:48:22s, Time_Cost: 123s, Epoch: [32/250], Train_PSNR: 27.88, Val_PSNR: 27.13, Val_SSIM: 0.9393
33 | Date: 2022-07-20 14:50:26s, Time_Cost: 124s, Epoch: [33/250], Train_PSNR: 27.96, Val_PSNR: 27.51, Val_SSIM: 0.9385
34 | Date: 2022-07-20 14:52:29s, Time_Cost: 123s, Epoch: [34/250], Train_PSNR: 28.03, Val_PSNR: 27.45, Val_SSIM: 0.9407
35 | Date: 2022-07-20 14:54:31s, Time_Cost: 122s, Epoch: [35/250], Train_PSNR: 28.08, Val_PSNR: 27.62, Val_SSIM: 0.9405
36 | Date: 2022-07-20 14:56:34s, Time_Cost: 123s, Epoch: [36/250], Train_PSNR: 28.16, Val_PSNR: 27.77, Val_SSIM: 0.9410
37 | Date: 2022-07-20 14:58:36s, Time_Cost: 122s, Epoch: [37/250], Train_PSNR: 28.21, Val_PSNR: 27.70, Val_SSIM: 0.9392
38 | Date: 2022-07-20 15:00:39s, Time_Cost: 122s, Epoch: [38/250], Train_PSNR: 28.28, Val_PSNR: 27.35, Val_SSIM: 0.9386
39 | Date: 2022-07-20 15:02:41s, Time_Cost: 123s, Epoch: [39/250], Train_PSNR: 28.32, Val_PSNR: 27.26, Val_SSIM: 0.9369
40 | Date: 2022-07-20 15:04:44s, Time_Cost: 122s, Epoch: [40/250], Train_PSNR: 28.37, Val_PSNR: 27.19, Val_SSIM: 0.9371
41 | Date: 2022-07-20 15:06:48s, Time_Cost: 124s, Epoch: [41/250], Train_PSNR: 28.43, Val_PSNR: 27.36, Val_SSIM: 0.9380
42 | Date: 2022-07-20 15:08:50s, Time_Cost: 123s, Epoch: [42/250], Train_PSNR: 28.48, Val_PSNR: 27.49, Val_SSIM: 0.9365
43 | Date: 2022-07-20 15:10:54s, Time_Cost: 123s, Epoch: [43/250], Train_PSNR: 28.51, Val_PSNR: 27.24, Val_SSIM: 0.9365
44 | Date: 2022-07-20 15:12:56s, Time_Cost: 123s, Epoch: [44/250], Train_PSNR: 28.55, Val_PSNR: 27.27, Val_SSIM: 0.9368
45 | Date: 2022-07-20 15:14:59s, Time_Cost: 123s, Epoch: [45/250], Train_PSNR: 28.59, Val_PSNR: 27.02, Val_SSIM: 0.9358
46 | Date: 2022-07-20 15:17:03s, Time_Cost: 124s, Epoch: [46/250], Train_PSNR: 28.62, Val_PSNR: 26.97, Val_SSIM: 0.9365
47 | Date: 2022-07-20 15:19:05s, Time_Cost: 122s, Epoch: [47/250], Train_PSNR: 28.67, Val_PSNR: 27.00, Val_SSIM: 0.9373
48 | Date: 2022-07-20 15:21:08s, Time_Cost: 123s, Epoch: [48/250], Train_PSNR: 28.70, Val_PSNR: 27.13, Val_SSIM: 0.9362
49 | Date: 2022-07-20 15:23:10s, Time_Cost: 123s, Epoch: [49/250], Train_PSNR: 28.72, Val_PSNR: 27.25, Val_SSIM: 0.9364
50 | Date: 2022-07-20 15:25:14s, Time_Cost: 123s, Epoch: [50/250], Train_PSNR: 28.76, Val_PSNR: 26.41, Val_SSIM: 0.9319
51 | Date: 2022-07-20 15:27:16s, Time_Cost: 123s, Epoch: [51/250], Train_PSNR: 28.79, Val_PSNR: 26.89, Val_SSIM: 0.9352
52 | Date: 2022-07-20 15:29:19s, Time_Cost: 123s, Epoch: [52/250], Train_PSNR: 28.81, Val_PSNR: 26.79, Val_SSIM: 0.9345
53 | Date: 2022-07-20 15:31:21s, Time_Cost: 122s, Epoch: [53/250], Train_PSNR: 28.84, Val_PSNR: 26.90, Val_SSIM: 0.9345
54 | Date: 2022-07-20 15:33:25s, Time_Cost: 123s, Epoch: [54/250], Train_PSNR: 28.86, Val_PSNR: 26.81, Val_SSIM: 0.9354
55 | Date: 2022-07-20 15:35:27s, Time_Cost: 122s, Epoch: [55/250], Train_PSNR: 28.89, Val_PSNR: 26.99, Val_SSIM: 0.9342
56 | Date: 2022-07-20 15:37:32s, Time_Cost: 125s, Epoch: [56/250], Train_PSNR: 28.91, Val_PSNR: 26.84, Val_SSIM: 0.9335
57 | Date: 2022-07-20 15:39:35s, Time_Cost: 123s, Epoch: [57/250], Train_PSNR: 28.93, Val_PSNR: 26.83, Val_SSIM: 0.9343
58 | Date: 2022-07-20 15:41:38s, Time_Cost: 123s, Epoch: [58/250], Train_PSNR: 28.95, Val_PSNR: 26.73, Val_SSIM: 0.9332
59 | Date: 2022-07-20 15:43:41s, Time_Cost: 123s, Epoch: [59/250], Train_PSNR: 28.97, Val_PSNR: 26.60, Val_SSIM: 0.9330
60 | Date: 2022-07-20 15:45:44s, Time_Cost: 123s, Epoch: [60/250], Train_PSNR: 28.99, Val_PSNR: 26.68, Val_SSIM: 0.9340
61 | Date: 2022-07-20 15:47:47s, Time_Cost: 123s, Epoch: [61/250], Train_PSNR: 29.01, Val_PSNR: 26.82, Val_SSIM: 0.9329
62 | Date: 2022-07-20 15:49:50s, Time_Cost: 124s, Epoch: [62/250], Train_PSNR: 29.02, Val_PSNR: 26.62, Val_SSIM: 0.9329
63 | Date: 2022-07-20 15:51:52s, Time_Cost: 122s, Epoch: [63/250], Train_PSNR: 29.04, Val_PSNR: 26.69, Val_SSIM: 0.9336
64 | Date: 2022-07-20 15:53:56s, Time_Cost: 124s, Epoch: [64/250], Train_PSNR: 29.05, Val_PSNR: 26.58, Val_SSIM: 0.9335
65 | Date: 2022-07-20 15:55:59s, Time_Cost: 123s, Epoch: [65/250], Train_PSNR: 29.07, Val_PSNR: 26.51, Val_SSIM: 0.9327
66 | Date: 2022-07-20 15:58:02s, Time_Cost: 123s, Epoch: [66/250], Train_PSNR: 29.08, Val_PSNR: 26.63, Val_SSIM: 0.9331
67 | Date: 2022-07-20 16:00:05s, Time_Cost: 123s, Epoch: [67/250], Train_PSNR: 29.10, Val_PSNR: 26.44, Val_SSIM: 0.9321
68 | Date: 2022-07-20 16:02:09s, Time_Cost: 124s, Epoch: [68/250], Train_PSNR: 29.11, Val_PSNR: 26.40, Val_SSIM: 0.9319
69 | Date: 2022-07-20 16:04:13s, Time_Cost: 124s, Epoch: [69/250], Train_PSNR: 29.11, Val_PSNR: 26.30, Val_SSIM: 0.9320
70 | Date: 2022-07-20 16:06:17s, Time_Cost: 123s, Epoch: [70/250], Train_PSNR: 29.13, Val_PSNR: 26.54, Val_SSIM: 0.9333
71 | Date: 2022-07-20 16:08:20s, Time_Cost: 123s, Epoch: [71/250], Train_PSNR: 29.14, Val_PSNR: 26.33, Val_SSIM: 0.9312
72 | Date: 2022-07-20 16:10:22s, Time_Cost: 123s, Epoch: [72/250], Train_PSNR: 29.16, Val_PSNR: 26.37, Val_SSIM: 0.9316
73 | Date: 2022-07-20 16:12:26s, Time_Cost: 124s, Epoch: [73/250], Train_PSNR: 29.16, Val_PSNR: 26.52, Val_SSIM: 0.9322
74 | Date: 2022-07-20 16:14:30s, Time_Cost: 124s, Epoch: [74/250], Train_PSNR: 29.18, Val_PSNR: 26.42, Val_SSIM: 0.9318
75 | Date: 2022-07-20 16:16:34s, Time_Cost: 124s, Epoch: [75/250], Train_PSNR: 29.18, Val_PSNR: 26.44, Val_SSIM: 0.9320
76 | Date: 2022-07-20 16:18:39s, Time_Cost: 125s, Epoch: [76/250], Train_PSNR: 29.19, Val_PSNR: 26.37, Val_SSIM: 0.9316
77 | Date: 2022-07-20 16:20:42s, Time_Cost: 123s, Epoch: [77/250], Train_PSNR: 29.20, Val_PSNR: 26.41, Val_SSIM: 0.9323
78 | Date: 2022-07-20 16:22:45s, Time_Cost: 123s, Epoch: [78/250], Train_PSNR: 29.21, Val_PSNR: 26.51, Val_SSIM: 0.9323
79 | Date: 2022-07-20 16:24:48s, Time_Cost: 123s, Epoch: [79/250], Train_PSNR: 29.21, Val_PSNR: 26.53, Val_SSIM: 0.9328
80 | Date: 2022-07-20 16:26:51s, Time_Cost: 123s, Epoch: [80/250], Train_PSNR: 29.23, Val_PSNR: 26.37, Val_SSIM: 0.9318
81 | Date: 2022-07-20 16:28:53s, Time_Cost: 122s, Epoch: [81/250], Train_PSNR: 29.23, Val_PSNR: 26.38, Val_SSIM: 0.9316
82 | Date: 2022-07-20 16:30:55s, Time_Cost: 122s, Epoch: [82/250], Train_PSNR: 29.23, Val_PSNR: 26.32, Val_SSIM: 0.9314
83 | Date: 2022-07-20 16:32:58s, Time_Cost: 123s, Epoch: [83/250], Train_PSNR: 29.24, Val_PSNR: 26.33, Val_SSIM: 0.9318
84 | Date: 2022-07-20 16:35:00s, Time_Cost: 122s, Epoch: [84/250], Train_PSNR: 29.25, Val_PSNR: 26.42, Val_SSIM: 0.9317
85 | Date: 2022-07-20 16:37:03s, Time_Cost: 123s, Epoch: [85/250], Train_PSNR: 29.25, Val_PSNR: 26.43, Val_SSIM: 0.9318
86 | Date: 2022-07-20 16:39:06s, Time_Cost: 123s, Epoch: [86/250], Train_PSNR: 29.26, Val_PSNR: 26.33, Val_SSIM: 0.9314
87 | Date: 2022-07-20 16:41:09s, Time_Cost: 123s, Epoch: [87/250], Train_PSNR: 29.27, Val_PSNR: 26.34, Val_SSIM: 0.9311
88 | Date: 2022-07-20 16:43:12s, Time_Cost: 122s, Epoch: [88/250], Train_PSNR: 29.27, Val_PSNR: 26.35, Val_SSIM: 0.9316
89 | Date: 2022-07-20 16:45:14s, Time_Cost: 122s, Epoch: [89/250], Train_PSNR: 29.28, Val_PSNR: 26.34, Val_SSIM: 0.9316
90 | Date: 2022-07-20 16:47:17s, Time_Cost: 123s, Epoch: [90/250], Train_PSNR: 29.28, Val_PSNR: 26.42, Val_SSIM: 0.9313
91 | Date: 2022-07-20 16:49:19s, Time_Cost: 122s, Epoch: [91/250], Train_PSNR: 29.28, Val_PSNR: 26.43, Val_SSIM: 0.9315
92 | Date: 2022-07-20 16:51:22s, Time_Cost: 123s, Epoch: [92/250], Train_PSNR: 29.29, Val_PSNR: 26.37, Val_SSIM: 0.9311
93 | Date: 2022-07-20 16:53:25s, Time_Cost: 123s, Epoch: [93/250], Train_PSNR: 29.28, Val_PSNR: 26.36, Val_SSIM: 0.9314
94 | Date: 2022-07-20 16:55:29s, Time_Cost: 124s, Epoch: [94/250], Train_PSNR: 29.30, Val_PSNR: 26.40, Val_SSIM: 0.9317
95 | Date: 2022-07-20 16:57:31s, Time_Cost: 122s, Epoch: [95/250], Train_PSNR: 29.30, Val_PSNR: 26.42, Val_SSIM: 0.9317
96 | Date: 2022-07-20 16:59:34s, Time_Cost: 123s, Epoch: [96/250], Train_PSNR: 29.30, Val_PSNR: 26.31, Val_SSIM: 0.9311
97 |
--------------------------------------------------------------------------------
/training_log/allfilter1_log.txt:
--------------------------------------------------------------------------------
1 | Date: 2022-07-13 17:07:00s, Time_Cost: 21s, Epoch: [1/100], Train_PSNR: 10.03, Val_PSNR: 21.56, Val_SSIM: 0.5226
2 | Date: 2022-07-13 17:07:15s, Time_Cost: 15s, Epoch: [2/100], Train_PSNR: 15.23, Val_PSNR: 23.58, Val_SSIM: 0.6546
3 | Date: 2022-07-13 17:07:30s, Time_Cost: 15s, Epoch: [3/100], Train_PSNR: 16.56, Val_PSNR: 24.52, Val_SSIM: 0.7352
4 | Date: 2022-07-13 17:07:45s, Time_Cost: 15s, Epoch: [4/100], Train_PSNR: 17.57, Val_PSNR: 23.46, Val_SSIM: 0.7800
5 | Date: 2022-07-13 17:08:00s, Time_Cost: 15s, Epoch: [5/100], Train_PSNR: 18.36, Val_PSNR: 25.97, Val_SSIM: 0.8286
6 | Date: 2022-07-13 17:08:15s, Time_Cost: 15s, Epoch: [6/100], Train_PSNR: 19.00, Val_PSNR: 25.22, Val_SSIM: 0.8398
7 | Date: 2022-07-13 17:08:30s, Time_Cost: 15s, Epoch: [7/100], Train_PSNR: 19.38, Val_PSNR: 26.53, Val_SSIM: 0.8592
8 | Date: 2022-07-13 17:08:45s, Time_Cost: 15s, Epoch: [8/100], Train_PSNR: 19.83, Val_PSNR: 26.90, Val_SSIM: 0.8710
9 | Date: 2022-07-13 17:09:00s, Time_Cost: 15s, Epoch: [9/100], Train_PSNR: 20.21, Val_PSNR: 27.14, Val_SSIM: 0.8825
10 | Date: 2022-07-13 17:09:15s, Time_Cost: 15s, Epoch: [10/100], Train_PSNR: 20.66, Val_PSNR: 26.94, Val_SSIM: 0.8908
11 | Date: 2022-07-13 17:09:30s, Time_Cost: 15s, Epoch: [11/100], Train_PSNR: 21.29, Val_PSNR: 28.26, Val_SSIM: 0.9021
12 | Date: 2022-07-13 17:09:46s, Time_Cost: 15s, Epoch: [12/100], Train_PSNR: 21.74, Val_PSNR: 29.22, Val_SSIM: 0.9111
13 | Date: 2022-07-13 17:10:01s, Time_Cost: 15s, Epoch: [13/100], Train_PSNR: 22.10, Val_PSNR: 29.03, Val_SSIM: 0.9158
14 | Date: 2022-07-13 17:10:16s, Time_Cost: 15s, Epoch: [14/100], Train_PSNR: 22.57, Val_PSNR: 28.18, Val_SSIM: 0.9229
15 | Date: 2022-07-13 17:10:30s, Time_Cost: 15s, Epoch: [15/100], Train_PSNR: 22.83, Val_PSNR: 27.78, Val_SSIM: 0.9267
16 | Date: 2022-07-13 17:10:45s, Time_Cost: 15s, Epoch: [16/100], Train_PSNR: 23.09, Val_PSNR: 28.57, Val_SSIM: 0.9314
17 | Date: 2022-07-13 17:11:00s, Time_Cost: 15s, Epoch: [17/100], Train_PSNR: 23.33, Val_PSNR: 28.66, Val_SSIM: 0.9349
18 | Date: 2022-07-13 17:11:15s, Time_Cost: 15s, Epoch: [18/100], Train_PSNR: 23.55, Val_PSNR: 29.62, Val_SSIM: 0.9382
19 | Date: 2022-07-13 17:11:30s, Time_Cost: 15s, Epoch: [19/100], Train_PSNR: 23.43, Val_PSNR: 30.42, Val_SSIM: 0.9312
20 | Date: 2022-07-13 17:11:45s, Time_Cost: 15s, Epoch: [20/100], Train_PSNR: 23.47, Val_PSNR: 29.21, Val_SSIM: 0.9402
21 | Date: 2022-07-13 17:12:00s, Time_Cost: 15s, Epoch: [21/100], Train_PSNR: 24.04, Val_PSNR: 29.95, Val_SSIM: 0.9431
22 | Date: 2022-07-13 17:12:15s, Time_Cost: 15s, Epoch: [22/100], Train_PSNR: 24.15, Val_PSNR: 30.06, Val_SSIM: 0.9447
23 | Date: 2022-07-13 17:12:30s, Time_Cost: 15s, Epoch: [23/100], Train_PSNR: 24.21, Val_PSNR: 29.84, Val_SSIM: 0.9456
24 | Date: 2022-07-13 17:12:45s, Time_Cost: 15s, Epoch: [24/100], Train_PSNR: 24.38, Val_PSNR: 31.02, Val_SSIM: 0.9483
25 | Date: 2022-07-13 17:13:01s, Time_Cost: 15s, Epoch: [25/100], Train_PSNR: 24.45, Val_PSNR: 30.26, Val_SSIM: 0.9478
26 | Date: 2022-07-13 17:13:16s, Time_Cost: 15s, Epoch: [26/100], Train_PSNR: 24.49, Val_PSNR: 30.17, Val_SSIM: 0.9489
27 | Date: 2022-07-13 17:13:31s, Time_Cost: 15s, Epoch: [27/100], Train_PSNR: 24.54, Val_PSNR: 30.67, Val_SSIM: 0.9507
28 | Date: 2022-07-13 17:13:46s, Time_Cost: 15s, Epoch: [28/100], Train_PSNR: 24.71, Val_PSNR: 30.51, Val_SSIM: 0.9515
29 | Date: 2022-07-13 17:14:01s, Time_Cost: 15s, Epoch: [29/100], Train_PSNR: 24.78, Val_PSNR: 30.00, Val_SSIM: 0.9495
30 | Date: 2022-07-13 17:14:16s, Time_Cost: 15s, Epoch: [30/100], Train_PSNR: 24.82, Val_PSNR: 30.56, Val_SSIM: 0.9513
31 | Date: 2022-07-13 17:14:31s, Time_Cost: 15s, Epoch: [31/100], Train_PSNR: 24.86, Val_PSNR: 31.65, Val_SSIM: 0.9532
32 | Date: 2022-07-13 17:14:46s, Time_Cost: 15s, Epoch: [32/100], Train_PSNR: 25.06, Val_PSNR: 31.08, Val_SSIM: 0.9532
33 | Date: 2022-07-13 17:15:01s, Time_Cost: 15s, Epoch: [33/100], Train_PSNR: 24.99, Val_PSNR: 31.25, Val_SSIM: 0.9538
34 | Date: 2022-07-13 17:15:16s, Time_Cost: 15s, Epoch: [34/100], Train_PSNR: 25.09, Val_PSNR: 30.78, Val_SSIM: 0.9539
35 | Date: 2022-07-13 17:15:31s, Time_Cost: 15s, Epoch: [35/100], Train_PSNR: 25.25, Val_PSNR: 31.25, Val_SSIM: 0.9548
36 | Date: 2022-07-13 17:15:46s, Time_Cost: 15s, Epoch: [36/100], Train_PSNR: 25.35, Val_PSNR: 31.85, Val_SSIM: 0.9553
37 | Date: 2022-07-13 17:16:02s, Time_Cost: 15s, Epoch: [37/100], Train_PSNR: 25.39, Val_PSNR: 30.95, Val_SSIM: 0.9553
38 | Date: 2022-07-13 17:16:17s, Time_Cost: 15s, Epoch: [38/100], Train_PSNR: 25.24, Val_PSNR: 30.83, Val_SSIM: 0.9548
39 | Date: 2022-07-13 17:16:32s, Time_Cost: 15s, Epoch: [39/100], Train_PSNR: 25.45, Val_PSNR: 32.07, Val_SSIM: 0.9547
40 | Date: 2022-07-13 17:16:47s, Time_Cost: 15s, Epoch: [40/100], Train_PSNR: 25.51, Val_PSNR: 30.63, Val_SSIM: 0.9557
41 | Date: 2022-07-13 17:17:02s, Time_Cost: 15s, Epoch: [41/100], Train_PSNR: 25.69, Val_PSNR: 31.01, Val_SSIM: 0.9560
42 | Date: 2022-07-13 17:17:17s, Time_Cost: 15s, Epoch: [42/100], Train_PSNR: 25.59, Val_PSNR: 30.84, Val_SSIM: 0.9553
43 | Date: 2022-07-13 17:17:32s, Time_Cost: 15s, Epoch: [43/100], Train_PSNR: 25.65, Val_PSNR: 30.72, Val_SSIM: 0.9552
44 | Date: 2022-07-13 17:17:47s, Time_Cost: 15s, Epoch: [44/100], Train_PSNR: 25.74, Val_PSNR: 30.88, Val_SSIM: 0.9558
45 | Date: 2022-07-13 17:18:02s, Time_Cost: 15s, Epoch: [45/100], Train_PSNR: 25.90, Val_PSNR: 32.07, Val_SSIM: 0.9562
46 | Date: 2022-07-13 17:18:18s, Time_Cost: 15s, Epoch: [46/100], Train_PSNR: 26.00, Val_PSNR: 31.95, Val_SSIM: 0.9548
47 | Date: 2022-07-13 17:18:33s, Time_Cost: 15s, Epoch: [47/100], Train_PSNR: 25.96, Val_PSNR: 31.29, Val_SSIM: 0.9558
48 | Date: 2022-07-13 17:18:48s, Time_Cost: 15s, Epoch: [48/100], Train_PSNR: 26.11, Val_PSNR: 31.78, Val_SSIM: 0.9564
49 | Date: 2022-07-13 17:19:03s, Time_Cost: 15s, Epoch: [49/100], Train_PSNR: 26.20, Val_PSNR: 31.05, Val_SSIM: 0.9562
50 | Date: 2022-07-13 17:19:18s, Time_Cost: 15s, Epoch: [50/100], Train_PSNR: 26.24, Val_PSNR: 31.30, Val_SSIM: 0.9551
51 | Date: 2022-07-13 17:19:33s, Time_Cost: 15s, Epoch: [51/100], Train_PSNR: 26.26, Val_PSNR: 31.57, Val_SSIM: 0.9556
52 | Date: 2022-07-13 17:19:48s, Time_Cost: 15s, Epoch: [52/100], Train_PSNR: 26.36, Val_PSNR: 31.40, Val_SSIM: 0.9556
53 | Date: 2022-07-13 17:20:03s, Time_Cost: 15s, Epoch: [53/100], Train_PSNR: 26.45, Val_PSNR: 30.79, Val_SSIM: 0.9547
54 | Date: 2022-07-13 17:20:19s, Time_Cost: 15s, Epoch: [54/100], Train_PSNR: 24.02, Val_PSNR: 23.62, Val_SSIM: 0.7616
55 | Date: 2022-07-13 17:20:34s, Time_Cost: 15s, Epoch: [55/100], Train_PSNR: 21.44, Val_PSNR: 27.88, Val_SSIM: 0.9380
56 | Date: 2022-07-13 17:20:49s, Time_Cost: 15s, Epoch: [56/100], Train_PSNR: 23.43, Val_PSNR: 28.20, Val_SSIM: 0.9497
57 | Date: 2022-07-13 17:21:04s, Time_Cost: 15s, Epoch: [57/100], Train_PSNR: 23.94, Val_PSNR: 28.99, Val_SSIM: 0.9529
58 | Date: 2022-07-13 17:21:19s, Time_Cost: 15s, Epoch: [58/100], Train_PSNR: 24.72, Val_PSNR: 30.47, Val_SSIM: 0.9550
59 | Date: 2022-07-13 17:21:34s, Time_Cost: 15s, Epoch: [59/100], Train_PSNR: 25.20, Val_PSNR: 31.00, Val_SSIM: 0.9563
60 | Date: 2022-07-13 17:21:49s, Time_Cost: 15s, Epoch: [60/100], Train_PSNR: 25.26, Val_PSNR: 30.48, Val_SSIM: 0.9583
61 | Date: 2022-07-13 17:22:04s, Time_Cost: 15s, Epoch: [61/100], Train_PSNR: 25.56, Val_PSNR: 30.92, Val_SSIM: 0.9577
62 | Date: 2022-07-13 17:22:19s, Time_Cost: 15s, Epoch: [62/100], Train_PSNR: 25.65, Val_PSNR: 31.95, Val_SSIM: 0.9599
63 | Date: 2022-07-13 17:22:34s, Time_Cost: 15s, Epoch: [63/100], Train_PSNR: 25.65, Val_PSNR: 31.20, Val_SSIM: 0.9600
64 | Date: 2022-07-13 17:22:50s, Time_Cost: 15s, Epoch: [64/100], Train_PSNR: 25.76, Val_PSNR: 30.71, Val_SSIM: 0.9596
65 | Date: 2022-07-13 17:23:05s, Time_Cost: 15s, Epoch: [65/100], Train_PSNR: 25.98, Val_PSNR: 31.02, Val_SSIM: 0.9611
66 | Date: 2022-07-13 17:23:20s, Time_Cost: 15s, Epoch: [66/100], Train_PSNR: 25.93, Val_PSNR: 31.37, Val_SSIM: 0.9600
67 | Date: 2022-07-13 17:23:35s, Time_Cost: 15s, Epoch: [67/100], Train_PSNR: 26.04, Val_PSNR: 31.44, Val_SSIM: 0.9614
68 | Date: 2022-07-13 17:23:50s, Time_Cost: 15s, Epoch: [68/100], Train_PSNR: 26.12, Val_PSNR: 31.42, Val_SSIM: 0.9608
69 | Date: 2022-07-13 17:24:05s, Time_Cost: 15s, Epoch: [69/100], Train_PSNR: 25.84, Val_PSNR: 30.87, Val_SSIM: 0.9613
70 | Date: 2022-07-13 17:24:20s, Time_Cost: 15s, Epoch: [70/100], Train_PSNR: 26.09, Val_PSNR: 31.93, Val_SSIM: 0.9601
71 | Date: 2022-07-13 17:24:35s, Time_Cost: 15s, Epoch: [71/100], Train_PSNR: 26.15, Val_PSNR: 31.25, Val_SSIM: 0.9609
72 | Date: 2022-07-13 17:24:51s, Time_Cost: 15s, Epoch: [72/100], Train_PSNR: 26.31, Val_PSNR: 31.39, Val_SSIM: 0.9606
73 | Date: 2022-07-13 17:25:06s, Time_Cost: 15s, Epoch: [73/100], Train_PSNR: 26.46, Val_PSNR: 31.02, Val_SSIM: 0.9587
74 | Date: 2022-07-13 17:25:21s, Time_Cost: 15s, Epoch: [74/100], Train_PSNR: 26.63, Val_PSNR: 32.52, Val_SSIM: 0.9595
75 | Date: 2022-07-13 17:25:36s, Time_Cost: 15s, Epoch: [75/100], Train_PSNR: 26.75, Val_PSNR: 31.87, Val_SSIM: 0.9589
76 | Date: 2022-07-13 17:25:52s, Time_Cost: 15s, Epoch: [76/100], Train_PSNR: 26.85, Val_PSNR: 31.08, Val_SSIM: 0.9585
77 | Date: 2022-07-13 17:26:07s, Time_Cost: 15s, Epoch: [77/100], Train_PSNR: 26.85, Val_PSNR: 32.65, Val_SSIM: 0.9586
78 | Date: 2022-07-13 17:26:22s, Time_Cost: 15s, Epoch: [78/100], Train_PSNR: 27.02, Val_PSNR: 32.10, Val_SSIM: 0.9587
79 | Date: 2022-07-13 17:26:37s, Time_Cost: 15s, Epoch: [79/100], Train_PSNR: 27.09, Val_PSNR: 32.15, Val_SSIM: 0.9588
80 | Date: 2022-07-13 17:26:53s, Time_Cost: 15s, Epoch: [80/100], Train_PSNR: 27.10, Val_PSNR: 31.71, Val_SSIM: 0.9576
81 | Date: 2022-07-13 17:27:08s, Time_Cost: 15s, Epoch: [81/100], Train_PSNR: 27.10, Val_PSNR: 31.68, Val_SSIM: 0.9582
82 | Date: 2022-07-13 17:27:23s, Time_Cost: 15s, Epoch: [82/100], Train_PSNR: 27.20, Val_PSNR: 31.75, Val_SSIM: 0.9584
83 | Date: 2022-07-13 17:27:38s, Time_Cost: 15s, Epoch: [83/100], Train_PSNR: 27.26, Val_PSNR: 32.15, Val_SSIM: 0.9578
84 | Date: 2022-07-13 17:27:53s, Time_Cost: 15s, Epoch: [84/100], Train_PSNR: 27.30, Val_PSNR: 32.03, Val_SSIM: 0.9579
85 | Date: 2022-07-13 17:28:08s, Time_Cost: 15s, Epoch: [85/100], Train_PSNR: 27.31, Val_PSNR: 31.59, Val_SSIM: 0.9577
86 | Date: 2022-07-13 17:28:23s, Time_Cost: 15s, Epoch: [86/100], Train_PSNR: 27.37, Val_PSNR: 31.69, Val_SSIM: 0.9570
87 | Date: 2022-07-13 17:28:38s, Time_Cost: 15s, Epoch: [87/100], Train_PSNR: 27.44, Val_PSNR: 32.02, Val_SSIM: 0.9574
88 | Date: 2022-07-13 17:28:53s, Time_Cost: 15s, Epoch: [88/100], Train_PSNR: 27.31, Val_PSNR: 32.35, Val_SSIM: 0.9565
89 | Date: 2022-07-13 17:29:09s, Time_Cost: 15s, Epoch: [89/100], Train_PSNR: 27.44, Val_PSNR: 32.09, Val_SSIM: 0.9569
90 | Date: 2022-07-13 17:29:24s, Time_Cost: 15s, Epoch: [90/100], Train_PSNR: 27.49, Val_PSNR: 32.38, Val_SSIM: 0.9573
91 | Date: 2022-07-13 17:29:39s, Time_Cost: 15s, Epoch: [91/100], Train_PSNR: 27.53, Val_PSNR: 31.67, Val_SSIM: 0.9567
92 | Date: 2022-07-13 17:29:54s, Time_Cost: 15s, Epoch: [92/100], Train_PSNR: 27.46, Val_PSNR: 31.83, Val_SSIM: 0.9570
93 | Date: 2022-07-13 17:30:09s, Time_Cost: 15s, Epoch: [93/100], Train_PSNR: 27.54, Val_PSNR: 31.61, Val_SSIM: 0.9563
94 | Date: 2022-07-13 17:30:24s, Time_Cost: 15s, Epoch: [94/100], Train_PSNR: 27.60, Val_PSNR: 32.16, Val_SSIM: 0.9561
95 | Date: 2022-07-13 17:30:39s, Time_Cost: 15s, Epoch: [95/100], Train_PSNR: 27.70, Val_PSNR: 31.64, Val_SSIM: 0.9564
96 | Date: 2022-07-13 17:30:54s, Time_Cost: 15s, Epoch: [96/100], Train_PSNR: 27.72, Val_PSNR: 31.77, Val_SSIM: 0.9558
97 | Date: 2022-07-13 17:31:09s, Time_Cost: 15s, Epoch: [97/100], Train_PSNR: 27.73, Val_PSNR: 31.29, Val_SSIM: 0.9555
98 | Date: 2022-07-13 17:31:24s, Time_Cost: 15s, Epoch: [98/100], Train_PSNR: 27.68, Val_PSNR: 31.68, Val_SSIM: 0.9555
99 | Date: 2022-07-13 17:31:40s, Time_Cost: 15s, Epoch: [99/100], Train_PSNR: 27.32, Val_PSNR: 31.41, Val_SSIM: 0.9476
100 | Date: 2022-07-13 17:31:55s, Time_Cost: 15s, Epoch: [100/100], Train_PSNR: 27.16, Val_PSNR: 32.26, Val_SSIM: 0.9556
101 |
--------------------------------------------------------------------------------
/training_log/cosLR1_log.txt:
--------------------------------------------------------------------------------
1 | Date: 2022-07-21 17:43:36s, Time_Cost: 118s, Epoch: [1/100], Train_PSNR: 15.85, Val_PSNR: 23.79, Val_SSIM: 0.8147
2 | Date: 2022-07-21 17:45:40s, Time_Cost: 124s, Epoch: [2/100], Train_PSNR: 19.64, Val_PSNR: 24.41, Val_SSIM: 0.8992
3 | Date: 2022-07-21 17:47:44s, Time_Cost: 123s, Epoch: [3/100], Train_PSNR: 21.28, Val_PSNR: 26.81, Val_SSIM: 0.9250
4 | Date: 2022-07-21 17:49:47s, Time_Cost: 123s, Epoch: [4/100], Train_PSNR: 22.23, Val_PSNR: 28.08, Val_SSIM: 0.9319
5 | Date: 2022-07-21 17:51:50s, Time_Cost: 123s, Epoch: [5/100], Train_PSNR: 22.85, Val_PSNR: 29.27, Val_SSIM: 0.9366
6 | Date: 2022-07-21 17:53:53s, Time_Cost: 123s, Epoch: [6/100], Train_PSNR: 23.28, Val_PSNR: 26.36, Val_SSIM: 0.9413
7 | Date: 2022-07-21 17:55:55s, Time_Cost: 122s, Epoch: [7/100], Train_PSNR: 23.79, Val_PSNR: 29.12, Val_SSIM: 0.9474
8 | Date: 2022-07-21 17:57:58s, Time_Cost: 123s, Epoch: [8/100], Train_PSNR: 24.24, Val_PSNR: 27.85, Val_SSIM: 0.9530
9 | Date: 2022-07-21 18:00:00s, Time_Cost: 122s, Epoch: [9/100], Train_PSNR: 24.64, Val_PSNR: 29.86, Val_SSIM: 0.9533
10 | Date: 2022-07-21 18:02:03s, Time_Cost: 123s, Epoch: [10/100], Train_PSNR: 24.92, Val_PSNR: 29.84, Val_SSIM: 0.9524
11 | Date: 2022-07-21 18:04:06s, Time_Cost: 123s, Epoch: [11/100], Train_PSNR: 25.35, Val_PSNR: 29.01, Val_SSIM: 0.9558
12 | Date: 2022-07-21 18:06:08s, Time_Cost: 123s, Epoch: [12/100], Train_PSNR: 25.73, Val_PSNR: 30.85, Val_SSIM: 0.9571
13 | Date: 2022-07-21 18:08:11s, Time_Cost: 122s, Epoch: [13/100], Train_PSNR: 26.04, Val_PSNR: 29.08, Val_SSIM: 0.9574
14 | Date: 2022-07-21 18:10:14s, Time_Cost: 123s, Epoch: [14/100], Train_PSNR: 26.33, Val_PSNR: 30.06, Val_SSIM: 0.9568
15 | Date: 2022-07-21 18:12:16s, Time_Cost: 123s, Epoch: [15/100], Train_PSNR: 26.71, Val_PSNR: 29.15, Val_SSIM: 0.9588
16 | Date: 2022-07-21 18:14:19s, Time_Cost: 123s, Epoch: [16/100], Train_PSNR: 26.95, Val_PSNR: 29.41, Val_SSIM: 0.9607
17 | Date: 2022-07-21 18:16:22s, Time_Cost: 123s, Epoch: [17/100], Train_PSNR: 27.28, Val_PSNR: 29.85, Val_SSIM: 0.9616
18 | Date: 2022-07-21 18:18:25s, Time_Cost: 123s, Epoch: [18/100], Train_PSNR: 27.54, Val_PSNR: 29.29, Val_SSIM: 0.9592
19 | Date: 2022-07-21 18:20:27s, Time_Cost: 123s, Epoch: [19/100], Train_PSNR: 27.72, Val_PSNR: 29.72, Val_SSIM: 0.9622
20 | Date: 2022-07-21 18:22:30s, Time_Cost: 123s, Epoch: [20/100], Train_PSNR: 27.95, Val_PSNR: 30.81, Val_SSIM: 0.9602
21 | Date: 2022-07-21 18:24:33s, Time_Cost: 123s, Epoch: [21/100], Train_PSNR: 28.11, Val_PSNR: 29.66, Val_SSIM: 0.9623
22 | Date: 2022-07-21 18:26:36s, Time_Cost: 123s, Epoch: [22/100], Train_PSNR: 28.08, Val_PSNR: 29.36, Val_SSIM: 0.9615
23 | Date: 2022-07-21 18:28:38s, Time_Cost: 123s, Epoch: [23/100], Train_PSNR: 28.41, Val_PSNR: 28.93, Val_SSIM: 0.9620
24 | Date: 2022-07-21 18:30:41s, Time_Cost: 123s, Epoch: [24/100], Train_PSNR: 28.54, Val_PSNR: 29.05, Val_SSIM: 0.9550
25 | Date: 2022-07-21 18:32:44s, Time_Cost: 123s, Epoch: [25/100], Train_PSNR: 28.62, Val_PSNR: 29.53, Val_SSIM: 0.9597
26 | Date: 2022-07-21 18:34:47s, Time_Cost: 123s, Epoch: [26/100], Train_PSNR: 28.79, Val_PSNR: 28.89, Val_SSIM: 0.9598
27 | Date: 2022-07-21 18:36:50s, Time_Cost: 123s, Epoch: [27/100], Train_PSNR: 28.89, Val_PSNR: 28.47, Val_SSIM: 0.9598
28 | Date: 2022-07-21 18:38:52s, Time_Cost: 123s, Epoch: [28/100], Train_PSNR: 28.99, Val_PSNR: 28.11, Val_SSIM: 0.9595
29 | Date: 2022-07-21 18:40:55s, Time_Cost: 123s, Epoch: [29/100], Train_PSNR: 29.06, Val_PSNR: 28.29, Val_SSIM: 0.9578
30 | Date: 2022-07-21 18:42:58s, Time_Cost: 123s, Epoch: [30/100], Train_PSNR: 29.19, Val_PSNR: 27.42, Val_SSIM: 0.9589
31 | Date: 2022-07-21 18:45:01s, Time_Cost: 123s, Epoch: [31/100], Train_PSNR: 29.28, Val_PSNR: 27.81, Val_SSIM: 0.9608
32 | Date: 2022-07-21 18:47:04s, Time_Cost: 123s, Epoch: [32/100], Train_PSNR: 29.34, Val_PSNR: 28.87, Val_SSIM: 0.9581
33 | Date: 2022-07-21 18:49:07s, Time_Cost: 123s, Epoch: [33/100], Train_PSNR: 29.43, Val_PSNR: 27.68, Val_SSIM: 0.9583
34 | Date: 2022-07-21 18:51:09s, Time_Cost: 123s, Epoch: [34/100], Train_PSNR: 29.51, Val_PSNR: 27.27, Val_SSIM: 0.9583
35 | Date: 2022-07-21 18:53:12s, Time_Cost: 123s, Epoch: [35/100], Train_PSNR: 29.57, Val_PSNR: 27.39, Val_SSIM: 0.9583
36 | Date: 2022-07-21 18:55:15s, Time_Cost: 123s, Epoch: [36/100], Train_PSNR: 29.63, Val_PSNR: 27.27, Val_SSIM: 0.9583
37 | Date: 2022-07-21 18:57:18s, Time_Cost: 123s, Epoch: [37/100], Train_PSNR: 29.68, Val_PSNR: 26.98, Val_SSIM: 0.9586
38 | Date: 2022-07-21 18:59:20s, Time_Cost: 123s, Epoch: [38/100], Train_PSNR: 29.73, Val_PSNR: 28.02, Val_SSIM: 0.9583
39 | Date: 2022-07-21 19:01:23s, Time_Cost: 123s, Epoch: [39/100], Train_PSNR: 29.77, Val_PSNR: 27.23, Val_SSIM: 0.9578
40 | Date: 2022-07-21 19:03:26s, Time_Cost: 123s, Epoch: [40/100], Train_PSNR: 29.81, Val_PSNR: 26.88, Val_SSIM: 0.9578
41 | Date: 2022-07-21 19:05:29s, Time_Cost: 123s, Epoch: [41/100], Train_PSNR: 29.85, Val_PSNR: 27.06, Val_SSIM: 0.9573
42 | Date: 2022-07-21 19:07:32s, Time_Cost: 123s, Epoch: [42/100], Train_PSNR: 29.87, Val_PSNR: 27.17, Val_SSIM: 0.9578
43 | Date: 2022-07-21 19:09:35s, Time_Cost: 123s, Epoch: [43/100], Train_PSNR: 29.90, Val_PSNR: 26.92, Val_SSIM: 0.9564
44 | Date: 2022-07-21 19:11:38s, Time_Cost: 123s, Epoch: [44/100], Train_PSNR: 29.91, Val_PSNR: 27.13, Val_SSIM: 0.9573
45 | Date: 2022-07-21 19:13:41s, Time_Cost: 123s, Epoch: [45/100], Train_PSNR: 29.93, Val_PSNR: 26.98, Val_SSIM: 0.9567
46 | Date: 2022-07-21 19:15:43s, Time_Cost: 123s, Epoch: [46/100], Train_PSNR: 29.94, Val_PSNR: 27.03, Val_SSIM: 0.9569
47 | Date: 2022-07-21 19:17:46s, Time_Cost: 123s, Epoch: [47/100], Train_PSNR: 29.95, Val_PSNR: 27.01, Val_SSIM: 0.9569
48 | Date: 2022-07-21 19:19:49s, Time_Cost: 123s, Epoch: [48/100], Train_PSNR: 29.95, Val_PSNR: 27.03, Val_SSIM: 0.9570
49 | Date: 2022-07-21 19:21:52s, Time_Cost: 123s, Epoch: [49/100], Train_PSNR: 29.96, Val_PSNR: 27.01, Val_SSIM: 0.9569
50 | Date: 2022-07-21 19:23:55s, Time_Cost: 123s, Epoch: [50/100], Train_PSNR: 29.96, Val_PSNR: 27.01, Val_SSIM: 0.9569
51 | Date: 2022-07-21 19:25:58s, Time_Cost: 123s, Epoch: [51/100], Train_PSNR: 29.96, Val_PSNR: 27.03, Val_SSIM: 0.9570
52 | Date: 2022-07-21 19:28:01s, Time_Cost: 123s, Epoch: [52/100], Train_PSNR: 29.96, Val_PSNR: 26.98, Val_SSIM: 0.9569
53 | Date: 2022-07-21 19:30:03s, Time_Cost: 123s, Epoch: [53/100], Train_PSNR: 29.96, Val_PSNR: 27.01, Val_SSIM: 0.9570
54 | Date: 2022-07-21 19:32:06s, Time_Cost: 123s, Epoch: [54/100], Train_PSNR: 29.96, Val_PSNR: 26.98, Val_SSIM: 0.9568
55 | Date: 2022-07-21 19:34:09s, Time_Cost: 123s, Epoch: [55/100], Train_PSNR: 29.95, Val_PSNR: 26.96, Val_SSIM: 0.9568
56 | Date: 2022-07-21 19:36:12s, Time_Cost: 123s, Epoch: [56/100], Train_PSNR: 29.96, Val_PSNR: 27.08, Val_SSIM: 0.9574
57 | Date: 2022-07-21 19:38:15s, Time_Cost: 123s, Epoch: [57/100], Train_PSNR: 29.96, Val_PSNR: 26.92, Val_SSIM: 0.9566
58 | Date: 2022-07-21 19:40:18s, Time_Cost: 123s, Epoch: [58/100], Train_PSNR: 29.97, Val_PSNR: 27.00, Val_SSIM: 0.9568
59 | Date: 2022-07-21 19:42:20s, Time_Cost: 123s, Epoch: [59/100], Train_PSNR: 29.98, Val_PSNR: 26.95, Val_SSIM: 0.9566
60 | Date: 2022-07-21 19:44:23s, Time_Cost: 123s, Epoch: [60/100], Train_PSNR: 29.99, Val_PSNR: 26.95, Val_SSIM: 0.9566
61 | Date: 2022-07-21 19:46:26s, Time_Cost: 123s, Epoch: [61/100], Train_PSNR: 29.99, Val_PSNR: 26.74, Val_SSIM: 0.9572
62 | Date: 2022-07-21 19:48:29s, Time_Cost: 123s, Epoch: [62/100], Train_PSNR: 30.00, Val_PSNR: 26.72, Val_SSIM: 0.9567
63 | Date: 2022-07-21 19:50:32s, Time_Cost: 123s, Epoch: [63/100], Train_PSNR: 29.99, Val_PSNR: 26.87, Val_SSIM: 0.9570
64 | Date: 2022-07-21 19:52:35s, Time_Cost: 123s, Epoch: [64/100], Train_PSNR: 30.02, Val_PSNR: 26.98, Val_SSIM: 0.9566
65 | Date: 2022-07-21 19:54:38s, Time_Cost: 123s, Epoch: [65/100], Train_PSNR: 29.98, Val_PSNR: 26.56, Val_SSIM: 0.9568
66 | Date: 2022-07-21 19:56:41s, Time_Cost: 123s, Epoch: [66/100], Train_PSNR: 30.03, Val_PSNR: 26.74, Val_SSIM: 0.9570
67 | Date: 2022-07-21 19:58:44s, Time_Cost: 123s, Epoch: [67/100], Train_PSNR: 30.03, Val_PSNR: 26.41, Val_SSIM: 0.9579
68 | Date: 2022-07-21 20:00:47s, Time_Cost: 123s, Epoch: [68/100], Train_PSNR: 30.06, Val_PSNR: 26.34, Val_SSIM: 0.9554
69 | Date: 2022-07-21 20:02:50s, Time_Cost: 123s, Epoch: [69/100], Train_PSNR: 30.00, Val_PSNR: 26.95, Val_SSIM: 0.9589
70 | Date: 2022-07-21 20:04:53s, Time_Cost: 123s, Epoch: [70/100], Train_PSNR: 29.90, Val_PSNR: 26.04, Val_SSIM: 0.9576
71 | Date: 2022-07-21 20:06:56s, Time_Cost: 123s, Epoch: [71/100], Train_PSNR: 30.07, Val_PSNR: 26.51, Val_SSIM: 0.9563
72 | Date: 2022-07-21 20:08:58s, Time_Cost: 123s, Epoch: [72/100], Train_PSNR: 30.02, Val_PSNR: 26.94, Val_SSIM: 0.9567
73 | Date: 2022-07-21 20:11:01s, Time_Cost: 123s, Epoch: [73/100], Train_PSNR: 29.98, Val_PSNR: 26.29, Val_SSIM: 0.9593
74 | Date: 2022-07-21 20:13:03s, Time_Cost: 123s, Epoch: [74/100], Train_PSNR: 29.97, Val_PSNR: 26.37, Val_SSIM: 0.9580
75 | Date: 2022-07-21 20:15:06s, Time_Cost: 123s, Epoch: [75/100], Train_PSNR: 29.91, Val_PSNR: 25.38, Val_SSIM: 0.9536
76 | Date: 2022-07-21 20:17:09s, Time_Cost: 123s, Epoch: [76/100], Train_PSNR: 29.76, Val_PSNR: 26.42, Val_SSIM: 0.9581
77 | Date: 2022-07-21 20:19:12s, Time_Cost: 123s, Epoch: [77/100], Train_PSNR: 30.15, Val_PSNR: 26.26, Val_SSIM: 0.9573
78 | Date: 2022-07-21 20:21:14s, Time_Cost: 123s, Epoch: [78/100], Train_PSNR: 30.25, Val_PSNR: 25.50, Val_SSIM: 0.9556
79 | Date: 2022-07-21 20:23:17s, Time_Cost: 123s, Epoch: [79/100], Train_PSNR: 30.07, Val_PSNR: 26.15, Val_SSIM: 0.9577
80 | Date: 2022-07-21 20:25:20s, Time_Cost: 123s, Epoch: [80/100], Train_PSNR: 29.90, Val_PSNR: 27.14, Val_SSIM: 0.9584
81 | Date: 2022-07-21 20:27:23s, Time_Cost: 123s, Epoch: [81/100], Train_PSNR: 30.11, Val_PSNR: 26.48, Val_SSIM: 0.9573
82 | Date: 2022-07-21 20:29:25s, Time_Cost: 123s, Epoch: [82/100], Train_PSNR: 30.12, Val_PSNR: 25.96, Val_SSIM: 0.9580
83 | Date: 2022-07-21 20:31:28s, Time_Cost: 123s, Epoch: [83/100], Train_PSNR: 29.69, Val_PSNR: 26.53, Val_SSIM: 0.9593
84 | Date: 2022-07-21 20:33:31s, Time_Cost: 123s, Epoch: [84/100], Train_PSNR: 30.11, Val_PSNR: 25.77, Val_SSIM: 0.9578
85 | Date: 2022-07-21 20:35:33s, Time_Cost: 123s, Epoch: [85/100], Train_PSNR: 30.39, Val_PSNR: 26.37, Val_SSIM: 0.9595
86 | Date: 2022-07-21 20:37:36s, Time_Cost: 123s, Epoch: [86/100], Train_PSNR: 30.28, Val_PSNR: 26.08, Val_SSIM: 0.9587
87 | Date: 2022-07-21 20:39:39s, Time_Cost: 123s, Epoch: [87/100], Train_PSNR: 24.97, Val_PSNR: 28.48, Val_SSIM: 0.9597
88 | Date: 2022-07-21 20:41:42s, Time_Cost: 123s, Epoch: [88/100], Train_PSNR: 29.13, Val_PSNR: 25.90, Val_SSIM: 0.9576
89 | Date: 2022-07-21 20:43:45s, Time_Cost: 123s, Epoch: [89/100], Train_PSNR: 30.17, Val_PSNR: 26.07, Val_SSIM: 0.9585
90 | Date: 2022-07-21 20:45:47s, Time_Cost: 123s, Epoch: [90/100], Train_PSNR: 30.42, Val_PSNR: 26.14, Val_SSIM: 0.9565
91 | Date: 2022-07-21 20:47:50s, Time_Cost: 123s, Epoch: [91/100], Train_PSNR: 30.57, Val_PSNR: 26.16, Val_SSIM: 0.9580
92 | Date: 2022-07-21 20:49:53s, Time_Cost: 123s, Epoch: [92/100], Train_PSNR: 30.59, Val_PSNR: 26.20, Val_SSIM: 0.9589
93 | Date: 2022-07-21 20:51:55s, Time_Cost: 123s, Epoch: [93/100], Train_PSNR: 30.63, Val_PSNR: 26.96, Val_SSIM: 0.9562
94 | Date: 2022-07-21 20:53:58s, Time_Cost: 122s, Epoch: [94/100], Train_PSNR: 30.30, Val_PSNR: 26.52, Val_SSIM: 0.9587
95 | Date: 2022-07-21 20:56:01s, Time_Cost: 123s, Epoch: [95/100], Train_PSNR: 29.75, Val_PSNR: 26.81, Val_SSIM: 0.9589
96 | Date: 2022-07-21 20:58:01s, Time_Cost: 121s, Epoch: [96/100], Train_PSNR: 30.57, Val_PSNR: 26.00, Val_SSIM: 0.9576
97 | Date: 2022-07-21 21:00:01s, Time_Cost: 120s, Epoch: [97/100], Train_PSNR: 30.78, Val_PSNR: 26.23, Val_SSIM: 0.9588
98 | Date: 2022-07-21 21:02:00s, Time_Cost: 119s, Epoch: [98/100], Train_PSNR: 30.79, Val_PSNR: 26.03, Val_SSIM: 0.9577
99 | Date: 2022-07-21 21:04:00s, Time_Cost: 119s, Epoch: [99/100], Train_PSNR: 30.83, Val_PSNR: 26.78, Val_SSIM: 0.9564
100 | Date: 2022-07-21 21:05:59s, Time_Cost: 119s, Epoch: [100/100], Train_PSNR: 30.79, Val_PSNR: 27.07, Val_SSIM: 0.9613
101 |
--------------------------------------------------------------------------------
/data/train/raindroptesta.txt:
--------------------------------------------------------------------------------
1 | input/271_rain.png
2 | input/10_rain.png
3 | input/600_rain.png
4 | input/334_rain.png
5 | input/105_rain.png
6 | input/186_rain.png
7 | input/683_rain.png
8 | input/237_rain.png
9 | input/431_rain.png
10 | input/139_rain.png
11 | input/33_rain.png
12 | input/100_rain.png
13 | input/595_rain.png
14 | input/377_rain.png
15 | input/227_rain.png
16 | input/167_rain.png
17 | input/288_rain.png
18 | input/699_rain.png
19 | input/582_rain.png
20 | input/328_rain.png
21 | input/674_rain.png
22 | input/379_rain.png
23 | input/664_rain.png
24 | input/202_rain.png
25 | input/378_rain.png
26 | input/344_rain.png
27 | input/114_rain.png
28 | input/326_rain.png
29 | input/61_rain.png
30 | input/463_rain.png
31 | input/697_rain.png
32 | input/172_rain.png
33 | input/695_rain.png
34 | input/449_rain.png
35 | input/659_rain.png
36 | input/555_rain.png
37 | input/263_rain.png
38 | input/713_rain.png
39 | input/231_rain.png
40 | input/350_rain.png
41 | input/382_rain.png
42 | input/730_rain.png
43 | input/649_rain.png
44 | input/642_rain.png
45 | input/590_rain.png
46 | input/356_rain.png
47 | input/388_rain.png
48 | input/776_rain.png
49 | input/527_rain.png
50 | input/71_rain.png
51 | input/685_rain.png
52 | input/160_rain.png
53 | input/304_rain.png
54 | input/218_rain.png
55 | input/260_rain.png
56 | input/40_rain.png
57 | input/528_rain.png
58 | input/321_rain.png
59 | input/234_rain.png
60 | input/152_rain.png
61 | input/163_rain.png
62 | input/8_rain.png
63 | input/410_rain.png
64 | input/645_rain.png
65 | input/248_rain.png
66 | input/188_rain.png
67 | input/682_rain.png
68 | input/407_rain.png
69 | input/181_rain.png
70 | input/580_rain.png
71 | input/400_rain.png
72 | input/153_rain.png
73 | input/797_rain.png
74 | input/698_rain.png
75 | input/752_rain.png
76 | input/505_rain.png
77 | input/610_rain.png
78 | input/345_rain.png
79 | input/444_rain.png
80 | input/72_rain.png
81 | input/189_rain.png
82 | input/144_rain.png
83 | input/585_rain.png
84 | input/28_rain.png
85 | input/89_rain.png
86 | input/456_rain.png
87 | input/294_rain.png
88 | input/782_rain.png
89 | input/31_rain.png
90 | input/113_rain.png
91 | input/295_rain.png
92 | input/341_rain.png
93 | input/761_rain.png
94 | input/11_rain.png
95 | input/768_rain.png
96 | input/246_rain.png
97 | input/137_rain.png
98 | input/154_rain.png
99 | input/185_rain.png
100 | input/335_rain.png
101 | input/297_rain.png
102 | input/474_rain.png
103 | input/495_rain.png
104 | input/233_rain.png
105 | input/371_rain.png
106 | input/478_rain.png
107 | input/536_rain.png
108 | input/282_rain.png
109 | input/128_rain.png
110 | input/84_rain.png
111 | input/362_rain.png
112 | input/12_rain.png
113 | input/402_rain.png
114 | input/515_rain.png
115 | input/391_rain.png
116 | input/581_rain.png
117 | input/155_rain.png
118 | input/197_rain.png
119 | input/80_rain.png
120 | input/636_rain.png
121 | input/238_rain.png
122 | input/182_rain.png
123 | input/123_rain.png
124 | input/442_rain.png
125 | input/290_rain.png
126 | input/131_rain.png
127 | input/506_rain.png
128 | input/207_rain.png
129 | input/606_rain.png
130 | input/405_rain.png
131 | input/24_rain.png
132 | input/256_rain.png
133 | input/777_rain.png
134 | input/35_rain.png
135 | input/179_rain.png
136 | input/760_rain.png
137 | input/268_rain.png
138 | input/488_rain.png
139 | input/763_rain.png
140 | input/655_rain.png
141 | input/320_rain.png
142 | input/613_rain.png
143 | input/273_rain.png
144 | input/487_rain.png
145 | input/30_rain.png
146 | input/783_rain.png
147 | input/66_rain.png
148 | input/624_rain.png
149 | input/560_rain.png
150 | input/262_rain.png
151 | input/633_rain.png
152 | input/671_rain.png
153 | input/135_rain.png
154 | input/214_rain.png
155 | input/550_rain.png
156 | input/365_rain.png
157 | input/93_rain.png
158 | input/253_rain.png
159 | input/156_rain.png
160 | input/579_rain.png
161 | input/338_rain.png
162 | input/180_rain.png
163 | input/130_rain.png
164 | input/701_rain.png
165 | input/88_rain.png
166 | input/662_rain.png
167 | input/722_rain.png
168 | input/307_rain.png
169 | input/700_rain.png
170 | input/688_rain.png
171 | input/496_rain.png
172 | input/115_rain.png
173 | input/473_rain.png
174 | input/95_rain.png
175 | input/732_rain.png
176 | input/475_rain.png
177 | input/588_rain.png
178 | input/106_rain.png
179 | input/413_rain.png
180 | input/693_rain.png
181 | input/265_rain.png
182 | input/13_rain.png
183 | input/171_rain.png
184 | input/272_rain.png
185 | input/143_rain.png
186 | input/486_rain.png
187 | input/119_rain.png
188 | input/394_rain.png
189 | input/77_rain.png
190 | input/37_rain.png
191 | input/559_rain.png
192 | input/491_rain.png
193 | input/67_rain.png
194 | input/103_rain.png
195 | input/533_rain.png
196 | input/715_rain.png
197 | input/220_rain.png
198 | input/429_rain.png
199 | input/738_rain.png
200 | input/409_rain.png
201 | input/9_rain.png
202 | input/733_rain.png
203 | input/62_rain.png
204 | input/215_rain.png
205 | input/529_rain.png
206 | input/651_rain.png
207 | input/437_rain.png
208 | input/134_rain.png
209 | input/542_rain.png
210 | input/146_rain.png
211 | input/302_rain.png
212 | input/747_rain.png
213 | input/720_rain.png
214 | input/756_rain.png
215 | input/324_rain.png
216 | input/366_rain.png
217 | input/594_rain.png
218 | input/269_rain.png
219 | input/558_rain.png
220 | input/745_rain.png
221 | input/14_rain.png
222 | input/213_rain.png
223 | input/587_rain.png
224 | input/462_rain.png
225 | input/792_rain.png
226 | input/110_rain.png
227 | input/386_rain.png
228 | input/663_rain.png
229 | input/677_rain.png
230 | input/384_rain.png
231 | input/718_rain.png
232 | input/6_rain.png
233 | input/468_rain.png
234 | input/769_rain.png
235 | input/648_rain.png
236 | input/551_rain.png
237 | input/666_rain.png
238 | input/147_rain.png
239 | input/422_rain.png
240 | input/576_rain.png
241 | input/251_rain.png
242 | input/206_rain.png
243 | input/300_rain.png
244 | input/574_rain.png
245 | input/455_rain.png
246 | input/293_rain.png
247 | input/780_rain.png
248 | input/744_rain.png
249 | input/68_rain.png
250 | input/656_rain.png
251 | input/1_rain.png
252 | input/758_rain.png
253 | input/358_rain.png
254 | input/339_rain.png
255 | input/676_rain.png
256 | input/44_rain.png
257 | input/568_rain.png
258 | input/390_rain.png
259 | input/479_rain.png
260 | input/178_rain.png
261 | input/621_rain.png
262 | input/629_rain.png
263 | input/743_rain.png
264 | input/497_rain.png
265 | input/626_rain.png
266 | input/278_rain.png
267 | input/399_rain.png
268 | input/705_rain.png
269 | input/731_rain.png
270 | input/795_rain.png
271 | input/434_rain.png
272 | input/646_rain.png
273 | input/511_rain.png
274 | input/556_rain.png
275 | input/601_rain.png
276 | input/741_rain.png
277 | input/170_rain.png
278 | input/240_rain.png
279 | input/195_rain.png
280 | input/124_rain.png
281 | input/333_rain.png
282 | input/245_rain.png
283 | input/244_rain.png
284 | input/374_rain.png
285 | input/570_rain.png
286 | input/381_rain.png
287 | input/453_rain.png
288 | input/78_rain.png
289 | input/420_rain.png
290 | input/794_rain.png
291 | input/359_rain.png
292 | input/689_rain.png
293 | input/753_rain.png
294 | input/675_rain.png
295 | input/397_rain.png
296 | input/458_rain.png
297 | input/678_rain.png
298 | input/70_rain.png
299 | input/41_rain.png
300 | input/530_rain.png
301 | input/112_rain.png
302 | input/630_rain.png
303 | input/725_rain.png
304 | input/59_rain.png
305 | input/376_rain.png
306 | input/221_rain.png
307 | input/313_rain.png
308 | input/367_rain.png
309 | input/398_rain.png
310 | input/418_rain.png
311 | input/277_rain.png
312 | input/205_rain.png
313 | input/120_rain.png
314 | input/92_rain.png
315 | input/42_rain.png
316 | input/159_rain.png
317 | input/336_rain.png
318 | input/98_rain.png
319 | input/545_rain.png
320 | input/509_rain.png
321 | input/729_rain.png
322 | input/20_rain.png
323 | input/291_rain.png
324 | input/711_rain.png
325 | input/373_rain.png
326 | input/584_rain.png
327 | input/755_rain.png
328 | input/464_rain.png
329 | input/330_rain.png
330 | input/296_rain.png
331 | input/686_rain.png
332 | input/441_rain.png
333 | input/229_rain.png
334 | input/332_rain.png
335 | input/287_rain.png
336 | input/196_rain.png
337 | input/349_rain.png
338 | input/2_rain.png
339 | input/554_rain.png
340 | input/416_rain.png
341 | input/111_rain.png
342 | input/607_rain.png
343 | input/151_rain.png
344 | input/45_rain.png
345 | input/445_rain.png
346 | input/55_rain.png
347 | input/117_rain.png
348 | input/628_rain.png
349 | input/482_rain.png
350 | input/485_rain.png
351 | input/150_rain.png
352 | input/661_rain.png
353 | input/467_rain.png
354 | input/552_rain.png
355 | input/707_rain.png
356 | input/779_rain.png
357 | input/575_rain.png
358 | input/319_rain.png
359 | input/765_rain.png
360 | input/305_rain.png
361 | input/317_rain.png
362 | input/450_rain.png
363 | input/603_rain.png
364 | input/799_rain.png
365 | input/247_rain.png
366 | input/87_rain.png
367 | input/461_rain.png
368 | input/401_rain.png
369 | input/347_rain.png
370 | input/148_rain.png
371 | input/415_rain.png
372 | input/204_rain.png
373 | input/564_rain.png
374 | input/340_rain.png
375 | input/446_rain.png
376 | input/489_rain.png
377 | input/157_rain.png
378 | input/790_rain.png
379 | input/127_rain.png
380 | input/490_rain.png
381 | input/499_rain.png
382 | input/572_rain.png
383 | input/764_rain.png
384 | input/433_rain.png
385 | input/569_rain.png
386 | input/242_rain.png
387 | input/412_rain.png
388 | input/724_rain.png
389 | input/791_rain.png
390 | input/619_rain.png
391 | input/766_rain.png
392 | input/308_rain.png
393 | input/201_rain.png
394 | input/520_rain.png
395 | input/571_rain.png
396 | input/504_rain.png
397 | input/239_rain.png
398 | input/786_rain.png
399 | input/284_rain.png
400 | input/620_rain.png
401 | input/503_rain.png
402 | input/602_rain.png
403 | input/299_rain.png
404 | input/696_rain.png
405 | input/298_rain.png
406 | input/653_rain.png
407 | input/52_rain.png
408 | input/102_rain.png
409 | input/325_rain.png
410 | input/710_rain.png
411 | input/430_rain.png
412 | input/428_rain.png
413 | input/541_rain.png
414 | input/370_rain.png
415 | input/625_rain.png
416 | input/107_rain.png
417 | input/604_rain.png
418 | input/235_rain.png
419 | input/369_rain.png
420 | input/637_rain.png
421 | input/596_rain.png
422 | input/357_rain.png
423 | input/46_rain.png
424 | input/623_rain.png
425 | input/225_rain.png
426 | input/310_rain.png
427 | input/53_rain.png
428 | input/43_rain.png
429 | input/419_rain.png
430 | input/352_rain.png
431 | input/631_rain.png
432 | input/85_rain.png
433 | input/173_rain.png
434 | input/667_rain.png
435 | input/58_rain.png
436 | input/47_rain.png
437 | input/177_rain.png
438 | input/448_rain.png
439 | input/798_rain.png
440 | input/257_rain.png
441 | input/174_rain.png
442 | input/553_rain.png
443 | input/793_rain.png
444 | input/380_rain.png
445 | input/781_rain.png
446 | input/396_rain.png
447 | input/309_rain.png
448 | input/778_rain.png
449 | input/472_rain.png
450 | input/673_rain.png
451 | input/82_rain.png
452 | input/566_rain.png
453 | input/264_rain.png
454 | input/355_rain.png
455 | input/411_rain.png
456 | input/427_rain.png
457 | input/657_rain.png
458 | input/18_rain.png
459 | input/283_rain.png
460 | input/469_rain.png
461 | input/589_rain.png
462 | input/158_rain.png
463 | input/591_rain.png
464 | input/614_rain.png
465 | input/513_rain.png
466 | input/32_rain.png
467 | input/96_rain.png
468 | input/169_rain.png
469 | input/483_rain.png
470 | input/36_rain.png
471 | input/754_rain.png
472 | input/39_rain.png
473 | input/668_rain.png
474 | input/635_rain.png
475 | input/665_rain.png
476 | input/612_rain.png
477 | input/16_rain.png
478 | input/211_rain.png
479 | input/19_rain.png
480 | input/60_rain.png
481 | input/314_rain.png
482 | input/518_rain.png
483 | input/17_rain.png
484 | input/544_rain.png
485 | input/424_rain.png
486 | input/224_rain.png
487 | input/538_rain.png
488 | input/565_rain.png
489 | input/546_rain.png
490 | input/136_rain.png
491 | input/275_rain.png
492 | input/423_rain.png
493 | input/447_rain.png
494 | input/101_rain.png
495 | input/708_rain.png
496 | input/217_rain.png
497 | input/64_rain.png
498 | input/91_rain.png
499 | input/216_rain.png
500 | input/477_rain.png
501 | input/4_rain.png
502 | input/658_rain.png
503 | input/670_rain.png
504 | input/165_rain.png
505 | input/118_rain.png
506 | input/209_rain.png
507 | input/279_rain.png
508 | input/56_rain.png
509 | input/361_rain.png
510 | input/735_rain.png
511 | input/74_rain.png
512 | input/51_rain.png
513 | input/616_rain.png
514 | input/740_rain.png
515 | input/198_rain.png
516 | input/3_rain.png
517 | input/561_rain.png
518 | input/736_rain.png
519 | input/784_rain.png
520 | input/510_rain.png
521 | input/681_rain.png
522 | input/281_rain.png
523 | input/121_rain.png
524 | input/23_rain.png
525 | input/223_rain.png
526 | input/493_rain.png
527 | input/166_rain.png
528 | input/577_rain.png
529 | input/292_rain.png
530 | input/140_rain.png
531 | input/692_rain.png
532 | input/641_rain.png
533 | input/746_rain.png
534 | input/125_rain.png
535 | input/252_rain.png
536 | input/200_rain.png
537 | input/767_rain.png
538 | input/360_rain.png
539 | input/787_rain.png
540 | input/532_rain.png
541 | input/285_rain.png
542 | input/343_rain.png
543 | input/702_rain.png
544 | input/192_rain.png
545 | input/470_rain.png
546 | input/255_rain.png
547 | input/719_rain.png
548 | input/451_rain.png
549 | input/145_rain.png
550 | input/540_rain.png
551 | input/531_rain.png
552 | input/94_rain.png
553 | input/312_rain.png
554 | input/48_rain.png
555 | input/687_rain.png
556 | input/346_rain.png
557 | input/122_rain.png
558 | input/679_rain.png
559 | input/459_rain.png
560 | input/734_rain.png
561 | input/57_rain.png
562 | input/29_rain.png
563 | input/534_rain.png
564 | input/694_rain.png
565 | input/757_rain.png
566 | input/315_rain.png
567 | input/73_rain.png
568 | input/385_rain.png
569 | input/208_rain.png
570 | input/76_rain.png
571 | input/403_rain.png
572 | input/104_rain.png
573 | input/519_rain.png
574 | input/395_rain.png
575 | input/404_rain.png
576 | input/452_rain.png
577 | input/329_rain.png
578 | input/414_rain.png
579 | input/226_rain.png
580 | input/354_rain.png
581 | input/460_rain.png
582 | input/425_rain.png
583 | input/521_rain.png
584 | input/276_rain.png
585 | input/454_rain.png
586 | input/243_rain.png
587 | input/116_rain.png
588 | input/680_rain.png
589 | input/480_rain.png
590 | input/50_rain.png
591 | input/457_rain.png
592 | input/706_rain.png
593 | input/638_rain.png
594 | input/669_rain.png
595 | input/608_rain.png
596 | input/426_rain.png
597 | input/771_rain.png
598 | input/21_rain.png
599 | input/647_rain.png
600 | input/408_rain.png
601 | input/109_rain.png
602 | input/417_rain.png
603 | input/652_rain.png
604 | input/684_rain.png
605 | input/774_rain.png
606 | input/796_rain.png
607 | input/599_rain.png
608 | input/141_rain.png
609 | input/500_rain.png
610 | input/392_rain.png
611 | input/327_rain.png
612 | input/508_rain.png
613 | input/712_rain.png
614 | input/750_rain.png
615 | input/759_rain.png
616 | input/438_rain.png
617 | input/286_rain.png
618 | input/523_rain.png
619 | input/0_rain.png
620 | input/49_rain.png
621 | input/342_rain.png
622 | input/249_rain.png
623 | input/65_rain.png
624 | input/788_rain.png
625 | input/161_rain.png
626 | input/465_rain.png
627 | input/690_rain.png
628 | input/751_rain.png
629 | input/306_rain.png
630 | input/476_rain.png
631 | input/611_rain.png
632 | input/583_rain.png
633 | input/303_rain.png
634 | input/492_rain.png
635 | input/762_rain.png
636 | input/691_rain.png
637 | input/775_rain.png
638 | input/375_rain.png
639 | input/26_rain.png
640 | input/203_rain.png
641 | input/704_rain.png
642 | input/129_rain.png
643 | input/672_rain.png
644 | input/573_rain.png
645 | input/383_rain.png
646 | input/748_rain.png
647 | input/280_rain.png
648 | input/142_rain.png
649 | input/258_rain.png
650 | input/149_rain.png
651 | input/230_rain.png
652 | input/254_rain.png
653 | input/789_rain.png
654 | input/609_rain.png
655 | input/270_rain.png
656 | input/627_rain.png
657 | input/210_rain.png
658 | input/650_rain.png
659 | input/75_rain.png
660 | input/364_rain.png
661 | input/586_rain.png
662 | input/749_rain.png
663 | input/318_rain.png
664 | input/194_rain.png
665 | input/176_rain.png
666 | input/25_rain.png
667 | input/301_rain.png
668 | input/739_rain.png
669 | input/193_rain.png
670 | input/605_rain.png
671 | input/714_rain.png
672 | input/99_rain.png
673 | input/348_rain.png
674 | input/228_rain.png
675 | input/514_rain.png
676 | input/289_rain.png
677 | input/543_rain.png
678 | input/632_rain.png
679 | input/615_rain.png
680 | input/27_rain.png
681 | input/593_rain.png
682 | input/38_rain.png
683 | input/721_rain.png
684 | input/501_rain.png
685 | input/617_rain.png
686 | input/727_rain.png
687 | input/660_rain.png
688 | input/785_rain.png
689 | input/639_rain.png
690 | input/522_rain.png
691 | input/618_rain.png
692 | input/372_rain.png
693 | input/654_rain.png
694 | input/323_rain.png
695 | input/443_rain.png
696 | input/439_rain.png
697 | input/138_rain.png
698 | input/212_rain.png
699 | input/622_rain.png
700 | input/389_rain.png
701 | input/274_rain.png
702 | input/222_rain.png
703 | input/322_rain.png
704 | input/81_rain.png
705 | input/703_rain.png
706 | input/709_rain.png
707 | input/716_rain.png
708 | input/236_rain.png
709 | input/261_rain.png
710 | input/644_rain.png
711 | input/175_rain.png
712 | input/54_rain.png
713 | input/507_rain.png
714 | input/90_rain.png
715 | input/539_rain.png
716 | input/484_rain.png
717 | input/83_rain.png
718 | input/316_rain.png
719 | input/393_rain.png
720 | input/168_rain.png
721 | input/436_rain.png
722 | input/435_rain.png
723 | input/69_rain.png
724 | input/311_rain.png
725 | input/547_rain.png
726 | input/353_rain.png
727 | input/250_rain.png
728 | input/598_rain.png
729 | input/773_rain.png
730 | input/512_rain.png
731 | input/494_rain.png
732 | input/737_rain.png
733 | input/363_rain.png
734 | input/800_rain.png
735 | input/432_rain.png
736 | input/79_rain.png
737 | input/164_rain.png
738 | input/133_rain.png
739 | input/387_rain.png
740 | input/526_rain.png
741 | input/567_rain.png
742 | input/191_rain.png
743 | input/525_rain.png
744 | input/184_rain.png
745 | input/563_rain.png
746 | input/259_rain.png
747 | input/597_rain.png
748 | input/592_rain.png
749 | input/726_rain.png
750 | input/22_rain.png
751 | input/548_rain.png
752 | input/557_rain.png
753 | input/15_rain.png
754 | input/549_rain.png
755 | input/34_rain.png
756 | input/481_rain.png
757 | input/723_rain.png
758 | input/162_rain.png
759 | input/187_rain.png
760 | input/770_rain.png
761 | input/742_rain.png
762 | input/421_rain.png
763 | input/728_rain.png
764 |
--------------------------------------------------------------------------------
/training_log/1977amaro1_log.txt:
--------------------------------------------------------------------------------
1 | Date: 2022-07-13 13:35:32s, Time_Cost: 16s, Epoch: [1/100], Train_PSNR: 9.99, Val_PSNR: 21.73, Val_SSIM: 0.5208
2 | Date: 2022-07-13 13:35:32s, Time_Cost: 16s, Epoch: [1/100], Train_PSNR: 9.99, Val_PSNR: 21.06, Val_SSIM: 0.5067
3 | Date: 2022-07-13 13:35:48s, Time_Cost: 16s, Epoch: [2/100], Train_PSNR: 15.21, Val_PSNR: 24.13, Val_SSIM: 0.6656
4 | Date: 2022-07-13 13:35:48s, Time_Cost: 16s, Epoch: [2/100], Train_PSNR: 15.21, Val_PSNR: 22.89, Val_SSIM: 0.6462
5 | Date: 2022-07-13 13:36:05s, Time_Cost: 16s, Epoch: [3/100], Train_PSNR: 16.58, Val_PSNR: 25.41, Val_SSIM: 0.7468
6 | Date: 2022-07-13 13:36:05s, Time_Cost: 16s, Epoch: [3/100], Train_PSNR: 16.58, Val_PSNR: 24.27, Val_SSIM: 0.7329
7 | Date: 2022-07-13 13:36:22s, Time_Cost: 16s, Epoch: [4/100], Train_PSNR: 17.62, Val_PSNR: 25.82, Val_SSIM: 0.8043
8 | Date: 2022-07-13 13:36:22s, Time_Cost: 16s, Epoch: [4/100], Train_PSNR: 17.62, Val_PSNR: 25.14, Val_SSIM: 0.7913
9 | Date: 2022-07-13 13:36:39s, Time_Cost: 16s, Epoch: [5/100], Train_PSNR: 18.40, Val_PSNR: 24.34, Val_SSIM: 0.8314
10 | Date: 2022-07-13 13:36:39s, Time_Cost: 16s, Epoch: [5/100], Train_PSNR: 18.40, Val_PSNR: 25.26, Val_SSIM: 0.8201
11 | Date: 2022-07-13 13:36:56s, Time_Cost: 17s, Epoch: [6/100], Train_PSNR: 18.94, Val_PSNR: 26.08, Val_SSIM: 0.8582
12 | Date: 2022-07-13 13:36:56s, Time_Cost: 17s, Epoch: [6/100], Train_PSNR: 18.94, Val_PSNR: 26.32, Val_SSIM: 0.8422
13 | Date: 2022-07-13 13:37:13s, Time_Cost: 17s, Epoch: [7/100], Train_PSNR: 19.44, Val_PSNR: 26.61, Val_SSIM: 0.8687
14 | Date: 2022-07-13 13:37:13s, Time_Cost: 17s, Epoch: [7/100], Train_PSNR: 19.44, Val_PSNR: 26.71, Val_SSIM: 0.8575
15 | Date: 2022-07-13 13:37:31s, Time_Cost: 17s, Epoch: [8/100], Train_PSNR: 19.86, Val_PSNR: 26.89, Val_SSIM: 0.8739
16 | Date: 2022-07-13 13:37:31s, Time_Cost: 17s, Epoch: [8/100], Train_PSNR: 19.86, Val_PSNR: 27.33, Val_SSIM: 0.8684
17 | Date: 2022-07-13 13:37:48s, Time_Cost: 17s, Epoch: [9/100], Train_PSNR: 20.19, Val_PSNR: 27.54, Val_SSIM: 0.8816
18 | Date: 2022-07-13 13:37:48s, Time_Cost: 17s, Epoch: [9/100], Train_PSNR: 20.19, Val_PSNR: 27.50, Val_SSIM: 0.8773
19 | Date: 2022-07-13 13:38:06s, Time_Cost: 17s, Epoch: [10/100], Train_PSNR: 20.65, Val_PSNR: 24.76, Val_SSIM: 0.8944
20 | Date: 2022-07-13 13:38:06s, Time_Cost: 17s, Epoch: [10/100], Train_PSNR: 20.65, Val_PSNR: 28.16, Val_SSIM: 0.8895
21 | Date: 2022-07-13 13:38:24s, Time_Cost: 17s, Epoch: [11/100], Train_PSNR: 21.13, Val_PSNR: 27.52, Val_SSIM: 0.9002
22 | Date: 2022-07-13 13:38:24s, Time_Cost: 17s, Epoch: [11/100], Train_PSNR: 21.13, Val_PSNR: 28.98, Val_SSIM: 0.9030
23 | Date: 2022-07-13 13:38:41s, Time_Cost: 17s, Epoch: [12/100], Train_PSNR: 21.60, Val_PSNR: 26.20, Val_SSIM: 0.9032
24 | Date: 2022-07-13 13:38:41s, Time_Cost: 17s, Epoch: [12/100], Train_PSNR: 21.60, Val_PSNR: 29.23, Val_SSIM: 0.9087
25 | Date: 2022-07-13 13:38:59s, Time_Cost: 17s, Epoch: [13/100], Train_PSNR: 21.96, Val_PSNR: 28.36, Val_SSIM: 0.9004
26 | Date: 2022-07-13 13:38:59s, Time_Cost: 17s, Epoch: [13/100], Train_PSNR: 21.96, Val_PSNR: 29.47, Val_SSIM: 0.9104
27 | Date: 2022-07-13 13:39:17s, Time_Cost: 17s, Epoch: [14/100], Train_PSNR: 22.21, Val_PSNR: 27.97, Val_SSIM: 0.9172
28 | Date: 2022-07-13 13:39:17s, Time_Cost: 17s, Epoch: [14/100], Train_PSNR: 22.21, Val_PSNR: 29.21, Val_SSIM: 0.9208
29 | Date: 2022-07-13 13:39:34s, Time_Cost: 17s, Epoch: [15/100], Train_PSNR: 22.64, Val_PSNR: 28.10, Val_SSIM: 0.9221
30 | Date: 2022-07-13 13:39:34s, Time_Cost: 17s, Epoch: [15/100], Train_PSNR: 22.64, Val_PSNR: 30.44, Val_SSIM: 0.9279
31 | Date: 2022-07-13 13:39:52s, Time_Cost: 17s, Epoch: [16/100], Train_PSNR: 22.97, Val_PSNR: 25.79, Val_SSIM: 0.9259
32 | Date: 2022-07-13 13:39:52s, Time_Cost: 17s, Epoch: [16/100], Train_PSNR: 22.97, Val_PSNR: 30.59, Val_SSIM: 0.9313
33 | Date: 2022-07-13 13:40:09s, Time_Cost: 17s, Epoch: [17/100], Train_PSNR: 23.23, Val_PSNR: 27.14, Val_SSIM: 0.9290
34 | Date: 2022-07-13 13:40:09s, Time_Cost: 17s, Epoch: [17/100], Train_PSNR: 23.23, Val_PSNR: 30.93, Val_SSIM: 0.9364
35 | Date: 2022-07-13 13:40:27s, Time_Cost: 17s, Epoch: [18/100], Train_PSNR: 23.41, Val_PSNR: 27.01, Val_SSIM: 0.9291
36 | Date: 2022-07-13 13:40:27s, Time_Cost: 17s, Epoch: [18/100], Train_PSNR: 23.41, Val_PSNR: 31.24, Val_SSIM: 0.9399
37 | Date: 2022-07-13 13:40:45s, Time_Cost: 17s, Epoch: [19/100], Train_PSNR: 23.51, Val_PSNR: 27.87, Val_SSIM: 0.9331
38 | Date: 2022-07-13 13:40:45s, Time_Cost: 17s, Epoch: [19/100], Train_PSNR: 23.51, Val_PSNR: 30.36, Val_SSIM: 0.9401
39 | Date: 2022-07-13 13:41:02s, Time_Cost: 17s, Epoch: [20/100], Train_PSNR: 23.71, Val_PSNR: 27.61, Val_SSIM: 0.9360
40 | Date: 2022-07-13 13:41:02s, Time_Cost: 17s, Epoch: [20/100], Train_PSNR: 23.71, Val_PSNR: 31.85, Val_SSIM: 0.9456
41 | Date: 2022-07-13 13:41:20s, Time_Cost: 17s, Epoch: [21/100], Train_PSNR: 23.85, Val_PSNR: 27.70, Val_SSIM: 0.9378
42 | Date: 2022-07-13 13:41:20s, Time_Cost: 17s, Epoch: [21/100], Train_PSNR: 23.85, Val_PSNR: 31.89, Val_SSIM: 0.9475
43 | Date: 2022-07-13 13:41:37s, Time_Cost: 17s, Epoch: [22/100], Train_PSNR: 23.89, Val_PSNR: 28.49, Val_SSIM: 0.9384
44 | Date: 2022-07-13 13:41:37s, Time_Cost: 17s, Epoch: [22/100], Train_PSNR: 23.89, Val_PSNR: 31.97, Val_SSIM: 0.9484
45 | Date: 2022-07-13 13:41:55s, Time_Cost: 17s, Epoch: [23/100], Train_PSNR: 24.16, Val_PSNR: 29.42, Val_SSIM: 0.9407
46 | Date: 2022-07-13 13:41:55s, Time_Cost: 17s, Epoch: [23/100], Train_PSNR: 24.16, Val_PSNR: 32.08, Val_SSIM: 0.9515
47 | Date: 2022-07-13 13:42:13s, Time_Cost: 17s, Epoch: [24/100], Train_PSNR: 24.28, Val_PSNR: 27.50, Val_SSIM: 0.9417
48 | Date: 2022-07-13 13:42:13s, Time_Cost: 17s, Epoch: [24/100], Train_PSNR: 24.28, Val_PSNR: 32.20, Val_SSIM: 0.9515
49 | Date: 2022-07-13 13:42:30s, Time_Cost: 17s, Epoch: [25/100], Train_PSNR: 24.12, Val_PSNR: 27.24, Val_SSIM: 0.9390
50 | Date: 2022-07-13 13:42:30s, Time_Cost: 17s, Epoch: [25/100], Train_PSNR: 24.12, Val_PSNR: 32.72, Val_SSIM: 0.9536
51 | Date: 2022-07-13 13:42:48s, Time_Cost: 17s, Epoch: [26/100], Train_PSNR: 24.41, Val_PSNR: 27.67, Val_SSIM: 0.9438
52 | Date: 2022-07-13 13:42:48s, Time_Cost: 17s, Epoch: [26/100], Train_PSNR: 24.41, Val_PSNR: 32.84, Val_SSIM: 0.9556
53 | Date: 2022-07-13 13:43:05s, Time_Cost: 17s, Epoch: [27/100], Train_PSNR: 24.60, Val_PSNR: 28.86, Val_SSIM: 0.9431
54 | Date: 2022-07-13 13:43:05s, Time_Cost: 17s, Epoch: [27/100], Train_PSNR: 24.60, Val_PSNR: 32.39, Val_SSIM: 0.9572
55 | Date: 2022-07-13 13:43:23s, Time_Cost: 17s, Epoch: [28/100], Train_PSNR: 24.69, Val_PSNR: 27.77, Val_SSIM: 0.9439
56 | Date: 2022-07-13 13:43:23s, Time_Cost: 17s, Epoch: [28/100], Train_PSNR: 24.69, Val_PSNR: 33.19, Val_SSIM: 0.9581
57 | Date: 2022-07-13 13:43:40s, Time_Cost: 17s, Epoch: [29/100], Train_PSNR: 24.61, Val_PSNR: 28.08, Val_SSIM: 0.9451
58 | Date: 2022-07-13 13:43:40s, Time_Cost: 17s, Epoch: [29/100], Train_PSNR: 24.61, Val_PSNR: 32.79, Val_SSIM: 0.9579
59 | Date: 2022-07-13 13:43:58s, Time_Cost: 17s, Epoch: [30/100], Train_PSNR: 24.88, Val_PSNR: 27.86, Val_SSIM: 0.9460
60 | Date: 2022-07-13 13:43:58s, Time_Cost: 17s, Epoch: [30/100], Train_PSNR: 24.88, Val_PSNR: 33.24, Val_SSIM: 0.9589
61 | Date: 2022-07-13 13:44:15s, Time_Cost: 17s, Epoch: [31/100], Train_PSNR: 24.96, Val_PSNR: 27.81, Val_SSIM: 0.9455
62 | Date: 2022-07-13 13:44:15s, Time_Cost: 17s, Epoch: [31/100], Train_PSNR: 24.96, Val_PSNR: 33.08, Val_SSIM: 0.9599
63 | Date: 2022-07-13 13:44:32s, Time_Cost: 17s, Epoch: [32/100], Train_PSNR: 25.09, Val_PSNR: 27.81, Val_SSIM: 0.9459
64 | Date: 2022-07-13 13:44:32s, Time_Cost: 17s, Epoch: [32/100], Train_PSNR: 25.09, Val_PSNR: 33.40, Val_SSIM: 0.9608
65 | Date: 2022-07-13 13:44:50s, Time_Cost: 17s, Epoch: [33/100], Train_PSNR: 25.10, Val_PSNR: 29.09, Val_SSIM: 0.9475
66 | Date: 2022-07-13 13:44:50s, Time_Cost: 17s, Epoch: [33/100], Train_PSNR: 25.10, Val_PSNR: 33.04, Val_SSIM: 0.9607
67 | Date: 2022-07-13 13:45:07s, Time_Cost: 17s, Epoch: [34/100], Train_PSNR: 25.11, Val_PSNR: 27.83, Val_SSIM: 0.9460
68 | Date: 2022-07-13 13:45:07s, Time_Cost: 17s, Epoch: [34/100], Train_PSNR: 25.11, Val_PSNR: 33.40, Val_SSIM: 0.9615
69 | Date: 2022-07-13 13:45:25s, Time_Cost: 17s, Epoch: [35/100], Train_PSNR: 24.93, Val_PSNR: 27.75, Val_SSIM: 0.9453
70 | Date: 2022-07-13 13:45:25s, Time_Cost: 17s, Epoch: [35/100], Train_PSNR: 24.93, Val_PSNR: 33.05, Val_SSIM: 0.9589
71 | Date: 2022-07-13 13:45:42s, Time_Cost: 17s, Epoch: [36/100], Train_PSNR: 25.16, Val_PSNR: 27.98, Val_SSIM: 0.9474
72 | Date: 2022-07-13 13:45:42s, Time_Cost: 17s, Epoch: [36/100], Train_PSNR: 25.16, Val_PSNR: 33.49, Val_SSIM: 0.9617
73 | Date: 2022-07-13 13:45:59s, Time_Cost: 17s, Epoch: [37/100], Train_PSNR: 25.24, Val_PSNR: 28.80, Val_SSIM: 0.9470
74 | Date: 2022-07-13 13:45:59s, Time_Cost: 17s, Epoch: [37/100], Train_PSNR: 25.24, Val_PSNR: 32.83, Val_SSIM: 0.9626
75 | Date: 2022-07-13 13:46:17s, Time_Cost: 17s, Epoch: [38/100], Train_PSNR: 25.30, Val_PSNR: 27.99, Val_SSIM: 0.9475
76 | Date: 2022-07-13 13:46:17s, Time_Cost: 17s, Epoch: [38/100], Train_PSNR: 25.30, Val_PSNR: 33.83, Val_SSIM: 0.9628
77 | Date: 2022-07-13 13:46:34s, Time_Cost: 17s, Epoch: [39/100], Train_PSNR: 25.50, Val_PSNR: 28.57, Val_SSIM: 0.9479
78 | Date: 2022-07-13 13:46:34s, Time_Cost: 17s, Epoch: [39/100], Train_PSNR: 25.50, Val_PSNR: 33.91, Val_SSIM: 0.9639
79 | Date: 2022-07-13 13:46:52s, Time_Cost: 17s, Epoch: [40/100], Train_PSNR: 25.53, Val_PSNR: 28.14, Val_SSIM: 0.9488
80 | Date: 2022-07-13 13:46:52s, Time_Cost: 17s, Epoch: [40/100], Train_PSNR: 25.53, Val_PSNR: 33.69, Val_SSIM: 0.9633
81 | Date: 2022-07-13 13:47:09s, Time_Cost: 17s, Epoch: [41/100], Train_PSNR: 25.69, Val_PSNR: 29.86, Val_SSIM: 0.9474
82 | Date: 2022-07-13 13:47:09s, Time_Cost: 17s, Epoch: [41/100], Train_PSNR: 25.69, Val_PSNR: 33.22, Val_SSIM: 0.9638
83 | Date: 2022-07-13 13:47:26s, Time_Cost: 17s, Epoch: [42/100], Train_PSNR: 25.71, Val_PSNR: 28.44, Val_SSIM: 0.9481
84 | Date: 2022-07-13 13:47:26s, Time_Cost: 17s, Epoch: [42/100], Train_PSNR: 25.71, Val_PSNR: 33.76, Val_SSIM: 0.9639
85 | Date: 2022-07-13 13:47:44s, Time_Cost: 17s, Epoch: [43/100], Train_PSNR: 25.66, Val_PSNR: 28.66, Val_SSIM: 0.9488
86 | Date: 2022-07-13 13:47:44s, Time_Cost: 17s, Epoch: [43/100], Train_PSNR: 25.66, Val_PSNR: 34.16, Val_SSIM: 0.9648
87 | Date: 2022-07-13 13:48:01s, Time_Cost: 17s, Epoch: [44/100], Train_PSNR: 25.85, Val_PSNR: 29.23, Val_SSIM: 0.9485
88 | Date: 2022-07-13 13:48:01s, Time_Cost: 17s, Epoch: [44/100], Train_PSNR: 25.85, Val_PSNR: 33.61, Val_SSIM: 0.9634
89 | Date: 2022-07-13 13:48:18s, Time_Cost: 17s, Epoch: [45/100], Train_PSNR: 25.92, Val_PSNR: 30.42, Val_SSIM: 0.9490
90 | Date: 2022-07-13 13:48:18s, Time_Cost: 17s, Epoch: [45/100], Train_PSNR: 25.92, Val_PSNR: 34.01, Val_SSIM: 0.9645
91 | Date: 2022-07-13 13:48:36s, Time_Cost: 17s, Epoch: [46/100], Train_PSNR: 26.00, Val_PSNR: 28.74, Val_SSIM: 0.9483
92 | Date: 2022-07-13 13:48:36s, Time_Cost: 17s, Epoch: [46/100], Train_PSNR: 26.00, Val_PSNR: 34.32, Val_SSIM: 0.9652
93 | Date: 2022-07-13 13:48:53s, Time_Cost: 17s, Epoch: [47/100], Train_PSNR: 26.09, Val_PSNR: 28.44, Val_SSIM: 0.9471
94 | Date: 2022-07-13 13:48:53s, Time_Cost: 17s, Epoch: [47/100], Train_PSNR: 26.09, Val_PSNR: 34.01, Val_SSIM: 0.9644
95 | Date: 2022-07-13 13:49:11s, Time_Cost: 17s, Epoch: [48/100], Train_PSNR: 26.08, Val_PSNR: 29.83, Val_SSIM: 0.9484
96 | Date: 2022-07-13 13:49:11s, Time_Cost: 17s, Epoch: [48/100], Train_PSNR: 26.08, Val_PSNR: 34.37, Val_SSIM: 0.9650
97 | Date: 2022-07-13 13:49:28s, Time_Cost: 17s, Epoch: [49/100], Train_PSNR: 26.14, Val_PSNR: 29.84, Val_SSIM: 0.9468
98 | Date: 2022-07-13 13:49:28s, Time_Cost: 17s, Epoch: [49/100], Train_PSNR: 26.14, Val_PSNR: 33.45, Val_SSIM: 0.9643
99 | Date: 2022-07-13 13:49:45s, Time_Cost: 17s, Epoch: [50/100], Train_PSNR: 26.19, Val_PSNR: 30.38, Val_SSIM: 0.9485
100 | Date: 2022-07-13 13:49:45s, Time_Cost: 17s, Epoch: [50/100], Train_PSNR: 26.19, Val_PSNR: 33.99, Val_SSIM: 0.9640
101 | Date: 2022-07-13 13:50:03s, Time_Cost: 17s, Epoch: [51/100], Train_PSNR: 26.23, Val_PSNR: 29.15, Val_SSIM: 0.9475
102 | Date: 2022-07-13 13:50:03s, Time_Cost: 17s, Epoch: [51/100], Train_PSNR: 26.23, Val_PSNR: 34.09, Val_SSIM: 0.9632
103 | Date: 2022-07-13 13:50:20s, Time_Cost: 17s, Epoch: [52/100], Train_PSNR: 26.34, Val_PSNR: 30.19, Val_SSIM: 0.9476
104 | Date: 2022-07-13 13:50:20s, Time_Cost: 17s, Epoch: [52/100], Train_PSNR: 26.34, Val_PSNR: 34.13, Val_SSIM: 0.9649
105 | Date: 2022-07-13 13:50:37s, Time_Cost: 17s, Epoch: [53/100], Train_PSNR: 26.39, Val_PSNR: 30.80, Val_SSIM: 0.9460
106 | Date: 2022-07-13 13:50:37s, Time_Cost: 17s, Epoch: [53/100], Train_PSNR: 26.39, Val_PSNR: 33.43, Val_SSIM: 0.9633
107 | Date: 2022-07-13 13:50:55s, Time_Cost: 17s, Epoch: [54/100], Train_PSNR: 26.47, Val_PSNR: 30.35, Val_SSIM: 0.9478
108 | Date: 2022-07-13 13:50:55s, Time_Cost: 17s, Epoch: [54/100], Train_PSNR: 26.47, Val_PSNR: 34.30, Val_SSIM: 0.9644
109 | Date: 2022-07-13 13:51:12s, Time_Cost: 17s, Epoch: [55/100], Train_PSNR: 26.53, Val_PSNR: 29.64, Val_SSIM: 0.9470
110 | Date: 2022-07-13 13:51:12s, Time_Cost: 17s, Epoch: [55/100], Train_PSNR: 26.53, Val_PSNR: 34.27, Val_SSIM: 0.9643
111 | Date: 2022-07-13 13:51:29s, Time_Cost: 17s, Epoch: [56/100], Train_PSNR: 26.58, Val_PSNR: 29.81, Val_SSIM: 0.9477
112 | Date: 2022-07-13 13:51:29s, Time_Cost: 17s, Epoch: [56/100], Train_PSNR: 26.58, Val_PSNR: 34.35, Val_SSIM: 0.9648
113 | Date: 2022-07-13 13:51:47s, Time_Cost: 17s, Epoch: [57/100], Train_PSNR: 26.60, Val_PSNR: 30.26, Val_SSIM: 0.9467
114 | Date: 2022-07-13 13:51:47s, Time_Cost: 17s, Epoch: [57/100], Train_PSNR: 26.60, Val_PSNR: 34.46, Val_SSIM: 0.9642
115 | Date: 2022-07-13 13:52:04s, Time_Cost: 17s, Epoch: [58/100], Train_PSNR: 26.64, Val_PSNR: 30.75, Val_SSIM: 0.9467
116 | Date: 2022-07-13 13:52:04s, Time_Cost: 17s, Epoch: [58/100], Train_PSNR: 26.64, Val_PSNR: 34.28, Val_SSIM: 0.9641
117 | Date: 2022-07-13 13:52:21s, Time_Cost: 17s, Epoch: [59/100], Train_PSNR: 26.73, Val_PSNR: 30.29, Val_SSIM: 0.9471
118 | Date: 2022-07-13 13:52:21s, Time_Cost: 17s, Epoch: [59/100], Train_PSNR: 26.73, Val_PSNR: 34.49, Val_SSIM: 0.9645
119 | Date: 2022-07-13 13:52:39s, Time_Cost: 17s, Epoch: [60/100], Train_PSNR: 26.79, Val_PSNR: 30.69, Val_SSIM: 0.9466
120 | Date: 2022-07-13 13:52:39s, Time_Cost: 17s, Epoch: [60/100], Train_PSNR: 26.79, Val_PSNR: 34.32, Val_SSIM: 0.9642
121 | Date: 2022-07-13 13:52:56s, Time_Cost: 17s, Epoch: [61/100], Train_PSNR: 26.78, Val_PSNR: 29.65, Val_SSIM: 0.9466
122 | Date: 2022-07-13 13:52:56s, Time_Cost: 17s, Epoch: [61/100], Train_PSNR: 26.78, Val_PSNR: 34.35, Val_SSIM: 0.9640
123 | Date: 2022-07-13 13:53:13s, Time_Cost: 17s, Epoch: [62/100], Train_PSNR: 26.88, Val_PSNR: 29.24, Val_SSIM: 0.9469
124 | Date: 2022-07-13 13:53:13s, Time_Cost: 17s, Epoch: [62/100], Train_PSNR: 26.88, Val_PSNR: 34.27, Val_SSIM: 0.9643
125 | Date: 2022-07-13 13:53:31s, Time_Cost: 17s, Epoch: [63/100], Train_PSNR: 26.58, Val_PSNR: 26.79, Val_SSIM: 0.8978
126 | Date: 2022-07-13 13:53:31s, Time_Cost: 17s, Epoch: [63/100], Train_PSNR: 26.58, Val_PSNR: 29.16, Val_SSIM: 0.9397
127 | Date: 2022-07-13 13:53:48s, Time_Cost: 17s, Epoch: [64/100], Train_PSNR: 18.30, Val_PSNR: 28.62, Val_SSIM: 0.9112
128 | Date: 2022-07-13 13:53:48s, Time_Cost: 17s, Epoch: [64/100], Train_PSNR: 18.30, Val_PSNR: 27.65, Val_SSIM: 0.9161
129 | Date: 2022-07-13 13:54:05s, Time_Cost: 17s, Epoch: [65/100], Train_PSNR: 22.50, Val_PSNR: 25.87, Val_SSIM: 0.9349
130 | Date: 2022-07-13 13:54:05s, Time_Cost: 17s, Epoch: [65/100], Train_PSNR: 22.50, Val_PSNR: 29.48, Val_SSIM: 0.9507
131 | Date: 2022-07-13 13:54:22s, Time_Cost: 17s, Epoch: [66/100], Train_PSNR: 23.30, Val_PSNR: 25.93, Val_SSIM: 0.9412
132 | Date: 2022-07-13 13:54:22s, Time_Cost: 17s, Epoch: [66/100], Train_PSNR: 23.30, Val_PSNR: 29.45, Val_SSIM: 0.9553
133 | Date: 2022-07-13 13:54:40s, Time_Cost: 17s, Epoch: [67/100], Train_PSNR: 23.71, Val_PSNR: 27.31, Val_SSIM: 0.9465
134 | Date: 2022-07-13 13:54:40s, Time_Cost: 17s, Epoch: [67/100], Train_PSNR: 23.71, Val_PSNR: 29.77, Val_SSIM: 0.9558
135 | Date: 2022-07-13 13:54:57s, Time_Cost: 17s, Epoch: [68/100], Train_PSNR: 23.92, Val_PSNR: 28.28, Val_SSIM: 0.9468
136 | Date: 2022-07-13 13:54:57s, Time_Cost: 17s, Epoch: [68/100], Train_PSNR: 23.92, Val_PSNR: 29.33, Val_SSIM: 0.9557
137 | Date: 2022-07-13 13:55:14s, Time_Cost: 17s, Epoch: [69/100], Train_PSNR: 23.97, Val_PSNR: 27.10, Val_SSIM: 0.9466
138 | Date: 2022-07-13 13:55:14s, Time_Cost: 17s, Epoch: [69/100], Train_PSNR: 23.97, Val_PSNR: 29.67, Val_SSIM: 0.9595
139 | Date: 2022-07-13 13:55:31s, Time_Cost: 17s, Epoch: [70/100], Train_PSNR: 24.13, Val_PSNR: 27.65, Val_SSIM: 0.9465
140 | Date: 2022-07-13 13:55:31s, Time_Cost: 17s, Epoch: [70/100], Train_PSNR: 24.13, Val_PSNR: 30.72, Val_SSIM: 0.9612
141 | Date: 2022-07-13 13:55:49s, Time_Cost: 17s, Epoch: [71/100], Train_PSNR: 24.24, Val_PSNR: 27.81, Val_SSIM: 0.9472
142 | Date: 2022-07-13 13:55:49s, Time_Cost: 17s, Epoch: [71/100], Train_PSNR: 24.24, Val_PSNR: 30.61, Val_SSIM: 0.9617
143 | Date: 2022-07-13 13:56:06s, Time_Cost: 17s, Epoch: [72/100], Train_PSNR: 24.42, Val_PSNR: 26.69, Val_SSIM: 0.9491
144 | Date: 2022-07-13 13:56:06s, Time_Cost: 17s, Epoch: [72/100], Train_PSNR: 24.42, Val_PSNR: 30.32, Val_SSIM: 0.9591
145 | Date: 2022-07-13 13:56:23s, Time_Cost: 17s, Epoch: [73/100], Train_PSNR: 24.68, Val_PSNR: 28.00, Val_SSIM: 0.9500
146 | Date: 2022-07-13 13:56:23s, Time_Cost: 17s, Epoch: [73/100], Train_PSNR: 24.68, Val_PSNR: 31.69, Val_SSIM: 0.9628
147 | Date: 2022-07-13 13:56:40s, Time_Cost: 17s, Epoch: [74/100], Train_PSNR: 25.05, Val_PSNR: 29.75, Val_SSIM: 0.9507
148 | Date: 2022-07-13 13:56:40s, Time_Cost: 17s, Epoch: [74/100], Train_PSNR: 25.05, Val_PSNR: 31.91, Val_SSIM: 0.9631
149 | Date: 2022-07-13 13:56:58s, Time_Cost: 17s, Epoch: [75/100], Train_PSNR: 25.28, Val_PSNR: 28.40, Val_SSIM: 0.9520
150 | Date: 2022-07-13 13:56:58s, Time_Cost: 17s, Epoch: [75/100], Train_PSNR: 25.28, Val_PSNR: 32.22, Val_SSIM: 0.9642
151 | Date: 2022-07-13 13:57:15s, Time_Cost: 17s, Epoch: [76/100], Train_PSNR: 25.44, Val_PSNR: 27.70, Val_SSIM: 0.9533
152 | Date: 2022-07-13 13:57:15s, Time_Cost: 17s, Epoch: [76/100], Train_PSNR: 25.44, Val_PSNR: 33.04, Val_SSIM: 0.9660
153 | Date: 2022-07-13 13:57:32s, Time_Cost: 17s, Epoch: [77/100], Train_PSNR: 25.50, Val_PSNR: 29.05, Val_SSIM: 0.9536
154 | Date: 2022-07-13 13:57:32s, Time_Cost: 17s, Epoch: [77/100], Train_PSNR: 25.50, Val_PSNR: 32.03, Val_SSIM: 0.9638
155 | Date: 2022-07-13 13:57:49s, Time_Cost: 17s, Epoch: [78/100], Train_PSNR: 25.67, Val_PSNR: 27.72, Val_SSIM: 0.9543
156 | Date: 2022-07-13 13:57:49s, Time_Cost: 17s, Epoch: [78/100], Train_PSNR: 25.67, Val_PSNR: 32.82, Val_SSIM: 0.9645
157 | Date: 2022-07-13 13:58:06s, Time_Cost: 17s, Epoch: [79/100], Train_PSNR: 25.76, Val_PSNR: 27.87, Val_SSIM: 0.9545
158 | Date: 2022-07-13 13:58:06s, Time_Cost: 17s, Epoch: [79/100], Train_PSNR: 25.76, Val_PSNR: 32.65, Val_SSIM: 0.9640
159 | Date: 2022-07-13 13:58:24s, Time_Cost: 17s, Epoch: [80/100], Train_PSNR: 25.89, Val_PSNR: 27.99, Val_SSIM: 0.9522
160 | Date: 2022-07-13 13:58:24s, Time_Cost: 17s, Epoch: [80/100], Train_PSNR: 25.89, Val_PSNR: 33.93, Val_SSIM: 0.9675
161 | Date: 2022-07-13 13:58:41s, Time_Cost: 17s, Epoch: [81/100], Train_PSNR: 25.89, Val_PSNR: 27.55, Val_SSIM: 0.9523
162 | Date: 2022-07-13 13:58:41s, Time_Cost: 17s, Epoch: [81/100], Train_PSNR: 25.89, Val_PSNR: 34.14, Val_SSIM: 0.9680
163 | Date: 2022-07-13 13:58:58s, Time_Cost: 17s, Epoch: [82/100], Train_PSNR: 25.75, Val_PSNR: 28.45, Val_SSIM: 0.9553
164 | Date: 2022-07-13 13:58:58s, Time_Cost: 17s, Epoch: [82/100], Train_PSNR: 25.75, Val_PSNR: 33.04, Val_SSIM: 0.9673
165 | Date: 2022-07-13 13:59:15s, Time_Cost: 17s, Epoch: [83/100], Train_PSNR: 25.96, Val_PSNR: 28.51, Val_SSIM: 0.9551
166 | Date: 2022-07-13 13:59:15s, Time_Cost: 17s, Epoch: [83/100], Train_PSNR: 25.96, Val_PSNR: 34.01, Val_SSIM: 0.9680
167 | Date: 2022-07-13 13:59:33s, Time_Cost: 17s, Epoch: [84/100], Train_PSNR: 26.08, Val_PSNR: 27.69, Val_SSIM: 0.9529
168 | Date: 2022-07-13 13:59:33s, Time_Cost: 17s, Epoch: [84/100], Train_PSNR: 26.08, Val_PSNR: 34.15, Val_SSIM: 0.9682
169 | Date: 2022-07-13 13:59:50s, Time_Cost: 17s, Epoch: [85/100], Train_PSNR: 26.08, Val_PSNR: 28.52, Val_SSIM: 0.9555
170 | Date: 2022-07-13 13:59:50s, Time_Cost: 17s, Epoch: [85/100], Train_PSNR: 26.08, Val_PSNR: 33.77, Val_SSIM: 0.9684
171 | Date: 2022-07-13 14:00:07s, Time_Cost: 17s, Epoch: [86/100], Train_PSNR: 26.23, Val_PSNR: 28.54, Val_SSIM: 0.9532
172 | Date: 2022-07-13 14:00:07s, Time_Cost: 17s, Epoch: [86/100], Train_PSNR: 26.23, Val_PSNR: 33.70, Val_SSIM: 0.9670
173 | Date: 2022-07-13 14:00:24s, Time_Cost: 17s, Epoch: [87/100], Train_PSNR: 26.24, Val_PSNR: 29.25, Val_SSIM: 0.9565
174 | Date: 2022-07-13 14:00:24s, Time_Cost: 17s, Epoch: [87/100], Train_PSNR: 26.24, Val_PSNR: 34.19, Val_SSIM: 0.9695
175 | Date: 2022-07-13 14:00:42s, Time_Cost: 17s, Epoch: [88/100], Train_PSNR: 26.27, Val_PSNR: 28.32, Val_SSIM: 0.9536
176 | Date: 2022-07-13 14:00:42s, Time_Cost: 17s, Epoch: [88/100], Train_PSNR: 26.27, Val_PSNR: 34.57, Val_SSIM: 0.9697
177 | Date: 2022-07-13 14:00:59s, Time_Cost: 17s, Epoch: [89/100], Train_PSNR: 26.17, Val_PSNR: 28.68, Val_SSIM: 0.9563
178 | Date: 2022-07-13 14:00:59s, Time_Cost: 17s, Epoch: [89/100], Train_PSNR: 26.17, Val_PSNR: 33.68, Val_SSIM: 0.9687
179 | Date: 2022-07-13 14:01:16s, Time_Cost: 17s, Epoch: [90/100], Train_PSNR: 26.32, Val_PSNR: 28.66, Val_SSIM: 0.9567
180 | Date: 2022-07-13 14:01:16s, Time_Cost: 17s, Epoch: [90/100], Train_PSNR: 26.32, Val_PSNR: 34.57, Val_SSIM: 0.9696
181 | Date: 2022-07-13 14:01:34s, Time_Cost: 17s, Epoch: [91/100], Train_PSNR: 26.42, Val_PSNR: 28.30, Val_SSIM: 0.9541
182 | Date: 2022-07-13 14:01:34s, Time_Cost: 17s, Epoch: [91/100], Train_PSNR: 26.42, Val_PSNR: 34.53, Val_SSIM: 0.9685
183 | Date: 2022-07-13 14:01:51s, Time_Cost: 17s, Epoch: [92/100], Train_PSNR: 26.29, Val_PSNR: 28.63, Val_SSIM: 0.9570
184 | Date: 2022-07-13 14:01:51s, Time_Cost: 17s, Epoch: [92/100], Train_PSNR: 26.29, Val_PSNR: 34.15, Val_SSIM: 0.9680
185 | Date: 2022-07-13 14:02:08s, Time_Cost: 17s, Epoch: [93/100], Train_PSNR: 26.43, Val_PSNR: 28.53, Val_SSIM: 0.9562
186 | Date: 2022-07-13 14:02:08s, Time_Cost: 17s, Epoch: [93/100], Train_PSNR: 26.43, Val_PSNR: 34.71, Val_SSIM: 0.9702
187 | Date: 2022-07-13 14:02:26s, Time_Cost: 17s, Epoch: [94/100], Train_PSNR: 26.44, Val_PSNR: 28.18, Val_SSIM: 0.9568
188 | Date: 2022-07-13 14:02:26s, Time_Cost: 17s, Epoch: [94/100], Train_PSNR: 26.44, Val_PSNR: 34.50, Val_SSIM: 0.9695
189 | Date: 2022-07-13 14:02:43s, Time_Cost: 17s, Epoch: [95/100], Train_PSNR: 26.52, Val_PSNR: 28.59, Val_SSIM: 0.9565
190 | Date: 2022-07-13 14:02:43s, Time_Cost: 17s, Epoch: [95/100], Train_PSNR: 26.52, Val_PSNR: 34.62, Val_SSIM: 0.9714
191 | Date: 2022-07-13 14:03:00s, Time_Cost: 17s, Epoch: [96/100], Train_PSNR: 26.53, Val_PSNR: 28.18, Val_SSIM: 0.9567
192 | Date: 2022-07-13 14:03:00s, Time_Cost: 17s, Epoch: [96/100], Train_PSNR: 26.53, Val_PSNR: 34.61, Val_SSIM: 0.9699
193 | Date: 2022-07-13 14:03:17s, Time_Cost: 17s, Epoch: [97/100], Train_PSNR: 26.49, Val_PSNR: 28.48, Val_SSIM: 0.9569
194 | Date: 2022-07-13 14:03:17s, Time_Cost: 17s, Epoch: [97/100], Train_PSNR: 26.49, Val_PSNR: 33.75, Val_SSIM: 0.9674
195 | Date: 2022-07-13 14:03:35s, Time_Cost: 17s, Epoch: [98/100], Train_PSNR: 26.56, Val_PSNR: 28.48, Val_SSIM: 0.9564
196 | Date: 2022-07-13 14:03:35s, Time_Cost: 17s, Epoch: [98/100], Train_PSNR: 26.56, Val_PSNR: 35.09, Val_SSIM: 0.9718
197 | Date: 2022-07-13 14:03:52s, Time_Cost: 17s, Epoch: [99/100], Train_PSNR: 26.51, Val_PSNR: 28.61, Val_SSIM: 0.9554
198 | Date: 2022-07-13 14:03:52s, Time_Cost: 17s, Epoch: [99/100], Train_PSNR: 26.51, Val_PSNR: 34.53, Val_SSIM: 0.9703
199 | Date: 2022-07-13 14:04:09s, Time_Cost: 17s, Epoch: [100/100], Train_PSNR: 26.58, Val_PSNR: 28.79, Val_SSIM: 0.9576
200 | Date: 2022-07-13 14:04:09s, Time_Cost: 17s, Epoch: [100/100], Train_PSNR: 26.58, Val_PSNR: 34.69, Val_SSIM: 0.9699
201 |
--------------------------------------------------------------------------------
/data/test/test1.txt:
--------------------------------------------------------------------------------
1 | test1/input/im_0307_s95_a04.png
2 | test1/input/im_0302_s100_a06.png
3 | test1/input/im_0321_s95_a04.png
4 | test1/input/im_0315_s100_a04.png
5 | test1/input/im_0308_s85_a06.png
6 | test1/input/im_0312_s85_a05.png
7 | test1/input/im_0330_s100_a06.png
8 | test1/input/im_0314_s100_a06.png
9 | test1/input/im_0338_s100_a05.png
10 | test1/input/im_0347_s85_a04.png
11 | test1/input/im_0309_s85_a06.png
12 | test1/input/im_0332_s100_a04.png
13 | test1/input/im_0314_s100_a05.png
14 | test1/input/im_0336_s85_a05.png
15 | test1/input/im_0310_s85_a04.png
16 | test1/input/im_0317_s95_a06.png
17 | test1/input/im_0328_s95_a04.png
18 | test1/input/im_0347_s80_a05.png
19 | test1/input/im_0320_s90_a04.png
20 | test1/input/im_0314_s85_a04.png
21 | test1/input/im_0339_s85_a06.png
22 | test1/input/im_0301_s100_a04.png
23 | test1/input/im_0329_s85_a05.png
24 | test1/input/im_0331_s100_a05.png
25 | test1/input/im_0307_s100_a04.png
26 | test1/input/im_0338_s100_a06.png
27 | test1/input/im_0306_s90_a05.png
28 | test1/input/im_0304_s100_a06.png
29 | test1/input/im_0302_s95_a05.png
30 | test1/input/im_0343_s85_a05.png
31 | test1/input/im_0329_s80_a04.png
32 | test1/input/im_0312_s80_a05.png
33 | test1/input/im_0311_s90_a05.png
34 | test1/input/im_0325_s80_a05.png
35 | test1/input/im_0348_s100_a05.png
36 | test1/input/im_0337_s85_a06.png
37 | test1/input/im_0346_s80_a05.png
38 | test1/input/im_0310_s80_a04.png
39 | test1/input/im_0318_s80_a06.png
40 | test1/input/im_0332_s80_a04.png
41 | test1/input/im_0338_s90_a04.png
42 | test1/input/im_0331_s100_a06.png
43 | test1/input/im_0304_s85_a04.png
44 | test1/input/im_0338_s80_a06.png
45 | test1/input/im_0309_s80_a05.png
46 | test1/input/im_0304_s85_a05.png
47 | test1/input/im_0349_s85_a05.png
48 | test1/input/im_0341_s100_a05.png
49 | test1/input/im_0327_s80_a04.png
50 | test1/input/im_0336_s85_a04.png
51 | test1/input/im_0341_s80_a06.png
52 | test1/input/im_0348_s95_a06.png
53 | test1/input/im_0341_s100_a06.png
54 | test1/input/im_0340_s80_a04.png
55 | test1/input/im_0315_s95_a05.png
56 | test1/input/im_0322_s90_a05.png
57 | test1/input/im_0304_s85_a06.png
58 | test1/input/im_0323_s100_a05.png
59 | test1/input/im_0333_s85_a06.png
60 | test1/input/im_0349_s100_a04.png
61 | test1/input/im_0318_s95_a04.png
62 | test1/input/im_0308_s90_a05.png
63 | test1/input/im_0304_s100_a05.png
64 | test1/input/im_0345_s95_a04.png
65 | test1/input/im_0306_s100_a06.png
66 | test1/input/im_0309_s95_a05.png
67 | test1/input/im_0335_s80_a05.png
68 | test1/input/im_0303_s100_a05.png
69 | test1/input/im_0333_s100_a05.png
70 | test1/input/im_0311_s90_a06.png
71 | test1/input/im_0317_s100_a05.png
72 | test1/input/im_0313_s90_a05.png
73 | test1/input/im_0322_s80_a06.png
74 | test1/input/im_0329_s80_a05.png
75 | test1/input/im_0330_s80_a05.png
76 | test1/input/im_0333_s85_a04.png
77 | test1/input/im_0342_s100_a06.png
78 | test1/input/im_0329_s80_a06.png
79 | test1/input/im_0335_s95_a04.png
80 | test1/input/im_0340_s100_a06.png
81 | test1/input/im_0318_s80_a05.png
82 | test1/input/im_0310_s100_a04.png
83 | test1/input/im_0306_s85_a05.png
84 | test1/input/im_0319_s85_a05.png
85 | test1/input/im_0334_s95_a06.png
86 | test1/input/im_0342_s90_a05.png
87 | test1/input/im_0342_s85_a04.png
88 | test1/input/im_0342_s95_a06.png
89 | test1/input/im_0342_s85_a05.png
90 | test1/input/im_0331_s90_a04.png
91 | test1/input/im_0304_s90_a06.png
92 | test1/input/im_0342_s100_a04.png
93 | test1/input/im_0337_s80_a05.png
94 | test1/input/im_0350_s95_a06.png
95 | test1/input/im_0326_s85_a06.png
96 | test1/input/im_0324_s80_a05.png
97 | test1/input/im_0313_s100_a06.png
98 | test1/input/im_0329_s100_a05.png
99 | test1/input/im_0343_s85_a04.png
100 | test1/input/im_0348_s85_a06.png
101 | test1/input/im_0301_s100_a06.png
102 | test1/input/im_0322_s85_a04.png
103 | test1/input/im_0328_s100_a05.png
104 | test1/input/im_0303_s90_a05.png
105 | test1/input/im_0327_s95_a05.png
106 | test1/input/im_0334_s90_a06.png
107 | test1/input/im_0339_s95_a05.png
108 | test1/input/im_0321_s90_a04.png
109 | test1/input/im_0301_s85_a06.png
110 | test1/input/im_0346_s80_a04.png
111 | test1/input/im_0341_s85_a05.png
112 | test1/input/im_0348_s100_a06.png
113 | test1/input/im_0344_s80_a05.png
114 | test1/input/im_0347_s100_a05.png
115 | test1/input/im_0349_s85_a04.png
116 | test1/input/im_0335_s85_a05.png
117 | test1/input/im_0350_s85_a06.png
118 | test1/input/im_0307_s80_a04.png
119 | test1/input/im_0309_s90_a06.png
120 | test1/input/im_0330_s100_a04.png
121 | test1/input/im_0302_s85_a04.png
122 | test1/input/im_0307_s85_a06.png
123 | test1/input/im_0326_s90_a06.png
124 | test1/input/im_0312_s90_a04.png
125 | test1/input/im_0341_s80_a04.png
126 | test1/input/im_0335_s95_a06.png
127 | test1/input/im_0312_s85_a04.png
128 | test1/input/im_0303_s80_a04.png
129 | test1/input/im_0316_s90_a05.png
130 | test1/input/im_0350_s90_a06.png
131 | test1/input/im_0320_s100_a05.png
132 | test1/input/im_0345_s90_a05.png
133 | test1/input/im_0342_s90_a06.png
134 | test1/input/im_0345_s95_a06.png
135 | test1/input/im_0308_s100_a04.png
136 | test1/input/im_0346_s85_a04.png
137 | test1/input/im_0320_s95_a05.png
138 | test1/input/im_0331_s90_a06.png
139 | test1/input/im_0322_s100_a04.png
140 | test1/input/im_0304_s100_a04.png
141 | test1/input/im_0348_s90_a05.png
142 | test1/input/im_0332_s90_a05.png
143 | test1/input/im_0331_s85_a05.png
144 | test1/input/im_0335_s100_a06.png
145 | test1/input/im_0323_s95_a06.png
146 | test1/input/im_0310_s80_a06.png
147 | test1/input/im_0324_s90_a06.png
148 | test1/input/im_0336_s80_a05.png
149 | test1/input/im_0311_s95_a04.png
150 | test1/input/im_0304_s95_a04.png
151 | test1/input/im_0350_s90_a04.png
152 | test1/input/im_0344_s85_a05.png
153 | test1/input/im_0334_s90_a05.png
154 | test1/input/im_0328_s100_a06.png
155 | test1/input/im_0315_s80_a05.png
156 | test1/input/im_0330_s95_a05.png
157 | test1/input/im_0334_s100_a06.png
158 | test1/input/im_0304_s80_a04.png
159 | test1/input/im_0310_s85_a06.png
160 | test1/input/im_0342_s95_a05.png
161 | test1/input/im_0331_s85_a04.png
162 | test1/input/im_0324_s90_a04.png
163 | test1/input/im_0349_s90_a05.png
164 | test1/input/im_0350_s95_a04.png
165 | test1/input/im_0316_s85_a06.png
166 | test1/input/im_0307_s100_a05.png
167 | test1/input/im_0345_s85_a04.png
168 | test1/input/im_0337_s85_a04.png
169 | test1/input/im_0334_s80_a06.png
170 | test1/input/im_0314_s85_a05.png
171 | test1/input/im_0348_s80_a04.png
172 | test1/input/im_0302_s90_a04.png
173 | test1/input/im_0335_s90_a04.png
174 | test1/input/im_0305_s95_a06.png
175 | test1/input/im_0315_s80_a04.png
176 | test1/input/im_0344_s85_a04.png
177 | test1/input/im_0302_s95_a06.png
178 | test1/input/im_0327_s100_a06.png
179 | test1/input/im_0320_s95_a04.png
180 | test1/input/im_0307_s90_a04.png
181 | test1/input/im_0327_s90_a04.png
182 | test1/input/im_0331_s90_a05.png
183 | test1/input/im_0309_s100_a04.png
184 | test1/input/im_0339_s90_a06.png
185 | test1/input/im_0333_s85_a05.png
186 | test1/input/im_0329_s85_a04.png
187 | test1/input/im_0347_s80_a06.png
188 | test1/input/im_0315_s85_a06.png
189 | test1/input/im_0306_s85_a06.png
190 | test1/input/im_0336_s90_a04.png
191 | test1/input/im_0318_s90_a06.png
192 | test1/input/im_0327_s85_a05.png
193 | test1/input/im_0324_s95_a04.png
194 | test1/input/im_0316_s80_a04.png
195 | test1/input/im_0329_s100_a04.png
196 | test1/input/im_0311_s85_a04.png
197 | test1/input/im_0323_s85_a05.png
198 | test1/input/im_0346_s90_a06.png
199 | test1/input/im_0342_s95_a04.png
200 | test1/input/im_0338_s95_a05.png
201 | test1/input/im_0302_s85_a05.png
202 | test1/input/im_0310_s95_a04.png
203 | test1/input/im_0302_s80_a05.png
204 | test1/input/im_0306_s100_a04.png
205 | test1/input/im_0317_s80_a06.png
206 | test1/input/im_0301_s80_a05.png
207 | test1/input/im_0340_s90_a05.png
208 | test1/input/im_0327_s90_a05.png
209 | test1/input/im_0326_s80_a06.png
210 | test1/input/im_0318_s85_a05.png
211 | test1/input/im_0308_s95_a04.png
212 | test1/input/im_0328_s90_a06.png
213 | test1/input/im_0345_s100_a05.png
214 | test1/input/im_0341_s100_a04.png
215 | test1/input/im_0345_s80_a04.png
216 | test1/input/im_0326_s95_a06.png
217 | test1/input/im_0342_s100_a05.png
218 | test1/input/im_0311_s95_a06.png
219 | test1/input/im_0309_s80_a04.png
220 | test1/input/im_0310_s85_a05.png
221 | test1/input/im_0338_s80_a05.png
222 | test1/input/im_0307_s85_a04.png
223 | test1/input/im_0317_s80_a05.png
224 | test1/input/im_0332_s80_a05.png
225 | test1/input/im_0322_s95_a06.png
226 | test1/input/im_0325_s95_a06.png
227 | test1/input/im_0303_s100_a06.png
228 | test1/input/im_0350_s80_a06.png
229 | test1/input/im_0315_s90_a05.png
230 | test1/input/im_0302_s90_a05.png
231 | test1/input/im_0323_s90_a04.png
232 | test1/input/im_0332_s100_a05.png
233 | test1/input/im_0345_s90_a04.png
234 | test1/input/im_0332_s100_a06.png
235 | test1/input/im_0333_s90_a05.png
236 | test1/input/im_0321_s100_a05.png
237 | test1/input/im_0303_s90_a06.png
238 | test1/input/im_0344_s95_a06.png
239 | test1/input/im_0343_s95_a06.png
240 | test1/input/im_0344_s100_a06.png
241 | test1/input/im_0329_s90_a04.png
242 | test1/input/im_0327_s80_a06.png
243 | test1/input/im_0347_s95_a04.png
244 | test1/input/im_0339_s80_a06.png
245 | test1/input/im_0316_s85_a05.png
246 | test1/input/im_0346_s95_a04.png
247 | test1/input/im_0309_s85_a05.png
248 | test1/input/im_0315_s90_a04.png
249 | test1/input/im_0312_s90_a06.png
250 | test1/input/im_0318_s80_a04.png
251 | test1/input/im_0322_s100_a06.png
252 | test1/input/im_0314_s90_a04.png
253 | test1/input/im_0304_s90_a04.png
254 | test1/input/im_0313_s95_a06.png
255 | test1/input/im_0343_s100_a04.png
256 | test1/input/im_0318_s85_a06.png
257 | test1/input/im_0310_s90_a06.png
258 | test1/input/im_0325_s90_a05.png
259 | test1/input/im_0348_s95_a04.png
260 | test1/input/im_0347_s85_a06.png
261 | test1/input/im_0313_s85_a05.png
262 | test1/input/im_0340_s85_a06.png
263 | test1/input/im_0339_s85_a05.png
264 | test1/input/im_0330_s85_a06.png
265 | test1/input/im_0312_s80_a04.png
266 | test1/input/im_0330_s80_a04.png
267 | test1/input/im_0350_s100_a06.png
268 | test1/input/im_0314_s80_a04.png
269 | test1/input/im_0319_s100_a04.png
270 | test1/input/im_0317_s80_a04.png
271 | test1/input/im_0318_s100_a05.png
272 | test1/input/im_0319_s80_a04.png
273 | test1/input/im_0339_s90_a04.png
274 | test1/input/im_0339_s80_a04.png
275 | test1/input/im_0303_s95_a06.png
276 | test1/input/im_0320_s85_a05.png
277 | test1/input/im_0318_s100_a06.png
278 | test1/input/im_0313_s80_a04.png
279 | test1/input/im_0337_s90_a05.png
280 | test1/input/im_0343_s80_a06.png
281 | test1/input/im_0323_s100_a06.png
282 | test1/input/im_0313_s80_a05.png
283 | test1/input/im_0316_s80_a06.png
284 | test1/input/im_0306_s80_a05.png
285 | test1/input/im_0337_s95_a05.png
286 | test1/input/im_0313_s95_a05.png
287 | test1/input/im_0302_s100_a05.png
288 | test1/input/im_0339_s90_a05.png
289 | test1/input/im_0334_s90_a04.png
290 | test1/input/im_0325_s100_a04.png
291 | test1/input/im_0305_s80_a06.png
292 | test1/input/im_0320_s80_a05.png
293 | test1/input/im_0321_s90_a05.png
294 | test1/input/im_0315_s100_a05.png
295 | test1/input/im_0334_s95_a04.png
296 | test1/input/im_0320_s85_a04.png
297 | test1/input/im_0322_s100_a05.png
298 | test1/input/im_0325_s95_a05.png
299 | test1/input/im_0314_s100_a04.png
300 | test1/input/im_0331_s95_a04.png
301 | test1/input/im_0326_s90_a04.png
302 | test1/input/im_0321_s90_a06.png
303 | test1/input/im_0326_s100_a04.png
304 | test1/input/im_0345_s85_a06.png
305 | test1/input/im_0324_s80_a04.png
306 | test1/input/im_0328_s100_a04.png
307 | test1/input/im_0310_s100_a06.png
308 | test1/input/im_0349_s100_a06.png
309 | test1/input/im_0330_s85_a04.png
310 | test1/input/im_0323_s90_a05.png
311 | test1/input/im_0345_s100_a04.png
312 | test1/input/im_0328_s85_a06.png
313 | test1/input/im_0312_s80_a06.png
314 | test1/input/im_0336_s90_a06.png
315 | test1/input/im_0349_s95_a05.png
316 | test1/input/im_0317_s85_a05.png
317 | test1/input/im_0326_s100_a05.png
318 | test1/input/im_0336_s85_a06.png
319 | test1/input/im_0326_s90_a05.png
320 | test1/input/im_0340_s80_a05.png
321 | test1/input/im_0312_s95_a04.png
322 | test1/input/im_0330_s95_a06.png
323 | test1/input/im_0329_s95_a04.png
324 | test1/input/im_0333_s80_a06.png
325 | test1/input/im_0322_s90_a04.png
326 | test1/input/im_0324_s85_a06.png
327 | test1/input/im_0344_s80_a06.png
328 | test1/input/im_0341_s90_a04.png
329 | test1/input/im_0307_s90_a05.png
330 | test1/input/im_0327_s95_a06.png
331 | test1/input/im_0335_s90_a06.png
332 | test1/input/im_0314_s95_a06.png
333 | test1/input/im_0347_s95_a06.png
334 | test1/input/im_0345_s85_a05.png
335 | test1/input/im_0312_s100_a05.png
336 | test1/input/im_0338_s100_a04.png
337 | test1/input/im_0347_s100_a04.png
338 | test1/input/im_0334_s80_a05.png
339 | test1/input/im_0331_s95_a06.png
340 | test1/input/im_0316_s80_a05.png
341 | test1/input/im_0320_s85_a06.png
342 | test1/input/im_0346_s100_a06.png
343 | test1/input/im_0325_s85_a04.png
344 | test1/input/im_0342_s85_a06.png
345 | test1/input/im_0302_s95_a04.png
346 | test1/input/im_0326_s80_a04.png
347 | test1/input/im_0344_s90_a04.png
348 | test1/input/im_0346_s85_a06.png
349 | test1/input/im_0342_s80_a05.png
350 | test1/input/im_0302_s80_a04.png
351 | test1/input/im_0333_s80_a04.png
352 | test1/input/im_0343_s95_a04.png
353 | test1/input/im_0316_s100_a06.png
354 | test1/input/im_0324_s100_a05.png
355 | test1/input/im_0306_s80_a04.png
356 | test1/input/im_0332_s95_a05.png
357 | test1/input/im_0313_s85_a06.png
358 | test1/input/im_0329_s90_a06.png
359 | test1/input/im_0303_s100_a04.png
360 | test1/input/im_0333_s90_a06.png
361 | test1/input/im_0306_s100_a05.png
362 | test1/input/im_0336_s80_a04.png
363 | test1/input/im_0346_s85_a05.png
364 | test1/input/im_0316_s95_a05.png
365 | test1/input/im_0344_s100_a04.png
366 | test1/input/im_0332_s95_a04.png
367 | test1/input/im_0309_s100_a06.png
368 | test1/input/im_0319_s100_a05.png
369 | test1/input/im_0341_s95_a04.png
370 | test1/input/im_0308_s90_a06.png
371 | test1/input/im_0343_s90_a05.png
372 | test1/input/im_0331_s100_a04.png
373 | test1/input/im_0344_s80_a04.png
374 | test1/input/im_0335_s85_a04.png
375 | test1/input/im_0321_s85_a05.png
376 | test1/input/im_0311_s100_a05.png
377 | test1/input/im_0346_s100_a04.png
378 | test1/input/im_0348_s90_a06.png
379 | test1/input/im_0338_s85_a04.png
380 | test1/input/im_0314_s95_a04.png
381 | test1/input/im_0327_s85_a04.png
382 | test1/input/im_0340_s90_a04.png
383 | test1/input/im_0332_s85_a06.png
384 | test1/input/im_0342_s90_a04.png
385 | test1/input/im_0308_s85_a04.png
386 | test1/input/im_0311_s85_a05.png
387 | test1/input/im_0319_s90_a05.png
388 | test1/input/im_0344_s85_a06.png
389 | test1/input/im_0346_s100_a05.png
390 | test1/input/im_0348_s85_a05.png
391 | test1/input/im_0318_s100_a04.png
392 | test1/input/im_0308_s100_a06.png
393 | test1/input/im_0333_s100_a06.png
394 | test1/input/im_0347_s80_a04.png
395 | test1/input/im_0324_s90_a05.png
396 | test1/input/im_0349_s90_a06.png
397 | test1/input/im_0327_s90_a06.png
398 | test1/input/im_0305_s90_a06.png
399 | test1/input/im_0305_s80_a04.png
400 | test1/input/im_0337_s85_a05.png
401 | test1/input/im_0315_s90_a06.png
402 | test1/input/im_0303_s95_a05.png
403 | test1/input/im_0343_s90_a04.png
404 | test1/input/im_0311_s80_a06.png
405 | test1/input/im_0327_s85_a06.png
406 | test1/input/im_0349_s90_a04.png
407 | test1/input/im_0324_s80_a06.png
408 | test1/input/im_0315_s95_a06.png
409 | test1/input/im_0311_s85_a06.png
410 | test1/input/im_0343_s100_a05.png
411 | test1/input/im_0329_s85_a06.png
412 | test1/input/im_0335_s80_a06.png
413 | test1/input/im_0332_s90_a06.png
414 | test1/input/im_0339_s100_a06.png
415 | test1/input/im_0304_s95_a06.png
416 | test1/input/im_0325_s100_a06.png
417 | test1/input/im_0335_s90_a05.png
418 | test1/input/im_0344_s100_a05.png
419 | test1/input/im_0339_s80_a05.png
420 | test1/input/im_0336_s100_a05.png
421 | test1/input/im_0343_s90_a06.png
422 | test1/input/im_0314_s85_a06.png
423 | test1/input/im_0321_s100_a06.png
424 | test1/input/im_0318_s85_a04.png
425 | test1/input/im_0319_s80_a05.png
426 | test1/input/im_0349_s85_a06.png
427 | test1/input/im_0318_s90_a04.png
428 | test1/input/im_0336_s100_a06.png
429 | test1/input/im_0310_s100_a05.png
430 | test1/input/im_0316_s95_a06.png
431 | test1/input/im_0324_s95_a05.png
432 | test1/input/im_0338_s85_a05.png
433 | test1/input/im_0303_s95_a04.png
434 | test1/input/im_0348_s95_a05.png
435 | test1/input/im_0314_s90_a06.png
436 | test1/input/im_0345_s90_a06.png
437 | test1/input/im_0331_s95_a05.png
438 | test1/input/im_0339_s95_a06.png
439 | test1/input/im_0305_s100_a06.png
440 | test1/input/im_0348_s80_a05.png
441 | test1/input/im_0307_s85_a05.png
442 | test1/input/im_0312_s95_a05.png
443 | test1/input/im_0323_s95_a05.png
444 | test1/input/im_0330_s90_a04.png
445 | test1/input/im_0330_s80_a06.png
446 | test1/input/im_0346_s95_a06.png
447 | test1/input/im_0346_s95_a05.png
448 | test1/input/im_0316_s95_a04.png
449 | test1/input/im_0338_s95_a06.png
450 | test1/input/im_0312_s90_a05.png
451 | test1/input/im_0307_s100_a06.png
452 | test1/input/im_0328_s80_a05.png
453 | test1/input/im_0337_s95_a04.png
454 | test1/input/im_0314_s80_a06.png
455 | test1/input/im_0325_s85_a06.png
456 | test1/input/im_0311_s80_a04.png
457 | test1/input/im_0321_s80_a06.png
458 | test1/input/im_0322_s80_a04.png
459 | test1/input/im_0334_s85_a04.png
460 | test1/input/im_0335_s100_a05.png
461 | test1/input/im_0325_s95_a04.png
462 | test1/input/im_0344_s90_a05.png
463 | test1/input/im_0305_s85_a05.png
464 | test1/input/im_0306_s95_a04.png
465 | test1/input/im_0350_s85_a04.png
466 | test1/input/im_0325_s85_a05.png
467 | test1/input/im_0340_s90_a06.png
468 | test1/input/im_0334_s100_a04.png
469 | test1/input/im_0329_s95_a06.png
470 | test1/input/im_0301_s80_a04.png
471 | test1/input/im_0350_s80_a05.png
472 | test1/input/im_0305_s95_a05.png
473 | test1/input/im_0346_s80_a06.png
474 | test1/input/im_0312_s85_a06.png
475 | test1/input/im_0342_s80_a04.png
476 | test1/input/im_0309_s80_a06.png
477 | test1/input/im_0332_s90_a04.png
478 | test1/input/im_0344_s90_a06.png
479 | test1/input/im_0338_s95_a04.png
480 | test1/input/im_0327_s100_a04.png
481 | test1/input/im_0313_s100_a04.png
482 | test1/input/im_0341_s90_a06.png
483 | test1/input/im_0331_s80_a06.png
484 | test1/input/im_0322_s95_a05.png
485 | test1/input/im_0321_s95_a05.png
486 | test1/input/im_0303_s85_a04.png
487 | test1/input/im_0350_s100_a05.png
488 | test1/input/im_0305_s85_a06.png
489 | test1/input/im_0334_s100_a05.png
490 | test1/input/im_0312_s100_a04.png
491 | test1/input/im_0320_s95_a06.png
492 | test1/input/im_0335_s95_a05.png
493 | test1/input/im_0350_s95_a05.png
494 | test1/input/im_0308_s90_a04.png
495 | test1/input/im_0321_s80_a05.png
496 | test1/input/im_0324_s100_a04.png
497 | test1/input/im_0319_s80_a06.png
498 | test1/input/im_0301_s95_a05.png
499 | test1/input/im_0312_s95_a06.png
500 | test1/input/im_0350_s90_a05.png
501 | test1/input/im_0318_s95_a06.png
502 | test1/input/im_0338_s90_a06.png
503 | test1/input/im_0307_s80_a06.png
504 | test1/input/im_0327_s95_a04.png
505 | test1/input/im_0305_s90_a04.png
506 | test1/input/im_0307_s80_a05.png
507 | test1/input/im_0333_s95_a06.png
508 | test1/input/im_0347_s100_a06.png
509 | test1/input/im_0317_s100_a06.png
510 | test1/input/im_0340_s100_a05.png
511 | test1/input/im_0350_s100_a04.png
512 | test1/input/im_0325_s80_a06.png
513 | test1/input/im_0322_s90_a06.png
514 | test1/input/im_0337_s80_a06.png
515 | test1/input/im_0323_s80_a04.png
516 | test1/input/im_0301_s90_a05.png
517 | test1/input/im_0331_s85_a06.png
518 | test1/input/im_0341_s95_a05.png
519 | test1/input/im_0307_s95_a05.png
520 | test1/input/im_0335_s80_a04.png
521 | test1/input/im_0308_s80_a05.png
522 | test1/input/im_0305_s80_a05.png
523 | test1/input/im_0317_s90_a04.png
524 | test1/input/im_0313_s80_a06.png
525 | test1/input/im_0301_s80_a06.png
526 | test1/input/im_0306_s90_a04.png
527 | test1/input/im_0319_s95_a04.png
528 | test1/input/im_0319_s95_a06.png
529 | test1/input/im_0326_s80_a05.png
530 | test1/input/im_0335_s85_a06.png
531 | test1/input/im_0322_s85_a05.png
532 | test1/input/im_0321_s100_a04.png
533 | test1/input/im_0325_s90_a06.png
534 | test1/input/im_0326_s100_a06.png
535 | test1/input/im_0336_s95_a05.png
536 | test1/input/im_0339_s85_a04.png
537 | test1/input/im_0341_s80_a05.png
538 | test1/input/im_0336_s95_a06.png
539 | test1/input/im_0317_s95_a05.png
540 | test1/input/im_0340_s100_a04.png
541 | test1/input/im_0334_s80_a04.png
542 | test1/input/im_0340_s80_a06.png
543 | test1/input/im_0320_s90_a05.png
544 | test1/input/im_0311_s95_a05.png
545 | test1/input/im_0315_s100_a06.png
546 | test1/input/im_0349_s80_a05.png
547 | test1/input/im_0308_s80_a04.png
548 | test1/input/im_0349_s95_a06.png
549 | test1/input/im_0318_s90_a05.png
550 | test1/input/im_0306_s80_a06.png
551 | test1/input/im_0343_s80_a05.png
552 | test1/input/im_0332_s95_a06.png
553 | test1/input/im_0319_s95_a05.png
554 | test1/input/im_0341_s90_a05.png
555 | test1/input/im_0313_s85_a04.png
556 | test1/input/im_0303_s80_a06.png
557 | test1/input/im_0334_s85_a05.png
558 | test1/input/im_0341_s85_a06.png
559 | test1/input/im_0310_s90_a05.png
560 | test1/input/im_0307_s95_a06.png
561 | test1/input/im_0326_s95_a04.png
562 | test1/input/im_0328_s90_a04.png
563 | test1/input/im_0327_s100_a05.png
564 | test1/input/im_0333_s90_a04.png
565 | test1/input/im_0316_s100_a05.png
566 | test1/input/im_0303_s90_a04.png
567 | test1/input/im_0330_s90_a06.png
568 | test1/input/im_0303_s85_a06.png
569 | test1/input/im_0333_s100_a04.png
570 | test1/input/im_0305_s100_a04.png
571 | test1/input/im_0348_s85_a04.png
572 | test1/input/im_0348_s90_a04.png
573 | test1/input/im_0324_s85_a04.png
574 | test1/input/im_0347_s90_a05.png
575 | test1/input/im_0335_s100_a04.png
576 | test1/input/im_0311_s80_a05.png
577 | test1/input/im_0328_s90_a05.png
578 | test1/input/im_0346_s90_a05.png
579 | test1/input/im_0313_s90_a06.png
580 | test1/input/im_0326_s95_a05.png
581 | test1/input/im_0305_s85_a04.png
582 | test1/input/im_0342_s80_a06.png
583 | test1/input/im_0302_s100_a04.png
584 | test1/input/im_0322_s80_a05.png
585 | test1/input/im_0338_s85_a06.png
586 | test1/input/im_0301_s90_a06.png
587 | test1/input/im_0337_s100_a05.png
588 | test1/input/im_0310_s90_a04.png
589 | test1/input/im_0340_s85_a04.png
590 | test1/input/im_0316_s85_a04.png
591 | test1/input/im_0321_s80_a04.png
592 | test1/input/im_0338_s90_a05.png
593 | test1/input/im_0343_s85_a06.png
594 | test1/input/im_0311_s100_a04.png
595 | test1/input/im_0310_s95_a06.png
596 | test1/input/im_0340_s95_a06.png
597 | test1/input/im_0331_s80_a04.png
598 | test1/input/im_0320_s80_a06.png
599 | test1/input/im_0308_s100_a05.png
600 | test1/input/im_0341_s85_a04.png
601 | test1/input/im_0350_s85_a05.png
602 | test1/input/im_0347_s90_a04.png
603 | test1/input/im_0347_s95_a05.png
604 | test1/input/im_0330_s90_a05.png
605 | test1/input/im_0328_s85_a04.png
606 | test1/input/im_0349_s80_a04.png
607 | test1/input/im_0321_s95_a06.png
608 | test1/input/im_0301_s95_a04.png
609 | test1/input/im_0343_s100_a06.png
610 | test1/input/im_0340_s95_a04.png
611 | test1/input/im_0336_s95_a04.png
612 | test1/input/im_0319_s90_a04.png
613 | test1/input/im_0333_s95_a04.png
614 | test1/input/im_0309_s95_a06.png
615 | test1/input/im_0344_s95_a04.png
616 | test1/input/im_0329_s100_a06.png
617 | test1/input/im_0325_s100_a05.png
618 | test1/input/im_0323_s95_a04.png
619 | test1/input/im_0333_s80_a05.png
620 | test1/input/im_0314_s90_a05.png
621 | test1/input/im_0323_s90_a06.png
622 | test1/input/im_0314_s80_a05.png
623 | test1/input/im_0315_s85_a04.png
624 | test1/input/im_0336_s100_a04.png
625 | test1/input/im_0350_s80_a04.png
626 | test1/input/im_0332_s80_a06.png
627 | test1/input/im_0311_s90_a04.png
628 | test1/input/im_0324_s95_a06.png
629 | test1/input/im_0337_s90_a04.png
630 | test1/input/im_0333_s95_a05.png
631 | test1/input/im_0319_s85_a06.png
632 | test1/input/im_0309_s95_a04.png
633 | test1/input/im_0308_s85_a05.png
634 | test1/input/im_0301_s85_a05.png
635 | test1/input/im_0304_s80_a06.png
636 | test1/input/im_0320_s100_a04.png
637 | test1/input/im_0321_s85_a04.png
638 | test1/input/im_0302_s85_a06.png
639 | test1/input/im_0323_s85_a06.png
640 | test1/input/im_0305_s90_a05.png
641 | test1/input/im_0334_s85_a06.png
642 | test1/input/im_0326_s85_a04.png
643 | test1/input/im_0309_s90_a05.png
644 | test1/input/im_0310_s95_a05.png
645 | test1/input/im_0340_s85_a05.png
646 | test1/input/im_0345_s100_a06.png
647 | test1/input/im_0337_s80_a04.png
648 | test1/input/im_0309_s85_a04.png
649 | test1/input/im_0330_s100_a05.png
650 | test1/input/im_0319_s100_a06.png
651 | test1/input/im_0337_s100_a04.png
652 | test1/input/im_0325_s80_a04.png
653 | test1/input/im_0305_s100_a05.png
654 | test1/input/im_0307_s90_a06.png
655 | test1/input/im_0321_s85_a06.png
656 | test1/input/im_0323_s80_a05.png
657 | test1/input/im_0317_s90_a05.png
658 | test1/input/im_0317_s85_a06.png
659 | test1/input/im_0313_s90_a04.png
660 | test1/input/im_0322_s95_a04.png
661 | test1/input/im_0324_s85_a05.png
662 | test1/input/im_0301_s85_a04.png
663 | test1/input/im_0304_s95_a05.png
664 | test1/input/im_0328_s80_a04.png
665 | test1/input/im_0320_s80_a04.png
666 | test1/input/im_0306_s85_a04.png
667 | test1/input/im_0337_s90_a06.png
668 | test1/input/im_0318_s95_a05.png
669 | test1/input/im_0340_s95_a05.png
670 | test1/input/im_0343_s95_a05.png
671 | test1/input/im_0339_s100_a04.png
672 | test1/input/im_0319_s85_a04.png
673 | test1/input/im_0317_s85_a04.png
674 | test1/input/im_0317_s95_a04.png
675 | test1/input/im_0309_s90_a04.png
676 | test1/input/im_0314_s95_a05.png
677 | test1/input/im_0315_s85_a05.png
678 | test1/input/im_0345_s80_a06.png
679 | test1/input/im_0301_s90_a04.png
680 | test1/input/im_0339_s95_a04.png
681 | test1/input/im_0347_s85_a05.png
682 | test1/input/im_0328_s85_a05.png
683 | test1/input/im_0306_s95_a06.png
684 | test1/input/im_0308_s80_a06.png
685 | test1/input/im_0312_s100_a06.png
686 | test1/input/im_0349_s100_a05.png
687 | test1/input/im_0326_s85_a05.png
688 | test1/input/im_0334_s95_a05.png
689 | test1/input/im_0332_s85_a04.png
690 | test1/input/im_0317_s100_a04.png
691 | test1/input/im_0328_s95_a06.png
692 | test1/input/im_0306_s90_a06.png
693 | test1/input/im_0348_s80_a06.png
694 | test1/input/im_0315_s95_a04.png
695 | test1/input/im_0324_s100_a06.png
696 | test1/input/im_0303_s80_a05.png
697 | test1/input/im_0348_s100_a04.png
698 | test1/input/im_0336_s90_a05.png
699 | test1/input/im_0316_s90_a06.png
700 | test1/input/im_0339_s100_a05.png
701 | test1/input/im_0337_s100_a06.png
702 | test1/input/im_0349_s80_a06.png
703 | test1/input/im_0325_s90_a04.png
704 | test1/input/im_0322_s85_a06.png
705 | test1/input/im_0302_s90_a06.png
706 | test1/input/im_0317_s90_a06.png
707 | test1/input/im_0347_s90_a06.png
708 | test1/input/im_0344_s95_a05.png
709 | test1/input/im_0319_s90_a06.png
710 | test1/input/im_0316_s100_a04.png
711 | test1/input/im_0308_s95_a06.png
712 | test1/input/im_0338_s80_a04.png
713 | test1/input/im_0320_s100_a06.png
714 | test1/input/im_0327_s80_a05.png
715 | test1/input/im_0329_s90_a05.png
716 | test1/input/im_0331_s80_a05.png
717 | test1/input/im_0349_s95_a04.png
718 | test1/input/im_0310_s80_a05.png
719 | test1/input/im_0308_s95_a05.png
720 | test1/input/im_0328_s80_a06.png
721 | test1/input/im_0323_s85_a04.png
722 | test1/input/im_0301_s100_a05.png
723 | test1/input/im_0320_s90_a06.png
724 | test1/input/im_0313_s100_a05.png
725 | test1/input/im_0346_s90_a04.png
726 | test1/input/im_0301_s95_a06.png
727 | test1/input/im_0316_s90_a04.png
728 | test1/input/im_0337_s95_a06.png
729 | test1/input/im_0336_s80_a06.png
730 | test1/input/im_0302_s80_a06.png
731 | test1/input/im_0315_s80_a06.png
732 | test1/input/im_0343_s80_a04.png
733 | test1/input/im_0303_s85_a05.png
734 | test1/input/im_0306_s95_a05.png
735 | test1/input/im_0311_s100_a06.png
736 | test1/input/im_0305_s95_a04.png
737 | test1/input/im_0323_s80_a06.png
738 | test1/input/im_0304_s90_a05.png
739 | test1/input/im_0309_s100_a05.png
740 | test1/input/im_0304_s80_a05.png
741 | test1/input/im_0345_s95_a05.png
742 | test1/input/im_0313_s95_a04.png
743 | test1/input/im_0328_s95_a05.png
744 | test1/input/im_0332_s85_a05.png
745 | test1/input/im_0341_s95_a06.png
746 | test1/input/im_0330_s95_a04.png
747 | test1/input/im_0345_s80_a05.png
748 | test1/input/im_0330_s85_a05.png
749 | test1/input/im_0329_s95_a05.png
750 | test1/input/im_0323_s100_a04.png
751 |
--------------------------------------------------------------------------------
/data/train/test1.txt:
--------------------------------------------------------------------------------
1 | test1/input/im_0307_s95_a04.png
2 | test1/input/im_0302_s100_a06.png
3 | test1/input/im_0321_s95_a04.png
4 | test1/input/im_0315_s100_a04.png
5 | test1/input/im_0308_s85_a06.png
6 | test1/input/im_0312_s85_a05.png
7 | test1/input/im_0330_s100_a06.png
8 | test1/input/im_0314_s100_a06.png
9 | test1/input/im_0338_s100_a05.png
10 | test1/input/im_0347_s85_a04.png
11 | test1/input/im_0309_s85_a06.png
12 | test1/input/im_0332_s100_a04.png
13 | test1/input/im_0314_s100_a05.png
14 | test1/input/im_0336_s85_a05.png
15 | test1/input/im_0310_s85_a04.png
16 | test1/input/im_0317_s95_a06.png
17 | test1/input/im_0328_s95_a04.png
18 | test1/input/im_0347_s80_a05.png
19 | test1/input/im_0320_s90_a04.png
20 | test1/input/im_0314_s85_a04.png
21 | test1/input/im_0339_s85_a06.png
22 | test1/input/im_0301_s100_a04.png
23 | test1/input/im_0329_s85_a05.png
24 | test1/input/im_0331_s100_a05.png
25 | test1/input/im_0307_s100_a04.png
26 | test1/input/im_0338_s100_a06.png
27 | test1/input/im_0306_s90_a05.png
28 | test1/input/im_0304_s100_a06.png
29 | test1/input/im_0302_s95_a05.png
30 | test1/input/im_0343_s85_a05.png
31 | test1/input/im_0329_s80_a04.png
32 | test1/input/im_0312_s80_a05.png
33 | test1/input/im_0311_s90_a05.png
34 | test1/input/im_0325_s80_a05.png
35 | test1/input/im_0348_s100_a05.png
36 | test1/input/im_0337_s85_a06.png
37 | test1/input/im_0346_s80_a05.png
38 | test1/input/im_0310_s80_a04.png
39 | test1/input/im_0318_s80_a06.png
40 | test1/input/im_0332_s80_a04.png
41 | test1/input/im_0338_s90_a04.png
42 | test1/input/im_0331_s100_a06.png
43 | test1/input/im_0304_s85_a04.png
44 | test1/input/im_0338_s80_a06.png
45 | test1/input/im_0309_s80_a05.png
46 | test1/input/im_0304_s85_a05.png
47 | test1/input/im_0349_s85_a05.png
48 | test1/input/im_0341_s100_a05.png
49 | test1/input/im_0327_s80_a04.png
50 | test1/input/im_0336_s85_a04.png
51 | test1/input/im_0341_s80_a06.png
52 | test1/input/im_0348_s95_a06.png
53 | test1/input/im_0341_s100_a06.png
54 | test1/input/im_0340_s80_a04.png
55 | test1/input/im_0315_s95_a05.png
56 | test1/input/im_0322_s90_a05.png
57 | test1/input/im_0304_s85_a06.png
58 | test1/input/im_0323_s100_a05.png
59 | test1/input/im_0333_s85_a06.png
60 | test1/input/im_0349_s100_a04.png
61 | test1/input/im_0318_s95_a04.png
62 | test1/input/im_0308_s90_a05.png
63 | test1/input/im_0304_s100_a05.png
64 | test1/input/im_0345_s95_a04.png
65 | test1/input/im_0306_s100_a06.png
66 | test1/input/im_0309_s95_a05.png
67 | test1/input/im_0335_s80_a05.png
68 | test1/input/im_0303_s100_a05.png
69 | test1/input/im_0333_s100_a05.png
70 | test1/input/im_0311_s90_a06.png
71 | test1/input/im_0317_s100_a05.png
72 | test1/input/im_0313_s90_a05.png
73 | test1/input/im_0322_s80_a06.png
74 | test1/input/im_0329_s80_a05.png
75 | test1/input/im_0330_s80_a05.png
76 | test1/input/im_0333_s85_a04.png
77 | test1/input/im_0342_s100_a06.png
78 | test1/input/im_0329_s80_a06.png
79 | test1/input/im_0335_s95_a04.png
80 | test1/input/im_0340_s100_a06.png
81 | test1/input/im_0318_s80_a05.png
82 | test1/input/im_0310_s100_a04.png
83 | test1/input/im_0306_s85_a05.png
84 | test1/input/im_0319_s85_a05.png
85 | test1/input/im_0334_s95_a06.png
86 | test1/input/im_0342_s90_a05.png
87 | test1/input/im_0342_s85_a04.png
88 | test1/input/im_0342_s95_a06.png
89 | test1/input/im_0342_s85_a05.png
90 | test1/input/im_0331_s90_a04.png
91 | test1/input/im_0304_s90_a06.png
92 | test1/input/im_0342_s100_a04.png
93 | test1/input/im_0337_s80_a05.png
94 | test1/input/im_0350_s95_a06.png
95 | test1/input/im_0326_s85_a06.png
96 | test1/input/im_0324_s80_a05.png
97 | test1/input/im_0313_s100_a06.png
98 | test1/input/im_0329_s100_a05.png
99 | test1/input/im_0343_s85_a04.png
100 | test1/input/im_0348_s85_a06.png
101 | test1/input/im_0301_s100_a06.png
102 | test1/input/im_0322_s85_a04.png
103 | test1/input/im_0328_s100_a05.png
104 | test1/input/im_0303_s90_a05.png
105 | test1/input/im_0327_s95_a05.png
106 | test1/input/im_0334_s90_a06.png
107 | test1/input/im_0339_s95_a05.png
108 | test1/input/im_0321_s90_a04.png
109 | test1/input/im_0301_s85_a06.png
110 | test1/input/im_0346_s80_a04.png
111 | test1/input/im_0341_s85_a05.png
112 | test1/input/im_0348_s100_a06.png
113 | test1/input/im_0344_s80_a05.png
114 | test1/input/im_0347_s100_a05.png
115 | test1/input/im_0349_s85_a04.png
116 | test1/input/im_0335_s85_a05.png
117 | test1/input/im_0350_s85_a06.png
118 | test1/input/im_0307_s80_a04.png
119 | test1/input/im_0309_s90_a06.png
120 | test1/input/im_0330_s100_a04.png
121 | test1/input/im_0302_s85_a04.png
122 | test1/input/im_0307_s85_a06.png
123 | test1/input/im_0326_s90_a06.png
124 | test1/input/im_0312_s90_a04.png
125 | test1/input/im_0341_s80_a04.png
126 | test1/input/im_0335_s95_a06.png
127 | test1/input/im_0312_s85_a04.png
128 | test1/input/im_0303_s80_a04.png
129 | test1/input/im_0316_s90_a05.png
130 | test1/input/im_0350_s90_a06.png
131 | test1/input/im_0320_s100_a05.png
132 | test1/input/im_0345_s90_a05.png
133 | test1/input/im_0342_s90_a06.png
134 | test1/input/im_0345_s95_a06.png
135 | test1/input/im_0308_s100_a04.png
136 | test1/input/im_0346_s85_a04.png
137 | test1/input/im_0320_s95_a05.png
138 | test1/input/im_0331_s90_a06.png
139 | test1/input/im_0322_s100_a04.png
140 | test1/input/im_0304_s100_a04.png
141 | test1/input/im_0348_s90_a05.png
142 | test1/input/im_0332_s90_a05.png
143 | test1/input/im_0331_s85_a05.png
144 | test1/input/im_0335_s100_a06.png
145 | test1/input/im_0323_s95_a06.png
146 | test1/input/im_0310_s80_a06.png
147 | test1/input/im_0324_s90_a06.png
148 | test1/input/im_0336_s80_a05.png
149 | test1/input/im_0311_s95_a04.png
150 | test1/input/im_0304_s95_a04.png
151 | test1/input/im_0350_s90_a04.png
152 | test1/input/im_0344_s85_a05.png
153 | test1/input/im_0334_s90_a05.png
154 | test1/input/im_0328_s100_a06.png
155 | test1/input/im_0315_s80_a05.png
156 | test1/input/im_0330_s95_a05.png
157 | test1/input/im_0334_s100_a06.png
158 | test1/input/im_0304_s80_a04.png
159 | test1/input/im_0310_s85_a06.png
160 | test1/input/im_0342_s95_a05.png
161 | test1/input/im_0331_s85_a04.png
162 | test1/input/im_0324_s90_a04.png
163 | test1/input/im_0349_s90_a05.png
164 | test1/input/im_0350_s95_a04.png
165 | test1/input/im_0316_s85_a06.png
166 | test1/input/im_0307_s100_a05.png
167 | test1/input/im_0345_s85_a04.png
168 | test1/input/im_0337_s85_a04.png
169 | test1/input/im_0334_s80_a06.png
170 | test1/input/im_0314_s85_a05.png
171 | test1/input/im_0348_s80_a04.png
172 | test1/input/im_0302_s90_a04.png
173 | test1/input/im_0335_s90_a04.png
174 | test1/input/im_0305_s95_a06.png
175 | test1/input/im_0315_s80_a04.png
176 | test1/input/im_0344_s85_a04.png
177 | test1/input/im_0302_s95_a06.png
178 | test1/input/im_0327_s100_a06.png
179 | test1/input/im_0320_s95_a04.png
180 | test1/input/im_0307_s90_a04.png
181 | test1/input/im_0327_s90_a04.png
182 | test1/input/im_0331_s90_a05.png
183 | test1/input/im_0309_s100_a04.png
184 | test1/input/im_0339_s90_a06.png
185 | test1/input/im_0333_s85_a05.png
186 | test1/input/im_0329_s85_a04.png
187 | test1/input/im_0347_s80_a06.png
188 | test1/input/im_0315_s85_a06.png
189 | test1/input/im_0306_s85_a06.png
190 | test1/input/im_0336_s90_a04.png
191 | test1/input/im_0318_s90_a06.png
192 | test1/input/im_0327_s85_a05.png
193 | test1/input/im_0324_s95_a04.png
194 | test1/input/im_0316_s80_a04.png
195 | test1/input/im_0329_s100_a04.png
196 | test1/input/im_0311_s85_a04.png
197 | test1/input/im_0323_s85_a05.png
198 | test1/input/im_0346_s90_a06.png
199 | test1/input/im_0342_s95_a04.png
200 | test1/input/im_0338_s95_a05.png
201 | test1/input/im_0302_s85_a05.png
202 | test1/input/im_0310_s95_a04.png
203 | test1/input/im_0302_s80_a05.png
204 | test1/input/im_0306_s100_a04.png
205 | test1/input/im_0317_s80_a06.png
206 | test1/input/im_0301_s80_a05.png
207 | test1/input/im_0340_s90_a05.png
208 | test1/input/im_0327_s90_a05.png
209 | test1/input/im_0326_s80_a06.png
210 | test1/input/im_0318_s85_a05.png
211 | test1/input/im_0308_s95_a04.png
212 | test1/input/im_0328_s90_a06.png
213 | test1/input/im_0345_s100_a05.png
214 | test1/input/im_0341_s100_a04.png
215 | test1/input/im_0345_s80_a04.png
216 | test1/input/im_0326_s95_a06.png
217 | test1/input/im_0342_s100_a05.png
218 | test1/input/im_0311_s95_a06.png
219 | test1/input/im_0309_s80_a04.png
220 | test1/input/im_0310_s85_a05.png
221 | test1/input/im_0338_s80_a05.png
222 | test1/input/im_0307_s85_a04.png
223 | test1/input/im_0317_s80_a05.png
224 | test1/input/im_0332_s80_a05.png
225 | test1/input/im_0322_s95_a06.png
226 | test1/input/im_0325_s95_a06.png
227 | test1/input/im_0303_s100_a06.png
228 | test1/input/im_0350_s80_a06.png
229 | test1/input/im_0315_s90_a05.png
230 | test1/input/im_0302_s90_a05.png
231 | test1/input/im_0323_s90_a04.png
232 | test1/input/im_0332_s100_a05.png
233 | test1/input/im_0345_s90_a04.png
234 | test1/input/im_0332_s100_a06.png
235 | test1/input/im_0333_s90_a05.png
236 | test1/input/im_0321_s100_a05.png
237 | test1/input/im_0303_s90_a06.png
238 | test1/input/im_0344_s95_a06.png
239 | test1/input/im_0343_s95_a06.png
240 | test1/input/im_0344_s100_a06.png
241 | test1/input/im_0329_s90_a04.png
242 | test1/input/im_0327_s80_a06.png
243 | test1/input/im_0347_s95_a04.png
244 | test1/input/im_0339_s80_a06.png
245 | test1/input/im_0316_s85_a05.png
246 | test1/input/im_0346_s95_a04.png
247 | test1/input/im_0309_s85_a05.png
248 | test1/input/im_0315_s90_a04.png
249 | test1/input/im_0312_s90_a06.png
250 | test1/input/im_0318_s80_a04.png
251 | test1/input/im_0322_s100_a06.png
252 | test1/input/im_0314_s90_a04.png
253 | test1/input/im_0304_s90_a04.png
254 | test1/input/im_0313_s95_a06.png
255 | test1/input/im_0343_s100_a04.png
256 | test1/input/im_0318_s85_a06.png
257 | test1/input/im_0310_s90_a06.png
258 | test1/input/im_0325_s90_a05.png
259 | test1/input/im_0348_s95_a04.png
260 | test1/input/im_0347_s85_a06.png
261 | test1/input/im_0313_s85_a05.png
262 | test1/input/im_0340_s85_a06.png
263 | test1/input/im_0339_s85_a05.png
264 | test1/input/im_0330_s85_a06.png
265 | test1/input/im_0312_s80_a04.png
266 | test1/input/im_0330_s80_a04.png
267 | test1/input/im_0350_s100_a06.png
268 | test1/input/im_0314_s80_a04.png
269 | test1/input/im_0319_s100_a04.png
270 | test1/input/im_0317_s80_a04.png
271 | test1/input/im_0318_s100_a05.png
272 | test1/input/im_0319_s80_a04.png
273 | test1/input/im_0339_s90_a04.png
274 | test1/input/im_0339_s80_a04.png
275 | test1/input/im_0303_s95_a06.png
276 | test1/input/im_0320_s85_a05.png
277 | test1/input/im_0318_s100_a06.png
278 | test1/input/im_0313_s80_a04.png
279 | test1/input/im_0337_s90_a05.png
280 | test1/input/im_0343_s80_a06.png
281 | test1/input/im_0323_s100_a06.png
282 | test1/input/im_0313_s80_a05.png
283 | test1/input/im_0316_s80_a06.png
284 | test1/input/im_0306_s80_a05.png
285 | test1/input/im_0337_s95_a05.png
286 | test1/input/im_0313_s95_a05.png
287 | test1/input/im_0302_s100_a05.png
288 | test1/input/im_0339_s90_a05.png
289 | test1/input/im_0334_s90_a04.png
290 | test1/input/im_0325_s100_a04.png
291 | test1/input/im_0305_s80_a06.png
292 | test1/input/im_0320_s80_a05.png
293 | test1/input/im_0321_s90_a05.png
294 | test1/input/im_0315_s100_a05.png
295 | test1/input/im_0334_s95_a04.png
296 | test1/input/im_0320_s85_a04.png
297 | test1/input/im_0322_s100_a05.png
298 | test1/input/im_0325_s95_a05.png
299 | test1/input/im_0314_s100_a04.png
300 | test1/input/im_0331_s95_a04.png
301 | test1/input/im_0326_s90_a04.png
302 | test1/input/im_0321_s90_a06.png
303 | test1/input/im_0326_s100_a04.png
304 | test1/input/im_0345_s85_a06.png
305 | test1/input/im_0324_s80_a04.png
306 | test1/input/im_0328_s100_a04.png
307 | test1/input/im_0310_s100_a06.png
308 | test1/input/im_0349_s100_a06.png
309 | test1/input/im_0330_s85_a04.png
310 | test1/input/im_0323_s90_a05.png
311 | test1/input/im_0345_s100_a04.png
312 | test1/input/im_0328_s85_a06.png
313 | test1/input/im_0312_s80_a06.png
314 | test1/input/im_0336_s90_a06.png
315 | test1/input/im_0349_s95_a05.png
316 | test1/input/im_0317_s85_a05.png
317 | test1/input/im_0326_s100_a05.png
318 | test1/input/im_0336_s85_a06.png
319 | test1/input/im_0326_s90_a05.png
320 | test1/input/im_0340_s80_a05.png
321 | test1/input/im_0312_s95_a04.png
322 | test1/input/im_0330_s95_a06.png
323 | test1/input/im_0329_s95_a04.png
324 | test1/input/im_0333_s80_a06.png
325 | test1/input/im_0322_s90_a04.png
326 | test1/input/im_0324_s85_a06.png
327 | test1/input/im_0344_s80_a06.png
328 | test1/input/im_0341_s90_a04.png
329 | test1/input/im_0307_s90_a05.png
330 | test1/input/im_0327_s95_a06.png
331 | test1/input/im_0335_s90_a06.png
332 | test1/input/im_0314_s95_a06.png
333 | test1/input/im_0347_s95_a06.png
334 | test1/input/im_0345_s85_a05.png
335 | test1/input/im_0312_s100_a05.png
336 | test1/input/im_0338_s100_a04.png
337 | test1/input/im_0347_s100_a04.png
338 | test1/input/im_0334_s80_a05.png
339 | test1/input/im_0331_s95_a06.png
340 | test1/input/im_0316_s80_a05.png
341 | test1/input/im_0320_s85_a06.png
342 | test1/input/im_0346_s100_a06.png
343 | test1/input/im_0325_s85_a04.png
344 | test1/input/im_0342_s85_a06.png
345 | test1/input/im_0302_s95_a04.png
346 | test1/input/im_0326_s80_a04.png
347 | test1/input/im_0344_s90_a04.png
348 | test1/input/im_0346_s85_a06.png
349 | test1/input/im_0342_s80_a05.png
350 | test1/input/im_0302_s80_a04.png
351 | test1/input/im_0333_s80_a04.png
352 | test1/input/im_0343_s95_a04.png
353 | test1/input/im_0316_s100_a06.png
354 | test1/input/im_0324_s100_a05.png
355 | test1/input/im_0306_s80_a04.png
356 | test1/input/im_0332_s95_a05.png
357 | test1/input/im_0313_s85_a06.png
358 | test1/input/im_0329_s90_a06.png
359 | test1/input/im_0303_s100_a04.png
360 | test1/input/im_0333_s90_a06.png
361 | test1/input/im_0306_s100_a05.png
362 | test1/input/im_0336_s80_a04.png
363 | test1/input/im_0346_s85_a05.png
364 | test1/input/im_0316_s95_a05.png
365 | test1/input/im_0344_s100_a04.png
366 | test1/input/im_0332_s95_a04.png
367 | test1/input/im_0309_s100_a06.png
368 | test1/input/im_0319_s100_a05.png
369 | test1/input/im_0341_s95_a04.png
370 | test1/input/im_0308_s90_a06.png
371 | test1/input/im_0343_s90_a05.png
372 | test1/input/im_0331_s100_a04.png
373 | test1/input/im_0344_s80_a04.png
374 | test1/input/im_0335_s85_a04.png
375 | test1/input/im_0321_s85_a05.png
376 | test1/input/im_0311_s100_a05.png
377 | test1/input/im_0346_s100_a04.png
378 | test1/input/im_0348_s90_a06.png
379 | test1/input/im_0338_s85_a04.png
380 | test1/input/im_0314_s95_a04.png
381 | test1/input/im_0327_s85_a04.png
382 | test1/input/im_0340_s90_a04.png
383 | test1/input/im_0332_s85_a06.png
384 | test1/input/im_0342_s90_a04.png
385 | test1/input/im_0308_s85_a04.png
386 | test1/input/im_0311_s85_a05.png
387 | test1/input/im_0319_s90_a05.png
388 | test1/input/im_0344_s85_a06.png
389 | test1/input/im_0346_s100_a05.png
390 | test1/input/im_0348_s85_a05.png
391 | test1/input/im_0318_s100_a04.png
392 | test1/input/im_0308_s100_a06.png
393 | test1/input/im_0333_s100_a06.png
394 | test1/input/im_0347_s80_a04.png
395 | test1/input/im_0324_s90_a05.png
396 | test1/input/im_0349_s90_a06.png
397 | test1/input/im_0327_s90_a06.png
398 | test1/input/im_0305_s90_a06.png
399 | test1/input/im_0305_s80_a04.png
400 | test1/input/im_0337_s85_a05.png
401 | test1/input/im_0315_s90_a06.png
402 | test1/input/im_0303_s95_a05.png
403 | test1/input/im_0343_s90_a04.png
404 | test1/input/im_0311_s80_a06.png
405 | test1/input/im_0327_s85_a06.png
406 | test1/input/im_0349_s90_a04.png
407 | test1/input/im_0324_s80_a06.png
408 | test1/input/im_0315_s95_a06.png
409 | test1/input/im_0311_s85_a06.png
410 | test1/input/im_0343_s100_a05.png
411 | test1/input/im_0329_s85_a06.png
412 | test1/input/im_0335_s80_a06.png
413 | test1/input/im_0332_s90_a06.png
414 | test1/input/im_0339_s100_a06.png
415 | test1/input/im_0304_s95_a06.png
416 | test1/input/im_0325_s100_a06.png
417 | test1/input/im_0335_s90_a05.png
418 | test1/input/im_0344_s100_a05.png
419 | test1/input/im_0339_s80_a05.png
420 | test1/input/im_0336_s100_a05.png
421 | test1/input/im_0343_s90_a06.png
422 | test1/input/im_0314_s85_a06.png
423 | test1/input/im_0321_s100_a06.png
424 | test1/input/im_0318_s85_a04.png
425 | test1/input/im_0319_s80_a05.png
426 | test1/input/im_0349_s85_a06.png
427 | test1/input/im_0318_s90_a04.png
428 | test1/input/im_0336_s100_a06.png
429 | test1/input/im_0310_s100_a05.png
430 | test1/input/im_0316_s95_a06.png
431 | test1/input/im_0324_s95_a05.png
432 | test1/input/im_0338_s85_a05.png
433 | test1/input/im_0303_s95_a04.png
434 | test1/input/im_0348_s95_a05.png
435 | test1/input/im_0314_s90_a06.png
436 | test1/input/im_0345_s90_a06.png
437 | test1/input/im_0331_s95_a05.png
438 | test1/input/im_0339_s95_a06.png
439 | test1/input/im_0305_s100_a06.png
440 | test1/input/im_0348_s80_a05.png
441 | test1/input/im_0307_s85_a05.png
442 | test1/input/im_0312_s95_a05.png
443 | test1/input/im_0323_s95_a05.png
444 | test1/input/im_0330_s90_a04.png
445 | test1/input/im_0330_s80_a06.png
446 | test1/input/im_0346_s95_a06.png
447 | test1/input/im_0346_s95_a05.png
448 | test1/input/im_0316_s95_a04.png
449 | test1/input/im_0338_s95_a06.png
450 | test1/input/im_0312_s90_a05.png
451 | test1/input/im_0307_s100_a06.png
452 | test1/input/im_0328_s80_a05.png
453 | test1/input/im_0337_s95_a04.png
454 | test1/input/im_0314_s80_a06.png
455 | test1/input/im_0325_s85_a06.png
456 | test1/input/im_0311_s80_a04.png
457 | test1/input/im_0321_s80_a06.png
458 | test1/input/im_0322_s80_a04.png
459 | test1/input/im_0334_s85_a04.png
460 | test1/input/im_0335_s100_a05.png
461 | test1/input/im_0325_s95_a04.png
462 | test1/input/im_0344_s90_a05.png
463 | test1/input/im_0305_s85_a05.png
464 | test1/input/im_0306_s95_a04.png
465 | test1/input/im_0350_s85_a04.png
466 | test1/input/im_0325_s85_a05.png
467 | test1/input/im_0340_s90_a06.png
468 | test1/input/im_0334_s100_a04.png
469 | test1/input/im_0329_s95_a06.png
470 | test1/input/im_0301_s80_a04.png
471 | test1/input/im_0350_s80_a05.png
472 | test1/input/im_0305_s95_a05.png
473 | test1/input/im_0346_s80_a06.png
474 | test1/input/im_0312_s85_a06.png
475 | test1/input/im_0342_s80_a04.png
476 | test1/input/im_0309_s80_a06.png
477 | test1/input/im_0332_s90_a04.png
478 | test1/input/im_0344_s90_a06.png
479 | test1/input/im_0338_s95_a04.png
480 | test1/input/im_0327_s100_a04.png
481 | test1/input/im_0313_s100_a04.png
482 | test1/input/im_0341_s90_a06.png
483 | test1/input/im_0331_s80_a06.png
484 | test1/input/im_0322_s95_a05.png
485 | test1/input/im_0321_s95_a05.png
486 | test1/input/im_0303_s85_a04.png
487 | test1/input/im_0350_s100_a05.png
488 | test1/input/im_0305_s85_a06.png
489 | test1/input/im_0334_s100_a05.png
490 | test1/input/im_0312_s100_a04.png
491 | test1/input/im_0320_s95_a06.png
492 | test1/input/im_0335_s95_a05.png
493 | test1/input/im_0350_s95_a05.png
494 | test1/input/im_0308_s90_a04.png
495 | test1/input/im_0321_s80_a05.png
496 | test1/input/im_0324_s100_a04.png
497 | test1/input/im_0319_s80_a06.png
498 | test1/input/im_0301_s95_a05.png
499 | test1/input/im_0312_s95_a06.png
500 | test1/input/im_0350_s90_a05.png
501 | test1/input/im_0318_s95_a06.png
502 | test1/input/im_0338_s90_a06.png
503 | test1/input/im_0307_s80_a06.png
504 | test1/input/im_0327_s95_a04.png
505 | test1/input/im_0305_s90_a04.png
506 | test1/input/im_0307_s80_a05.png
507 | test1/input/im_0333_s95_a06.png
508 | test1/input/im_0347_s100_a06.png
509 | test1/input/im_0317_s100_a06.png
510 | test1/input/im_0340_s100_a05.png
511 | test1/input/im_0350_s100_a04.png
512 | test1/input/im_0325_s80_a06.png
513 | test1/input/im_0322_s90_a06.png
514 | test1/input/im_0337_s80_a06.png
515 | test1/input/im_0323_s80_a04.png
516 | test1/input/im_0301_s90_a05.png
517 | test1/input/im_0331_s85_a06.png
518 | test1/input/im_0341_s95_a05.png
519 | test1/input/im_0307_s95_a05.png
520 | test1/input/im_0335_s80_a04.png
521 | test1/input/im_0308_s80_a05.png
522 | test1/input/im_0305_s80_a05.png
523 | test1/input/im_0317_s90_a04.png
524 | test1/input/im_0313_s80_a06.png
525 | test1/input/im_0301_s80_a06.png
526 | test1/input/im_0306_s90_a04.png
527 | test1/input/im_0319_s95_a04.png
528 | test1/input/im_0319_s95_a06.png
529 | test1/input/im_0326_s80_a05.png
530 | test1/input/im_0335_s85_a06.png
531 | test1/input/im_0322_s85_a05.png
532 | test1/input/im_0321_s100_a04.png
533 | test1/input/im_0325_s90_a06.png
534 | test1/input/im_0326_s100_a06.png
535 | test1/input/im_0336_s95_a05.png
536 | test1/input/im_0339_s85_a04.png
537 | test1/input/im_0341_s80_a05.png
538 | test1/input/im_0336_s95_a06.png
539 | test1/input/im_0317_s95_a05.png
540 | test1/input/im_0340_s100_a04.png
541 | test1/input/im_0334_s80_a04.png
542 | test1/input/im_0340_s80_a06.png
543 | test1/input/im_0320_s90_a05.png
544 | test1/input/im_0311_s95_a05.png
545 | test1/input/im_0315_s100_a06.png
546 | test1/input/im_0349_s80_a05.png
547 | test1/input/im_0308_s80_a04.png
548 | test1/input/im_0349_s95_a06.png
549 | test1/input/im_0318_s90_a05.png
550 | test1/input/im_0306_s80_a06.png
551 | test1/input/im_0343_s80_a05.png
552 | test1/input/im_0332_s95_a06.png
553 | test1/input/im_0319_s95_a05.png
554 | test1/input/im_0341_s90_a05.png
555 | test1/input/im_0313_s85_a04.png
556 | test1/input/im_0303_s80_a06.png
557 | test1/input/im_0334_s85_a05.png
558 | test1/input/im_0341_s85_a06.png
559 | test1/input/im_0310_s90_a05.png
560 | test1/input/im_0307_s95_a06.png
561 | test1/input/im_0326_s95_a04.png
562 | test1/input/im_0328_s90_a04.png
563 | test1/input/im_0327_s100_a05.png
564 | test1/input/im_0333_s90_a04.png
565 | test1/input/im_0316_s100_a05.png
566 | test1/input/im_0303_s90_a04.png
567 | test1/input/im_0330_s90_a06.png
568 | test1/input/im_0303_s85_a06.png
569 | test1/input/im_0333_s100_a04.png
570 | test1/input/im_0305_s100_a04.png
571 | test1/input/im_0348_s85_a04.png
572 | test1/input/im_0348_s90_a04.png
573 | test1/input/im_0324_s85_a04.png
574 | test1/input/im_0347_s90_a05.png
575 | test1/input/im_0335_s100_a04.png
576 | test1/input/im_0311_s80_a05.png
577 | test1/input/im_0328_s90_a05.png
578 | test1/input/im_0346_s90_a05.png
579 | test1/input/im_0313_s90_a06.png
580 | test1/input/im_0326_s95_a05.png
581 | test1/input/im_0305_s85_a04.png
582 | test1/input/im_0342_s80_a06.png
583 | test1/input/im_0302_s100_a04.png
584 | test1/input/im_0322_s80_a05.png
585 | test1/input/im_0338_s85_a06.png
586 | test1/input/im_0301_s90_a06.png
587 | test1/input/im_0337_s100_a05.png
588 | test1/input/im_0310_s90_a04.png
589 | test1/input/im_0340_s85_a04.png
590 | test1/input/im_0316_s85_a04.png
591 | test1/input/im_0321_s80_a04.png
592 | test1/input/im_0338_s90_a05.png
593 | test1/input/im_0343_s85_a06.png
594 | test1/input/im_0311_s100_a04.png
595 | test1/input/im_0310_s95_a06.png
596 | test1/input/im_0340_s95_a06.png
597 | test1/input/im_0331_s80_a04.png
598 | test1/input/im_0320_s80_a06.png
599 | test1/input/im_0308_s100_a05.png
600 | test1/input/im_0341_s85_a04.png
601 | test1/input/im_0350_s85_a05.png
602 | test1/input/im_0347_s90_a04.png
603 | test1/input/im_0347_s95_a05.png
604 | test1/input/im_0330_s90_a05.png
605 | test1/input/im_0328_s85_a04.png
606 | test1/input/im_0349_s80_a04.png
607 | test1/input/im_0321_s95_a06.png
608 | test1/input/im_0301_s95_a04.png
609 | test1/input/im_0343_s100_a06.png
610 | test1/input/im_0340_s95_a04.png
611 | test1/input/im_0336_s95_a04.png
612 | test1/input/im_0319_s90_a04.png
613 | test1/input/im_0333_s95_a04.png
614 | test1/input/im_0309_s95_a06.png
615 | test1/input/im_0344_s95_a04.png
616 | test1/input/im_0329_s100_a06.png
617 | test1/input/im_0325_s100_a05.png
618 | test1/input/im_0323_s95_a04.png
619 | test1/input/im_0333_s80_a05.png
620 | test1/input/im_0314_s90_a05.png
621 | test1/input/im_0323_s90_a06.png
622 | test1/input/im_0314_s80_a05.png
623 | test1/input/im_0315_s85_a04.png
624 | test1/input/im_0336_s100_a04.png
625 | test1/input/im_0350_s80_a04.png
626 | test1/input/im_0332_s80_a06.png
627 | test1/input/im_0311_s90_a04.png
628 | test1/input/im_0324_s95_a06.png
629 | test1/input/im_0337_s90_a04.png
630 | test1/input/im_0333_s95_a05.png
631 | test1/input/im_0319_s85_a06.png
632 | test1/input/im_0309_s95_a04.png
633 | test1/input/im_0308_s85_a05.png
634 | test1/input/im_0301_s85_a05.png
635 | test1/input/im_0304_s80_a06.png
636 | test1/input/im_0320_s100_a04.png
637 | test1/input/im_0321_s85_a04.png
638 | test1/input/im_0302_s85_a06.png
639 | test1/input/im_0323_s85_a06.png
640 | test1/input/im_0305_s90_a05.png
641 | test1/input/im_0334_s85_a06.png
642 | test1/input/im_0326_s85_a04.png
643 | test1/input/im_0309_s90_a05.png
644 | test1/input/im_0310_s95_a05.png
645 | test1/input/im_0340_s85_a05.png
646 | test1/input/im_0345_s100_a06.png
647 | test1/input/im_0337_s80_a04.png
648 | test1/input/im_0309_s85_a04.png
649 | test1/input/im_0330_s100_a05.png
650 | test1/input/im_0319_s100_a06.png
651 | test1/input/im_0337_s100_a04.png
652 | test1/input/im_0325_s80_a04.png
653 | test1/input/im_0305_s100_a05.png
654 | test1/input/im_0307_s90_a06.png
655 | test1/input/im_0321_s85_a06.png
656 | test1/input/im_0323_s80_a05.png
657 | test1/input/im_0317_s90_a05.png
658 | test1/input/im_0317_s85_a06.png
659 | test1/input/im_0313_s90_a04.png
660 | test1/input/im_0322_s95_a04.png
661 | test1/input/im_0324_s85_a05.png
662 | test1/input/im_0301_s85_a04.png
663 | test1/input/im_0304_s95_a05.png
664 | test1/input/im_0328_s80_a04.png
665 | test1/input/im_0320_s80_a04.png
666 | test1/input/im_0306_s85_a04.png
667 | test1/input/im_0337_s90_a06.png
668 | test1/input/im_0318_s95_a05.png
669 | test1/input/im_0340_s95_a05.png
670 | test1/input/im_0343_s95_a05.png
671 | test1/input/im_0339_s100_a04.png
672 | test1/input/im_0319_s85_a04.png
673 | test1/input/im_0317_s85_a04.png
674 | test1/input/im_0317_s95_a04.png
675 | test1/input/im_0309_s90_a04.png
676 | test1/input/im_0314_s95_a05.png
677 | test1/input/im_0315_s85_a05.png
678 | test1/input/im_0345_s80_a06.png
679 | test1/input/im_0301_s90_a04.png
680 | test1/input/im_0339_s95_a04.png
681 | test1/input/im_0347_s85_a05.png
682 | test1/input/im_0328_s85_a05.png
683 | test1/input/im_0306_s95_a06.png
684 | test1/input/im_0308_s80_a06.png
685 | test1/input/im_0312_s100_a06.png
686 | test1/input/im_0349_s100_a05.png
687 | test1/input/im_0326_s85_a05.png
688 | test1/input/im_0334_s95_a05.png
689 | test1/input/im_0332_s85_a04.png
690 | test1/input/im_0317_s100_a04.png
691 | test1/input/im_0328_s95_a06.png
692 | test1/input/im_0306_s90_a06.png
693 | test1/input/im_0348_s80_a06.png
694 | test1/input/im_0315_s95_a04.png
695 | test1/input/im_0324_s100_a06.png
696 | test1/input/im_0303_s80_a05.png
697 | test1/input/im_0348_s100_a04.png
698 | test1/input/im_0336_s90_a05.png
699 | test1/input/im_0316_s90_a06.png
700 | test1/input/im_0339_s100_a05.png
701 | test1/input/im_0337_s100_a06.png
702 | test1/input/im_0349_s80_a06.png
703 | test1/input/im_0325_s90_a04.png
704 | test1/input/im_0322_s85_a06.png
705 | test1/input/im_0302_s90_a06.png
706 | test1/input/im_0317_s90_a06.png
707 | test1/input/im_0347_s90_a06.png
708 | test1/input/im_0344_s95_a05.png
709 | test1/input/im_0319_s90_a06.png
710 | test1/input/im_0316_s100_a04.png
711 | test1/input/im_0308_s95_a06.png
712 | test1/input/im_0338_s80_a04.png
713 | test1/input/im_0320_s100_a06.png
714 | test1/input/im_0327_s80_a05.png
715 | test1/input/im_0329_s90_a05.png
716 | test1/input/im_0331_s80_a05.png
717 | test1/input/im_0349_s95_a04.png
718 | test1/input/im_0310_s80_a05.png
719 | test1/input/im_0308_s95_a05.png
720 | test1/input/im_0328_s80_a06.png
721 | test1/input/im_0323_s85_a04.png
722 | test1/input/im_0301_s100_a05.png
723 | test1/input/im_0320_s90_a06.png
724 | test1/input/im_0313_s100_a05.png
725 | test1/input/im_0346_s90_a04.png
726 | test1/input/im_0301_s95_a06.png
727 | test1/input/im_0316_s90_a04.png
728 | test1/input/im_0337_s95_a06.png
729 | test1/input/im_0336_s80_a06.png
730 | test1/input/im_0302_s80_a06.png
731 | test1/input/im_0315_s80_a06.png
732 | test1/input/im_0343_s80_a04.png
733 | test1/input/im_0303_s85_a05.png
734 | test1/input/im_0306_s95_a05.png
735 | test1/input/im_0311_s100_a06.png
736 | test1/input/im_0305_s95_a04.png
737 | test1/input/im_0323_s80_a06.png
738 | test1/input/im_0304_s90_a05.png
739 | test1/input/im_0309_s100_a05.png
740 | test1/input/im_0304_s80_a05.png
741 | test1/input/im_0345_s95_a05.png
742 | test1/input/im_0313_s95_a04.png
743 | test1/input/im_0328_s95_a05.png
744 | test1/input/im_0332_s85_a05.png
745 | test1/input/im_0341_s95_a06.png
746 | test1/input/im_0330_s95_a04.png
747 | test1/input/im_0345_s80_a05.png
748 | test1/input/im_0330_s85_a05.png
749 | test1/input/im_0329_s95_a05.png
750 | test1/input/im_0323_s100_a04.png
751 |
--------------------------------------------------------------------------------