├── .idea
│   └── code.iml
├── basic_layers.py
├── blank_experiment.py
├── checks
│   ├── biggan
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── cyclegan
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── dalle_2
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── dalle_mini
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── gaugan
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── glide
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── mj
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── progan
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── sd14
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── sd21
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── stargan
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   ├── stylegan
│   │   ├── chk_0.pt
│   │   └── train_hypers.pt
│   └── stylegan2
│       ├── chk_0.pt
│       └── train_hypers.pt
├── data
│   ├── dalle_2
│   │   ├── 0_real
│   │   │   ├── img_1121979.png
│   │   │   ├── img_1256926.png
│   │   │   ├── img_1318368.png
│   │   │   ├── img_1482420.png
│   │   │   ├── img_2101.png
│   │   │   ├── img_2431.png
│   │   │   ├── img_250006.png
│   │   │   ├── img_653783.png
│   │   │   ├── img_685915.png
│   │   │   └── img_69.png
│   │   └── 1_fake
│   │       ├── img_1341638.png
│   │       ├── img_1486737.png
│   │       ├── img_1535500.png
│   │       ├── img_1543532.png
│   │       ├── img_205082.png
│   │       ├── img_26962.png
│   │       ├── img_532821.png
│   │       ├── img_758685.png
│   │       ├── img_901534.png
│   │       └── img_942417.png
│   ├── dalle_mini
│   │   ├── 0_real
│   │   │   ├── img_1125.png
│   │   │   ├── img_121274.png
│   │   │   ├── img_179228.png
│   │   │   ├── img_2899.png
│   │   │   ├── img_384396.png
│   │   │   ├── img_397555.png
│   │   │   ├── img_589795.png
│   │   │   ├── img_595583.png
│   │   │   ├── img_675501.png
│   │   │   └── img_97210.png
│   │   └── 1_fake
│   │       ├── img_1087131.png
│   │       ├── img_1189029.png
│   │       ├── img_1224023.png
│   │       ├── img_1313947.png
│   │       ├── img_241420.png
│   │       ├── img_524967.png
│   │       ├── img_586942.png
│   │       ├── img_761237.png
│   │       ├── img_782657.png
│   │       └── img_978369.png
│   ├── glide
│   │   ├── 0_real
│   │   │   ├── img_235475.png
│   │   │   ├── img_28578.png
│   │   │   ├── img_3233.png
│   │   │   ├── img_4995.png
│   │   │   ├── img_571988.png
│   │   │   ├── img_574345.png
│   │   │   ├── img_594049.png
│   │   │   ├── img_633294.png
│   │   │   ├── img_737.png
│   │   │   └── img_80431.png
│   │   └── 1_fake
│   │       ├── img_1038562.png
│   │       ├── img_1055638.png
│   │       ├── img_1212279.png
│   │       ├── img_1488167.png
│   │       ├── img_215830.png
│   │       ├── img_253710.png
│   │       ├── img_696075.png
│   │       ├── img_706446.png
│   │       ├── img_868348.png
│   │       └── img_99127.png
│   ├── mj
│   │   ├── 0_real
│   │   │   ├── img_153103.png
│   │   │   ├── img_177489.png
│   │   │   ├── img_263169.png
│   │   │   ├── img_415579.png
│   │   │   ├── img_4329.png
│   │   │   ├── img_4662.png
│   │   │   ├── img_503621.png
│   │   │   ├── img_567984.png
│   │   │   ├── img_592099.png
│   │   │   └── img_612276.png
│   │   └── 1_fake
│   │       ├── img_img_990849408058933258.png
│   │       ├── img_img_991166194365255741.png
│   │       ├── img_img_991455424593281054.png
│   │       ├── img_img_991620679730475028.png
│   │       ├── img_img_991814118162055238.png
│   │       ├── img_img_992081131874828308.png
│   │       ├── img_img_992483168701653004.png
│   │       ├── img_img_992511603599736853.png
│   │       ├── img_img_992890606516965376.png
│   │       └── img_img_993280554420154388.png
│   ├── sd14
│   │   ├── 0_real
│   │   │   ├── img_102.png
│   │   │   ├── img_1181282.png
│   │   │   ├── img_1304491.png
│   │   │   ├── img_1373778.png
│   │   │   ├── img_202957.png
│   │   │   ├── img_332318.png
│   │   │   ├── img_338417.png
│   │   │   ├── img_3521.png
│   │   │   ├── img_590038.png
│   │   │   └── img_62051.png
│   │   └── 1_fake
│   │       ├── img_107403.png
│   │       ├── img_1134171.png
│   │       ├── img_1239067.png
│   │       ├── img_1265.png
│   │       ├── img_1402279.png
│   │       ├── img_1464360.png
│   │       ├── img_1586156.png
│   │       ├── img_628810.png
│   │       ├── img_635076.png
│   │       └── img_996540.png
│   └── sd21
│       ├── 0_real
│       │   ├── img_1023256.png
│       │   ├── img_1180182.png
│       │   ├── img_1310855.png
│       │   ├── img_1373696.png
│       │   ├── img_216000.png
│       │   ├── img_341347.png
│       │   ├── img_346558.png
│       │   ├── img_361478.png
│       │   ├── img_606665.png
│       │   └── img_636328.png
│       └── 1_fake
│           ├── img_107095.png
│           ├── img_1126950.png
│           ├── img_1236963.png
│           ├── img_1266460.png
│           ├── img_1408.png
│           ├── img_1474487.png
│           ├── img_160675.png
│           ├── img_645925.png
│           ├── img_652484.png
│           └── img_995393.png
├── data_dif.py
├── dcnn_loader.py
├── dncnn
│   ├── chk_2000.pt
│   ├── clean_real.npy
│   ├── model.py
│   └── trainer.py
├── eval_dif.py
├── model.py
├── readme.md
├── repo_images
│   ├── fingerFFT_dalle_2.png
│   ├── fingerFFT_dalle_mini.png
│   ├── fingerFFT_glide.png
│   ├── fingerFFT_mj.png
│   ├── fingerFFT_sd14.png
│   ├── fingerFFT_sd21.png
│   ├── finger_dalle_2.png
│   ├── finger_dalle_mini.png
│   ├── finger_glide.png
│   ├── finger_mj.png
│   ├── finger_sd14.png
│   └── finger_sd21.png
├── requirements.txt
├── train_dif.py
├── trainer_dif.py
└── utils.py
/.idea/code.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/basic_layers.py:
--------------------------------------------------------------------------------
from torch import nn
import torch.nn.functional as F
import torch


class Conv_Layer(nn.Module):
    def __init__(self, in_c, out_c, kernel, stride,
                 padding=0, dilation=1, bias=True, activ=None, norm=None,
                 pool=None):
        super(Conv_Layer, self).__init__()
        self.conv = nn.Sequential()
        self.conv.add_module('conv', nn.Conv2d(in_c, out_c, kernel_size=kernel,
                                               stride=stride, dilation=dilation, padding=padding, bias=bias))

        if activ == 'leak':
            activ = nn.LeakyReLU(inplace=True)
        elif activ == 'relu':
            activ = nn.ReLU(inplace=True)
        elif activ == 'pleak':
            activ = nn.PReLU()
        elif activ == 'gelu':
            activ = nn.GELU()
        elif activ == 'selu':
            activ = nn.SELU()
        elif activ == 'sigmoid':
            activ = nn.Sigmoid()
        elif activ == 'softmax':
            activ = nn.Softmax(dim=1)
        elif activ == 'tanh':
            activ = nn.Tanh()
        if norm == 'bn':
            norm = nn.BatchNorm2d(out_c)
        if pool == 'max':
            pool = nn.MaxPool2d(2, 2)
        elif pool == 'avg':
            pool = nn.AvgPool2d(2, 2)

        if norm is not None:
            self.conv.add_module('norm', norm)

        if pool is not None:
            self.conv.add_module('pool', pool)

        if activ is not None:
            self.conv.add_module('activ', activ)

    def forward(self, x):
        x = self.conv(x)
        return x


class DeConv_Layer(nn.Module):
    def __init__(self, in_c, out_c, kernel, stride,
                 padding=0, activ=None, norm=None,
                 pool=None, bias=True):
        super(DeConv_Layer, self).__init__()
        self.deconv = nn.Sequential()
        self.deconv.add_module('deconv', nn.ConvTranspose2d(in_c, out_c, kernel_size=kernel,
                                                            stride=stride, padding=padding, bias=bias))

        if activ == 'leak':
            activ = nn.LeakyReLU(inplace=True)
        elif activ == 'relu':
            activ = nn.ReLU(inplace=True)
        elif activ == 'pleak':
            activ = nn.PReLU()
        elif activ == 'gelu':
            activ = nn.GELU()
        elif activ == 'selu':
            activ = nn.SELU()
        elif activ == 'sigmoid':
            activ = nn.Sigmoid()
        elif activ == 'softmax':
            activ = nn.Softmax(dim=1)
        if norm == 'bn':
            norm = nn.BatchNorm2d(out_c)
        if pool == 'max':
            pool = nn.MaxPool2d(2, 2)
        elif pool == 'avg':
            pool = nn.AvgPool2d(2, 2)

        if norm is not None:
            self.deconv.add_module('norm', norm)

        if pool is not None:
            self.deconv.add_module('pool', pool)

        if activ is not None:
            self.deconv.add_module('activ', activ)

    def forward(self, x):
        x = self.deconv(x)
        return x


class Conv_Block(nn.Module):
    def __init__(self, in_c, out_c, activ=None, pool=None, norm='bn'):
        super(Conv_Block, self).__init__()
        self.c1 = Conv_Layer(in_c, out_c, 3, 1, activ=activ, norm=norm, padding=1)
        self.c2 = Conv_Layer(out_c, out_c, 3, 1, activ=activ, norm=norm, padding=1)

        if pool == 'up_stride':
            self.pool = DeConv_Layer(out_c, out_c, 2, 2, norm=norm)
        elif pool == 'up_bilinear':
            self.pool = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True)
        elif pool == 'up_nearest':
            # 'nearest' mode does not accept align_corners, so it is omitted here.
            self.pool = nn.Upsample(scale_factor=2, mode='nearest')
        elif pool == 'down_max':
            self.pool = nn.MaxPool2d(2, 2)
        elif pool == 'down_stride':
            self.c2 = Conv_Layer(out_c, out_c, 3, 2, activ=activ, norm=norm, padding=1)
            self.pool = None
        else:
            self.pool = None

    def forward(self, x):
        x = self.c2(self.c1(x))

        if self.pool:
            return x, self.pool(x)
        else:
            return 0, x


# -------- Functions ----------------------------------------------------------

def concat_curr(prev, curr):
    # Pad curr so it matches prev spatially, then concatenate along channels.
    diffY = prev.size()[2] - curr.size()[2]
    diffX = prev.size()[3] - curr.size()[3]

    curr = F.pad(curr, [diffX // 2, diffX - diffX // 2,
                        diffY // 2, diffY - diffY // 2])

    x = torch.cat([prev, curr], dim=1)
    return x
--------------------------------------------------------------------------------
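
For orientation, a minimal usage sketch of the building blocks defined above. The tensor shapes and the upsampling step are illustrative assumptions, not taken from the rest of the repository.

# Usage sketch (assumed shapes); run from the repository root.
import torch
import torch.nn.functional as F
from basic_layers import Conv_Layer, Conv_Block, concat_curr

x = torch.randn(1, 3, 64, 64)

# Single 3x3 convolution with batch norm and LeakyReLU, spatial size preserved.
layer = Conv_Layer(3, 16, 3, 1, padding=1, activ='leak', norm='bn')
y = layer(x)                       # (1, 16, 64, 64)

# Encoder-style block: returns (features before pooling, pooled features).
block = Conv_Block(16, 32, activ='relu', pool='down_max')
skip, down = block(y)              # skip: (1, 32, 64, 64), down: (1, 32, 32, 32)

# Upsample and concatenate along channels, as in U-Net style skip connections.
up = F.interpolate(down, scale_factor=2, mode='nearest')
merged = concat_curr(skip, up)     # (1, 64, 64, 64)
print(merged.shape)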
/blank_experiment.py:
--------------------------------------------------------------------------------
import argparse
from pathlib import Path
import torch
from tqdm import tqdm
import data_dif as data
from trainer_dif import TrainerSingle
from utils import *


def parse_arguments() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )

    # Two integers (h w); argparse cannot parse a tuple type directly.
    parser.add_argument("--size", type=int, nargs=2, default=(128, 128), required=False,
                        help="Crop size in pixels (h w)")
    parser.add_argument("--output_dir", type=str, default='out', required=False,
                        help="Path to the directory for saving images")
    parser.add_argument("--epochs", type=int, default=1000, required=False,
                        help="Number of epochs to train")

    parsed_args = parser.parse_args()
    return parsed_args


def preform_blank(args: argparse.Namespace) -> None:
    epochs = args.epochs
    output_dir = Path(args.output_dir)
    crop_size = tuple(args.size)

    check_existence(output_dir, True)

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    hyper_pars = {'Epochs': epochs, 'Factor': 100, 'Noise Type': 'uniform',
                  'Noise STD': 0.03, 'Inp. Channel': 16,
                  'LR': 5e-5, 'Device': device, 'Crop Size': crop_size, 'Margin': 1.,
                  'Out. Channel': 3, 'Arch.': 32, 'Depth': 4,
                  'Concat': [1, 1, 1, 1]}

    # Generate a tensor of gray images
    img_lp = 0.5 * torch.ones((1, 3, *hyper_pars['Crop Size']))
    img_lp = img_lp.to(device).float()

    # Initialize trainer
    trainer = TrainerSingle(hyper_pars).to(hyper_pars['Device'])
    train_step_fun = trainer.train_step_blank

    # Initialize progress bar
    epochs = list(range(1, hyper_pars['Epochs'] + 1))
    pbar = tqdm(total=len(epochs), desc='')
    ep = 0

    fin_list = {}

    for ep in epochs:
        pbar.update()
        train_step_fun(img_lp)

        if (ep % hyper_pars['Factor']) == 0:
            if ep > 0:
                fin_np = trainer.produce_fingerprint(True)
                fin_list[ep] = fin_np

        pbar.postfix = f'Loss {trainer.train_loss[-1]:.5f}'

    fin_np = trainer.produce_fingerprint(True)
    fin_list[ep] = fin_np
    fin_fft = data.produce_fft(fin_list[ep])

    # Save the fingerprint history and the images from the last epoch
    np.save(str(output_dir / 'fin_history.npy'), fin_list)
    Image.fromarray((255 * fin_np).astype('uint8')).save(output_dir / 'fin_image.png')
    Image.fromarray((255 * data.rescale_img(fin_fft)).astype('uint8')).save(output_dir / 'fin_fft.png')


if __name__ == '__main__':
    preform_blank(parse_arguments())
--------------------------------------------------------------------------------
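
A typical invocation from the repository root (the output directory name and epoch count below are illustrative):

python blank_experiment.py --output_dir out_blank --epochs 1000

This writes fin_history.npy, fin_image.png, and fin_fft.png into the chosen output directory.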
/checks/biggan/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/biggan/chk_0.pt
--------------------------------------------------------------------------------
/checks/biggan/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/biggan/train_hypers.pt
--------------------------------------------------------------------------------
/checks/cyclegan/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/cyclegan/chk_0.pt
--------------------------------------------------------------------------------
/checks/cyclegan/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/cyclegan/train_hypers.pt
--------------------------------------------------------------------------------
/checks/dalle_2/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/dalle_2/chk_0.pt
--------------------------------------------------------------------------------
/checks/dalle_2/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/dalle_2/train_hypers.pt
--------------------------------------------------------------------------------
/checks/dalle_mini/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/dalle_mini/chk_0.pt
--------------------------------------------------------------------------------
/checks/dalle_mini/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/dalle_mini/train_hypers.pt
--------------------------------------------------------------------------------
/checks/gaugan/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/gaugan/chk_0.pt
--------------------------------------------------------------------------------
/checks/gaugan/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/gaugan/train_hypers.pt
--------------------------------------------------------------------------------
/checks/glide/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/glide/chk_0.pt
--------------------------------------------------------------------------------
/checks/glide/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/glide/train_hypers.pt
--------------------------------------------------------------------------------
/checks/mj/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/mj/chk_0.pt
--------------------------------------------------------------------------------
/checks/mj/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/mj/train_hypers.pt
--------------------------------------------------------------------------------
/checks/progan/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/progan/chk_0.pt
--------------------------------------------------------------------------------
/checks/progan/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/progan/train_hypers.pt
--------------------------------------------------------------------------------
/checks/sd14/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/sd14/chk_0.pt
--------------------------------------------------------------------------------
/checks/sd14/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/sd14/train_hypers.pt
--------------------------------------------------------------------------------
/checks/sd21/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/sd21/chk_0.pt
--------------------------------------------------------------------------------
/checks/sd21/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/sd21/train_hypers.pt
--------------------------------------------------------------------------------
/checks/stargan/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/stargan/chk_0.pt
--------------------------------------------------------------------------------
/checks/stargan/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/stargan/train_hypers.pt
--------------------------------------------------------------------------------
/checks/stylegan/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/stylegan/chk_0.pt
--------------------------------------------------------------------------------
/checks/stylegan/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/stylegan/train_hypers.pt
--------------------------------------------------------------------------------
/checks/stylegan2/chk_0.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/stylegan2/chk_0.pt
--------------------------------------------------------------------------------
/checks/stylegan2/train_hypers.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/stylegan2/train_hypers.pt
--------------------------------------------------------------------------------
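
The chk_0.pt and train_hypers.pt entries above are binary checkpoints; this export stores only their raw URLs. A minimal sketch of fetching and inspecting one follows; the assumption that each file deserializes with torch.load into an ordinary Python object (e.g. a dict) is mine and not documented here.

# Hedged sketch: download one placeholder URL and inspect what it contains.
import urllib.request
import torch

url = ("https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/"
       "96f482d823ce9f7b78fd2d399f7032c3c2cc11df/checks/biggan/train_hypers.pt")
urllib.request.urlretrieve(url, "train_hypers.pt")

# Newer torch versions default to weights_only loading and may need
# torch.load(..., weights_only=False) for arbitrary pickled objects.
obj = torch.load("train_hypers.pt", map_location="cpu")
print(type(obj))
if isinstance(obj, dict):
    print(list(obj.keys()))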
/data/dalle_2/0_real/img_1121979.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_1121979.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_1256926.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_1256926.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_1318368.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_1318368.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_1482420.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_1482420.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_2101.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_2101.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_2431.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_2431.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_250006.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_250006.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_653783.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_653783.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_685915.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_685915.png
--------------------------------------------------------------------------------
/data/dalle_2/0_real/img_69.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/0_real/img_69.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_1341638.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_1341638.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_1486737.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_1486737.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_1535500.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_1535500.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_1543532.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_1543532.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_205082.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_205082.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_26962.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_26962.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_532821.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_532821.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_758685.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_758685.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_901534.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_901534.png
--------------------------------------------------------------------------------
/data/dalle_2/1_fake/img_942417.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_2/1_fake/img_942417.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_1125.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_1125.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_121274.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_121274.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_179228.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_179228.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_2899.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_2899.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_384396.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_384396.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_397555.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_397555.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_589795.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_589795.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_595583.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_595583.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_675501.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_675501.png
--------------------------------------------------------------------------------
/data/dalle_mini/0_real/img_97210.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/0_real/img_97210.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_1087131.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_1087131.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_1189029.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_1189029.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_1224023.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_1224023.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_1313947.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_1313947.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_241420.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_241420.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_524967.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_524967.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_586942.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_586942.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_761237.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_761237.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_782657.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_782657.png
--------------------------------------------------------------------------------
/data/dalle_mini/1_fake/img_978369.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/dalle_mini/1_fake/img_978369.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_235475.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_235475.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_28578.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_28578.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_3233.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_3233.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_4995.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_4995.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_571988.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_571988.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_574345.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_574345.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_594049.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_594049.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_633294.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_633294.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_737.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_737.png
--------------------------------------------------------------------------------
/data/glide/0_real/img_80431.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/0_real/img_80431.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_1038562.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_1038562.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_1055638.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_1055638.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_1212279.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_1212279.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_1488167.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_1488167.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_215830.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_215830.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_253710.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_253710.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_696075.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_696075.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_706446.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_706446.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_868348.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_868348.png
--------------------------------------------------------------------------------
/data/glide/1_fake/img_99127.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/glide/1_fake/img_99127.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_153103.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_153103.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_177489.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_177489.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_263169.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_263169.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_415579.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_415579.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_4329.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_4329.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_4662.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_4662.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_503621.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_503621.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_567984.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_567984.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_592099.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_592099.png
--------------------------------------------------------------------------------
/data/mj/0_real/img_612276.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/0_real/img_612276.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_990849408058933258.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_990849408058933258.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_991166194365255741.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_991166194365255741.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_991455424593281054.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_991455424593281054.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_991620679730475028.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_991620679730475028.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_991814118162055238.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_991814118162055238.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_992081131874828308.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_992081131874828308.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_992483168701653004.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_992483168701653004.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_992511603599736853.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_992511603599736853.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_992890606516965376.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_992890606516965376.png
--------------------------------------------------------------------------------
/data/mj/1_fake/img_img_993280554420154388.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/mj/1_fake/img_img_993280554420154388.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_102.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_102.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_1181282.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_1181282.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_1304491.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_1304491.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_1373778.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_1373778.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_202957.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_202957.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_332318.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_332318.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_338417.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_338417.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_3521.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_3521.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_590038.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_590038.png
--------------------------------------------------------------------------------
/data/sd14/0_real/img_62051.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/0_real/img_62051.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_107403.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_107403.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_1134171.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_1134171.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_1239067.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_1239067.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_1265.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_1265.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_1402279.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_1402279.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_1464360.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_1464360.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_1586156.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_1586156.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_628810.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_628810.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_635076.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_635076.png
--------------------------------------------------------------------------------
/data/sd14/1_fake/img_996540.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd14/1_fake/img_996540.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_1023256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_1023256.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_1180182.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_1180182.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_1310855.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_1310855.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_1373696.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_1373696.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_216000.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_216000.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_341347.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_341347.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_346558.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_346558.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_361478.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_361478.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_606665.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_606665.png
--------------------------------------------------------------------------------
/data/sd21/0_real/img_636328.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/0_real/img_636328.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_107095.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_107095.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_1126950.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_1126950.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_1236963.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_1236963.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_1266460.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_1266460.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_1408.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_1408.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_1474487.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_1474487.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_160675.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_160675.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_645925.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_645925.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_652484.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_652484.png
--------------------------------------------------------------------------------
/data/sd21/1_fake/img_995393.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/data/sd21/1_fake/img_995393.png
--------------------------------------------------------------------------------
/data_dif.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torchvision.transforms as transforms
3 | from torch.utils.data import Dataset, DataLoader
4 | import numpy as np
5 | from PIL import Image
6 |
7 | prnu_tens = transforms.ToTensor()
8 |
9 | def shuffle_split(data_list, n_train):
10 | idx_list = list(range(len(data_list)))
11 | np.random.shuffle(idx_list)
12 | train_idx_list = idx_list[:n_train]
13 | test_idx_list = idx_list[n_train:]
14 |
15 | return np.array(data_list)[train_idx_list], np.array(data_list)[test_idx_list]
16 |
17 |
18 | def prep_data_sets(real_dir, fake_dir, h_dict, test_only=False):
19 | real_path_list = [list(real_dir.glob('*.' + x)) for x in ['jpg', 'jpeg', 'png']]
20 | real_paths = [ele for ele in real_path_list if ele != []][0]
21 |
22 | fake_path_list = [list(fake_dir.glob('*.' + x)) for x in ['jpg', 'jpeg', 'png']]
23 | fake_paths = [ele for ele in fake_path_list if ele != []][0]
24 |
25 | if not test_only:
26 | n_train = h_dict["Train Size"]
27 |
28 | n_real = len(real_paths)
29 | n_fake = len(fake_paths)
30 | n = n_real
31 |
32 | if n_real > n_fake:
33 |
34 | idx_list = list(range(n_real))
35 | np.random.shuffle(idx_list)
36 | img_idx_list = idx_list[:n_fake]
37 | real_paths = [real_paths[i] for i in img_idx_list]
38 | n = n_fake
39 |
40 | elif n_real < n_fake:
41 |
42 | idx_list = list(range(n_fake))
43 | np.random.shuffle(idx_list)
44 | img_idx_list = idx_list[:n_real]
45 | fake_paths = [fake_paths[i] for i in img_idx_list]
46 | n = n_real
47 |
48 | if n < n_train:
49 | raise Exception(f"{n_train} images were requested for train, but there are only {n} images.")
50 |
51 | train_real_paths, test_real_paths = shuffle_split(real_paths, n_train)
52 | train_fake_paths, test_fake_paths = shuffle_split(fake_paths, n_train)
53 |
54 | train_set = PRNUData(train_real_paths, train_fake_paths, h_dict)
55 | test_set = PRNUData(test_real_paths, test_fake_paths, h_dict)
56 |
57 | file_dict = {"Train Real": train_real_paths,
58 | "Test Real": test_real_paths,
59 | "Train Fake": train_fake_paths,
60 | "Test Fake": test_fake_paths}
61 |
62 | return train_set, test_set, file_dict
63 |
64 | else:
 65 |         test_set = PRNUData(real_paths, fake_paths, h_dict)
 66 | 
 67 |         file_dict = {"Real": real_paths,
 68 |                      "Fake": fake_paths}
69 |
70 | return None, test_set, file_dict
71 |
72 |
73 | def rescale_img(img):
74 | return (img - img.min()) / (img.max() - img.min() + 1e-8)
75 |
76 |
77 | def load_pil_image(img_path, img_size=None):
78 | img = Image.open(img_path).convert('RGB')
79 |
80 | if img_size is not None:
81 | w, h = img.size
82 | left = (w - img_size[1]) / 2
83 | top = (h - img_size[0]) / 2
84 | right = (w + img_size[1]) / 2
85 | bottom = (h + img_size[0]) / 2
86 |
87 | img = img.crop((left, top, right, bottom))
88 |
89 | return img
90 |
91 | def produce_fft(finger_npy):
92 | fft_f = np.fft.fft2(finger_npy - finger_npy.mean(), axes=(0, 1), norm='forward')
93 |
94 | finger_spec = rescale_img(np.log(np.abs(fft_f)))
95 | finger_spec = np.fft.fftshift(finger_spec) ** 4
96 |
97 | return finger_spec
98 |
99 | class PRNUData(Dataset):
100 | def __init__(self, real_paths, fake_paths, hyper_pars,
101 | demand_equal=True,
102 | train_mode=True):
103 |
104 | self.real_paths = real_paths
105 | self.fake_paths = fake_paths
106 | self.file_list = None
107 |
108 | self.real_labels = None
109 | self.fake_labels = None
110 | self.label_list = None
111 |
112 | self.crop_size = hyper_pars['Crop Size']
113 | self.batch_size = hyper_pars['Batch Size']
114 |
115 | self.prep_inputs(demand_equal)
116 | self.init_loader()
117 | self.train_mode = train_mode
118 |
119 | def prep_inputs(self, demand_equal):
120 |
121 | n_real = len(self.real_paths)
122 | n_fake = len(self.fake_paths)
123 | n = n_real
124 |
125 | if demand_equal:
126 | if n_real > n_fake:
127 | self.real_paths = self.real_paths[:n_fake]
128 | n = n_fake
129 |
130 | elif n_real < n_fake:
131 | self.fake_paths = self.fake_paths[:n_real]
132 | n = n_real
133 |
134 | self.real_labels = torch.zeros((len(self.real_paths),))
135 | self.fake_labels = torch.ones((len(self.fake_paths),))
136 |
137 | self.file_list = np.array(list(self.real_paths) + list(self.fake_paths))
138 | self.label_list = torch.cat((self.real_labels, self.fake_labels), dim=0).type(torch.bool)
139 |
140 | def init_loader(self):
141 | self.loader = DataLoader(self, batch_size=self.batch_size, shuffle=True, drop_last=False)
142 |
143 | def get_loader(self):
144 | return self.loader
145 |
146 | def __len__(self):
147 | return len(self.file_list)
148 |
149 | def __getitem__(self, idx):
150 | img_path = self.file_list[idx]
151 |
152 | image = load_pil_image(img_path, self.crop_size)
153 | label = self.label_list[idx]
154 | image = np.array(image)
155 |
156 | image = torch.tensor(image.transpose((2, 0, 1))).type(torch.float32).div(255)
157 |
158 | return image, label
159 |
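A minimal usage sketch for `prep_data_sets` and the resulting loader. The directory layout follows the `/data` folders above; the hyperparameter values are illustrative, not the ones used for the released fingerprints.

```
from pathlib import Path
import data_dif as data

# Illustrative hyperparameters; only the keys read by prep_data_sets / PRNUData are needed here.
h_dict = {"Train Size": 8, "Crop Size": (256, 256), "Batch Size": 4}

train_set, test_set, file_dict = data.prep_data_sets(
    Path("data/sd14/0_real"), Path("data/sd14/1_fake"), h_dict)

for images, labels in train_set.get_loader():
    # images: (batch, 3, 256, 256) float tensor in [0, 1]; labels: bool (1 = fake)
    print(images.shape, labels)
    break
```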
--------------------------------------------------------------------------------
/dcnn_loader.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import torch
3 | import numpy as np
4 | from dncnn.trainer import TrainerDnCNN, load_model
5 |
6 |
7 | dncnn_root = Path("dncnn")
8 |
9 | def load_denoiser(device: torch.device, trainable: bool = False) -> torch.nn.Module:
10 |
11 | denoiser_prnu_np = np.load(str(dncnn_root / r"clean_real.npy"), allow_pickle=True)
12 |
13 | trainer = load_model(TrainerDnCNN, dncnn_root / f"chk_2000.pt", device)
14 | model = trainer.denoiser.to(device)
15 |
16 | denoiser_prnu = torch.tensor(denoiser_prnu_np.transpose((2, 0, 1))).to(device).unsqueeze(0)
17 |
18 | model.prnu = denoiser_prnu
19 |
20 | if not trainable:
21 | model.eval()
22 | for param in model.parameters():
23 | param.requires_grad = False
24 | else:
25 | for param in model.parameters():
26 | param.requires_grad = True
27 |
28 | return model
29 |
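A short sketch of how the frozen denoiser is used to obtain a noise residual. It assumes `dncnn/chk_2000.pt` and `dncnn/clean_real.npy` have been downloaded; the input tensor is a dummy.

```
import torch
from dcnn_loader import load_denoiser

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
denoiser = load_denoiser(device, trainable=False)   # frozen, eval mode

img = torch.rand(1, 3, 256, 256).to(device)         # dummy RGB batch in [0, 1]
residual = denoiser.denoise(img)                     # same spatial size as the input
print(residual.shape)
```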
--------------------------------------------------------------------------------
/dncnn/chk_2000.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/dncnn/chk_2000.pt
--------------------------------------------------------------------------------
/dncnn/clean_real.npy:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/dncnn/clean_real.npy
--------------------------------------------------------------------------------
/dncnn/model.py:
--------------------------------------------------------------------------------
1 | from torch import nn
2 | import torch.nn.functional as F
3 |
4 | class DnCNN(nn.Module):
5 | def __init__(self, channels, num_of_layers=17):
6 | super(DnCNN, self).__init__()
7 | kernel_size = 3
8 | padding = 1
9 | features = 64
10 | layers = []
11 | layers.append(nn.Conv2d(in_channels=channels, out_channels=features, kernel_size=kernel_size, padding=padding,
12 | bias=False))
13 | layers.append(nn.ReLU(inplace=True))
14 | for _ in range(num_of_layers - 2):
15 | layers.append(
16 | nn.Conv2d(in_channels=features, out_channels=features, kernel_size=kernel_size, padding=padding,
17 | bias=False))
18 | layers.append(nn.BatchNorm2d(features))
19 | layers.append(nn.ReLU(inplace=True))
20 | layers.append(nn.Conv2d(in_channels=features, out_channels=channels, kernel_size=kernel_size, padding=padding,
21 | bias=False))
22 | self.dncnn = nn.Sequential(*layers)
23 |
24 | self._initialize_weights()
25 |
26 | self.prnu = None
27 |
28 | def _initialize_weights(self):
29 | for m in self.modules():
30 | if isinstance(m, nn.Conv2d):
31 | nn.init.kaiming_normal_(m.weight)
32 | elif isinstance(m, nn.BatchNorm2d):
33 | nn.init.ones_(m.weight)
34 | nn.init.zeros_(m.bias)
35 |
36 | def forward(self, x):
37 | out = self.dncnn(x)
38 | return out
39 |
40 | def denoise(self, x):
41 | x = F.pad(x, (10, 10, 10, 10))
42 | res = self.dncnn(x)[:, :, 10:-10, 10:-10]
43 |
44 | if res.size()[2] != self.prnu.size()[2] or res.size()[3] != self.prnu.size()[3]:
45 | return res
46 | else:
47 | return res - self.prnu
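A self-contained shape check for `DnCNN.denoise`. The zero `prnu` tensor is only a stand-in; in the pipeline it is loaded from `dncnn/clean_real.npy`.

```
import torch
from dncnn.model import DnCNN

net = DnCNN(channels=3, num_of_layers=17).eval()
net.prnu = torch.zeros(1, 3, 256, 256)    # stand-in for the stored PRNU

with torch.no_grad():
    x = torch.rand(1, 3, 256, 256)
    res = net.denoise(x)                   # padded forward pass, PRNU subtracted
print(res.shape)                           # torch.Size([1, 3, 256, 256])
```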
--------------------------------------------------------------------------------
/dncnn/trainer.py:
--------------------------------------------------------------------------------
1 |
2 | from torch import optim, nn
3 | import torch
4 | from dncnn.model import DnCNN
5 |
6 |
7 | relu = nn.ReLU()
8 |
9 | class TrainerDnCNN(nn.Module):
10 | def __init__(self, hyperparams):
11 | super(TrainerDnCNN, self).__init__()
12 |
13 | # Hyperparameters
14 | self.device = hyperparams['Device']
15 | self.init_lr = hyperparams['LR']
16 | self.ch_o = hyperparams['Out. Channel']
17 | self.m = hyperparams['Margin']
18 | self.batch_size = hyperparams['Batch Size']
19 | self.crop_size = hyperparams['Crop Size']
20 | self.depth = hyperparams['Depth']
21 | self.crop_b = hyperparams['Crop Batch']
22 |
23 | self.train_loss = []
24 |
25 | self.train_mean_r = []
26 | self.train_mean_f = []
27 |
28 | self.test_mean_r = []
29 | self.test_mean_f = []
30 |
31 | # Model initialization
32 | self.denoiser = DnCNN(self.ch_o, self.depth).to(self.device)
33 | self.optimizer = optim.AdamW(self.denoiser.parameters(), lr=self.init_lr)
34 | self.scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, 0.998)
35 |
36 | self.loss_fun = nn.MSELoss(reduction='sum')
37 | self.loss_bce = nn.BCELoss(reduction='none')
38 |
39 |
40 | def load_model(trainer, path, device):
41 | if device.type == 'cpu':
42 | data_dict = torch.load(path, map_location=torch.device('cpu'))
43 | else:
44 | data_dict = torch.load(path)
45 |
46 | try:
47 | trainer.unet.load_state_dict(data_dict['G state'])
48 | trainer.train_loss = data_dict['Train G Loss']
49 |
50 | return len(trainer.train_loss)
51 | except:
52 | return data_dict
53 |
54 |
--------------------------------------------------------------------------------
/eval_dif.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import torch
3 |
4 | import data_dif as data
5 | from trainer_dif import TrainerMultiple
6 | from utils import *
7 | import pickle
8 | from pathlib import Path
9 |
10 |
11 | def parse_arguments() -> argparse.Namespace:
12 | parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, )
13 |
14 | parser.add_argument("fingerprint_dir", type=str,
15 | help="Directory containing fingerprint and train values")
16 | parser.add_argument("image_dir", type=str,
17 | help="Directory containing real and fake images within 0_real and 1_fake subdirectories")
18 | parser.add_argument("--epoch", type=int, default=0, required=False,
19 | help="Check point epoch to load")
20 | parser.add_argument("--batch", type=int, default=64, required=False,
21 | help="Batch size")
22 |
23 | parsed_args = parser.parse_args()
24 | return parsed_args
25 |
26 |
27 | def test_dif_directory(args: argparse.Namespace) -> (float, float):
28 | '''
29 |
30 | :param args: parser arguments (image directory, fingerprint directory, checkpoint epoch)
31 | :return: Accuracies for real and fake images
32 | '''
33 |
34 | model_ep = args.epoch
35 | images_dir = Path(args.image_dir)
36 | check_dir = Path(args.fingerprint_dir)
37 |
38 | check_existence(check_dir, False)
39 | check_existence(images_dir, False)
40 |
41 | with open(check_dir / "train_hypers.pt", 'rb') as pickle_file:
42 | hyper_pars = pickle.load(pickle_file)
43 |
44 | hyper_pars['Device'] = torch.device("cuda" if torch.cuda.is_available() else "cpu")
45 | hyper_pars['Batch Size'] = args.batch
46 |
47 | print(f'Working on {images_dir.stem}')
48 |
49 | real_path_list = [list((images_dir / "0_real").glob('*.' + x)) for x in ['jpg', 'jpeg', 'png']]
50 | real_path_list = [ele for ele in real_path_list if ele != []][0]
51 |
52 | fake_path_list = [list((images_dir / "1_fake").glob('*.' + x)) for x in ['jpg', 'jpeg', 'png']]
53 | fake_path_list = [ele for ele in fake_path_list if ele != []][0]
54 |
55 | test_set = data.PRNUData(real_path_list, fake_path_list, hyper_pars, demand_equal=False,
56 | train_mode=False)
57 |
58 | trainer = TrainerMultiple(hyper_pars)
59 | trainer.load_stats(check_dir / f"chk_{model_ep}.pt")
60 |
61 | trainer.test_model(test_set.get_loader())
62 | acc_f, acc_r = trainer.calc_accuracy(print_res=False)
63 |
64 | return acc_f, acc_r
65 |
66 |
67 | if __name__ == '__main__':
68 | acc_f, acc_r = test_dif_directory(parse_arguments())
69 | print(f'Real Acc. {100 * acc_r:.1f}% | Fake Acc. {100 * acc_f:.1f}% ---> Acc. {50 * (acc_r + acc_f):.1f}%')
70 |
--------------------------------------------------------------------------------
/model.py:
--------------------------------------------------------------------------------
1 | from basic_layers import *
2 | from torch import nn
3 |
4 |
5 | class Unet(nn.Module):
6 | def __init__(self, device, inp_ch=1, out_ch=1,
7 | arch=16, depth=3, activ='leak', concat=None):
8 | super(Unet, self).__init__()
9 |
10 | self.activ = activ
11 | self.device = device
12 | self.out_ch = out_ch
13 | self.inp_ch = inp_ch
14 | self.depth = depth
15 | self.arch = arch
16 | self.concat = None
17 |
18 | self.arch_n = []
19 | self.enc = []
20 | self.dec = []
21 | self.layers = []
22 | self.skip = []
23 |
24 | self.check_concat(concat)
25 | self.prep_arch_list()
26 | self.organize_arch()
27 | self.prep_params()
28 |
29 | def check_concat(self, con):
30 | if con is None:
31 | self.concat = [1] * self.depth
32 | elif len(con) > self.depth:
33 | self.concat = con[:self.depth]
34 |             self.concat = 2 * self.concat
35 | self.concat[self.concat == 0] = 1
36 | elif len(con) < self.depth:
37 | self.concat = con + [0] * (self.depth - len(con))
38 |             self.concat = 2 * self.concat
39 | self.concat[self.concat == 0] = 1
40 | else:
41 | self.concat = 2 * con
42 | self.concat[self.concat == 0] = 1
43 |
44 | def prep_arch_list(self):
45 | for dl in range(0, self.depth + 1):
46 | self.arch_n.append((2 ** (dl - 1)) * self.arch)
47 |
48 | self.arch_n[0] = self.inp_ch
49 |
50 | def organize_arch(self):
51 | for idx in range(len(self.arch_n) - 1):
52 | self.enc.append(
53 | Conv_Block(self.arch_n[idx], self.arch_n[idx + 1], activ=self.activ, pool='down_max'))
54 |
55 | self.layers = [Conv_Block(self.arch_n[-1], self.arch_n[-1], activ=self.activ, pool='up_stride')]
56 |
57 | for idx in range(len(self.arch_n) - 2):
58 | self.dec.append(
59 | Conv_Block(self.concat[- (idx + 1)] * self.arch_n[- (idx + 1)], self.arch_n[- (idx + 2)],
60 | activ=self.activ, pool='up_stride'))
61 | self.dec.append(Conv_Block(self.concat[0] * self.arch, self.arch, activ=self.activ))
62 | self.layers.append(Conv_Layer(self.arch, self.out_ch, 1, 1, norm=None, activ='tanh'))
63 |
64 | def prep_params(self):
65 | for blk_idx in range(len(self.enc)):
66 | self.add_module(f'enc_{blk_idx + 1}', self.enc[blk_idx])
67 |
68 | self.add_module(f'mid', self.layers[0])
69 |
70 | for blk_idx in range(len(self.dec)):
71 | self.add_module(f'dec_{blk_idx + 1}', self.dec[blk_idx])
72 |
73 | self.add_module(f'final', self.layers[1])
74 |
75 | def forward(self, img):
76 | h = img
77 | h_skip = []
78 |
79 | for conv in self.enc:
80 | hs, h = conv(h)
81 | h_skip.append(hs)
82 |
83 | _, h = self.mid(h)
84 |
85 | for l_idx in range(len(self.dec)):
86 | if self.concat[-(l_idx + 1)] == 2:
87 | _, h = self.dec[l_idx](concat_curr(h_skip[-(l_idx + 1)], h))
88 | else:
89 | _, h = self.dec[l_idx](h)
90 |
91 | h = self.final(h)
92 |
93 | return h
94 |
95 |
96 | # ----------------Test---------------------------
97 |
98 | if __name__ == '__main__':
99 | import torch
100 |
101 | if torch.cuda.is_available():
102 | device = 'cuda'
103 | else:
104 | device = 'cpu'
105 |
106 | x = torch.randn(1, 1, 128, 128).to(device)
107 |
108 | net = Conv_Layer(1,1,4,2, 2).to(device)
109 | y = net(x)
110 |
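For reference, a worked example of the channel widths `prep_arch_list` produces with the values `TrainerMultiple` passes in (`Inp. Channel` 16, `Arch.` 32, `Depth` 4):

```
# Channel widths produced by prep_arch_list for arch=32, depth=4, inp_ch=16.
arch, depth, inp_ch = 32, 4, 16
arch_n = [(2 ** (dl - 1)) * arch for dl in range(0, depth + 1)]
arch_n[0] = inp_ch
print(arch_n)   # [16, 32, 64, 128, 256]
```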
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
 1 | # Deep Image Fingerprint: Towards Low Budget Synthetic Image Detection and Model Lineage Analysis
 2 | 
 3 | Sergey Sinitsa, Ohad Fried
 4 | 
 5 | 
 6 | arXiv | Project Page | Test Dataset
 7 | 
 8 | 
 9 | 
10 | > The generation of high-quality images has become
11 | > widely accessible and is a rapidly evolving process. As a
12 | > result, anyone can generate images that are indistinguishable
13 | > from real ones. This leads to a wide range of applications,
14 | > including malicious usage with deceptive intentions.
15 | > Despite advances in detection techniques for generated images,
16 | > a robust detection method still eludes us. Furthermore,
17 | > model personalization techniques might affect the detection
18 | > capabilities of existing methods. In this work, we
19 | > utilize the architectural properties of convolutional neural
20 | > networks (CNNs) to develop a new detection method. Our
21 | > method can detect images from a known generative model
22 | > and enable us to establish relationships between fine-tuned
23 | > generative models. We tested the method on images produced
24 | > by both Generative Adversarial Networks (GANs)
25 | > and recent large text-to-image models (LTIMs) that rely
26 | > on Diffusion Models. Our approach outperforms others
27 | > trained under identical conditions and achieves comparable
28 | > performance to state-of-the-art pre-trained detection methods
29 | > on images generated by Stable Diffusion and MidJourney,
30 | > with significantly fewer required train samples.
31 |
32 | 
33 | Fingerprints in Image Space
34 | 
35 | *(figure: per-model fingerprints in image space; see `repo_images/finger_*.png`)*
36 | 
46 | Fingerprints in Fourier Space
47 | 
48 | *(figure: per-model fingerprint spectra; see `repo_images/fingerFFT_*.png`)*
49 | 
60 | ### Installation
61 |
62 | This project was tested using Python 3.10 with a GPU. However, it is not necessary to have a GPU for the evaluation
63 | process.
64 | The required dependencies are specified in the `requirements.txt` file.
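Dependencies can be installed in the usual way, e.g.:

```
pip install -r requirements.txt
```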
65 |
66 | ### Usage
67 |
68 | After setting up the repository, you may train the model or reproduce the experiments.
69 | We provide code for three experiments as described below.
70 |
71 | #### Gray image experiment
72 |
73 | To reproduce the artifacts with a gray image, simply run `blank_experiment.py` with the default parameters.
74 | An output directory will be created where you can find the reconstruction in both image space and Fourier space.
75 |
76 | Example:
77 |
78 | ```
79 | python blank_experiment.py
80 | ```
81 |
82 | #### Training the Model
83 |
84 | To run `train_dif.py`, you need to specify the data directory and the model directory.
85 | The data directory should include two subdirectories: `0_real` and `1_fake`, for real and fake images, respectively. The
86 | model directory will be used to store the extracted fingerprints.
87 |
88 | Example for Dall•E-2 model:
89 |
90 | ```
91 | python train_dif.py data_root/dalle_2 checkpoint_directory/dalle_2
92 | ```
93 |
94 | #### Testing the Model
95 |
96 | We include the extracted fingerprints of the LTIMs and GAN models described in the paper.
97 | In both cases the models were trained with 1024 samples. In addition, we provide 20 sample images per model
98 | in the `/data` folder.
99 |
100 | To reproduce the results for a given model, run `eval_dif.py` and specify the fingerprint directory and the data directory.
101 | Example for Dall•E-2 model:
102 |
103 | ```
104 | python eval_dif.py checks/dalle_2 data_root/dalle_2 --epoch 0
105 | ```
106 |
107 | `data_root` refers to the folder that contains a sub-folder for each generative model.
108 |
109 | The expected accuracy values (%) are below:
110 |
111 |
112 | | SD 1.4 | SD 2.1 | MJ | Dall•E-Mini | GLIDE | Dall•E-2 | CycleGAN | StyleGAN | StyleGAN2 | StarGAN | BigGAN | GauGAN | ProGAN |
113 | |--------|--------|------|-------------|-------|----------|----------|----------|-----------|---------|--------|--------|--------|
114 | | 99.3   | 89.5   | 99.0 | 99.0        | 90.3  | 79.5     | 94.4     | 91.5     | 99.9      | 96.9    | 91.8   | 92.6   | 57.7   |
141 |
142 |
143 |
144 |
--------------------------------------------------------------------------------
/repo_images/fingerFFT_dalle_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/fingerFFT_dalle_2.png
--------------------------------------------------------------------------------
/repo_images/fingerFFT_dalle_mini.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/fingerFFT_dalle_mini.png
--------------------------------------------------------------------------------
/repo_images/fingerFFT_glide.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/fingerFFT_glide.png
--------------------------------------------------------------------------------
/repo_images/fingerFFT_mj.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/fingerFFT_mj.png
--------------------------------------------------------------------------------
/repo_images/fingerFFT_sd14.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/fingerFFT_sd14.png
--------------------------------------------------------------------------------
/repo_images/fingerFFT_sd21.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/fingerFFT_sd21.png
--------------------------------------------------------------------------------
/repo_images/finger_dalle_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/finger_dalle_2.png
--------------------------------------------------------------------------------
/repo_images/finger_dalle_mini.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/finger_dalle_mini.png
--------------------------------------------------------------------------------
/repo_images/finger_glide.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/finger_glide.png
--------------------------------------------------------------------------------
/repo_images/finger_mj.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/finger_mj.png
--------------------------------------------------------------------------------
/repo_images/finger_sd14.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/finger_sd14.png
--------------------------------------------------------------------------------
/repo_images/finger_sd21.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Sergo2020/DIF_pytorch_official/96f482d823ce9f7b78fd2d399f7032c3c2cc11df/repo_images/finger_sd21.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | matplotlib==3.6.1
2 | numpy==1.23.4
3 | opencv_contrib_python==4.6.0.66
4 | Pillow==9.4.0
5 | torch==1.11.0
6 | torchvision==0.12.0
7 | tqdm==4.64.1
8 |
--------------------------------------------------------------------------------
/train_dif.py:
--------------------------------------------------------------------------------
1 | from tqdm import tqdm
2 | import torch
3 | import argparse
4 | import data_dif as data
5 | from trainer_dif import TrainerMultiple
6 | from utils import *
7 | from pathlib import Path
8 | import pickle
9 |
10 |
11 |
12 | def parse_arguments() -> argparse.Namespace:
13 | parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter, )
14 |
15 | parser.add_argument("image_dir", type=str,
16 | help="Directory containing real and fake images within 0_real and 1_fake subdirectories.")
17 | parser.add_argument("checkpoint_dir", type=str,
18 | help="Directory to save checkpoints. Model is not saved, only fingerprint and statistics.")
19 |     parser.add_argument("--e", type=int, default=100, required=False,
20 |                         help="Number of training epochs.")
21 |     parser.add_argument("--f", type=int, default=5, required=False,
22 |                         help="Checkpoint frequency (in epochs).")
23 |     parser.add_argument("--lr", type=float, default=5e-4, required=False,
24 |                         help="Learning rate")
25 |     parser.add_argument("--tr", type=int, default=512, required=False,
26 |                         help="Number of train samples per real/fake class.")
27 |     parser.add_argument("--cs", type=int, default=256, required=False,
28 |                         help="Crop size (w=h)")
29 |     parser.add_argument("--a", type=float, default=1.0, required=False,
30 |                         help="Alpha - augmentations")
31 |     parser.add_argument("--b", type=bool, default=False, required=False,
32 |                         help="Booster loss")
33 |     parser.add_argument("--bs", type=int, default=64, required=False,
34 |                         help="Batch size")
35 |
36 | parsed_args = parser.parse_args()
37 | return parsed_args
38 |
39 |
40 | def train_model(args: argparse.Namespace) -> None:
41 |
42 | data_root = Path(args.image_dir)
43 | check_dir = Path(args.checkpoint_dir)
44 |
45 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
46 |
47 |     hyper_pars = {'Epochs': args.e, 'Factor': args.f, 'Noise Type': 'uniform', "Train Size": args.tr,
48 |                   'Noise STD': 0.03, 'Inp. Channel': 16, 'Batch Size': args.bs,
49 |                   'LR': args.lr, 'Device': device, 'Crop Size': (args.cs, args.cs), 'Margin': 0.01,
50 |                   'Out. Channel': 3, 'Arch.': 32, 'Depth': 4, 'Alpha': args.a, 'Boost': args.b,
51 |                   'Concat': [1, 1, 1, 1]}
52 |
53 | check_existence(check_dir, True)
54 | check_existence(data_root, False)
55 |
56 | print('Preparing Data Sets...')
57 |
58 | real_data_root = data_root / "0_real"
59 | fake_data_root = data_root / "1_fake"
60 |
61 | real_path_list = [list(real_data_root.glob('*.' + x)) for x in ['jpg', 'jpeg', 'png']]
62 | real_path_list = [ele for ele in real_path_list if ele != []][0]
63 |
64 | fake_path_list = [list(fake_data_root.glob('*.' + x)) for x in ['jpg', 'jpeg', 'png']]
65 | fake_path_list = [ele for ele in fake_path_list if ele != []][0]
66 |
67 | train_set = data.PRNUData(real_path_list, fake_path_list, hyper_pars, demand_equal=False,
68 | train_mode=False)
69 | train_loader = train_set.get_loader()
70 |
71 | pickle.dump(hyper_pars, open((check_dir / 'train_hypers.pt'), 'wb'))
72 |
73 | print('Preparing Trainer...')
74 | trainer = TrainerMultiple(hyper_pars).to(hyper_pars['Device'])
75 |
76 | epochs = list(range(1, hyper_pars['Epochs'] + 1))
77 | pbar = tqdm(total=len(epochs), desc='')
78 |
79 | for ep in epochs:
80 | pbar.update()
81 |
82 | for residual, labels in train_loader:
83 | trainer.train_step(residual, labels)
84 |
85 | if (ep % hyper_pars['Factor']) == 0:
86 | if ep > 0:
87 | trainer.save_stats(check_dir / ('chk_' + str(ep) + '.pt'))
88 |
89 | pbar.postfix = f'Loss C {np.mean(trainer.train_loss[-10:]):.3f} ' + \
90 | f'| Fake C {np.mean(trainer.train_corr_f[-10:]):.3f} | Real C {np.mean(trainer.train_corr_r[-10:]):.3f}'
91 |
92 | trainer.save_stats(check_dir / ('chk_' + str(hyper_pars['Epochs']) + '.pt'))
93 | torch.cuda.empty_cache()
94 |
95 |
96 | if __name__ == '__main__':
97 | train_model(parse_arguments())
98 |
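The hyperparameter dictionary saved above is what `eval_dif.py` later reloads. A quick way to inspect it (the path assumes the `checks/` layout shipped with the repository, and that the shipped files follow the same pickle format):

```
import pickle

with open("checks/dalle_2/train_hypers.pt", "rb") as f:
    hyper_pars = pickle.load(f)

print(sorted(hyper_pars.keys()))
print(hyper_pars.get("Crop Size"), hyper_pars.get("Train Size"))
```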
--------------------------------------------------------------------------------
/trainer_dif.py:
--------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | import numpy as np
3 |
4 | from torch import optim, nn
5 | import torch
6 | from tqdm import tqdm
7 |
8 | from dcnn_loader import load_denoiser
9 | import model
10 | from utils import calc_even_size, produce_spectrum
11 |
12 | relu = nn.ReLU()
13 |
14 |
15 | class TrainerMultiple(nn.Module):
16 | def __init__(self, hyperparams):
17 | super(TrainerMultiple, self).__init__()
18 |
19 | # Hyperparameters
20 | self.device = hyperparams['Device']
21 | self.init_lr = hyperparams['LR']
22 | self.ch_i = hyperparams['Inp. Channel']
23 | self.ch_o = hyperparams['Out. Channel']
24 | self.arch = hyperparams['Arch.']
25 | self.depth = hyperparams['Depth']
26 | self.concat = np.array(hyperparams['Concat'])
27 | self.m = hyperparams['Margin']
28 | self.batch_size = hyperparams['Batch Size']
29 | self.alpha = hyperparams['Alpha']
30 |         # Older hyperparameter dicts may not contain the 'Boost' key,
31 |         # so fall back to False when it is missing.
32 |         self.boost = hyperparams.get('Boost', False)
33 | 
34 |
35 | self.train_loss = []
36 | self.train_corr_r = None
37 | self.train_corr_f = None
38 |
39 | self.test_loss = []
40 | self.test_corr_r = []
41 | self.test_corr_f = []
42 | self.test_labels = []
43 |
44 | self.noise_type = hyperparams['Noise Type']
45 | self.noise_std = hyperparams['Noise STD']
46 | self.noise_channel = hyperparams['Inp. Channel']
47 | self.crop_size = hyperparams['Crop Size']
48 |
49 | d_h, n_h, d_w, n_w = calc_even_size(self.crop_size, self.depth)
50 | self.crop_size = (n_h - d_h, n_w - d_w)
51 | self.d_h, self.n_h = d_h, n_h
52 | self.d_w, self.n_w = d_w, n_w
53 |
54 | # Model initialization
55 | self.noise = None
56 |
57 | self.denoiser = load_denoiser(self.device)
58 | self.unet = model.Unet(self.device, self.ch_i, self.ch_o, self.arch,
59 | activ='leak', depth=self.depth, concat=self.concat).to(self.device)
60 | self.optimizer = optim.AdamW(self.unet.parameters(), lr=self.init_lr)
61 |
62 | self.loss_mse = nn.MSELoss()
63 |
64 | self.init_train()
65 |
66 | def norm_val(self, arr):
67 | return (arr - arr.mean((1, 2, 3)).view(-1, 1, 1, 1)) / (arr.std((1, 2, 3)).view(-1, 1, 1, 1) + 1e-8)
68 |
69 | def init_train(self, n=1):
70 | self.noise = init_dummy(n, self.noise_type, self.crop_size, self.noise_channel)
71 | self.fingerprint = None
72 |
73 | def prep_noise(self, var=-1):
74 | if var == -1:
75 | return self.noise + torch.randn_like(self.noise.detach()) * self.noise_std
76 | else:
77 | return self.noise + torch.randn_like(self.noise.detach()) * var
78 |
79 | def corr_fun(self, out, target):
80 | # Pearson Correlation Coefficient (NNC(0,0))
81 | out = self.norm_val(out)
82 | target = self.norm_val(target)
83 |
84 | return out * target
85 |
86 | def loss_contrast(self, corrs, labs):
87 | # Label: 0 - Real, 1 - Fake
88 | # Similarity: 0 - Similar, 1 - Different
89 | n = len(corrs) // 2
90 | corr_a = corrs[:n]
91 | lab_a = labs[:n]
92 |
93 | corr_b = corrs[n:]
94 | lab_b = labs[n:]
95 |
96 | sim_label = torch.bitwise_xor(lab_a, lab_b).type(torch.float64) # .view(-1, 1)
97 | corr_delta = torch.sqrt(((corr_a - corr_b) ** 2))
98 | loss = sim_label * (self.m - corr_delta) + (1. - sim_label) * corr_delta
99 |
100 | return relu(loss)
101 |
102 | def train_step(self, images, labels):
103 |
104 | images = images.to(self.device)
105 | labels = labels.to(self.device)
106 |
107 | self.unet.train()
108 | self.optimizer.zero_grad()
109 |
110 | residuals = self.denoiser.denoise(images).detach()
111 | alpha = (1 - self.alpha) * torch.rand((len(images), 1, 1, 1)).to(self.device) + self.alpha
112 | residuals = alpha * residuals
113 |
114 | f_mean = residuals[labels].mean(0, keepdims=True)
115 | r_mean = residuals[~labels].mean(0, keepdims=True)
116 |
117 | residuals = torch.cat((residuals, f_mean, r_mean), dim=0)
118 |
119 | dmy = self.prep_noise().to(self.device)
120 | out = self.unet(dmy).repeat(len(images) + 2, 1, 1, 1)
121 |
122 | corr = self.corr_fun(out, residuals)
123 |
124 | loss = self.loss_contrast(corr[:-2].mean((1, 2, 3)), labels).mean() / self.m
125 |
126 | if self.boost:
127 | corr_mean_d = torch.sqrt((corr[-2].mean() - corr[-1].mean()) ** 2)
128 | loss_b = relu(self.m - corr_mean_d) / self.m
129 | loss += loss_b
130 | loss *= 0.5
131 |
132 | loss.backward()
133 | self.optimizer.step()
134 |
135 | if self.fingerprint is None:
136 | self.fingerprint = out[0:1].detach()
137 | else:
138 | self.fingerprint = self.fingerprint * 0.99 + out[0:1].detach() * (1 - 0.99)
139 |
140 | corr = self.corr_fun(self.fingerprint.repeat(len(images), 1, 1, 1), residuals[:-2]).mean((1, 2, 3))
141 |
142 | self.train_loss.append(loss.item())
143 |
144 | if self.train_corr_r is None:
145 | self.train_corr_r = [corr[~labels].mean().item()]
146 | self.train_corr_f = [corr[labels].mean().item()]
147 | else:
148 | corr_r = corr[~labels]
149 | corr_f = corr[labels]
150 | self.train_corr_r.append(corr_r.mean().item())
151 | self.train_corr_f.append(corr_f.mean().item())
152 |
153 | def reset_test(self):
154 | self.test_corr_r = None
155 | self.test_corr_f = None
156 |
157 | self.test_loss = []
158 | self.test_labels = []
159 |
160 | def test_model(self, test_loader, custom_finger=None):
161 |
162 | self.reset_test()
163 | self.calc_centers()
164 |
165 | if custom_finger is None:
166 | fingerprint = self.fingerprint.to(self.device)
167 |             # a (1, C, H, W) fingerprint broadcasts against the residual batch in corr_fun
168 |
169 |
170 | else:
171 | if isinstance(custom_finger, np.ndarray):
172 | custom_finger = torch.Tensor(custom_finger.transpose((2, 0, 1))).type(torch.float32)
173 |
174 | fingerprint = custom_finger.to(self.device)
175 | fingerprint = fingerprint.repeat((self.batch_size, 1, 1, 1))
176 |
177 | with torch.no_grad():
178 | for images, labels in tqdm(test_loader, desc='Testing Model'):
179 | images = images.to(self.device)
180 | labels = labels.to(self.device)
181 |
182 | residuals = self.denoiser.denoise(images).float()
183 |
184 | corr = self.corr_fun(fingerprint, residuals)
185 | loss = self.loss_contrast(corr.mean((1, 2, 3)), labels) / self.m
186 |
187 | corr = corr.mean((1, 2, 3))
188 |
189 | self.test_loss = self.test_loss + loss.tolist()
190 | self.test_labels = self.test_labels + labels.tolist()
191 |
192 | if self.test_corr_r is None:
193 | self.test_corr_r = corr[~labels].cpu().numpy()
194 | self.test_corr_f = corr[labels].cpu().numpy()
195 | else:
196 | self.test_corr_r = np.append(self.test_corr_r, corr[~labels].cpu().numpy(), axis=0)
197 | self.test_corr_f = np.append(self.test_corr_f, corr[labels].cpu().numpy(), axis=0)
198 |
199 | def produce_fingerprint(self, np=True):
200 | with torch.no_grad():
201 | out = self.fingerprint[0]
202 |
203 | if np:
204 | return out.cpu().numpy().transpose((1, 2, 0))
205 | else:
206 | return out
207 |
208 | def plot_loss(self, train=True):
209 | plt.figure(figsize=(10, 6))
210 |
211 | if train:
212 | plt.scatter(np.arange(1, len(self.train_loss) + 1), self.train_loss, s=3, label='Loss', c='g')
213 | plt.xlabel('Batch Index')
214 | plt.ylabel('Mean Sample Loss')
215 | plt.title('Train Loss')
216 |
217 | else:
218 | self.test_labels = np.array(self.test_labels)
219 | colors = np.array([(1., 0., 0.)] * len(self.test_labels))
220 | colors[self.test_labels == 0] = (0., 1., 0.)
221 |
222 | plt.scatter(np.arange(1, len(self.test_loss) + 1), self.test_loss, s=3, label='Loss', c=colors)
223 | plt.xlabel('Label Index')
224 | plt.ylabel('Sample Loss')
225 | plt.title('Test Loss')
226 |
227 | plt.grid(True)
228 | plt.ylim([0., 1.0])
229 | plt.legend(fontsize=12)
230 | plt.tight_layout()
231 |
232 | plt.show()
233 |
234 | def show_fingerprint(self):
235 | finger = self.produce_fingerprint()
236 | finger = 0.5 * finger + 0.5
237 |
238 | plt.figure(figsize=(4, 4))
239 |
240 | plt.imshow(finger)
241 | plt.axis(False)
242 | plt.title('Fingerprint')
243 |
244 | plt.show()
245 |
246 | dct_finger = produce_spectrum(finger)
247 | dct_finger = (dct_finger - dct_finger.min()) / (dct_finger.max() - dct_finger.min())
248 |
249 | plt.figure(figsize=(4, 4))
250 |
251 | plt.imshow(dct_finger, 'bone')
252 | plt.axis(False)
253 | plt.title('Fingerprint FFT')
254 |
255 | plt.show()
256 |
257 | def plot_corr(self, train=True):
258 |
259 | plt.figure(figsize=(10, 6))
260 |
261 | if train:
262 |
263 | plt.scatter(np.arange(len(self.train_corr_r)), self.train_corr_r, s=3,
264 | label='Real Corr.', c='g')
265 | plt.scatter(np.arange(len(self.train_corr_f)), self.train_corr_f, s=3,
266 | label='Fake Corr.', c='r')
267 |
268 | plt.xlabel('Batch Index')
269 | plt.ylabel('Mean Sample Corr.')
270 | plt.title('Train Correlation')
271 |
272 | else:
273 |
274 | plt.scatter(np.arange(1, len(self.test_corr_r) + 1), self.test_corr_r, s=3, label='Real Corr.', c='g')
275 | plt.scatter(np.arange(1, len(self.test_corr_f) + 1), self.test_corr_f, s=3, label='Fake Corr.', c='r')
276 | plt.xlabel('Label Index')
277 | plt.title('Test Correlation')
278 | plt.ylabel('Sample Corr.')
279 |
280 | plt.grid(True)
281 | plt.legend(fontsize=12)
282 |
283 | plt.show()
284 |
285 | def calc_centers(self):
286 | self.mu_real = np.mean(self.train_corr_r[-20:])
287 | self.mu_fake = np.mean(self.train_corr_f[-20:])
288 |
289 | def calc_distance(self):
290 | dist_real = distance(self.test_corr_r, self.mu_real, self.mu_fake)
291 | dist_fake = distance(self.test_corr_f, self.mu_real, self.mu_fake)
292 |
293 | return dist_fake, dist_real
294 |
295 | def calc_accuracy(self, val=None, print_res=True):
296 |
297 | if val is not None:
298 | dist = distance(val, self.mu_real, self.mu_fake)
299 | cls = np.argmin(dist, axis=1)
300 |
301 | return dist[0], cls[0]
302 |
303 | else:
304 | # Real - 0, Fake - 1
305 | dist_real = distance(self.test_corr_r, self.mu_real, self.mu_fake)
306 | dist_fake = distance(self.test_corr_f, self.mu_real, self.mu_fake)
307 |
308 | class_real = np.argmin(dist_real, axis=1) == 0
309 | class_fake = np.argmin(dist_fake, axis=1) == 1
310 |
311 | acc_real = class_real.sum() / len(class_real)
312 | acc_fake = class_fake.sum() / len(class_fake)
313 |
314 | if print_res:
315 | print("Accuracy by cluster means:")
316 | print(f" Real samples: {acc_real:.2f}")
317 | print(f" Fake samples: {acc_fake:.2f}")
318 | print(f" All samples: {(acc_real + acc_fake) / 2.:.2f}")
319 |
320 | return acc_fake, acc_real
321 |
322 | def show_prnu_density(self, title=None):
323 |
324 | corr_r = self.test_corr_r
325 | corr_f = self.test_corr_f
326 |
327 | fig, ax = plt.subplots(figsize=(4, 4))
328 | for val, data_type, mu in zip([corr_r, corr_f], ["Real", "Fake"], [self.mu_real, self.mu_fake]):
329 | hist = np.histogram(val, bins=100)
330 | width = (hist[1][-1] - hist[1][0]) / 100
331 | ax.bar(hist[1][1:], hist[0], width, alpha=0.5, label=f'{data_type}')
332 |
333 | ax.axvline(x=mu, ymin=0, ymax=np.max(hist[0]), linestyle="--", color='k',
334 | label=r'$\mu_{' + f'{data_type}' + '}$')
335 |
336 | ax.set_ylabel('Count')
337 | ax.set_xlabel(r'$\rho$')
338 | ax.legend()
339 | ax.grid()
340 | if title is not None:
341 | plt.title(title)
342 |
343 | fig.tight_layout()
344 | fig.show()
345 |
346 | def save_stats(self, path):
347 | self.calc_centers()
348 |
349 | data_dict = {'Fingerprint': self.fingerprint,
350 | 'Train Real': self.train_corr_r,
351 | 'Train Fake': self.train_corr_f,
352 | 'Loss': self.train_loss}
353 |
354 | torch.save(data_dict, path)
355 |
356 | def load_stats(self, path):
357 | if self.device.type == 'cpu':
358 | data_dict = torch.load(path, map_location=torch.device('cpu'))
359 | else:
360 | data_dict = torch.load(path)
361 |
362 | self.train_loss = data_dict['Loss']
363 | self.train_corr_r = data_dict['Train Real']
364 | self.train_corr_f = data_dict['Train Fake']
365 | self.fingerprint = data_dict['Fingerprint']
366 |
367 |
368 | class TrainerSingle(nn.Module):
369 | def __init__(self, hyperparams):
370 | super(TrainerSingle, self).__init__()
371 |
372 | # Hyperparameters
373 | self.device = hyperparams['Device']
374 | self.init_lr = hyperparams['LR']
375 | self.ch_i = hyperparams['Inp. Channel']
376 | self.ch_o = hyperparams['Out. Channel']
377 | self.arch = hyperparams['Arch.']
378 | self.depth = hyperparams['Depth']
379 | self.concat = np.array(hyperparams['Concat'])
380 | self.crop_size = hyperparams['Crop Size']
381 |
382 | self.train_corr_hp = []
383 | self.train_corr_lp = []
384 | self.train_loss = []
385 |
386 | self.noise_type = hyperparams['Noise Type']
387 | self.noise_std = hyperparams['Noise STD']
388 | self.noise_channel = hyperparams['Inp. Channel']
389 | self.noise = None
390 |
391 | d_h, n_h, d_w, n_w = calc_even_size(self.crop_size, self.depth)
392 | self.crop_size = (n_h - d_h, n_w - d_w)
393 | self.d_h, self.n_h = d_h, n_h
394 | self.d_w, self.n_w = d_w, n_w
395 |
396 | self.init_train()
397 |
398 | # Model initialization
399 | self.AE = model.Unet(self.device, self.ch_i, self.ch_o, self.arch,
400 | activ='leak', depth=self.depth, concat=self.concat).to(self.device)
401 | self.optimizer = optim.AdamW(self.AE.parameters(), lr=self.init_lr)
402 |
403 | self.loss_mse = nn.MSELoss()
404 |
405 | def init_train(self): # Check what STD is better for initial noise
406 | self.noise = init_dummy(1, self.noise_type, self.crop_size, self.noise_channel)
407 |
408 | def prep_noise(self, var=-1):
409 | if var == -1:
410 | return self.noise + torch.randn_like(self.noise.detach()) * self.noise_std
411 | else:
412 | return self.noise + torch.randn_like(self.noise.detach()) * var
413 |
414 | def train_step_blank(self, blank):
415 | self.AE.train()
416 |
417 | self.optimizer.zero_grad()
418 |
419 | dmy = self.prep_noise().to(self.device)
420 |
421 | out = self.AE(dmy)
422 | loss = self.loss_mse(out, blank).mean()
423 |
424 | loss.backward()
425 | self.optimizer.step()
426 | self.train_loss.append(loss.item())
427 |
428 | return out.detach().cpu()
429 |
430 | def plot_loss_corr(self):
431 | plt.figure(figsize=(10, 2 * 6))
432 |
433 | plt.subplot(2, 1, 1)
434 | plt.scatter(np.arange(1, len(self.train_loss) + 1), self.train_loss, c='g')
435 | plt.grid(True)
436 | plt.xlabel('Epochs')
437 | plt.ylabel('Loss')
438 |
439 | plt.subplot(2, 1, 2)
440 | plt.scatter(np.arange(1, len(self.train_corr_hp) + 1), self.train_corr_hp, c='r', label='HP')
441 | plt.scatter(np.arange(1, len(self.train_corr_lp) + 1), self.train_corr_lp, c='g', label='LP')
442 | plt.grid(True)
443 | plt.xlabel('Epochs')
444 | plt.ylabel('Correlation')
445 |
446 | plt.legend()
447 | plt.show()
448 |
449 | def produce_fingerprint(self, np=True):
450 | self.AE.eval()
451 | with torch.no_grad():
452 | out = self.AE(self.prep_noise().to(self.device)[0:1])[0]
453 | out = 0.5 * out + .5
454 | if np:
455 | return out.cpu().numpy().transpose((1, 2, 0))
456 | else:
457 | return out
458 |
459 |
460 | def distance(arr, mu_a, mu_b):
461 | dist_arr2a = np.sqrt(((arr - mu_a) ** 2)).reshape((-1, 1))
462 | dist_arr2b = np.sqrt(((arr - mu_b) ** 2)).reshape((-1, 1))
463 | return np.concatenate((dist_arr2a, dist_arr2b), axis=1)
464 |
465 |
466 | def init_dummy(bs, noise_type, img_dims, ch_n, var=0.1):
467 | if noise_type == 'uniform':
468 | img = var * torch.rand((bs, ch_n, img_dims[0], img_dims[1]))
469 | elif noise_type == 'normal':
470 | img = var * torch.randn((bs, ch_n, img_dims[0], img_dims[1]))
471 | elif noise_type == 'mesh':
472 | assert ch_n == 2
473 | X, Y = np.meshgrid(np.arange(0, img_dims[1]) / float(img_dims[1] - 1),
474 | np.arange(0, img_dims[0]) / float(img_dims[0] - 1))
475 | meshgrid = np.concatenate([X[None, :], Y[None, :]])
476 | img = torch.tensor(meshgrid).unsqueeze(0).type(torch.float)
477 |
478 | elif noise_type == 'special':
479 | X, Y = np.meshgrid(np.arange(0, img_dims[1]) / float(img_dims[1] - 1),
480 | np.arange(0, img_dims[0]) / float(img_dims[0] - 1))
481 | meshgrid = np.concatenate([X[None, :], Y[None, :]])
482 | img = torch.tensor(meshgrid).unsqueeze(0).type(torch.float)
483 | img = torch.cat((img, torch.ones((1, 1, img_dims[0], img_dims[1]))), dim=1)
484 | return img
485 |
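A worked example of the nearest-centre rule that `calc_accuracy` applies to the per-image correlations. The centre values and correlations below are made up for illustration; the real centres come from the last training batches via `calc_centers`.

```
import numpy as np

def distance(arr, mu_a, mu_b):
    # same helper as above: |corr - mu| against each cluster centre
    dist_arr2a = np.sqrt((arr - mu_a) ** 2).reshape((-1, 1))
    dist_arr2b = np.sqrt((arr - mu_b) ** 2).reshape((-1, 1))
    return np.concatenate((dist_arr2a, dist_arr2b), axis=1)

mu_real, mu_fake = 0.02, 0.15              # cluster centres (illustrative)
corr = np.array([0.01, 0.18, 0.09])        # per-image correlations (illustrative)

cls = np.argmin(distance(corr, mu_real, mu_fake), axis=1)
print(cls)   # [0 1 1] -> column 0 = closer to mu_real, column 1 = closer to mu_fake
```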
--------------------------------------------------------------------------------
/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import cv2 as cv
3 | import numpy as np
4 | from matplotlib import pyplot as plt
5 | from PIL import Image
6 |
7 |
8 |
9 | def check_existence(path, create=False):
10 | if not os.path.exists(path):
11 | 
12 |         if create:
13 |             print("Creating check point directory - " + str(path))
14 |             os.mkdir(path)
15 | else:
16 | print(f'{str(path)}\nPath not found')
17 | exit()
18 |
19 |
20 | def show_image(img, counters=None, centers=None, rectangles=None, title='Image',
21 | path=None,
22 | colors='gray'): # Simple function that shows image in pre set image size without axis and grid
23 | plt.figure(figsize=(6, 6), frameon=False)
24 | img_t = img.copy()
25 |
26 | if centers is not None:
27 | for idx in range(1, len(centers) + 1):
28 | cv.putText(img_t, str(idx), tuple(centers[idx - 1]), cv.FONT_HERSHEY_SIMPLEX, 0.2, (255, 0, 0), 3,
29 | cv.LINE_AA)
30 |
31 | if rectangles is not None:
32 | for r in rectangles:
33 | cv.rectangle(img_t, r[0], r[1], color=(255, 0, 0), thickness=3)
34 |
35 | if counters is not None:
36 | for cnt in counters:
37 | cv.drawContours(img_t, cnt, -1, (255, 0, 0), thickness=3)
38 |
39 | if len(img.shape) < 3:
40 | plt.imshow(img_t, colors)
41 | else:
42 | plt.imshow(img_t)
43 | plt.grid(False)
44 | plt.axis(False)
45 | if path:
46 | plt.tight_layout()
47 | plt.savefig(path)
48 | plt.close()
49 |
50 | else:
51 | plt.title(title)
52 | plt.show()
53 |
54 |
55 | def calc_even_size(img_size, d):
56 | d = int(np.power(2, d))
57 | h, w = img_size
58 |
59 | if h % 2 != 0:
60 | h -= 1
61 | if w % 2 != 0:
62 | w -= 1
63 |
64 | d_h = (h % d) // 2
65 | d_w = (w % d) // 2
66 |
67 | return d_h, h - d_h, d_w, w - d_w
68 |
69 |
70 | def make_even(img, d):  # Force image size to be divisible by 2 ** d
71 | d_h, n_h, d_w, n_w = calc_even_size(img.shape[2:], d)
72 | return img[:, :, d_h:n_h, d_w:n_w]
73 |
74 |
75 | def produce_spectrum(img_np):
76 | img_np = (img_np - img_np.min()) / (img_np.max() - img_np.min())
77 | img_fft = np.fft.fft2(img_np, axes=(0, 1), norm='forward')
78 | magnitude_spectrum = 20 * np.log(np.abs(np.fft.fftshift(img_fft)).mean(2) + 1e-8)
79 |
80 | return magnitude_spectrum
81 |
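A worked example of `calc_even_size`, which trims the crop so that both sides stay compatible with the U-Net's `Depth` poolings:

```
from utils import calc_even_size

# 256 is already divisible by 2 ** 4, so nothing is trimmed.
print(calc_even_size((256, 256), 4))   # (0, 256, 0, 256)

# 250 is not: symmetric margins of 5 px leave an effective 240 x 240 crop.
print(calc_even_size((250, 250), 4))   # (5, 245, 5, 245)
```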
--------------------------------------------------------------------------------