├── core ├── __init__.py ├── test.py └── train.py ├── models ├── __init__.py ├── functions.py ├── alexnet.py └── model.py ├── requirements.txt ├── datasets ├── __init__.py ├── office.py ├── syndigits.py ├── officecaltech.py ├── mnist.py ├── svhn.py ├── synsigns.py ├── gtsrb.py ├── mnistm.py └── gtsrb_prepare.py ├── LICENSE ├── .gitignore ├── experiments ├── office31_10.py ├── office.py ├── mnist_mnistm.py ├── synsigns_gtsrb.py ├── syndigits_svhn.py ├── synsigns_gtsrb_src_only.py └── svhn_mnist.py ├── README.md ├── utils └── utils.py └── dann.ipynb /core/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /models/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | Pillow 3 | tensorboardX 4 | torch 5 | torchvision 6 | -------------------------------------------------------------------------------- /datasets/__init__.py: -------------------------------------------------------------------------------- 1 | from .mnist import get_mnist 2 | from .mnistm import get_mnistm 3 | from .svhn import get_svhn 4 | 5 | __all__ = (get_mnist, get_svhn, get_mnistm) 6 | -------------------------------------------------------------------------------- /models/functions.py: -------------------------------------------------------------------------------- 1 | from torch.autograd import Function 2 | 3 | 4 | class ReverseLayerF(Function): 5 | 6 | @staticmethod 7 | def forward(ctx, x, alpha): 8 | ctx.alpha = alpha 9 | 10 | return x.view_as(x) 11 | 12 | @staticmethod 13 | def backward(ctx, grad_output): 14 | output = grad_output.neg() * ctx.alpha 15 | return output, None 16 | 17 | 18 | 
-------------------------------------------------------------------------------- /datasets/office.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for Office.""" 2 | 3 | import os 4 | import torch 5 | from torchvision import datasets, transforms 6 | import torch.utils.data as data 7 | 8 | 9 | def get_office(dataset_root, batch_size, category): 10 | """Get Office datasets loader.""" 11 | # image pre-processing 12 | pre_process = transforms.Compose([ 13 | transforms.Resize(227), 14 | transforms.ToTensor(), 15 | transforms.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)) 16 | ]) 17 | 18 | # datasets and data_loader 19 | office_dataset = datasets.ImageFolder( 20 | os.path.join(dataset_root, 'office', category, 'images'), transform=pre_process) 21 | 22 | office_dataloader = torch.utils.data.DataLoader( 23 | dataset=office_dataset, batch_size=batch_size, shuffle=True, num_workers=0) 24 | 25 | return office_dataloader -------------------------------------------------------------------------------- /datasets/syndigits.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for syn-digits.""" 2 | 3 | import os 4 | import torch 5 | from torchvision import datasets, transforms 6 | import torch.utils.data as data 7 | 8 | 9 | def get_syndigits(dataset_root, batch_size, train): 10 | """Get synth digits datasets loader.""" 11 | # image pre-processing 12 | pre_process = transforms.Compose([ 13 | transforms.Resize(32), 14 | transforms.ToTensor(), 15 | transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)) 16 | ]) 17 | 18 | # datasets and data loader 19 | if train: 20 | syndigits_dataset = datasets.ImageFolder(os.path.join(dataset_root, 'TRAIN_separate_dirs'), transform=pre_process) 21 | else: 22 | syndigits_dataset = datasets.ImageFolder(os.path.join(dataset_root, 'TEST_separate_dirs'), transform=pre_process) 23 | 24 
| syndigits_dataloader = torch.utils.data.DataLoader( 25 | dataset=syndigits_dataset, batch_size=batch_size, shuffle=True, num_workers=0) 26 | 27 | return syndigits_dataloader -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Cheng Zhen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /datasets/officecaltech.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for Office_Caltech_10.""" 2 | 3 | import torch 4 | from torchvision import datasets, transforms 5 | import torch.utils.data as data 6 | import os 7 | 8 | 9 | def get_officecaltech(dataset_root, batch_size, category): 10 | """Get Office_Caltech_10 datasets loader.""" 11 | # image pre-processing 12 | pre_process = transforms.Compose([transforms.Resize(227), 13 | transforms.ToTensor(), 14 | transforms.Normalize( 15 | mean=(0.485, 0.456, 0.406), 16 | std=(0.229, 0.224, 0.225) 17 | )]) 18 | 19 | # datasets and data_loader 20 | officecaltech_dataset = datasets.ImageFolder( 21 | os.path.join(dataset_root, 'office_caltech_10', category), 22 | transform=pre_process) 23 | 24 | officecaltech_dataloader = torch.utils.data.DataLoader( 25 | dataset=officecaltech_dataset, 26 | batch_size=batch_size, 27 | shuffle=True, 28 | num_workers=4) 29 | 30 | return officecaltech_dataloader -------------------------------------------------------------------------------- /datasets/mnist.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for MNIST.""" 2 | 3 | 4 | import torch 5 | from torchvision import datasets, transforms 6 | import os 7 | 8 | def get_mnist(dataset_root, batch_size, train): 9 | """Get MNIST datasets loader.""" 10 | # image pre-processing 11 | pre_process = transforms.Compose([transforms.Resize(32), # different img size settings for mnist(28) and svhn(32). 
12 | transforms.ToTensor(), 13 | transforms.Normalize( 14 | mean=(0.5, 0.5, 0.5), 15 | std=(0.5, 0.5, 0.5) 16 | )]) 17 | 18 | # datasets and data loader 19 | mnist_dataset = datasets.MNIST(root=os.path.join(dataset_root), 20 | train=train, 21 | transform=pre_process, 22 | download=False) 23 | 24 | 25 | mnist_data_loader = torch.utils.data.DataLoader( 26 | dataset=mnist_dataset, 27 | batch_size=batch_size, 28 | shuffle=True, 29 | drop_last=True, 30 | num_workers=8) 31 | 32 | return mnist_data_loader -------------------------------------------------------------------------------- /datasets/svhn.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for SVHN.""" 2 | 3 | import torch 4 | from torchvision import datasets, transforms 5 | import os 6 | 7 | 8 | def get_svhn(dataset_root, batch_size, train): 9 | """Get SVHN datasets loader.""" 10 | # image pre-processing 11 | pre_process = transforms.Compose([transforms.Resize(32), 12 | transforms.ToTensor(), 13 | transforms.Normalize( 14 | mean=(0.5, 0.5, 0.5), 15 | std=(0.5, 0.5, 0.5) 16 | )]) 17 | 18 | # datasets and data loader 19 | if train: 20 | svhn_dataset = datasets.SVHN(root=os.path.join(dataset_root), 21 | split='train', 22 | transform=pre_process, 23 | download=True) 24 | else: 25 | svhn_dataset = datasets.SVHN(root=os.path.join(dataset_root), 26 | split='test', 27 | transform=pre_process, 28 | download=True) 29 | 30 | svhn_data_loader = torch.utils.data.DataLoader( 31 | dataset=svhn_dataset, 32 | batch_size=batch_size, 33 | shuffle=True, 34 | drop_last=True) 35 | 36 | return svhn_data_loader 37 | -------------------------------------------------------------------------------- /core/test.py: -------------------------------------------------------------------------------- 1 | import torch.utils.data 2 | import torch.nn as nn 3 | 4 | def test(model, data_loader, device, flag): 5 | """Evaluate model for dataset.""" 6 | # set eval state for Dropout 
and BN layers 7 | model.eval() 8 | 9 | # init loss and accuracy 10 | loss_ = 0.0 11 | acc_ = 0.0 12 | acc_domain_ = 0.0 13 | n_total = 0 14 | 15 | # set loss function 16 | criterion = nn.CrossEntropyLoss() 17 | 18 | # evaluate network 19 | for (images, labels) in data_loader: 20 | images = images.to(device) 21 | labels = labels.to(device) #labels = labels.squeeze(1) 22 | size = len(labels) 23 | if flag == 'target': 24 | labels_domain = torch.ones(size).long().to(device) 25 | else: 26 | labels_domain = torch.zeros(size).long().to(device) 27 | 28 | preds, domain = model(images, alpha=0) 29 | 30 | loss_ += criterion(preds, labels).item() 31 | 32 | pred_cls = preds.data.max(1)[1] 33 | pred_domain = domain.data.max(1)[1] 34 | acc_ += pred_cls.eq(labels.data).sum().item() 35 | acc_domain_ += pred_domain.eq(labels_domain.data).sum().item() 36 | n_total += size 37 | 38 | loss = loss_ / n_total 39 | acc = acc_ / n_total 40 | acc_domain = acc_domain_ / n_total 41 | 42 | print("Avg Loss = {:.6f}, Avg Accuracy = {:.2%}, {}/{}, Avg Domain Accuracy = {:2%}".format(loss, acc, acc_, n_total, acc_domain)) 43 | 44 | return loss, acc, acc_domain 45 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | _gsdata_ 6 | 7 | # C extensions 8 | *.so 9 | 10 | # Distribution / packaging 11 | .Python 12 | env/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib/ 20 | lib64/ 21 | parts/ 22 | sdist/ 23 | var/ 24 | wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # Sphinx documentation 66 | docs/_build/ 67 | 68 | # PyBuilder 69 | target/ 70 | 71 | # Jupyter Notebook 72 | .ipynb_checkpoints 73 | 74 | # pyenv 75 | .python-version 76 | 77 | # celery beat schedule file 78 | celerybeat-schedule 79 | 80 | # SageMath parsed files 81 | *.sage.py 82 | 83 | # dotenv 84 | .env 85 | 86 | # virtualenv 87 | .venv 88 | venv/ 89 | ENV/ 90 | 91 | # Spyder project settings 92 | .spyderproject 93 | .spyproject 94 | 95 | # Rope project settings 96 | .ropeproject 97 | 98 | # mkdocs documentation 99 | /site 100 | 101 | # mypy 102 | .mypy_cache/ 103 | 104 | # personal 105 | .idea 106 | .DS_Store 107 | main_legacy.py 108 | test.ipynb 109 | -------------------------------------------------------------------------------- /datasets/synsigns.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for syn-signs.""" 2 | 3 | import os 4 | import torch 5 | from torchvision import datasets, transforms 6 | import torch.utils.data as data 7 | from PIL import Image 8 | 9 | 10 | class GetLoader(data.Dataset): 11 | def __init__(self, data_root, data_list, transform=None): 12 | self.root = data_root 13 | self.transform = transform 14 | 15 | f = open(data_list, 'r') 16 | data_list = f.readlines() 17 | f.close() 18 | 19 | self.n_data = len(data_list) 20 | 21 | self.img_paths = [] 22 | self.img_labels = [] 23 | 24 | for data in data_list: 25 | data = data.split(' ') 26 | self.img_paths.append(data[0]) 27 | 
self.img_labels.append(data[1]) 28 | 29 | def __getitem__(self, item): 30 | img_paths, labels = self.img_paths[item], self.img_labels[item] 31 | imgs = Image.open(os.path.join(self.root, img_paths)).convert('RGB') 32 | 33 | if self.transform is not None: 34 | imgs = self.transform(imgs) 35 | labels = int(labels) 36 | 37 | return imgs, labels 38 | 39 | def __len__(self): 40 | return self.n_data 41 | 42 | def get_synsigns(dataset_root, batch_size, train): 43 | """Get Synthetic Signs datasets loader.""" 44 | # image pre-processing 45 | pre_process = transforms.Compose([ 46 | transforms.Resize((40, 40)), 47 | transforms.ToTensor(), 48 | transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)) 49 | ]) 50 | 51 | # datasets and data_loader 52 | train_list = os.path.join(dataset_root, 'train_labelling.txt') 53 | synsigns_dataset = GetLoader( 54 | data_root=os.path.join(dataset_root), 55 | data_list=train_list, 56 | transform=pre_process) 57 | 58 | synsigns_dataloader = torch.utils.data.DataLoader( 59 | dataset=synsigns_dataset, batch_size=batch_size, shuffle=True, num_workers=8) 60 | 61 | return synsigns_dataloader -------------------------------------------------------------------------------- /experiments/office31_10.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.append('../') 5 | from core.train import train_dann 6 | from core.test import test 7 | from models.model import AlexModel 8 | 9 | from utils.utils import get_data_loader, init_model, init_random_seed 10 | 11 | 12 | class Config(object): 13 | # params for path 14 | dataset_root = os.path.expanduser(os.path.join('~', 'Datasets')) 15 | model_root = os.path.expanduser(os.path.join('~', 'Models', 'pytorch-DANN')) 16 | 17 | # params for datasets and data loader 18 | batch_size = 32 19 | 20 | # params for source dataset 21 | src_dataset = "amazon31" 22 | src_model_trained = True 23 | src_classifier_restore = 
os.path.join(model_root, src_dataset + '-source-classifier-final.pt') 24 | 25 | # params for target dataset 26 | tgt_dataset = "webcam10" 27 | tgt_model_trained = True 28 | dann_restore = os.path.join(model_root, src_dataset + '-' + tgt_dataset + '-dann-final.pt') 29 | 30 | # params for pretrain 31 | num_epochs_src = 100 32 | log_step_src = 5 33 | save_step_src = 50 34 | eval_step_src = 20 35 | 36 | # params for training dann 37 | 38 | # for office 39 | num_epochs = 1000 40 | log_step = 10 # iters 41 | save_step = 500 42 | eval_step = 1 # epochs 43 | 44 | manual_seed = 8888 45 | alpha = 0 46 | 47 | # params for optimizing models 48 | lr = 2e-4 49 | 50 | 51 | params = Config() 52 | 53 | # init random seed 54 | init_random_seed(params.manual_seed) 55 | 56 | # load dataset 57 | src_data_loader = get_data_loader(params.src_dataset, params.dataset_root, params.batch_size) 58 | tgt_data_loader = get_data_loader(params.tgt_dataset, params.dataset_root, params.batch_size) 59 | 60 | # load dann model 61 | dann = init_model(net=AlexModel(), restore=None) 62 | 63 | # train dann model 64 | print("Start training dann model.") 65 | 66 | if not (dann.restored and params.dann_restore): 67 | dann = train_dann(dann, params, src_data_loader, tgt_data_loader, tgt_data_loader) 68 | 69 | print('done') 70 | -------------------------------------------------------------------------------- /experiments/office.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | import torch 5 | 6 | sys.path.append('../') 7 | from core.train import train_dann 8 | from core.test import test 9 | from models.model import AlexModel 10 | from utils.utils import get_data_loader, init_model, init_random_seed 11 | 12 | 13 | class Config(object): 14 | # params for path 15 | dataset_root = os.path.expanduser(os.path.join('~', 'Datasets')) 16 | model_root = os.path.expanduser(os.path.join('~', 'Models', 'pytorch-dann')) 17 | 18 | # params for datasets 
and data loader 19 | batch_size = 32 20 | 21 | # params for source dataset 22 | src_dataset = "amazon31" 23 | src_model_trained = True 24 | src_classifier_restore = os.path.join(model_root, src_dataset + '-source-classifier-final.pt') 25 | 26 | # params for target dataset 27 | tgt_dataset = "webcam31" 28 | tgt_model_trained = True 29 | dann_restore = os.path.join(model_root, src_dataset + '-' + tgt_dataset + '-dann-final.pt') 30 | 31 | # params for pretrain 32 | num_epochs_src = 100 33 | log_step_src = 5 34 | save_step_src = 50 35 | eval_step_src = 10 36 | 37 | # params for training dann 38 | gpu_id = '0' 39 | 40 | ## for office 41 | num_epochs = 1000 42 | log_step = 10 # iters 43 | save_step = 500 44 | eval_step = 10 # epochs 45 | 46 | manual_seed = 8888 47 | alpha = 0 48 | 49 | # params for optimizing models 50 | lr = 2e-4 51 | 52 | 53 | params = Config() 54 | 55 | # init random seed 56 | init_random_seed(params.manual_seed) 57 | 58 | # init device 59 | device = torch.device("cuda:" + params.gpu_id if torch.cuda.is_available() else "cpu") 60 | 61 | # load dataset 62 | src_data_loader = get_data_loader(params.src_dataset, params.dataset_root, params.batch_size) 63 | tgt_data_loader = get_data_loader(params.tgt_dataset, params.dataset_root, params.batch_size) 64 | 65 | # load dann model 66 | dann = init_model(net=AlexModel(), restore=None) 67 | 68 | # train dann model 69 | print("Start training dann model.") 70 | 71 | if not (dann.restored and params.dann_restore): 72 | dann = train_dann(dann, params, src_data_loader, tgt_data_loader, tgt_data_loader, device) 73 | 74 | print('done') -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PyTorch-DANN 2 | 3 | A PyTorch implementation for paper *[Unsupervised Domain Adaptation by Backpropagation](http://sites.skoltech.ru/compvision/projects/grl/)* 4 | 5 | InProceedings (icml2015-ganin15) 6 | 
Ganin, Y. & Lempitsky, V. 7 | Unsupervised Domain Adaptation by Backpropagation 8 | Proceedings of the 32nd International Conference on Machine Learning, 2015 9 | 10 | ## Environment 11 | 12 | - Python 3.6 13 | - PyTorch 1.0 14 | 15 | ## Note 16 | 17 | - `MNISTmodel()` 18 | - basically the same network structure as proposed in the paper, except for adding a dropout layer in the feature extractor 19 | - a large gap exists between with and w/o the dropout layer 20 | - better result than paper 21 | - `SVHNmodel()` 22 | - network structure proposed in the paper may be wrong for both 32x32 and 28x28 inputs 23 | - change last conv layer's filter to 4x4, get similar (actually higher) result 24 | - `GTSRBmodel()` 25 | - `AlexModel` 26 | - not successful, mainly due to the pretrained model difference 27 | 28 | ## Result 29 | 30 | | | MNIST-MNISTM | SVHN-MNIST | SYNDIGITS-SVHN | SYNSIGNS-GTSRB | 31 | | :------------------: | :------------: | :--------: |:-------------: |:-------------: | 32 | | Source Only | 0.5225 | 0.5490 | 0.8674 | 0.7900 | 33 | | DANN(paper) | 0.7666 | 0.7385 | 0.9109 | 0.8865 | 34 | | This Repo Source Only| - | - | - | 0.9100 | 35 | | This Repo | 0.8400 | 0.7339 | 0.8200 | - | 36 | 37 | | | AMAZON-WEBCAM | DSLR-WEBCAM | WEBCAM-DSLR | 38 | | :------------------: | :------------: |:-----------: |:----------: | 39 | | Source Only | 0.6420 | 0.9610 | 0.9780 | 40 | | DANN(paper) | 0.7300 | 0.9640 | 0.9920 | 41 | | This Repo Source Only| - | - | - | 42 | | This Repo | 0.6528 | - | - | 43 | 44 | ## Credit 45 | 46 | - 47 | - 48 | - 49 | -------------------------------------------------------------------------------- /datasets/gtsrb.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for GTSRB. Pickle format and use roi info. 
2 | """ 3 | 4 | import os 5 | import torch 6 | from torchvision import transforms 7 | import torch.utils.data as data 8 | from torch.utils.data.sampler import SubsetRandomSampler 9 | import numpy as np 10 | import pickle 11 | from PIL import Image 12 | 13 | class GTSRB(data.Dataset): 14 | def __init__(self, filepath, transform=None): 15 | with open(filepath,'rb') as f: 16 | self.data = pickle.load(f) 17 | self.keys = ['images', 'labels'] 18 | self.images = self.data[self.keys[0]] 19 | self.labels = self.data[self.keys[1]] 20 | self.transform = transform 21 | self.n_data = len(self.labels) 22 | 23 | def __getitem__(self, index): 24 | image, label = self.images[index], self.labels[index] 25 | image = Image.fromarray(np.uint8(image)) 26 | if self.transform is not None: 27 | image = self.transform(image) 28 | label = int(label) 29 | return image, label 30 | 31 | def __len__(self): 32 | return self.n_data 33 | 34 | def get_gtsrb(dataset_root, batch_size, train): 35 | """Get GTSRB datasets loader.""" 36 | shuffle_dataset = True 37 | random_seed = 42 38 | train_size = 31367 39 | 40 | # image pre-processing 41 | pre_process = transforms.Compose([ 42 | transforms.Resize((40, 40)), 43 | transforms.ToTensor(), 44 | transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)) 45 | ]) 46 | 47 | # datasets and data_loader 48 | gtsrb_dataset = GTSRB(os.path.join(dataset_root, 'gtsrb_train.p'), transform=pre_process) 49 | 50 | dataset_size = len(gtsrb_dataset) 51 | indices = list(range(dataset_size)) 52 | if shuffle_dataset: 53 | #np.random.seed(random_seed) 54 | np.random.seed() 55 | np.random.shuffle(indices) 56 | train_indices, val_indices = indices[:train_size], indices[train_size:] 57 | 58 | # Creating PT data samplers and loaders: 59 | train_sampler = SubsetRandomSampler(train_indices) 60 | valid_sampler = SubsetRandomSampler(val_indices) 61 | 62 | if train: 63 | gtsrb_dataloader_train = torch.utils.data.DataLoader(gtsrb_dataset, batch_size=batch_size, 
sampler=train_sampler) 64 | return gtsrb_dataloader_train 65 | else: 66 | gtsrb_dataloader_test = torch.utils.data.DataLoader(gtsrb_dataset, batch_size=batch_size, sampler=valid_sampler) 67 | return gtsrb_dataloader_test -------------------------------------------------------------------------------- /datasets/mnistm.py: -------------------------------------------------------------------------------- 1 | """Dataset setting and data loader for MNIST_M.""" 2 | 3 | import torch 4 | from torchvision import transforms 5 | import torch.utils.data as data 6 | from PIL import Image 7 | import os 8 | 9 | class GetLoader(data.Dataset): 10 | def __init__(self, data_root, data_list, transform=None): 11 | self.root = data_root 12 | self.transform = transform 13 | 14 | f = open(data_list, 'r') 15 | data_list = f.readlines() 16 | f.close() 17 | 18 | self.n_data = len(data_list) 19 | 20 | self.img_paths = [] 21 | self.img_labels = [] 22 | 23 | for data in data_list: 24 | self.img_paths.append(data[:-3]) 25 | self.img_labels.append(data[-2]) 26 | 27 | def __getitem__(self, item): 28 | img_paths, labels = self.img_paths[item], self.img_labels[item] 29 | imgs = Image.open(os.path.join(self.root, img_paths)).convert('RGB') 30 | 31 | if self.transform is not None: 32 | imgs = self.transform(imgs) 33 | labels = int(labels) 34 | 35 | return imgs, labels 36 | 37 | def __len__(self): 38 | return self.n_data 39 | 40 | def get_mnistm(dataset_root, batch_size, train): 41 | """Get MNISTM datasets loader.""" 42 | # image pre-processing 43 | pre_process = transforms.Compose([transforms.Resize(28), 44 | transforms.ToTensor(), 45 | transforms.Normalize( 46 | mean=(0.5, 0.5, 0.5), 47 | std=(0.5, 0.5, 0.5) 48 | )]) 49 | 50 | # datasets and data_loader 51 | if train: 52 | train_list = os.path.join(dataset_root, 'mnist_m','mnist_m_train_labels.txt') 53 | mnistm_dataset = GetLoader( 54 | data_root=os.path.join(dataset_root, 'mnist_m', 'mnist_m_train'), 55 | data_list=train_list, 56 | 
transform=pre_process) 57 | else: 58 | train_list = os.path.join(dataset_root, 'mnist_m', 'mnist_m_test_labels.txt') 59 | mnistm_dataset = GetLoader( 60 | data_root=os.path.join(dataset_root, 'mnist_m', 'mnist_m_test'), 61 | data_list=train_list, 62 | transform=pre_process) 63 | 64 | mnistm_dataloader = torch.utils.data.DataLoader( 65 | dataset=mnistm_dataset, 66 | batch_size=batch_size, 67 | shuffle=True, 68 | num_workers=8) 69 | 70 | return mnistm_dataloader -------------------------------------------------------------------------------- /experiments/mnist_mnistm.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | import torch 5 | sys.path.append('../') 6 | from models.model import MNISTmodel, MNISTmodel_plain 7 | from core.train import train_dann 8 | from utils.utils import get_data_loader, init_model, init_random_seed 9 | 10 | 11 | class Config(object): 12 | # params for path 13 | dataset_root = os.path.expanduser(os.path.join('~', 'Datasets')) 14 | model_root = os.path.expanduser(os.path.join('~', 'Models', 'pytorch-DANN')) 15 | 16 | # params for datasets and data loader 17 | batch_size = 64 18 | 19 | # params for source dataset 20 | src_dataset = "mnist" 21 | src_model_trained = True 22 | src_classifier_restore = os.path.join(model_root, src_dataset + '-source-classifier-final.pt') 23 | class_num_src = 31 24 | 25 | # params for target dataset 26 | tgt_dataset = "mnistm" 27 | tgt_model_trained = True 28 | dann_restore = os.path.join(model_root, src_dataset + '-' + tgt_dataset + '-dann-final.pt') 29 | 30 | # params for pretrain 31 | num_epochs_src = 100 32 | log_step_src = 10 33 | save_step_src = 50 34 | eval_step_src = 20 35 | 36 | # params for training dann 37 | gpu_id = '0' 38 | 39 | ## for digit 40 | num_epochs = 100 41 | log_step = 20 42 | save_step = 50 43 | eval_step = 5 44 | 45 | ## for office 46 | # num_epochs = 1000 47 | # log_step = 10 # iters 48 | # save_step = 500 49 | # 
eval_step = 5 # epochs 50 | 51 | manual_seed = 8888 52 | alpha = 0 53 | 54 | # params for optimizing models 55 | lr = 2e-4 56 | 57 | 58 | params = Config() 59 | 60 | # init random seed 61 | init_random_seed(params.manual_seed) 62 | 63 | # init device 64 | device = torch.device("cuda:" + params.gpu_id if torch.cuda.is_available() else "cpu") 65 | 66 | # load dataset 67 | src_data_loader = get_data_loader(params.src_dataset, params.dataset_root, params.batch_size, train=True) 68 | src_data_loader_eval = get_data_loader(params.src_dataset, params.dataset_root, params.batch_size, train=False) 69 | tgt_data_loader = get_data_loader(params.tgt_dataset, params.dataset_root, params.batch_size, train=True) 70 | tgt_data_loader_eval = get_data_loader(params.tgt_dataset, params.dataset_root, params.batch_size, train=False) 71 | 72 | # load dann model 73 | dann = init_model(net=MNISTmodel_plain(), restore=None) 74 | 75 | # train dann model 76 | print("Training dann model") 77 | if not (dann.restored and params.dann_restore): 78 | dann = train_dann(dann, params, src_data_loader, tgt_data_loader, tgt_data_loader_eval, device) -------------------------------------------------------------------------------- /experiments/synsigns_gtsrb.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import datetime 4 | from tensorboardX import SummaryWriter 5 | 6 | import torch 7 | sys.path.append('../') 8 | from models.model import GTSRBmodel 9 | from core.train import train_dann 10 | from utils.utils import get_data_loader, init_model, init_random_seed 11 | 12 | class Config(object): 13 | # params for path 14 | model_name = "synsigns-gtsrb" 15 | model_base = '/home/wogong/models/pytorch-dann' 16 | note = '40-bn' 17 | model_root = os.path.join(model_base, model_name, note + '_' + datetime.datetime.now().strftime('%m%d_%H%M%S')) 18 | os.makedirs(model_root) 19 | config = os.path.join(model_root, 'config.txt') 20 | finetune_flag = 
False 21 | lr_adjust_flag = 'simple' 22 | src_only_flag = False 23 | 24 | # params for datasets and data loader 25 | batch_size = 128 26 | 27 | # params for source dataset 28 | src_dataset = "synsigns" 29 | src_image_root = os.path.join('/home/wogong/datasets', 'synsigns') 30 | src_model_trained = True 31 | src_classifier_restore = os.path.join(model_root, src_dataset + '-source-classifier-final.pt') 32 | 33 | # params for target dataset 34 | tgt_dataset = "gtsrb" 35 | tgt_image_root = os.path.join('/home/wogong/datasets', 'gtsrb') 36 | tgt_model_trained = True 37 | dann_restore = os.path.join(model_root, src_dataset + '-' + tgt_dataset + '-dann-final.pt') 38 | 39 | # params for GPU device 40 | gpu_id = '0' 41 | 42 | ## for digit 43 | num_epochs = 200 44 | log_step = 200 45 | save_step = 100 46 | eval_step = 1 47 | 48 | manual_seed = 42 49 | alpha = 0 50 | 51 | # params for SGD optimizer 52 | lr = 0.01 53 | momentum = 0.9 54 | weight_decay = 1e-6 55 | 56 | def __init__(self): 57 | public_props = (name for name in dir(self) if not name.startswith('_')) 58 | with open(self.config, 'w') as f: 59 | for name in public_props: 60 | f.write(name + ': ' + str(getattr(self, name)) + '\n') 61 | 62 | params = Config() 63 | logger = SummaryWriter(params.model_root) 64 | device = torch.device("cuda:" + params.gpu_id if torch.cuda.is_available() else "cpu") 65 | 66 | # init random seed 67 | init_random_seed(params.manual_seed) 68 | 69 | # load dataset 70 | src_data_loader = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=True) 71 | src_data_loader_eval = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=False) 72 | tgt_data_loader = get_data_loader(params.tgt_dataset, params.tgt_image_root, params.batch_size, train=True) 73 | tgt_data_loader_eval = get_data_loader(params.tgt_dataset, params.tgt_image_root, params.batch_size, train=False) 74 | 75 | # load dann model 76 | dann = init_model(net=GTSRBmodel(), 
"""DANN experiment: Synthetic Digits (source) -> SVHN (target)."""

import os
import sys
import datetime
from tensorboardX import SummaryWriter

import torch
sys.path.append('../')
from models.model import SVHNmodel
from core.train import train_dann
from utils.utils import get_data_loader, init_model, init_random_seed


class Config(object):
    """Experiment configuration; all values are class attributes so they are
    fixed at import time and dumped to config.txt by __init__."""

    # params for path
    model_name = "syndigits-svhn"
    model_base = '/home/wogong/models/pytorch-dann'
    note = 'default'
    # one timestamped run directory per process
    model_root = os.path.join(model_base, model_name, note + '_' + datetime.datetime.now().strftime('%m%d_%H%M%S'))
    # NOTE(review): runs at class-definition (import) time and raises if the
    # directory already exists — relies on the timestamp for uniqueness.
    os.makedirs(model_root)
    config = os.path.join(model_root, 'config.txt')
    finetune_flag = False
    lr_adjust_flag = 'simple'  # 'simple' selects adjust_learning_rate in core.train
    src_only_flag = False      # True would train on source classification loss only

    # params for datasets and data loader
    batch_size = 128

    # params for source dataset
    src_dataset = "syndigits"
    src_image_root = os.path.join('/home/wogong/datasets', 'syndigits')
    src_model_trained = True
    src_classifier_restore = os.path.join(model_root, src_dataset + '-source-classifier-final.pt')

    # params for target dataset
    tgt_dataset = "svhn"
    tgt_image_root = os.path.join('/home/wogong/datasets', 'svhn')
    tgt_model_trained = True
    dann_restore = os.path.join(model_root, src_dataset + '-' + tgt_dataset + '-dann-final.pt')

    # params for GPU device
    gpu_id = '0'

    ## for digit
    num_epochs = 200
    log_step = 200   # iterations between console log lines
    save_step = 100  # epochs between checkpoints
    eval_step = 1    # epochs between evaluations

    manual_seed = 42
    alpha = 0  # placeholder; the GRL alpha is recomputed per-step in train_dann

    # params for SGD optimizer
    lr = 0.01
    momentum = 0.9
    weight_decay = 1e-6

    def __init__(self):
        """Dump every public config attribute to <model_root>/config.txt."""
        public_props = (name for name in dir(self) if not name.startswith('_'))
        with open(self.config, 'w') as f:
            for name in public_props:
                f.write(name + ': ' + str(getattr(self, name)) + '\n')

params = Config()
logger = SummaryWriter(params.model_root)
device = torch.device("cuda:" + params.gpu_id if torch.cuda.is_available() else "cpu")

# init random seed
init_random_seed(params.manual_seed)

# load dataset
src_data_loader = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=True)
src_data_loader_eval = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=False)
tgt_data_loader = get_data_loader(params.tgt_dataset, params.tgt_image_root, params.batch_size, train=True)
tgt_data_loader_eval = get_data_loader(params.tgt_dataset, params.tgt_image_root, params.batch_size, train=False)

# load dann model
# restore=None here, so dann.restored stays False and training always runs
dann = init_model(net=SVHNmodel(), restore=None)

# train dann model
print("Training dann model")
if not (dann.restored and params.dann_restore):
    dann = train_dann(dann, params, src_data_loader, tgt_data_loader, tgt_data_loader_eval, device, logger)
'/home/wogong/models/pytorch-dann' 16 | note = 'srconly' 17 | model_root = os.path.join(model_base, model_name, note + '_' + datetime.datetime.now().strftime('%m%d_%H%M%S')) 18 | os.makedirs(model_root) 19 | config = os.path.join(model_root, 'config.txt') 20 | finetune_flag = False 21 | lr_adjust_flag = 'simple' 22 | 23 | # params for datasets and data loader 24 | batch_size = 128 25 | 26 | # params for source dataset 27 | src_dataset = "synsigns" 28 | src_image_root = os.path.join('/home/wogong/datasets', 'synsigns') 29 | src_model_trained = True 30 | src_classifier_restore = os.path.join(model_root, src_dataset + '-source-classifier-final.pt') 31 | 32 | # params for target dataset 33 | tgt_dataset = "gtsrb" 34 | tgt_image_root = os.path.join('/home/wogong/datasets', 'gtsrb') 35 | tgt_model_trained = True 36 | dann_restore = os.path.join(model_root, src_dataset + '-' + tgt_dataset + '-dann-final.pt') 37 | 38 | # params for training dann 39 | gpu_id = '0' 40 | 41 | ## for digit 42 | num_epochs = 200 43 | log_step = 50 44 | save_step = 100 45 | eval_step = 1 46 | 47 | manual_seed = 42 48 | alpha = 0 49 | 50 | # params for optimizing models 51 | lr = 0.01 52 | momentum = 0.9 53 | weight_decay = 1e-6 54 | 55 | def __init__(self): 56 | """save config to model root""" 57 | public_props = (name for name in dir(self) if not name.startswith('_')) 58 | with open(self.config, 'w') as f: 59 | for name in public_props: 60 | f.write(name + ': ' + str(getattr(self, name)) + '\n') 61 | 62 | params = Config() 63 | logger = SummaryWriter(params.model_root) 64 | device = torch.device("cuda:" + params.gpu_id if torch.cuda.is_available() else "cpu") 65 | 66 | # init random seed 67 | init_random_seed(params.manual_seed) 68 | 69 | # load dataset 70 | src_data_loader = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=True) 71 | src_data_loader_eval = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=False) 72 | 73 | 
"""DANN experiment: SVHN (source) -> MNIST (target)."""

import os
import sys
import datetime
from tensorboardX import SummaryWriter

import torch
sys.path.append('../')
from models.model import SVHNmodel
from core.train import train_dann
from utils.utils import get_data_loader, init_model, init_random_seed


class Config(object):
    """Experiment configuration; all values are class attributes so they are
    fixed at import time and dumped to config.txt by __init__."""

    # params for path
    model_name = "svhn-mnist"
    model_base = '/home/wogong/models/pytorch-dann'
    note = 'paper-structure'
    # bugfix: a leftover `model_root = os.path.expanduser(...)` assignment that
    # was immediately overwritten has been removed; only the timestamped run
    # directory below is ever used.
    model_root = os.path.join(model_base, model_name, note + '_' + datetime.datetime.now().strftime('%m%d_%H%M%S'))
    os.makedirs(model_root)  # runs at import time; timestamp keeps it unique
    config = os.path.join(model_root, 'config.txt')
    finetune_flag = False
    lr_adjust_flag = 'simple'  # 'simple' selects adjust_learning_rate in core.train
    src_only_flag = False      # True would train on source classification loss only

    # params for datasets and data loader
    batch_size = 128

    # params for source dataset
    src_dataset = "svhn"
    src_image_root = os.path.join('/home/wogong/datasets', 'svhn')
    src_model_trained = True
    src_classifier_restore = os.path.join(model_root, src_dataset + '-source-classifier-final.pt')

    # params for target dataset
    tgt_dataset = "mnist"
    tgt_image_root = os.path.join('/home/wogong/datasets', 'mnist')
    tgt_model_trained = True
    dann_restore = os.path.join(model_root, src_dataset + '-' + tgt_dataset + '-dann-final.pt')

    # params for training dann
    gpu_id = '0'

    ## for digit
    num_epochs = 200
    log_step = 50    # iterations between console log lines
    save_step = 100  # epochs between checkpoints
    eval_step = 1    # epochs between evaluations

    ## for office
    # num_epochs = 1000
    # log_step = 10  # iters
    # save_step = 500
    # eval_step = 5  # epochs

    manual_seed = None  # None -> a fresh random seed is drawn in init_random_seed
    alpha = 0  # placeholder; the GRL alpha is recomputed per-step in train_dann

    # params for optimizing models
    lr = 0.01
    momentum = 0.9
    weight_decay = 1e-6

    def __init__(self):
        """Dump every public config attribute to <model_root>/config.txt."""
        public_props = (name for name in dir(self) if not name.startswith('_'))
        with open(self.config, 'w') as f:
            for name in public_props:
                f.write(name + ': ' + str(getattr(self, name)) + '\n')

params = Config()
logger = SummaryWriter(params.model_root)
device = torch.device("cuda:" + params.gpu_id if torch.cuda.is_available() else "cpu")

# init random seed
init_random_seed(params.manual_seed)

# load dataset
src_data_loader = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=True)
src_data_loader_eval = get_data_loader(params.src_dataset, params.src_image_root, params.batch_size, train=False)
tgt_data_loader = get_data_loader(params.tgt_dataset, params.tgt_image_root, params.batch_size, train=True)
tgt_data_loader_eval = get_data_loader(params.tgt_dataset, params.tgt_image_root, params.batch_size, train=False)

# load dann model
# restore=None here, so dann.restored stays False and training always runs
dann = init_model(net=SVHNmodel(), restore=None)

# train dann model
print("Training dann model")
if not (dann.restored and params.dann_restore):
    dann = train_dann(dann, params, src_data_loader, tgt_data_loader, tgt_data_loader_eval, device, logger)
class LRN(nn.Module):
    """Local Response Normalization as used in the original AlexNet.

    Each activation is divided by ``(1 + alpha * avg(x^2)) ** beta``, where the
    average of squared activations is taken either over a window of
    neighbouring channels (``ACROSS_CHANNELS=True``) or over a spatial window
    within each channel (``ACROSS_CHANNELS=False``).
    """

    def __init__(self, local_size=1, alpha=1.0, beta=0.75, ACROSS_CHANNELS=True):
        super(LRN, self).__init__()
        self.ACROSS_CHANNELS = ACROSS_CHANNELS
        half = int((local_size - 1.0) / 2)
        if ACROSS_CHANNELS:
            # Pool along the channel axis only: kernel (local_size, 1, 1).
            self.average = nn.AvgPool3d(
                kernel_size=(local_size, 1, 1), stride=1, padding=(half, 0, 0))
        else:
            self.average = nn.AvgPool2d(
                kernel_size=local_size, stride=1, padding=half)
        self.alpha = alpha
        self.beta = beta

    def forward(self, x):
        squared = x.pow(2)
        if self.ACROSS_CHANNELS:
            # Insert a depth axis so AvgPool3d slides across channels, then
            # drop it again.
            local_mean = self.average(squared.unsqueeze(1)).squeeze(1)
        else:
            local_mean = self.average(squared)
        denom = local_mean.mul(self.alpha).add(1.0).pow(self.beta)
        return x.div(denom)
def init_weights(layer):
    """Init weights for layers w.r.t. the original paper.

    Conv layers get weights ~ N(0, 0.02) and zero bias; BatchNorm layers get
    weights ~ N(1, 0.02) and zero bias. Intended for use with ``net.apply``.
    """
    layer_name = layer.__class__.__name__
    if layer_name.find("Conv") != -1:
        layer.weight.data.normal_(0.0, 0.02)
        # bugfix: Tensor has no .fill() method — the in-place fill is .fill_()
        # (the BatchNorm branch below already used it correctly). Also guard
        # against conv layers constructed with bias=False.
        if layer.bias is not None:
            layer.bias.data.fill_(0.0)
    elif layer_name.find("BatchNorm") != -1:
        layer.weight.data.normal_(1.0, 0.02)
        layer.bias.data.fill_(0)
def read_gtsrb_pickle(filename):
    """
    Extract images and labels from a pickle file.

    :param filename: path to a pickle holding a dict with 'images' and 'labels'
    :return: (np.array of images, np.array of integer labels)
    """
    with open(filename, mode='rb') as f:
        data = pickle.load(f)
    raw_labels = data['labels']
    # Labels may have been stored as strings (GTSRB CSVs yield strings);
    # coerce to int. len() guard fixes an IndexError on empty label sets and
    # isinstance replaces the non-idiomatic `type(x) == int` check.
    if len(raw_labels) > 0 and not isinstance(raw_labels[0], int):
        labels = [int(x) for x in raw_labels]
    else:
        labels = raw_labels
    return np.array(data['images']), np.array(labels)
def load_and_append_image_class(prefix, gtFile, images, labels, roi_boxes):
    """Parse one GTSRB annotation CSV and append its rows' data in place.

    :param prefix: directory prefix prepended to each image filename
    :param gtFile: open annotation file handle (closed before returning)
    :param images: list to append loaded image arrays to
    :param labels: list to append class-id strings to
    :param roi_boxes: list to append (left, upper, right, lower) tuples to
    """
    reader = csv.reader(gtFile, delimiter=';')  # annotations are ';'-separated
    next(reader)  # drop the header row
    for row in reader:
        images.append(plt.imread(prefix + row[0]))  # column 0: image filename
        # columns 3-6: region-of-interest box (left, upper, right, lower)
        roi_boxes.append(
            (float(row[3]), float(row[4]), float(row[5]), float(row[6])))
        labels.append(row[7])  # column 7: class id
    gtFile.close()
79 | :param rootpath: path to the traffic sign data, 80 | for example './GTSRB/Training' 81 | :return: list of images, list of corresponding labels 82 | """ 83 | images = [] # images 84 | labels = [] # corresponding labels 85 | roi_boxes = [] # box coordinates for ROI (left, upper, right, lower) 86 | 87 | if type == 'train': 88 | # loop over all 42 classes 89 | for c in range(0, NUM_LABELS): 90 | prefix = rootpath + '/' + format(c, '05d') + '/' # subdir for class 91 | gtFile = open( 92 | prefix + 'GT-' + format(c, '05d') + '.csv') # annotations file 93 | 94 | load_and_append_image_class(prefix, gtFile, images, labels, 95 | roi_boxes) 96 | elif type == 'test': 97 | prefix = rootpath + '/' 98 | gtFile = open(prefix + 'GT-final_test' + '.csv') # annotations file 99 | load_and_append_image_class(prefix, gtFile, images, labels, roi_boxes) 100 | else: 101 | raise ValueError( 102 | 'The data partition type you have provided is not valid.') 103 | 104 | images = np.asarray(images) 105 | labels = np.asarray(labels) 106 | roi_boxes = np.asarray(roi_boxes) 107 | 108 | preprocessed_images = preprocess_gtsrb(images, roi_boxes, resize_to=IMAGE_SHAPE[:-1]) 109 | 110 | pickle.dump({'images': preprocessed_images, 'labels': labels}, 111 | open(pickle_filename, "wb")) 112 | 113 | 114 | if __name__ == '__main__': 115 | rootpath = DATADIR + '/Final_Training/Images' 116 | pickle_filename = '/home/wogong/datasets/gtsrb/gtsrb_train.p' 117 | preprocess_and_convert_gtsrb_to_pickle(rootpath, pickle_filename, type='train') -------------------------------------------------------------------------------- /core/train.py: -------------------------------------------------------------------------------- 1 | """Train dann.""" 2 | 3 | import numpy as np 4 | 5 | import torch 6 | import torch.nn as nn 7 | import torch.optim as optim 8 | from core.test import test 9 | from utils.utils import save_model 10 | import torch.backends.cudnn as cudnn 11 | cudnn.benchmark = True 12 | 13 | def train_src(model, 
params, src_data_loader, tgt_data_loader, tgt_data_loader_eval, device, logger): 14 | """Train dann.""" 15 | #################### 16 | # 1. setup network # 17 | #################### 18 | 19 | # setup criterion and optimizer 20 | 21 | if not params.finetune_flag: 22 | print("training non-office task") 23 | optimizer = optim.SGD(model.parameters(), lr=params.lr, momentum=params.momentum, weight_decay=params.weight_decay) 24 | else: 25 | print("training office task") 26 | parameter_list = [{ 27 | "params": model.features.parameters(), 28 | "lr": 0.001 29 | }, { 30 | "params": model.fc.parameters(), 31 | "lr": 0.001 32 | }, { 33 | "params": model.bottleneck.parameters() 34 | }, { 35 | "params": model.classifier.parameters() 36 | }, { 37 | "params": model.discriminator.parameters() 38 | }] 39 | optimizer = optim.SGD(parameter_list, lr=0.01, momentum=0.9) 40 | 41 | criterion = nn.CrossEntropyLoss() 42 | 43 | #################### 44 | # 2. train network # 45 | #################### 46 | global_step = 0 47 | for epoch in range(params.num_epochs): 48 | # set train state for Dropout and BN layers 49 | model.train() 50 | # zip source and target data pair 51 | len_dataloader = min(len(src_data_loader), len(tgt_data_loader)) 52 | data_zip = enumerate(zip(src_data_loader, tgt_data_loader)) 53 | for step, ((images_src, class_src), (images_tgt, _)) in data_zip: 54 | 55 | p = float(step + epoch * len_dataloader) / \ 56 | params.num_epochs / len_dataloader 57 | alpha = 2. / (1. 
+ np.exp(-10 * p)) - 1 58 | 59 | if params.lr_adjust_flag == 'simple': 60 | lr = adjust_learning_rate(optimizer, p) 61 | else: 62 | lr = adjust_learning_rate_office(optimizer, p) 63 | logger.add_scalar('lr', lr, global_step) 64 | 65 | # prepare domain label 66 | size_src = len(images_src) 67 | size_tgt = len(images_tgt) 68 | 69 | # make images variable 70 | class_src = class_src.to(device) 71 | images_src = images_src.to(device) 72 | 73 | # zero gradients for optimizer 74 | model.zero_grad() 75 | 76 | # train on source domain 77 | src_class_output, src_domain_output = model(input_data=images_src, alpha=alpha) 78 | src_loss_class = criterion(src_class_output, class_src) 79 | 80 | loss = src_loss_class 81 | 82 | # optimize dann 83 | loss.backward() 84 | optimizer.step() 85 | 86 | global_step += 1 87 | 88 | # print step info 89 | logger.add_scalar('loss', loss.item(), global_step) 90 | 91 | if ((step + 1) % params.log_step == 0): 92 | print( 93 | "Epoch [{:4d}/{}] Step [{:2d}/{}]: loss={:.6f}".format(epoch + 1, params.num_epochs, step + 1, len_dataloader, loss.data.item())) 94 | 95 | # eval model 96 | if ((epoch + 1) % params.eval_step == 0): 97 | src_test_loss, src_acc, src_acc_domain = test(model, src_data_loader, device, flag='source') 98 | tgt_test_loss, tgt_acc, tgt_acc_domain = test(model, tgt_data_loader_eval, device, flag='target') 99 | logger.add_scalar('src_test_loss', src_test_loss, global_step) 100 | logger.add_scalar('src_acc', src_acc, global_step) 101 | 102 | 103 | # save model parameters 104 | if ((epoch + 1) % params.save_step == 0): 105 | save_model(model, params.model_root, 106 | params.src_dataset + '-' + params.tgt_dataset + "-dann-{}.pt".format(epoch + 1)) 107 | 108 | # save final model 109 | save_model(model, params.model_root, params.src_dataset + '-' + params.tgt_dataset + "-dann-final.pt") 110 | 111 | return model 112 | 113 | def train_dann(model, params, src_data_loader, tgt_data_loader, tgt_data_loader_eval, device, logger): 114 | """Train 
dann.""" 115 | #################### 116 | # 1. setup network # 117 | #################### 118 | 119 | # setup criterion and optimizer 120 | 121 | if not params.finetune_flag: 122 | print("training non-office task") 123 | optimizer = optim.SGD(model.parameters(), lr=params.lr, momentum=params.momentum, weight_decay=params.weight_decay) 124 | else: 125 | print("training office task") 126 | parameter_list = [{ 127 | "params": model.features.parameters(), 128 | "lr": 0.001 129 | }, { 130 | "params": model.fc.parameters(), 131 | "lr": 0.001 132 | }, { 133 | "params": model.bottleneck.parameters() 134 | }, { 135 | "params": model.classifier.parameters() 136 | }, { 137 | "params": model.discriminator.parameters() 138 | }] 139 | optimizer = optim.SGD(parameter_list, lr=0.01, momentum=0.9) 140 | 141 | criterion = nn.CrossEntropyLoss() 142 | 143 | #################### 144 | # 2. train network # 145 | #################### 146 | global_step = 0 147 | for epoch in range(params.num_epochs): 148 | # set train state for Dropout and BN layers 149 | model.train() 150 | # zip source and target data pair 151 | len_dataloader = min(len(src_data_loader), len(tgt_data_loader)) 152 | data_zip = enumerate(zip(src_data_loader, tgt_data_loader)) 153 | for step, ((images_src, class_src), (images_tgt, _)) in data_zip: 154 | 155 | p = float(step + epoch * len_dataloader) / \ 156 | params.num_epochs / len_dataloader 157 | alpha = 2. / (1. 
+ np.exp(-10 * p)) - 1 158 | 159 | if params.lr_adjust_flag == 'simple': 160 | lr = adjust_learning_rate(optimizer, p) 161 | else: 162 | lr = adjust_learning_rate_office(optimizer, p) 163 | logger.add_scalar('lr', lr, global_step) 164 | 165 | # prepare domain label 166 | size_src = len(images_src) 167 | size_tgt = len(images_tgt) 168 | label_src = torch.zeros(size_src).long().to(device) # source 0 169 | label_tgt = torch.ones(size_tgt).long().to(device) # target 1 170 | 171 | # make images variable 172 | class_src = class_src.to(device) 173 | images_src = images_src.to(device) 174 | images_tgt = images_tgt.to(device) 175 | 176 | # zero gradients for optimizer 177 | optimizer.zero_grad() 178 | 179 | # train on source domain 180 | src_class_output, src_domain_output = model(input_data=images_src, alpha=alpha) 181 | src_loss_class = criterion(src_class_output, class_src) 182 | src_loss_domain = criterion(src_domain_output, label_src) 183 | 184 | # train on target domain 185 | _, tgt_domain_output = model(input_data=images_tgt, alpha=alpha) 186 | tgt_loss_domain = criterion(tgt_domain_output, label_tgt) 187 | 188 | loss = src_loss_class + src_loss_domain + tgt_loss_domain 189 | if params.src_only_flag: 190 | loss = src_loss_class 191 | 192 | # optimize dann 193 | loss.backward() 194 | optimizer.step() 195 | 196 | global_step += 1 197 | 198 | # print step info 199 | logger.add_scalar('src_loss_class', src_loss_class.item(), global_step) 200 | logger.add_scalar('src_loss_domain', src_loss_domain.item(), global_step) 201 | logger.add_scalar('tgt_loss_domain', tgt_loss_domain.item(), global_step) 202 | logger.add_scalar('loss', loss.item(), global_step) 203 | 204 | if ((step + 1) % params.log_step == 0): 205 | print( 206 | "Epoch [{:4d}/{}] Step [{:2d}/{}]: src_loss_class={:.6f}, src_loss_domain={:.6f}, tgt_loss_domain={:.6f}, loss={:.6f}" 207 | .format(epoch + 1, params.num_epochs, step + 1, len_dataloader, src_loss_class.data.item(), 208 | src_loss_domain.data.item(), 
def adjust_learning_rate(optimizer, p, lr_0=0.01, alpha=10, beta=0.75):
    """Anneal every param group's lr with the DANN schedule.

    lr = lr_0 / (1 + alpha * p) ** beta, where p in [0, 1] is training
    progress. The schedule constants are now overridable keyword arguments
    (defaults preserve the original hard-coded values).

    :return: the new learning rate.
    """
    lr = lr_0 / (1 + alpha * p)**beta
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return lr

def adjust_learning_rate_office(optimizer, p, lr_0=0.001, alpha=10, beta=0.75):
    """DANN lr schedule for the office setup.

    The first two param groups (pretrained feature/fc layers) get the base
    lr; all remaining groups (newly initialized layers) get 10 * lr.

    :return: the base learning rate applied to the pretrained groups.
    """
    lr = lr_0 / (1 + alpha * p)**beta
    for param_group in optimizer.param_groups[:2]:
        param_group['lr'] = lr
    for param_group in optimizer.param_groups[2:]:
        param_group['lr'] = 10 * lr
    return lr
class Classifier(nn.Module):
    """SVHN-style label predictor without a domain discriminator.

    The input is expanded to a single-channel 28x28 image; the output is
    the (N, 10) log-probability over the 10 digit classes.
    """

    def __init__(self):
        super(Classifier, self).__init__()
        self.restored = False  # flipped externally once weights are restored

        # Feature extractor: (N, 1, 28, 28) -> (N, 50, 4, 4)
        # 28 -conv5-> 24 -pool2-> 12 -conv5-> 8 -pool2-> 4
        self.feature = nn.Sequential()
        self.feature.add_module('f_conv1', nn.Conv2d(1, 64, kernel_size=5))
        self.feature.add_module('f_bn1', nn.BatchNorm2d(64))
        self.feature.add_module('f_pool1', nn.MaxPool2d(2))
        self.feature.add_module('f_relu1', nn.ReLU(True))
        self.feature.add_module('f_conv2', nn.Conv2d(64, 50, kernel_size=5))
        self.feature.add_module('f_bn2', nn.BatchNorm2d(50))
        self.feature.add_module('f_drop1', nn.Dropout2d())
        self.feature.add_module('f_pool2', nn.MaxPool2d(2))
        self.feature.add_module('f_relu2', nn.ReLU(True))

        # Label classifier on the flattened 50*4*4 feature vector.
        self.class_classifier = nn.Sequential()
        self.class_classifier.add_module('c_fc1', nn.Linear(50 * 4 * 4, 100))
        # BUG FIX: activations here are 2-D (N, 100) out of nn.Linear;
        # BatchNorm2d/Dropout2d require 4-D input and raise on modern
        # PyTorch. Use the 1-D variants, matching every other class below.
        self.class_classifier.add_module('c_bn1', nn.BatchNorm1d(100))
        self.class_classifier.add_module('c_relu1', nn.ReLU(True))
        self.class_classifier.add_module('c_drop1', nn.Dropout())
        self.class_classifier.add_module('c_fc2', nn.Linear(100, 100))
        self.class_classifier.add_module('c_bn2', nn.BatchNorm1d(100))
        self.class_classifier.add_module('c_relu2', nn.ReLU(True))
        self.class_classifier.add_module('c_fc3', nn.Linear(100, 10))
        self.class_classifier.add_module('c_softmax', nn.LogSoftmax(dim=1))

    def forward(self, input_data):
        """Return (N, 10) class log-probabilities for a batch of images."""
        input_data = input_data.expand(input_data.data.shape[0], 1, 28, 28)
        feature = self.feature(input_data)
        feature = feature.view(-1, 50 * 4 * 4)
        class_output = self.class_classifier(feature)

        return class_output


class MNISTmodel(nn.Module):
    """MNIST DANN architecture (feature extractor + classifier + discriminator).

    +Dropout2d, 84% ~ 73%
    -Dropout2d, 50% ~ 73%
    """

    def __init__(self):
        super(MNISTmodel, self).__init__()
        self.restored = False  # flipped externally once weights are restored

        # Feature extractor: (N, 3, 28, 28) -> (N, 48, 4, 4)
        self.feature = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=32,
                      kernel_size=(5, 5)),  # 3 28 28, 32 24 24
            nn.BatchNorm2d(32),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2)),  # 32 12 12
            nn.Conv2d(in_channels=32, out_channels=48,
                      kernel_size=(5, 5)),  # 48 8 8
            nn.BatchNorm2d(48),
            nn.Dropout2d(),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2)),  # 48 4 4
        )

        # Label predictor: 48*4*4 features -> 10 class logits.
        self.classifier = nn.Sequential(
            nn.Linear(48 * 4 * 4, 100),
            nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 100),
            nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 10),
        )

        # Domain discriminator: 48*4*4 features -> 2 domain logits.
        self.discriminator = nn.Sequential(
            nn.Linear(48 * 4 * 4, 100),
            nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 2),
        )

    def forward(self, input_data, alpha):
        """Return (class_logits, domain_logits).

        *alpha* scales the gradient flowing back through the discriminator
        branch via the gradient-reversal layer.
        """
        input_data = input_data.expand(input_data.data.shape[0], 3, 28, 28)
        feature = self.feature(input_data)
        feature = feature.view(-1, 48 * 4 * 4)
        # Gradient reversal: identity forward, -alpha * grad backward.
        reverse_feature = ReverseLayerF.apply(feature, alpha)
        class_output = self.classifier(feature)
        domain_output = self.discriminator(reverse_feature)

        return class_output, domain_output


class MNISTmodel_plain(nn.Module):
    """MNISTmodel variant with BatchNorm/Dropout layers disabled.

    +Dropout2d, 84% ~ 73%
    -Dropout2d, 50% ~ 73%
    """

    def __init__(self):
        super(MNISTmodel_plain, self).__init__()
        self.restored = False  # flipped externally once weights are restored

        # Same topology as MNISTmodel but without normalisation/dropout.
        self.feature = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=32,
                      kernel_size=(5, 5)),  # 3 28 28, 32 24 24
            # nn.BatchNorm2d(32),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2)),  # 32 12 12
            nn.Conv2d(in_channels=32, out_channels=48,
                      kernel_size=(5, 5)),  # 48 8 8
            # nn.BatchNorm2d(48),
            # nn.Dropout2d(),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(2, 2)),  # 48 4 4
        )

        self.classifier = nn.Sequential(
            nn.Linear(48 * 4 * 4, 100),
            # nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 100),
            # nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 10),
        )

        self.discriminator = nn.Sequential(
            nn.Linear(48 * 4 * 4, 100),
            # nn.BatchNorm1d(100),
            nn.ReLU(inplace=True),
            nn.Linear(100, 2),
        )

    def forward(self, input_data, alpha):
        """Return (class_logits, domain_logits); *alpha* scales the
        reversed gradient of the discriminator branch."""
        input_data = input_data.expand(input_data.data.shape[0], 3, 28, 28)
        feature = self.feature(input_data)
        feature = feature.view(-1, 48 * 4 * 4)
        reverse_feature = ReverseLayerF.apply(feature, alpha)
        class_output = self.classifier(feature)
        domain_output = self.discriminator(reverse_feature)

        return class_output, domain_output


class SVHNmodel(nn.Module):
    """SVHN DANN architecture (feature extractor + classifier + discriminator)."""

    def __init__(self):
        super(SVHNmodel, self).__init__()
        self.restored = False  # flipped externally once weights are restored

        # Feature extractor: (N, 3, 32, 32) -> (N, 128, 1, 1)
        self.feature = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=64, kernel_size=(5, 5)),  # 28
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(3, 3), stride=(2, 2)),  # 13
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=(5, 5)),  # 9
            nn.BatchNorm2d(64),
            nn.Dropout2d(),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=(3, 3), stride=(2, 2)),  # 4
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=(4, 4)),  # 1
        )

        # Label predictor: 128 features -> 10 class logits.
        self.classifier = nn.Sequential(
            nn.Linear(128 * 1 * 1, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(inplace=True),
            nn.Linear(1024, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 10),
        )

        # Domain discriminator: 128 features -> 2 domain logits.
        self.discriminator = nn.Sequential(
            nn.Linear(128 * 1 * 1, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(inplace=True),
            nn.Linear(1024, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 2),
        )

    def forward(self, input_data, alpha=1.0):
        """Return (class_logits, domain_logits); *alpha* (default 1.0)
        scales the reversed gradient of the discriminator branch."""
        input_data = input_data.expand(input_data.data.shape[0], 3, 32, 32)
        feature = self.feature(input_data)
        feature = feature.view(-1, 128 * 1 * 1)
        reverse_feature = ReverseLayerF.apply(feature, alpha)
        class_output = self.classifier(feature)
        domain_output = self.discriminator(reverse_feature)

        return class_output, domain_output


class GTSRBmodel(nn.Module):
    """GTSRB DANN architecture (43 traffic-sign classes)."""

    def __init__(self):
        super(GTSRBmodel, self).__init__()
        self.restored = False  # flipped externally once weights are restored

        # Feature extractor: (N, 3, 40, 40) -> (N, 256, 5, 5)
        # (padding keeps each conv size-preserving; every pool halves it)
        self.feature = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=96, kernel_size=(5, 5), stride=1, padding=2),  # 40
            nn.BatchNorm2d(96),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2)),  # 20
            nn.Conv2d(in_channels=96, out_channels=144, kernel_size=(3, 3), stride=1, padding=1),  # 20
            nn.BatchNorm2d(144),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2)),  # 10
            nn.Conv2d(in_channels=144, out_channels=256, kernel_size=(5, 5), stride=1, padding=2),  # 10
            nn.BatchNorm2d(256),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2)),  # 5
        )

        # Label predictor: 256*5*5 features -> 43 class logits.
        self.classifier = nn.Sequential(
            nn.Linear(256 * 5 * 5, 512),
            nn.BatchNorm1d(512),
            nn.ReLU(),
            nn.Dropout(),
            nn.Linear(512, 43),
        )

        # Domain discriminator: 256*5*5 features -> 2 domain logits.
        self.discriminator = nn.Sequential(
            nn.Linear(256 * 5 * 5, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(),
            nn.Linear(1024, 1024),
            nn.BatchNorm1d(1024),
            nn.ReLU(),
            nn.Dropout(),
            nn.Linear(1024, 2),
        )

    def forward(self, input_data, alpha=1.0):
        """Return (class_logits, domain_logits); *alpha* (default 1.0)
        scales the reversed gradient of the discriminator branch."""
        input_data = input_data.expand(input_data.data.shape[0], 3, 40, 40)
        feature = self.feature(input_data)
        feature = feature.view(-1, 256 * 5 * 5)
        reverse_feature = ReverseLayerF.apply(feature, alpha)
        class_output = self.classifier(feature)
        domain_output = self.discriminator(reverse_feature)

        return class_output, domain_output


class AlexModel(nn.Module):
    """AlexNet pretrained on ImageNet, adapted for the Office dataset (31 classes)."""

    def __init__(self):
        super(AlexModel, self).__init__()
        self.restored = False  # flipped externally once weights are restored
        model_alexnet = models.alexnet(pretrained=True)

        # Reuse the pretrained convolutional trunk as-is.
        self.features = model_alexnet.features

        # Copy the first six classifier layers (up to but excluding the
        # final 1000-way fc) from the pretrained network.
        self.fc = nn.Sequential()
        for i in range(6):
            self.fc.add_module("classifier" + str(i),
                               model_alexnet.classifier[i])
        self.__in_features = model_alexnet.classifier[6].in_features  # 4096

        # Shared bottleneck feeding both heads.
        self.bottleneck = nn.Sequential(
            nn.Linear(4096, 2048),
            nn.ReLU(inplace=True),
        )

        # Label predictor: 31 Office classes.
        self.classifier = nn.Sequential(
            nn.Linear(2048, 31),
        )

        # Domain discriminator: source vs. target.
        self.discriminator = nn.Sequential(
            nn.Linear(2048, 1024),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(1024, 1024),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(1024, 2),
        )

    def forward(self, input_data, alpha):
        """Return (class_logits, domain_logits) for 227x227 RGB input;
        *alpha* scales the reversed gradient of the discriminator branch."""
        input_data = input_data.expand(input_data.data.shape[0], 3, 227, 227)
        feature = self.features(input_data)
        feature = feature.view(-1, 256 * 6 * 6)  # AlexNet conv output for 227x227
        fc = self.fc(feature)
        bottleneck = self.bottleneck(fc)

        reverse_bottleneck = ReverseLayerF.apply(bottleneck, alpha)

        class_output = self.classifier(bottleneck)
        domain_output = self.discriminator(reverse_bottleneck)

        return class_output, domain_output
"name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "source images\n" 13 | ] 14 | }, 15 | { 16 | "data": { 17 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAADaCAYAAAC2Arl5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJztnXuUHFd957+/qu6el95P64Uf2MbYPAwxYBbCw0BCCAHOHpLFeWASEm/OSTZkYTfAsruxFzgHziaQZJOFeGMWAwmPdQgGJ8QYYxmMwUK2ZEuW/JBlyZYsaaSRNO+Z7q767R9Vo67ft6a7ZkYzmqb5fc7RUd+u6lu/qrp9p/p7f/d7RVXhOI7j/PQTLHYAjuM4zvzgHbrjOE6H4B264zhOh+AduuM4TofgHbrjOE6H4B264zhOh+Ad+jlARFRERkXk43P8/GdF5L/Nd1xp3Rek8ZUWov7FYCGvV7sgIltF5HebbOuoeyoiN6bfn445p4XCO/Rzx4tV9SPAmS/cgakNInJARMZFZERETonIP4vIlqntqvr7qvrRRYj5p5L5vF4iMqOJGi3u6bCInBaR+0Tk90XEv3MzIL1+FwCAqv4pgCsWNaCfErxxtQ+/oqpLAGwAcAzA/1rkeM4JIhIudgwLyK+o6lIA5wP4BIAPArh5cUM6N/iT9OLgHXqboaoTAG4FcPnUeyLyeRH5WKb8dhHZKSJDIvKkiLxZRH5VRB7I1iUi7xeR29LXPSLy5yJyUEQGReReEenh44vIchG5WUSOiMhhEfnYVKcrIheLyD3p50+IyFebnYeIvDp9Kj0tIs+IyHsy5/IZEfkXERkF8Pr0mF8QkeNpfP916km22TEl4dMi0p9eh10i8gK+XiLyOhE5JCIfSPc9IiK/nYlztYh8K63jJ+n53jvb+9YKVR1U1W8C+HcArsvE2eq8bxCRL2XinE5Gea6IbEtjv01EVk13/Fb3dJp9QxH5L2m7GhaRB0Rky3THl4zsIyLvEZEfpvdkAMBH03v/gsz+a9NfLevS8lvTdjz1C+ZFc73GToL/FV0EVPUAgAum2yYivUi++D9usv3lAL4A4J0A7kLyRL8UwFMA/lZEnq+qe9PdfwvA1B+CP0Pys/XfADgK4BUA4mkO8XkA/QAuBtAH4HYAzwD4WwAfBfAdAK8HUAFwVZMYzwfwbQDXI/njtAzAlswuvw7gLQDemtZzE4DlAC4CsDo9xhEkT7PNjvkLAF4D4FIAgwAuA3B6ungAnJfWvwnAmwDcKiLfUNVTAP4GwGi6zwUA7gBwcOqDqipN6jS0uqeZfbaJyCEAPw9gN5JfYc3Oeya8G8AvIrn3XwDwVwB+c5r9Po/m95R5P4BrkdyfxwG8CMAYkjZWxCsAfAXAegDl9P9rAXwk3f5rAO5R1X4ReQmAzwH4FQDb07i/KSLPU9VJVb1gBsdzGFX1fwv8D4ACuLjF9gMARpB0SDUAzwJ4YWb75wF8LH39twA+3aSezwD4ePr6CgCnAHQh+SU2jkTH589ckMZXQvIFnATQk9l+LYC709dfQNL5bi443w8D+Kcm2z4P4AuZcgigCuDyzHv/HsDWVscEcA2SDudqAME0x5i6Xq9Lz72U2d6ffi5Mr/fzMts+BuDeebjnBwC8cZr3f4ykgys67xsAfGm6+5SWtwL4RGb75Wl94Wzu6TTxPQbg7a3aSea9rQB+N339HgBP02feCODJTPmHAN6daasfnebYr20SV+74/i//zyWX9uEdqroCQDeAPwRwj4icN81+WwA82aSOWwD8uogIkqfzr6nqJIA1ab3NPjfF+UierI6kP4NPI/kDsi7d/icABM
A2EXlERH6nST2tYgSSp8Mp1qTHPJh57yCSp+mmx1TV7wH4ayRP2P0icpOILGtyvAFVrWfKYwCWAFiLpNPLxpN9vRBsAnASxec9E7KxHkzrW0P7FN1TpujezTQeALgbQK+IvEKSAc4rAfxTJq4PTMWUxrUFwMY5HtuBa+hth6pGqvp1ABGAV0+zyzMAntvksz9G8pT280hkjS+mm04AmGj2Oap7EsAaVV2R/lumqlek9R9V1d9T1Y1Inib/t4hcPJsYp0LNvD6B5Cn5/Mx7zwFwuOiYqvpXqvpzSJ5OLwXwnwvOjzkOoA5gc+a9LU32PWtE5GVIOux7UXDeSGSg3sy2Zn/cs5+tpfVmaXlPp6HZvRtN/28Vk8kIUtUIwNeQ/CK4FsDtqjqcOc7HMzGtUNVeVf1yk7icGeAdepuRDva9HcBKAHun2eVmAL8tIm8QkUBENonIZZntX0Dy5FpT1XsBQFVjJHrlp0RkYzrw9UoR6cpWrKpHkOi4fy4iy9L6nysir01j+1URmer8TiH5Ak+nw/89gDeKyK+JSCkdeLxyuvPNfOk/LiJLU/39/QC+1OqYIvKy9MmvjKSzmWgSS1PSY38dwA0i0ptex3c32z8dqNw6m2Okn1smIm9Foi9/SVV3FZ03gJ0AXiMizxGR5UhkLOY3ReTydNzlfwC4Na03e44t7+k0/B2SAc1L0rb4IhFZrarHkfyx+c20/fwOih8QAOAfkIwJ/Ub6eor/A+D303soItInIr8sIjPR6p0meIfePnxLREYADAH4OIDrVPUR3klVtwH4bQCfRjIYeA/sU94XAbwAjY5hiv8EYBeAnyD5yf9JTH//341k8HEPkg70ViQDrwDwMgD3p3F+E8D7VHX/NDE+jWRQ7QPpsXYCeHGLc/8PSDrl/UieXv8ByR+gVsdchqRTOIVEbhgA8D9bHKMZf4hkYPIokmv3ZSRPtNOxBYkOPFO+JSLDSJ5GPwLgU0ju3RRNz1tV7wTwVQAPA3gAyUAm80Uk4wVHkUhqf9Qkjlb3lPkUkj8030HSFm8GMJUN9XtIfgUNIBmjua9JHWdQ1fvTc9yIZKB86v3taX1/nca0D4kO75wFkg44OAuIiEwg6ST+SlUXdAajJKmI/QBeqqpPLOSxOhER+SSA81T1umm27QTwBlUdOPeR/ewiIn+K5NdLF4A+/hXiNPAOvcMQkfcDeKuqXrPYsfw0kMosFSS/Xl4G4F+QZG58Y1EDc5w54HnoHYQkU88FwDsWOZSfJpYikVk2Ipmh++cAblvUiBxnjvgTuuM4TodwVoOikkw5f0xE9onIh+YrKMdxHGf2zPkJXRIviMeRTKU+hCR74lpV3TN/4TmO4zgz5Ww09JcD2DeVtiYiXwHwdiSpUdPS29urK1asOItDOo7j/Oxx5MiRE6q6tmi/s+nQN8FO9T2ExJzHICLXIzFpwvLly3H99defxSEdx3F+9rjxxhsPFu91DiYWqepNqnqVql7V29tb/AHHcRxnTpxNh34Y1ktiMxo+FI7jOM455mw69J8AuERELhSRCoB3IZma7TiO4ywCc9bQVbUuIn+IZEGAEMDnpvMecRzHcc4NZzVTVFX/BclUacdxHGeRaaup/zf86Q30DuXIizTdFsOuFBZwen3BQmJK9eV353da5O/TqmW8iFluHXnaHlPdeV2saFW07OdpX553kKuqYH/envv8LC48m93yiVJVueuYO3Sr9pKv8IYbbmwa2r3jr6LYqL1R1SHZRUWh3aFEsdXpZAPYJT4V9czrqPW+autSurB8GXJ3iC8TvSGZ6kLhOG1suVtK5823KA4KYufvdWz3j7OxRvazQdi6LapQtIXNp3UDVeE+pFEBnze35Z+vzM8ytm6f6ziO0yF4h+44jtMheIfuOI7TIbSVhp4T9wJpullIxAxYNM8Jh623z1YzV9bJs7EU6PFacOziWFpFltcdC4KhYnMdcLrP87FztzC3vbGHFGjcitbXKafX5uqbO1
KmY9WovYWk/Qak31J9vB57SNG3GgOK1QYjAevMdNXp4Nz0Ax7EIR07pu+WZLTmmNaWiGMq1yZMOarbchzYcymVe0xZwm4bWonbhI3VyOA0bpG7DmC93u5vRwe48vzYFvcBLMlLpv/KDxctjMutP6E7juN0CN6hO47jdAjeoTuO43QI7aWhF6R6ZyVSTgktVE8L9dXZbRclHTOjl7E2l6MwtZuFas6X5Via573nc5BZH22tmRfl0OdvWesPmPpZdOTni4LtBWnrsx6LsJVZRTWknGblRHQa74kjiob13VxOM22WxnbOp5aCzzJF10H5XGPSjjONKK6Nm221kZOmPDZ01NZVr5tyadkqUw56rSNsqc92SbGyZk7l7LXhxkrlIHfPCuaeRNQ98n1gZbxk95eoMb7AYy4t57GcBf6E7jiO0yF4h+44jtMheIfuOI7TIbSZhl6kgmZymHM6c2sBPpfnKwV55bmU1lwyL9Wf2VR0GrP0P5moWR3y8KFnTLn/2WdNec3adWdeb9qyxWzr7bF5vvw3vcgOJZen3vqyTJNLbjP27b507CKteJZp57NRLXlcImZJnDTxiMY5QtZMhcolGoOps66d2Z81bW5guRMLWhZD0pLz18XmllfHRs68njxlF84Z6X/SxjZ0yh66Z5kp93ADK9tFb8J6n/18xeapc7J4du6Bks8M2eugRPn7vD9/6WPazPn+PI8ioLkK9VKjgpAbEB97nvAndMdxnA7BO3THcZwOob0kl1mlDrbWBlhCyU13LpRYWsfWcop7kV1pgcSi9LP0+DGbCnbft+8w5Yfv+LYpX/qqhvXrG9/5q2bblgvON2WWBvLZnfQzkzK14oAlFbQsm3c49ZNlidycdQ6NUzBpc2EsrbA/iQPyx+Vf0EpfpSL5SGq8B30VSxmZrSD9TtmGgGoOaLq+5qQEko9I4qtnUhFHDu8x28YO7bWVkRVAedl6Uy51k8QyYiWZuLzUlIOwYvcv2+DjTBsJ6LzY0pjTMzntUKn9BZwuzHpinayESUYLM31OQJYHcc4MYH7wJ3THcZwOwTt0x3GcDsE7dMdxnA6hvTR0lpVygmxDk8qn1xXosUxhul3TQ6f1t6iPUyILUiy5romJSVN+9OHdprzjW98w5X3f3WrKYwON1LHNz73YbFuxyk69XrlyhT14UTooT5cuWk6PbpSd+t96OrTk7JNb+w7kYmFmkbcoJU56Ix2bpvZzGmPOWpWCi3NmraTnRpmp/7zsW0hWvTwexE4RJdLvef/InmutOmTK46cbabET/TZtcXLwkK2buhQJbez1EdveIkprrPbacoXSFsOwy5azbYTHNXiMhlMelZfTI/iLyf1TiTfzuF1DN4+ElxFcmK7Xn9Adx3E6BO/QHcdxOgTv0B3HcTqE9tLQmVzicOMN1iSL/zaRbljgZpmTz1hKbpVGylOCW4eCiPKEnzn4lCk/cs89pvzsjp22fsp/PfZoI1f44R/8wGy76PIXmvLS5VazLAWtdUXWXwtWkWu9HB4lc2uLJQcL65rmE7nYCkX2TE2cN85jAXRT68L5/DwWUTD2QISZaeMxOAfe1l3m9lShY5ENrGjN1levmnJtaMCUx08+feZ1dfiwPdbYoI2b28+EzTuPh06YMmvmpd7lplzptnnpWrEaeoCGTp3LIw94boC9jhHfIxqHC2L+5vLgBI8P8VJ+2Tx0yoHngY55cgLwJ3THcZwOwTt0x3GcDsE7dMdxnA6hvTT0Wfx5yeV88g6cqF6w9FrOC4b1WNZvg+a55rmV0woMTsZHx0x59/btpvzEffeacnVolEKxPhHVwYbd6f5t28y2Pa+42pTXnWeXAFux0uapc34/LwmWswpmWnjDzNZrJT/3oGhyAH2eB0JakLf+Ib8TXg6vaOyAPW9yyeLsY9PYHijndlNOOx2bU+hZCtaqjWVy7LQpVwePmXLtRCPXPCINHPUJeyyxGnd1xOa0S7fV52V4pa2ux9rvVilPXS
pLTLmUyfVWmgsQ0EBXRInouSXp6JYI9Y5ayyWy2/3tVtMGlERyHi+aL/wJ3XEcp0PwDt1xHKdDKOzQReRzItIvIrsz760SkTtF5In0/5Wt6nAcx3EWnplo6J8H8NcAvpB570MA7lLVT4jIh9LyB886mgI9Npt+m1sSLmfsMTtNPCIBLeSc0tzRWviAF/yZrJPf9FNP2LzzPd//vikf2/OojSW2nwd7dWSCOb3PLhG25wdbTfn5L36RKS+jvHTOK+brzlYv+aUBwTtk4izKO+c3Zq26271noVtKwL7btJ0HSthrmw9Fem1E+muJBNs4OzeB2lNM+da8vFmUa5u2XIvGbX0jJ0158vjTplwfaWjqUd2O9wQ0sBHD5rjX4xFTDilvvT5y3MZGeehht31WDHvs9nImL11iO5ZU53EL1shDvm72upZ46T9bPUJesi7m72HjHrJXexRxMJgXCp/QVfX7AE7S228HcEv6+hYA75ifcBzHcZy5MlcNfb2qHklfHwWwvtmOInK9iGwXke1jY2PNdnMcx3HOkrMeFNVkvbSmYomq3qSqV6nqVb29vc12cxzHcc6SueahHxORDap6REQ2AOifn3BYn221XuUsDczzYq8h5DzgXNp5a4/zlinQlHM8OGR1xJ0/tH4rT99/vylHE/aXTVgmjwoqY6LhzTExZHOMn/yxrXvPKx805fUbzzPlFcspLz13mQrNXOzWzH3I5e0W3tLZGqDPXZgUyhsuscc9e72wnzl7jlP9QZEXfOb4Kjy+w97/NM7BPt0169WiE1bXHiNP84mTtlwbbuSOx/XW6w6wn4mQ13pcs/p9NG7zzuuU5x71WQ09GrdjPFFX4yGxROt25nxluH2xnQpdR0prR8yyN89FyfnOZz7L/QWbQc3Cq78Vc31C/yaA69LX1wG4bX7CcRzHcebKTNIWvwzgRwCeJyKHROS9AD4B4E0i8gSAN6Zlx3EcZxEplFxU9domm94wz7E4juM4Z0F7ebkUeHHYdTtpE2mchb7dOTjT3P54aa3n29BYDqtWbW7ugSdsbviu795lyicO2HUbycYZstrq2pVl1jO6eqqh0csp650x8rSte+dd3zXly15i89KXLqW8YF6YMad7t75ORljML+LY8rNFOe9FWeqtLOxztPDiB4CY14gk4TrgHOZcMrktR3S8cuZscr7dMeW8c9U53dq2v8kRO+RVPWE9znmd0LjW8GuRkL9X7FHCxt503apWQ58ct14vYZfNkA7HbJ56OGI19a6Mt4uW+uyRqcGEwte89SIHOb+e1hb3ubkJtrviORHU9fIStnPEp/47juN0CN6hO47jdAjeoTuO43QI7aWhF/qzZF7nckKpJtbUi/TWXAVcH+Xf8uczeclC+uqp41az/Mmd3zPlIzseMOV40npMhz12QlbXpg2m3L3xOTaWg/vPvJ4YsTnsVSrv/77NgX/kda815XXrNpny6rWr7bEKcnUDztXN5uYWrb3K+dY8rpEXMe32XH2zgIKJqDLOcc6vj0r15cZ0uH3yepelzL6tfYpyYi4FG01Ynbp66ogpT548YPcftfMkEDc0+LBE3uwlMjjhuQF0XerkQ9RFOfHVMdLQB+0YTqnb5qlPZLxdtNd6pXeVV1BsrInzl9wWW3n5J9tpLCO35mh2O4258IDQPOFP6I7jOB2Cd+iO4zgdgnfojuM4HUKbaegFOcwZiUoD1ldbe0AXrTGa85Hh+nJJ0KTXZoojI5Nm2+N79pryrjv/2ZRHB8idmHJUK2sp73yd1dCDNTY3tzzR2B71W82xNmD1+ZHjR0155x13mvIlV7zYlJevIj9qakIhJeuyp0lWO87dAR4XKZg8UOS9nvdPaVmd/Sz7o+T0UVvM5Znn1ju15bw3EH8+ux4lGw/xsUl/j2z7Gx+293h8wM5FqJ62Yzz1mtW1sxYlcYmuS0D+JXz/2Ycmshp6XLXtUSat3j85Yr2ISn22PZeWNrxdusdsHnpUsWWErK
HTheTrGvE4B+Xc04ARn3ucGT8q0aBKzIvQ0hIHc8Wf0B3HcToE79Adx3E6BO/QHcdxOoT20tBzMncuqTlbmF1VrIGzhsVaL1fI+da0ORvNkcN2TcYdW+825f69do1QrVu/6pDyacO11qM8XGHza9kGorSqobmX160z26ojw3bnMath7v/hD0350TdZD7bznmPz0teut4tVhSwdg8m8w/vyPcndRdaxm+vO0x07l7feglwoVHeca042toB20BLnirfOYWb11kL7Uq53lfLIxwaeMeWJU7Z91iasJ7koecFII9dcSIeWkNZCZd/43PfU1q0xHYvy0iPydqkP2zGc+pLG+NF4tx1r6qH1R1GqmCJr3rnuxhZzzS0oGDcJM3noSg0qvyby/OBP6I7jOB2Cd+iO4zgdgnfojuM4HUJbaeh5v+vmf2/YX7jQSztgVZIPVuCHTsFxTvPIcEP7e+yhh8y2PXd+x5TrY1YnVLHaXnm91cwr660OLl20diLnb/d0n3ld2mhz1kv9lHM8afX7iePHTHnXd2xe+oWXXmrKq9ZY3TIknZLRzH3L5ZnnhjUK5hYUeLe01O8LiOmbEUSst7LgSho7ac0suvO5S85HPDvpIuccZPck75/qsPVqqQ5Yv/PqoM1LjyesRzlC276ynuchaeY5HTpn7mO92KUgHzuObHuUcZuHXid/9FrGH73Ua+djxDwWVbGeSNJLPjTUgAK6hwX2/YiU52REmV3pnrmXi+M4jtMK79Adx3E6hLaSXHi2dOtfyPQThrbm/1K1zjtUnsKcy6lr/fl9jz925vXOu6w97sCT+2xsdUphWt5typUtm005XL7MlPlnLafQaeaN0nL7M7Sy0co58aD9SauTNo1s/w9sGuMjV19tyizBrKM0xvzU/+Zpizz1HyE3AN6B7Rla784zvVsRKi/zxql8JD0o26Pa+iK2yyUrV2ErgMx25bhp+nx9wt7DyYFnqXzA7j9i0xRVbX1CkksQNgJgG2HQcnd8v0OysZCQpU8rsXAao46PmnI1tCmZQcZOt9RtvyfVPrKp6LJLNWqly9bVRRe6TlP92R6EGlTIqzNm5KWYJN04mkVjnAX+hO44jtMheIfuOI7TIXiH7jiO0yG0lYZenFXW0KF4SnHRX6Zc1TxrN58zZ7dTBacGrQ6540f3nXm9f+v3zbZowtqZBhV72TlNsbTapgJqmTRNShUrkVZs9NpuSoncsMWUa0dsClvthE1rnDhlp17vudvaGDz3RS8y5TWr19pYWVjMXGdegk5ymjnTeplARjmfM57580tuCUNebozSFllTzy1ZWGBTwAMA2c0BxV2dtO1pctDes+pxa49bI3tcrVJ7JJ2bl0erZ+1za1ZvZ9uJkFL9Qk7/ZRthGruqk41BFFuNHTU7XhCNNtIYdYJsAUbWmPJknx1PCqpWcw9LVmMHpZKyvYNy7jKNRWTbd0j3P+C2brM754w/oTuO43QI3qE7juN0CN6hO47jdAhtpaHHyhaTrDM2/v4Uyu0Fy5Hlqi7YHpNm+sguO71/79atZ14PPWunWodUWWmp1erK519oykG3XTor5Lxzzq9mW9iMBsp2puVldjp012ab8x4NWnvdaNJOC3/2wR2mvPtHNk/9siueb8rrz9toY8tci9wybDlZueAmFTQCXrotmMXjS95mgHYg3blELSzKLY/H3qtsl9t86T621q2NWh15/KS1xx2lckz2uKCcea2Q/S5ZUUQZ3bxetzoxX9NQ7XhPHNi6AtjtKrY+dkxQttul9lidaCxZN3ZqwGyTLmtjUV5iNfaobL+HYVeP/XyFxn/Y/oE7lZCXLcyeDE98cftcx3EcpwXeoTuO43QIhR26iGwRkbtFZI+IPCIi70vfXyUid4rIE+n/K4vqchzHcRaOmWjodQAfUNUHRWQpgAdE5E4A7wFwl6p+QkQ+BOBDAD54NsEErBXz9ozslMsT5so4x5krI/JWrbb+k8dPmPJDd9tc88PbHj
jzOqrRknIVq82V1ltL2/Ka1fbYJcrdzTm15kxP7P7S0B0D9iTpojxh0tBLR6z1Ko7anOXxAatTPnGv1dD3XPUKU179BntupXLGP4Nzsema55Z5470L7HbyAyez0C1ZzGX/nYJggqBofKDAOyjToicnxsyWySGrDVf7D9hQT9rttarVnXOr4ZHtK3K54I32pKSh13N2unQd6LqEPHbAQw3Kx6b8baWE7dGGhl6v2DkT1RH7jFketNvDbruUIy/9WKL5H8L5+uRLozzPIXMdY8o756Ua54vCJ3RVPaKqD6avhwHsBbAJwNsB3JLudguAdyxMiI7jOM5MmJWGLiIXAHgJgPsBrFfVqce5owDWN/nM9SKyXUS2j42NTbeL4ziOMw/MuEMXkSUA/hHAH6vqUHabJr+Tpv0Roao3qepVqnpVb2/vdLs4juM488CM8tBFpIykM/97Vf16+vYxEdmgqkdEZAOA/uY1zI2A/YezMc1iOTEgnwZclNI8NmF18D27H7bl791lyiNHG9ozWV2jtMrmv3ZdcD7twH9XC3KUc3p/8/1DGmuISZgu9VEuLi1Zx3npOmrLx3buMuWdW60X/GVXWq+XDdnxA+H7S3njOc96Wxb+fCvv9VnC7StXF4mgEXlnl6gR8KiHhuSfTv7YcZzJ/aY88skBm2c+duqAKdfG7XgPH7xepvxqyp+OyfQ8ygziRDSgE5J/eUBdCnuI83eDPZlC0vP5OrEhexQ1lnOMyRc+Ig+b2hLr3VIatd/LCm0Pu2jJOrpucfNhj6SYGYfJtdWFWYFuRlkuAuBmAHtV9VOZTd8EcF36+joAt81/eI7jOM5MmckT+qsA/BaAXSKyM33vvwD4BICvich7ARwE8GsLE6LjOI4zEwo7dFW9F80nWb9hfsNxHMdx5kpbebnkJKkiLw9Da8G1yKslIn+L/qM2H3vbv37blI/v3mMPl3nd1WfzWcukS8tqmx8b07qMp4bMmDNCEut6l1ndu7ti1yTVqHEtJtXmked8vUl169pE3i7HbE5zPGnXeBwbsvruvh9sNeU9r3mtKa+4puH13tNNazryPcyt88r3uLVnfZ6CyQjZQ/MCpOStruTrUSqTPzq3XfLODqh9Kmny8UQjd3zytG2L4yf2mzJrxVKleRB07LjMYzIsbJOvd7Z102Vh/yUuS86DvrUPfD2XWkFvUH2SmXNRH7ffm1rFauqlITuHorLElqvDNi+9RGuQSmh9aZQ88Esluq71bJvg814YEd1gIoXTAAAb10lEQVSn/juO43QI3qE7juN0CN6hO47jdAhtpaGzRUleU89sy0lSrAvyZ1snjY4O2/zq3du3m/Lj37X51RNDVp8Lyo1LGa5bZ7ZVyC9Faa3CUwNWhz7WbzXR3m6rkVd6bTkuWc+JsfHGjNyTtPapkki5epVdv3TJUqv/VzZuMuVocMSUQRp6/6NPmvLOO79ryhdf8cIzr88/365vWrhmKGm9RX4++Ts+G0N0Xo+U6ibNmz1zeCVVIR2b1+IEjaNMZnLJRwaeNtuqJ6g8btuiUrCs/ZZgy3X20FHODW/EXqvTNvK8CXJriNpYqjT2wN4t9dh6tQR0XXj/7DwLrVvPmoCuSzBqNfNwmMo99rsQkX96UCGvlxL5n/O5ZTT1OHfDF+ZZ2p/QHcdxOgTv0B3HcToE79Adx3E6hLbS0PnPC3tEx9n1KHMfLsjrzK0RakWtp5+y/hg77rzDlAf2P2UroBzp0vKG3lYi3bm01GpxExNWJzx29KjdPjlhyl3kpz4yZHPBR4esi+XIaGN7f7+tO6Irt2K5jU1Cq8dXNloTzYh84cdH7NhDlcYinrzvR6a89/WvOfN6NfnAL+kj87ZZp+qyp01BHnsLhO5vzJ4kpJKzD3hOM6X6Apr3UK3asYnxU438/+qxA2bb5JAdY4mrtq6Qj0UeJGx/TqFDhD3LtfnOnCZOC95Wqa0rzYuIyRsmohx4shFHCex50zjXSmA/G03aax
qN2vGe+unjpjzZa78L5WHyfinbPHX00XXtapGnzh44s2/cM8Kf0B3HcToE79Adx3E6BO/QHcdxOoT20tBzsO93ltaL8uV9YGx54KTV03Ztu9+UD9z3Y1OOxq1OHVJub2V9Q2sO19p81jqFMjJiPSc4lpDOrbvHauhCCz9Fdauhjo01NPTRMZub21VmAdUW48jqkEGv1RGD86wvTemUzeWtnbbnMrD/CVPeedfWM68vuuwKs+3SSy6xwcxWZqT86YDHTWZjQk05xSKsx9trHuc0df48+3jbz9dG7NjE5MChM6+rp2zeeUSeJUFsdWntsh45MeeK03WIyCso7yPf2E6nkfNAimqssdv2VKPtwgvmkjd7ieZsBCV7vFK2kbA/U2zbfkx56TXKSy8NWQ29tsSO8VR7B22ovX22zIb92XUJ8gOEC4I/oTuO43QI3qE7juN0CN6hO47jdAjtraG3kMnzXi3k+cz6adXmwx584nFTfugOm3d++qkDdAD7t69M64SGazeeeT1JAtngUZs3fOqEzX+dIJ172Qrrw7xi2UraTro2xXZyoKHHTozbulmyPPLss6Y8Qjnz7O3SvYo8o9fZPPX4tNV3J4dtLvBT9zfy0h971avMto3n2bqWLLPnye0hp/VSkZtPMAsPaiUtlz8aUXvje8DeQZxvXa/agZCx03a+wPjxxryHiSHbXqKanYegof0ah5S8zXnpfG4B5cxTKjkkow1n1zoF8nnjWmu9Hi5ynkqWgK4TX/eY5kkgo7FHpGFX6ERqE/a6YfykKYbsh07jGqXelVSm72HZjl1IqTHOJjSOkbOJnyf8Cd1xHKdD8A7dcRynQ/AO3XEcp0Noaw2dvZSzecaSW6OvZREn+q0OueMH95ry4e3bTLk2addlLPVYfay0aaMpnw4asY6Sn/noKHmtjFktL6C7UKtaXXJw2Oa/Ll1ptbul5IFyejCjkdKFmKzbnOWjx2g9StKCh0esBr6kz+bEr1xpdceANPb6gNUpT+9rrIe563t3m22XvPAFpnzZMqvf8zhGEewFxLnlrQg4iZ3q4rziXM0cKmnLtUGbAz1BHueTmbKO2PxpzqevlDjH2ZYj0p1DIY9xCrVEA1D1zMmwHzlr4jmJnLxfAg6VYmWPE+W1V2Ma28iMB+RuGd8DmmMRjZG3Sw+vMWq/G1295Je+1GrqUY/NSy+XG9ddeZ5CwVjCXPEndMdxnA7BO3THcZwOoa0kF5qBnFvOajbTZSdoqv5jex8x5d133GnK4yTJhBVaUmy1/bkVk9QwNNGwvA1pev1ySr+r120K5bJ1Vr4ZG7f2uaOj1pL22UOUarjEShOnTmV/StqLumyJTYmsks9rzmaArvkYpVgGNDV72frzTLlOaYzVoUb5wE+svcIj26825U0brM3A0hVkX5qDfv6zmMBerK3gNDNqnDkXWWqqAf2kjmpWuqqesvdwrN8u3VcbbqQxRnUr/7G1b0TtLSxbiaWkLLGwLQHQ6g3JXLcSX0OSQKKQ74GNrUzyENsI17S15MKSDTJSZ0jHyolmdKy4ar9n9WErD9a7yAqgz0oy45TmGPZQudKQQgNari7XYOYJf0J3HMfpELxDdxzH6RC8Q3ccx+kQ2kpDz6UdEVlZknW/OqUkPXvYapQ7v/8Du333Q6YckZYckNbcfZ7VuUdCm8YINNIBly21n63QlGCeBs5T+48N2HSpqGZjq9ds6uGhI1anHs3Y85ZK1uZ3SY+95YFyOpW9sKtorOBETLYFZIe6cu1aUy4dO2bK2TTG00/bZf92b91qype++EpTfv5SWhIs5OeRnBiMucLL1YWxvW4a2vbG161Oundt2E4jHzl50G4/cciUq2ONVNVY7LFKlOcasC5N88rVNoH8dHya+s96bzkzhT6odJttcWCvUw+n51FslDGJamSvU1C355r36yVrgUrmWpClgXJ6ZpXGBiL7PapPsL2uvWfVQduWwz77va31Wg2+VGmMbclSGudYoL
n//oTuOI7TIXiH7jiO0yEUdugi0i0i20TkIRF5RERuTN+/UETuF5F9IvJVEakU1eU4juMsHDPR0CcBXKOqIyJSBnCviHwbwPsBfFpVvyIinwXwXgCfOZtgOG+YdcnsMl4x6YDjw3Y6/aMPPWzKe7/7HVOun7bT6YMKLSm32i4/Vd5oNXSQxhpPNOIJKMe0p8vWvWKlzWnv7rK6ZOW0zSMuU254QHapdbLjHc7IkFW1Oe9jk7QkHR2bBdWQpo1LwDol6buUK64bbF56nMmprw/Ze3Dwfmu/8NgDD5ry5o02L33FGqvX5yEr1oK9zb48xbxM9sykgQZ03SYnbXscHTxiyuMDT5lybcTeQ82Mk5BhLITuCdvnRnSmAdnIstWvsiYf2DYTamMMqETXVKkuTrfm72k95px40swjnntCNsR0H7I2B0r7BqTnK/kE12gZwHLdzl2pT1hrgIkhO7ZVWmI19OpSykPva3zPS9009lDmuzo/FD6ha8LUrIhy+k8BXAPg1vT9WwC8Y0EidBzHcWbEjDR0EQlFZCeAfgB3AngSwGnVM+72hwBsavLZ60Vku4hsHxsbm24Xx3EcZx6YUYeuqpGqXglgM4CXA7hspgdQ1ZtU9SpVvaq3t7f4A47jOM6cmFUeuqqeFpG7AbwSwAoRKaVP6ZsBHD7bYFgzbyV61ietznfg4AFT3v39e0z5yK7dpsw57+ESm+Nc2UyaOeWW99Rs/izqDT1uhH6JLFluP7ukx3qvDJI96vik9ZhYusRqxStWW+1uNfnMDK5taH/H+m3uLJthbKKxgd4uq9ePkyfO5Ji97l29Nse+TNcJ5O1SzVgL16rWG2PokM3F3nX3XaZ88Qusve7yFXacg/Ox+XklZ/3aAvYgYR8QzuWuUf50fdRq4rVj+025epzy0CdsDrNmdO+4mzTubs47p8ZM8qzQ9pzHDU3qyGrmABB2ZXTz3BJxPI+BbIJJc0eJYq3xF7G5ZTaQX0bQeOqwBw1VzTbVfF2y32EAiMbs/I5al22vNbrH1cE1ptzd29g/7rbfi1LIY1fzw0yyXNaKyIr0dQ+ANwHYC+BuAO9Md7sOwG0LEqHjOI4zI2byhL4BwC2S/CkOAHxNVW8XkT0AviIiHwOwA8DNCxin4ziOU0Bhh66qDwN4yTTv70eipzuO4zhtQFt5ueQXw2qu/Q1SDvPu7dtN+fG7t9qqaEk56bPLRZXWWf0rWLfelin3t5t8oVcub+jaAyet1rb/SeuFLSTgsw/Nkj7S3JfaWCuch0w5rZV1jbx3zjk+etzqfsdoCbpSxe5fnbSx9dDA9irKO+cc+YC2V9Y3rmt9yPq8s0fNwW0/MeW9O3eY8sbnbDHlNevpnsEiOePv5gQh57Dbz0bkj16jZQUnBo6a8sixA6Y8ScubRVV77lLOLLcYsM83fS8Czjun3G0am6JUcXC+Pg8wZUs8dsCr+vEcDNbUhcYihIzlhe5aicvk15L1hmevdKXgeA5FTN/hOKZxMWqPddLUq4PW66XUZe/pxNJMHnrvStrXjlXNFz7133Ecp0PwDt1xHKdD8A7dcRynQ2gzDZ0gLXA8o4MfePwxs23vvTbvfGDfE6ac03YpN7y8weZjh+yfkktqtX8LV65qaGTlLqtpj4xbLS4gzZI9y5evtDnxPWWbsxqR7hhEthxmcn2XkSeNkK/MKGm/QnJq31KrOy7ts7H19trrFLOPeI/V3IPMda4M2HGQ2jGbhz56xPqfPHrPVlO+6Irnm/LKVaTnl9mzfuZ56DH7m5BPiFZtPn59zOqnYycOmPLEoJ2mUad1YgNepDQ79hGw6k0aOt809rgnnxlOv+Z1O9k/RTPfQx6HKJFPTO4KU9tkr/YwZJ2b1iAN7HcpLNNav9k8ePJa57VXlWIVfpylCxORD5KOkV/6sJ1PUu2zmnplqDGWVuu1Y3Rhjx0Xmy/8Cd1xHKdD8A7dcRynQ/AO3XEcp0NoLw29IE944ERDp9x1331m26
Ft1jsbddIVyXMkZKMw9gEZsR4mNbE+4iHpjGEm9mVlq1P3sv805QWXyrTeYI1ip7UPwZ4k7Bmd0Vi7SG8Nu63mvaRijx1TTnwY2nMpUc57ULc6Y0x+1gHFmvWFrtNYQXDa3qPaGOWlb7d56I89uNOUz7/oIlNefx572M8cbopKPt61SauBj54k/b+fvFtO2bx0RNavJwxYF89cZ87dZhGcmksc2HuopKkHynnnJD7bW2p8xgOwvk653QXLugZ0nhUl4xlq2rw/r0majZ1z2GPuA9iPh8s0Zqc18moPbR9QpzVHa4N2DKea8UOfoPVHS0vsvtNMDpgT/oTuOI7TIXiH7jiO0yF4h+44jtMhtJWGrrwGJGnPj+146MzrPZSTPEh+6EFIHhF10pnJ57v2jNVAawF7RpC+Rj4RQSb3W+nvZBBw/qutu0Z5wHXOA+a/u2FrDTUrAAckOoqQLwhVnVuHkUTNiPT/OmumYc5BxX4+myxMnvIxeWvz2MDosWdN+Yn7fmTKF11xhSnn127lvPTmBKQ7V2tWP62OWJ/5yX67Rugk+Z2zD4jSepZK557VwYOIxjlYA6d7rHRThPKpSRmGxFwfFTO3lL5GuTVlJeK2S3Xl3qA1aSlPvc655FRBKTNmE5EXC/cf4Dx0Zc8bHnugdYNrNI4yTuvtDllNvdzXGDep9Nk89IjKsBL7nPEndMdxnA7BO3THcZwOwTt0x3GcDqGtNHTJeUrY4n3//K0zr4/usWuERuStIaTlat3m/eKY9QWfOG71r5znNGuHuYTZxv4sK/NSqWzbEefWSWRtz+7PsXExq7lzvjz7Wed8QnL6KWuY9HH6QMx5yuzVkdFI45hy3mmdWJZb6xNWIz20w+al79m+zZQ3bHmOKV9EeeqtENKl40mrgY/3W2+WCSrXhqzGrpR3zjc1Jn03nsysKco6NcWavwcWzq/mL32c84Zpvi5Brv1Q0rrQPAT2as+nqdOYDEUfxPa6xSGPPzViVXBOvG0vIT2/5r6nlOPO/k08fySu2djqE9brpTrcWCe2Pn7KbIuqdkxmvvAndMdxnA7BO3THcZwOwTt0x3GcDqGtNPQiu+rjTzb8MWLyOwmXWI8S9nVgZVHi1jmq7Osd5Cyn2dMio6+R70dOkuR1FqluzhNmmwdOG85duMwBaZVElFhvbaGXAnk9lfV/Zb2fPs8+4tnxAk6njjgUvkUkco4csTr1vh/92JTXXnChKV908cw19Ihy5KNRq4FOjFpvluq4XUcWdVojlExOOF2/zvnbWTv0Gq8haj8b87qcuQEfXgeU8855TVKKLfO6RDdNaV4DN3ahY0uuPfDHyY8lsGsBcFsPM7nm3H7KdJHjmNbipfGdIGQRnbzXeY3SMo0X8HXPflGprqCHz2t+8Cd0x3GcDsE7dMdxnA7BO3THcZwOob00dPrzwpLqS3/xrWdeP/vci822idERU45YJiSfBvZLZ302ou3sh8051HGtEW3EublVOlbMx6K6yJOcxeY6eUqwL0hWt45yul7B+pEsx+fWjGQNleu3RdbJTZy5cQuuis6zZmMv91iv9u5l1l89ZN+aXOZxc+K61dBrtPaqkkYeluwYTrj8PFvutnnHfG6VfFb0mVf5ZQIon5rOMzcUpc31eQAQGqVhnbuS0cHZn5zbE48t8RqkSv4qJfYwz62falHaXsoOXXFddCHCEuehU5nnXLD+z+fO3kO0hm3fhkYf1fsc6zPUs+a59rPVRzAf+BO64zhOh+AduuM4TofQXpIL/9ynXzjX/Nu3nXk9PPw6s61KsoWyVECpWjzNNzcVm36mRrydfjJn7XlZnok4lYtT1Oi8awW6BatHHEs2bZLPg6eBRyQdscQSkXYVU+w8ZZ2vY8SySvZ4ubn9Lfadpsz2Dms3bjHli573PMwVftIJKzblrWvZervDFht795pNpqxkYZtPq+Wf/1kLZLstYomEvze8nGJOsuFUQrJ7pi9eNpUwJlmCbQdCkk
R46n+R3JO3b6bP52STzHY6D06BzFlgs4SSk57o83TsgCXikCSXlQ3riZ7Vtm1Weskv12bgzhl/Qnccx+kQvEN3HMfpEGbcoYtIKCI7ROT2tHyhiNwvIvtE5KsiUimqw3Ecx1k4ZqOhvw/AXgBTuWGfBPBpVf2KiHwWwHsBfOasouEpyaSBbbmwMXU7YF05l53HqV2kY/Ox87leFArbwNLxMvUHyqlaradiM0LH5un5PI1cOTUs83nWbnmudU5/pzXGctPxpUBT5zzFFrOh8+MWrccOopxlgj2XEunc3b02lZCtfVsRdvfZuigNEWW7vXetTaPlQZqAxeMSWSrUKeUus11AS53xNPKCdEwNWful9sQ6t9pns+wyhkHAqYH0PePp9HSPIrYZYM085GXiOHXQ7p+djc+2FDlbYR4b4LTD1i4YubTG3PMwp2T2NtpIV0htsXV25pyZ0RO6iGwG8MsA/i4tC4BrANya7nILgHcsRICO4zjOzJip5PIXAP4EjbkfqwGcVj2Tb3EIwKbpPigi14vIdhHZPjY2Nt0ujuM4zjxQ2KGLyFsB9KvqA3M5gKrepKpXqepVvb29c6nCcRzHmQEz0dBfBeBtIvIWAN1INPS/BLBCRErpU/pmAIdb1DFD+O8L6Yxmc+ulrcIiQaxQMCvQuUmwC43fKdfMmnfryHIWoVxhWHAu2VjE5sbmhEbQWHaZNvNYQoFem4+s+dgF15UbauC6Z3kLweMDBWMXWcpd1t5Ugo12+zI6eMTLxPFEB8pxpq08TyKbe14PycKAdOkgYN05569LobS2z5WALRMa++fsbXn5Q5r6zzntAd+S3PAS6/mcI0/nmrmSfMU51z+/HB63zdZjVTyfJOQ8dTp+PWsVzX3ZAiUYFtaqqh9W1c2qegGAdwH4nqr+BoC7Abwz3e06ALctSISO4zjOjDibPxMfBPB+EdmHRFO/eX5CchzHcebCrKb+q+pWAFvT1/sBvHz+Q3Icx3HmQlt5udxwww2LHYLjAAAuPfzZxQ7BcWaNT/13HMfpELxDdxzH6RC8Q3ccx+kQvEN3HMfpELxDdxzH6RC8Q3ccx+kQvEN3HMfpEETZTGEhDyZyHMBBAGsAnDhnB54dHtvcaNfY2jUuwGObKz+LsZ2vqmuLdjqnHfqZg4psV9WrzvmBZ4DHNjfaNbZ2jQvw2OaKx9Ycl1wcx3E6BO/QHcdxOoTF6tBvWqTjzgSPbW60a2ztGhfgsc0Vj60Ji6KhO47jOPOPSy6O4zgdwjnt0EXkzSLymIjsE5EPnctjN4nncyLSLyK7M++tEpE7ReSJ9P+VixDXFhG5W0T2iMgjIvK+NoqtW0S2ichDaWw3pu9fKCL3p/f2qyJSKaprAWMMRWSHiNzeTrGJyAER2SUiO0Vke/reot/TNI4VInKriDwqIntF5JXtEJuIPC+9XlP/hkTkj9shtjS+/5h+D3aLyJfT78eitbdz1qGLSAjgbwD8EoDLAVwrIpefq+M34fMA3kzvfQjAXap6CYC70vK5pg7gA6p6OYCrAfxBeq3aIbZJANeo6osBXAngzSJyNYBPAvi0ql4M4BSA9y5CbFO8D8DeTLmdYnu9ql6ZSW1rh3sKJOsE/6uqXgbgxUiu36LHpqqPpdfrSgA/B2AMwD+1Q2wisgnAHwG4SlVfgGTh2HdhMdubqp6TfwBeCeCOTPnDAD58ro7fIq4LAOzOlB8DsCF9vQHAY20Q420A3tRusQHoBfAggFcgmUxRmu5en+OYNiP5gl8D4HYka/e2S2wHAKyh9xb9ngJYDuAppGNq7RQbxfMLAH7YLrEB2ATgGQCrkCwWdDuAX1zM9nYuJZepk5/iUPpeu7FeVY+kr48CWL+YwYjIBQBeAuB+tElsqaSxE0A/gDsBPAngtKrW010W897+BYA/Ac4sVb8a7RObAviOiDwgIten77XDPb0QwHEA/zeVqv5ORPraJLYs7wLw5fT1osemqocB/BmApwEcATAI4A
EsYnvzQdEWaPIndtHSgERkCYB/BPDHqjqU3baYsalqpMlP4M1I1pW9bDHiYETkrQD6VfWBxY6lCa9W1ZcikR3/QERek924iPe0BOClAD6jqi8BMAqSMNrgu1AB8DYA/4+3LVZsqW7/diR/EDcC6ENewj2nnMsO/TCALZny5vS9duOYiGwAgPT//sUIQkTKSDrzv1fVr7dTbFOo6mkAdyP5WblCRKbWqF2se/sqAG8TkQMAvoJEdvnLNolt6okOqtqPRAd+Odrjnh4CcEhV70/LtyLp4Nshtil+CcCDqnosLbdDbG8E8JSqHlfVGoCvI2mDi9bezmWH/hMAl6QjwBUkP5++eQ6PP1O+CeC69PV1SPTrc4qICICbAexV1U+1WWxrRWRF+roHiba/F0nH/s7FjE1VP6yqm1X1AiTt63uq+hvtEJuI9InI0qnXSPTg3WiDe6qqRwE8IyLPS996A4A97RBbhmvRkFuA9ojtaQBXi0hv+p2dum6L197O8SDCWwA8jkRz/ci5HsSYJp4vI9G+akieUt6LRHO9C8ATAL4LYNUixPVqJD8hHwawM/33ljaJ7UUAdqSx7Qbw39P3LwKwDcA+JD+Luxb53r4OwO3tElsaw0Ppv0em2n873NM0jisBbE/v6zcArGyj2PoADABYnnmvXWK7EcCj6XfhiwC6FrO9+UxRx3GcDsEHRR3HcToE79Adx3E6BO/QHcdxOgTv0B3HcToE79Adx3E6BO/QHcdxOgTv0B3HcToE79Adx3E6hP8PKj7dYW+Z38oAAAAASUVORK5CYII=\n", 18 | "text/plain": [ 19 | "
" 20 | ] 21 | }, 22 | "metadata": { 23 | "needs_background": "light" 24 | }, 25 | "output_type": "display_data" 26 | }, 27 | { 28 | "name": "stdout", 29 | "output_type": "stream", 30 | "text": [ 31 | "target images\n" 32 | ] 33 | }, 34 | { 35 | "data": { 36 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAADaCAYAAAC2Arl5AAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJztnXuULVdd57/fqnNO9+28biJMVl6SMLxE1xjgDoKIZgI4ARFYig4MOglkEZ3lIxggBB0xmVEHZ1TUcYTF8AoOAuEhYWWJEiCIcTCS8DIPIAESSLh58Ah59O3uc0795o+qTu/93X1q9+nnofL7rHXXPfU4tX9VtWt3ne/+7e+mmcFxHMf53qfY6wAcx3Gc7cEbdMdxnI7gDbrjOE5H8AbdcRynI3iD7jiO0xG8QXccx+kI3qBvApJG8n6Sv7dNx7uP5MObz28j+buZsh+xHeXKcd9A8re3+7jNsXck5p2E5HUkT9vrOKYhrEeb+O6LSH54u2PaCUieRvLWCdsuap5NI9nb7dj2Gm/QN88Pm9lvAQDJk0ne3Hx+NckPhTuSvHHCuhcAgJkdbmZf2aW418XMftnM/ttexjBLmNkPmtnHt3qcpvHZ0HFInkXybc3nk5tG6b7m380kL8jEvOl6ZGbvMLOfDGL5nvkj3FybkwHAzH4HwA/uaUB7iDfo288nAPwoyRIASB4HoA/gcbLuEc2+zhZ4ELyF7TezwwG8EMBrSJ6hO2z1Guz0NXwQ3KOZwRv07edTqBvwU5vlpwK4AsAXZd2XzewbQPvbEMlXkjxI8hskX9JWMMkXk7yB5L0kv0Lyl4Jtp5G8leTLSd7ZHPPFwfYHpJ5g3/ODfZ9H8lkkv0Ty2yR/M/juE0l+kuTdzb5/TnIwIcZnkby+ifE2kq9oOZ+XBudzPcnHN+tvJvkqkp8HcD/JHskfIPnxJobrSD4nVybJh5C8rPnOt0n+A8kiKOPpzecLSV5C8u3NMa4jeSA4/uNJfqbZ9h6S72aLbLYZzOyTAK4D8ENNmUbyV0jeCODGYN0jms9HNfHeRfIWkv8lOLezSP4jydeR/BaAC5t1VzbbV180Ptf8OvgPJK8l+dPBOfdJfpPk4zTWoP68iuTtAN7arH8pyZuaa/1BkscH3/lTkl8neQ/Ja0g+Ndi2r6mf3yF5PYB/u42XtluYmf+b8h8AA/CIlu1XAPiN5vOfA3gJgN+TdW9Z73gA3gbgd5vPZwC4A/VDfBiAv2orG8BPAfjXAAjgJwAsAnh8s+00ACMA/xX1H5xnNduPXqfc1X1f0+z7UgB3NeUfgfon7SEApzT7PwHAkwD0AJwM4AYAL5twfgcBPLX5fPRqfOucy88BuA31w0vUv2ge1my7GcBnAZwEYF8T400AfhPAAMDpAO4F8Oi2MgH8dwBvaL7fR/2HlkEZT28+XwhgqblmZfO9f2q2DQDcAuDc5hg/A2Bl9VpuoY6d3Fy3XnP+T2nu19OCa3o5gGMA7FvnOr8dwKXN/ToZwJcAnN1sO6u5v7/WHH9fs+7KSXUcwPkA3h0sPxfAv0yIfbX+/AGAueb4pwP4JoDHN+v+F4BPBN/5BQDf18TzcgC3A5hvtr0WwD8053oSgGsB3LqRa7fXbcVu/9vzAL4X/2llX2f7hQD+uvn8OQCPRN04h+vOXO94iBvWtwB4bbDfo3JlSxwfAHBu8/k01I1wL9h+J4
AnrVPu6r5ls3xEU+6PBN+9BsDzJpT7stVzXef8vgbglwAcmYn971ZjX2fbzQBeEiw/tWkAimDdOwFc2FYm6j9ul653PZE26B8Jtj0WwKHm84+j/sPDYPuV2L4G/W4A30H9R/LX5Zqevl69RP1HZwXAY4NtvwTg483nswB8Tb57Ftob9ONR/5E8sll+L4DzJ8R+WlP+fLDuzQD+R7B8OIAhgJMnHOM7qPupAOArAM4Itp0Db9DX/eeSy87wCQA/RvIYAA81sxsB/D/U2voxqN+4N6KfHw/g68HyLW07k3wmyX9qftLejfqN8iHBLt8ys1GwvIj6wVqPb5nZuPl8qPn/jmD7odXvknxUI13cTvIeAL8v5Yb8bBPXLST/nuSTJ+x3EoAvT9gGxNfleABfN7MqWHcLgBMyZf5P1G/2H2YtUbV1Ot4efF4EMM9aGz4ewG3WtCTrxLZVHmJmR5vZD5jZn8m2SeU8BPWvhbC+hNdj6hitlgf/EcDPktwP4JkA3tHylbvMbClYPj6Mx8zuA/Ct1ZhIvqKR177b1N2jsFaHpnoOHsx4g74zfBJ1hXwp6ocAZnYPgG80675hZl/dwHEOom7YVvn+STuSnAPwPgB/COBYM9sP4G9Q/1zfaV4P4AsAHmlmR6KWPtYt18w+ZWbPBfCvUP+CuGTCMb+OWj6aRNiAfgPASasaccP3o35znlimmd1rZi83s4cDeA6A80g+rfVMUw4COIFkeL4nTdp5m5lklfpN1G+/DwvWPXA9Mt9t42LU0sjPAfikmd3Wsq8e/xthPCQPQy2x3Nbo5ecD+HnUEuB+AN/FWh3a8HPwYMcb9B3AzA4BuBrAeai1v1WubNZtNLvlEgBnkXwsyQUAv9Oy7wC1NnkXgBHJZwL4yZb9t5MjANwD4D6SjwHwn9fbieSAdb7zUWY2bL5TrbcvgDcBeAXJJ7DmESQfNmHfq1C/NZ/fdNadBuCnAbyrrUySz26OS9QNyLglnkl8svner7LunH0ugCdO2pl1x+2FU5YxFc0vq0sA/B7JI5rrdh6A/zvFYe4AoDntH0CtgZ+LWqOfhncCeDHJU5uXj98HcJWZ3Yy6/oxQ190eydcAODL47iUAXk3yaJInotb+nXXwBn3n+HvUb4RXBuv+oVm3oQbdzD4E4E8AfAy1NPCxln3vBfDrqCv/dwD8RwAf3Ezgm+AVTXn3Avg/AN7dsu8vAri5kWZ+GcCL1tvJzN6DuiP5r5rjfgB1p9h6+66gbsCfifrt9C8A/Ccz+0KmzEcC+AiA+1A3zH9hZlds4Hy17J8BcDZqvfsXAFwGYHnCV05C86tth/k1APej1p+vRH0d3zLF9y8EcDHrDKCfBx54UXkfgFMAvH+aYMzsIwB+u/n+QdS/vl7QbP47AH+LuuP2FtQd0KHEclGz/qsAPgzgL6cp+8HEao++MwUkl1A/sH9mZjsyutL53oXkVQDeYGZvlfUnArjEzH50byLbOs3b86PM7Bf2Opb1IPk7qH+NzAE4LOgHelDgDbrjbBGSP4F6nME3Ub/9vwHAw83s4J4Gts00HfqfAfCLZuaD4mYQl1wcZ+s8GnUq6t2oc6if38HG/KWoZZAPeWM+u/gbuuM4TkfY0hs6yTNIfrEZzttqHOQ4juPsLJt+Q2dtNPUlAM8AcCtqD5MXmtn12xee4ziOs1G24oL2RAA3WWPXSfJdqP0dJjboCwsLtn///i0U6TiO8+Dj4MGD3zSzh+b220qDfgLiXNFbAfyI7kTyHNTeCzjqqKNwzjnnbKFIx3GcBx8XXXTRhuwOdjzLxczeaGYHzOzAwsLCThfnOI7zoGUrDfptiP0VTkTsFeE4juPsIltp0D8F4JEkT2E9mcELsHtDzR3HcRxh0xq6mY1I/ipqH4YS9YQN121bZI7jOM5UbGmuPzP7G9QWrY7jOM4eM1OTt/7RH/2xrIkVodhgO86f13x6za6P7aoBqlu35uPLYiXLyfeDFYSWJZEnuf85x9
acpblsD8pLrksy7EBXtI9LSCNJLmTr96NvJhdxe0luqax4xSvPm/jde797d7RcjeN7ZFIhCsZ1tbJ42eQe63JRxNeiKCeroZXF35XQUGlllfPWQ8dzngBFUcjyWjNRSVla1xWz3PY4Nq0SpNbfONZ43zjusZSt56U1dTyKj10WyYWaWPZ6WFCCxqax7D/66KmOPQn3cnEcx+kI3qA7juN0BG/QHcdxOsJMaeiJnqa6Y6RTxyQaebYw/b7oa6YaqX69TU9r1+NTTb1s/X4qNYs2KNvDq6ayX3qsnC6oJz6d5p6U1qJDbl6Nn3SEzTMWsVh1a70Oel7J/qIF66uUyY0Zh8dLjp3pF9F7nrmHWvfb+oD6pdTVdrk+uYdVJfNNZHVpbRMm3+Ncm6A9VXoddRnj9li1PNXF2dJe7ZTLrb+hO47jdARv0B3HcTqCN+iO4zgdYaY0dFWakjzllrzlnHqqklUlGmmap96eOz5NDrXq7YmunTlv1WN7vVjH7Bfxcrj/ELEOqOc9Lfmcer1nuhiuyAj8uc1ZPT9Tn1pQDT0pS+tDZhxEEouOqki+H2roWnRGqM7mhufqsr7nrS2r3p6W3NIXUK+JY8mJ8ELbViYVRnP/2/vJ0pEu0/UX6XPKlvflnRqB4W/ojuM4HcEbdMdxnI7gDbrjOE5HmCkNPckjleU4T7RdhcrlW6t3xgYOoNFMPrzqzLlDZzTR1Aci1sxVtywZbC/igw0r1TA1lHZ9NZc/m5HB4x0y2vC0ZeeYqt8jk2eey0On1q9Mkn3SzxJ+Qf1QEj+V9mMnvkeaEy/HtyT2tfpXaV3Ue1Rp3dc+G32H1L4KCS25LlN4BeXGg6RfkEXtg9HYJY9dPHTCcTSqp+/Um7S/oTuO43QEb9Adx3E6wkxJLtDh96nmsrYtk9M2peFs7ldq8vOtNeUpk8mXWvO2y0GlDLcukpS5yamIpf5al+HJI7VanZIpjQOic80LIJmjZ35Ca/2ZRrJJLosebDpFJWs7nFjetkhTqmIkColKKomWKe9xsr/KRUVQ//q9QRyKxDIWd1tN50xtCNTyNj6ApgJaUtcny7BF8sxmUnYT2axd6lQ09vjYaiWxM4mL/obuOI7TEbxBdxzH6QjeoDuO43SE2dLQEzLTxkXbpktxs0ySZDrMvD22TGFti+uUHWt1paaKmdh6SipiqFNS9Pee6IBjPVbGmnXaaePSlM3JQ/+TbLpcWmPL1HtpWdPNIJZI2rn7nXWpaE9lTbtkuP7n9Y5dZLRiubDaJ6NT0vV7/Xi5v6ab9wf75Fixpj4exwdbWh5Gy8tL98exSvXTOlAm2rPo1NGFa69PSRqjoOmcmpKp/Ry5frywvuq+2rewXfgbuuM4TkfwBt1xHKcjeIPuOI7TEWZMQ8/ot2EOc17knqqsaW1h26xcs8fKDDFO8tB1f0n+TfLSQ/lVy1b9VPT38ZS537nNqS3x5J2z6nzuumb6ObYy9H+dPdqKWqes9vqa2siG93jy1Gbrx6L1If6+auh9ybee68ca+r65ubV952MNHUW877iS/h7R400tbZfiwyV57JT+oWSAQHA81cBbr+l6U9Chdfu40v2ne66jUHbIP9ff0B3HcTqCN+iO4zgdwRt0x3GcjjBTGnpixSnb45TT9hzQFNVTVcPUhFidrkqPNlmfzefAx/TKnizLuYzjXF71bkmsgG1yLKpB9go9z/jY4+RU5B0gqzVP1pJTLbjda2N62bG9T6b9m1v0uMmYA+XOxSLrVSGZmrE9P7/QsQiaZy71b35ufuJyOYj3TTxJpC7OFbI/RIMXVkRTr0bL8Qpqf8Lk+6R554n9baV1vd0ON9HcNd1fX4/DvqxM+7Nd+Bu64zhOR/AG3XEcpyNkG3SSbyF5J8lrg3XHkLyc5I3N/0fvbJiO4zhOjo1o6G8D8OcA3h6suwDAR83stSQvaJZfte3RtYieiTeCbM/lAef8q9MpyDIaWIvPdxKrerOU6tUSi3NJ3j
kysCUWMc8okhxn8Xxu9Z9eR87NhRbF0q79tuX6b45pRPRcjrEuTzldXiaXPKxelNzt1MNGfbvj5UGvJ8ux/8r8XKxrD+ZjDb2YW9t/LHXR1EdIYtV+sX3zc7JdvP6lPi4uSmK6lh/0NWj/j8m+mn+vYzL0niXeLUkbEB8uqRPB17WbS6/LdpFtG8zsEwC+LaufC+Di5vPFAJ63zXE5juM4U7JZDf1YMzvYfL4dwLGTdiR5DsmrSV69uLi4yeIcx3GcHFvuFLX6d8rE35dm9kYzO2BmBxYWFrZanOM4jjOBzeah30HyODM7SPI4AHduRzCJbp3useljp94v0+2fKl6TteR0jlDx0pDlvnpQiHZXJLm3otcmsU7O5078pql+6eIBrXM85uZWzeRfRzNAJom7SU9GtJTzYkl16/zMnpNILMezXuvt83Im+qz2k0h5ZXB8ndtSy07yzMt2b5bDRCPvS965asvLozWdXMdAmJiv6C3tSdmqgQ8GcVlEHEtl8RiMQ4fiX/mjYaiht3v7a11OXGG0KmfueYL2N4Xl56rmNrHZN/QPAjiz+XwmgEu3JxzHcRxns2wkbfGdAD4J4NEkbyV5NoDXAngGyRsBPL1ZdhzHcfaQrORiZi+csOlp2xyL4ziOswVmyssllwvO6LPqxu2apaE9lzvvxd2+IvaZUb2+Pdc2yTtXDVy1PPGg7g3ivOJqvOZ/YSPJ402QXN1C9fyct0tMLnc8voftB5t2ntjtJDsuIfmCmmlrvnSmPE17D3TzstD7r/7moplLfZgTzVzzzI2ajx0frwp8jiqpT+q1UkDrW9zElL04573oaf9SXLfLcn+8fxGf2+Kh+x74PBzeF23TuQB0OddHp/MOa1667l9p3nu4p/aDFDszSN+H/juO43QEb9Adx3E6gjfojuM4HWGmNHQlTfsMcnMzvh/Tu2fnfETal8N4VEKn5PVq3jBsJV5W/bWQ7w9iDbQUf4xytBbAsDoUH3ukubii+8mJa878aDydNpzOKdp2X9q9M3aVyRbzzYp2PVV9RNr88+vDTc7n1++qh/2gH+vK+2TezzmdB1T8z6tKxx5ov8lanbFxXFcrWTbEeeNYkbotF7Lo6ZiL+FwGA8mhPzwenBjmvS8ekjlDh7G5OiVPXb1fkjlHM5p54h2jGn04R4IcScdzbBf+hu44jtMRvEF3HMfpCN6gO47jdISZ1tBVQw3nzpxuBtEN+IConJbPHJalteOr77LOEUrxlNDC1fuFMgdkOYg1c5bx9vBcy1GsG45kflKIn3WcPbuOX7VcRtVbLdEdY8JbmptXMWNpn0UPrx47rd/V3G/ZnrGsyc4pmpTX4tevXj49ud+qmc/LciEa+3DU7sczqiTXfLymRRcW552XlD4ZqQ+h1woAWHV//P0qro9Fqb4ycV0fSI49uHau6iOzvCh9BZIzX1Wi/yfe7vpstN/ktvqq13iceU42i7+hO47jdARv0B3HcTqCN+iO4zgdYaY09DSVXLXntc95jXuLJL4P7T4PoeTa07xz1easPfdbfcLLvszDKHnEiXgYbGdP922fG1W1vkLOUzLop86vZSwstsY2vXWLfD/ZPoWGLtcl8QVJ5pOMV4wyRWk/idaZQeAjPjcX3/958WKZ2xfnZheisSd55pIjX4lBT+gFBABWrXmQs4x150I7VfS85dijYTwuQv3Oe3Pxs9Lvx/VXPc/DPPWyPDLaVhbxdVpavCdaXtYUeekfAGT+XT1Veey0vob7T9unt1n8Dd1xHKcjeIPuOI7TEbxBdxzH6QgzpaGrLt1qz5LRKDWHPSPXZg+Y6LmpKPbAx0LnCNVjV+p3Hv9dtUI8pPuqkYqumEzNGcQiOciF5LSPxWsj8bfI+IiMM0J3q1aYSTRP68OUovoWullSq5bM3Kl6T9XjXjVzEWT74gu+L9DN50Ujn9t3WFyWeP3oddN7WlWikcu4iBJxnWDgkV8mfQfqdy9e/3Jehej5Y4ltKPWxYj
yHaL8n15Vry+qVvnDY/MR9ASQns7yk11G8YJLrqnnqMWH/U+ob43nojuM4TgveoDuO43SEmZJckqHgrSNtNd0uc/DML+YNGMG2lhdLC8mYczmSpJFB7VAlTbEX/xQc6xR1quAE5VPkmf58/PNdU9hsKNYA8nO8oMhBIhdVbLcUDWWWdEo5TQ1sT4nLZ37lphybTDp9mWyX/QuZBk5jUzuIwSC+jnNzsVwwv7A2pH0wp5ax8b4quVWSJluNxWpChrwXkGnkZDh/JFXkLGOTe6hTzIm8KHVXJbzRSiy5qG1GWYZD/9UqJL7G8/tiSwSKVEXEcuTSkjynif9D/KwkkkzwbI01NTSRNrcHf0N3HMfpCN6gO47jdARv0B3HcTrCTGnoiaCW0a3jbbkh57k1mo/VXnZqkbu2TLEEVT1W0xTLfpxe1ZOh3aCmaslm+bscFyd/s3ux/ppYA4wkZa1d5k6sXQsJbjRFulY67V+7pp7tN8l63m4ejUXrg2rFOoR9Xobzz4m+2w8scNUeeaz9QclQfukHkaH8FO0XkqaoQ97DNFjtc0mvqXxXU3jVSlquU6E2szLF3Qg6jVyQUil9Tckz2hPbacpzZpK2KHp9YgU8VmsSScEM+85yaa/bhL+hO47jdARv0B3HcTqCN+iO4zgdYaY09GlGw6Y5yu32pumMYKKH6TDzRPtTzVy1vzVtTzXzUv9sSv5rb6B5xarl6dBtmfar0vLWNNdEI4faCkjZQ7ECGGu/Rrzck1h1Zq1KLBhaZ97KVIC2nPYN7T8NiXXEdFPI9UQz3zcX67XzC3Fu+UCmjauCHGrJIk+njBvFudrVKLaoLaRPZ1Dq9+MSxvK0hFa/aqmR66ZIrlvmOVWbDH3OR2OZUtHW6qshvg7Jcyf6eyn5/Pv2xX0VRRFbLCwfimNZWY7LGy3HVgEWdHYkUzP60H/HcRynDW/QHcdxOkK2QSd5EskrSF5P8jqS5zbrjyF5Ockbm/+P3vlwHcdxnElsREMfAXi5mX2a5BEAriF5OYCzAHzUzF5L8gIAFwB41VaCSfS5RCNtyeuctixZ1rK06J7mHYuWXAQ6t+bSqmZeiL6qXi2F2HKujGINdDiSvGLNa58PpqCDoOchGvpYl6tYF1SbWM1Tz2rqoa6osa0zWiBmqzrkFPsncr3k+st1VJ+PUvpFVCPvz4lmTrVEXruOY7mIaT9HXB9KmUqtKFSFb8+hV8Jno9JYsl4uWnK7D1FiNa1jD3Sqv8C3ZjSMNW1YfJ36/cPl0O1+UHNyj0i9TuKxlHQ3ra1Q75Yq2Xl7yL6hm9lBM/t08/leADcAOAHAcwFc3Ox2MYDn7UiEjuM4zoaYSkMneTKAxwG4CsCxZnaw2XQ7gGMnfOcckleTvHpxcXG9XRzHcZxtYMMNOsnDAbwPwMvMLJo+2+rfXev+njWzN5rZATM7sCCpWo7jOM72saE8dJJ91I35O8zs/c3qO0geZ2YHSR4H4M6tBpOkGSfTW0VBte6bzYfV7VqWmEwnqeSiiZXh8VVrK2UqrF68XEl+7PLifdHycKw58/HxE5/noPz0vOW6iU9IbxDHNha9FtVkLw0AKLS/QPOMA6/t6a1Ychp7+9iEvEYf7tmeD13K1Gpz87FPyL4F8ecZxNfZ1Ece2vcw2T+lklzs+Mipj0zrNID1HvGi6uKBbp746+iR1Nun0CenfVxD4kOvx0/aiMBfP3lK4ytTiVdLkXhFSX6+eOLo8lhGCKhvURWGo9eBOzMEaCNZLgTwZgA3mNkfB5s+CODM5vOZAC7d/vAcx3GcjbKRPxNPAfCLAP6F5Gebdb8J4LUALiF5NoBbAPz8zoToOI7jbIRsg25mV2Lyb9WnbW84juM4zmaZKS8X1VuVeErRjD6a0eJSPVbmI1SvllL1t1jHjLQ90dBLmSNUl0fj2HtjWeb1XJHlwdwR8fHE57ntOum8ixpr0Yt1x1K8YKqMr7PelVKuY9gdMK1FtOqDiTas/So56+
4WNF9aPecVS7Rh6WvI9fEknvbBuUh/jsaiPjLrBNdedsbzPl6R6ZPJ+uu0bl7n6Ll5YdeuBRHn/heUuVhlu4mmPpK+ipWVOJ9fx38MxTNHvdsjD6akz29nBun70H/HcZyO4A264zhOR/AG3XEcpyPMlIaekMwhOc18lO2HVg29VI9yzadOcuIn67fq1dKbi3Vp6vyTjPPIB7EkjqXl78YrRPvTWMKl1Bde+grizWCpPjOiz2tOvOSlFyJc65yjoyAg9bZOblmS06zzUWa040LPdeN56Kp5K5Wc99JSnJNM6tgByVvXOlFIPn9wnTS3G+K3MxqKt4/I+apDp9ch4/UextI2NgTreffr0TSRXDTypL5K30Ly3K3V10L8zcuy3YtlJJOzrgzje7gs8+sOV0QzXxH/80o9dtrmFN0jLxfHcRznewNv0B3HcTqCN+iO4zgdYbY19Cn8rvPzR2o+drxVPSdUG0y0Zl0OtOeiH2t30Hk91RdEfB366pcuWrDOKVqJ30oZ+IakV0U1S9Uk4zMdiCf0suTiwtTbRQXc+PiD/lpsqX20zI2qfQ06FkDumebIq97aS3xFJqN+OYTO8xofezxq19QrE/1VDNTn52Su1t6aHlyK3w5lrECFuJ9jPJQrO9b+IsmxL+LYE107qDOlatoqkstybh5YrX9j1bkldzycLxcAeoFu3hfPJL1uQ5k7VecZUA39kMwRuqJzhmrfhfYvWUv+vmvojuM4ThveoDuO43QEb9Adx3E6wkxp6Nl5PsN9M9pcooEnOcySd07NWdZ828SIOV4sA82zr/mv6tOhecGag6zLsd6mcycur4jeG5xr2Ve3bPW6RjuF+tKIH4bo+SKpJ5NtlsValZsbSI76WOa+TOZ5lb6EpMJIX4T0XfTKaTR00XZz+dQ612Usz8Jkns/0usXb5+bX5r/s92NPEvUV6sn2kvE9qtRzpLpfgouDteQmBhq6jg2QuqrzdCb1TbbqPUz81qWvQp+tXm9tWTXzJM9cNfSVuO9p6VB8XUaimUP80LU/IfG8b9PQdbDANuFv6I7jOB3BG3THcZyO4A264zhOR5gpDT0/J+TGSS2d479dpexQsmWOUADJ3z7xjSgCj3PVOFOv7PjYSf60auyiWw7FY+Le+6I5u7G4tKb9zc/FcfbnxJtdvVuSOUHlvHvx8She7RRtWOeIrAJ9NvEYKdS3I8ZEcNUc5p7M89nr6bltXkNv9whPlzVPvRIRfrQSLx9STT34vkk/RG9O5zeVfhI9T9GWTX3BdZ5YmSsz7BhRzTydlqDdL6cyvccy/sPiWPtFXF978tyF4wPGco1VM9e88kOimS/fH8/ly8SnSOprMk5CziW8D0l/kGvojuM4TgveoDuO43QEb9Adx3Eu4HuCAAAT1UlEQVQ6wkxp6JqvrfM6RiS+HcnR4qUiPtVE7zL1s5bi1Ee8p/OEzgf7tutjqpEPlyVHWfOxpWyNxcRXfBzMUToUz+biftGZJU99sO/waHluIF7tvdgvgz3Np854uwTXphStdyx5w5X4gqjve9FTXbLdcWeoWnMbiRSs4xLaJyxNxi0kmnq8fTnxhl+7h9rHMsf4mvcheepo19QL8d+3cawFV+LrXdlK8FkT7Nv7h9L5S2XMBNXDPH6u+r343Eye43Fw3YZj9WaJz0M186VD8XiOsXizDEQzL0utb4jRfrigf0rHvahfznbhb+iO4zgdwRt0x3GcjjBTkkua+jU5NUxlh2QaNvmZqT+XdIx6+hNaf97L8cTiNtw9TTts/6k2lp+4w+X4p2NlknooQ+Yr+akZTo+mP9crkXOWRdZYESkAh8swc5Fc1AqgGsUSj07VFt3ixGlXf47H29W+Ib2u7fYOVm68uuuxkmDTfL32zYkUoTJafE+XAxmuqkTmgNglq22FSFGlpPqpVcBoGMscw2FsFRDKZhVUUmuXDtJHWFJJRf7p9Q+LlwdxfVsSa+Dl4Nosq+SypEP5Y4mFsn+/p5
bJ0kaIalLKtIE9TWuM7nl7Wut24W/ojuM4HcEbdMdxnI7gDbrjOE5HmCkNXVO9LMkFY7R3hOirapWaDNvVqbO0LB0CLylvyZR1LZJYch6yPJDh+Sa3hSOZFk6y0ipJuRwH2uB4pNtiXXAkQ/fHoism6Zxqb6rWAZoaKOWHw/eTof+qBcuQdZSqU8uUX/HeoNSBwVTTfmlZ2gfTbtec1ZaT/oHJ0+kNR/JcLMYaOhBrxfPzkuY4iJfHcqVUY2c/1tTDZ6eqZPo7tSwQvV+nqJvvx5q4auSU4fTJNHFi/7C4stbXcL+kIVZL8fJA7sn8ID5P7XcbyblIE5O0Mb1C+9UC+wYIOyOh+xu64zhOV/AG3XEcpyNkG3SS8yT/meTnSF5H8qJm/SkkryJ5E8l3U4d8OY7jOLvKRjT0ZQCnm9l9JPsAriT5IQDnAXidmb2L5BsAnA3g9VsLp324dKg75XKUS51yTrS8QvVXHTYu+q1OfVWINWurYppMs6Vacfy3cDAnmqncJbWJLYpYh4zyhuW8x6JJLi3dGy0fOhTnIFfjWK8djeOydJq3QvRXG6rGGhxPxgLo9GZGtX2FbFf7XLEzleX+FPa5qudrWem7UMZWdkrKSI/VaQPj5aVDS7I9vq46FmBuENs7qBasFgplGdzz5J5IjnwR3++e6s5SP1DItHHxVqxI/V1eiYfnh9PGDSXvvJAxF3oP1cq3hNYnscjWNkan4yu0zgRlISbjMrxpsjXcalaNgvvNPwNwOoD3NusvBvC8HYnQcRzH2RAbemUhWZL8LIA7AVwO4MsA7jZ7IAXiVgAnTPjuOSSvJnn14uLiers4juM428CGGnQzG5vZqQBOBPBEAI/ZaAFm9kYzO2BmBxYWFvJfcBzHcTbFVHnoZnY3ySsAPBnAfpK95i39RAC3bTWYXO4ug78/6t2ifhZlYncqyypiqR4mWl8pOauJF0cQe2q9m3iryqLks6rTpi5DSToUgljiY/eT8469M/Rv/PJIvFjEklRzd/tiK2z9WGMdhdPnVdq5kPNLkc2yrNPn9SWnucjYGodon0tq16yxb/jQGyLU8PUeqj/PUPopKrmOY526TyxsB33RjrXuB3p+Wag1b7xMyrRtia4sud5yLlrfFmW6xaVF8WcJcs2LoeaNt1v5jpOp92IGpWro4vWiUya2WC7vkGSesJEsl4eS3N983gfgGQBuAHAFgOc3u50J4NKdCtJxHMfJs5E39OMAXMzaCq8AcImZXUbyegDvIvm7AD4D4M07GKfjOI6TIdugm9nnATxunfVfQa2nO47jODPATHm5pMmZkz2kC9Hu1O6cmuOcHFk1c/Ecn4vzrVMme5yrx8i0OadJDrRqybq95VhJvr7oyoMi7qhO/M6XxOulUu+XWOPsiS+I+qUXK2vXXX0+kvM0HTsgecSZfpBkiruReqBMRn3jNbbENybxYhc1M1MH0inrJnv/p50q8fYV0dTVk2RssQ69sC++znOaDR6Mwegn4zF0Wj+ZIk76LZbF22dZrvOhlfge3SOZceP749j7wfcHSX2QxcyDqPn66XMsHkst/jt18eH2yfd3O/Gh/47jOB3BG3THcZyO4A264zhOR5gpDT3NQ1ddMsjNLfVvkcz/p/qX+leLj7fOs5gK3+26eBR6RvNONHDJE1YtLtk/8aFRPb8Id473jY+czs0quuDCfLy8shyXPZK84ZH60mtfR29Ngx2PZI5HqKau3tqSByz3MJlXVr1epvByGU+rcSb1Ld6ceL1r9dJ5ZqNljVvmSpW+gyQ/WrTh0P+kLjr2R6mq+FkYB30d47Fc075q6DFDuS5D8eNfWo59aJI8c9HQS3lWwvEnpc6BoGMk+jpnbXwuY/V7ytSXtIrEx49C1WdWvtx+FTeOv6E7juN0BG/QHcdxOoI36I7jOB1htjR0WVYNqxd4KfR0XsSMhqlacSnz/xXq5ZL4jEhuruqW4a5JPrV6abT7yqh3h55LW36+bk+1/r
Zc2fWQ+U9FM1W/87Esq186GfiEFKLdjuJlUPxUVHjO6tzaVzGesF9K6n8uoehy4o2tHuNy3ROvf9Ff2+ajzMxRqzdd+xbMtF9E7mEV69rDQDff15e6N5JxBnK/R3Je9y3FfvuJZr4cb++Jrt1Tz6agTugcCImmnowVEP/8st1/v73jDMmDGj3HSb/Zzri7+Bu64zhOR/AG3XEcpyN4g+44jtMRZkpDV9TjvBdoVKX4TaiGnuqrOrem5t6KvprJE05162DXFl/k9b+gvh+i9ekcpKqhZ/LcW7dp2YJBc3P1nugXxKtb8oxDz51c30Fy1aRfI7lsur/6go+HusdE0pz2eHuimSfn0n78dMRF25ylGe+epF9E0fojHjdS9ceVaOrheIGBeK+L/z1lWTX0xcV4DtuR1A+Kt0svefDicw1j15x37XPR/qIiMXuZMu886xUT+PFkvrtd+Bu64zhOR/AG3XEcpyN4g+44jtMRZkpDz3lxRJKY6mUZnVrnWdQU57Fod+pPnfVAiRPRZZvmAbeGmhVgU6VuGoFO9dj2uTITZVd9QpKce9lf/TLCY+d0RD12pfdQYpV+kGok85kON+6Hrl7qSV3MmLGoP4+i17mQcw2Pn/QGJT7y7c+CjmNIn6tY59a5NodB34OJvj4s5bmRsQXjSuYQXYnzzPWeJfMa6HyqLb5HI70MPfW0ES8X7R9Sv6fsc6g58bJDcJ9yY1O2C39DdxzH6QjeoDuO43QEb9Adx3E6woxp6Lk1oc93rH8lHuK5Y2W039THQRbbysvFoj4eUPGv3YsjFdEnl5f8xZbvptetXUNX3Tp34dpUyMwtSEjn3VQfmTineVn81FdWNq6h90UQTTzGZX/10k77NWJy2jCDTp5E85ZjV+ojX7U/G+oLX4r/ivYPjIJk76EI1Usr8TXXsvU5Gai3D4Sk/rT4o2isGrccayR9KpSOtJLtfujar5L49STnHt7D3FiB7cHf0B3HcTqCN+iO4zgdwRt0x3GcjjBTGrrKs5pvOwqEJ/V5yalSuZzSZP+Mx7T+LYz2t8m5suseK+MhkRPcksO37pvJj27xeV9/TdK5EC0mvQPBuSf9EJl7mnQ9qJYspiSqYy+vbNzLpT/QR6P9JvTkURpJ3R1n5o1N5hwNS87MR5mkxCe+8dono9ct7ovQMRZhnjrFM1y90/WOl6Xm17f78WvZZrospYXXRvshxPe9V8Z56GKHnuTAq796m1cLkLZXcZ57+7iW7cLf0B3HcTqCN+iO4zgdYaYkl+Tnug5BDn7CjOXXT/4vUy6NrP3bhfw+U3koDl1/4mYiycpB7WmMTGxCA1lDf+qZDI9XeSgTmyVSU3sKZpt0lfy8zso5bSUBpQxhV4vbKWagS2QwPVayu0oBck9Mfu4n57ISyx5F8PM9uSqJXe508uN65r3xkgyRL9ZkFpOpGyu1PK7ULlmG9hexZEPqcPxMnRAJJkz3rMSWIJmSLpN2qPurDGbpXJAS6+TlxL5hhzQXf0N3HMfpCN6gO47jdIQNN+gkS5KfIXlZs3wKyatI3kTy3SQHuWM4juM4O8c0Gvq5AG4AcGSz/AcAXmdm7yL5BgBnA3j9VoLJac1hupamL423ORFIh3qXapXZj/9+hVqxJWJtuwaeWti2a6BJWlmLJjoyTeUTXVlT4uTYI53+TrRgnYLOdD4zEQvDwyUWtKozJ/pp+5D25HBytGTKsRbm5vbJd9vTOZPMQa2fcm6aHlqVcUplL9LQp7RenXJcedqnEzcLxjXdeywWtD2tT6IzF4jtFnq9WEMvkrIy75iaxhhcp6pSjRyy3K6Rq6au+2f7mzTUYPfE4HgvNXSSJwL4KQBvapYJ4HQA7212uRjA83YiQMdxHGdjbFRy+RMA52PtD833Abjb7AF3pFsBnLDeF0meQ/JqklcvLi6ut4vjOI6zDWQbdJLPBnCnmV2zmQLM7I1mdsDMDiwsLGzmEI7jOM4G2IiG/hQAzyH5LADzqD
X0PwWwn2SveUs/EcBtWw+nPX87zDnNS1DTDdvV4/Uk73wwPx8t9wfxcqixmti6qoBGOXZiw6n6quViVU198pBjHR6vtq+VaJTDodgvjGNNVPPaVYPXsQTRuevY62SqPr2HOv1dfJ31Oulw+/EUwuWRR+6Py06Gz6N1WWoAxnpd5FzLw0S/bYkttR3Oaezt563XpUpsDNbu09Ki2AQMY+3fZICIjpHoSx46dPq7pH8Jsixrgr6JQi2PM9MrWmLB0R570h+UGw8QbM+PHdgesm/oZvZqMzvRzE4G8AIAHzOzFwG4AsDzm93OBHDpjkToOI7jbIit5KG/CsB5JG9Cram/eXtCchzHcTbDVEP/zezjAD7efP4KgCduf0iO4zjOZpgpL5dXvPK8vQ7BcQAAy8u7m5GlmvteoupuqHr3464jHDGvex+JrtLP77Ln+NB/x3GcjuANuuM4TkfwBt1xHKcjeIPuOI7TEbxBdxzH6QjeoDuO43QEb9Adx3E6AtWPYEcLI+8CcAuAhwD45q4VPB0e2+aY1dhmNS7AY9ssD8bYHmZmD83ttKsN+gOFkleb2YFdL3gDeGybY1Zjm9W4AI9ts3hsk3HJxXEcpyN4g+44jtMR9qpBf+MelbsRPLbNMauxzWpcgMe2WTy2CeyJhu44juNsPy65OI7jdIRdbdBJnkHyiyRvInnBbpY9IZ63kLyT5LXBumNIXk7yxub/o/cgrpNIXkHyepLXkTx3hmKbJ/nPJD/XxHZRs/4Uklc19/bdJAe7HVsQY0nyMyQvm6XYSN5M8l9Ifpbk1c26Pb+nTRz7Sb6X5BdI3kDyybMQG8lHN9dr9d89JF82C7E18f1G8xxcS/KdzfOxZ/Vt1xp0kiWA/w3gmQAeC+CFJB+7W+VP4G0AzpB1FwD4qJk9EsBHm+XdZgTg5Wb2WABPAvArzbWahdiWAZxuZj8M4FQAZ5B8EoA/APA6M3sEgO8AOHsPYlvlXAA3BMuzFNu/M7NTg9S2WbinQD1P8N+a2WMA/DDq67fnsZnZF5vrdSqAJwBYBPDXsxAbyRMA/DqAA2b2QwBK1NN07l19M7Nd+QfgyQD+Llh+NYBX71b5LXGdDODaYPmLAI5rPh8H4IszEOOlAJ4xa7EBWADwaQA/gnowRW+9e73LMZ2I+gE/HcBlqOdrmJXYbgbwEFm35/cUwFEAvoqmT22WYpN4fhLAP85KbABOAPB1AMegnizoMgD/fi/r225KLqsnv8qtzbpZ41gzO9h8vh3AsXsZDMmTATwOwFWYkdgaSeOzAO4EcDmALwO428xWJ97Zy3v7JwDOB1A1y9+H2YnNAHyY5DUkz2nWzcI9PQXAXQDe2khVbyJ52IzEFvICAO9sPu95bGZ2G4A/BPA1AAcBfBfANdjD+uadoi1Y/Sd2z9KASB4O4H0AXmZm94Tb9jI2Mxtb/RP4RNTzyj5mL+JQSD4bwJ1mds1exzKBHzOzx6OWHX+F5I+HG/fwnvYAPB7A683scQDuh0gYM/AsDAA8B8B7dNtexdbo9s9F/QfxeACHIZVwd5XdbNBvA3BSsHxis27WuIPkcQDQ/H/nXgRBso+6MX+Hmb1/lmJbxczuBnAF6p+V+0muzlG7V/f2KQCeQ/JmAO9CLbv86YzEtvpGBzO7E7UO/ETMxj29FcCtZnZVs/xe1A38LMS2yjMBfNrM7miWZyG2pwP4qpndZWZDAO9HXQf3rL7tZoP+KQCPbHqAB6h/Pn1wF8vfKB8EcGbz+UzU+vWuQpIA3gzgBjP74xmL7aEk9zef96HW9m9A3bA/fy9jM7NXm9mJZnYy6vr1MTN70SzERvIwkkesfkatB1+LGbinZnY7gK+TfHSz6mkArp+F2AJeiDW5BZiN2L4G4EkkF5pndvW67V192+VOhGcB+BJqzfW3drsTY5143ola+xqifks5G7Xm+lEANwL4CIBj9iCuH0P9E/LzAD7b/HvWjMT2bwB8pontWgCvadY/HMA/A7
gJ9c/iuT2+t6cBuGxWYmti+Fzz77rV+j8L97SJ41QAVzf39QMAjp6h2A4D8C0ARwXrZiW2iwB8oXkW/hLA3F7WNx8p6jiO0xG8U9RxHKcjeIPuOI7TEbxBdxzH6QjeoDuO43QEb9Adx3E6gjfojuM4HcEbdMdxnI7gDbrjOE5H+P8rjnxwqlk5WwAAAABJRU5ErkJggg==\n", 37 | "text/plain": [ 38 | "
" 39 | ] 40 | }, 41 | "metadata": { 42 | "needs_background": "light" 43 | }, 44 | "output_type": "display_data" 45 | }, 46 | { 47 | "name": "stdout", 48 | "output_type": "stream", 49 | "text": [ 50 | "target images legacy\n" 51 | ] 52 | }, 53 | { 54 | "data": { 55 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAbQAAADaCAYAAADOvbfYAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAIABJREFUeJztnXm8JVV173+/OsMdu/t2N00LNNAM0kwKKCIOz2dwiBojPp9JRKOQYMhgIkaN40sCeSYfNYlGn8YhoKBR0KBGQpwQIcaJSUHGlgYaep5vd9/xTOv9Ufv2qbXqnjr39nTOrbu+n8/93LNq17Bq17Cr9q/W2hQROI7jOM5cJ+q0A47jOI5zMPAGzXEcx8kF3qA5juM4ucAbNMdxHCcXeIPmOI7j5AJv0BzHcZxc0LZBIykkR0n+7eFw6HBDcoTkiYdgvST5eZK7SN5xsNd/MCC5MhzfYovy95G8aibzOp2F5AMkX9hpP/IKybUkX9yi7BqSHwi//wfJ1fu5jePC/agwy+V+TPKc/dmmWc8lJH90oOtpse59dTRNWQ/Jh0kuyygfIVlttY4pZvqGdpaIvD+sfCXJtYmNrSW5leRAYtqbSd42w3V3FBEZFJHHDsGqnw/gJQBWiMh5h2D9hxwR+TsReXOn/TgUkHwdydUkd4fz91qSCzPmn3qwGwl/Vx0mP1veCJKIyBkictss131YHlLsPaPNvC9M3jtIXkjyHpJ7SG4n+QOSJxwqXw8UEflvEVm1n8s+Ge5HdQAgeRvJzOuP5G8C2Csiv0hMO5HkTST3hjr7cKJsCclvhHP5CZKv3x9fZ0q4xk7JmkdEJgF8DsB7EstdQvKaqXIRGQTwpXbbO1hdjgUAlx+kdeWF4wGsFZHR2S7ob0EHlxb1+WMAzxORRQBOBFAE0K7hOCvccAa7paHP87lC8mQAXwDwDgCLAJwA4JMA6p30q8v4IwBfnDJIlgHcDOAHAJ4CYAWAf03M/0kAFQDLAbwBwKdInnEoHCN5EoCCiPxqBrN/GcDFJHsOZJsHq0H7ewDvJDk0XSHJ55K8MzwN30nyua1WFN743kvywdBd93mSvaFscXjy2BbKbiK5IrHsJSQfC08mj5N8Q5h+Msn/CtvfTvIriWUkXDhTT8OfJPmfYR23h4MyNe9LE0/1/xzWmbqxkbwUwFUAnhOe5q8M0/+A5BqSO0neSPJo48dbSD4C4JFp1vltkn9qpt1L8jXh96kkbw7rXk3ytxPz9ZH8x/BEtpvkj0j2JVb1BpJPhrp5f2K5K0gmL4bktheRvJrkJpIbSH6Aoaskq76nWc+rGHeXDYcn0tPC9HeTvMHM+zGSH5/B9i9h3A3zUZI7AFxhtysi60Rke2JSHcDJrfycDWE/PkDyJ+H4/wfJpSS/xPhN406SKxPzT3vsSF6G+Kbzrqn1hOlrQ/38EsAoySITXWIkC4y7ix8N5/HdJI+dxtUfhv/DYf3PIRmR/D/hXNlK8gskF4X1Tr3RXdzifDmP5F1hH7eQ/MgBVuXZAB4XkVskZq+IfE1Engzbu4LkDSS/Evbz5yTPSvhzNMmvMb5fPE7yrYmyiOR7Qh3tIPlVkksS5W8MdbAjuY/tYPyGuT5hryX5FyR/yfit6GqSyxlfz3tJfp/k4jDvvjdmxhLP/wDwiXBsPjHNtsoALgDwX4nJlwDYKCIfEZFREZkQkV+
G+QcA/G8AfykiIyLyIwA3Anhji335e8b3ikXmmhpmfJ99bpi+LpwrF5tV/AaAbyXsxWxxbxWR9QB2ATh/ZjXdAhHJ/AMgAE7OKF8L4MUAvg7gA2HamwHcFn4vCY6+EfFT8EXBXpqxvvsBHBuW/XFivUsRH5B+AAsA/BuAfw9lAwD2AFgV7KMAnBF+Xwfg/Ygb8F4Az59u/wBcA2AHgPOCr18CcH0oOyKs/zWh7HIAVQBvbrEflwD4UcK+AMB2AM8A0APg/wH4ofHj5rDPfdOs700AfpywTwcwHNY1AGAdgN8Lvp0TtnV6mPeTAG4DcAzit+nnhuVWhu3+C4A+AGcBmARwWljuCgD/Gn5PzVsM9jcAfCZs+0gAdwD4w3b1bfbpFACjiLtmSwDeBWANgDLiN9wxAAvCvAUAmwCcP4PtXwKgBuDPQn2k6jPM93wAu8N+jQJ4aZvrYCOAzYjP9ZUZ894W9uMkxG8WDwL4FeLrpIj4rePzifM269hdg3D+m2vkHsTXSF/yOgy//wLAfQBWAWA4rqnrzR7TMO33g+8nAhgM+/pFM3+r8+WnAN4Yfg9OHav9/Qs+TAD4KIBfAzBoyq9AfA2+Npw/7wTwePgdAbgbwF+F8+lEAI8B+PWw7OUAfob4DaYnnEvXJa6tEQAvCGUfCefTi1v4ue8YAXghgPXmWP0M8RvRMQC2Avh5OM69iN+k/rrFNXYbWtxfQvkZAEbNtM8hfmP7djiPbgPwtFB2DoAxM/87AfxH8p4V6u5fAHwXQL+5pn4P8bX4AQBPIr639AB4KYC9yWME4DuJ+r4GLe6tiflvBPDWjP3dV88t55nBSTXTBu1MxDeHZdAN2hsB3GGW+SmASzLW90cJ+xUAHm0x79kAdiVuDMOIG7w+M98XAHwWsZ7Vcv9ChV1ltv1w+P0mAD9NlBHxjWimDdrVAD6csAcRX4wrE35ckFHPCxDfdI8P9t8C+Fz4/TsA/tvM/xkAfx1OznHE3WWtbmgrEtPuAPC6xA0j1aAhvjgnk/WM+EHl1nb1bbb/lwC+mrAjABsAvDDYPwLwpvD7JVPnwQy2fwmAJ9ud24lljwn7ekrGPC9AfGMcAvAJxA9dxRbz3gbg/Qn7HwF8O2H/JoB72h27xDk5XYP2+9Ndh+H3agAXzmC/9x3TxLRbAPxJwl4VztPiDM6XHwK4EsARM637Gfh4PoCvAtiGuHG7BuGmGY7Zz8z5swnxm82z7TkA4L1oPkg8BOBFibKjEvv5V0jcbBHfWyrY/wbtDQn7awA+lbD/DM2HcnU80L5Bex6AzWba98J+vDycr3+BuCEvh3qx8/8BmvfqSwDcDuArwc9yYr5LADySsJ8WfF2emLYDwNnhd3+wexJ1NO29NTHtSwD+KmN/99Vzq7+D9tm+iNwP4CYkhL3A0QCeMNOeQHwTacU6M+/RAECyn+RnQlfAHsQX0BDJgsRa1e8g7lPeFF5tTw3reBfiBugOxt1bv5+x7c2J32OIG56p/djnl8Q1vB4zR9WDiIwgPuDJelhnF0rMvxfAfwJ4XZh0EZoi6fEAnh26AoZJDiPuqnoK4jfLXgCPZvjWap9bcTzip+BNie19BvGbEjDz+rZ10kBcB1N18uWwnwDw+mDPZPtARl1aRGQD4qfJ6zPm+aGIVERkGPHT/QkATstY7ZbE7/Fp7Kk6zjp2WWTt37HIPt5Z2Ov1CTQfYqZodb5civit++HQrfrK/fRhHyLyMxH5bRFZhviG/ALEb/9TJK/JBuJr8mjE9Xq0qdf3JfbjeADfSJQ9hLjbeTnS1/oo4mt1f5npuTBbdiF+0E0yjvhB+tsiUgHwD4h7tk5D/NZpP3xaiPjNaoqTAVwI4MqwfBLrN0Sk1b68CMBPJP7gY4p295kFiF9K9puDHYf214hb/ORNeiPikyfJcYifxFuR7O8/LqwDiMXhVQCeLSILEZ/cQHzzhIh8V0Regvhp62HEr80Qkc0i8gcicjSAPwTwzwy62Sz
YhLh7It4gyaQ9A1Q9hP7spdD1IG3WcR2Ai0g+B3EjdWuYvg7Af4nIUOJvUET+GHG3wwTi7q+DxTrEb0hHJLa3UETOAGZV37ZOiPjYT9XJvwF4IWOd9H+h2aBlbj/Qri4tRcyujgThvDtAso7d1HZabT9rnTPZl+nWYa/X4xB3NW2ZZl69MpFHROQixA8WHwJwAxNfPx8oInIn4i7QMxOT990rSEaIr8mNiOvgcVOvC0TkFWH2dQBebsp7w8PNJrPefsTX6uGm3Tm8BvFlk7zf/jJjuV8BKJJ8amLaWQAeSNgPIe5W/DbJ/fpaM/AKaP1sJpwG4N4D2ObBbdBEZA3i19W3JiZ/C8ApJF8fxM7fQdxHfVPGqt5CckUQad8f1gnELfg4YhF7CeIGFAAQhNYLwwU0ifhppBHKfovNj0d2IT7gjVnu3n8CeBrJVzP+suwtaP8UneQ6AL9H8mzGX/L8HYDbRWTtLNbxLcQ3m78B8JXwRArEdXlKELJL4e9ZJE8L83wOwEeCSF5gLP7v99dEIrIJcdfGP5JcGAT2k0j+T2BW9f1VAL9B8kUkS4gfWCYB/CRsZxvibpfPI745PTST7c8Ekm8geVz4fTziLtxbWsx7RjhuBZKDiLsQNyC++A+UlsculG9BrP/MhqsA/F+ST2XM00lOd0Pehvi4JNd/HYA/J3lC2Ne/Q3yu1dptlOTvklwWzrmpJ+3UcWf88dU1M1jf8xl/SHVksE8F8CrEmtQUzyT5mnBNvg3x+fMzxF2hexl/PNMXjt2ZJJ8Vlvs0gL8Nxx4kl5G8MJTdAOCVYftlxNdbJ5JQZB778Ab1fQDJ8/5fAZxP8sWMP5J6G+KH2ofCm+bXAfwNyQGSz0P8NvZFs97rEL/Nfp+JDzdmycsR3zNnRGiUl0Af21lzKA7S3yDucwYAiMgOAK9EfLPagbg76pWivzCzfBnxDesxxF0nU59T/xNiMXo74h3/TmKZCMDbET+d7UR8kKeecp8F4HaSI4iFx8tllrFnwd/fAvDhsB+nA7gL8QU0k+W/j1gz+hriJ8CT0Ow+nKkPk4hPyBej+bYy1R350rC+qQ8XPoRYrAVi4fc+AHcirpsP4cCP/ZsQ98s/iLjRugHxmzEww/oWkdUAfhfxBzLbEWtLv2m6Or5s93cG258JpwP4CclRxB8erUbcuwBg31el7wvmcsQPVXsQn5MrEZ/D1Vlsb1pmcOyuBnB66Br79xmu9iOIHxa+F3y+GvF1Y7c9hrgh/3FY//loflTwQ8QfWEwg1nlmwssAPBCO+8cQa2vj08x3LOI6b8cw4gbsvrDO7yD+GOjDiXm+iVhqmPrw7DUiUpU4luuVCF9KIj6/rkL8kQ6CfzcC+B7JvYjvJ88GABF5APED65cRX6u7MDt54WDxMQCvZfxF98dbzPMZJL5STFxTn0bs94UAXpW4pv4E8bmwFfHDyx+H/VWIyLWI7+U/YOKL3JlA8kwAIxK+Rp0hrwdwreminDUMYluWcxOIb9ofF5G/PJCNzcihOADzzaEB6FpC98Z6xILvre3mdxxn36fm9wJ4+oE+EJC8AvEHXb97MHybq5D8MYA/lURwdSch+S7EcsC7Zjh/D+Jz4gUisrVF+RbEuvmHReTKVutqG5QpIr0zcWo+QPLXEX8FNI746yHiAF+RHWc+Ed4Usj6mcWaJiDyv0z4Y1gL4j5nOHN7KTm1TPm2MsyW3WQYOEc9B3A0x1dX16hZdKo7jOPMSEflqp7bdtsvRcRzHceYCXT18DMmXMU4FtIakjW9zHMdxnH107Rta+OT0V4gzRKxH/IXeRSLyYEcdcxzHcbqSbtbQzgOwZupzb5LXI/4EtWWD1t/fL0NDM9IOHcdxnMCmTZu2h2wsc5pubtCOgU7vsx4hTiQJ44zklwHAokWLcNlllx0e7xzHcXLClVdeadMTzkm6WkObCSLyWRE5V0TO7e/v77Q7juM4Tofo5gZtA3ROxxXIzv/oOI7jzGO6uUG
7E8BTQ065MuLUQDd22CfHcRynS+laDU1EaoxHaP4u4gHlPjddzjHHcRzHAbq4QQMAEfkWZj8EgeM4jjMP6eoG7WBzxRVXdNoF56Bj4ygTo5yI7lEfnygo+4kNeszGxx/TY2JWR/RYg4PFCWUfvUyPT3jkYj1Cy8ev/8a0HgPA9dd+Qdn1uh6dpVLVeXsnJ/VYixMVXd4w8aTWTkebZsWf6mHeSiU90lBfr07vWtLViolJnQ2uYnytNYyvCeUjKuqVDZoPvUoFfUxrkzo5+2RF27W6Hb1G71ucY7xJVlxuuiy7ju38dluk9SXbhuh9Sa6/XTTxO9759jZz5INu1tAcx3EcZ8Z4g+Y4juPkAm/QHMdxnFwwrzQ0J3/s3mnGA2RTQ+kfWKyKJqsLlD06oZWHyXpd2ZWa1mPqFa2hFffo58EGjZiURWSeJRvZ+kkUtdFb2m0vSxtqq8C0K2+79cz5lZXSnUw9tNnvlG0mpLRFWy8HUE/t8uLOtjyy50iG792Zkffw429ojuM4Ti7wBs1xHMfJBd6gOY7jOLnANTRnTtPXq3WxkdHm7y1bdRzZWGW7sierurwR6bi0erRb2f39A7q8WFL2jtFRzJSGFXsiq5nZYquhmeVzKqLYJ25bDwdKJ8eDbLftRsPG0LWLLWxycGtp7uBvaI7jOE4u8AbNcRzHyQXeoDmO4zi5wDU0Z05T7utTtiRiy7bu0LkZi9Ca2anHLFL2mSuPV/aWJ7QSsWGrzlFYNrn5yoWZKxc2HspIaG3jrWx5nSZ+ywosNi1gpnyTHd2VzjmYta706kgbT9V6y3Y/29WLnSIyqx1PcSAKW8q3NhOspmbPkTQ5FU4PAH9DcxzHcXKBN2iO4zhOLvAuR2duY7veEl86V+r6ea0+qlNZNSa2KLvHdAmVJvVn08Wq7uIpRbq8YYaAyUJgPsm23XCpT7TtJ9xm8XYTMnqnotQQKtl9iA27LhtjkOpitMXZXW2qLNWfaYdgsc/k7ZJhWdqUJ3xLbyv7mGWsatrlUxVlim36M0kMJ9PJ8INuwt/QHMdxnFzgDZrjOI6TC7xBcxzHcXKBa2hOruhNfMZfKulP+ndP6uFdNk5Uld1vJIytk3o4mb0VXV4paM2sp7c8Yz9rda2J0ehQUVQ0tvYFKU3Nfs7eRp9RRdnajRjRrGHtVIjAzHUpS1opzA4RoFminW11rrR+aFydxafxbYf0scO/ZBen1mB1smRpetvzU1PzNzTHcRwnF3iD5jiO4+QCb9Acx3GcXOAampMr+vqaz2glI2ntndQi2N5xPdwLTRxZxegvUU+PsutG0xgeHZuxn/W6XrbQRtuJClr/s/FZKTsVS2b0l8T8ad2I2XZq/lnaGXFpKa+t7mRThpn5C2ZCzQ7BYn1vo2ul9MUM2mpo7ZZvU8/ZWufMNdM8429ojuM4Ti7wBs1xHMfJBd6gOY7jOLnANTQnVyjpQEwMkhElSuVeZfcWtU7F6oSya3Z5EzvWUy7NxlWF1ePSulO7nIWHjnaaWFvSwWNmhUkNzRYaDaxhdCpTb4UoO9djWoaahe7Vdsfb7Gf23G23Z2PL5mekWTb+huY4juPkAm/QHMdxnFzQFQ0ayc+R3Ery/sS0JSRvJvlI+L+4kz46juM43U23aGjXAPgEgC8kpr0HwC0i8kGS7wn2uzvg29xj1PSu24G+du5W5sTW7cqOKmbcsKrOeZjM5de7YKEuM8FfxSFdjmXGHjEayYLssbXakkwsaDQ0O54UTD7FWtXMbzbeW9SXS8mUT47MPA4ta2wrAKgbhaRhNTbauDJk2jYuLaknpmO9pve5JTb+ythRpLVJe0yl1txgZB1vZOugtrwQaR2zWMj2rZGK3zM5MxO2zWmZyrVoSrNyL05LOvAsuzxzVfNTYeuKNzQR+SGAnWbyhQCuDb+vBfDqw+qU4ziOM6foigatBctFZFP4vRnA8ulmInkZybtI3jU2NvMnZMdxHCdfdHODtg+J35+nfYc
Wkc+KyLkicm5/f/9h9sxxHMfpFrpFQ5uOLSSPEpFNJI8CsLXTDnUrjS3aHt64TdkT4yPKru7RGlp9dK+yWTMDfzW0rpDMM1gq63VLUWtohR6tz5UH9BhlPdSxYP3HHat9Oco8c7U5Y5nQpqxOVTA2TX5Em/dvYtJoh6K1xVpJ+9aozVy3KBRSAVHatCkIabUis8K2+Rj3f1wvSyqfopm/ZLTGgh1zzOxbLXFcrJZYm9R1bvejYI6ZkUVRpPalaHyTyGq2+lxv1JuxiNWaLrP5ONPHYH7qWJ2km9/QbgRwcfh9MYBvdtAXx3Ecp8vpigaN5HUAfgpgFcn1JC8F8EEALyH5CIAXB9txHMdxpqUruhxF5KIWRS86rI44juM4c5auaNAcpD95MeEwSMYdPblZFW17dIOyF5R1rFdtr9aC+st6XK/SESYHYaTHBbNECd1iclTnO5wY2aPshRxUdmWrFvx6Iu3LzvV634ZW6Xj6wqojtTN9WpMjm/uycOEiVbZg4RJlD+8aVna1pvWayZqNU9P7OjmhD1JvSe9LFpWKPiYlk0fSjofWSGloprxh49JsDFSWLpatBdklU9kW6/XMcpuDsGGErmQcWtHomjD7VTd6bnpsNuNLZPMfGpHNiJE0my8Vk9eG3TOrqVnhMztu7UCZnyOeZdMVXY6O4ziOc6B4g+Y4juPkAm/QHMdxnFzgGlqnsBrZhNEZxrTGsndzMwxv+5r7VdmiJTqWq+ekE5Qtu7S207NYP8cUtcyl9brpSGgD5arW2/qNXbJxQQ8/oewt969WdrWufasN630dGjtC2eUzz1E2Fx697/fipVoz27pLx8xt2ar1vuqkdnayYg+SHXtLlxYLM487snJLSYyWY9YdwY7rZfIl2pyEqRyFs8gDmJqSraKJzbdodK56rWZsm58xuW82VlDru5Gx7XhoqbyUKc3N2HWjF9vx1hKabDEyt0ujt9kAu7aSmqtgBx1/Q3Mcx3FygTdojuM4Ti7wBs1xHMfJBa6hHS5MfzomTYf6Lh3j1Ni0URevX7fv9xGLh1RZ/xkrlR0t0vFXfYv1YaZOt3hAZ4GVEaxtx6sqlbUGsmhMaxi7Vm9S9sBOParQ7jse1MtX9BZ7zm7uTDSkB2ioRXrHK1W9bMPIKZWKnlAompyFPVqbLBdnXpE0eozYfIewcWXZ43q1t60HzfU3zHOt9YU2B2YqHMvE61nNzMSpidleITmGntHIinZ8PVPHkdGtGkYzq1VNXtK61qbrxrYxctXEOVAq6fOlUDDH0BwzMbkfG/YecIDjqWWpou3yceYVf0NzHMdxcoE3aI7jOE4u8AbNcRzHyQWuoR0uTJd2dVL37e9Zq7WjiS3rlL1oeVMXW3TGSXplQzaQzGggJlXjYcXkHORSnWdy8PwzlF3gccqeWH27sseHtdZYv/shvb5EPsbSOc9WZUf0aW1xR7+Oads+PqrXbWKSTAgTGtAVK7PQ0GpG60nFoaVCv2zcmZnf5jC0GpzVd1Sh1cy0HRlhNILWyBpVo1uZceEKRaM19ur8m6Weph0ZXSplR1Z30tsq2nqt6xhNMXpevarzd1Ymx5U9PtE8J0SMhmbG/isW9H6K6HqqitHrbPwesknF2GXO7Rqa4ziO48xZvEFzHMdxcoE3aI7jOE4ucA3tcDGm++Yn1z6p7J0bdY7D5UctVfbAmSc3jaEFB9e3w4nRQLBM639952ubXKXskfu0htYzvF7Zo/c0c0MOFPVYasesfKaed6nW1LYPa42ERhMr95kYqT6tmVSjVKBRS1L5DhsmNsvqoG3yKabijjLizmKrOYPNA1k0GlrB5PZsmHHjGnWzfKlf2WUzZl25R+taTOpkdnwyq8FCk/LNlNuYNzvgma3XHqMX1qWpdVdrVivUWyvC1qM+fxo08Xi0OTBt7KH2JTVBpv0Z27PI3Zkn/A3NcRzHyQXeoDmO4zi5wLscDxW6hxETTwwre+/aNcruN0O6LDxTDwEzp7s
ZZ4P+kh69561U9lIZUHb9Pv1Z/94dj+z7Hf1qgypbMnSisgcH9WfXhZLu0ilU9THpKdpuQTNUDmaO7RKyKZvqptw+ebbrYWzXRZlcY8F0w0W2tyqVwkl3lRUiMzxRr+5yLJV1ue12ric+Z280tC8lE3Ni98vW26T5DL9huvHqdl/M8DFFs/PFREiB0A6DY9ZtQgLK9nwx+21cS4Ug2KOe+mw/o8uxfRBAPvE3NMdxHCcXeIPmOI7j5AJv0BzHcZxc4BrawaJqzB368/Lq1jFlD/boIWDKxz1Fr2BQf1KuOszNkCoTu3cru0eMXpIaOl5rIBww6s+A1qlQ1poIosOYS2uZ/uS7/9k6NdbkmP5sP7q/aUfbt+myPXpIntIxus77B7T+AtHHcMVyrWMOmOFkFpgUT3eiNamhQIygYj/xPnDs5+9NO6IZXiiV48vqMUZrNEO8lI1mFhW1LlY3KaCqtabgXDeaWWT8jkwerroZqqZSManRoKlVTdqumvalZNbfmwgxKJX0slLX26oZPc7eXttGVsya+amTZeFvaI7jOE4u8AbNcRzHyQXeoDmO4zi5wDW0g4XpEJ8c1/3ro2bYk4HiEr14aYVegenLT6ZKmnxCp8ka36i1ock9OgiuIWaYk4Iu7zlGa2a9q05RdmHZ8dq3WUVcHWSWaZPL9DNZI+FadfdWVVZLxKgBwNAKrUE87Vi9bon0UDdLl+ljVipo7ainYIbx+TZaUrATbFyaLTZ2aniYdJ6kNmYyDk3fBgpmOJm63brx1Q6jwsiuTy/eMMOq1KtNfbkRmeFeCsY2vtk4NFtTtlqKJRPbZfbdhBYiYnPfRMxRSQWSmdRWqfRkZv7WmaxaTHDa4W9ojuM4Ti7wBs1xHMfJBR1v0EgeS/JWkg+SfIDk5WH6EpI3k3wk/F/cbl2O4zjO/KUbNLQagHeIyM9JLgBwN8mbAVwC4BYR+SDJ9wB4D4B3d9DPbCo6nqWyZ4+yJya0brXgSKOhDWkdwoosTOSN4/Beva0nNym7b/eosmmG94iKWs+rVEzc0GIdI9e36GjtTI+ODeskXKBj5OrlpuZSN5IH9+i4tMFdetl+k4MQg/oZqmiGQcECo5nRLJ+BmJglO0yKDSW0YhCZ/SzaNuYpoecU7LmWEvDsysy2zQqM/JseKsdcK/VKs9yEoYFi4+f0yhui7cmqGdKlR5/bBRM7SNghYbRvySFgCkYzbRR03GK9XjG20fdMLsfI1FvB1HM9neyxJQce05YPOv6GJiKbROTn4fdeAA8BOAbAhQCuDbNdC+DVnfHQcRzHmQt0vEFLQnIlgHMA3A5guYhMvXpsBrC8xTKXkbyL5F1jY2PTzeI4juPMA7qmQSM5COBrAN4mIqq/TuJxE6Z94RaRz4rIuSKvR4meAAAaXUlEQVRybn//zLt8HMdxnHzRDRoaSJYQN2ZfEpGvh8lbSB4lIptIHgVga+s1dAFjuj+9Nqw1NGloXat/sdHQlmT3gidDf6JJE8ezW2+7Z0z35duoJhr9ZnSPie2xi9tYnS6itFIHj8ni5ot8Y6uOz5MxrWNGda1hcOBkZTdgHpCKVuCxMVD7r2SIOUYiVjMz+ktKJMuePxUDlXg+tGXpcbfMuoxIViqbODSzPpsv0Y5JxsR4bNLQ26rUTL2Y51qBjUszel5kx1PT1E1sWc1odo2EHRVMnsmCvi5s3Jlkh52lZmgXdpaONXQsHX9DY3yUrgbwkIh8JFF0I4CLw++LAXzzcPvmOI7jzB264dH7eQDeCOA+kveEae8D8EEAXyV5KYAnAPx2h/xzHMdx5gAdb9BE5Edo3VfzosPpi+M4jjN36XiDlhvMuExS0XqN1M1YW7RjJ80Csy6K1ucgWgSThonlMdtmKjdja72l6+jVeSil3IwNE7tfJv4JRq/hwqXKLvRqG/06fg/Q49KhMfNjakO9ItpjZPQYE6fGqI1mZvMITpMNMv0rbKvN8W6n5dichw0TEGhTIhYKzVi
xOrUGVq3ac1nrb5FJFNlIaZFGp7L1arZHk+uxkbGvdl2zxx6jRkZp++VVyTyV2zquoTmO4zjOwcAbNMdxHCcXeIPmOI7j5ALX0A4WRhgQk8fN2g1jp8bHSm9g369xHXeOgaP0nCMbte5QGdc5CCOTv67Qq+2ektEDzfhVSSVq0shSYuyFhzrW3cb6JGKmirS58szMJj4KNZ0js25imiojurxQ0NplMdJ5A7OwefqiYtHYRtsxmkhkYr2spma1n6zx0tIxa9bWSzZs7JbRuSJzW7H7KmLjuZpxbDbksVo18Z1GjyuZsdci6mMqDb28UMfMFUw9py7EqOl7o2bHO8vWvFJj1sHqpGb+NvVu9cBsLbOLde9DiL+hOY7jOLnAGzTHcRwnF3iD5jiO4+QC19AOEmLy27Foc8iZ8a7qs+zjTnSX14cWqqJFJz9D2TvXbDfO6byR9b06LWbhCK0FREfo+as9WnfYlMgV+ej63aqsslnrSkcvsGNCGZsmps7Yq1adruxy2YhyNa3fFBIxelHFxP4Z7aZuBt+qGY2ssluP3jDRo+PQ+vp1vRUGZ66h2ZyFjZo+H0x4FQolPcFKPw0z5l3dxqmZHIWSsO2paDWyuphjZrUcoyVNTOp6r5rwPIE+n3qipsZbLul19RT0wg2j51qNbLDPXIemImtGu540sYkTFX0+lQcW7PtdNPXAhs3laQfgs2O3mfg8oy1a2+bQtCpdUlNL301cQ3Mcx3GcOYs3aI7jOE4u8AbNcRzHyQWuoR0k2Kf1k8LgoC6XYWXX9ph8iru0PlNY3KvLE7n8ep9yot74cq15HXGk6XvvG9L2js3KrDZ26W0NGg3NBOfsHW/6vnGz0Zk2aA1tsqQ1ipoZF65SXafshYt0DsyTTjpO2eWyrhds2qZM7t6x77fA6C29i/WypQXKrJtx4BpGF+3p0/MXy3rfolnEoaXGwrIxckZvKZhnz1JB+1ajiYmi1XPM6hMTrFTDNnkj03Foup4LZR33WLD5Fm3MJprnUL1idCUzdl/DCn5WlzL7bfetbsdPs/VaNjFyiTg0qZoYt5rNiZodz9dod8xnrXvNT50sC39DcxzHcXKBN2iO4zhOLvAGzXEcx8kFrqEdLHqMhtavNbSCGM1jp8kLuFHrWIXBI5UtCR2i1qPHACuL1iywWJejbp5bFmlNrSBG+ylru2ximI7ub+pYpZOOUWXRsfqUGijpfv5qRWtu43u1L4uGtMbWU2yjS42MKLM+3sxzyT6thzSWPkXZ5aOOV3ZhkR7/rNfoN1G/ifWq62OIWQxxZ8ccs2OI2aSCkdHMIpv3z+heqfJUXsGkL5rIrstoaPWaiVOrmHoqmuULVlsy2iOb4qVJCwnC5l7UGmrBnNpVI4RW69k6V9n4VjC+J8cerFe1zt2wY7Ol6s3EodWzT5D0uHRtYPLnPB0AzeBvaI7jOE4u8AbNcRzHyQXeoDmO4zi5wDW0g4WJKenp1TkHZaHWiljT8VONCZN/sX6Enj8xXtaC5VpfQ1nrDOnBs4yv1Jpbo2r1Fr1AwQTzLOlrli85pt0zkQ2AMrkYK09V5kRFx+tF1FokhvX2Jp7cqezqSDO3ZO8SHTdWOmqFsuslWw9aj2NBa0XVXTpGrlHXWlCxYHJHZmA1s0bD2HYBo8+IzdXYJnYsNdZWQqFJ6WvmkKa0IZqx/cw4YXWTHzEq6+WLRT1/MaGzVmvm3CvqY1Tu0edPyYhokdG5UNXHjKberWQWGSG0MpGIkTNjs9k4sqIZ087WeVrX1KSPUXYcm54/e93zBX9DcxzHcXKBN2iO4zhOLvAux4NFr3426D9Wp1kqjOjP0YdXr1U2t29U9pGTunsMvYkhY3r05+iz7mAwXSPF4mCLGQ8BtjvUDE3T22O6U/eYoUju0121E488qVef6DaMhnQKr8YiPexO1XSljU2Y7s66DjFoTJjuKvNJebFkhxNpje2uSqWTMsOcVE2apd6SDmdIdVelbNNdpba
vt5UatsSYRTMsCs2wKlLT5zoL+nwtFHW9lUrNbsSBku0+17btDq+b7tKiSV1lh92hGX6GpouyNq7Pt0ZiKByp2/RippvXdhHW7Gf6NnTCdsfbD/XtMTTFan02DMTOOz/wNzTHcRwnF3iD5jiO4+QCb9Acx3GcXOAa2qFCfzGO8nH68+P+bVrf2bFNa0Pj996r7N6nn7TvN4eMzoRZDFvS7ZhsUvKwHupm7wN3KrswslbZCxc1NZf+Y5freZfqgzIRaY1jvKI/8S7VtJ5SFp12Ser2c/TUx/atSQ0Xkz2UiC1tNMyUlMDSTp9J+JrS28zn5+Y20YjMZ/sNE67Q0PU6OWmGEKpbvad5zEq95lP3qGFsvSm72w2jczXEhhToY1yd0HpfdVzrppIMp7BVaoemsTpoww6TY3VTsz7zfpHS6GjrLTG0jVh9b35+uO9vaI7jOE4u8AbNcRzHyQUdb9BI9pK8g+S9JB8geWWYfgLJ20muIfkV0ny/6ziO4zgJukFDmwRwgYiMkCwB+BHJbwN4O4CPisj1JD8N4FIAn+qkoweClHRVR4Nazynv0mmX9q55TNmNWjNGauCcZ+mVD+k0WanhYgrmMHeyf91qOXv1cB+NB7WmgcdWK3N06/16dZGut6HTztj3e3DVSdCFOm0SJ7VeUjIxSj0mpqnY0Bpao2qGB4m0VpSJWI3M6ivarjVsebZGZuPcrJ3cXN2krhI7/IuJO7PDw8AML2SHVRGjJdXNGDHj9eaQP/WKjVkz140ZRsdqRfW6jQXUds1qaJPatrFmUUZ6KVulDXMM622OcTrsLDuWMHtAGevb/AxE6/gbmsRM3ZVK4U8AXADghjD9WgCv7oB7juM4zhyh4w0aAJAskLwHwFYANwN4FMCwyL5H5vUAjmmx7GUk7yJ519jY2HSzOI7jOPOArmjQRKQuImcDWAHgPACnzmLZz4rIuSJybn9/f/sFHMdxnFzSDRraPkRkmOStAJ4DYIhkMbylrQCwobPeHRhRn45D63mKfuEc2qu1gcJO3fe/9ZFmnBord6myvmeeojc2obUeLjpalw+YuDW96YOL0X5kWOsn1fu0Vjj+6K+UHW1+QNn12lZlL3v6WcoeOOe8ROEyvW0T09TXo0//8gKd63Fy3OgrZmiRyOYJLNscm/uPjTOz8VVtwtbSQ5Nk5Pu026I9ZpGNcTIbs8FhYuOnzAZNbFi10jwn6mYoo0LBamjZgWh1o9fZTdshf6xmVojMxZCIBUvpklb3bBc72EbWSh+h7PXNT5Usm46/oZFcRnIo/O4D8BIADwG4FcBrw2wXA/hmZzx0HMdx5gLd8IZ2FIBrSRYQN7BfFZGbSD4I4HqSHwDwCwBXd9JJx3Ecp7vpeIMmIr8EcM400x9DrKc5juM4Tls63qDNG/rN2FnH6XyMUUPHpY2YWJ5iQlvY+bDWmY6cWKdsRkfpbS8xOsFxOo9kNGh1CW2mNJJE772YGCYbPxUZTWT87k3afuRuZe8aXms2tVOZy8/RemHvOWfr+Rc2Y/KkanLrje9RttgchHaIe6MF1Wpaf7F5AhHtf05NK0ul4sps/kMjyETtNDQrZCXnT+UMbH28AaBmcjWmtKHI3lbsDLremNDo6jYXo9kW2oT6tdOZIlMPRavRGQ0tWe2pOLLUOHIml6eNK7O+pdJ5ZuTbxDT7lhFrNl/1tY5raI7jOI5zMPAGzXEcx8kF3qA5juM4ucA1tE6hw9IQnaA1tgEcp2xJaFGDRR0ftXuNzm/YU9C6w+jDWjsaWK01tOKAiZ8qmrxwJr4qOe5TZVTnUqxUtcjRU9I7uu3BNcp+ikk5XSxoHeqYZ16g7NLTdH7Gap+OuavXmnUjFR3zNjluM8lopaFc1s40alrDGBnR6xsZ1xpc76COY8siFXOUHvBMF5schTTlhVReQTOuWMYGrb4msDqWycXYsOOhWTEoe1y4yAh+pcT5ZWVJmpyVtDqn8bWeqkizvNGDo8jmoTTrS+y
L1YdTGlcqf6aeWzJiAafmaG1Np5m1Xl+UPuDzAn9DcxzHcXKBN2iO4zhOLvAGzXEcx8kFrqF1C71mzKmTlyq7P1q87/fEJq1p1RZpe8+6LXrdY3qMMdmgdS+a2J96XWtw40YTGS02fa2asbHqZnyp3rLer9JirXlt6dEJpY89/Wna15OPV/ZYzeiH2/cqu1Jp2kWjORRoxjcrar1OjDY0Oan1wHrN5Bks6n3ZMzLz8dDSupWm3XhmtXp2fkU7XFrWGHjWF6tTpX3J1thsebv4q6TvjYY+X4oFc8xMjFxkYsHqNa1r1o3OZYPmrBaZpWPZKrQ6pa0HG98nqYNiZm8je6XjA1svUHANzXEcx3HmLt6gOY7jOLnAGzTHcRwnF7iG1qUYuQelU5rPHqXlJ6uygYWn6ZkfXK3MXfc/pOz+3VpTK0+avn8TSzbS0PbQEU19r7REa307N25U9vHHaQ0MR69QZq3HxLj16FiwdZv1MHhbdq7Xy5tcf+VCU4MZKGuNq9/odT09Rmcwel+5aMbGEn252Niwiszm+bCNxpEK7bL5FI1OFVlNzqzf6mTtBJssZ+x4aW01t+xEkyrez+RSLNi8kAV7TIyOZTW0Nr7YceJstSR1K7E6pZXntNk26ixNSm3M9C25K+nDOT+zOfobmuM4jpMLvEFzHMdxcoE3aI7jOE4ucA1trpDsIx8qt5wNAHDGKmUuNrbYMadGTRzRbh2n1lefUPbAQDM/Y9Snx3EbhNY4ajWtcVV36nyKO3bvUPbWLZuVvW2vtgs0+p/RyWrS3P6ecb3tyYqOYeuraG1w3MgzUWRinIxmVijqy2egqOsii3Sev2zNw2o/Nn9iw8a1pcQgm8MwEfvVTm5pp4mld2Y2xUp8srkVrabGyGpo2fVgQ8OsL+lwrVQAX8tlbbxdOs4su2Jnq7Gl9b/Wm0rnnZwf+Bua4ziOkwu8QXMcx3FygTdojuM4Ti5wDW0uYgeNEj1OF4zWg6rJzbhDj4+2d9NOZY9u1rFeUUnrVkNPPXHf70ULzlBllXH9jLR9q9bANm/TeSb3jmnfRye1rxWTV7JErXuVROuJE4kx0GysVbFk8k4a4WHUjG9Wq2nfbExUb0nngmyIrtdMrFbUZqwrW5zS0KJsnSudb1FazmvFnXaSWZbuBKRj5FK5I5N2KteitiOTO9TmN0znO2yTq7GN1pTUydJzGl2Sdg6bFzKVxDLTtsfYjnGmjuEs9yuv+Bua4ziOkwu8QXMcx3FygTdojuM4Ti5wDW0uQqORNUz+urrWmazusLOmY8GeMLrWxkfuVfbihVpDO+ukgaYr5VP0tup9yi6UTc7BovZ1ZFKPZzapXcPk2C5lL1hqdKyC1hV6+5q6ltUgiiY/Zu+gjmFrGD1u5w697cqE1tTq0PMvNuvLQoy2YzWQVI7BlLCVnUhQjM4qjdZjmNm8j3ZbKTUmPXibtlPSULamVkiU25yV5vAiapODMiVLpZ21S+jS1JhmM48WS8/ZZttW52wzXlra9eT89pi5huY4juM4cxZv0BzHcZxc4F2Oc4ZEF4LtThLdxVgo6OeURlF/Xr5LfwmPjaM6tdUIdbfhIvN5emVPs1umMTysfek3/Xr2kamou+lKvXr+otmXodKgtvv0KdvXo32NCs3106Smoul6bZj+rAmTCqtmro66GU5m97hOpVWrma7gDNJdYabc9jDCdk+ZtFyp9FS6vJFK05T85NscpFQ32+wHQtGrs5/pt/6c3aaPahchYLtLZ9vR1i6NV9Kafaoq233Zdgk9e5s0X1n7Sv9s33Ecx3HmLt6gOY7jOLmgaxo0kgWSvyB5U7BPIHk7yTUkv0KyTYp5x3EcZz7TTRra5QAeArAw2B8C8FERuZ7kpwFcCuBTnXKuq6Dpm4+MKBZpzatunlsmarp/vSK6vNQ3pNdX0vMP721uP9qwQZWVB/Rn+OVyv7KHevSn7ezV+l3FaD/laEDZPWWtK/T06H2VREqpmtEsrI4
U1fV+9Rpthz1a3xs3n3TXjCgyWTUpyGZFO91K2/XsL+XTqbJSaZiSv9uN73JgtFu/ZAhV7T66P9ikIxISU9p+wm90TquJtcl8ZXe+7dvG/JTJMumKNzSSKwD8BoCrgk0AFwC4IcxyLYBXd8Y7x3EcZy7QFQ0agH8C8C40w0OXAhgWkalXj/UAjpluQZKXkbyL5F1jY2PTzeI4juPMAzreoJF8JYCtInL3/iwvIp8VkXNF5Nz+/v72CziO4zi5pBs0tOcBeBXJVwDoRayhfQzAEMlieEtbAWBDxjpyiB32ot6yrF28St2kyopM7BfK2mbZDItiNLuRseb6e3pNbFek1zWxe7d2rapjtxYV9LpN5izUzVA4/f16BjFplEYmmusfb1iRwaTJMhrGQI+uxz7zgFQ29TBuhmypFmdxOYmNzZqdhmbjtUDtu9hYLxsTpWK/bFnrYUr2B7t81upSQ6Sk4syyh1xpJ0Wmws6sA5mrazPUjE0hZoe2aRd7aO1ZpN2axpl5Scff0ETkvSKyQkRWAngdgB+IyBsA3ArgtWG2iwF8s0MuOo7jOHOAjjdoGbwbwNtJrkGsqV3dYX8cx3GcLqYbuhz3ISK3Abgt/H4MwHmd9MdxHMeZO3RVg3aoueKKKzrtwpzkkczSXx4mL/LF29/55512wXFyRzd3OTqO4zjOjPEGzXEcx8kF3qA5juM4ucAbNMdxHCcXeIPmOI7j5AJv0BzHcZxc4A2a4ziOkwt4oHnaugmS2wA8AeAIANs77E4r3Lf9w33bP9y3/aNbfTtUfh0vIssOwXoPK7lq0KYgeZeInNtpP6bDfds/3Lf9w33bP7rVt271q1vwLkfHcRwnF3iD5jiO4+SCvDZon+20Axm4b/uH+7Z/uG/7R7f61q1+dQW51NAcx3Gc+Ude39Acx3GceUauGjSSLyO5muQaku/pAn8+R3IryfsT05aQvJnkI+H/4g74dSzJW0k+SPIBkpd3kW+9JO8geW/w7cow/QSSt4dj+xWS5cPtW8LHAslfkLypm3wjuZbkfSTvIXlXmNbxYxr8GCJ5A8mHST5E8jnd4BvJVaG+pv72kHxbN/gW/PvzcB3cT/K6cH10xfnWjeSmQSNZAPBJAC8HcDqAi0ie3lmvcA2Al5lp7wFwi4g8FcAtwT7c1AC8Q0ROB3A+gLeEuuoG3yYBXCAiZwE4G8DLSJ4P4EMAPioiJwPYBeDSDvg2xeUAHkrY3eTbr4nI2YlPu7vhmALAxwB8R0ROBXAW4vrruG8isjrU19kAnglgDMA3usE3kscAeCuAc0XkTAAFAK9Dd51v3YWI5OIPwHMAfDdhvxfAe7vAr5UA7k/YqwEcFX4fBWB1F/j4TQAv6TbfAPQD+DmAZyMOJi1Od6wPs08rEN/gLgBwEwB2kW9rARxhpnX8mAJYBOBxBM2+m3wz/rwUwI+7xTcAxwBYB2AJ4sGYbwLw691yvnXjX27e0NA8+FOsD9O6jeUisin83gxgeSedIbkSwDkAbkeX+Ba69O4BsBXAzQAeBTAsIrUwSyeP7T8BeBeARrCXont8EwDfI3k3ycvCtG44picA2Abg86Gr9iqSA13iW5LXAbgu/O64byKyAcA/AHgSwCYAuwHcje4537qOPDVocw6JH7E69pkpyUEAXwPwNhHZkyzrpG8iUpe4C2gFgPMAnNoJPywkXwlgq4jc3WlfWvB8EXkG4m73t5B8QbKwg8e0COAZAD4lIucAGIXpwuuCa6EM4FUA/s2Wdcq3oNtdiPiB4GgAA0hLGE6CPDVoGwAcm7BXhGndxhaSRwFA+L+1E06QLCFuzL4kIl/vJt+mEJFhALci7lYZIlkMRZ06ts8D8CqSawFcj7jb8WNd4tvUEz1EZCtiHeg8dMcxXQ9gvYjcHuwbEDdw3eDbFC8H8HMR2RLsbvDtxQAeF5FtIlIF8HXE52BXnG/dSJ4atDsBPDV8AVRG3H1wY4d9mo4bAVwcfl+MWL86rJAkgKs
BPCQiH+ky35aRHAq/+xBrew8hbthe20nfROS9IrJCRFYiPr9+ICJv6AbfSA6QXDD1G7EedD+64JiKyGYA60iuCpNeBODBbvAtwUVodjcC3eHbkwDOJ9kfrtmpeuv4+da1dFrEO5h/AF4B4FeINZf3d4E/1yHu+64ifkq9FLHmcguARwB8H8CSDvj1fMRdKL8EcE/4e0WX+PZ0AL8Ivt0P4K/C9BMB3AFgDeJuoZ4OH9sXAripW3wLPtwb/h6YOv+74ZgGP84GcFc4rv8OYHEX+TYAYAeARYlp3eLblQAeDtfCFwH0dMP51q1/ninEcRzHyQV56nJ0HMdx5jHeoDmO4zi5wBs0x3EcJxd4g+Y4juPkAm/QHMdxnFzgDZrjOI6TC7xBcxzHcXKBN2iO4zhOLvj/s8IhpjmruakAAAAASUVORK5CYII=\n", 56 | "text/plain": [ 57 | "
" 58 | ] 59 | }, 60 | "metadata": { 61 | "needs_background": "light" 62 | }, 63 | "output_type": "display_data" 64 | } 65 | ], 66 | "source": [ 67 | "%matplotlib inline\n", 68 | "%reload_ext autoreload\n", 69 | "%autoreload 2\n", 70 | "\n", 71 | "import torch\n", 72 | "import torchvision\n", 73 | "import numpy as np\n", 74 | "import matplotlib.pyplot as plt\n", 75 | "import pandas as pd\n", 76 | "\n", 77 | "from datasets.synsigns import get_synsigns\n", 78 | "from datasets.gtsrb import get_gtsrb\n", 79 | "from datasets.gtsrb_legacy import get_gtsrb as get_gtsrb_legacy\n", 80 | "\n", 81 | "# show input images\n", 82 | "def imshow(inp, title=None):\n", 83 | " \"\"\"Imshow for Tensor.\"\"\"\n", 84 | " inp = inp.numpy().transpose((1, 2, 0))\n", 85 | "# mean = np.array([0.485, 0.456, 0.406])\n", 86 | "# std = np.array([0.229, 0.224, 0.225])\n", 87 | " mean = np.array([0.5, 0.5, 0.5])\n", 88 | " std = np.array([0.5, 0.5, 0.5])\n", 89 | " inp = std * inp + mean\n", 90 | " inp = np.clip(inp, 0, 1)\n", 91 | " plt.imshow(inp)\n", 92 | " if title is not None:\n", 93 | " plt.title(title)\n", 94 | " plt.pause(0.001) # pause a bit so that plots are updated\n", 95 | "\n", 96 | "classes = pd.read_csv('/home/wogong/datasets/gtsrb/signnames.csv')\n", 97 | "class_names = {}\n", 98 | "for i, row in classes.iterrows():\n", 99 | " class_names[str(row[0])] = row[1]\n", 100 | "\n", 101 | "# Get a batch of training data\n", 102 | "# inputs contains 4 images because batch_size=4 for the dataloaders\n", 103 | "batch_size = 2\n", 104 | "src_dataloader = get_synsigns('/home/wogong/datasets/synsigns', batch_size, True)\n", 105 | "tgt_dataloader = get_gtsrb('/home/wogong/datasets/gtsrb', batch_size, True)\n", 106 | "tgt_dataloader_legacy = get_gtsrb_legacy('/home/wogong/datasets/gtsrb', batch_size, True)\n", 107 | "\n", 108 | "src_inputs, src_classes = next(iter(src_dataloader))\n", 109 | "tgt_inputs, tgt_classes = next(iter(tgt_dataloader))\n", 110 | "tgt_inputs_legacy, tgt_classes_legacy = 
next(iter(tgt_dataloader_legacy))\n", 111 | "\n", 112 | "# Make a grid from batch\n", 113 | "src_out = torchvision.utils.make_grid(src_inputs)\n", 114 | "tgt_out = torchvision.utils.make_grid(tgt_inputs)\n", 115 | "tgt_out_legacy = torchvision.utils.make_grid(tgt_inputs_legacy)\n", 116 | "\n", 117 | "print ('source images')\n", 118 | "imshow(src_out, title=[class_names[str(x.item())] for x in src_classes])\n", 119 | "print ('target images')\n", 120 | "imshow(tgt_out, title=[class_names[str(x.item())] for x in tgt_classes])\n", 121 | "print ('target images legacy')\n", 122 | "imshow(tgt_out_legacy, title=[class_names[str(x.item())] for x in tgt_classes_legacy])\n" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": 77, 128 | "metadata": {}, 129 | "outputs": [ 130 | { 131 | "name": "stdout", 132 | "output_type": "stream", 133 | "text": [ 134 | "39209\n" 135 | ] 136 | } 137 | ], 138 | "source": [ 139 | "import pprint, pickle\n", 140 | "\n", 141 | "pkl_file = open('/home/wogong/datasets/gtsrb/gtsrb_train.p', 'rb')\n", 142 | "\n", 143 | "data1 = pickle.load(pkl_file)\n", 144 | "print (len(data1['labels']))\n", 145 | "\n", 146 | "pkl_file.close()" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 80, 152 | "metadata": {}, 153 | "outputs": [ 154 | { 155 | "name": "stdout", 156 | "output_type": "stream", 157 | "text": [ 158 | "a 1\n", 159 | "b 2\n" 160 | ] 161 | }, 162 | { 163 | "data": { 164 | "text/plain": [ 165 | "3" 166 | ] 167 | }, 168 | "execution_count": 80, 169 | "metadata": {}, 170 | "output_type": "execute_result" 171 | } 172 | ], 173 | "source": [ 174 | "fs = {}\n", 175 | "for k, v in {'a': 1, 'b': 2}.items():\n", 176 | " print (k, v)\n", 177 | " fs[k] = lambda x: x + v\n", 178 | "fs['a'](1)" 179 | ] 180 | } 181 | ], 182 | "metadata": { 183 | "kernelspec": { 184 | "display_name": "Python (py3-pt1.0)", 185 | "language": "python", 186 | "name": "py3-pt1.0" 187 | }, 188 | "language_info": { 189 | 
"codemirror_mode": { 190 | "name": "ipython", 191 | "version": 3 192 | }, 193 | "file_extension": ".py", 194 | "mimetype": "text/x-python", 195 | "name": "python", 196 | "nbconvert_exporter": "python", 197 | "pygments_lexer": "ipython3", 198 | "version": "3.6.7" 199 | } 200 | }, 201 | "nbformat": 4, 202 | "nbformat_minor": 2 203 | } 204 | --------------------------------------------------------------------------------