├── run.sh
├── Figs
│   └── C3.jpg
├── utils
│   ├── __pycache__
│   │   ├── network.cpython-37.pyc
│   │   ├── network.cpython-38.pyc
│   │   ├── resnet.cpython-37.pyc
│   │   ├── resnet.cpython-38.pyc
│   │   ├── transform.cpython-37.pyc
│   │   ├── transform.cpython-38.pyc
│   │   ├── contrastive_loss.cpython-37.pyc
│   │   └── contrastive_loss.cpython-38.pyc
│   ├── save_model.py
│   └── yaml_config_hook.py
├── modules
│   ├── __pycache__
│   │   ├── network.cpython-37.pyc
│   │   ├── network.cpython-38.pyc
│   │   ├── resnet.cpython-37.pyc
│   │   ├── resnet.cpython-38.pyc
│   │   ├── transform.cpython-37.pyc
│   │   ├── transform.cpython-38.pyc
│   │   ├── contrastive_loss.cpython-37.pyc
│   │   └── contrastive_loss.cpython-38.pyc
│   ├── contrastive_loss.py
│   ├── network.py
│   ├── transform.py
│   └── resnet.py
├── evaluation
│   ├── __pycache__
│   │   ├── evaluation.cpython-37.pyc
│   │   └── evaluation.cpython-38.pyc
│   └── evaluation.py
├── config
│   └── config.yaml
├── train.py
├── CIFAR_10_log.out
├── README.md
├── main.py
└── LICENSE
/run.sh:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/run.sh
--------------------------------------------------------------------------------
/Figs/C3.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/Figs/C3.jpg
--------------------------------------------------------------------------------
/utils/__pycache__/network.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/network.cpython-37.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/network.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/network.cpython-38.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/resnet.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/resnet.cpython-37.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/resnet.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/resnet.cpython-38.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/network.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/network.cpython-37.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/network.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/network.cpython-38.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/resnet.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/resnet.cpython-37.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/resnet.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/resnet.cpython-38.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/transform.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/transform.cpython-37.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/transform.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/transform.cpython-38.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/transform.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/transform.cpython-37.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/transform.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/transform.cpython-38.pyc
--------------------------------------------------------------------------------
/evaluation/__pycache__/evaluation.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/evaluation/__pycache__/evaluation.cpython-37.pyc
--------------------------------------------------------------------------------
/evaluation/__pycache__/evaluation.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/evaluation/__pycache__/evaluation.cpython-38.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/contrastive_loss.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/contrastive_loss.cpython-37.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/contrastive_loss.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/utils/__pycache__/contrastive_loss.cpython-38.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/contrastive_loss.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/contrastive_loss.cpython-37.pyc
--------------------------------------------------------------------------------
/modules/__pycache__/contrastive_loss.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Armanfard-Lab/C3/HEAD/modules/__pycache__/contrastive_loss.cpython-38.pyc
--------------------------------------------------------------------------------
/utils/save_model.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 |
4 |
5 | def save_model(args, model, optimizer, current_epoch):
6 | out = os.path.join(args.model_path, "checkpoint_{}.tar".format(current_epoch))
7 | state = {'net': model.state_dict(), 'optimizer': optimizer.state_dict(), 'epoch': current_epoch}
8 | torch.save(state, out)
9 |
--------------------------------------------------------------------------------
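
A minimal round-trip sketch (not part of the repository) of the checkpoint layout save_model writes: a toy nn.Linear stands in for the actual network, and the 'net'/'optimizer'/'epoch' keys match the state dict above.

    import os
    import torch
    import torch.nn as nn

    # Toy stand-ins for the real network and optimizer (illustrative only).
    model = nn.Linear(8, 4)
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-5)

    # Save with the same dict layout save_model uses.
    out = os.path.join(".", "checkpoint_0.tar")
    torch.save({'net': model.state_dict(), 'optimizer': optimizer.state_dict(), 'epoch': 0}, out)

    # Restore and resume from the following epoch.
    state = torch.load(out, map_location="cpu")
    model.load_state_dict(state['net'])
    optimizer.load_state_dict(state['optimizer'])
    start_epoch = state['epoch'] + 1
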
/config/config.yaml:
--------------------------------------------------------------------------------
1 | # general
2 | seed: 42
3 | workers: 8
4 | dataset_dir: "./datasets"
5 |
6 | # train options
7 | dataset_size: 60000
8 | class_num: 10
9 | batch_size: 128
10 | test_batch_size: 500
11 | image_size: 224
12 | start_epoch: 0
13 | max_epochs: 20
14 | dataset: "CIFAR-10"
15 |
16 |
17 | # model options
18 | resnet: "ResNet34" # ResNet18 / ResNet34 / ResNet50
19 | feature_dim: 128
20 | model_path: "save/CIFAR-10"
21 | reload: False
22 |
23 | # loss options
24 | learning_rate: 0.00001
25 | weight_decay: 0.
26 | zeta: 0.6
27 |
28 |
29 |
--------------------------------------------------------------------------------
/modules/contrastive_loss.py:
--------------------------------------------------------------------------------
1 | import torch
2 |
3 |
4 | def C3_loss(z_i, z_j, batch_size, zeta):
5 |
6 | z = torch.cat((z_i, z_j), dim=0)
7 | multiply = torch.matmul(z, z.T)  # pairwise cosine similarities of the 2*batch_size normalized embeddings
8 |
9 | a = torch.ones([batch_size])
10 | mask = 2 * (torch.diag(a, -batch_size) + torch.diag(a, batch_size) + torch.eye(2 * batch_size))  # 2 at self and augmented-pair positions, so they always pass the zeta test
11 | mask = mask.cuda()
12 |
13 | exp_mul = torch.exp(multiply)
14 | numerator = torch.sum(torch.where((multiply + mask) > zeta, exp_mul, torch.zeros(multiply.shape).cuda()), dim=1)  # keep only pairs whose (masked) similarity exceeds zeta
15 | den = torch.sum(exp_mul, dim=1)
16 |
17 | return -torch.sum(torch.log(torch.div(numerator, den))) / batch_size
18 |
--------------------------------------------------------------------------------
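
A self-contained usage sketch (not from the repository) of C3_loss on random, L2-normalized embeddings. batch_size, feature_dim and zeta follow config/config.yaml; a CUDA device is assumed because the loss moves its mask and zero tensor to the GPU, and the script is assumed to run from the repository root so the modules package is importable.

    import torch
    from torch.nn.functional import normalize
    from modules import contrastive_loss

    batch_size, feature_dim, zeta = 128, 128, 0.6  # values from config/config.yaml

    # Two augmented views of the same batch, projected and normalized as in train.py.
    z_i = normalize(torch.randn(batch_size, feature_dim), dim=1).cuda()
    z_j = normalize(torch.randn(batch_size, feature_dim), dim=1).cuda()

    loss = contrastive_loss.C3_loss(z_i, z_j, batch_size, zeta)
    print(loss.item())
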
/utils/yaml_config_hook.py:
--------------------------------------------------------------------------------
1 | import os
2 | import yaml
3 |
4 |
5 | def yaml_config_hook(config_file):
6 | """
7 | Custom YAML config loader, which can include other yaml files (I like using config files
8 | instead of using argparse)
9 | """
10 |
11 | # load yaml files in the nested 'defaults' section, which include defaults for experiments
12 | with open(config_file) as f:
13 | cfg = yaml.safe_load(f)
14 | for d in cfg.get("defaults", []):
15 | config_dir, cf = d.popitem()
16 | cf = os.path.join(os.path.dirname(config_file), config_dir, cf + ".yaml")
17 | with open(cf) as f:
18 | l = yaml.safe_load(f)
19 | cfg.update(l)
20 |
21 | if "defaults" in cfg.keys():
22 | del cfg["defaults"]
23 |
24 | return cfg
25 |
--------------------------------------------------------------------------------
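
A short usage sketch (not from the repository): yaml_config_hook flattens config/config.yaml into a plain dict, which main.py then exposes as argparse flags.

    from utils import yaml_config_hook

    cfg = yaml_config_hook.yaml_config_hook("config/config.yaml")
    print(cfg["dataset"], cfg["batch_size"], cfg["zeta"])  # CIFAR-10 128 0.6 with the shipped config
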
/train.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch
3 | from modules import transform, resnet, network, contrastive_loss
4 | from torch.nn.functional import normalize
5 |
6 | def train_net(net, data_loader, optimizer, batch_size, zeta):
7 |
8 | net.train()
9 | for param in net.parameters():
10 | param.requires_grad = True
11 |
12 | for step, ((x_i, x_j), _) in enumerate(data_loader):
13 | optimizer.zero_grad()
14 |
15 | x_i = x_i.to('cuda')
16 | x_j = x_j.to('cuda')
17 | h_i = net.resnet(x_i)
18 | h_j = net.resnet(x_j)
19 |
20 | z_i = normalize(net.instance_projector(h_i), dim=1)
21 | z_j = normalize(net.instance_projector(h_j), dim=1)
22 |
23 | loss = contrastive_loss.C3_loss(z_i, z_j, batch_size, zeta)
24 | loss.backward()
25 | optimizer.step()
26 |
27 | return net, optimizer
28 |
--------------------------------------------------------------------------------
/modules/network.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch
3 | from torch.nn.functional import normalize
4 |
5 |
6 | class Network(nn.Module):
7 | def __init__(self, resnet, feature_dim, class_num):
8 | super(Network, self).__init__()
9 | self.resnet = resnet
10 | self.feature_dim = feature_dim
11 | self.cluster_num = class_num
12 | self.instance_projector = nn.Sequential(
13 | nn.Linear(self.resnet.rep_dim, self.resnet.rep_dim),
14 | nn.ReLU(),
15 | nn.Linear(self.resnet.rep_dim, self.feature_dim),
16 | )
17 | self.cluster_projector = nn.Sequential(
18 | nn.Linear(self.resnet.rep_dim, self.resnet.rep_dim),
19 | nn.ReLU(),
20 | nn.Linear(self.resnet.rep_dim, self.cluster_num),
21 | nn.Softmax(dim=1)
22 | )
23 |
24 | def forward(self, x_i, x_j):
25 | h_i = self.resnet(x_i)
26 | h_j = self.resnet(x_j)
27 |
28 | z_i = normalize(self.instance_projector(h_i), dim=1)
29 | z_j = normalize(self.instance_projector(h_j), dim=1)
30 |
31 | c_i = self.cluster_projector(h_i)
32 | c_j = self.cluster_projector(h_j)
33 |
34 | return z_i, z_j, c_i, c_j
35 |
36 | def forward_cluster(self, x):
37 | h = self.resnet(x)
38 | c = self.cluster_projector(h)
39 | c = torch.argmax(c, dim=1)
40 | return c
41 |
--------------------------------------------------------------------------------
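
A small sketch (not from the repository) wiring Network to the bundled ResNet and pushing two dummy views through it. feature_dim, class_num and image_size follow config/config.yaml; the batch size of 2 is arbitrary, and the model stays on CPU since Network itself does not require CUDA.

    import torch
    from modules import resnet, network

    res = resnet.get_resnet("ResNet34")            # rep_dim = 512 for the BasicBlock variants
    net = network.Network(res, feature_dim=128, class_num=10)

    x_i = torch.randn(2, 3, 224, 224)              # two augmented views of a 2-image batch
    x_j = torch.randn(2, 3, 224, 224)
    z_i, z_j, c_i, c_j = net(x_i, x_j)
    print(z_i.shape, c_i.shape)                    # torch.Size([2, 128]) torch.Size([2, 10])
    print(net.forward_cluster(x_i))                # hard cluster assignments for one view
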
/CIFAR_10_log.out:
--------------------------------------------------------------------------------
1 | epoch: 0
2 | ACC: 0.7701166666666667
3 | NMI: 0.677791163171621
4 | ARI: 0.6069096232114414
5 | epoch: 1
6 | ACC: 0.7786
7 | NMI: 0.6878734537062069
8 | ARI: 0.620410202264679
9 | epoch: 2
10 | ACC: 0.8102
11 | NMI: 0.712980578724114
12 | ARI: 0.6608287202941374
13 | epoch: 3
14 | ACC: 0.8254166666666667
15 | NMI: 0.72907347209768
16 | ARI: 0.6857929928175632
17 | epoch: 4
18 | ACC: 0.8313
19 | NMI: 0.736224580530419
20 | ARI: 0.6956548604007671
21 | epoch: 5
22 | ACC: 0.8312166666666667
23 | NMI: 0.7367812041865238
24 | ARI: 0.6943580934273499
25 | epoch: 6
26 | ACC: 0.8334333333333334
27 | NMI: 0.7406088679019149
28 | ARI: 0.6981705807349273
29 | epoch: 7
30 | ACC: 0.8321666666666667
31 | NMI: 0.7409049056249303
32 | ARI: 0.6953900294612881
33 | epoch: 8
34 | ACC: 0.8354333333333334
35 | NMI: 0.7439649890104261
36 | ARI: 0.7017268059720909
37 | epoch: 9
38 | ACC: 0.83585
39 | NMI: 0.7446183697891985
40 | ARI: 0.7017045031570542
41 | epoch: 10
42 | ACC: 0.83675
43 | NMI: 0.7453026426639774
44 | ARI: 0.7034332748395484
45 | epoch: 11
46 | ACC: 0.83925
47 | NMI: 0.7488559796687856
48 | ARI: 0.7080651781065155
49 | epoch: 12
50 | ACC: 0.8382833333333334
51 | NMI: 0.7478013607075835
52 | ARI: 0.7056364975676672
53 | epoch: 13
54 | ACC: 0.8384833333333334
55 | NMI: 0.7483681698995862
56 | ARI: 0.706346528737891
57 | epoch: 14
58 | ACC: 0.8386333333333333
59 | NMI: 0.7497345846553385
60 | ARI: 0.7067066636097123
61 | epoch: 15
62 | ACC: 0.8392833333333334
63 | NMI: 0.7475620710498015
64 | ARI: 0.7084747302785441
65 | epoch: 16
66 | ACC: 0.838
67 | NMI: 0.7477611369036427
68 | ARI: 0.7057603770174042
69 | epoch: 17
70 | ACC: 0.8395333333333334
71 | NMI: 0.748683708617457
72 | ARI: 0.7084227476299111
73 | epoch: 18
74 | ACC: 0.8384
75 | NMI: 0.7460593651920591
76 | ARI: 0.70574805383224
77 | epoch: 19
78 | ACC: 0.8407166666666667
79 | NMI: 0.7496362702834038
80 | ARI: 0.7093040509830213
81 |
--------------------------------------------------------------------------------
/modules/transform.py:
--------------------------------------------------------------------------------
1 | import torchvision
2 | import cv2
3 | import numpy as np
4 |
5 |
6 | class GaussianBlur:
7 | def __init__(self, kernel_size, min=0.1, max=2.0):
8 | self.min = min
9 | self.max = max
10 | self.kernel_size = kernel_size
11 |
12 | def __call__(self, sample):
13 | sample = np.array(sample)
14 | prob = np.random.random_sample()
15 | if prob < 0.5:
16 | sigma = (self.max - self.min) * np.random.random_sample() + self.min
17 | sample = cv2.GaussianBlur(sample, (self.kernel_size, self.kernel_size), sigma)
18 | return sample
19 |
20 |
21 | class Transforms:
22 | def __init__(self, size, s=1.0, mean=None, std=None, blur=False):
23 | self.train_transform = [
24 | torchvision.transforms.RandomResizedCrop(size=size),
25 | torchvision.transforms.RandomHorizontalFlip(),
26 | torchvision.transforms.RandomApply([torchvision.transforms.ColorJitter(0.8 * s, 0.8 * s, 0.8 * s, 0.2 * s)],
27 | p=0.8),
28 | torchvision.transforms.RandomGrayscale(p=0.2),
29 | ]
30 | if blur:
31 | self.train_transform.append(GaussianBlur(kernel_size=23))
32 | self.train_transform.append(torchvision.transforms.ToTensor())
33 | self.test_transform = [
34 | torchvision.transforms.Resize(size=(size, size)),
35 | torchvision.transforms.ToTensor(),
36 | ]
37 | if mean and std:
38 | self.train_transform.append(torchvision.transforms.Normalize(mean=mean, std=std))
39 | self.test_transform.append(torchvision.transforms.Normalize(mean=mean, std=std))
40 | self.train_transform = torchvision.transforms.Compose(self.train_transform)
41 | self.test_transform = torchvision.transforms.Compose(self.test_transform)
42 |
43 | def __call__(self, x):
44 | return self.train_transform(x), self.train_transform(x)
45 |
--------------------------------------------------------------------------------
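
A usage sketch (not from the repository): Transforms returns two independently augmented views of the same image for training, while test_transform gives a single deterministic tensor. The size and s values match how main.py builds the training loader; a random PIL image stands in for a CIFAR-10 sample.

    import numpy as np
    from PIL import Image
    from modules import transform

    img = Image.fromarray(np.random.randint(0, 255, (32, 32, 3), dtype=np.uint8))

    t = transform.Transforms(size=224, s=0.5)   # as in main.py for the training loader
    x_i, x_j = t(img)                           # two random augmentations of the same image
    x_test = t.test_transform(img)              # deterministic resize + ToTensor for evaluation
    print(x_i.shape, x_j.shape, x_test.shape)   # each torch.Size([3, 224, 224])
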
/evaluation/evaluation.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import torch
3 | from sklearn.metrics import normalized_mutual_info_score, adjusted_rand_score
4 |
5 |
6 | nmi = normalized_mutual_info_score
7 | ari = adjusted_rand_score
8 |
9 | def acc(y_true, y_pred):
10 | """
11 | Calculate clustering accuracy. Require scikit-learn installed
12 | # Arguments
13 | y: true labels, numpy.array with shape `(n_samples,)`
14 | y_pred: predicted labels, numpy.array with shape `(n_samples,)`
15 | # Return
16 | accuracy, in [0,1]
17 | """
18 | y_true = y_true.astype(np.int64)
19 | y_pred = y_pred.astype(np.int64)
20 | assert y_pred.size == y_true.size
21 | D = max(y_pred.max(), y_true.max()) + 1
22 | w = np.zeros((D, D), dtype=np.int64)
23 | for i in range(y_pred.size):
24 | w[y_pred[i], y_true[i]] += 1
25 | from scipy.optimize import linear_sum_assignment
26 |
27 | ind = linear_sum_assignment(w.max() - w)
28 | accuracy = 0.0
29 | for i in ind[0]:
30 | accuracy = accuracy + w[i, ind[1][i]]
31 | return accuracy / y_pred.size
32 |
33 |
34 |
35 | def net_evaluation(net, test_loader, dataset_size, test_batch_size=500):
36 |
37 | pred_label_c = torch.zeros([dataset_size]).cuda()
38 | true_label_new = torch.zeros([dataset_size])
39 |
40 | net.eval()
41 | for param in net.parameters():
42 | param.requires_grad = False
43 |
44 |
45 | my_counter = 0
46 | for step, (x, y) in enumerate(test_loader):
47 | x = x.cuda()
48 | h = net.resnet(x)
49 | c = net.cluster_projector(h)
50 | c = torch.argmax(c, dim=1)
51 |
52 | pred_label_c[my_counter * test_batch_size:(my_counter + 1) * test_batch_size] = c
53 | true_label_new[my_counter * test_batch_size:(my_counter + 1) * test_batch_size] = y
54 | my_counter += 1
55 |
56 | my_acc = acc(true_label_new.cpu().numpy(), pred_label_c.cpu().numpy())
57 | my_nmi = nmi(true_label_new.cpu().numpy(), pred_label_c.cpu().numpy())
58 | my_ari = ari(true_label_new.cpu().numpy(), pred_label_c.cpu().numpy())
59 |
60 | print("ACC:", my_acc)
61 | print("NMI:", my_nmi)
62 | print("ARI:", my_ari)
--------------------------------------------------------------------------------
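
A tiny sketch (not from the repository) of the Hungarian-matching accuracy above: the predicted cluster ids are a permutation of the true labels, so acc resolves the relabeling and all three metrics return 1.0.

    import numpy as np
    from evaluation import evaluation

    y_true = np.array([0, 0, 1, 1, 2, 2])
    y_pred = np.array([1, 1, 0, 0, 2, 2])   # same grouping, different cluster ids

    print(evaluation.acc(y_true, y_pred))   # 1.0 after the optimal cluster-to-label assignment
    print(evaluation.nmi(y_true, y_pred))   # 1.0
    print(evaluation.ari(y_true, y_pred))   # 1.0
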
/README.md:
--------------------------------------------------------------------------------
1 | # C3: Cross-instance guided Contrastive Clustering
2 | PyTorch implementation of the paper "C3: Cross-instance guided Contrastive Clustering"
3 |
4 |
5 |
6 | ## Implementation
7 |
8 | Please first download **`CIFAR_10_initial.zip`** from [this link](https://drive.google.com/file/d/1deqzG-eUztltgdQ0H2Y29N83_i9Tr_uN/view?usp=sharing), extract it in the same folder as **`main.py`** (main.py loads the extracted checkpoint file `CIFAR_10_initial` by name), and then run the code.
9 |
10 | ## Citation
11 |
12 | You can find the preprint of our paper on [arXiv](https://arxiv.org/abs/2211.07136).
13 |
14 | Please cite our paper if you use the results or codes of our work.
15 |
16 | ```
17 | @article{sadeghi2022c3,
18 | title={C3: Cross-instance guided Contrastive Clustering},
19 | author={Sadeghi, Mohammadreza and Hojjati, Hadi and Armanfard, Narges},
20 | journal={British Machine Vision Conference 2023},
21 | year={2023}
22 | }
23 | ```
24 |
25 | ## Abstract
26 |
27 | >Clustering is the task of gathering similar data samples into clusters without using any predefined labels. It has been widely studied in machine learning literature, and recent advancements in deep learning have revived interest in this field. Contrastive clustering (CC) models are a staple of deep clustering in which positive and negative pairs of each data instance are generated through data augmentation. CC models aim to learn a feature space where instance-level and cluster-level representations of positive pairs are grouped together. Despite improving the SOTA, these algorithms ignore the cross-instance patterns, which carry essential information for improving clustering performance. In this paper, we propose a novel contrastive clustering method, Cross-instance guided Contrastive Clustering (C3), that considers the cross-sample relationships to increase the number of positive pairs. In particular, we define a new loss function that identifies similar instances using the instance-level representation and encourages them to aggregate together. Extensive experimental evaluations show that our proposed method can outperform state-of-the-art algorithms on benchmark computer vision datasets: we improve the clustering accuracy by 6.8%, 2.8%, 4.9%, 1.3% and 0.4% on CIFAR-10, CIFAR-100, ImageNet-10, ImageNet-Dogs, and Tiny-ImageNet, respectively.
28 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | import torch
4 | import torchvision
5 | import argparse
6 | from collections import OrderedDict
7 |
8 | from modules import transform, resnet, network
9 | from utils import yaml_config_hook
10 | from torch.utils import data
11 | import torch.utils.data.distributed
12 | from evaluation import evaluation
13 | from train import train_net
14 |
15 |
16 | def main():
17 | parser = argparse.ArgumentParser()
18 | config = yaml_config_hook.yaml_config_hook("config/config.yaml")
19 |
20 | for k, v in config.items():
21 | parser.add_argument(f"--{k}", default=v, type=type(v))
22 | args = parser.parse_args()
23 | if not os.path.exists(args.model_path):
24 | os.makedirs(args.model_path)
25 |
26 | torch.manual_seed(args.seed)
27 | torch.cuda.manual_seed_all(args.seed)
28 | torch.cuda.manual_seed(args.seed)
29 | np.random.seed(args.seed)
30 |
31 | # prepare data---------------------------------------------------------------------------------------------------------------------------------------------------
32 | #train data
33 | train_dataset = torchvision.datasets.CIFAR10(
34 | root=args.dataset_dir,
35 | download=True,
36 | train=True,
37 | transform=transform.Transforms(size=args.image_size, s=0.5),
38 | )
39 | test_dataset = torchvision.datasets.CIFAR10(
40 | root=args.dataset_dir,
41 | download=True,
42 | train=False,
43 | transform=transform.Transforms(size=args.image_size, s=0.5),
44 | )
45 | dataset = data.ConcatDataset([train_dataset, test_dataset])
46 | data_loader = torch.utils.data.DataLoader(dataset, batch_size=args.batch_size, shuffle=True, drop_last=True,
47 | pin_memory=True)
48 |
49 |
50 | # test data
51 | test_dataset_1 = torchvision.datasets.CIFAR10(
52 | root=args.dataset_dir,
53 | download=True,
54 | train=True,
55 | transform=transform.Transforms(size=args.image_size).test_transform,
56 | )
57 | test_dataset_2 = torchvision.datasets.CIFAR10(
58 | root=args.dataset_dir,
59 | download=True,
60 | train=False,
61 | transform=transform.Transforms(size=args.image_size).test_transform,
62 | )
63 | dataset_test = data.ConcatDataset([test_dataset_1, test_dataset_2])
64 | test_loader = torch.utils.data.DataLoader(
65 | dataset=dataset_test,
66 | batch_size=args.test_batch_size,
67 | shuffle=False)
68 |
69 |
70 | # Initializing our network with a network trained with CC -------------------------------------------------------------------------------------------------------
71 | res = resnet.get_resnet(args.resnet)
72 | net = network.Network(res, args.feature_dim, args.class_num)
73 | net = net.to('cuda')
74 | checkpoint = torch.load('CIFAR_10_initial', map_location=torch.device('cuda:0'))
75 | new_state_dict = OrderedDict()
76 | for k, v in checkpoint['net'].items():
77 | name = k[7:] # remove `module.`
78 | new_state_dict[name] = v
79 | net.load_state_dict(new_state_dict)
80 |
81 | # optimizer ---------------------------------------------------------------------------------------------------------------------------------------------
82 | optimizer = torch.optim.Adam(net.parameters(), lr=args.learning_rate)
83 |
84 | # train loop ---------------------------------------------------------------------------------------------------------------------------------------------------
85 | for epoch in range(args.start_epoch, args.max_epochs):
86 |
87 | print("epoch:", epoch)
88 | evaluation.net_evaluation(net, test_loader, args.dataset_size, args.test_batch_size)
89 | net, optimizer = train_net(net, data_loader, optimizer, args.batch_size, args.zeta)
90 |
91 | state = {'net': net.state_dict(), 'optimizer': optimizer.state_dict(), 'epoch': epoch}
92 | with open('CIFAR_10_C3_loss_epoch_{}'.format(epoch), 'wb') as out:
93 | torch.save(state, out)
94 |
95 |
96 | if __name__ == "__main__":
97 | main()
98 |
--------------------------------------------------------------------------------
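
A note on invocation (not from the repository): every key in config/config.yaml becomes an optional command-line flag, so running "python main.py" uses the shipped defaults, while individual keys can be overridden, e.g. "python main.py --zeta 0.7 --max_epochs 10". The CIFAR_10_initial checkpoint referenced in the README must sit next to main.py, since it is loaded by name before training starts.
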
/modules/resnet.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch
3 | from torchvision.models.resnet import Bottleneck, BasicBlock, conv1x1
4 |
5 |
6 | class ResNet(nn.Module):
7 |
8 | def __init__(self, block, layers, num_classes=1000, zero_init_residual=False,
9 | groups=1, width_per_group=64, replace_stride_with_dilation=None,
10 | norm_layer=None):
11 | super(ResNet, self).__init__()
12 | if norm_layer is None:
13 | norm_layer = nn.BatchNorm2d
14 | self._norm_layer = norm_layer
15 |
16 | self.inplanes = 64
17 | self.dilation = 1
18 | if replace_stride_with_dilation is None:
19 | # each element in the tuple indicates if we should replace
20 | # the 2x2 stride with a dilated convolution instead
21 | replace_stride_with_dilation = [False, False, False]
22 | if len(replace_stride_with_dilation) != 3:
23 | raise ValueError("replace_stride_with_dilation should be None "
24 | "or a 3-element tuple, got {}".format(replace_stride_with_dilation))
25 | self.groups = groups
26 | self.base_width = width_per_group
27 | self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3,
28 | bias=False)
29 | self.bn1 = norm_layer(self.inplanes)
30 | self.relu = nn.ReLU(inplace=True)
31 | self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
32 | self.layer1 = self._make_layer(block, 64, layers[0])
33 | self.layer2 = self._make_layer(block, 128, layers[1], stride=2,
34 | dilate=replace_stride_with_dilation[0])
35 | self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
36 | dilate=replace_stride_with_dilation[1])
37 | self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
38 | dilate=replace_stride_with_dilation[2])
39 | self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
40 | self.rep_dim = 512 * block.expansion
41 |
42 | for m in self.modules():
43 | if isinstance(m, nn.Conv2d):
44 | nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
45 | elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
46 | nn.init.constant_(m.weight, 1)
47 | nn.init.constant_(m.bias, 0)
48 |
49 | # Zero-initialize the last BN in each residual branch,
50 | # so that the residual branch starts with zeros, and each residual block behaves like an identity.
51 | # This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
52 | if zero_init_residual:
53 | for m in self.modules():
54 | if isinstance(m, Bottleneck):
55 | nn.init.constant_(m.bn3.weight, 0)
56 | elif isinstance(m, BasicBlock):
57 | nn.init.constant_(m.bn2.weight, 0)
58 |
59 | def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
60 | norm_layer = self._norm_layer
61 | downsample = None
62 | previous_dilation = self.dilation
63 | if dilate:
64 | self.dilation *= stride
65 | stride = 1
66 | if stride != 1 or self.inplanes != planes * block.expansion:
67 | downsample = nn.Sequential(
68 | conv1x1(self.inplanes, planes * block.expansion, stride),
69 | norm_layer(planes * block.expansion),
70 | )
71 |
72 | layers = []
73 | layers.append(block(self.inplanes, planes, stride, downsample, self.groups,
74 | self.base_width, previous_dilation, norm_layer))
75 | self.inplanes = planes * block.expansion
76 | for _ in range(1, blocks):
77 | layers.append(block(self.inplanes, planes, groups=self.groups,
78 | base_width=self.base_width, dilation=self.dilation,
79 | norm_layer=norm_layer))
80 |
81 | return nn.Sequential(*layers)
82 |
83 | def _forward_impl(self, x):
84 | # See note [TorchScript super()]
85 | x = self.conv1(x)
86 | x = self.bn1(x)
87 | x = self.relu(x)
88 | x = self.maxpool(x)
89 |
90 | x = self.layer1(x)
91 | x = self.layer2(x)
92 | x = self.layer3(x)
93 | x = self.layer4(x)
94 |
95 | x = self.avgpool(x)
96 | x = torch.flatten(x, 1)
97 |
98 | return x
99 |
100 | def forward(self, x):
101 | return self._forward_impl(x)
102 |
103 |
104 | def get_resnet(name):
105 | resnet18 = ResNet(block=BasicBlock, layers=[2, 2, 2, 2])
106 | resnet34 = ResNet(block=BasicBlock, layers=[3, 4, 6, 3])
107 | resnet50 = ResNet(block=Bottleneck, layers=[3, 4, 6, 3])
108 |
109 | resnets = {
110 | "ResNet18": resnet18,
111 | "ResNet34": resnet34,
112 | "ResNet50": resnet50,
113 | }
114 | if name not in resnets.keys():
115 | raise KeyError(f"{name} is not a valid ResNet version")
116 | return resnets[name]
117 |
--------------------------------------------------------------------------------
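
A quick sketch (not from the repository) of get_resnet and the rep_dim consumed by the projection heads in network.py: the BasicBlock variants expose 512-dimensional features, while the Bottleneck-based ResNet50 exposes 2048 (512 * block.expansion).

    from modules import resnet

    print(resnet.get_resnet("ResNet34").rep_dim)   # 512  (BasicBlock.expansion == 1)
    print(resnet.get_resnet("ResNet50").rep_dim)   # 2048 (Bottleneck.expansion == 4)
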
/LICENSE:
--------------------------------------------------------------------------------
1 | Creative Commons Legal Code
2 |
3 | Attribution-NonCommercial-ShareAlike 3.0 Unported
4 |
5 | CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
6 | LEGAL SERVICES. DISTRIBUTION OF THIS LICENSE DOES NOT CREATE AN
7 | ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
8 | INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
9 | REGARDING THE INFORMATION PROVIDED, AND DISCLAIMS LIABILITY FOR
10 | DAMAGES RESULTING FROM ITS USE.
11 |
12 | License
13 |
14 | THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE
15 | COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY
16 | COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS
17 | AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED.
18 |
19 | BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE
20 | TO BE BOUND BY THE TERMS OF THIS LICENSE. TO THE EXTENT THIS LICENSE MAY
21 | BE CONSIDERED TO BE A CONTRACT, THE LICENSOR GRANTS YOU THE RIGHTS
22 | CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND
23 | CONDITIONS.
24 |
25 | 1. Definitions
26 |
27 | a. "Adaptation" means a work based upon the Work, or upon the Work and
28 | other pre-existing works, such as a translation, adaptation,
29 | derivative work, arrangement of music or other alterations of a
30 | literary or artistic work, or phonogram or performance and includes
31 | cinematographic adaptations or any other form in which the Work may be
32 | recast, transformed, or adapted including in any form recognizably
33 | derived from the original, except that a work that constitutes a
34 | Collection will not be considered an Adaptation for the purpose of
35 | this License. For the avoidance of doubt, where the Work is a musical
36 | work, performance or phonogram, the synchronization of the Work in
37 | timed-relation with a moving image ("synching") will be considered an
38 | Adaptation for the purpose of this License.
39 | b. "Collection" means a collection of literary or artistic works, such as
40 | encyclopedias and anthologies, or performances, phonograms or
41 | broadcasts, or other works or subject matter other than works listed
42 | in Section 1(g) below, which, by reason of the selection and
43 | arrangement of their contents, constitute intellectual creations, in
44 | which the Work is included in its entirety in unmodified form along
45 | with one or more other contributions, each constituting separate and
46 | independent works in themselves, which together are assembled into a
47 | collective whole. A work that constitutes a Collection will not be
48 | considered an Adaptation (as defined above) for the purposes of this
49 | License.
50 | c. "Distribute" means to make available to the public the original and
51 | copies of the Work or Adaptation, as appropriate, through sale or
52 | other transfer of ownership.
53 | d. "License Elements" means the following high-level license attributes
54 | as selected by Licensor and indicated in the title of this License:
55 | Attribution, Noncommercial, ShareAlike.
56 | e. "Licensor" means the individual, individuals, entity or entities that
57 | offer(s) the Work under the terms of this License.
58 | f. "Original Author" means, in the case of a literary or artistic work,
59 | the individual, individuals, entity or entities who created the Work
60 | or if no individual or entity can be identified, the publisher; and in
61 | addition (i) in the case of a performance the actors, singers,
62 | musicians, dancers, and other persons who act, sing, deliver, declaim,
63 | play in, interpret or otherwise perform literary or artistic works or
64 | expressions of folklore; (ii) in the case of a phonogram the producer
65 | being the person or legal entity who first fixes the sounds of a
66 | performance or other sounds; and, (iii) in the case of broadcasts, the
67 | organization that transmits the broadcast.
68 | g. "Work" means the literary and/or artistic work offered under the terms
69 | of this License including without limitation any production in the
70 | literary, scientific and artistic domain, whatever may be the mode or
71 | form of its expression including digital form, such as a book,
72 | pamphlet and other writing; a lecture, address, sermon or other work
73 | of the same nature; a dramatic or dramatico-musical work; a
74 | choreographic work or entertainment in dumb show; a musical
75 | composition with or without words; a cinematographic work to which are
76 | assimilated works expressed by a process analogous to cinematography;
77 | a work of drawing, painting, architecture, sculpture, engraving or
78 | lithography; a photographic work to which are assimilated works
79 | expressed by a process analogous to photography; a work of applied
80 | art; an illustration, map, plan, sketch or three-dimensional work
81 | relative to geography, topography, architecture or science; a
82 | performance; a broadcast; a phonogram; a compilation of data to the
83 | extent it is protected as a copyrightable work; or a work performed by
84 | a variety or circus performer to the extent it is not otherwise
85 | considered a literary or artistic work.
86 | h. "You" means an individual or entity exercising rights under this
87 | License who has not previously violated the terms of this License with
88 | respect to the Work, or who has received express permission from the
89 | Licensor to exercise rights under this License despite a previous
90 | violation.
91 | i. "Publicly Perform" means to perform public recitations of the Work and
92 | to communicate to the public those public recitations, by any means or
93 | process, including by wire or wireless means or public digital
94 | performances; to make available to the public Works in such a way that
95 | members of the public may access these Works from a place and at a
96 | place individually chosen by them; to perform the Work to the public
97 | by any means or process and the communication to the public of the
98 | performances of the Work, including by public digital performance; to
99 | broadcast and rebroadcast the Work by any means including signs,
100 | sounds or images.
101 | j. "Reproduce" means to make copies of the Work by any means including
102 | without limitation by sound or visual recordings and the right of
103 | fixation and reproducing fixations of the Work, including storage of a
104 | protected performance or phonogram in digital form or other electronic
105 | medium.
106 |
107 | 2. Fair Dealing Rights. Nothing in this License is intended to reduce,
108 | limit, or restrict any uses free from copyright or rights arising from
109 | limitations or exceptions that are provided for in connection with the
110 | copyright protection under copyright law or other applicable laws.
111 |
112 | 3. License Grant. Subject to the terms and conditions of this License,
113 | Licensor hereby grants You a worldwide, royalty-free, non-exclusive,
114 | perpetual (for the duration of the applicable copyright) license to
115 | exercise the rights in the Work as stated below:
116 |
117 | a. to Reproduce the Work, to incorporate the Work into one or more
118 | Collections, and to Reproduce the Work as incorporated in the
119 | Collections;
120 | b. to create and Reproduce Adaptations provided that any such Adaptation,
121 | including any translation in any medium, takes reasonable steps to
122 | clearly label, demarcate or otherwise identify that changes were made
123 | to the original Work. For example, a translation could be marked "The
124 | original work was translated from English to Spanish," or a
125 | modification could indicate "The original work has been modified.";
126 | c. to Distribute and Publicly Perform the Work including as incorporated
127 | in Collections; and,
128 | d. to Distribute and Publicly Perform Adaptations.
129 |
130 | The above rights may be exercised in all media and formats whether now
131 | known or hereafter devised. The above rights include the right to make
132 | such modifications as are technically necessary to exercise the rights in
133 | other media and formats. Subject to Section 8(f), all rights not expressly
134 | granted by Licensor are hereby reserved, including but not limited to the
135 | rights described in Section 4(e).
136 |
137 | 4. Restrictions. The license granted in Section 3 above is expressly made
138 | subject to and limited by the following restrictions:
139 |
140 | a. You may Distribute or Publicly Perform the Work only under the terms
141 | of this License. You must include a copy of, or the Uniform Resource
142 | Identifier (URI) for, this License with every copy of the Work You
143 | Distribute or Publicly Perform. You may not offer or impose any terms
144 | on the Work that restrict the terms of this License or the ability of
145 | the recipient of the Work to exercise the rights granted to that
146 | recipient under the terms of the License. You may not sublicense the
147 | Work. You must keep intact all notices that refer to this License and
148 | to the disclaimer of warranties with every copy of the Work You
149 | Distribute or Publicly Perform. When You Distribute or Publicly
150 | Perform the Work, You may not impose any effective technological
151 | measures on the Work that restrict the ability of a recipient of the
152 | Work from You to exercise the rights granted to that recipient under
153 | the terms of the License. This Section 4(a) applies to the Work as
154 | incorporated in a Collection, but this does not require the Collection
155 | apart from the Work itself to be made subject to the terms of this
156 | License. If You create a Collection, upon notice from any Licensor You
157 | must, to the extent practicable, remove from the Collection any credit
158 | as required by Section 4(d), as requested. If You create an
159 | Adaptation, upon notice from any Licensor You must, to the extent
160 | practicable, remove from the Adaptation any credit as required by
161 | Section 4(d), as requested.
162 | b. You may Distribute or Publicly Perform an Adaptation only under: (i)
163 | the terms of this License; (ii) a later version of this License with
164 | the same License Elements as this License; (iii) a Creative Commons
165 | jurisdiction license (either this or a later license version) that
166 | contains the same License Elements as this License (e.g.,
167 | Attribution-NonCommercial-ShareAlike 3.0 US) ("Applicable License").
168 | You must include a copy of, or the URI, for Applicable License with
169 | every copy of each Adaptation You Distribute or Publicly Perform. You
170 | may not offer or impose any terms on the Adaptation that restrict the
171 | terms of the Applicable License or the ability of the recipient of the
172 | Adaptation to exercise the rights granted to that recipient under the
173 | terms of the Applicable License. You must keep intact all notices that
174 | refer to the Applicable License and to the disclaimer of warranties
175 | with every copy of the Work as included in the Adaptation You
176 | Distribute or Publicly Perform. When You Distribute or Publicly
177 | Perform the Adaptation, You may not impose any effective technological
178 | measures on the Adaptation that restrict the ability of a recipient of
179 | the Adaptation from You to exercise the rights granted to that
180 | recipient under the terms of the Applicable License. This Section 4(b)
181 | applies to the Adaptation as incorporated in a Collection, but this
182 | does not require the Collection apart from the Adaptation itself to be
183 | made subject to the terms of the Applicable License.
184 | c. You may not exercise any of the rights granted to You in Section 3
185 | above in any manner that is primarily intended for or directed toward
186 | commercial advantage or private monetary compensation. The exchange of
187 | the Work for other copyrighted works by means of digital file-sharing
188 | or otherwise shall not be considered to be intended for or directed
189 | toward commercial advantage or private monetary compensation, provided
190 | there is no payment of any monetary compensation in con-nection with
191 | the exchange of copyrighted works.
192 | d. If You Distribute, or Publicly Perform the Work or any Adaptations or
193 | Collections, You must, unless a request has been made pursuant to
194 | Section 4(a), keep intact all copyright notices for the Work and
195 | provide, reasonable to the medium or means You are utilizing: (i) the
196 | name of the Original Author (or pseudonym, if applicable) if supplied,
197 | and/or if the Original Author and/or Licensor designate another party
198 | or parties (e.g., a sponsor institute, publishing entity, journal) for
199 | attribution ("Attribution Parties") in Licensor's copyright notice,
200 | terms of service or by other reasonable means, the name of such party
201 | or parties; (ii) the title of the Work if supplied; (iii) to the
202 | extent reasonably practicable, the URI, if any, that Licensor
203 | specifies to be associated with the Work, unless such URI does not
204 | refer to the copyright notice or licensing information for the Work;
205 | and, (iv) consistent with Section 3(b), in the case of an Adaptation,
206 | a credit identifying the use of the Work in the Adaptation (e.g.,
207 | "French translation of the Work by Original Author," or "Screenplay
208 | based on original Work by Original Author"). The credit required by
209 | this Section 4(d) may be implemented in any reasonable manner;
210 | provided, however, that in the case of a Adaptation or Collection, at
211 | a minimum such credit will appear, if a credit for all contributing
212 | authors of the Adaptation or Collection appears, then as part of these
213 | credits and in a manner at least as prominent as the credits for the
214 | other contributing authors. For the avoidance of doubt, You may only
215 | use the credit required by this Section for the purpose of attribution
216 | in the manner set out above and, by exercising Your rights under this
217 | License, You may not implicitly or explicitly assert or imply any
218 | connection with, sponsorship or endorsement by the Original Author,
219 | Licensor and/or Attribution Parties, as appropriate, of You or Your
220 | use of the Work, without the separate, express prior written
221 | permission of the Original Author, Licensor and/or Attribution
222 | Parties.
223 | e. For the avoidance of doubt:
224 |
225 | i. Non-waivable Compulsory License Schemes. In those jurisdictions in
226 | which the right to collect royalties through any statutory or
227 | compulsory licensing scheme cannot be waived, the Licensor
228 | reserves the exclusive right to collect such royalties for any
229 | exercise by You of the rights granted under this License;
230 | ii. Waivable Compulsory License Schemes. In those jurisdictions in
231 | which the right to collect royalties through any statutory or
232 | compulsory licensing scheme can be waived, the Licensor reserves
233 | the exclusive right to collect such royalties for any exercise by
234 | You of the rights granted under this License if Your exercise of
235 | such rights is for a purpose or use which is otherwise than
236 | noncommercial as permitted under Section 4(c) and otherwise waives
237 | the right to collect royalties through any statutory or compulsory
238 | licensing scheme; and,
239 | iii. Voluntary License Schemes. The Licensor reserves the right to
240 | collect royalties, whether individually or, in the event that the
241 | Licensor is a member of a collecting society that administers
242 | voluntary licensing schemes, via that society, from any exercise
243 | by You of the rights granted under this License that is for a
244 | purpose or use which is otherwise than noncommercial as permitted
245 | under Section 4(c).
246 | f. Except as otherwise agreed in writing by the Licensor or as may be
247 | otherwise permitted by applicable law, if You Reproduce, Distribute or
248 | Publicly Perform the Work either by itself or as part of any
249 | Adaptations or Collections, You must not distort, mutilate, modify or
250 | take other derogatory action in relation to the Work which would be
251 | prejudicial to the Original Author's honor or reputation. Licensor
252 | agrees that in those jurisdictions (e.g. Japan), in which any exercise
253 | of the right granted in Section 3(b) of this License (the right to
254 | make Adaptations) would be deemed to be a distortion, mutilation,
255 | modification or other derogatory action prejudicial to the Original
256 | Author's honor and reputation, the Licensor will waive or not assert,
257 | as appropriate, this Section, to the fullest extent permitted by the
258 | applicable national law, to enable You to reasonably exercise Your
259 | right under Section 3(b) of this License (right to make Adaptations)
260 | but not otherwise.
261 |
262 | 5. Representations, Warranties and Disclaimer
263 |
264 | UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING AND TO THE
265 | FULLEST EXTENT PERMITTED BY APPLICABLE LAW, LICENSOR OFFERS THE WORK AS-IS
266 | AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE
267 | WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT
268 | LIMITATION, WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
269 | PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS,
270 | ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT
271 | DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED
272 | WARRANTIES, SO THIS EXCLUSION MAY NOT APPLY TO YOU.
273 |
274 | 6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE
275 | LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR
276 | ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES
277 | ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS
278 | BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
279 |
280 | 7. Termination
281 |
282 | a. This License and the rights granted hereunder will terminate
283 | automatically upon any breach by You of the terms of this License.
284 | Individuals or entities who have received Adaptations or Collections
285 | from You under this License, however, will not have their licenses
286 | terminated provided such individuals or entities remain in full
287 | compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will
288 | survive any termination of this License.
289 | b. Subject to the above terms and conditions, the license granted here is
290 | perpetual (for the duration of the applicable copyright in the Work).
291 | Notwithstanding the above, Licensor reserves the right to release the
292 | Work under different license terms or to stop distributing the Work at
293 | any time; provided, however that any such election will not serve to
294 | withdraw this License (or any other license that has been, or is
295 | required to be, granted under the terms of this License), and this
296 | License will continue in full force and effect unless terminated as
297 | stated above.
298 |
299 | 8. Miscellaneous
300 |
301 | a. Each time You Distribute or Publicly Perform the Work or a Collection,
302 | the Licensor offers to the recipient a license to the Work on the same
303 | terms and conditions as the license granted to You under this License.
304 | b. Each time You Distribute or Publicly Perform an Adaptation, Licensor
305 | offers to the recipient a license to the original Work on the same
306 | terms and conditions as the license granted to You under this License.
307 | c. If any provision of this License is invalid or unenforceable under
308 | applicable law, it shall not affect the validity or enforceability of
309 | the remainder of the terms of this License, and without further action
310 | by the parties to this agreement, such provision shall be reformed to
311 | the minimum extent necessary to make such provision valid and
312 | enforceable.
313 | d. No term or provision of this License shall be deemed waived and no
314 | breach consented to unless such waiver or consent shall be in writing
315 | and signed by the party to be charged with such waiver or consent.
316 | e. This License constitutes the entire agreement between the parties with
317 | respect to the Work licensed here. There are no understandings,
318 | agreements or representations with respect to the Work not specified
319 | here. Licensor shall not be bound by any additional provisions that
320 | may appear in any communication from You. This License may not be
321 | modified without the mutual written agreement of the Licensor and You.
322 | f. The rights granted under, and the subject matter referenced, in this
323 | License were drafted utilizing the terminology of the Berne Convention
324 | for the Protection of Literary and Artistic Works (as amended on
325 | September 28, 1979), the Rome Convention of 1961, the WIPO Copyright
326 | Treaty of 1996, the WIPO Performances and Phonograms Treaty of 1996
327 | and the Universal Copyright Convention (as revised on July 24, 1971).
328 | These rights and subject matter take effect in the relevant
329 | jurisdiction in which the License terms are sought to be enforced
330 | according to the corresponding provisions of the implementation of
331 | those treaty provisions in the applicable national law. If the
332 | standard suite of rights granted under applicable copyright law
333 | includes additional rights not granted under this License, such
334 | additional rights are deemed to be included in the License; this
335 | License is not intended to restrict the license of any rights under
336 | applicable law.
337 |
338 |
339 | Creative Commons Notice
340 |
341 | Creative Commons is not a party to this License, and makes no warranty
342 | whatsoever in connection with the Work. Creative Commons will not be
343 | liable to You or any party on any legal theory for any damages
344 | whatsoever, including without limitation any general, special,
345 | incidental or consequential damages arising in connection to this
346 | license. Notwithstanding the foregoing two (2) sentences, if Creative
347 | Commons has expressly identified itself as the Licensor hereunder, it
348 | shall have all rights and obligations of Licensor.
349 |
350 | Except for the limited purpose of indicating to the public that the
351 | Work is licensed under the CCPL, Creative Commons does not authorize
352 | the use by either party of the trademark "Creative Commons" or any
353 | related trademark or logo of Creative Commons without the prior
354 | written consent of Creative Commons. Any permitted use will be in
355 | compliance with Creative Commons' then-current trademark usage
356 | guidelines, as may be published on its website or otherwise made
357 | available upon request from time to time. For the avoidance of doubt,
358 | this trademark restriction does not form part of this License.
359 |
360 | Creative Commons may be contacted at https://creativecommons.org/.
361 |
--------------------------------------------------------------------------------