├── AC-CMNIST
├── .idea
│ ├── deployment.xml
│ ├── libraries
│ │ └── R_User_Library.xml
│ ├── misc.xml
│ ├── modules.xml
│ ├── prun_inv.iml
│ └── workspace.xml
├── README.md
├── experiments
│ └── cmnist
│ │ └── irm_baseline
│ │ ├── config.py
│ │ └── val_main.py
└── utils
│ ├── architectures
│ └── network.py
│ ├── data
│ └── cmnist_irm.py
│ └── utils.py
├── CS-CMNIST
├── .gitignore
├── .idea
│ ├── DomainBed-master.iml
│ ├── deployment.xml
│ ├── dictionaries
│ │ └── narsilzhang.xml
│ ├── libraries
│ │ └── R_User_Library.xml
│ ├── misc.xml
│ ├── modules.xml
│ └── workspace.xml
├── .ipynb_checkpoints
│ ├── Untitled-checkpoint.ipynb
│ ├── lth-checkpoint.ipynb
│ └── visualization-checkpoint.ipynb
├── README.md
└── domainbed
│ ├── algorithms.py
│ ├── datasets.py
│ ├── hparams_registry.py
│ ├── lib
│ ├── fast_data_loader.py
│ ├── misc.py
│ ├── query.py
│ └── wide_resnet.py
│ ├── networks.py
│ ├── scripts
│ ├── download.py
│ └── sweep_train.py
│ └── utils.py
├── InvarianceUnitTests
├── .idea
│ ├── InvarianceUnitTests-master.iml
│ ├── deployment.xml
│ ├── inspectionProfiles
│ │ └── Project_Default.xml
│ ├── libraries
│ │ └── R_User_Library.xml
│ ├── misc.xml
│ ├── modules.xml
│ └── workspace.xml
├── README.md
└── scripts
│ ├── collect_results.py
│ ├── datasets.py
│ ├── main.py
│ ├── models.py
│ ├── sweep.py
│ └── utils.py
├── README.md
└── TerraIncognita
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── domainbed
├── __init__.py
├── __init__.pyc
├── __pycache__
│ ├── __init__.cpython-36.pyc
│ ├── __init__.cpython-37.pyc
│ ├── __init__.cpython-38.pyc
│ ├── __init__.cpython-39.pyc
│ ├── algorithms.cpython-38.pyc
│ ├── algorithms.cpython-39.pyc
│ ├── command_launchers.cpython-38.pyc
│ ├── command_launchers.cpython-39.pyc
│ ├── datasets.cpython-36.pyc
│ ├── datasets.cpython-38.pyc
│ ├── datasets.cpython-39.pyc
│ ├── hparams_registry.cpython-38.pyc
│ ├── hparams_registry.cpython-39.pyc
│ ├── networks.cpython-38.pyc
│ └── networks.cpython-39.pyc
├── algorithms.py
├── command_launchers.py
├── data
│ └── MNIST
│ │ └── MNIST
│ │ ├── processed
│ │ ├── test.pt
│ │ └── training.pt
│ │ └── raw
│ │ ├── t10k-images-idx3-ubyte
│ │ ├── t10k-images-idx3-ubyte.gz
│ │ ├── t10k-labels-idx1-ubyte
│ │ ├── t10k-labels-idx1-ubyte.gz
│ │ ├── train-images-idx3-ubyte
│ │ ├── train-images-idx3-ubyte.gz
│ │ ├── train-labels-idx1-ubyte
│ │ └── train-labels-idx1-ubyte.gz
├── datasets.py
├── hparams_registry.py
├── job.sh
├── lib
│ ├── __pycache__
│ │ ├── fast_data_loader.cpython-38.pyc
│ │ ├── fast_data_loader.cpython-39.pyc
│ │ ├── misc.cpython-38.pyc
│ │ ├── misc.cpython-39.pyc
│ │ ├── wide_resnet.cpython-38.pyc
│ │ └── wide_resnet.cpython-39.pyc
│ ├── fast_data_loader.py
│ ├── misc.py
│ ├── query.py
│ ├── reporting.py
│ └── wide_resnet.py
├── misc
│ ├── domain_net_duplicates.txt
│ ├── test_sweep_data
│ │ ├── 0657090f9a83ff76efe083a104fde93a
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 06db52bd7fcbb8172f97f11a62015261
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 07ea1841921ad29c18ae52563b274925
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 0c53bbff83d887850721788187907586
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 0ec227d205744455c681614d9f55d841
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 0fe0ed57077c0c9291931a388ba21be2
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 1b0678ef843d122c17404ab8bd138523
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 1b424e4ac8bc11c9d3f36b1729e19547
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 24c1684361b7442877526ab118da7117
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 24cf797be205aaef612b14beefc4c1a3
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 2b696be39395e8830222b505f6aa45d8
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 2dd075c39b257eb019b4a8d813525113
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 3539ff8139b8f1797865a2f26e51c70f
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 371b3e2afe1e7a754e49b2324bf159b6
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 41b0ac2ee570d8ace449c34ada3fdd01
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 4a18a8be66b762f1ad5f45408bc27c78
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 4ccfd57ae38cfc8fd5fba4293614ab26
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 539c70bc47514b76736c480df7036b8b
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 63837f74bf4ac60044c74aa87114b386
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 66006bc6faa9f96db95a5bcfc3e4340a
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 66779ee52d1111eddfcc6dafa8ae983c
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 691f8b51c9f69b380113a6a2645392bb
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 6d481a40ca86768fad6a5088cb58458e
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 708942ac219532c45db7898ef9cfb955
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 728347e87d1c533379956bf94dca6fef
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 7a6119601f2d7f4ce36e0d5d478332dd
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 85964cf17f520330ea56101aed9602e5
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 86394db2b6c2ecd1e3b08e99e14759f2
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 8cfbf830754065d02f9723c57abc992e
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 90961e3a45300a2d4771fc090627166e
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── 9f1d308cb3d13c7358eefd027ba1de04
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── bf09cd8e443d5445cc15b7503c14264d
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── bfce2823ee1c49ab624fde5c5e2c1143
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── c62625063d3aee2f08e5c908e7677e83
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── ca571be94ad9fdb0c2bece0061ff3f89
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── cf42c3176baf91b96bb7dd0ff3c686cc
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── d093618124c5748762707da1c6804d75
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── ea7d2d5149dd9167b364d433bb355be1
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── ee8f05db2b9ae5a36273cc0d2161f8c0
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ ├── f61766414e6b0db40063d7bc4ecdaa2b
│ │ │ ├── done
│ │ │ ├── err.txt
│ │ │ ├── out.txt
│ │ │ └── results.jsonl
│ │ └── results.txt
│ ├── test_sweep_results.txt
│ └── vlcs_files.txt
├── model_selection.py
├── networks.py
├── results
│ └── 2020_10_06_7df6f06
│ │ ├── results.png
│ │ └── results.tex
├── scripts
│ ├── __init__.py
│ ├── __init__.pyc
│ ├── __pycache__
│ │ ├── __init__.cpython-36.pyc
│ │ ├── __init__.cpython-37.pyc
│ │ ├── __init__.cpython-38.pyc
│ │ ├── __init__.cpython-39.pyc
│ │ ├── download.cpython-37.pyc
│ │ ├── download.cpython-38.pyc
│ │ ├── sweep.cpython-36.pyc
│ │ ├── sweep.cpython-38.pyc
│ │ ├── sweep.cpython-39.pyc
│ │ ├── train.cpython-38.pyc
│ │ └── train.cpython-39.pyc
│ ├── collect_results.py
│ ├── download.py
│ ├── list_top_hparams.py
│ ├── save_images.py
│ ├── sweep.py
│ └── train.py
└── test
│ ├── __init__.py
│ ├── helpers.py
│ ├── lib
│ ├── __init__.py
│ ├── test_misc.py
│ └── test_query.py
│ ├── scripts
│ ├── __init__.py
│ ├── test_collect_results.py
│ ├── test_sweep.py
│ └── test_train.py
│ ├── test_datasets.py
│ ├── test_hparams_registry.py
│ ├── test_model_selection.py
│ ├── test_models.py
│ └── test_networks.py
└── train_output
├── done
├── err.txt
├── model.pkl
├── out.txt
└── results.jsonl
/AC-CMNIST/README.md:
--------------------------------------------------------------------------------
1 | # Comparisons on AC-CMNIST
2 |
3 | ## Run
4 | Two examples:
5 |
6 | ```bash
7 | cd ./experiments/cmnist/irm_baseline/
8 | python val_main.py --n_restarts 30 --val 0.05 --test_valid 0
9 | python val_main.py --n_restarts 30 --val 0.20 --test_valid 1
10 | ```
11 | where `--val` controls the fraction of data used for validation, and
12 | `--test_valid` decides whether part of the test data is split out for validation.
13 |
14 | Checkpoints are not stored;
15 | the final results are printed after the whole run completes.
16 |
17 | Our code is based on [this repo](https://github.com/facebookresearch/InvariantRiskMinimization).
--------------------------------------------------------------------------------
/AC-CMNIST/experiments/cmnist/irm_baseline/config.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import os
3 | import sys
4 |
5 |
6 | def add_path(path):
7 | if path not in sys.path:
8 | print('Adding {}'.format(path))
9 | sys.path.append(path)
10 |
11 |
12 | parser = argparse.ArgumentParser(description='Colored MNIST')
13 | parser.add_argument('--hidden_dim', type=int, default=256)
14 | parser.add_argument('--l2_regularizer_weight', type=float, default=0.001)
15 | parser.add_argument('--lr', type=float, default=0.001)
16 | parser.add_argument('--n_restarts', '--nr', type=int, default=1)
17 | parser.add_argument('--penalty_anneal_iters', '--p_step', type=int, default=100)
18 | parser.add_argument('--penalty_weight', '--p', type=float, default=10000.0)
19 | parser.add_argument('--steps', type=int, default=501)
20 | parser.add_argument('--grayscale_model', action='store_true')
21 | parser.add_argument('--d', type=int, default=0)
22 |
23 | parser.add_argument('--method', type=str, choices=['irm', 'erm', 'vrex'], default='irm')
24 | parser.add_argument('--prob_flip', default=0.25, type=float)
25 | parser.add_argument('--nn', choices=['cnn', 'mlp'], default='mlp')
26 | parser.add_argument('--val', default=0, type=float, help="the ratio of valid dataset in test domain")
27 |
28 | parser.add_argument('--ib_lambda', default=0, type=float)
29 | parser.add_argument('--ib_step', default=0, type=int)
30 | parser.add_argument('--class_condition', '--cc', action='store_true')
31 | parser.add_argument('--inter', default=0, type=int, help='use which layer to compute variance')
32 | parser.add_argument("--test_valid", type=int, default=0, choices=[0, 1])
33 | args = parser.parse_args()
--------------------------------------------------------------------------------
/AC-CMNIST/utils/architectures/network.py:
--------------------------------------------------------------------------------
1 | import torch
2 | import torch.nn as nn
3 | import torch.nn.functional as F
4 | from torch.nn import init
5 | import torchvision.models
6 |
7 | import sys
8 | import pdb
9 |
10 |
11 | class MLP(nn.Module):
12 | '''
13 | original MLP model from the IRM paper
14 | '''
15 |
16 | def __init__(self, flags):
17 | super(MLP, self).__init__()
18 | self.flags = flags
19 | if flags.grayscale_model:
20 | self.lin1 = nn.Linear(14 * 14, flags.hidden_dim)
21 | else:
22 | self.lin1 = nn.Linear(2 * 14 * 14, flags.hidden_dim)
23 | self.lin2 = nn.Linear(flags.hidden_dim, flags.hidden_dim)
24 | self.lin3 = nn.Linear(flags.hidden_dim, 1)
25 | for lin in [self.lin1, self.lin2, self.lin3]:
26 | nn.init.xavier_uniform_(lin.weight)
27 | nn.init.zeros_(lin.bias)
28 | self._main = nn.Sequential(self.lin1, nn.ReLU(True), self.lin2, nn.ReLU(True), self.lin3)
29 |
30 | def forward(self, input, inter=0):
31 | if self.flags.grayscale_model:
32 | out = input.view(input.shape[0], 2, 14 * 14).sum(dim=1)
33 | else:
34 | out = input.view(input.shape[0], 2 * 14 * 14)
35 |
36 | out = self.lin1(out)
37 | if inter == 1:
38 | inter_out = out
39 | out = F.relu(out, True)
40 | if inter == 2:
41 | inter_out = out
42 | out = self.lin2(out)
43 | if inter == 3:
44 | inter_out = out
45 | out = F.relu(out, True)
46 | if inter == 4:
47 | inter_out = out
48 | out = self.lin3(out)
49 | if inter == 5:
50 | inter_out = out
51 |
52 | # out = self._main(out)
53 | if inter != 0:
54 | return out, inter_out
55 | else:
56 | return out
57 |
58 |
59 | class Flatten(nn.Module):
60 | def forward(self, x):
61 | return x.view(x.shape[0], -1)
62 |
63 |
64 | def net():
65 | return nn.Sequential(nn.Conv2d(1, 32, 5, padding=2), nn.ReLU(), nn.MaxPool2d(2, 2),
66 | nn.Conv2d(32, 64, 5, padding=2), nn.ReLU(), nn.MaxPool2d(2, 2), Flatten(),
67 | nn.Linear(7 * 7 * 64, 1024), nn.ReLU(), nn.Linear(1024, 10))
68 |
69 |
70 | class cmnist_cnn(nn.Module):
71 | '''small CNN for 2-channel 14x14 CMNIST inputs
72 | '''
73 |
74 | def __init__(self, ):
75 | super(cmnist_cnn, self).__init__()
76 | self._main = nn.Sequential(nn.Conv2d(2, 32, 5, padding=2), nn.ReLU(True),
77 | nn.MaxPool2d(2, 2),
78 | nn.Conv2d(32, 64, 5, padding=2), nn.ReLU(True),
79 | # nn.MaxPool2d(2, 2),
80 | nn.Conv2d(64, 32, 3, padding=0), nn.ReLU(True),
81 | Flatten(),
82 | nn.Linear(32 * 5 * 5, 256), nn.ReLU(True),
83 | nn.Linear(256, 1),
84 | # nn.Linear(7 * 7 * 64, 256), nn.ReLU(True), nn.Linear(256, 1),
85 | )
86 |
87 | def forward(self, input):
88 | # out = input.view(input.shape[0], 2, 14, 14)
89 | out = self._main(input)
90 | return out
91 |
--------------------------------------------------------------------------------
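A minimal usage sketch for the `MLP` above. The `flags` object is stood in by an `argparse.Namespace` carrying only the two fields the model reads, and the input shape follows the 2-channel 14x14 CMNIST pipeline; the import path is an assumption.

```python
# Hypothetical standalone usage of the MLP defined above.
import argparse
import torch

from network import MLP  # assumes this file is importable as `network`

flags = argparse.Namespace(grayscale_model=False, hidden_dim=256)
model = MLP(flags)

x = torch.rand(8, 2, 14, 14)        # a batch of 2-channel 14x14 CMNIST images
logits = model(x)                   # (8, 1) binary logits
logits, hidden = model(x, inter=1)  # also return the pre-ReLU output of lin1
print(logits.shape, hidden.shape)   # torch.Size([8, 1]) torch.Size([8, 256])
```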
/AC-CMNIST/utils/data/cmnist_irm.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from torchvision import datasets
3 |
4 |
5 | def get_mnist():
6 | # Load MNIST, make train/val splits, and shuffle train set examples
7 | mnist = datasets.MNIST('/home/zhangdh/data/mnist', train=True, download=True)
8 | '''
9 | In older torchvision versions MNIST has no 'data'/'targets' attributes;
10 | use mnist.train_data (60000, 28, 28)
11 | and mnist.train_labels (60000,) there instead.
12 | '''
13 | # mnist_train = (mnist.data[:50000], mnist.targets[:50000])
14 | # mnist_val = (mnist.data[50000:], mnist.targets[50000:])
15 | mnist_train = (mnist.data[:50000], mnist.targets[:50000])
16 | mnist_val = (mnist.data[50000:], mnist.targets[50000:])
17 | return mnist_train, mnist_val
18 |
19 |
20 | def make_environment(images, labels, e, device, flip_label=0.25):
21 | def torch_bernoulli(p, size):
22 | return (torch.rand(size) < p).float()
23 |
24 | def torch_xor(a, b):
25 | return (a - b).abs() # Assumes both inputs are either 0 or 1
26 |
27 | # 2x subsample for computational convenience
28 | images = images.reshape((-1, 28, 28))[:, ::2, ::2]
29 |
30 | # Assign a binary label based on the digit; flip label with probability 0.25
31 | labels = (labels < 5).float()
32 | # labels = torch_xor(labels, torch_bernoulli(0.25, len(labels)))
33 | labels = torch_xor(labels, torch_bernoulli(flip_label, len(labels)))
34 |
35 | # Assign a color based on the label; flip the color with probability e
36 | colors = torch_xor(labels, torch_bernoulli(e, len(labels)))
37 |
38 | # Apply the color to the image by zeroing out the other color channel
39 | images = torch.stack([images, images], dim=1)
40 | images[torch.tensor(range(len(images))), (1 - colors).long(), :, :] *= 0
41 |
42 | # images: (25000, 2, 14, 14) dtype=torch.uint8
43 | # labels: (25000, )
44 | return {
45 | 'images': (images.float() / 255.).to(device),
46 | 'labels': labels[:, None].to(device) # --> (25000, 1)
47 | }
48 |
49 |
--------------------------------------------------------------------------------
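A minimal sketch of how these two helpers are typically combined into the AC-CMNIST environments (two training environments with color-flip probabilities 0.1 and 0.2 and a test environment with 0.9, following the standard IRM Colored-MNIST recipe). Note that `get_mnist` reads from a hard-coded data path; the import path and device choice below are assumptions.

```python
# Hypothetical driver for get_mnist / make_environment above.
import torch
from cmnist_irm import get_mnist, make_environment  # assumed import path

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
mnist_train, mnist_val = get_mnist()

envs = [
    # alternate the 50k training images between two training environments
    make_environment(mnist_train[0][::2], mnist_train[1][::2], 0.1, device),
    make_environment(mnist_train[0][1::2], mnist_train[1][1::2], 0.2, device),
    # the held-out 10k images form the test environment with reversed color correlation
    make_environment(mnist_val[0], mnist_val[1], 0.9, device),
]
print(envs[0]['images'].shape, envs[0]['labels'].shape)  # (25000, 2, 14, 14), (25000, 1)
```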
/AC-CMNIST/utils/utils.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import math
3 | import os
4 | from typing import Tuple, List, Dict
5 | import torch
6 | import sys
7 |
8 | import time
9 |
10 | import torch.nn as nn
11 | import torch.nn.init as init
12 |
13 |
14 | def add_path(path):
15 | if path not in sys.path:
16 | print('Adding {}'.format(path))
17 | sys.path.append(path)
18 |
19 |
20 | def torch_accuracy(output, target, topk=(1,)) -> List[torch.Tensor]:
21 | '''
22 | param output, target: should be torch Variable
23 | '''
24 | # assert isinstance(output, torch.cuda.Tensor), 'expecting Torch Tensor'
25 | # assert isinstance(target, torch.Tensor), 'expecting Torch Tensor'
26 | # print(type(output))
27 |
28 | topn = max(topk)
29 | batch_size = output.size(0)
30 |
31 | _, pred = output.topk(topn, 1, True, True)
32 | pred = pred.t()
33 |
34 | is_correct = pred.eq(target.view(1, -1).expand_as(pred))
35 |
36 | ans = []
37 | for i in topk:
38 | is_correct_i = is_correct[:i].view(-1).float().sum(0, keepdim=True)
39 | ans.append(is_correct_i.mul_(100.0 / batch_size))
40 |
41 | return ans
42 |
43 |
44 | def mkdir(path):
45 | if not os.path.exists(path):
46 | print('creating dir {}'.format(path))
47 | os.mkdir(path)
48 | else:
49 | print('{} already exists.'.format(path))
50 |
51 |
52 | def pretty_print(*values, col_width=13):
53 | # col_width = 13
54 |
55 | def format_val(v):
56 | if not isinstance(v, str):
57 | v = np.array2string(v, precision=5, floatmode='fixed')
58 | return v.ljust(col_width)
59 |
60 | str_values = [format_val(v) for v in values]
61 | print(" ".join(str_values))
62 |
63 |
64 | class AvgMeter(object):
65 | '''
66 | Computing mean
67 | '''
68 | name = 'No name'
69 |
70 | def __init__(self, name='No name'):
71 | self.name = name
72 | self.reset()
73 |
74 | def reset(self):
75 | self.sum = 0
76 | self.mean = 0
77 | self.num = 0
78 | self.now = 0
79 |
80 | def update(self, mean_var, count=1):
81 | if math.isnan(mean_var):
82 | mean_var = 1e6
83 | print('Avgmeter getting Nan!')
84 | self.now = mean_var
85 | self.num += count
86 |
87 | self.sum += mean_var * count
88 | self.mean = float(self.sum) / self.num
89 |
90 |
91 | def make_symlink(source, link_name):
92 | '''
93 | Note: overwriting enabled!
94 | '''
95 | if os.path.exists(link_name):
96 | print("Link name already exist! Removing '{}' and overwriting".format(link_name))
97 | os.remove(link_name)
98 | if os.path.exists(source):
99 | os.symlink(source, link_name)
100 | return
101 | else:
102 | print('Source path does not exist')
103 |
104 |
105 | def to_onehot(inp, num_dim=10):
106 | # inp: (bs,) int
107 | # ret: (bs, num_dim) float
108 | # assert inp.dtype == torch.long
109 |
110 | batch_size = inp.shape[0]
111 | y_onehot = torch.FloatTensor(batch_size, num_dim).to(inp.device)
112 | y_onehot.zero_()
113 | y_onehot.scatter_(1, inp.reshape(batch_size, 1), 1)
114 |
115 | return y_onehot
--------------------------------------------------------------------------------
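A small sketch exercising `torch_accuracy` and `to_onehot` from the file above on random tensors (shapes made up for illustration; the import path is an assumption).

```python
# Hypothetical check of torch_accuracy / to_onehot on random data.
import torch
from utils import torch_accuracy, to_onehot  # assumed import path

logits = torch.randn(32, 10)            # (batch, num_classes) scores
targets = torch.randint(0, 10, (32,))   # integer class labels

top1, top5 = torch_accuracy(logits, targets, topk=(1, 5))
print(top1.item(), top5.item())         # accuracies as percentages in [0, 100]

onehot = to_onehot(targets, num_dim=10)
print(onehot.shape, onehot.sum(dim=1))  # torch.Size([32, 10]), all ones
```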
/CS-CMNIST/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | domainbed/facebook.py
3 |
--------------------------------------------------------------------------------
/CS-CMNIST/README.md:
--------------------------------------------------------------------------------
1 | # Comparisons on CS-CMNIST
2 |
3 | ## Run
4 | Two examples:
5 |
6 | ```bash
7 | python -m domainbed.scripts.sweep_train --holdout_fraction 0.2
8 | python -m domainbed.scripts.sweep_train --holdout_fraction 0.2 --test_val
9 | ```
10 | where `--holdout_fraction` controls the fraction of data used for validation, and
11 | `--test_val` decides whether the validation distribution is set to match the test distribution.
12 |
13 | Checkpoints are not stored;
14 | the final results are printed after the whole run completes.
15 |
16 | Our code is based on the [DomainBed](https://github.com/facebookresearch/DomainBed) suite.
--------------------------------------------------------------------------------
/CS-CMNIST/domainbed/hparams_registry.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import numpy as np
4 |
5 |
6 | def _hparams(algorithm, dataset, random_state):
7 | """
8 | Global registry of hyperparams. Each entry is a (default, random) tuple.
9 | New algorithms / networks / etc. should add entries here.
10 | """
11 | SMALL_IMAGES = ['Debug28', 'RotatedMNIST', 'ColoredMNIST']
12 |
13 | hparams = {}
14 |
15 | hparams['data_augmentation'] = (True, True)
16 | hparams['resnet18'] = (False, False)
17 |
18 | if dataset not in SMALL_IMAGES:
19 | hparams['lr'] = (5e-5, 10 ** random_state.uniform(-5, -3.5))
20 | if dataset == 'DomainNet':
21 | hparams['batch_size'] = (32, int(2 ** random_state.uniform(3, 5)))
22 | else:
23 | hparams['batch_size'] = (32, int(2 ** random_state.uniform(3, 5.5)))
24 | if algorithm == "ARM":
25 | hparams['batch_size'] = (8, 8)
26 | else:
27 | hparams['lr'] = (1e-3, 10 ** random_state.uniform(-4.5, -2.5))
28 | hparams['batch_size'] = (64, int(2 ** random_state.uniform(3, 9)))
29 |
30 | if dataset in SMALL_IMAGES:
31 | hparams['weight_decay'] = (0., 0.)
32 | else:
33 | hparams['weight_decay'] = (0., 10 ** random_state.uniform(-6, -2))
34 |
35 | hparams['class_balanced'] = (False, False)
36 |
37 | if algorithm in ['DANN', 'CDANN']:
38 |
39 | if dataset not in SMALL_IMAGES:
40 | hparams['lr_g'] = (5e-5, 10 ** random_state.uniform(-5, -3.5))
41 | hparams['lr_d'] = (5e-5, 10 ** random_state.uniform(-5, -3.5))
42 | else:
43 | hparams['lr_g'] = (1e-3, 10 ** random_state.uniform(-4.5, -2.5))
44 | hparams['lr_d'] = (1e-3, 10 ** random_state.uniform(-4.5, -2.5))
45 |
46 | if dataset in SMALL_IMAGES:
47 | hparams['weight_decay_g'] = (0., 0.)
48 | else:
49 | hparams['weight_decay_g'] = (0., 10 ** random_state.uniform(-6, -2))
50 |
51 | hparams['lambda'] = (1.0, 10 ** random_state.uniform(-2, 2))
52 | hparams['weight_decay_d'] = (0., 10 ** random_state.uniform(-6, -2))
53 | hparams['d_steps_per_g_step'] = (1, int(2 ** random_state.uniform(0, 3)))
54 | hparams['grad_penalty'] = (0., 10 ** random_state.uniform(-2, 1))
55 | hparams['beta1'] = (0.5, random_state.choice([0., 0.5]))
56 |
57 | if algorithm == "SagNet":
58 | hparams['resnet_dropout'] = (.5, random_state.choice([0., 0.1, 0.5]))
59 | else:
60 | hparams['resnet_dropout'] = (0., random_state.choice([0., 0.1, 0.5]))
61 |
62 | # TODO clean this up
63 | hparams.update({a: (b, c) for a, b, c in [
64 | # IRM
65 | ('irm_lambda', 1e2, 10 ** random_state.uniform(-1, 5)),
66 | ('irm_penalty_anneal_iters', 500, int(10 ** random_state.uniform(0, 4))),
67 | # VREx
68 | ('vrex_lambda', 1e1, 10 ** random_state.uniform(-1, 5)),
69 | ('vrex_penalty_anneal_iters', 500, int(10 ** random_state.uniform(0, 4))),
70 |
71 | ('ib_lambda', 1, 10 ** random_state.uniform(-1, 5)),
72 | ('ib_penalty_anneal_iters', 500, int(10 ** random_state.uniform(0, 4))),
73 |
74 | # Mixup
75 | ('mixup_alpha', 0.2, 10 ** random_state.uniform(-1, -1)),
76 | # GroupDRO
77 | ('groupdro_eta', 1e-2, 10 ** random_state.uniform(-3, -1)),
78 | # MMD
79 | ('mmd_gamma', 1., 10 ** random_state.uniform(-1, 1)),
80 | # MLP
81 | ('mlp_width', 256, int(2 ** random_state.uniform(6, 10))),
82 | ('mlp_depth', 3, int(random_state.choice([3, 4, 5]))),
83 | ('mlp_dropout', 0., random_state.choice([0., 0.1, 0.5])),
84 | # MLDG
85 | ('mldg_beta', 1., 10 ** random_state.uniform(-1, 1)),
86 | ('mtl_ema', .99, random_state.choice([0.5, 0.9, 0.99, 1.])),
87 | # SagNets
88 | ('sag_w_adv', 0.1, 10 ** random_state.uniform(-2, 1)),
89 | ]})
90 | return hparams
91 |
92 |
93 | def default_hparams(algorithm, dataset):
94 | dummy_random_state = np.random.RandomState(0)
95 | return {a: b for a, (b, c) in
96 | _hparams(algorithm, dataset, dummy_random_state).items()}
97 |
98 |
99 | def random_hparams(algorithm, dataset, seed):
100 | random_state = np.random.RandomState(seed)
101 | return {a: c for a, (b, c) in _hparams(algorithm, dataset, random_state).items()}
102 |
--------------------------------------------------------------------------------
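A short sketch of how the registry above is queried: `default_hparams` returns the first element of every (default, random) tuple, while `random_hparams` draws the second element from a seed-controlled `RandomState`. The algorithm and dataset names below are ones the registry logic already handles; the package import path is an assumption.

```python
# Hypothetical use of the default/random hparam accessors above.
from domainbed import hparams_registry  # assumes the CS-CMNIST package layout

defaults = hparams_registry.default_hparams('ERM', 'ColoredMNIST')
print(defaults['lr'], defaults['batch_size'])  # 0.001 64 (SMALL_IMAGES branch)

# A reproducible random draw: the same seed always yields the same hparams.
sampled = hparams_registry.random_hparams('ERM', 'ColoredMNIST', seed=0)
print(sampled['lr'], sampled['batch_size'])
```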
/CS-CMNIST/domainbed/lib/fast_data_loader.py:
--------------------------------------------------------------------------------
1 | import torch
2 |
3 | class _InfiniteSampler(torch.utils.data.Sampler):
4 | """Wraps another Sampler to yield an infinite stream."""
5 | def __init__(self, sampler):
6 | self.sampler = sampler
7 |
8 | def __iter__(self):
9 | while True:
10 | for batch in self.sampler:
11 | yield batch
12 |
13 | class InfiniteDataLoader:
14 | def __init__(self, dataset, weights, batch_size, num_workers):
15 | super().__init__()
16 | if weights is None:
17 | weights = torch.ones(len(dataset))
18 |
19 | batch_sampler = torch.utils.data.BatchSampler(
20 | torch.utils.data.WeightedRandomSampler(weights,
21 | replacement=True,
22 | num_samples=batch_size),
23 | batch_size=batch_size,
24 | drop_last=True)
25 |
26 | self._infinite_iterator = iter(torch.utils.data.DataLoader(
27 | dataset,
28 | num_workers=num_workers,
29 | batch_sampler=_InfiniteSampler(batch_sampler)
30 | ))
31 |
32 | def __iter__(self):
33 | while True:
34 | yield next(self._infinite_iterator)
35 |
36 | def __len__(self):
37 | raise ValueError
38 |
39 | class FastDataLoader:
40 | """DataLoader wrapper with slightly improved speed by not respawning worker
41 | processes at every epoch."""
42 | def __init__(self, dataset, batch_size, num_workers):
43 | super().__init__()
44 |
45 | batch_sampler = torch.utils.data.BatchSampler(
46 | torch.utils.data.SequentialSampler(dataset),
47 | batch_size=batch_size,
48 | drop_last=False
49 | )
50 |
51 | self._infinite_iterator = iter(torch.utils.data.DataLoader(
52 | dataset,
53 | num_workers=num_workers,
54 | batch_sampler=_InfiniteSampler(batch_sampler)
55 | ))
56 |
57 | self._length = len(batch_sampler)
58 |
59 | def __iter__(self):
60 | for _ in range(len(self)):
61 | yield next(self._infinite_iterator)
62 |
63 | def __len__(self):
64 | return self._length
--------------------------------------------------------------------------------
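A minimal sketch of the two loaders above on a toy `TensorDataset` (sizes arbitrary, import path assumed): `InfiniteDataLoader` yields an endless stream of weighted random minibatches, while `FastDataLoader` makes one sequential pass per iteration.

```python
# Hypothetical usage of InfiniteDataLoader / FastDataLoader above.
import torch
from torch.utils.data import TensorDataset
from domainbed.lib.fast_data_loader import InfiniteDataLoader, FastDataLoader  # assumed path

dataset = TensorDataset(torch.randn(100, 3), torch.randint(0, 2, (100,)))

# Infinite stream of random minibatches (weights=None means uniform sampling).
train_loader = InfiniteDataLoader(dataset, weights=None, batch_size=16, num_workers=0)
x, y = next(iter(train_loader))
print(x.shape)  # torch.Size([16, 3])

# Finite, sequential pass over the dataset; len() is the number of batches.
eval_loader = FastDataLoader(dataset, batch_size=16, num_workers=0)
print(len(eval_loader))  # 7 (six full batches plus one of 4 examples)
for x, y in eval_loader:
    pass
```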
/CS-CMNIST/domainbed/lib/misc.py:
--------------------------------------------------------------------------------
1 | """
2 | Things that don't belong anywhere else
3 | """
4 |
5 | import hashlib
6 | import json
7 | import os
8 | import sys
9 | from shutil import copyfile
10 | import pdb
11 | import numpy as np
12 | import torch
13 | import tqdm
14 | from collections import Counter
15 |
16 | def make_weights_for_balanced_classes(dataset):
17 | counts = Counter()
18 | classes = []
19 | for _, y in dataset:
20 | y = int(y)
21 | counts[y] += 1
22 | classes.append(y)
23 |
24 | n_classes = len(counts)
25 |
26 | weight_per_class = {}
27 | for y in counts:
28 | weight_per_class[y] = 1 / (counts[y] * n_classes)
29 |
30 | weights = torch.zeros(len(dataset))
31 | for i, y in enumerate(classes):
32 | weights[i] = weight_per_class[int(y)]
33 |
34 | return weights
35 |
36 | def pdb():
37 | sys.stdout = sys.__stdout__
38 | import pdb
39 | print("Launching PDB, enter 'n' to step to parent function.")
40 | pdb.set_trace()
41 |
42 | def seed_hash(*args):
43 | """
44 | Derive an integer hash from all args, for use as a random seed.
45 | """
46 | args_str = str(args)
47 | return int(hashlib.md5(args_str.encode("utf-8")).hexdigest(), 16) % (2**31)
48 |
49 | def print_separator():
50 | print("="*80)
51 |
52 | def print_row(row, colwidth=6, latex=False):
53 | if latex:
54 | sep = " & "
55 | end_ = "\\\\"
56 | else:
57 | sep = " "
58 | end_ = ""
59 |
60 | def format_val(x):
61 | if np.issubdtype(type(x), np.floating):
62 | x = "{:.6f}".format(x)
63 | return str(x).ljust(colwidth)[:colwidth]
64 | print(sep.join([format_val(x) for x in row]), end_)
65 |
66 | class _SplitDataset(torch.utils.data.Dataset):
67 | """Used by split_dataset"""
68 | def __init__(self, underlying_dataset, keys):
69 | super(_SplitDataset, self).__init__()
70 | self.underlying_dataset = underlying_dataset
71 | self.keys = keys
72 | def __getitem__(self, key):
73 | return self.underlying_dataset[self.keys[key]]
74 | def __len__(self):
75 | return len(self.keys)
76 |
77 | def split_dataset(dataset, n, seed=0):
78 | """
79 | Return a pair of datasets corresponding to a random split of the given
80 | dataset, with n datapoints in the first dataset and the rest in the last,
81 | using the given random seed
82 | """
83 | assert(n <= len(dataset))
84 | keys = list(range(len(dataset)))
85 | np.random.RandomState(seed).shuffle(keys)
86 | keys_1 = keys[:n]
87 | keys_2 = keys[n:]
88 | return _SplitDataset(dataset, keys_1), _SplitDataset(dataset, keys_2)
89 |
90 | def random_pairs_of_minibatches(minibatches):
91 | perm = torch.randperm(len(minibatches)).tolist()
92 | pairs = []
93 |
94 | for i in range(len(minibatches)):
95 | j = i + 1 if i < (len(minibatches) - 1) else 0
96 |
97 | xi, yi = minibatches[perm[i]][0], minibatches[perm[i]][1]
98 | xj, yj = minibatches[perm[j]][0], minibatches[perm[j]][1]
99 |
100 | min_n = min(len(xi), len(xj))
101 |
102 | pairs.append(((xi[:min_n], yi[:min_n]), (xj[:min_n], yj[:min_n])))
103 |
104 | return pairs
105 |
106 |
107 |
108 | def accuracy(network, loader, weights, device, with_color_label=False, set_color_y=False):
109 | correct = 0
110 | total = 0
111 | weights_offset = 0
112 |
113 | network.eval()
114 | with torch.no_grad():
115 | for item in loader:
116 | if with_color_label:
117 | # item: (x, digit, color)
118 | if set_color_y:
119 | x, _, y = item
120 | else:
121 | x, y, _ = item
122 | else:
123 | x, y = item
124 | x = x.to(device)
125 | y = y.to(device)
126 | p = network.predict(x)
127 | if weights is None:
128 | batch_weights = torch.ones(len(x))
129 | else:
130 | batch_weights = weights[weights_offset: weights_offset + len(x)]
131 | weights_offset += len(x)
132 | batch_weights = batch_weights.to(device)
133 | if p.size(1) == 1:
134 | correct += (p.gt(0).eq(y).float() * batch_weights).sum().item()
135 | else:
136 | correct += (p.argmax(1).eq(y).float() * batch_weights).sum().item()
137 | total += batch_weights.sum().item()
138 | network.train()
139 |
140 | return correct / total
141 |
142 | class Tee:
143 | def __init__(self, fname, mode="a"):
144 | self.stdout = sys.stdout
145 | self.file = open(fname, mode)
146 |
147 | def write(self, message):
148 | self.stdout.write(message)
149 | self.file.write(message)
150 | self.flush()
151 |
152 | def flush(self):
153 | self.stdout.flush()
154 | self.file.flush()
155 |
--------------------------------------------------------------------------------
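A quick sketch of `split_dataset` and `make_weights_for_balanced_classes` from the file above on a toy dataset (import path assumed). The per-example weights are constructed so that each class carries the same total mass and the weights sum to one.

```python
# Hypothetical usage of the dataset helpers above.
import torch
from torch.utils.data import TensorDataset
from domainbed.lib import misc  # assumed import path

labels = torch.tensor([0, 0, 0, 0, 0, 0, 1, 1, 1, 1])
dataset = TensorDataset(torch.randn(10, 3), labels)

# Deterministic 8/2 split controlled by the seed.
train_split, val_split = misc.split_dataset(dataset, n=8, seed=0)
print(len(train_split), len(val_split))  # 8 2

# Per-example weights giving each class the same total mass (they sum to 1).
weights = misc.make_weights_for_balanced_classes(dataset)
print(weights.sum())  # tensor(1.)
```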
/CS-CMNIST/domainbed/lib/wide_resnet.py:
--------------------------------------------------------------------------------
1 | """
2 | From https://github.com/meliketoy/wide-resnet.pytorch
3 | """
4 |
5 | import sys
6 | import pdb
7 |
8 | import numpy as np
9 | import torch
10 | import torch.nn as nn
11 | import torch.nn.functional as F
12 | import torch.nn.init as init
13 | from torch.autograd import Variable
14 |
15 |
16 | def conv3x3(in_planes, out_planes, stride=1):
17 | return nn.Conv2d(
18 | in_planes,
19 | out_planes,
20 | kernel_size=3,
21 | stride=stride,
22 | padding=1,
23 | bias=True)
24 |
25 |
26 | def conv_init(m):
27 | classname = m.__class__.__name__
28 | if classname.find('Conv') != -1:
29 | init.xavier_uniform_(m.weight, gain=np.sqrt(2))
30 | init.constant_(m.bias, 0)
31 | elif classname.find('BatchNorm') != -1:
32 | init.constant_(m.weight, 1)
33 | init.constant_(m.bias, 0)
34 |
35 |
36 | class wide_basic(nn.Module):
37 | def __init__(self, in_planes, planes, dropout_rate, stride=1):
38 | super(wide_basic, self).__init__()
39 | self.bn1 = nn.BatchNorm2d(in_planes)
40 | self.conv1 = nn.Conv2d(
41 | in_planes, planes, kernel_size=3, padding=1, bias=True)
42 | self.dropout = nn.Dropout(p=dropout_rate)
43 | self.bn2 = nn.BatchNorm2d(planes)
44 | self.conv2 = nn.Conv2d(
45 | planes, planes, kernel_size=3, stride=stride, padding=1, bias=True)
46 |
47 | self.shortcut = nn.Sequential()
48 | if stride != 1 or in_planes != planes:
49 | self.shortcut = nn.Sequential(
50 | nn.Conv2d(
51 | in_planes, planes, kernel_size=1, stride=stride,
52 | bias=True), )
53 |
54 | def forward(self, x):
55 | out = self.dropout(self.conv1(F.relu(self.bn1(x))))
56 | out = self.conv2(F.relu(self.bn2(out)))
57 | out += self.shortcut(x)
58 |
59 | return out
60 |
61 |
62 | class Wide_ResNet(nn.Module):
63 | """Wide Resnet with the softmax layer chopped off"""
64 | def __init__(self, input_shape, depth, widen_factor, dropout_rate):
65 | super(Wide_ResNet, self).__init__()
66 | self.in_planes = 16
67 |
68 | assert ((depth - 4) % 6 == 0), 'Wide-resnet depth should be 6n+4'
69 | n = (depth - 4) / 6
70 | k = widen_factor
71 |
72 | # print('| Wide-Resnet %dx%d' % (depth, k))
73 | nStages = [16, 16 * k, 32 * k, 64 * k]
74 |
75 | self.img_width = input_shape[-1]
76 | self.conv1 = conv3x3(input_shape[0], nStages[0])
77 | self.layer1 = self._wide_layer(
78 | wide_basic, nStages[1], n, dropout_rate, stride=1)
79 | self.layer2 = self._wide_layer(
80 | wide_basic, nStages[2], n, dropout_rate, stride=2)
81 | self.layer3 = self._wide_layer(
82 | wide_basic, nStages[3], n, dropout_rate, stride=2)
83 | self.bn1 = nn.BatchNorm2d(nStages[3], momentum=0.9)
84 |
85 | self.n_outputs = nStages[3]
86 |
87 | def _wide_layer(self, block, planes, num_blocks, dropout_rate, stride):
88 | strides = [stride] + [1] * (int(num_blocks) - 1)
89 | layers = []
90 |
91 | for stride in strides:
92 | layers.append(block(self.in_planes, planes, dropout_rate, stride))
93 | self.in_planes = planes
94 |
95 | return nn.Sequential(*layers)
96 |
97 | def forward(self, x):
98 | out = self.conv1(x)
99 | out = self.layer1(out)
100 | out = self.layer2(out)
101 | out = self.layer3(out)
102 | out = F.relu(self.bn1(out))
103 | # pdb.set_trace()
104 |
105 | if self.img_width == 32:
106 | out = F.avg_pool2d(out, 8)
107 | return out[:, :, 0, 0]
108 | elif self.img_width == 64:
109 | # todo xyl's modification
110 | out = F.avg_pool2d(out, 16)
111 | return out.view(-1, self.n_outputs)
112 | else:
113 | raise NotImplementedError
114 |
--------------------------------------------------------------------------------
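A small sketch instantiating the featurizer above. The depth must satisfy `(depth - 4) % 6 == 0`, and the forward pass only supports 32x32 or 64x64 inputs; the CIFAR-like input shape below and the import path are assumptions for illustration.

```python
# Hypothetical forward pass through the Wide_ResNet featurizer above.
import torch
from domainbed.lib.wide_resnet import Wide_ResNet  # assumed import path

# depth=16 satisfies (depth - 4) % 6 == 0; widen_factor=4 gives 64*4=256-dim features.
net = Wide_ResNet(input_shape=(3, 32, 32), depth=16, widen_factor=4, dropout_rate=0.0)

x = torch.randn(8, 3, 32, 32)
features = net(x)
print(features.shape, net.n_outputs)  # torch.Size([8, 256]) 256
```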
/InvarianceUnitTests/README.md:
--------------------------------------------------------------------------------
1 | # Comparisons on linear unit-tests
2 |
3 | ## Run
4 | For experiments with three environments
5 | ```bash
6 | python3 scripts/sweep.py \
7 | --models ERM IRMv1 IB_ERM IB_IRM \
8 | --num_iterations 10000 \
9 | --datasets Example1 Example1s Example2 Example2s Example3 Example3s \
10 | --dim_inv 5 --dim_spu 5 \
11 | --n_envs 3 \
12 | --num_data_seeds 50 --num_model_seeds 20 \
13 | --output_dir test_results/3envs
14 | ```
15 |
16 | For experiments with six environments
17 | ```bash
18 | python3 scripts/sweep.py \
19 | --models ERM IRMv1 IB_ERM IB_IRM \
20 | --num_iterations 10000 \
21 | --datasets Example1 Example1s Example2 Example2s Example3 Example3s \
22 | --dim_inv 5 --dim_spu 5 \
23 | --n_envs 6 \
24 | --num_data_seeds 50 --num_model_seeds 20 \
25 | --output_dir test_results/6envs
26 | ```
27 |
28 | ## Analyze
29 | `test_peak` is the number of test samples used for validation.
30 | It can take values in `0, 20, 100, 500`;
31 | `0` means data from the training distribution is used for validation.
32 |
33 | ```bash
34 | python scripts/collect_results.py test_results/3envs --test_peak 0
35 | python scripts/collect_results.py test_results/6envs --test_peak 0
36 | ```
37 |
38 | Our code is based on the [InvarianceUnitTests](https://github.com/facebookresearch/InvarianceUnitTests) suite.
--------------------------------------------------------------------------------
/InvarianceUnitTests/scripts/main.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import argparse
4 | import hashlib
5 | import pprint
6 | import json
7 | # import git
8 | import os
9 | import datasets
10 | import models
11 | import torch
12 | import utils
13 |
14 |
15 | def run_experiment(args, device):
16 | # build directory name
17 | # commit = git.Repo(search_parent_directories=True).head.object.hexsha[:10]
18 | # results_dirname = os.path.join(args["output_dir"], commit + "/")
19 | results_dirname = args["output_dir"]
20 | os.makedirs(results_dirname, exist_ok=True)
21 |
22 | # build file name
23 | md5_fname = hashlib.md5(str(args).encode('utf-8')).hexdigest()
24 | results_fname = os.path.join(results_dirname, md5_fname + ".jsonl")
25 | # results_file = open(results_fname, "w")
26 |
27 | utils.set_seed(args["data_seed"])
28 | dataset = datasets.DATASETS[args["dataset"]](
29 | dim_inv=args["dim_inv"],
30 | dim_spu=args["dim_spu"],
31 | n_envs=args["n_envs"]
32 | )
33 |
34 | # Oracle trained on test mode (scrambled)
35 | train_split = "train" if args["model"] != "Oracle" else "test"
36 |
37 | # sample the envs
38 | envs = {}
39 | for key_split, split in zip(("train", "validation", "test", "test_peak20", "test_peak100", "test_peak500"),
40 | (train_split, train_split, "test", "test", "test", "test")):
41 | envs[key_split] = {"keys": [], "envs": []}
42 |
43 | if key_split == "test_peak20":
44 | num_samples = 20
45 | elif key_split == "test_peak100":
46 | num_samples = 100
47 | elif key_split == "test_peak500":
48 | num_samples = 500
49 | else:
50 | num_samples = args["num_samples"]
51 |
52 | for env in dataset.envs:
53 | envs[key_split]["envs"].append(dataset.sample(
54 | n=num_samples,
55 | env=env, split=split)
56 | )
57 | envs[key_split]["keys"].append(env)
58 |
59 | # offsetting model seed to avoid overlap with data_seed
60 | utils.set_seed(args["model_seed"] + 1000)
61 |
62 | # selecting model
63 | args["num_dim"] = args["dim_inv"] + args["dim_spu"]
64 | model = models.MODELS[args["model"]](
65 | in_features=args["num_dim"],
66 | out_features=1,
67 | task=dataset.task,
68 | hparams=args["hparams"],
69 | device=device
70 | )
71 |
72 | # update this field for printing purposes
73 | args["hparams"] = model.hparams
74 |
75 | # fit the dataset
76 | model.fit(
77 | envs=envs,
78 | num_iterations=args["num_iterations"],
79 | callback=args["callback"])
80 |
81 | # compute the train, validation and test errors
82 | for split in ("train", "validation", "test", "test_peak20", "test_peak100", "test_peak500"):
83 | key = "error_" + split
84 | for k_env, env in zip(envs[split]["keys"], envs[split]["envs"]):
85 | args[key + "_" +
86 | k_env] = utils.compute_error(model, *env)
87 |
88 | # write results
89 | results_file = open(results_fname, "w")
90 | results_file.write(json.dumps(args))
91 | results_file.close()
92 | return args
93 |
94 |
95 | if __name__ == "__main__":
96 | parser = argparse.ArgumentParser(description='Synthetic invariances')
97 | parser.add_argument('--model', type=str, default="ERM")
98 | parser.add_argument('--num_iterations', type=int, default=10000)
99 | parser.add_argument('--hparams', type=str, default="default")
100 | parser.add_argument('--dataset', type=str, default="Example1")
101 | parser.add_argument('--dim_inv', type=int, default=5)
102 | parser.add_argument('--dim_spu', type=int, default=5)
103 | parser.add_argument('--n_envs', type=int, default=3)
104 | parser.add_argument('--num_samples', type=int, default=10000)
105 | parser.add_argument('--data_seed', type=int, default=0)
106 | parser.add_argument('--model_seed', type=int, default=0)
107 | parser.add_argument('--output_dir', type=str, default="results")
108 | parser.add_argument('--callback', action='store_true')
109 | args = parser.parse_args()
110 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
111 | pprint.pprint(run_experiment(vars(args), device))
112 |
--------------------------------------------------------------------------------
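A sketch of calling `run_experiment` programmatically rather than through the CLI; the argument dictionary mirrors the parser defaults in the file above, and running on CPU is an assumption.

```python
# Hypothetical programmatic call of run_experiment above.
import torch
import main  # assumes scripts/ is on sys.path

args = {
    "model": "ERM", "num_iterations": 1000, "hparams": "default",
    "dataset": "Example1", "dim_inv": 5, "dim_spu": 5, "n_envs": 3,
    "num_samples": 1000, "data_seed": 0, "model_seed": 0,
    "output_dir": "results", "callback": False,
}
results = main.run_experiment(args, torch.device("cpu"))

# run_experiment writes a .jsonl file under output_dir and also returns the
# augmented args dict, including per-environment train/validation/test errors.
print({k: v for k, v in results.items() if k.startswith("error_test")})
```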
/InvarianceUnitTests/scripts/sweep.py:
--------------------------------------------------------------------------------
1 | import main
2 | import random
3 | import models
4 | import datasets
5 | import argparse
6 | import getpass
7 | import torch
8 | from tqdm import tqdm
9 | import pdb
10 | import hashlib
11 | import os
12 |
13 |
14 | """
15 | python scripts/sweep.py --models IB_IRM --datasets Example3 --d 2
16 | python scripts/sweep.py --models IB_IRM --output_dir test_results/3envs --n_envs 3 --datasets Example1 --d 2
17 | """
18 |
19 | if __name__ == "__main__":
20 | parser = argparse.ArgumentParser(description='Synthetic invariances')
21 | parser.add_argument('--models', nargs='+', default=[])
22 | parser.add_argument('--num_iterations', type=int, default=10000)
23 | parser.add_argument('--hparams', type=str, default="default")
24 | parser.add_argument('--datasets', nargs='+', default=[])
25 | parser.add_argument('--dim_inv', type=int, default=5)
26 | parser.add_argument('--dim_spu', type=int, default=5)
27 | parser.add_argument('--n_envs', type=int, default=3)
28 | parser.add_argument('--num_samples', type=int, default=10000)
29 | parser.add_argument('--num_data_seeds', type=int, default=50)
30 | parser.add_argument('--num_model_seeds', type=int, default=20)
31 | parser.add_argument('--output_dir', type=str, default="test_results")
32 | parser.add_argument('--callback', action='store_true')
33 | parser.add_argument('--cluster', action="store_true")
34 | parser.add_argument('--jobs_cluster', type=int, default=512)
35 |
36 | parser.add_argument("--device", "--d", type=int)
37 | parser.add_argument("--clean", action="store_true")
38 | args = vars(parser.parse_args())
39 |
40 | try:
41 | import submitit
42 | except ImportError:
43 | args["cluster"] = False
44 | pass
45 |
46 | if torch.cuda.is_available() and args["device"] is not None:
47 | device = torch.device("cuda:{}".format(args["device"]))
48 | else:
49 | device = torch.device("cpu")
50 |
51 | all_jobs = []
52 | if len(args["models"]):
53 | model_lists = args["models"]
54 | else:
55 | model_lists = models.MODELS.keys()
56 | if len(args["datasets"]):
57 | dataset_lists = args["datasets"]
58 | else:
59 | dataset_lists = datasets.DATASETS.keys()
60 |
61 | for model in model_lists:
62 | for dataset in dataset_lists:
63 | for data_seed in range(args["num_data_seeds"]):
64 | for model_seed in range(args["num_model_seeds"]):
65 | train_args = {
66 | "model": model,
67 | "num_iterations": args["num_iterations"],
68 | "hparams": "random" if model_seed else "default",
69 | "dataset": dataset,
70 | "dim_inv": args["dim_inv"],
71 | "dim_spu": args["dim_spu"],
72 | "n_envs": args["n_envs"],
73 | "num_samples": args["num_samples"],
74 | "data_seed": data_seed,
75 | "model_seed": model_seed,
76 | "output_dir": args["output_dir"],
77 | "callback": args["callback"],
78 | }
79 |
80 | all_jobs.append(train_args)
81 |
82 | random.shuffle(all_jobs)
83 |
84 | print("Launching {} jobs...".format(len(all_jobs)))
85 |
86 | if args["cluster"]:
87 | executor = submitit.SlurmExecutor(
88 | folder=f"/checkpoint/{getpass.getuser()}/submitit/")
89 | executor.update_parameters(
90 | time=3*24*60,
91 | gpus_per_node=0,
92 | array_parallelism=args["jobs_cluster"],
93 | cpus_per_task=1,
94 | comment="",
95 | partition="learnfair")
96 |
97 | executor.map_array(main.run_experiment, all_jobs)
98 | else:
99 | count = 0
100 | for job in tqdm(all_jobs):
101 | if args["clean"]:
102 | results_dirname = job["output_dir"]
103 | md5_fname = hashlib.md5(str(job).encode('utf-8')).hexdigest()
104 | results_fname = os.path.join(results_dirname, md5_fname + ".jsonl")
105 | if os.path.exists(results_fname):
106 | os.remove(results_fname)
107 | count += 1
108 |
109 | else:
110 | result = main.run_experiment(job, device)
111 | # print(result)
112 |
113 | if args["clean"]:
114 | print("REMOVE COUNT:", count)
115 |
--------------------------------------------------------------------------------
/InvarianceUnitTests/scripts/utils.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import numpy as np
4 | import random
5 | import torch
6 | import pdb
7 |
8 |
9 | def set_seed(seed):
10 | torch.manual_seed(seed)
11 | np.random.seed(seed)
12 | random.seed(seed)
13 |
14 |
15 | def compute_error(algorithm, x, y):
16 | if hasattr(algorithm, "device"):
17 | device = algorithm.device
18 | else:
19 | device = torch.device("cpu")
20 | x, y = x.to(device), y.to(device)
21 |
22 | with torch.no_grad():
23 | if len(y.unique()) == 2:
24 | return algorithm.predict(x).gt(0).ne(y).float().mean().item()
25 | else:
26 | return (algorithm.predict(x) - y).pow(2).mean().item()
27 |
28 |
29 | def compute_errors(model, envs):
30 | for split in envs.keys():
31 | if not bool(model.callbacks["errors"][split]):
32 | model.callbacks["errors"][split] = {
33 | key: [] for key in envs[split]["keys"]}
34 |
35 | for k, env in zip(envs[split]["keys"], envs[split]["envs"]):
36 | model.callbacks["errors"][split][k].append(
37 | compute_error(model, *env))
38 |
--------------------------------------------------------------------------------
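A tiny sketch of `compute_error` above on a stand-in model. The helper switches between 0/1 classification error and mean squared error based on whether the labels take exactly two distinct values; the `LinearModel` wrapper below is hypothetical and exists only to expose the `.predict()` interface the helper expects.

```python
# Hypothetical check of compute_error with a minimal .predict() wrapper.
import torch
import torch.nn as nn
from utils import compute_error, set_seed  # assumes scripts/ is on sys.path

set_seed(0)

class LinearModel(nn.Module):
    def __init__(self, dim):
        super().__init__()
        self.net = nn.Linear(dim, 1)

    def predict(self, x):
        return self.net(x)

model = LinearModel(10)
x = torch.randn(100, 10)
y_bin = (torch.randn(100, 1) > 0).float()  # two unique values -> 0/1 error rate
y_reg = torch.randn(100, 1)                # continuous targets -> mean squared error
print(compute_error(model, x, y_bin), compute_error(model, x, y_reg))
```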
/README.md:
--------------------------------------------------------------------------------
1 | # Code for "Invariance Principle Meets Information Bottleneck for Out-of-Distribution Generalization"
2 |
3 | We place the three experiments
4 | (anti-causal colored MNIST, covariate-shift colored MNIST,
5 | and the invariance unit tests)
6 | and the respective instructions
7 | in three subfolders (`AC-CMNIST`, `CS-CMNIST`, and `InvarianceUnitTests`).
8 |
9 | ## Requirements
10 | - torch 1.6.0
11 | - torchvision 0.7.0
12 | - numpy 1.19.1
13 | - tqdm 4.41.1
--------------------------------------------------------------------------------
/TerraIncognita/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Code of Conduct
2 |
3 | Facebook has adopted a Code of Conduct that we expect project participants to adhere to.
4 | Please read the [full text](https://code.fb.com/codeofconduct/)
5 | so that you can understand what actions will and will not be tolerated.
6 |
--------------------------------------------------------------------------------
/TerraIncognita/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to `DomainBed`
2 | We want to make contributing to this project as easy and transparent as
3 | possible.
4 |
5 | ## Pull Requests
6 | We actively welcome your pull requests.
7 |
8 | 1. Fork the repo and create your branch from `master`.
9 | 2. If you've added code that should be tested, add tests.
10 | 3. If you've changed APIs, update the documentation.
11 | 4. Ensure the test suite passes.
12 | 5. Make sure your code lints.
13 | 6. If you haven't already, complete the Contributor License Agreement ("CLA").
14 |
15 | ## Contributor License Agreement ("CLA")
16 | In order to accept your pull request, we need you to submit a CLA. You only need
17 | to do this once to work on any of Facebook's open source projects.
18 |
19 | Complete your CLA here:
20 |
21 | ## Issues
22 | We use GitHub issues to track public bugs. Please ensure your description is
23 | clear and has sufficient instructions to be able to reproduce the issue.
24 |
25 | Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe
26 | disclosure of security bugs. In those cases, please go through the process
27 | outlined on that page and do not file a public issue.
28 |
29 | ## License
30 | By contributing to `DomainBed`, you agree that your contributions
31 | will be licensed under the LICENSE file in the root directory of this source
32 | tree.
33 |
--------------------------------------------------------------------------------
/TerraIncognita/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License
2 |
3 | Copyright (c) Facebook, Inc. and its affiliates.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
6 |
7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
8 |
9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
10 |
--------------------------------------------------------------------------------
/TerraIncognita/README.md:
--------------------------------------------------------------------------------
1 | # Reproduce the TerraIncognita results
2 |
3 | This folder should help you reproduce Table 4 for ERM, IRM, IB-ERM, and IB-IRM on TerraIncognita.
4 |
5 | You need to download the dataset with
6 | ```sh
7 | python3 -m domainbed.scripts.download \
8 | --data_dir=/my/datasets/path
9 | ```
10 |
11 | To launch the sweep:
12 | ```sh
13 | python3 -m domainbed.scripts.sweep launch \
14 | --algorithms ERM IRM IB_ERM_F_C IB_IRM_F_C \
15 | --datasets TerraIncognita \
16 | --data_dir=/my/datasets/path \
17 | --output_dir=/my/sweep/output/path \
18 | --command_launcher local \
19 | --unique_test_env 0 \
20 | --skip_confirmation
21 | ```
22 |
23 | To view the results of your sweep:
24 |
25 | ```sh
26 | python -m domainbed.scripts.collect_results \
27 | --input_dir=/my/sweep/output/path
28 | ```
29 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__init__.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/__init__.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/__init__.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/algorithms.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/algorithms.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/algorithms.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/algorithms.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/command_launchers.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/command_launchers.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/command_launchers.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/command_launchers.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/datasets.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/datasets.cpython-36.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/datasets.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/datasets.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/datasets.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/datasets.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/hparams_registry.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/hparams_registry.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/hparams_registry.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/hparams_registry.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/networks.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/networks.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/__pycache__/networks.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/__pycache__/networks.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/command_launchers.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | """
4 | A command launcher launches a list of commands on a cluster; implement your own
5 | launcher to add support for your cluster. We've provided an example launcher
6 | which runs all commands serially on the local machine.
7 | """
8 |
9 | import os
10 | import subprocess
11 | from multiprocessing import Pool
12 | import time
13 | import torch
14 |
15 | def local_launcher(commands):
16 | """Launch commands serially on the local machine."""
17 | for cmd in commands:
18 | subprocess.call(cmd, shell=True)
19 |
20 | def dummy_launcher(commands):
21 | """
22 | Doesn't run anything; instead, prints each command.
23 | Useful for testing.
24 | """
25 | for cmd in commands:
26 | print(f'Dummy launcher: {cmd}')
27 |
28 | def multi_gpu_launcher(commands):
29 | """
30 | Launch commands on the local machine, using all GPUs in parallel.
31 | """
32 | print('WARNING: using experimental multi_gpu_launcher.')
33 | n_gpus = torch.cuda.device_count()
34 | procs_by_gpu = [None]*n_gpus
35 |
36 | while len(commands) > 0:
37 | for gpu_idx in range(n_gpus):
38 | proc = procs_by_gpu[gpu_idx]
39 | if (proc is None) or (proc.poll() is not None):
40 | # Nothing is running on this GPU; launch a command.
41 | cmd = commands.pop(0)
42 | new_proc = subprocess.Popen(
43 | f'CUDA_VISIBLE_DEVICES={gpu_idx} {cmd}', shell=True)
44 | procs_by_gpu[gpu_idx] = new_proc
45 | break
46 | time.sleep(1)
47 |
48 | # Wait for the last few tasks to finish before returning
49 | for p in procs_by_gpu:
50 | if p is not None:
51 | p.wait()
52 |
53 | def slurm_launcher(commands):
54 | """
55 |     Parallel job launcher for a computational cluster using the SLURM workload manager.
56 | An example of SBATCH options:
57 | #!/bin/bash
58 | #SBATCH --job-name=
59 | #SBATCH --output=.out
60 | #SBATCH --error=_error.out
61 | #SBATCH --ntasks=4
62 | #SBATCH --cpus-per-task=8
63 | #SBATCH --gres=gpu:4
64 | #SBATCH --time=1-00:00:00
65 | #SBATCH --mem=81Gb
66 | Note: --cpus-per-task should match the N_WORKERS defined in datasets.py (default 8)
67 |     Note: the number of --ntasks should equal the number of GPUs requested with --gres
68 | """
69 |
70 | with Pool(processes=int(os.environ["SLURM_NTASKS"])) as pool:
71 |
72 | processes = []
73 | for command in commands:
74 | process = pool.apply_async(
75 | subprocess.run,
76 | [f'srun --ntasks=1 --cpus-per-task={os.environ["SLURM_CPUS_PER_TASK"]} --mem=20G --gres=gpu:1 --exclusive {command}'],
77 | {"shell": True}
78 | )
79 | processes.append(process)
80 | time.sleep(10)
81 |
82 | for i, process in enumerate(processes):
83 | process.wait()
84 | print("//////////////////////////////")
85 | print("//// Completed ", i , " / ", len(commands), "////")
86 | print("//////////////////////////////")
87 |
88 | REGISTRY = {
89 | 'local': local_launcher,
90 | 'dummy': dummy_launcher,
91 | 'multi_gpu': multi_gpu_launcher,
92 | 'slurm_launcher': slurm_launcher
93 | }
94 |
95 | try:
96 | from domainbed import facebook
97 | facebook.register_command_launchers(REGISTRY)
98 | except ImportError:
99 | pass
100 |
--------------------------------------------------------------------------------
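Editorial note: a brief usage sketch, not part of the repository. Every launcher above is just a function that consumes a list of shell-command strings, and the sweep script presumably resolves the name passed to `--command_launcher` through `REGISTRY`. Under those assumptions, registering a hypothetical custom launcher (`throttled_local_launcher` and the one-second pause are illustrative) could look like this:

```python
# Hypothetical sketch: a custom launcher that runs commands serially but pauses
# between them, registered under a new name so a sweep could select it with
# --command_launcher throttled_local. Assumes the repo is on PYTHONPATH.
import subprocess
import time

from domainbed.command_launchers import REGISTRY


def throttled_local_launcher(commands):
    """Run each shell command to completion, then wait a second before the next one."""
    for cmd in commands:
        subprocess.call(cmd, shell=True)
        time.sleep(1)


REGISTRY['throttled_local'] = throttled_local_launcher

if __name__ == '__main__':
    # Preview behaviour with the built-in dummy launcher, which only prints commands.
    REGISTRY['dummy'](['echo hello', 'echo world'])
```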
/TerraIncognita/domainbed/data/MNIST/MNIST/processed/test.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/processed/test.pt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/processed/training.pt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/processed/training.pt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/raw/t10k-images-idx3-ubyte:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/raw/t10k-images-idx3-ubyte
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/raw/t10k-images-idx3-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/raw/t10k-images-idx3-ubyte.gz
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/raw/t10k-labels-idx1-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/raw/t10k-labels-idx1-ubyte.gz
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-images-idx3-ubyte:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-images-idx3-ubyte
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-images-idx3-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-images-idx3-ubyte.gz
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-labels-idx1-ubyte:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-labels-idx1-ubyte
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-labels-idx1-ubyte.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/data/MNIST/MNIST/raw/train-labels-idx1-ubyte.gz
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/job.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | #SBATCH --account=rrg-bengioy-ad
3 | #SBATCH --job-name=PACS_1
4 | #SBATCH --output=PACS_1.out
5 | #SBATCH --error=PACS_1_error.out
6 | #SBATCH --ntasks=1
7 | #SBATCH --cpus-per-task=8
8 | #SBATCH --gres=gpu:1
9 | #SBATCH --time=0:10:00
10 | #SBATCH --mem=81Gb
11 |
12 |
13 | # Load modules and environments
14 | module load python/3.6
15 | source $HOME/invariance/bin/activate
16 | module load httpproxy
17 |
18 |
19 | cd $HOME/GitRepos/domainbed_ib/
20 |
21 | python -m domainbed.scripts.sweep delete_incomplete\
22 | --algorithm IRM\
23 | --dataset PACS\
24 | --data_dir $HOME/scratch/data/\
25 | --output_dir $HOME/scratch/IRM_experiment/PACS_rerun/results_jmtd/1/\
26 | --command_launcher slurm_launcher\
27 | --skip_confirmation\
28 | --n_trials 3 \
29 |        --n_hparams 40 \
30 | --default_hparams \
31 | --unique_test_env 0
32 |
33 | python3 -m domainbed.scripts.sweep launch\
34 | --algorithm IRM\
35 | --dataset PACS\
36 | --data_dir $HOME/scratch/data/\
37 | --output_dir $HOME/scratch/IRM_experiment/PACS_rerun/results_jmtd/1/\
38 | --command_launcher slurm_launcher\
39 | --skip_confirmation\
40 | --n_trials 3 \
41 |        --n_hparams 40 \
42 | --default_hparams \
43 | --unique_test_env 0
44 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/lib/__pycache__/fast_data_loader.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/lib/__pycache__/fast_data_loader.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/lib/__pycache__/fast_data_loader.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/lib/__pycache__/fast_data_loader.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/lib/__pycache__/misc.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/lib/__pycache__/misc.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/lib/__pycache__/misc.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/lib/__pycache__/misc.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/lib/__pycache__/wide_resnet.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/lib/__pycache__/wide_resnet.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/lib/__pycache__/wide_resnet.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/lib/__pycache__/wide_resnet.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/lib/fast_data_loader.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import torch
4 |
5 | class _InfiniteSampler(torch.utils.data.Sampler):
6 | """Wraps another Sampler to yield an infinite stream."""
7 | def __init__(self, sampler):
8 | self.sampler = sampler
9 |
10 | def __iter__(self):
11 | while True:
12 | for batch in self.sampler:
13 | yield batch
14 |
15 | class InfiniteDataLoader:
16 | def __init__(self, dataset, weights, batch_size, num_workers):
17 | super().__init__()
18 |
19 | if weights is not None:
20 | sampler = torch.utils.data.WeightedRandomSampler(weights,
21 | replacement=True,
22 | num_samples=batch_size)
23 | else:
24 | sampler = torch.utils.data.RandomSampler(dataset,
25 | replacement=True)
26 |
27 |         if weights is None:
28 | weights = torch.ones(len(dataset))
29 |
30 | batch_sampler = torch.utils.data.BatchSampler(
31 | sampler,
32 | batch_size=batch_size,
33 | drop_last=True)
34 |
35 | self._infinite_iterator = iter(torch.utils.data.DataLoader(
36 | dataset,
37 | num_workers=num_workers,
38 | batch_sampler=_InfiniteSampler(batch_sampler)
39 | ))
40 |
41 | def __iter__(self):
42 | while True:
43 | yield next(self._infinite_iterator)
44 |
45 | def __len__(self):
46 | raise ValueError
47 |
48 | class FastDataLoader:
49 | """DataLoader wrapper with slightly improved speed by not respawning worker
50 | processes at every epoch."""
51 | def __init__(self, dataset, batch_size, num_workers):
52 | super().__init__()
53 |
54 | batch_sampler = torch.utils.data.BatchSampler(
55 | torch.utils.data.RandomSampler(dataset, replacement=False),
56 | batch_size=batch_size,
57 | drop_last=False
58 | )
59 |
60 | self._infinite_iterator = iter(torch.utils.data.DataLoader(
61 | dataset,
62 | num_workers=num_workers,
63 | batch_sampler=_InfiniteSampler(batch_sampler)
64 | ))
65 |
66 | self._length = len(batch_sampler)
67 |
68 | def __iter__(self):
69 | for _ in range(len(self)):
70 | yield next(self._infinite_iterator)
71 |
72 | def __len__(self):
73 | return self._length
74 |
--------------------------------------------------------------------------------
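A minimal usage sketch for the two loaders above (editorial; the toy `TensorDataset` and all sizes are illustrative and not taken from the repo's training scripts):

```python
# Exercise InfiniteDataLoader (endless training stream) and FastDataLoader
# (finite evaluation passes) on a toy dataset. Assumes the repo is on PYTHONPATH.
import torch

from domainbed.lib.fast_data_loader import InfiniteDataLoader, FastDataLoader

xs = torch.randn(64, 3, 28, 28)
ys = torch.randint(0, 2, (64,))
dataset = torch.utils.data.TensorDataset(xs, ys)

# weights=None means uniform sampling with replacement; the iterator never ends.
train_loader = InfiniteDataLoader(dataset, weights=None, batch_size=16, num_workers=0)
train_iter = iter(train_loader)
x, y = next(train_iter)             # one minibatch of shape [16, 3, 28, 28]

# FastDataLoader yields exactly len(eval_loader) batches per pass, then stops.
eval_loader = FastDataLoader(dataset, batch_size=16, num_workers=0)
for xb, yb in eval_loader:
    pass

print(x.shape, len(eval_loader))
```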
/TerraIncognita/domainbed/lib/reporting.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import collections
4 |
5 | import json
6 | import os
7 |
8 | import tqdm
9 |
10 | from domainbed.lib.query import Q
11 |
12 | def load_records(path):
13 | records = []
14 | for i, subdir in tqdm.tqdm(list(enumerate(os.listdir(path))),
15 | ncols=80,
16 | leave=False):
17 | results_path = os.path.join(path, subdir, "results.jsonl")
18 | try:
19 | with open(results_path, "r") as f:
20 | for line in f:
21 | records.append(json.loads(line[:-1]))
22 | except IOError:
23 | pass
24 |
25 | return Q(records)
26 |
27 | def get_grouped_records(records):
28 | """Group records by (trial_seed, dataset, algorithm, test_env). Because
29 | records can have multiple test envs, a given record may appear in more than
30 | one group."""
31 | result = collections.defaultdict(lambda: [])
32 | for r in records:
33 | for test_env in r["args"]["test_envs"]:
34 | group = (r["args"]["trial_seed"],
35 | r["args"]["dataset"],
36 | r["args"]["algorithm"],
37 | test_env)
38 | result[group].append(r)
39 | return Q([{"trial_seed": t, "dataset": d, "algorithm": a, "test_env": e,
40 | "records": Q(r)} for (t,d,a,e),r in result.items()])
41 |
--------------------------------------------------------------------------------
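A hedged sketch of how the reporting helpers above might be used to inspect a finished sweep; the path is a placeholder, and the repo's own `collect_results` script does this end to end:

```python
# Load every results.jsonl under a sweep output directory and group the runs
# by (trial_seed, dataset, algorithm, test_env), as get_grouped_records does.
from domainbed.lib.reporting import load_records, get_grouped_records

records = load_records("/my/sweep/output/path")   # placeholder path
grouped = get_grouped_records(records)

for group in grouped:
    print(group["dataset"], group["algorithm"],
          "test_env", group["test_env"],
          "runs:", len(group["records"]))
```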
/TerraIncognita/domainbed/lib/wide_resnet.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | """
4 | From https://github.com/meliketoy/wide-resnet.pytorch
5 | """
6 |
7 | import sys
8 |
9 | import numpy as np
10 | import torch
11 | import torch.nn as nn
12 | import torch.nn.functional as F
13 | import torch.nn.init as init
14 | from torch.autograd import Variable
15 |
16 |
17 | def conv3x3(in_planes, out_planes, stride=1):
18 | return nn.Conv2d(
19 | in_planes,
20 | out_planes,
21 | kernel_size=3,
22 | stride=stride,
23 | padding=1,
24 | bias=True)
25 |
26 |
27 | def conv_init(m):
28 | classname = m.__class__.__name__
29 | if classname.find('Conv') != -1:
30 | init.xavier_uniform_(m.weight, gain=np.sqrt(2))
31 | init.constant_(m.bias, 0)
32 | elif classname.find('BatchNorm') != -1:
33 | init.constant_(m.weight, 1)
34 | init.constant_(m.bias, 0)
35 |
36 |
37 | class wide_basic(nn.Module):
38 | def __init__(self, in_planes, planes, dropout_rate, stride=1):
39 | super(wide_basic, self).__init__()
40 | self.bn1 = nn.BatchNorm2d(in_planes)
41 | self.conv1 = nn.Conv2d(
42 | in_planes, planes, kernel_size=3, padding=1, bias=True)
43 | self.dropout = nn.Dropout(p=dropout_rate)
44 | self.bn2 = nn.BatchNorm2d(planes)
45 | self.conv2 = nn.Conv2d(
46 | planes, planes, kernel_size=3, stride=stride, padding=1, bias=True)
47 |
48 | self.shortcut = nn.Sequential()
49 | if stride != 1 or in_planes != planes:
50 | self.shortcut = nn.Sequential(
51 | nn.Conv2d(
52 | in_planes, planes, kernel_size=1, stride=stride,
53 | bias=True), )
54 |
55 | def forward(self, x):
56 | out = self.dropout(self.conv1(F.relu(self.bn1(x))))
57 | out = self.conv2(F.relu(self.bn2(out)))
58 | out += self.shortcut(x)
59 |
60 | return out
61 |
62 |
63 | class Wide_ResNet(nn.Module):
64 | """Wide Resnet with the softmax layer chopped off"""
65 | def __init__(self, input_shape, depth, widen_factor, dropout_rate):
66 | super(Wide_ResNet, self).__init__()
67 | self.in_planes = 16
68 |
69 | assert ((depth - 4) % 6 == 0), 'Wide-resnet depth should be 6n+4'
70 | n = (depth - 4) / 6
71 | k = widen_factor
72 |
73 | # print('| Wide-Resnet %dx%d' % (depth, k))
74 | nStages = [16, 16 * k, 32 * k, 64 * k]
75 |
76 | self.conv1 = conv3x3(input_shape[0], nStages[0])
77 | self.layer1 = self._wide_layer(
78 | wide_basic, nStages[1], n, dropout_rate, stride=1)
79 | self.layer2 = self._wide_layer(
80 | wide_basic, nStages[2], n, dropout_rate, stride=2)
81 | self.layer3 = self._wide_layer(
82 | wide_basic, nStages[3], n, dropout_rate, stride=2)
83 | self.bn1 = nn.BatchNorm2d(nStages[3], momentum=0.9)
84 |
85 | self.n_outputs = nStages[3]
86 |
87 | def _wide_layer(self, block, planes, num_blocks, dropout_rate, stride):
88 | strides = [stride] + [1] * (int(num_blocks) - 1)
89 | layers = []
90 |
91 | for stride in strides:
92 | layers.append(block(self.in_planes, planes, dropout_rate, stride))
93 | self.in_planes = planes
94 |
95 | return nn.Sequential(*layers)
96 |
97 | def forward(self, x):
98 | out = self.conv1(x)
99 | out = self.layer1(out)
100 | out = self.layer2(out)
101 | out = self.layer3(out)
102 | out = F.relu(self.bn1(out))
103 | out = F.avg_pool2d(out, 8)
104 | return out[:, :, 0, 0]
105 |
--------------------------------------------------------------------------------
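A quick, editorial shape check for the featurizer above; the depth/widen_factor pair is just one example satisfying the `6n+4` constraint, and the 32x32 input size is implied by the final `avg_pool2d(out, 8)` after two stride-2 stages:

```python
# Instantiate a Wide-ResNet 28-2 featurizer on 32x32 RGB inputs and check that
# it returns flat feature vectors of size n_outputs = 64 * widen_factor.
import torch

from domainbed.lib.wide_resnet import Wide_ResNet

net = Wide_ResNet(input_shape=(3, 32, 32), depth=28, widen_factor=2, dropout_rate=0.0)
x = torch.randn(4, 3, 32, 32)
features = net(x)
print(features.shape, net.n_outputs)   # torch.Size([4, 128]) 128
```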
/TerraIncognita/domainbed/misc/test_sweep_data/0657090f9a83ff76efe083a104fde93a/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0657090f9a83ff76efe083a104fde93a/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/0657090f9a83ff76efe083a104fde93a/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0657090f9a83ff76efe083a104fde93a/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/0657090f9a83ff76efe083a104fde93a
18 | save_model_every_checkpoint: False
19 | seed: 360234358
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 2]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6448763251 0.6572438163 0.4602352941 0.4821092279 0.4028941356 0.3856707317 0.4883376527 0.4888888889 0.0000000000 1.6960320473 0 0.9077303410
37 | 1.0000000000 1.0000000000 0.5694117647 0.5536723164 0.7517136329 0.7225609756 0.9303961496 0.8592592593 10.335689045 0.2295612923 300 0.2678673498
38 | 0.9991166078 1.0000000000 0.5957647059 0.5800376648 0.7635186596 0.7240853659 0.9440947797 0.8548148148 20.671378091 0.0907488818 600 0.2698669426
39 | 1.0000000000 1.0000000000 0.5976470588 0.6082862524 0.7559025133 0.7256097561 0.9800074047 0.8503703704 31.007067137 0.0480223160 900 0.2695488143
40 | 1.0000000000 1.0000000000 0.5680000000 0.5687382298 0.7482863671 0.7362804878 0.9840799704 0.8474074074 34.452296819 0.0351698661 1000 0.2753722453
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/06db52bd7fcbb8172f97f11a62015261/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/06db52bd7fcbb8172f97f11a62015261/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/06db52bd7fcbb8172f97f11a62015261/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/06db52bd7fcbb8172f97f11a62015261/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/06db52bd7fcbb8172f97f11a62015261
18 | save_model_every_checkpoint: False
19 | seed: 1826196677
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.1475265018 0.1342756184 0.0672941176 0.0753295669 0.2429550647 0.2240853659 0.1384672344 0.1555555556 0.0000000000 1.8871159554 0 0.6768667698
37 | 0.9867491166 0.9964664311 0.7336470588 0.7193973635 0.7715156131 0.7606707317 0.8393187708 0.8192592593 2.1201413428 0.7141554105 300 0.1475044028
38 | 0.9902826855 0.9858657244 0.7788235294 0.7495291902 0.8015993907 0.8094512195 0.8656053314 0.7940740741 4.2402826855 0.5276730498 600 0.1483345437
39 | 0.9823321555 0.9858657244 0.7825882353 0.7193973635 0.8423457730 0.7881097561 0.8852276934 0.8237037037 6.3604240283 0.4728276532 900 0.1456738242
40 | 0.9832155477 0.9823321555 0.8009411765 0.7514124294 0.8488194973 0.8109756098 0.8992965568 0.8444444444 7.0671378092 0.4487797840 1000 0.1817230749
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/07ea1841921ad29c18ae52563b274925/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/07ea1841921ad29c18ae52563b274925/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/07ea1841921ad29c18ae52563b274925/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/07ea1841921ad29c18ae52563b274925/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/07ea1841921ad29c18ae52563b274925
18 | save_model_every_checkpoint: False
19 | seed: 164938159
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 2]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6369257951 0.6537102473 0.5082352941 0.5348399247 0.4508758568 0.4375000000 0.4427989633 0.4607407407 0.0000000000 1.6150231361 0 2.2098460197
37 | 0.9876325088 0.9858657244 0.8108235294 0.7947269303 0.6972581874 0.6783536585 0.8881895594 0.8325925926 10.335689045 0.5566045662 300 0.7824950083
38 | 0.9876325088 0.9858657244 0.8978823529 0.7853107345 0.7102056359 0.7134146341 0.9511292114 0.8340740741 20.671378091 0.3126574263 600 0.7610859227
39 | 0.9885159011 0.9858657244 0.9331764706 0.7476459510 0.7170601676 0.7012195122 0.9707515735 0.8311111111 31.007067137 0.1981815844 900 0.7655067587
40 | 0.9805653710 0.9717314488 0.9421176471 0.7853107345 0.7307692308 0.6798780488 0.9637171418 0.8207407407 34.452296819 0.1589800572 1000 0.7399253964
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0c53bbff83d887850721788187907586/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0c53bbff83d887850721788187907586/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/0c53bbff83d887850721788187907586/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0c53bbff83d887850721788187907586/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/0c53bbff83d887850721788187907586
18 | save_model_every_checkpoint: False
19 | seed: 883692786
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6139575972 0.6183745583 0.4654117647 0.4613935970 0.3769992384 0.4192073171 0.4527952610 0.4059259259 0.0000000000 1.5639189482 0 1.3415405750
37 | 0.9982332155 0.9929328622 0.6927058824 0.6798493409 0.8549124143 0.8064024390 0.7963717142 0.7674074074 8.4805653710 0.2506012543 300 0.2245095642
38 | 1.0000000000 0.9893992933 0.6254117647 0.6120527307 0.9440213252 0.8185975610 0.7023324695 0.6814814815 16.961130742 0.1403411952 600 0.2259919771
39 | 0.9973498233 0.9929328622 0.6009411765 0.5894538606 0.9257425743 0.7713414634 0.6823398741 0.6755555556 25.441696113 0.0984130776 900 0.2255344065
40 | 1.0000000000 0.9964664311 0.6174117647 0.6045197740 0.9676313785 0.8109756098 0.6564235468 0.6488888889 28.268551236 0.0749892714 1000 0.2303549671
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0ec227d205744455c681614d9f55d841/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0ec227d205744455c681614d9f55d841/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/0ec227d205744455c681614d9f55d841/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0ec227d205744455c681614d9f55d841/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/0ec227d205744455c681614d9f55d841
18 | save_model_every_checkpoint: False
19 | seed: 1652397067
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 2]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6289752650 0.6466431095 0.4720000000 0.4934086629 0.3888042650 0.3856707317 0.4535357275 0.4474074074 0.0000000000 1.5672284365 0 0.8409721851
37 | 0.9761484099 0.9787985866 0.7877647059 0.7099811676 0.7616146230 0.7301829268 0.9222510181 0.8414814815 8.4805653710 0.5017355401 300 0.4830384008
38 | 0.9726148410 0.9646643110 0.8748235294 0.7608286252 0.6816450876 0.6615853659 0.9263235839 0.8296296296 16.961130742 0.3284906636 600 0.4819568117
39 | 0.9752650177 0.9787985866 0.9256470588 0.7589453861 0.7086824067 0.6966463415 0.9559422436 0.8177777778 25.441696113 0.2250106066 900 0.4664689159
40 | 0.9681978799 0.9646643110 0.9138823529 0.7325800377 0.7349581112 0.6890243902 0.9766753054 0.8355555556 28.268551236 0.1948434772 1000 0.4899235868
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2
18 | save_model_every_checkpoint: False
19 | seed: 232202035
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2, 3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6139575972 0.6183745583 0.4668235294 0.4613935970 0.3766184311 0.4192073171 0.4535357275 0.4059259259 0.0000000000 1.5417273045 0 1.5597932339
37 | 0.9717314488 0.9575971731 0.8437647059 0.7570621469 0.5761614623 0.6006097561 0.6797482414 0.6696296296 8.4805653710 0.3278960832 300 0.5082177607
38 | 1.0000000000 1.0000000000 0.8785882353 0.7664783427 0.5670220868 0.5807926829 0.7012217697 0.6622222222 16.961130742 0.2037799085 600 0.5176990946
39 | 1.0000000000 1.0000000000 0.9317647059 0.7570621469 0.6245239909 0.6539634146 0.7515734913 0.7259259259 25.441696113 0.1357027507 900 0.5146216901
40 | 0.9991166078 0.9893992933 0.9228235294 0.7438794727 0.6054836253 0.6326219512 0.7319511292 0.7022222222 28.268551236 0.0931368149 1000 0.4918298554
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2/results.jsonl:
--------------------------------------------------------------------------------
1 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2", "save_model_every_checkpoint": false, "seed": 232202035, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2, 3], "trial_seed": 1, "uda_holdout_fraction": 0}, "env0_in_acc": 0.6139575971731449, "env0_out_acc": 0.6183745583038869, "env1_in_acc": 0.4668235294117647, "env1_out_acc": 0.4613935969868173, "env2_in_acc": 0.3766184310738766, "env2_out_acc": 0.4192073170731707, "env3_in_acc": 0.45353572750833027, "env3_out_acc": 0.4059259259259259, "epoch": 0.0, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 1.5417273044586182, "step": 0, "step_time": 1.55979323387146}
2 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2", "save_model_every_checkpoint": false, "seed": 232202035, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2, 3], "trial_seed": 1, "uda_holdout_fraction": 0}, "env0_in_acc": 0.9717314487632509, "env0_out_acc": 0.9575971731448764, "env1_in_acc": 0.843764705882353, "env1_out_acc": 0.7570621468926554, "env2_in_acc": 0.5761614623000761, "env2_out_acc": 0.600609756097561, "env3_in_acc": 0.679748241392077, "env3_out_acc": 0.6696296296296296, "epoch": 8.480565371024735, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.32789608324567476, "step": 300, "step_time": 0.5082177607218424}
3 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2", "save_model_every_checkpoint": false, "seed": 232202035, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2, 3], "trial_seed": 1, "uda_holdout_fraction": 0}, "env0_in_acc": 1.0, "env0_out_acc": 1.0, "env1_in_acc": 0.8785882352941177, "env1_out_acc": 0.7664783427495292, "env2_in_acc": 0.5670220868240671, "env2_out_acc": 0.5807926829268293, "env3_in_acc": 0.7012217697149205, "env3_out_acc": 0.6622222222222223, "epoch": 16.96113074204947, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.20377990854283173, "step": 600, "step_time": 0.5176990946133931}
4 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2", "save_model_every_checkpoint": false, "seed": 232202035, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2, 3], "trial_seed": 1, "uda_holdout_fraction": 0}, "env0_in_acc": 1.0, "env0_out_acc": 1.0, "env1_in_acc": 0.9317647058823529, "env1_out_acc": 0.7570621468926554, "env2_in_acc": 0.6245239908606245, "env2_out_acc": 0.6539634146341463, "env3_in_acc": 0.7515734912995187, "env3_out_acc": 0.725925925925926, "epoch": 25.441696113074205, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.13570275067041318, "step": 900, "step_time": 0.5146216901143392}
5 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/0fe0ed57077c0c9291931a388ba21be2", "save_model_every_checkpoint": false, "seed": 232202035, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2, 3], "trial_seed": 1, "uda_holdout_fraction": 0}, "env0_in_acc": 0.9991166077738516, "env0_out_acc": 0.9893992932862191, "env1_in_acc": 0.9228235294117647, "env1_out_acc": 0.743879472693032, "env2_in_acc": 0.6054836252856055, "env2_out_acc": 0.6326219512195121, "env3_in_acc": 0.7319511292114032, "env3_out_acc": 0.7022222222222222, "epoch": 28.268551236749115, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.09313681485131382, "step": 1000, "step_time": 0.4918298554420471}
6 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/1b0678ef843d122c17404ab8bd138523/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/1b0678ef843d122c17404ab8bd138523/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/1b0678ef843d122c17404ab8bd138523/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/1b0678ef843d122c17404ab8bd138523/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/1b0678ef843d122c17404ab8bd138523
18 | save_model_every_checkpoint: False
19 | seed: 703675087
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6033568905 0.6007067138 0.3477647059 0.3521657250 0.3335872049 0.3780487805 0.3791188449 0.3318518519 0.0000000000 1.6503455639 0 1.3420743942
37 | 0.8966431095 0.8692579505 0.7712941176 0.7514124294 0.8042650419 0.7865853659 0.7049241022 0.6829629630 2.1201413428 0.7344291466 300 0.1374709209
38 | 0.8984098940 0.8763250883 0.7802352941 0.7438794727 0.8297791318 0.8201219512 0.7334320622 0.7155555556 4.2402826855 0.5958860209 600 0.1401097918
39 | 0.4355123675 0.4628975265 0.7924705882 0.7401129944 0.8191165270 0.7713414634 0.6467974824 0.6311111111 6.3604240283 0.5318177843 900 0.1377514847
40 | 0.9107773852 0.8727915194 0.8061176471 0.7740112994 0.8206397563 0.8003048780 0.7600888560 0.7200000000 7.0671378092 0.4978464527 1000 0.1623143768
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/1b424e4ac8bc11c9d3f36b1729e19547/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/1b424e4ac8bc11c9d3f36b1729e19547/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/1b424e4ac8bc11c9d3f36b1729e19547/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/1b424e4ac8bc11c9d3f36b1729e19547/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/1b424e4ac8bc11c9d3f36b1729e19547
18 | save_model_every_checkpoint: False
19 | seed: 808031485
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2, 3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6033568905 0.6148409894 0.4550588235 0.4595103578 0.3450114242 0.3932926829 0.4420584969 0.3985185185 0.0000000000 1.4451200962 0 1.4165942669
37 | 0.9867491166 0.9787985866 0.7491764706 0.7325800377 0.5639756283 0.6006097561 0.7001110700 0.6518518519 2.1201413428 0.4410370264 300 0.1582184227
38 | 0.9991166078 0.9929328622 0.7783529412 0.7288135593 0.5662604722 0.5807926829 0.6878933728 0.6681481481 4.2402826855 0.3040031821 600 0.1537931506
39 | 1.0000000000 1.0000000000 0.8084705882 0.7288135593 0.5982482864 0.6112804878 0.7230655313 0.6888888889 6.3604240283 0.2854706001 900 0.1461815945
40 | 0.9991166078 1.0000000000 0.8141176471 0.7532956685 0.6587966489 0.6493902439 0.7152906331 0.6992592593 7.0671378092 0.2706131497 1000 0.1883794379
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/24c1684361b7442877526ab118da7117/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/24c1684361b7442877526ab118da7117/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/24c1684361b7442877526ab118da7117/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/24c1684361b7442877526ab118da7117/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/24c1684361b7442877526ab118da7117
18 | save_model_every_checkpoint: False
19 | seed: 845862410
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 1]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6157243816 0.6219081272 0.4663529412 0.4613935970 0.3769992384 0.4207317073 0.4539059608 0.4103703704 0.0000000000 1.6230642796 0 0.5895545483
37 | 0.9611307420 0.9646643110 0.6536470588 0.6290018832 0.8651942117 0.8445121951 0.8974453906 0.8251851852 8.4805653710 0.4414077417 300 0.2258998156
38 | 0.9708480565 0.9681978799 0.6094117647 0.5800376648 0.9116527037 0.8140243902 0.9433543132 0.8266666667 16.961130742 0.2386230343 600 0.2265182082
39 | 0.9726148410 0.9752650177 0.6315294118 0.6346516008 0.9638233054 0.8216463415 0.9733432062 0.8548148148 25.441696113 0.1686591896 900 0.2260356387
40 | 0.9814487633 0.9787985866 0.6785882353 0.6723163842 0.9535415080 0.8307926829 0.9637171418 0.8355555556 28.268551236 0.1337041207 1000 0.2317301798
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/24cf797be205aaef612b14beefc4c1a3/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/24cf797be205aaef612b14beefc4c1a3/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/24cf797be205aaef612b14beefc4c1a3/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/24cf797be205aaef612b14beefc4c1a3/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/24cf797be205aaef612b14beefc4c1a3
18 | save_model_every_checkpoint: False
19 | seed: 2080818722
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6121908127 0.6289752650 0.4597647059 0.4896421846 0.3846153846 0.3871951220 0.4435394298 0.4459259259 0.0000000000 1.4719194174 0 0.7130000591
37 | 0.9955830389 0.9823321555 0.6569411765 0.6572504708 0.8419649657 0.7652439024 0.9026286561 0.8385185185 8.4805653710 0.2995765518 300 0.3290495388
38 | 0.9982332155 0.9929328622 0.6635294118 0.6572504708 0.9105102818 0.7759146341 0.9540910774 0.8429629630 16.961130742 0.1636779740 600 0.3299173093
39 | 0.9982332155 0.9964664311 0.6371764706 0.6177024482 0.9565879665 0.7850609756 0.9663087745 0.8429629630 25.441696113 0.1089462244 900 0.3275924150
40 | 0.9982332155 0.9964664311 0.6658823529 0.6817325800 0.9527798934 0.7896341463 0.9433543132 0.8281481481 28.268551236 0.0757257283 1000 0.3295744514
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/2b696be39395e8830222b505f6aa45d8/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/2b696be39395e8830222b505f6aa45d8/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/2b696be39395e8830222b505f6aa45d8/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/2b696be39395e8830222b505f6aa45d8/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/2b696be39395e8830222b505f6aa45d8
18 | save_model_every_checkpoint: False
19 | seed: 555331067
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6210247350 0.6219081272 0.4677647059 0.4595103578 0.3769992384 0.4237804878 0.4838948538 0.4488888889 0.0000000000 1.5794914961 0 1.2731909752
37 | 0.9991166078 0.9964664311 0.8108235294 0.7476459510 0.8594821021 0.8307926829 0.7734172529 0.7629629630 8.4805653710 0.3850402999 300 0.5130404655
38 | 0.9814487633 0.9646643110 0.8625882353 0.7137476460 0.9089870526 0.8140243902 0.7223250648 0.7125925926 16.961130742 0.2540220108 600 0.5093434628
39 | 0.9991166078 0.9929328622 0.7576470588 0.6741996234 0.9158415842 0.7987804878 0.7500925583 0.7407407407 25.441696113 0.1773750270 900 0.4844152689
40 | 1.0000000000 0.9964664311 0.9265882353 0.7325800377 0.9398324448 0.7667682927 0.6915957053 0.6948148148 28.268551236 0.1285581028 1000 0.4817661285
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/2dd075c39b257eb019b4a8d813525113/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/2dd075c39b257eb019b4a8d813525113/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/2dd075c39b257eb019b4a8d813525113/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/2dd075c39b257eb019b4a8d813525113/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/2dd075c39b257eb019b4a8d813525113
18 | save_model_every_checkpoint: False
19 | seed: 1451105084
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6148409894 0.6254416961 0.5562352941 0.5404896422 0.4714394516 0.4527439024 0.4668641244 0.4651851852 0.0000000000 1.5553741455 0 0.7478101254
37 | 0.8595406360 0.8586572438 0.8244705882 0.7853107345 0.8625285605 0.7591463415 0.7478711588 0.7377777778 8.4805653710 0.5564234919 300 0.5294828455
38 | 0.3533568905 0.3498233216 0.8141176471 0.6779661017 0.9002284844 0.7698170732 0.6749352092 0.6918518519 16.961130742 0.3541142742 600 0.6940090084
39 | 0.4125441696 0.3992932862 0.9270588235 0.7495291902 0.9584920030 0.7881097561 0.7164013328 0.7185185185 25.441696113 0.2470643952 900 0.6397310909
40 | 0.3409893993 0.3074204947 0.9308235294 0.7288135593 0.9504950495 0.7804878049 0.6364309515 0.6518518519 28.268551236 0.1845126691 1000 0.6637606668
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/3539ff8139b8f1797865a2f26e51c70f/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/3539ff8139b8f1797865a2f26e51c70f/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/3539ff8139b8f1797865a2f26e51c70f/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/3539ff8139b8f1797865a2f26e51c70f/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/3539ff8139b8f1797865a2f26e51c70f
18 | save_model_every_checkpoint: False
19 | seed: 77312117
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 2]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6121908127 0.6289752650 0.4597647059 0.4896421846 0.3849961919 0.3871951220 0.4442798963 0.4459259259 0.0000000000 1.6168131828 0 1.5035538673
37 | 0.9911660777 0.9611307420 0.4945882353 0.4990583804 0.6031987814 0.5914634146 0.8637541651 0.7837037037 8.4805653710 0.2213101814 300 0.2264140566
38 | 0.9982332155 1.0000000000 0.6588235294 0.6779661017 0.7220106626 0.6996951220 0.9274342836 0.8385185185 16.961130742 0.1078731784 600 0.2273491073
39 | 0.9814487633 0.9717314488 0.6320000000 0.6195856874 0.6721249048 0.6371951220 0.8844872270 0.7925925926 25.441696113 0.0730464640 900 0.2263356590
40 | 1.0000000000 0.9964664311 0.5811764706 0.5800376648 0.7555217060 0.7271341463 0.9785264717 0.8533333333 28.268551236 0.0753941641 1000 0.2314931631
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/371b3e2afe1e7a754e49b2324bf159b6/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/371b3e2afe1e7a754e49b2324bf159b6/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/371b3e2afe1e7a754e49b2324bf159b6/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/371b3e2afe1e7a754e49b2324bf159b6/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/371b3e2afe1e7a754e49b2324bf159b6
18 | save_model_every_checkpoint: False
19 | seed: 673138363
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 1]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.2791519435 0.2650176678 0.0489411765 0.0489642185 0.0872048743 0.1036585366 0.2436134765 0.2325925926 0.0000000000 1.6690740585 0 0.5699374676
37 | 0.9823321555 0.9717314488 0.6056470588 0.5856873823 0.8153084539 0.8033536585 0.8670862643 0.8385185185 2.1201413428 0.6575384592 300 0.0850741275
38 | 0.9858657244 0.9858657244 0.6960000000 0.6911487759 0.8297791318 0.7972560976 0.8681969641 0.8355555556 4.2402826855 0.4300726643 600 0.0843147270
39 | 0.9611307420 0.9363957597 0.6588235294 0.6384180791 0.8587204874 0.8445121951 0.8926323584 0.8251851852 6.3604240283 0.3910656881 900 0.0857653062
40 | 0.9602473498 0.9575971731 0.6268235294 0.6101694915 0.8712871287 0.8216463415 0.8907811922 0.8281481481 7.0671378092 0.3222925671 1000 0.0998253107
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/41b0ac2ee570d8ace449c34ada3fdd01/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/41b0ac2ee570d8ace449c34ada3fdd01/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/41b0ac2ee570d8ace449c34ada3fdd01/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/41b0ac2ee570d8ace449c34ada3fdd01/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/41b0ac2ee570d8ace449c34ada3fdd01
18 | save_model_every_checkpoint: False
19 | seed: 1402607286
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2, 3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6113074205 0.6289752650 0.4489411765 0.4821092279 0.3625285605 0.3719512195 0.4409477971 0.4533333333 0.0000000000 1.5662012100 0 1.9600167274
37 | 1.0000000000 0.9964664311 0.8291764706 0.7702448211 0.6416603199 0.6021341463 0.7426878934 0.7407407407 10.335689045 0.3412963475 300 0.5609163912
38 | 1.0000000000 0.9929328622 0.8964705882 0.7853107345 0.5799695354 0.5838414634 0.7256571640 0.7259259259 20.671378091 0.1954872701 600 0.5597918383
39 | 0.9991166078 1.0000000000 0.9477647059 0.7608286252 0.6359482102 0.5990853659 0.7378748612 0.7348148148 31.007067137 0.1185131688 900 0.5421174677
40 | 1.0000000000 1.0000000000 0.9454117647 0.7890772128 0.5990099010 0.5807926829 0.7064050352 0.7081481481 34.452296819 0.0762012539 1000 0.5556480336
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/4a18a8be66b762f1ad5f45408bc27c78/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/4a18a8be66b762f1ad5f45408bc27c78/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/4a18a8be66b762f1ad5f45408bc27c78/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/4a18a8be66b762f1ad5f45408bc27c78/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/4a18a8be66b762f1ad5f45408bc27c78
18 | save_model_every_checkpoint: False
19 | seed: 1355770594
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 2]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6139575972 0.6183745583 0.4705882353 0.4595103578 0.3777608530 0.4192073171 0.4535357275 0.4014814815 0.0000000000 1.8560335636 0 1.0693266392
37 | 1.0000000000 0.9858657244 0.5774117647 0.5894538606 0.7688499619 0.7728658537 0.9222510181 0.8355555556 8.4805653710 0.2207562274 300 0.2226435788
38 | 0.9964664311 0.9929328622 0.5524705882 0.5687382298 0.7440974867 0.7332317073 0.9366901148 0.8192592593 16.961130742 0.1166370596 600 0.2226641949
39 | 0.9991166078 0.9964664311 0.5567058824 0.5612052731 0.7296268088 0.7332317073 0.9511292114 0.8192592593 25.441696113 0.0710875637 900 0.2238802059
40 | 1.0000000000 0.9964664311 0.5515294118 0.5442561205 0.7288651942 0.7012195122 0.9733432062 0.8414814815 28.268551236 0.0552595345 1000 0.2269736028
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/4ccfd57ae38cfc8fd5fba4293614ab26/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/4ccfd57ae38cfc8fd5fba4293614ab26/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/4ccfd57ae38cfc8fd5fba4293614ab26/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/4ccfd57ae38cfc8fd5fba4293614ab26/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/4ccfd57ae38cfc8fd5fba4293614ab26
18 | save_model_every_checkpoint: False
19 | seed: 225583337
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.5803886926 0.6007067138 0.5736470588 0.5762711864 0.4249809596 0.4161585366 0.4150314698 0.4074074074 0.0000000000 1.6056649685 0 1.2535927296
37 | 0.6439929329 0.6678445230 0.7957647059 0.7363465160 0.8846153846 0.8125000000 0.7512032581 0.7274074074 10.335689045 0.5884232441 300 0.6497526010
38 | 0.6925795053 0.7632508834 0.8818823529 0.7890772128 0.9280274181 0.7637195122 0.7312106627 0.7170370370 20.671378091 0.3515189211 600 0.6339190245
39 | 0.5468197880 0.5795053004 0.9312941176 0.7514124294 0.9634424981 0.7835365854 0.7234357645 0.7318518519 31.007067137 0.2306714023 900 0.6368054978
40 | 0.4717314488 0.4664310954 0.9487058824 0.7645951036 0.9619192688 0.7942073171 0.7171417993 0.7229629630 34.452296819 0.1516468529 1000 0.6133238769
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/539c70bc47514b76736c480df7036b8b/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/539c70bc47514b76736c480df7036b8b/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/539c70bc47514b76736c480df7036b8b/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/539c70bc47514b76736c480df7036b8b/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/539c70bc47514b76736c480df7036b8b
18 | save_model_every_checkpoint: False
19 | seed: 365467527
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 2]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.2994699647 0.2897526502 0.5430588235 0.4896421846 0.4779131759 0.5015243902 0.4879674195 0.4459259259 0.0000000000 1.7162715197 0 1.5893950462
37 | 0.9885159011 0.9717314488 0.8277647059 0.7796610169 0.7372429551 0.7682926829 0.9211403184 0.8325925926 8.4805653710 0.5158454158 300 0.4736440802
38 | 0.9584805654 0.9540636042 0.8320000000 0.7645951036 0.6751713633 0.7057926829 0.9218807849 0.8207407407 16.961130742 0.3099103693 600 0.4764646832
39 | 0.9787985866 0.9752650177 0.9232941176 0.7344632768 0.7296268088 0.7378048780 0.9585338763 0.8059259259 25.441696113 0.2075849420 900 0.4813308350
40 | 0.9893992933 0.9717314488 0.9402352941 0.7532956685 0.7204874334 0.7240853659 0.9689004073 0.8177777778 28.268551236 0.1533024151 1000 0.4948641443
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/63837f74bf4ac60044c74aa87114b386/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/63837f74bf4ac60044c74aa87114b386/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/63837f74bf4ac60044c74aa87114b386/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/63837f74bf4ac60044c74aa87114b386/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/63837f74bf4ac60044c74aa87114b386
18 | save_model_every_checkpoint: False
19 | seed: 1154273106
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6121908127 0.6395759717 0.4710588235 0.4651600753 0.3187357197 0.3658536585 0.4439096631 0.4014814815 0.0000000000 1.7305045128 0 0.7390449047
37 | 0.9991166078 0.9929328622 0.7510588235 0.7306967985 0.7303884235 0.7256097561 0.8593113662 0.8074074074 2.1201413428 0.4912618790 300 0.1476260916
38 | 0.9955830389 0.9929328622 0.7821176471 0.7457627119 0.7269611577 0.7012195122 0.8378378378 0.8059259259 4.2402826855 0.3587874381 600 0.1536783393
39 | 0.9991166078 1.0000000000 0.8192941176 0.7589453861 0.7463823305 0.7530487805 0.9107737875 0.8414814815 6.3604240283 0.3129132905 900 0.1507747587
40 | 0.9982332155 0.9964664311 0.7934117647 0.7532956685 0.6980198020 0.7164634146 0.8870788597 0.8192592593 7.0671378092 0.2962619931 1000 0.1803278637
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/66006bc6faa9f96db95a5bcfc3e4340a/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/66006bc6faa9f96db95a5bcfc3e4340a/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/66006bc6faa9f96db95a5bcfc3e4340a/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/66006bc6faa9f96db95a5bcfc3e4340a/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/66006bc6faa9f96db95a5bcfc3e4340a
18 | save_model_every_checkpoint: False
19 | seed: 1721972278
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6227915194 0.6431095406 0.4569411765 0.4783427495 0.4154607768 0.4298780488 0.4613106257 0.4592592593 0.0000000000 1.8584471941 0 1.0781459808
37 | 0.9876325088 0.9787985866 0.8000000000 0.7514124294 0.8667174410 0.7774390244 0.9107737875 0.8592592593 8.4805653710 0.5497589125 300 0.5222175368
38 | 0.9699646643 0.9752650177 0.8075294118 0.6930320151 0.8827113481 0.7606707317 0.9170677527 0.8014814815 16.961130742 0.3329716441 600 0.5305303041
39 | 0.9496466431 0.9505300353 0.9058823529 0.7382297552 0.9230769231 0.7621951220 0.9596445761 0.8355555556 25.441696113 0.2295369956 900 0.5154033097
40 | 0.9637809187 0.9505300353 0.8498823529 0.7325800377 0.9169840061 0.7454268293 0.9596445761 0.8444444444 28.268551236 0.1873281671 1000 0.5267591643
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/66779ee52d1111eddfcc6dafa8ae983c/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/66779ee52d1111eddfcc6dafa8ae983c/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/66779ee52d1111eddfcc6dafa8ae983c/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/66779ee52d1111eddfcc6dafa8ae983c/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/66779ee52d1111eddfcc6dafa8ae983c
18 | save_model_every_checkpoint: False
19 | seed: 121752067
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6219081272 0.6183745583 0.4630588235 0.4538606403 0.3800456969 0.4314024390 0.4535357275 0.4059259259 0.0000000000 1.6938700676 0 1.5393824577
37 | 0.9973498233 0.9964664311 0.6625882353 0.6384180791 0.8743335872 0.8384146341 0.8878193262 0.8192592593 8.4805653710 0.2932585667 300 0.3279992390
38 | 0.9991166078 0.9823321555 0.6362352941 0.6233521657 0.9166031988 0.8262195122 0.9411329137 0.8400000000 16.961130742 0.1662088908 600 0.3277589742
39 | 1.0000000000 0.9964664311 0.6141176471 0.5932203390 0.9295506474 0.7881097561 0.9681599408 0.8251851852 25.441696113 0.1098899398 900 0.3271942870
40 | 1.0000000000 0.9964664311 0.6512941176 0.6365348399 0.9531607007 0.8262195122 0.9800074047 0.8385185185 28.268551236 0.0795394431 1000 0.3333628297
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/691f8b51c9f69b380113a6a2645392bb/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/691f8b51c9f69b380113a6a2645392bb/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/691f8b51c9f69b380113a6a2645392bb/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/691f8b51c9f69b380113a6a2645392bb/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/691f8b51c9f69b380113a6a2645392bb
18 | save_model_every_checkpoint: False
19 | seed: 1308297739
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 1]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.3286219081 0.3533568905 0.4724705882 0.5084745763 0.3773800457 0.3932926829 0.3702332469 0.3437037037 0.0000000000 1.8032251596 0 1.7875323296
37 | 0.9920494700 0.9929328622 0.6385882353 0.6384180791 0.8903274943 0.7972560976 0.9203998519 0.8488888889 10.335689045 0.4660329765 300 0.2698347815
38 | 0.9929328622 0.9964664311 0.6070588235 0.5951035782 0.9402132521 0.8079268293 0.9552017771 0.8696296296 20.671378091 0.2222180948 600 0.2707187851
39 | 0.9664310954 0.9717314488 0.6498823529 0.6610169492 0.9550647372 0.7804878049 0.9651980748 0.8607407407 31.007067137 0.1402724676 900 0.2694199697
40 | 0.9796819788 0.9752650177 0.6696470588 0.6798493409 0.9702970297 0.8003048780 0.9829692706 0.8785185185 34.452296819 0.1025925899 1000 0.2757954836
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/6d481a40ca86768fad6a5088cb58458e/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/6d481a40ca86768fad6a5088cb58458e/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/6d481a40ca86768fad6a5088cb58458e/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/6d481a40ca86768fad6a5088cb58458e/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/6d481a40ca86768fad6a5088cb58458e
18 | save_model_every_checkpoint: False
19 | seed: 1155380425
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6095406360 0.6289752650 0.4268235294 0.4538606403 0.4002284844 0.3963414634 0.4450203628 0.4459259259 0.0000000000 1.5691214800 0 1.3228244781
37 | 0.9681978799 0.9575971731 0.8042352941 0.7627118644 0.8476770754 0.7759146341 0.8863383932 0.8488888889 10.335689045 0.5472711667 300 0.6333832312
38 | 0.9655477032 0.9787985866 0.8672941176 0.7608286252 0.9192688500 0.7637195122 0.9363198815 0.8385185185 20.671378091 0.3273245532 600 0.6328919633
39 | 0.9761484099 0.9717314488 0.8917647059 0.7627118644 0.9554455446 0.8003048780 0.9626064421 0.8622222222 31.007067137 0.2252053858 900 0.6284170349
40 | 0.9549469965 0.9611307420 0.9388235294 0.7551789077 0.9691546078 0.8033536585 0.9629766753 0.8385185185 34.452296819 0.1616993903 1000 0.6197570014
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/708942ac219532c45db7898ef9cfb955/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/708942ac219532c45db7898ef9cfb955/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/708942ac219532c45db7898ef9cfb955/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/708942ac219532c45db7898ef9cfb955/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/708942ac219532c45db7898ef9cfb955
18 | save_model_every_checkpoint: False
19 | seed: 759729212
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6121908127 0.6289752650 0.4663529412 0.4934086629 0.3910891089 0.3871951220 0.4439096631 0.4459259259 0.0000000000 1.7968641520 0 0.7578103542
37 | 0.9955830389 0.9929328622 0.6004705882 0.6082862524 0.8781416603 0.7942073171 0.7275083302 0.7555555556 8.4805653710 0.2684762215 300 0.2228098536
38 | 1.0000000000 1.0000000000 0.5971764706 0.5875706215 0.9226961158 0.7774390244 0.6749352092 0.6711111111 16.961130742 0.1445222108 600 0.2233129327
39 | 0.9973498233 1.0000000000 0.6522352941 0.6723163842 0.9683929931 0.8140243902 0.7574972233 0.7659259259 25.441696113 0.0898542125 900 0.2237123227
40 | 0.9982332155 1.0000000000 0.6508235294 0.6610169492 0.9565879665 0.7850609756 0.7189929656 0.7244444444 28.268551236 0.0731842542 1000 0.2280582023
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/728347e87d1c533379956bf94dca6fef/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/728347e87d1c533379956bf94dca6fef/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/728347e87d1c533379956bf94dca6fef/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/728347e87d1c533379956bf94dca6fef/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/728347e87d1c533379956bf94dca6fef
18 | save_model_every_checkpoint: False
19 | seed: 876870413
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 2]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6492932862 0.6537102473 0.4743529412 0.4538606403 0.3651942117 0.3978658537 0.3843021103 0.3644444444 0.0000000000 1.6890671253 0 1.6487302780
37 | 1.0000000000 0.9929328622 0.6014117647 0.6139359699 0.7372429551 0.7240853659 0.8607922991 0.8370370370 2.1201413428 0.3830932904 300 0.0842626444
38 | 0.9964664311 0.9929328622 0.5538823529 0.5461393597 0.7448591013 0.7484756098 0.8955942244 0.8311111111 4.2402826855 0.2157828625 600 0.0850240620
39 | 0.9982332155 0.9964664311 0.5929411765 0.5819209040 0.7760853008 0.7743902439 0.9107737875 0.8488888889 6.3604240283 0.1769324361 900 0.0844715873
40 | 0.9973498233 0.9929328622 0.5929411765 0.5856873823 0.7757044935 0.7972560976 0.9089226213 0.8400000000 7.0671378092 0.1479099048 1000 0.0993904734
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/7a6119601f2d7f4ce36e0d5d478332dd/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/7a6119601f2d7f4ce36e0d5d478332dd/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/7a6119601f2d7f4ce36e0d5d478332dd/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/7a6119601f2d7f4ce36e0d5d478332dd/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/7a6119601f2d7f4ce36e0d5d478332dd
18 | save_model_every_checkpoint: False
19 | seed: 745093665
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6272084806 0.6395759717 0.4964705882 0.4632768362 0.4329779132 0.4710365854 0.4742687893 0.4237037037 0.0000000000 1.6059685946 0 1.8818829060
37 | 0.9911660777 0.9787985866 0.7374117647 0.7099811676 0.8476770754 0.8094512195 0.9059607553 0.8296296296 8.4805653710 0.5164279704 300 0.6003480299
38 | 0.9628975265 0.9611307420 0.8588235294 0.7589453861 0.8933739528 0.8201219512 0.9278045168 0.8207407407 16.961130742 0.3321266067 600 0.6016749573
39 | 0.9761484099 0.9752650177 0.9091764706 0.7419962335 0.9424980960 0.7865853659 0.9685301740 0.8222222222 25.441696113 0.2240007397 900 0.5920131238
40 | 0.9779151943 0.9681978799 0.9322352941 0.7532956685 0.9531607007 0.7942073171 0.9592743428 0.8325925926 28.268551236 0.1744494830 1000 0.5965073538
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5
18 | save_model_every_checkpoint: False
19 | seed: 969090155
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6484098940 0.6678445230 0.4936470588 0.5178907721 0.4025133283 0.4131097561 0.4653831914 0.4548148148 0.0000000000 1.7983697653 0 1.2256424427
37 | 1.0000000000 1.0000000000 0.8155294118 0.7853107345 0.8739527799 0.7728658537 0.7686042207 0.7525925926 8.4805653710 0.4000296687 300 0.8405228472
38 | 0.9982332155 0.9964664311 0.8061176471 0.7118644068 0.9185072353 0.7804878049 0.7782302851 0.7881481481 16.961130742 0.2543533290 600 0.6019054683
39 | 1.0000000000 1.0000000000 0.9162352941 0.7702448211 0.9265041889 0.7469512195 0.6730840429 0.6696296296 25.441696113 0.1691624259 900 0.5962682295
40 | 0.9991166078 0.9858657244 0.9284705882 0.7608286252 0.9158415842 0.7271341463 0.6571640133 0.6651851852 28.268551236 0.1234844617 1000 0.5267507792
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5/results.jsonl:
--------------------------------------------------------------------------------
1 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5", "save_model_every_checkpoint": false, "seed": 969090155, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [3], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 0.6484098939929329, "env0_out_acc": 0.6678445229681979, "env1_in_acc": 0.49364705882352944, "env1_out_acc": 0.5178907721280602, "env2_in_acc": 0.4025133282559025, "env2_out_acc": 0.41310975609756095, "env3_in_acc": 0.46538319141058865, "env3_out_acc": 0.45481481481481484, "epoch": 0.0, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 1.7983697652816772, "step": 0, "step_time": 1.225642442703247}
2 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5", "save_model_every_checkpoint": false, "seed": 969090155, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [3], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 1.0, "env0_out_acc": 1.0, "env1_in_acc": 0.8155294117647058, "env1_out_acc": 0.7853107344632768, "env2_in_acc": 0.8739527798933739, "env2_out_acc": 0.7728658536585366, "env3_in_acc": 0.7686042206590151, "env3_out_acc": 0.7525925925925926, "epoch": 8.480565371024735, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.4000296687086423, "step": 300, "step_time": 0.8405228471755981}
3 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5", "save_model_every_checkpoint": false, "seed": 969090155, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [3], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 0.9982332155477032, "env0_out_acc": 0.9964664310954063, "env1_in_acc": 0.8061176470588235, "env1_out_acc": 0.711864406779661, "env2_in_acc": 0.9185072353389185, "env2_out_acc": 0.7804878048780488, "env3_in_acc": 0.7782302850796001, "env3_out_acc": 0.7881481481481482, "epoch": 16.96113074204947, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.2543533290425936, "step": 600, "step_time": 0.601905468304952}
4 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5", "save_model_every_checkpoint": false, "seed": 969090155, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [3], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 1.0, "env0_out_acc": 1.0, "env1_in_acc": 0.916235294117647, "env1_out_acc": 0.7702448210922788, "env2_in_acc": 0.9265041888804265, "env2_out_acc": 0.7469512195121951, "env3_in_acc": 0.6730840429470566, "env3_out_acc": 0.6696296296296296, "epoch": 25.441696113074205, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.16916242592036723, "step": 900, "step_time": 0.5962682294845582}
5 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5", "save_model_every_checkpoint": false, "seed": 969090155, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [3], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 0.9991166077738516, "env0_out_acc": 0.9858657243816255, "env1_in_acc": 0.9284705882352942, "env1_out_acc": 0.7608286252354048, "env2_in_acc": 0.9158415841584159, "env2_out_acc": 0.7271341463414634, "env3_in_acc": 0.6571640133283969, "env3_out_acc": 0.6651851851851852, "epoch": 28.268551236749115, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.12348446171730756, "step": 1000, "step_time": 0.5267507791519165}
6 |
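Note: the following is an illustrative sketch, not a file from the repository. It shows one way the results.jsonl records dumped above could be read back with only the Python standard library, assuming nothing beyond the record layout visible in the dump (one JSON object per line with "args", "hparams", per-environment accuracy keys, "loss", "step", and "step_time"). The run-directory path used in the example is the one shown above.

    # Minimal sketch for loading a DomainBed-style results.jsonl file.
    import json
    from pathlib import Path

    def load_records(path):
        """Yield one dict per non-empty line of a results.jsonl file."""
        for line in Path(path).read_text().splitlines():
            if line.strip():
                yield json.loads(line)

    if __name__ == "__main__":
        run_dir = Path("domainbed/misc/test_sweep_data/85964cf17f520330ea56101aed9602e5")
        records = list(load_records(run_dir / "results.jsonl"))
        # Report the last logged step and its held-out accuracy on the test environment.
        last = records[-1]
        test_env = last["args"]["test_envs"][0]
        print(f"step {last['step']}: env{test_env}_out_acc = "
              f"{last[f'env{test_env}_out_acc']:.4f}")

For the run above this would print the step-1000 entry for env3_out_acc, i.e. the same value that appears in the last data row of the corresponding out.txt.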
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/86394db2b6c2ecd1e3b08e99e14759f2/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/86394db2b6c2ecd1e3b08e99e14759f2/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/86394db2b6c2ecd1e3b08e99e14759f2/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/86394db2b6c2ecd1e3b08e99e14759f2/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/86394db2b6c2ecd1e3b08e99e14759f2
18 | save_model_every_checkpoint: False
19 | seed: 664692933
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.2181978799 0.2190812721 0.0418823529 0.0640301318 0.3297791318 0.2942073171 0.1355053684 0.1688888889 0.0000000000 1.5366128683 0 1.3182864189
37 | 0.9973498233 1.0000000000 0.7072941176 0.6911487759 0.8137852247 0.7804878049 0.7649018882 0.7540740741 2.1201413428 0.4162907769 300 0.0860185695
38 | 0.9991166078 0.9964664311 0.6687058824 0.6421845574 0.8198781417 0.8094512195 0.8019252129 0.7629629630 4.2402826855 0.2737542759 600 0.0876681225
39 | 1.0000000000 1.0000000000 0.6508235294 0.6308851224 0.8735719726 0.8246951220 0.7526841910 0.7422222222 6.3604240283 0.2153730621 900 0.0867732620
40 | 0.9938162544 1.0000000000 0.6221176471 0.6233521657 0.8069306931 0.7332317073 0.6319881525 0.6518518519 7.0671378092 0.2005969730 1000 0.1003058171
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/8cfbf830754065d02f9723c57abc992e/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/8cfbf830754065d02f9723c57abc992e/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/8cfbf830754065d02f9723c57abc992e/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/8cfbf830754065d02f9723c57abc992e/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/8cfbf830754065d02f9723c57abc992e
18 | save_model_every_checkpoint: False
19 | seed: 1878899245
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1, 3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.7544169611 0.7349823322 0.4640000000 0.4990583804 0.4185072353 0.4344512195 0.4439096631 0.4459259259 0.0000000000 1.6586600542 0 0.8204424381
37 | 1.0000000000 1.0000000000 0.6381176471 0.6195856874 0.9021325209 0.7942073171 0.7460199926 0.7688888889 10.335689045 0.2694484687 300 0.2729239146
38 | 0.9991166078 0.9964664311 0.6084705882 0.5969868173 0.9405940594 0.7942073171 0.7141799334 0.7200000000 20.671378091 0.1227226931 600 0.2742725794
39 | 1.0000000000 1.0000000000 0.6475294118 0.6572504708 0.9630616908 0.8003048780 0.7671232877 0.7762962963 31.007067137 0.0694726440 900 0.2802266463
40 | 1.0000000000 0.9964664311 0.6244705882 0.6101694915 0.9813404417 0.8079268293 0.7778600518 0.7777777778 34.452296819 0.0363020070 1000 0.2752757215
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/90961e3a45300a2d4771fc090627166e/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/90961e3a45300a2d4771fc090627166e/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/90961e3a45300a2d4771fc090627166e/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/90961e3a45300a2d4771fc090627166e/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/90961e3a45300a2d4771fc090627166e
18 | save_model_every_checkpoint: False
19 | seed: 733096875
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6298586572 0.6431095406 0.4014117647 0.4369114878 0.4059405941 0.3932926829 0.4487226953 0.4429629630 0.0000000000 1.6435878277 0 1.4049792290
37 | 0.9991166078 1.0000000000 0.6625882353 0.6591337100 0.8899466870 0.7987804878 0.9244724176 0.8651851852 10.335689045 0.3126775041 300 0.4027417898
38 | 0.9973498233 0.9893992933 0.5948235294 0.5969868173 0.9173648134 0.7667682927 0.9581636431 0.8592592593 20.671378091 0.1523421495 600 0.4037892016
39 | 1.0000000000 0.9964664311 0.6536470588 0.6497175141 0.9657273420 0.7987804878 0.9759348389 0.8829629630 31.007067137 0.1036048375 900 0.4036473759
40 | 1.0000000000 0.9964664311 0.6578823529 0.6553672316 0.9706778370 0.7865853659 0.9748241392 0.8814814815 34.452296819 0.0652515952 1000 0.4080266762
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/9f1d308cb3d13c7358eefd027ba1de04/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/9f1d308cb3d13c7358eefd027ba1de04/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/9f1d308cb3d13c7358eefd027ba1de04/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/9f1d308cb3d13c7358eefd027ba1de04/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/9f1d308cb3d13c7358eefd027ba1de04
18 | save_model_every_checkpoint: False
19 | seed: 1443892482
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [1]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6095406360 0.6219081272 0.4070588235 0.3992467043 0.3549124143 0.3414634146 0.3661606812 0.3792592593 0.0000000000 1.6126624346 0 1.0119540691
37 | 0.9982332155 0.9964664311 0.6141176471 0.5988700565 0.8309215537 0.7987804878 0.8507960015 0.7985185185 2.1201413428 0.4554009163 300 0.1058194629
38 | 0.9991166078 0.9929328622 0.6310588235 0.6082862524 0.8518659558 0.8323170732 0.8933728249 0.8400000000 4.2402826855 0.2957518518 600 0.1057730643
39 | 1.0000000000 0.9929328622 0.5642352941 0.5630885122 0.8526275704 0.8094512195 0.8952239911 0.8444444444 6.3604240283 0.2582681263 900 0.1059892249
40 | 1.0000000000 0.9964664311 0.6197647059 0.6026365348 0.8659558264 0.8185975610 0.8918918919 0.8133333333 7.0671378092 0.2397152161 1000 0.1159045529
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/bf09cd8e443d5445cc15b7503c14264d/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/bf09cd8e443d5445cc15b7503c14264d/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/bf09cd8e443d5445cc15b7503c14264d/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/bf09cd8e443d5445cc15b7503c14264d/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/bf09cd8e443d5445cc15b7503c14264d
18 | save_model_every_checkpoint: False
19 | seed: 267264279
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2, 3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6333922261 0.6466431095 0.4720000000 0.5047080979 0.3990860625 0.4009146341 0.4450203628 0.4488888889 0.0000000000 1.5340688229 0 1.5293178558
37 | 0.9964664311 1.0000000000 0.7821176471 0.7363465160 0.5913937548 0.5838414634 0.6375416512 0.6281481481 8.4805653710 0.3256840587 300 0.4966490340
38 | 0.9982332155 0.9964664311 0.8837647059 0.7777777778 0.5194211729 0.4893292683 0.6904850056 0.6592592593 16.961130742 0.2106622866 600 0.5106681673
39 | 1.0000000000 0.9964664311 0.9416470588 0.7325800377 0.6127189642 0.5609756098 0.6638282118 0.6251851852 25.441696113 0.1415937501 900 0.5148218004
40 | 1.0000000000 0.9964664311 0.9374117647 0.7740112994 0.5529322163 0.5503048780 0.6704924102 0.6488888889 28.268551236 0.1061877130 1000 0.5078280520
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/bfce2823ee1c49ab624fde5c5e2c1143/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/bfce2823ee1c49ab624fde5c5e2c1143/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/bfce2823ee1c49ab624fde5c5e2c1143/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/bfce2823ee1c49ab624fde5c5e2c1143/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/bfce2823ee1c49ab624fde5c5e2c1143
18 | save_model_every_checkpoint: False
19 | seed: 729020776
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6192579505 0.6254416961 0.4724705882 0.4670433145 0.3773800457 0.4146341463 0.4535357275 0.4074074074 0.0000000000 1.5559741259 0 1.1550295353
37 | 0.9991166078 0.9929328622 0.8305882353 0.7570621469 0.7395277989 0.7560975610 0.9166975194 0.8325925926 8.4805653710 0.3378844495 300 0.5076199762
38 | 1.0000000000 0.9823321555 0.8832941176 0.7532956685 0.6622239147 0.6905487805 0.9403924472 0.8237037037 16.961130742 0.2088435666 600 0.4945691800
39 | 1.0000000000 0.9964664311 0.9327058824 0.7419962335 0.7117288652 0.7317073171 0.9722325065 0.8340740741 25.441696113 0.1441033643 900 0.4829492307
40 | 0.9982332155 0.9964664311 0.9327058824 0.7608286252 0.6664127951 0.6570121951 0.9592743428 0.8014814815 28.268551236 0.1133815604 1000 0.5154592729
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/c62625063d3aee2f08e5c908e7677e83/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/c62625063d3aee2f08e5c908e7677e83/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/c62625063d3aee2f08e5c908e7677e83/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/c62625063d3aee2f08e5c908e7677e83/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/c62625063d3aee2f08e5c908e7677e83
18 | save_model_every_checkpoint: False
19 | seed: 99481980
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.5821554770 0.5971731449 0.4663529412 0.4613935970 0.3716679360 0.4146341463 0.4424287301 0.4014814815 0.0000000000 1.7584050894 0 1.1681911945
37 | 0.7579505300 0.7738515901 0.7632941176 0.7024482109 0.8632901752 0.7942073171 0.7023324695 0.7155555556 8.4805653710 0.5978830648 300 0.4919224509
38 | 0.7279151943 0.6749116608 0.8592941176 0.7306967985 0.9116527037 0.8109756098 0.7045538689 0.6711111111 16.961130742 0.3988586284 600 0.4802287292
39 | 0.3683745583 0.3568904594 0.9143529412 0.7325800377 0.9592536177 0.8079268293 0.7067752684 0.7125925926 25.441696113 0.2422290696 900 0.4851771371
40 | 0.7393992933 0.7279151943 0.9298823529 0.7683615819 0.9306930693 0.7957317073 0.7467604591 0.7525925926 28.268551236 0.1837190475 1000 0.5015410733
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89
18 | save_model_every_checkpoint: False
19 | seed: 99890861
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6113074205 0.6289752650 0.4597647059 0.4896421846 0.3846153846 0.3871951220 0.4435394298 0.4459259259 0.0000000000 1.5248144865 0 1.3489863873
37 | 0.9982332155 1.0000000000 0.7924705882 0.7325800377 0.7300076161 0.7012195122 0.8959644576 0.8370370370 8.4805653710 0.3458396457 300 0.4882594180
38 | 1.0000000000 0.9964664311 0.8489411765 0.7419962335 0.6839299315 0.6798780488 0.9215105516 0.8325925926 16.961130742 0.2202832393 600 0.4904143016
39 | 0.9991166078 0.9964664311 0.8983529412 0.7777777778 0.6949733435 0.6646341463 0.9396519807 0.8281481481 25.441696113 0.1531876612 900 0.4965575910
40 | 0.9991166078 0.9964664311 0.9440000000 0.7890772128 0.7220106626 0.6981707317 0.9692706405 0.8325925926 28.268551236 0.1111858229 1000 0.5122962880
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89/results.jsonl:
--------------------------------------------------------------------------------
1 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89", "save_model_every_checkpoint": false, "seed": 99890861, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 0.6113074204946997, "env0_out_acc": 0.6289752650176679, "env1_in_acc": 0.45976470588235296, "env1_out_acc": 0.4896421845574388, "env2_in_acc": 0.38461538461538464, "env2_out_acc": 0.3871951219512195, "env3_in_acc": 0.4435394298407997, "env3_out_acc": 0.44592592592592595, "epoch": 0.0, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 1.524814486503601, "step": 0, "step_time": 1.3489863872528076}
2 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89", "save_model_every_checkpoint": false, "seed": 99890861, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 0.9982332155477032, "env0_out_acc": 1.0, "env1_in_acc": 0.7924705882352941, "env1_out_acc": 0.7325800376647834, "env2_in_acc": 0.73000761614623, "env2_out_acc": 0.7012195121951219, "env3_in_acc": 0.8959644576082932, "env3_out_acc": 0.837037037037037, "epoch": 8.480565371024735, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.3458396456638972, "step": 300, "step_time": 0.48825941801071165}
3 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89", "save_model_every_checkpoint": false, "seed": 99890861, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 1.0, "env0_out_acc": 0.9964664310954063, "env1_in_acc": 0.8489411764705882, "env1_out_acc": 0.7419962335216572, "env2_in_acc": 0.683929931454684, "env2_out_acc": 0.6798780487804879, "env3_in_acc": 0.9215105516475379, "env3_out_acc": 0.8325925925925926, "epoch": 16.96113074204947, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.22028323932240407, "step": 600, "step_time": 0.490414301554362}
4 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89", "save_model_every_checkpoint": false, "seed": 99890861, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 0.9991166077738516, "env0_out_acc": 0.9964664310954063, "env1_in_acc": 0.8983529411764706, "env1_out_acc": 0.7777777777777778, "env2_in_acc": 0.694973343488195, "env2_out_acc": 0.6646341463414634, "env3_in_acc": 0.9396519807478712, "env3_out_acc": 0.8281481481481482, "epoch": 25.441696113074205, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.15318766122063002, "step": 900, "step_time": 0.4965575909614563}
5 | {"args": {"algorithm": "ERM", "checkpoint_freq": null, "data_dir": "/checkpoint/dlp/datasets_new", "dataset": "VLCS", "holdout_fraction": 0.2, "hparams": null, "hparams_seed": 0, "output_dir": "domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89", "save_model_every_checkpoint": false, "seed": 99890861, "skip_model_save": false, "steps": 1001, "task": "domain_generalization", "test_envs": [2], "trial_seed": 0, "uda_holdout_fraction": 0}, "env0_in_acc": 0.9991166077738516, "env0_out_acc": 0.9964664310954063, "env1_in_acc": 0.944, "env1_out_acc": 0.7890772128060264, "env2_in_acc": 0.722010662604722, "env2_out_acc": 0.698170731707317, "env3_in_acc": 0.9692706405035172, "env3_out_acc": 0.8325925925925926, "epoch": 28.268551236749115, "hparams": {"batch_size": 32, "class_balanced": false, "data_augmentation": true, "lr": 5e-05, "nonlinear_classifier": false, "resnet18": false, "resnet_dropout": 0.0, "weight_decay": 0.0}, "loss": 0.11118582291528582, "step": 1000, "step_time": 0.5122962880134583}
6 |
--------------------------------------------------------------------------------
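Note: the results.jsonl file above is the machine-readable counterpart of the out.txt log for the same run, one JSON record per checkpoint with the args, hparams, and per-environment accuracies. A minimal reading sketch (not part of the repository), assuming the run directory shown for ca571be94ad9fdb0c2bece0061ff3f89 and using only the standard library:

    import json

    run_dir = "domainbed/misc/test_sweep_data/ca571be94ad9fdb0c2bece0061ff3f89"

    # Each non-empty line is one checkpoint record (steps 0, 300, ..., 1000).
    with open(f"{run_dir}/results.jsonl") as f:
        records = [json.loads(line) for line in f if line.strip()]

    last = records[-1]                           # record logged at step 1000
    test_env = last["args"]["test_envs"][0]      # 2 for this particular run
    print(last["step"],
          last[f"env{test_env}_in_acc"],         # in-split accuracy on the test env
          last[f"env{test_env}_out_acc"])        # held-out split accuracy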
/TerraIncognita/domainbed/misc/test_sweep_data/cf42c3176baf91b96bb7dd0ff3c686cc/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/cf42c3176baf91b96bb7dd0ff3c686cc/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/cf42c3176baf91b96bb7dd0ff3c686cc/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/cf42c3176baf91b96bb7dd0ff3c686cc/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/cf42c3176baf91b96bb7dd0ff3c686cc
18 | save_model_every_checkpoint: False
19 | seed: 1726329315
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [3]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.1316254417 0.1236749117 0.3920000000 0.3672316384 0.2494287890 0.2423780488 0.1417993336 0.1407407407 0.0000000000 1.8617510796 0 1.3313741684
37 | 0.9982332155 1.0000000000 0.7567058824 0.7401129944 0.7947448591 0.7606707317 0.7319511292 0.7200000000 2.1201413428 0.5276519541 300 0.1504819067
38 | 0.9991166078 0.9929328622 0.7840000000 0.7382297552 0.8347296268 0.8033536585 0.7726767864 0.7674074074 4.2402826855 0.3625088304 600 0.1520125484
39 | 0.9982332155 0.9964664311 0.7924705882 0.7269303202 0.8244478294 0.7393292683 0.6823398741 0.6948148148 6.3604240283 0.3448445238 900 0.1523122589
40 | 1.0000000000 0.9964664311 0.8089411765 0.7702448211 0.8434881950 0.7743902439 0.7652721214 0.7614814815 7.0671378092 0.3251545057 1000 0.1789008522
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/d093618124c5748762707da1c6804d75/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/d093618124c5748762707da1c6804d75/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/d093618124c5748762707da1c6804d75/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/d093618124c5748762707da1c6804d75/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/d093618124c5748762707da1c6804d75
18 | save_model_every_checkpoint: False
19 | seed: 794352299
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [3]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.5768551237 0.6007067138 0.4447058824 0.4783427495 0.3591012947 0.3506097561 0.4116993706 0.4074074074 0.0000000000 1.8192925453 0 1.2970163822
37 | 0.9982332155 0.9964664311 0.8131764706 0.7645951036 0.8709063214 0.7881097561 0.7789707516 0.7911111111 10.335689045 0.4202846115 300 0.6344150265
38 | 1.0000000000 1.0000000000 0.8696470588 0.7683615819 0.9017517136 0.7759146341 0.7900777490 0.7733333333 20.671378091 0.2483884268 600 0.6257179348
39 | 0.9982332155 0.9929328622 0.9176470588 0.7419962335 0.9569687738 0.7926829268 0.7367641614 0.7170370370 31.007067137 0.1585837312 900 0.6380308660
40 | 1.0000000000 0.9964664311 0.9162352941 0.7476459510 0.9760091394 0.7881097561 0.7726767864 0.7629629630 34.452296819 0.1284457469 1000 0.6444939446
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ea7d2d5149dd9167b364d433bb355be1/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ea7d2d5149dd9167b364d433bb355be1/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/ea7d2d5149dd9167b364d433bb355be1/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ea7d2d5149dd9167b364d433bb355be1/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 0
17 | output_dir: domainbed/misc/test_sweep_data/ea7d2d5149dd9167b364d433bb355be1
18 | save_model_every_checkpoint: False
19 | seed: 560039459
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 1]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 32
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 5e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.0
34 | weight_decay: 0.0
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.5901060071 0.5971731449 0.3717647059 0.3804143126 0.3865194212 0.3810975610 0.4368752314 0.4414814815 0.0000000000 1.6423946619 0 1.4854812622
37 | 0.9885159011 0.9964664311 0.6032941176 0.5988700565 0.8735719726 0.7621951220 0.9078119215 0.8311111111 8.4805653710 0.4036260696 300 0.2337139837
38 | 0.9743816254 0.9752650177 0.6470588235 0.6478342750 0.9367859863 0.8094512195 0.9500185117 0.8592592593 16.961130742 0.2497328627 600 0.2362791340
39 | 0.9743816254 0.9858657244 0.6000000000 0.5932203390 0.9520182788 0.7881097561 0.9700111070 0.8355555556 25.441696113 0.1506629159 900 0.2351563136
40 | 0.9655477032 0.9717314488 0.6277647059 0.6327683616 0.9748667174 0.7865853659 0.9759348389 0.8370370370 28.268551236 0.1228756825 1000 0.2404113364
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ee8f05db2b9ae5a36273cc0d2161f8c0/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ee8f05db2b9ae5a36273cc0d2161f8c0/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/ee8f05db2b9ae5a36273cc0d2161f8c0/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/ee8f05db2b9ae5a36273cc0d2161f8c0/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/ee8f05db2b9ae5a36273cc0d2161f8c0
18 | save_model_every_checkpoint: False
19 | seed: 901962056
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [2]
24 | trial_seed: 0
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 39
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.7028930742148706e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 0.00044832883881609976
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.6130742049 0.6325088339 0.4564705882 0.4896421846 0.3865194212 0.3826219512 0.4357645317 0.4222222222 0.0000000000 1.5491540432 0 1.1524951458
37 | 1.0000000000 1.0000000000 0.8183529412 0.7664783427 0.7117288652 0.6890243902 0.9059607553 0.8518518519 10.335689045 0.3856955582 300 0.6124396364
38 | 0.9991166078 1.0000000000 0.8541176471 0.7401129944 0.7520944402 0.7439024390 0.9489078119 0.8444444444 20.671378091 0.2231503439 600 0.6130792896
39 | 0.9982332155 0.9964664311 0.9261176471 0.7777777778 0.6984006093 0.6753048780 0.9603850426 0.8237037037 31.007067137 0.1381842596 900 0.6130368471
40 | 1.0000000000 1.0000000000 0.9449411765 0.7683615819 0.7349581112 0.7195121951 0.9766753054 0.8562962963 34.452296819 0.1258270860 1000 0.6275756717
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/f61766414e6b0db40063d7bc4ecdaa2b/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/f61766414e6b0db40063d7bc4ecdaa2b/err.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/misc/test_sweep_data/f61766414e6b0db40063d7bc4ecdaa2b/err.txt
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/f61766414e6b0db40063d7bc4ecdaa2b/out.txt:
--------------------------------------------------------------------------------
1 | Environment:
2 | Python: 3.7.6
3 | PyTorch: 1.7.0
4 | Torchvision: 0.8.1
5 | CUDA: 9.2
6 | CUDNN: 7603
7 | NumPy: 1.19.4
8 | PIL: 8.1.0
9 | Args:
10 | algorithm: ERM
11 | checkpoint_freq: None
12 | data_dir: /checkpoint/dlp/datasets_new
13 | dataset: VLCS
14 | holdout_fraction: 0.2
15 | hparams: None
16 | hparams_seed: 1
17 | output_dir: domainbed/misc/test_sweep_data/f61766414e6b0db40063d7bc4ecdaa2b
18 | save_model_every_checkpoint: False
19 | seed: 512619814
20 | skip_model_save: False
21 | steps: 1001
22 | task: domain_generalization
23 | test_envs: [0, 2]
24 | trial_seed: 1
25 | uda_holdout_fraction: 0
26 | HParams:
27 | batch_size: 8
28 | class_balanced: False
29 | data_augmentation: True
30 | lr: 2.2352558725944602e-05
31 | nonlinear_classifier: False
32 | resnet18: False
33 | resnet_dropout: 0.5
34 | weight_decay: 1.9967320578799288e-06
35 | env0_in_acc env0_out_acc env1_in_acc env1_out_acc env2_in_acc env2_out_acc env3_in_acc env3_out_acc epoch loss step step_time
36 | 0.0662544170 0.0459363958 0.2922352941 0.2617702448 0.2859862909 0.2530487805 0.1899296557 0.1940740741 0.0000000000 1.7476719618 0 1.3853642941
37 | 0.9779151943 0.9681978799 0.7562352941 0.7438794727 0.6637471439 0.7012195122 0.8293224732 0.7807407407 2.1201413428 0.7123324679 300 0.1359348575
38 | 0.9885159011 0.9752650177 0.7821176471 0.7777777778 0.7124904798 0.7088414634 0.8704183636 0.8148148148 4.2402826855 0.5137957147 600 0.1346128742
39 | 0.9637809187 0.9646643110 0.7891764706 0.7382297552 0.6774562072 0.6981707317 0.8685671973 0.8118518519 6.3604240283 0.4774057284 900 0.1330896823
40 | 0.9646643110 0.9505300353 0.7680000000 0.7363465160 0.7696115765 0.7987804878 0.8870788597 0.8370370370 7.0671378092 0.4129467555 1000 0.1624276757
41 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_data/results.txt:
--------------------------------------------------------------------------------
1 | Total records: 200
2 |
3 | -------- Dataset: VLCS, model selection method: training-domain validation set
4 | Algorithm C L S V Avg
5 | ERM 98.0 +/- 0.2 64.2 +/- 0.8 74.1 +/- 0.4 77.1 +/- 0.2 78.3
6 |
7 | -------- Averages, model selection method: training-domain validation set
8 | Algorithm VLCS Avg
9 | ERM 78.3 +/- 0.0 78.3
10 |
11 | -------- Dataset: VLCS, model selection method: leave-one-domain-out cross-validation
12 | Algorithm C L S V Avg
13 | ERM 96.9 +/- 1.0 64.4 +/- 0.9 70.5 +/- 0.5 76.7 +/- 0.1 77.1
14 |
15 | -------- Averages, model selection method: leave-one-domain-out cross-validation
16 | Algorithm VLCS Avg
17 | ERM 77.1 +/- 0.1 77.1
18 |
19 | -------- Dataset: VLCS, model selection method: test-domain validation set (oracle)
20 | Algorithm C L S V Avg
21 | ERM 96.9 +/- 1.0 65.9 +/- 0.5 71.6 +/- 1.3 76.9 +/- 0.3 77.8
22 |
23 | -------- Averages, model selection method: test-domain validation set (oracle)
24 | Algorithm VLCS Avg
25 | ERM 77.8 +/- 0.3 77.8
26 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/misc/test_sweep_results.txt:
--------------------------------------------------------------------------------
1 | Total records: 200
2 |
3 | -------- Dataset: VLCS, model selection method: training-domain validation set
4 | Algorithm C L S V Avg
5 | ERM 98.0 +/- 0.2 64.2 +/- 0.8 74.1 +/- 0.4 77.1 +/- 0.2 78.3
6 |
7 | -------- Averages, model selection method: training-domain validation set
8 | Algorithm VLCS Avg
9 | ERM 78.3 +/- 0.0 78.3
10 |
11 | -------- Dataset: VLCS, model selection method: leave-one-domain-out cross-validation
12 | Algorithm C L S V Avg
13 | ERM 96.9 +/- 1.0 64.4 +/- 0.9 70.5 +/- 0.5 76.7 +/- 0.1 77.1
14 |
15 | -------- Averages, model selection method: leave-one-domain-out cross-validation
16 | Algorithm VLCS Avg
17 | ERM 77.1 +/- 0.1 77.1
18 |
19 | -------- Dataset: VLCS, model selection method: test-domain validation set (oracle)
20 | Algorithm C L S V Avg
21 | ERM 96.9 +/- 1.0 65.9 +/- 0.5 71.6 +/- 1.3 76.9 +/- 0.3 77.8
22 |
23 | -------- Averages, model selection method: test-domain validation set (oracle)
24 | Algorithm VLCS Avg
25 | ERM 77.8 +/- 0.3 77.8
26 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/results/2020_10_06_7df6f06/results.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/results/2020_10_06_7df6f06/results.png
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__init__.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-37.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/__init__.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/download.cpython-37.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/download.cpython-37.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/download.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/download.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/sweep.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/sweep.cpython-36.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/sweep.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/sweep.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/sweep.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/sweep.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/train.cpython-38.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/train.cpython-38.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/__pycache__/train.cpython-39.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/domainbed/scripts/__pycache__/train.cpython-39.pyc
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/scripts/save_images.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | """
4 | Save some representative images from each dataset to disk.
5 | """
6 | import random
7 | import torch
8 | import argparse
9 | from domainbed import hparams_registry
10 | from domainbed import datasets
11 | import imageio
12 | import os
13 | from tqdm import tqdm
14 |
15 | if __name__ == '__main__':
16 | parser = argparse.ArgumentParser(description='Domain generalization')
17 | parser.add_argument('--data_dir', type=str)
18 | parser.add_argument('--output_dir', type=str)
19 | args = parser.parse_args()
20 |
21 | os.makedirs(args.output_dir, exist_ok=True)
22 | datasets_to_save = ['OfficeHome', 'TerraIncognita', 'DomainNet', 'RotatedMNIST', 'ColoredMNIST', 'SVIRO']
23 |
24 | for dataset_name in tqdm(datasets_to_save):
25 | hparams = hparams_registry.default_hparams('ERM', dataset_name)
26 | dataset = datasets.get_dataset_class(dataset_name)(
27 | args.data_dir,
28 | list(range(datasets.num_environments(dataset_name))),
29 | hparams)
30 | for env_idx, env in enumerate(tqdm(dataset)):
31 | for i in tqdm(range(50)):
32 | idx = random.choice(list(range(len(env))))
33 | x, y = env[idx]
34 | while y > 10:
35 | idx = random.choice(list(range(len(env))))
36 | x, y = env[idx]
37 | if x.shape[0] == 2:
38 | x = torch.cat([x, torch.zeros_like(x)], dim=0)[:3,:,:]
39 | if x.min() < 0:
40 | mean = torch.tensor([0.485, 0.456, 0.406])[:,None,None]
41 | std = torch.tensor([0.229, 0.224, 0.225])[:,None,None]
42 | x = (x * std) + mean
43 | assert(x.min() >= 0)
44 | assert(x.max() <= 1)
45 | x = (x * 255.99)
46 | x = x.numpy().astype('uint8').transpose(1,2,0)
47 | imageio.imwrite(
48 | os.path.join(args.output_dir,
49 | f'{dataset_name}_env{env_idx}{dataset.ENVIRONMENTS[env_idx]}_{i}_idx{idx}_class{y}.png'),
50 | x)
51 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 |
4 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/helpers.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import torch
4 |
5 | DEBUG_DATASETS = ['Debug28', 'Debug224']
6 |
7 | def make_minibatches(dataset, batch_size):
8 | """Test helper to make a minibatches array like train.py"""
9 | minibatches = []
10 | for env in dataset:
11 | X = torch.stack([env[i][0] for i in range(batch_size)]).cuda()
12 | y = torch.stack([torch.as_tensor(env[i][1])
13 | for i in range(batch_size)]).cuda()
14 | minibatches.append((X, y))
15 | return minibatches
16 |
--------------------------------------------------------------------------------
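Note: make_minibatches is the glue the unit tests below use to turn a dataset object into the list of (inputs, labels) pairs expected by an algorithm's update step. A hedged usage sketch, assuming a CUDA device is available (the helper moves every batch to the GPU) and using the Debug28 entry from DEBUG_DATASETS above:

    from domainbed import datasets, hparams_registry
    from domainbed.test import helpers

    # Small in-memory debug dataset; the empty root path and empty test-env
    # list mirror the calls made in test_models.py and test_networks.py below.
    hparams = hparams_registry.default_hparams('ERM', 'Debug28')
    dataset = datasets.get_dataset_class('Debug28')('', [], hparams)

    minibatches = helpers.make_minibatches(dataset, batch_size=8)
    for X, y in minibatches:          # one (X, y) pair per environment
        print(X.shape, y.shape)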
/TerraIncognita/domainbed/test/lib/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 |
4 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/lib/test_misc.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import unittest
4 | from domainbed.lib import misc
5 |
6 | class TestMisc(unittest.TestCase):
7 |
8 | def test_make_weights_for_balanced_classes(self):
9 | dataset = [('A', 0), ('B', 1), ('C', 0), ('D', 2), ('E', 3), ('F', 0)]
10 | result = misc.make_weights_for_balanced_classes(dataset)
11 | self.assertEqual(result.sum(), 1)
12 | self.assertEqual(result[0], result[2])
13 | self.assertEqual(result[1], result[3])
14 | self.assertEqual(3 * result[0], result[1])
15 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/lib/test_query.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import unittest
4 | from domainbed.lib.query import Q, make_selector_fn
5 |
6 | class TestQuery(unittest.TestCase):
7 | def test_everything(self):
8 | numbers = Q([1, 4, 2])
9 | people = Q([
10 | {'name': 'Bob', 'age': 40},
11 | {'name': 'Alice', 'age': 20},
12 | {'name': 'Bob', 'age': 10}
13 | ])
14 |
15 | self.assertEqual(numbers.select(lambda x: 2*x), [2, 8, 4])
16 |
17 | self.assertEqual(numbers.min(), 1)
18 | self.assertEqual(numbers.max(), 4)
19 | self.assertEqual(numbers.mean(), 7/3)
20 |
21 | self.assertEqual(people.select('name'), ['Bob', 'Alice', 'Bob'])
22 |
23 | self.assertEqual(
24 | set(people.group('name').map(lambda _,g: g.select('age').mean())),
25 | set([25, 20])
26 | )
27 |
28 | self.assertEqual(people.argmax('age'), people[0])
29 |
30 | def test_group_by_unhashable(self):
31 | jobs = Q([
32 | {'hparams': {1:2}, 'score': 3},
33 | {'hparams': {1:2}, 'score': 4},
34 | {'hparams': {2:4}, 'score': 5}
35 | ])
36 | grouped = jobs.group('hparams')
37 | self.assertEqual(grouped, [
38 | ({1:2}, [jobs[0], jobs[1]]),
39 | ({2:4}, [jobs[2]])
40 | ])
41 |
42 | def test_comma_selector(self):
43 | struct = {'a': {'b': 1}, 'c': 2}
44 | fn = make_selector_fn('a.b,c')
45 | self.assertEqual(fn(struct), (1, 2))
46 |
47 | def test_unique(self):
48 | numbers = Q([1,2,1,3,2,1,3,1,2,3])
49 | self.assertEqual(numbers.unique(), [1,2,3])
50 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/scripts/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 |
4 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/scripts/test_collect_results.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import argparse
4 | import itertools
5 | import json
6 | import os
7 | import subprocess
8 | import sys
9 | import time
10 | import unittest
11 | import uuid
12 |
13 | import torch
14 |
15 | from domainbed import datasets
16 | from domainbed import hparams_registry
17 | from domainbed import algorithms
18 | from domainbed import networks
19 | from domainbed.test import helpers
20 | from domainbed.scripts import collect_results
21 |
22 | from parameterized import parameterized
23 | import io
24 | import textwrap
25 |
26 | class TestCollectResults(unittest.TestCase):
27 |
28 | def test_format_mean(self):
29 | self.assertEqual(
30 | collect_results.format_mean([0.1, 0.2, 0.3], False)[2],
31 | '20.0 +/- 4.7')
32 | self.assertEqual(
33 | collect_results.format_mean([0.1, 0.2, 0.3], True)[2],
34 |             r'20.0 $\pm$ 4.7')
35 |
36 | def test_print_table_non_latex(self):
37 | temp_out = io.StringIO()
38 | sys.stdout = temp_out
39 | table = [['1', '2'], ['3', '4']]
40 | collect_results.print_table(table, 'Header text', ['R1', 'R2'],
41 | ['C1', 'C2'], colwidth=10, latex=False)
42 | sys.stdout = sys.__stdout__
43 | self.assertEqual(
44 | temp_out.getvalue(),
45 | textwrap.dedent("""
46 | -------- Header text
47 | C1 C2
48 | R1 1 2
49 | R2 3 4
50 | """)
51 | )
52 |
53 | def test_print_table_latex(self):
54 | temp_out = io.StringIO()
55 | sys.stdout = temp_out
56 | table = [['1', '2'], ['3', '4']]
57 | collect_results.print_table(table, 'Header text', ['R1', 'R2'],
58 | ['C1', 'C2'], colwidth=10, latex=True)
59 | sys.stdout = sys.__stdout__
60 | self.assertEqual(
61 | temp_out.getvalue(),
62 | textwrap.dedent(r"""
63 | \begin{center}
64 | \adjustbox{max width=\textwidth}{%
65 | \begin{tabular}{lcc}
66 | \toprule
67 | \textbf{C1 & \textbf{C2 \\
68 | \midrule
69 | R1 & 1 & 2 \\
70 | R2 & 3 & 4 \\
71 | \bottomrule
72 | \end{tabular}}
73 | \end{center}
74 | """)
75 | )
76 |
77 | def test_get_grouped_records(self):
78 | pass # TODO
79 |
80 | def test_print_results_tables(self):
81 | pass # TODO
82 |
83 | def test_load_records(self):
84 | pass # TODO
85 |
86 | def test_end_to_end(self):
87 | """
88 | Test that collect_results.py's output matches a manually-verified
89 | ground-truth when run on a given directory of test sweep data.
90 |
91 | If you make any changes to the output of collect_results.py, you'll need
92 | to update the ground-truth and manually verify that it's still
93 | correct. The command used to update the ground-truth is:
94 |
95 | python -m domainbed.scripts.collect_results --input_dir=domainbed/misc/test_sweep_data \
96 | | tee domainbed/misc/test_sweep_results.txt
97 |
98 | Furthermore, if you make any changes to the data format, you'll also
99 | need to rerun the test sweep. The command used to run the test sweep is:
100 |
101 | python -m domainbed.scripts.sweep launch --data_dir=$DATA_DIR \
102 | --output_dir=domainbed/misc/test_sweep_data --algorithms ERM \
103 | --datasets VLCS --steps 1001 --n_hparams 2 --n_trials 2 \
104 | --command_launcher local
105 | """
106 | result = subprocess.run('python -m domainbed.scripts.collect_results'
107 | ' --input_dir=domainbed/misc/test_sweep_data', shell=True,
108 | stdout=subprocess.PIPE)
109 |
110 | with open('domainbed/misc/test_sweep_results.txt', 'r') as f:
111 | ground_truth = f.read()
112 |
113 | self.assertEqual(result.stdout.decode('utf8'), ground_truth)
114 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/scripts/test_train.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | # import argparse
4 | # import itertools
5 | import json
6 | import os
7 | import subprocess
8 | # import sys
9 | # import time
10 | import unittest
11 | import uuid
12 |
13 | import torch
14 |
15 | # import datasets
16 | # import hparams_registry
17 | # import algorithms
18 | # import networks
19 | # from parameterized import parameterized
20 |
21 | # import test.helpers
22 |
23 | class TestTrain(unittest.TestCase):
24 |
25 | @unittest.skipIf('DATA_DIR' not in os.environ, 'needs DATA_DIR environment '
26 | 'variable')
27 | def test_end_to_end(self):
28 | """Test that train.py successfully completes one step"""
29 | output_dir = os.path.join('/tmp', str(uuid.uuid4()))
30 | os.makedirs(output_dir, exist_ok=True)
31 |
32 | subprocess.run(f'python -m domainbed.scripts.train --dataset RotatedMNIST '
33 | f'--data_dir={os.environ["DATA_DIR"]} --output_dir={output_dir} '
34 | f'--steps=501', shell=True)
35 |
36 | with open(os.path.join(output_dir, 'results.jsonl')) as f:
37 | lines = [l[:-1] for l in f]
38 | last_epoch = json.loads(lines[-1])
39 | self.assertEqual(last_epoch['step'], 500)
40 | # Conservative values; anything lower and something's likely wrong.
41 | self.assertGreater(last_epoch['env0_in_acc'], 0.80)
42 | self.assertGreater(last_epoch['env1_in_acc'], 0.95)
43 | self.assertGreater(last_epoch['env2_in_acc'], 0.95)
44 |         self.assertGreater(last_epoch['env3_in_acc'], 0.95)
45 | 
46 |         with open(os.path.join(output_dir, 'out.txt')) as f:
47 |             text = f.read()
48 |             self.assertTrue('500' in text)
49 | 
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/test_datasets.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | """Unit tests."""
4 |
5 | import argparse
6 | import itertools
7 | import json
8 | import os
9 | import subprocess
10 | import sys
11 | import time
12 | import unittest
13 | import uuid
14 |
15 | import torch
16 |
17 | from domainbed import datasets
18 | from domainbed import hparams_registry
19 | from domainbed import algorithms
20 | from domainbed import networks
21 |
22 | from parameterized import parameterized
23 |
24 | from domainbed.test import helpers
25 |
26 | class TestDatasets(unittest.TestCase):
27 |
28 | @parameterized.expand(itertools.product(datasets.DATASETS))
29 | @unittest.skipIf('DATA_DIR' not in os.environ, 'needs DATA_DIR environment '
30 | 'variable')
31 | def test_dataset_erm(self, dataset_name):
32 | """
33 | Test that ERM can complete one step on a given dataset without raising
34 | an error.
35 | Also test that num_environments() works correctly.
36 | """
37 | batch_size = 8
38 | hparams = hparams_registry.default_hparams('ERM', dataset_name)
39 | dataset = datasets.get_dataset_class(dataset_name)(
40 | os.environ['DATA_DIR'], [], hparams)
41 | self.assertEqual(datasets.num_environments(dataset_name),
42 | len(dataset))
43 | algorithm = algorithms.get_algorithm_class('ERM')(
44 | dataset.input_shape,
45 | dataset.num_classes,
46 | len(dataset),
47 | hparams).cuda()
48 | minibatches = helpers.make_minibatches(dataset, batch_size)
49 | algorithm.update(minibatches)
50 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/test_hparams_registry.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import unittest
4 | import itertools
5 |
6 | from domainbed import hparams_registry
7 | from domainbed import datasets
8 | from domainbed import algorithms
9 |
10 | from parameterized import parameterized
11 |
12 | class TestHparamsRegistry(unittest.TestCase):
13 |
14 | @parameterized.expand(itertools.product(algorithms.ALGORITHMS, datasets.DATASETS))
15 | def test_random_hparams_deterministic(self, algorithm_name, dataset_name):
16 | """Test that hparams_registry.random_hparams is deterministic"""
17 | a = hparams_registry.random_hparams(algorithm_name, dataset_name, 0)
18 | b = hparams_registry.random_hparams(algorithm_name, dataset_name, 0)
19 | self.assertEqual(a.keys(), b.keys())
20 | for key in a.keys():
21 | self.assertEqual(a[key], b[key], key)
22 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/test_models.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | """Unit tests."""
4 |
5 | import argparse
6 | import itertools
7 | import json
8 | import os
9 | import subprocess
10 | import sys
11 | import time
12 | import unittest
13 | import uuid
14 |
15 | import torch
16 |
17 | from domainbed import datasets
18 | from domainbed import hparams_registry
19 | from domainbed import algorithms
20 | from domainbed import networks
21 | from domainbed.test import helpers
22 |
23 | from parameterized import parameterized
24 |
25 |
26 | class TestAlgorithms(unittest.TestCase):
27 |
28 | @parameterized.expand(itertools.product(helpers.DEBUG_DATASETS, algorithms.ALGORITHMS))
29 | def test_init_update_predict(self, dataset_name, algorithm_name):
30 | """Test that a given algorithm inits, updates and predicts without raising
31 | errors."""
32 | batch_size = 8
33 | hparams = hparams_registry.default_hparams(algorithm_name, dataset_name)
34 | dataset = datasets.get_dataset_class(dataset_name)('', [], hparams)
35 | minibatches = helpers.make_minibatches(dataset, batch_size)
36 | algorithm_class = algorithms.get_algorithm_class(algorithm_name)
37 | algorithm = algorithm_class(dataset.input_shape, dataset.num_classes, len(dataset),
38 | hparams).cuda()
39 | for _ in range(3):
40 | self.assertIsNotNone(algorithm.update(minibatches))
41 | algorithm.eval()
42 | self.assertEqual(list(algorithm.predict(minibatches[0][0]).shape),
43 | [batch_size, dataset.num_classes])
44 |
--------------------------------------------------------------------------------
/TerraIncognita/domainbed/test/test_networks.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 |
3 | import argparse
4 | import itertools
5 | import json
6 | import os
7 | import subprocess
8 | import sys
9 | import time
10 | import unittest
11 | import uuid
12 |
13 | import torch
14 |
15 | from domainbed import datasets
16 | from domainbed import hparams_registry
17 | from domainbed import algorithms
18 | from domainbed import networks
19 | from domainbed.test import helpers
20 |
21 | from parameterized import parameterized
22 |
23 |
24 | class TestNetworks(unittest.TestCase):
25 |
26 | @parameterized.expand(itertools.product(helpers.DEBUG_DATASETS))
27 | def test_featurizer(self, dataset_name):
28 | """Test that Featurizer() returns a module which can take a
29 | correctly-sized input and return a correctly-sized output."""
30 | batch_size = 8
31 | hparams = hparams_registry.default_hparams('ERM', dataset_name)
32 | dataset = datasets.get_dataset_class(dataset_name)('', [], hparams)
33 | input_ = helpers.make_minibatches(dataset, batch_size)[0][0]
34 | input_shape = dataset.input_shape
35 | algorithm = networks.Featurizer(input_shape, hparams).cuda()
36 | output = algorithm(input_)
37 | self.assertEqual(list(output.shape), [batch_size, algorithm.n_outputs])
38 |
--------------------------------------------------------------------------------
/TerraIncognita/train_output/done:
--------------------------------------------------------------------------------
1 | done
--------------------------------------------------------------------------------
/TerraIncognita/train_output/model.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahujak/IB-IRM/a5697e1dd6516e02281898d6f868e2d2eb60690c/TerraIncognita/train_output/model.pkl
--------------------------------------------------------------------------------