├── code
│   ├── __init__.py
│   ├── run_exp
│   │   ├── __init__.py
│   │   ├── run_covid_privacy.sh
│   │   ├── run_privacy_exp.sh
│   │   ├── run_misc_private_exp.sh
│   │   ├── run_privacy_exp.submit
│   │   ├── run_covid_privacy.submit
│   │   ├── run_misc_private_exp.submit
│   │   ├── run_privacy_exp.in
│   │   ├── run_misc_private_exp.in
│   │   ├── run_covid_privacy.py
│   │   ├── run_incorrect_files.py
│   │   ├── run_incorrect_files2.py
│   │   ├── run_covid_privacy.in
│   │   └── covid_utils.py
│   ├── README.md
│   ├── __pycache__
│   │   ├── agent.cpython-36.pyc
│   │   ├── utils.cpython-36.pyc
│   │   ├── __init__.cpython-36.pyc
│   │   ├── fltrain.cpython-36.pyc
│   │   └── network.cpython-36.pyc
│   ├── run_privacy_exp.sh
│   ├── run_misc_private_exp.sh
│   ├── run_non_private.sh
│   ├── run_privacy_exp.submit
│   ├── run_non_private.submit
│   ├── run_misc_private_exp.submit
│   ├── network.py
│   ├── write_input_files.py
│   ├── agent.py
│   ├── run_privacy_exp.py
│   ├── run_non_private.py
│   ├── utils.py
│   ├── run_misc_private_exp.py
│   ├── fltrain.py
│   ├── run_privacy_exp.in
│   ├── run_misc_private_exp.in
│   └── run_non_private.in
├── .DS_Store
├── .idea
│   ├── vcs.xml
│   ├── misc.xml
│   ├── inspectionProfiles
│   │   └── profiles_settings.xml
│   ├── modules.xml
│   ├── BFL.iml
│   └── workspace.xml
└── README.md
/code/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/code/run_exp/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuongtran-syr/BFL/HEAD/.DS_Store
--------------------------------------------------------------------------------
/code/README.md:
--------------------------------------------------------------------------------
1 | Code for Blockchain Federated Learning
2 | 
3 | Requirements: the Pytorch-DP library (imported as `torchdp`); `utils.py` additionally uses torchvision, Keras, and scikit-learn
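4 | 
5 | Per-agent DP-SGD in `agent.py` works by attaching Pytorch-DP's `PrivacyEngine` to a standard optimizer. A minimal sketch of that pattern (the values below are illustrative, not the experiment defaults):
6 | 
7 | ```python
8 | import torch.optim as optim
9 | from torchdp import PrivacyEngine
10 | from network import Net
11 | 
12 | model = Net()
13 | optimizer = optim.Adam(model.parameters(), lr=1e-3)
14 | privacy_engine = PrivacyEngine(
15 |     model,
16 |     batch_size=64,          # expected mini-batch size
17 |     sample_size=6000,       # number of training samples held by this client
18 |     alphas=[1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64)),
19 |     noise_multiplier=2.0,   # sigma
20 |     max_grad_norm=20.0,     # per-sample clipping bound C
21 | )
22 | privacy_engine.attach(optimizer)  # adds per-sample clipping + Gaussian noise to each step
23 | ```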
--------------------------------------------------------------------------------
/code/__pycache__/agent.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuongtran-syr/BFL/HEAD/code/__pycache__/agent.cpython-36.pyc
--------------------------------------------------------------------------------
/code/__pycache__/utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuongtran-syr/BFL/HEAD/code/__pycache__/utils.cpython-36.pyc
--------------------------------------------------------------------------------
/code/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuongtran-syr/BFL/HEAD/code/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/code/__pycache__/fltrain.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuongtran-syr/BFL/HEAD/code/__pycache__/fltrain.cpython-36.pyc
--------------------------------------------------------------------------------
/code/__pycache__/network.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cuongtran-syr/BFL/HEAD/code/__pycache__/network.cpython-36.pyc
--------------------------------------------------------------------------------
/code/run_exp/run_covid_privacy.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | m=$1
3 | s=$2
4 | k=$3
5 | t=$4
6 | python3 run_covid_privacy.py --model_choice $m --sigma $s --K $k --seed $t > result_${s}.out
--------------------------------------------------------------------------------
/code/run_privacy_exp.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | d=$1
3 | m=$2
4 | s=$3
5 | k=$4
6 | t=$5
7 | python3 run_privacy_exp.py --data $d --model_choice $m --sigma $s --K $k --seed $t > result_${s}.out
--------------------------------------------------------------------------------
/code/run_exp/run_privacy_exp.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | d=$1
3 | m=$2
4 | s=$3
5 | k=$4
6 | t=$5
7 | python3 run_incorrect_files.py --data $d --model_choice $m --sigma $s --K $k --seed $t > result_${s}.out
--------------------------------------------------------------------------------
/code/run_misc_private_exp.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | d=$1
3 | m=$2
4 | s=$3
5 | k=$4
6 | t=$5
7 | python3 run_misc_private_exp.py --data $d --model_choice $m --sigma $s --K $k --seed $t > new_result_${s}.out
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/code/run_exp/run_misc_private_exp.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | d=$1
3 | m=$2
4 | s=$3
5 | k=$4
6 | t=$5
7 | python3 run_incorrect_files2.py --data $d --model_choice $m --sigma $s --K $k --seed $t > new_result_${s}.out
--------------------------------------------------------------------------------
/code/run_non_private.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | d=$1
3 | m=$2
4 | a=$3
5 | b=$4
6 | k=$5
7 | t=$6
8 | python3 run_non_private.py --data $d --model_choice $m --augment $a --bs $b --K $k --seed $t > new_result_${t}.out
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # BFL
2 | 
3 | ### Introduction
4 | This repository provides the Python implementation of our AAMAS 2021 paper `A privacy-preserving and accountable multiagent learning framework`.
5 | 
6 | In this project, we implement federated learning in which clients share model updates over a communication topology such as a tree, chain, or ring. We compare the training time, bandwidth, and accuracy of each topology.
7 | 
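8 | ### Running an experiment
9 | 
10 | The runner scripts under `code/` hard-code `file_path` (results) and `data_path` (datasets); adjust these for your machine first. A single private run can then be launched directly, e.g. `python3 run_privacy_exp.py --data MNIST --model_choice fedavg --sigma 2.0 --K 10 --seed 0`. The paired `.submit`/`.in` files sweep the same arguments over datasets, topologies, noise levels, client counts, and seeds on an HTCondor cluster (`condor_submit run_privacy_exp.submit`); `write_input_files.py` regenerates the `.in` sweep files.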
--------------------------------------------------------------------------------
/code/run_privacy_exp.submit:
--------------------------------------------------------------------------------
1 | ############
2 | #
3 | # Example submit file for vanilla job
4 | #
5 | ############
6 | getenv = True
7 | request_cpus = 1
8 | request_memory = 20 GB
9 | Universe = vanilla
10 | Executable = run_privacy_exp.sh
11 | Arguments = $(d) $(m) $(s) $(k) $(t)
12 | input = /dev/null
13 | output = run_privacy_exp.out
14 | error = run_privacy_exp.error
15 | log = run_privacy_exp.log
16 | Queue d,m,s,k,t from run_privacy_exp.in
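17 | # Submit with: condor_submit run_privacy_exp.submit
18 | # Each line of run_privacy_exp.in supplies one job's (d,m,s,k,t) arguments.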
--------------------------------------------------------------------------------
/code/run_exp/run_privacy_exp.submit:
--------------------------------------------------------------------------------
1 | ############
2 | #
3 | # Example submit file for vanilla job
4 | #
5 | ############
6 | getenv = True
7 | request_cpus = 1
8 | request_memory = 20 GB
9 | Universe = vanilla
10 | Executable = run_privacy_exp.sh
11 | Arguments = $(d) $(m) $(s) $(k) $(t)
12 | input = /dev/null
13 | output = run_privacy_exp.out
14 | error = run_privacy_exp.error
15 | log = run_privacy_exp.log
16 | Queue d,m,s,k,t from run_privacy_exp.in
--------------------------------------------------------------------------------
/code/run_non_private.submit:
--------------------------------------------------------------------------------
1 | ############
2 | #
3 | # Example submit file for vanilla job
4 | #
5 | ############
6 | getenv = True
7 | request_cpus = 1
8 | request_memory = 10 GB
9 | Universe = vanilla
10 | Executable = run_non_private.sh
11 | Arguments = $(d) $(m) $(a) $(b) $(k) $(t)
12 | input = /dev/null
13 | output = run_non_private.out
14 | error = run_non_private.error
15 | log = run_non_private.log
16 | Queue d,m,a,b,k,t from run_non_private.in
--------------------------------------------------------------------------------
/code/run_exp/run_covid_privacy.submit:
--------------------------------------------------------------------------------
1 | ############
2 | #
3 | # Example submit file for vanilla job
4 | #
5 | ############
6 | getenv = True
7 | request_cpus = 1
8 | request_memory = 20 GB
9 | Universe = vanilla
10 | Executable = run_covid_privacy.sh
11 | Arguments = $(m) $(s) $(k) $(t)
12 | input = /dev/null
13 | output = run_covid_privacy.out
14 | error = run_covid_privacy.error
15 | log = run_covid_privacy.log
16 | Queue m,s,k,t from run_covid_privacy.in
--------------------------------------------------------------------------------
/code/run_exp/run_misc_private_exp.submit:
--------------------------------------------------------------------------------
1 | ############
2 | #
3 | # Example submit file for vanilla job
4 | #
5 | ############
6 | getenv = True
7 | request_cpus = 1
8 | request_memory = 20 GB
9 | Universe = vanilla
10 | Executable = run_misc_private_exp.sh
11 | Arguments = $(d) $(m) $(s) $(k) $(t)
12 | input = /dev/null
13 | output = run_misc_private_exp.out
14 | error = run_misc_private_exp.error
15 | log = run_misc_private_exp.log
16 | Queue d,m,s,k,t from run_misc_private_exp.in
--------------------------------------------------------------------------------
/code/run_misc_private_exp.submit:
--------------------------------------------------------------------------------
1 | ############
2 | #
3 | # Example submit file for vanilla job
4 | #
5 | ############
6 | getenv = True
7 | request_cpus = 1
8 | request_memory = 15 GB
9 | Universe = vanilla
10 | Executable = run_misc_private_exp.sh
11 | Arguments = $(d) $(m) $(s) $(k) $(t)
12 | input = /dev/null
13 | output = run_misc_private_exp.out
14 | error = run_misc_private_exp.error
15 | log = run_misc_private_exp.log
16 | Queue d,m,s,k,t from run_misc_private_exp.in
--------------------------------------------------------------------------------
/.idea/BFL.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/code/run_exp/run_privacy_exp.in:
--------------------------------------------------------------------------------
1 | MNIST,fedavg,2.0,10,0
2 | MNIST,fedavg,2.0,10,1
3 | MNIST,fedavg,2.0,10,2
4 | MNIST,fedavg,2.0,10,3
5 | MNIST,fedavg,2.0,10,4
6 | MNIST,fedavg,2.0,50,0
7 | MNIST,fedavg,2.0,50,1
8 | MNIST,fedavg,2.0,50,2
9 | MNIST,fedavg,2.0,50,3
10 | MNIST,fedavg,2.0,50,4
11 | MNIST,fedavg,2.0,100,0
12 | MNIST,fedavg,2.0,100,1
13 | MNIST,fedavg,2.0,100,2
14 | MNIST,fedavg,2.0,100,3
15 | MNIST,fedavg,2.0,100,4
16 | FMNIST,fedavg,2.0,10,0
17 | FMNIST,fedavg,2.0,10,1
18 | FMNIST,fedavg,2.0,10,2
19 | FMNIST,fedavg,2.0,10,3
20 | FMNIST,fedavg,2.0,10,4
21 | FMNIST,fedavg,2.0,50,0
22 | FMNIST,fedavg,2.0,50,1
23 | FMNIST,fedavg,2.0,50,2
24 | FMNIST,fedavg,2.0,50,3
25 | FMNIST,fedavg,2.0,50,4
26 | FMNIST,fedavg,2.0,100,0
27 | FMNIST,fedavg,2.0,100,1
28 | FMNIST,fedavg,2.0,100,2
29 | FMNIST,fedavg,2.0,100,3
30 | FMNIST,fedavg,2.0,100,4
31 |
--------------------------------------------------------------------------------
/code/run_exp/run_misc_private_exp.in:
--------------------------------------------------------------------------------
1 | biased_MNIST,fedavg,2.0,10,0
2 | biased_MNIST,fedavg,2.0,10,1
3 | biased_MNIST,fedavg,2.0,10,2
4 | biased_MNIST,fedavg,2.0,10,3
5 | biased_MNIST,fedavg,2.0,10,4
6 | biased_MNIST,fedavg,2.0,50,0
7 | biased_MNIST,fedavg,2.0,50,1
8 | biased_MNIST,fedavg,2.0,50,2
9 | biased_MNIST,fedavg,2.0,50,3
10 | biased_MNIST,fedavg,2.0,50,4
11 | biased_MNIST,fedavg,2.0,100,0
12 | biased_MNIST,fedavg,2.0,100,1
13 | biased_MNIST,fedavg,2.0,100,2
14 | biased_MNIST,fedavg,2.0,100,3
15 | biased_MNIST,fedavg,2.0,100,4
16 | biased_FMNIST,fedavg,2.0,10,0
17 | biased_FMNIST,fedavg,2.0,10,1
18 | biased_FMNIST,fedavg,2.0,10,2
19 | biased_FMNIST,fedavg,2.0,10,3
20 | biased_FMNIST,fedavg,2.0,10,4
21 | biased_FMNIST,fedavg,2.0,50,0
22 | biased_FMNIST,fedavg,2.0,50,1
23 | biased_FMNIST,fedavg,2.0,50,2
24 | biased_FMNIST,fedavg,2.0,50,3
25 | biased_FMNIST,fedavg,2.0,50,4
26 | biased_FMNIST,fedavg,2.0,100,0
27 | biased_FMNIST,fedavg,2.0,100,1
28 | biased_FMNIST,fedavg,2.0,100,2
29 | biased_FMNIST,fedavg,2.0,100,3
30 | biased_FMNIST,fedavg,2.0,100,4
31 |
--------------------------------------------------------------------------------
/code/network.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import torch
3 | import torch.nn as nn
4 | import torch.nn.functional as F
5 | import torch.optim as optim
6 | from torchvision import datasets, transforms
7 | import csv
8 | from pprint import pprint
9 |
10 |
11 | class Net(nn.Module):
12 | def __init__(self):
13 | super(Net, self).__init__()
14 | self.conv1 = nn.Conv2d(1, 32, 3, 1)
15 | self.conv2 = nn.Conv2d(32, 64, 3, 1)
16 | self.dropout1 = nn.Dropout2d(0.25)
17 | self.dropout2 = nn.Dropout2d(0.5)
18 | self.fc1 = nn.Linear(9216, 128)
19 | self.fc2 = nn.Linear(128, 10)
20 |
21 | def forward(self, x):
22 | x = self.conv1(x)
23 | x = F.relu(x)
24 | x = self.conv2(x)
25 | x = F.relu(x)
26 | x = F.max_pool2d(x, 2)
27 | x = self.dropout1(x)
28 | x = torch.flatten(x, 1)
29 | x = self.fc1(x)
30 | x = F.relu(x)
31 | x = self.dropout2(x)
32 | x = self.fc2(x)
33 | output = F.log_softmax(x, dim=1)
34 | return output
35 |
36 |
37 |
38 |
39 |
40 |
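41 | # Shape sanity check (illustrative only; assumes 28x28 single-channel input):
42 | #   >>> import torch
43 | #   >>> Net()(torch.zeros(1, 1, 28, 28)).shape
44 | #   torch.Size([1, 10])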
--------------------------------------------------------------------------------
/code/write_input_files.py:
--------------------------------------------------------------------------------
1 | dataset_list = [ 'biased_MNIST','biased_FMNIST']
2 | model_choice_list = ['fedavg']
3 | K_list = [10, 50, 100]
4 | sigma_list = [2.0]
5 | file_ = open('./run_exp/run_misc_private_exp.in', 'w')
6 | for dataset in dataset_list:
7 | for model_choice in model_choice_list:
8 | for sigma in sigma_list:
9 | for K in K_list:
10 | for seed in range(5):
11 | file_.write('{},{},{},{},{}\n'.format(dataset, model_choice,sigma,K , seed))
12 |
13 | file_.close()
14 | # #
15 | #
16 | #
17 | # dataset_list = [ 'MNIST','FMNIST']
18 | # model_choice_list = [ 'tree','ring','chain','fedavg']
19 | # K_list = [10, 50, 100]
20 | # sigma_list = [0.5, 2.0, 3.0]
21 | # file_ = open('./run_privacy_exp.in', 'w')
22 | # for dataset in dataset_list:
23 | # for model_choice in model_choice_list:
24 | # for sigma in sigma_list:
25 | # for K in K_list:
26 | # for seed in range(5):
27 | # file_.write('{},{},{},{},{}\n'.format(dataset, model_choice,sigma, K,seed))
28 | #
29 | # file_.close()
30 |
31 |
32 |
33 | dataset_list = [ 'MNIST','FMNIST']
34 | model_choice_list = [ 'fedavg']
35 | K_list = [10, 50, 100]
36 | sigma_list = [ 2.0]
37 | file_ = open('./run_exp/run_privacy_exp.in', 'w')
38 | for dataset in dataset_list:
39 | for model_choice in model_choice_list:
40 | for sigma in sigma_list:
41 | for K in K_list:
42 | for seed in range(5):
43 | file_.write('{},{},{},{},{}\n'.format(dataset, model_choice,sigma, K,seed))
44 |
45 | file_.close()
46 |
47 |
48 | # dataset_list = [ 'MNIST','FMNIST']
49 | # model_choice_list = [ 'tree','ring','chain','fedavg']
50 | # K_list = [10, 50, 100]
51 | # augment_list = [0, 1]
52 | # bs_list = [16, 32, 64]
53 | # file_ = open('run_non_private.in', 'w')
54 | # for dataset in dataset_list:
55 | # for model_choice in model_choice_list:
56 | # for augment in augment_list:
57 | # for bs in bs_list:
58 | # for K in K_list:
59 | # for seed in range(5):
60 | # file_.write('{},{},{},{},{},{}\n'.format(dataset, model_choice, augment, bs, K,seed))
61 | #
62 | # file_.close()
63 | #
64 |
65 |
66 | # model_choice_list = [ 'fedavg']
67 | # K_list = [5, 10, 20]
68 | # sigma_list = [ 2.0]
69 | # file_ = open('./run_exp/run_covid_privacy.in', 'w')
70 | # for model_choice in model_choice_list:
71 | # for sigma in sigma_list:
72 | # for K in K_list:
73 | # for seed in range(10):
74 | # file_.write('{},{},{},{}\n'.format(model_choice,sigma, K,seed))
75 | #
76 | # file_.close()
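77 | 
78 | # Running `python3 write_input_files.py` from code/ regenerates
79 | # ./run_exp/run_privacy_exp.in and ./run_exp/run_misc_private_exp.in;
80 | # the commented-out blocks above are kept as templates for the other sweeps
81 | # (non-private runs, all topologies, and the COVID experiment).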
--------------------------------------------------------------------------------
/code/agent.py:
--------------------------------------------------------------------------------
1 | from network import *
2 | from utils import *
3 | from torch.optim.lr_scheduler import StepLR
4 | import torch.utils.data as data_utils
5 | from torch.utils.data import DataLoader
6 | import copy
7 | import numpy as np
8 | from torchdp import PrivacyEngine
9 |
10 |
11 | class Agent_CLF(object):
12 | def __init__(self, params):
13 |
14 | for key, val in params.items():
15 | setattr(self, key, val)
16 | self.logs = {'train_loss': [], 'eps': [], 'val_acc': []}
17 | torch.manual_seed(0)
18 | self.model = Net()
19 | if self.train_loader is None:
20 |             if not self.augmented:
21 | self.train_loader = DataLoader(dataset=data_utils.TensorDataset(self.x_train, self.y_train),
22 | batch_size=self.bs,
23 | shuffle=True)
24 | else:
25 | self.train_loader = DataLoader(dataset=data_utils.TensorDataset(self.x_train, self.y_train),
26 | batch_size=1,
27 | shuffle=True)
28 |
29 |         if self.augmented:
30 | x_train, y_train = get_augmented_data(self.train_loader, self.device)
31 | self.train_loader = DataLoader(dataset=data_utils.TensorDataset(x_train, y_train), batch_size=self.bs,
32 | shuffle=True)
33 |
34 | self.num_train_samples = float(len(self.train_loader.dataset))
35 | self.num_run_epochs = 0
36 | self.random_idx = 0
37 |
38 | def set_weights(self, ref_model):
39 | self.model = Net()
40 | copy_model(ref_model, self.model)
41 |
42 | def get_weights(self):
43 | """
44 | get model weights
45 | """
46 | w_dict = {}
47 | for name, param in self.model.named_parameters():
48 | w_dict[name] = copy.deepcopy(param)
49 | return w_dict
50 |
51 | def train(self):
52 | """
53 |         Train/update the agent's current local model (DP-SGD via Pytorch-DP when self.dp is set).
54 | """
55 | #optimizer = optim.Adadelta(self.model.parameters(), lr=self.lr)
56 | optimizer = optim.Adam(self.model.parameters(), lr=1e-3)
57 | #scheduler = StepLR(optimizer, step_size=1, gamma=self.gamma)
58 | if self.dp:
59 | self.model.zero_grad()
60 | optimizer.zero_grad()
61 | clear_backprops(self.model)
62 |
63 | privacy_engine = PrivacyEngine(
64 | self.model,
65 | batch_size=self.bs,
66 | sample_size=self.num_train_samples,
67 | alphas=[1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64)),
68 | noise_multiplier=self.sigma,
69 | max_grad_norm=self.C)
70 | privacy_engine.attach(optimizer)
71 |
72 | if self.device == 'cuda':
73 | self.model.to('cuda')
74 | self.model.train()
75 | for _ in range(self.epochs):
76 | num_batches = len(self.train_loader)
77 | default_list = list(range(num_batches))
78 | if self.fed_avg:
79 | default_list = np.random.choice(default_list, 1, replace = False)
80 | for batch_idx, (data, target) in enumerate(self.train_loader):
81 | if batch_idx in default_list:
82 | if self.device == 'cuda':
83 | data, target = data.to('cuda'), target.to('cuda')
84 | optimizer.zero_grad()
85 | output = self.model(data)
86 | loss = F.nll_loss(output, target)
87 | loss.backward()
88 | optimizer.step()
89 | self.logs['train_loss'].append(copy.deepcopy(loss.item()))
90 |
91 | #scheduler.step()
92 | self.lr = get_lr(optimizer)
93 | if self.fl_train is False:
94 | curr_acc = eval(self.model, self.test_loader, self.device)
95 | self.logs['val_acc'].append(copy.deepcopy(curr_acc))
96 |
97 |
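98 | # Construction sketch (illustrative; in the experiments params comes from the
99 | # run_* scripts and must supply the attributes used above), e.g.:
100 | #   params = {'x_train': x, 'y_train': y, 'train_loader': None, 'bs': 64,
101 | #             'augmented': False, 'dp': True, 'sigma': 2.0, 'C': 20, 'delta': 1e-5,
102 | #             'epochs': 1, 'lr': 1.0, 'gamma': 0.7, 'device': 'cpu',
103 | #             'fed_avg': False, 'fl_train': True}
104 | #   agent = Agent_CLF(params); agent.train()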
--------------------------------------------------------------------------------
/code/run_privacy_exp.py:
--------------------------------------------------------------------------------
1 | from fltrain import *
2 | import pickle
3 | import argparse, time
4 | file_path = '/home/cutran/Documents/federated_learning/res3/'
5 | data_path = '/home/cutran/Documents/federated_learning/data/'
6 |
7 |
8 |
9 | def run_exp(data, model_choice, sigma, K, seed):
10 | file_name = file_path + '{}_bfl_{}_{}_private_sigma_{}_K_{}_C20_.pkl'.format(model_choice, data, sigma, K, seed)
11 |
12 | bs = 64
13 |
14 | if K == 10:
15 | temp_bs = 6000
16 | elif K == 100:
17 | temp_bs = 600
18 | else:
19 | temp_bs = 1200
20 | if data == 'MNIST':
21 | train_loader = torch.utils.data.DataLoader(
22 | datasets.MNIST(data_path, train=True, download=False,
23 | transform=transforms.Compose([
24 | transforms.ToTensor(),
25 | transforms.Normalize((0.1307,), (0.3081,))
26 | ])), batch_size = temp_bs, shuffle=True)
27 |
28 | test_loader = torch.utils.data.DataLoader(
29 | datasets.MNIST(data_path, train=False, download=False, transform=transforms.Compose([
30 | transforms.ToTensor(),
31 | transforms.Normalize((0.1307,), (0.3081,))
32 | ])), batch_size=10000, shuffle=True)
33 | else:
34 |
35 | train_loader = torch.utils.data.DataLoader(
36 | datasets.FashionMNIST(data_path, train=True, download=False,
37 | transform=transforms.Compose([
38 | transforms.ToTensor(),
39 | transforms.Normalize((0.1307,), (0.3081,))
40 | ])), batch_size=temp_bs, shuffle=True)
41 |
42 | test_loader = torch.utils.data.DataLoader(
43 | datasets.FashionMNIST(data_path, train=False, download=False, transform=transforms.Compose([
44 | transforms.ToTensor(),
45 | transforms.Normalize((0.1307,), (0.3081,))
46 | ])), batch_size=10000, shuffle=True)
47 |
48 | default_params = {'lr': 1.0,'augmented':False, 'bs': bs, 'gamma': 0.70, 'epochs': 1, 'fl_train':True,'num_clients':K,
49 | 'dp': True, 'delta': 1e-5, 'sigma': sigma, 'C': 20, 'device': 'cpu','fed_avg':False}
50 |
51 |
52 | params = {}
53 | for client_idx, (x_train, y_train) in enumerate(train_loader):
54 | params[client_idx] = copy.deepcopy(default_params)
55 | params[client_idx]['x_train'] = x_train
56 | params[client_idx]['y_train'] = y_train
57 | params[client_idx]['train_loader'] = None
58 | if model_choice == 'fedavg':
59 | params[client_idx]['fed_avg'] = True
60 |
61 | num_outer_epochs = 20
62 | num_iters = temp_bs // bs
63 |
64 | if model_choice == 'chain':
65 | fl_model = ChainFL(
66 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
67 | elif model_choice == 'tree':
68 | fl_model = TreeFL(
69 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
70 | elif model_choice == 'ring':
71 | fl_model = RingFL(
72 | configs={'params': params, 'T': num_outer_epochs, 'K': 100, 'test_loader': test_loader, 'num_clients': K})
73 | else:
74 | num_rounds = int(num_outer_epochs * num_iters)
75 | fl_model = FedAvg(configs={'params': params, 'T': num_rounds, 'test_loader': test_loader, 'num_clients': K})
76 |
77 | fl_model.train()
78 |
79 | logs = fl_model.logs['val_acc']
80 |
81 |     with open(file_name, 'wb') as file_handle:
82 |         pickle.dump(logs, file_handle)
83 |
84 |
85 | def main():
86 | starttime = time.time()
87 | parser = argparse.ArgumentParser(description='Test')
88 | parser.add_argument('--model_choice', default='ring', type=str)
89 | parser.add_argument('--data', default='MNIST', type=str)
90 | parser.add_argument('--sigma', default= 0.5, type=float)
91 | parser.add_argument('--K', default=10, type=int)
92 | parser.add_argument('--seed', default=0, type=int)
93 | args = parser.parse_args()
94 | run_exp( args.data, args.model_choice, args.sigma, args.K, args.seed)
95 | print('That took {} seconds'.format(time.time() - starttime))
96 |
97 | if __name__ == "__main__":
98 | main()
--------------------------------------------------------------------------------
/code/run_non_private.py:
--------------------------------------------------------------------------------
1 | from fltrain import *
2 | import pickle
3 | import argparse, time
4 | file_path = '/home/cutran/Documents/federated_learning/res3/'
5 | data_path = '/home/cutran/Documents/federated_learning/data/'
6 |
7 |
8 |
9 | def run_exp(data, model_choice, augment, bs, K, seed):
10 | file_name = file_path + 'non_private_data_{}_{}_{}_{}_{}_{}.pkl'.format(model_choice, data, augment, bs, K, seed)
11 | if augment ==1:
12 | augmented = True
13 | else:
14 | augmented = False
15 |
16 | if K == 10:
17 | temp_bs = 6000
18 | elif K == 100:
19 | temp_bs = 600
20 | else:
21 | temp_bs = 1200
22 | if data == 'MNIST':
23 | train_loader = torch.utils.data.DataLoader(
24 | datasets.MNIST(data_path, train=True, download=False,
25 | transform=transforms.Compose([
26 | transforms.ToTensor(),
27 | transforms.Normalize((0.1307,), (0.3081,))
28 | ])), batch_size = temp_bs, shuffle=True)
29 |
30 | test_loader = torch.utils.data.DataLoader(
31 | datasets.MNIST(data_path, train=False, download=False, transform=transforms.Compose([
32 | transforms.ToTensor(),
33 | transforms.Normalize((0.1307,), (0.3081,))
34 | ])), batch_size=10000, shuffle=True)
35 | else:
36 |
37 | train_loader = torch.utils.data.DataLoader(
38 | datasets.FashionMNIST(data_path, train=True, download=False,
39 | transform=transforms.Compose([
40 | transforms.ToTensor(),
41 | transforms.Normalize((0.1307,), (0.3081,))
42 | ])), batch_size=temp_bs, shuffle=True)
43 |
44 | test_loader = torch.utils.data.DataLoader(
45 | datasets.FashionMNIST(data_path, train=False, download=False, transform=transforms.Compose([
46 | transforms.ToTensor(),
47 | transforms.Normalize((0.1307,), (0.3081,))
48 | ])), batch_size=10000, shuffle=True)
49 |
50 | default_params = {'lr': 1.0,'augmented':augmented, 'bs': bs, 'gamma': 0.70, 'epochs': 1, 'fl_train':True,'num_clients':K,
51 | 'dp': False, 'delta': 1e-5, 'sigma': 0.2, 'C': 20, 'device': 'cpu','fed_avg':False}
52 |
53 |
54 | params = {}
55 | for client_idx, (x_train, y_train) in enumerate(train_loader):
56 | params[client_idx] = copy.deepcopy(default_params)
57 | params[client_idx]['x_train'] = x_train
58 | params[client_idx]['y_train'] = y_train
59 | params[client_idx]['train_loader'] = None
60 | if model_choice == 'fedavg':
61 | params[client_idx]['fed_avg'] = True
62 |
63 | num_outer_epochs = 20
64 | num_iters = temp_bs // bs
65 |
66 | if model_choice == 'chain':
67 | fl_model = ChainFL(
68 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
69 | elif model_choice == 'tree':
70 | fl_model = TreeFL(
71 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
72 | elif model_choice == 'ring':
73 | fl_model = RingFL(
74 | configs={'params': params, 'T': num_outer_epochs, 'K': 100, 'test_loader': test_loader, 'num_clients': K})
75 | else:
76 | num_rounds = int(num_outer_epochs * num_iters)
77 | fl_model = FedAvg(configs={'params': params, 'T': num_rounds, 'test_loader': test_loader, 'num_clients': K})
78 |
79 | fl_model.train()
80 |
81 | logs = fl_model.logs['val_acc']
82 |
83 |     with open(file_name, 'wb') as file_handle:
84 |         pickle.dump(logs, file_handle)
85 |
86 |
87 | def main():
88 | starttime = time.time()
89 | parser = argparse.ArgumentParser(description='Test')
90 | parser.add_argument('--model_choice', default='ring', type=str)
91 | parser.add_argument('--data', default='MNIST', type=str)
92 | parser.add_argument('--augment', default= 0, type=int)
93 | parser.add_argument('--bs', default=16, type=int)
94 | parser.add_argument('--K', default=10, type=int)
95 | parser.add_argument('--seed', default=0, type=int)
96 | args = parser.parse_args()
97 | run_exp( args.data, args.model_choice, args.augment, args.bs, args.K, args.seed)
98 | print('That took {} seconds'.format(time.time() - starttime))
99 |
100 | if __name__ == "__main__":
101 | main()
--------------------------------------------------------------------------------
/code/run_exp/run_covid_privacy.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from covid_utils import *
3 | import pickle
4 | import argparse, time
5 | file_path = '/home/cutran/Documents/federated_learning/res5/'
6 | data_path = '/home/cutran/Documents/federated_learning/data/'
7 |
8 | device = 'cpu'
9 | if device == 'cuda':
10 | torch.set_default_tensor_type(torch.cuda.FloatTensor)
11 | else:
12 | torch.set_default_tensor_type(torch.FloatTensor)
13 | from os import path
14 |
15 |
16 | covid_data_path = '/home/cutran/Documents/federated_learning/data/COVID/data.pkl'
17 | with open(covid_data_path, 'rb') as file_:
18 |     data_res = pickle.load(file_)
19 |
20 | trainX = data_res['trainX']
21 | trainY = data_res['trainY']
22 | testX = data_res['testX']
23 | testY = data_res['testY']
24 |
25 |
26 |
27 | def run_exp(model_choice, sigma, K, seed):
28 | file_name = file_path + 'augmented_{}_bfl_{}_{}_private_sigma_{}_K_{}_C20_.pkl'.format(model_choice, 'covid', sigma,
29 | K, seed)
30 | if path.exists(file_name):
31 | print('EXIST {}'.format(file_name))
32 | return
33 |
34 | bs = 16
35 | test_loader = DataLoader(
36 | dataset=data_utils.TensorDataset(torch.FloatTensor(testX).permute(0, 3, 1, 2), torch.LongTensor(testY[:, 1])),
37 | batch_size=340,
38 | shuffle=True)
39 |
40 | temp_bs = 1440 // K
41 | train_loader = DataLoader(
42 | dataset=data_utils.TensorDataset(torch.FloatTensor(trainX).permute(0, 3, 1, 2), torch.LongTensor(trainY[:, 1])),
43 | batch_size=temp_bs,
44 | shuffle=True)
45 |
46 | default_params = {'lr': 1.0, 'augmented': True, 'bs': bs, 'gamma': 0.70, 'epochs': 1, 'fl_train': True,
47 | 'num_clients': K,
48 |                       'dp': True, 'delta': 1e-5, 'sigma': sigma, 'C': 10, 'device': device, 'fed_avg': False,
49 |                       'covid_model': True}
50 |
51 | params = {}
52 | for client_idx, (x_train, y_train) in enumerate(train_loader):
53 | params[client_idx] = copy.deepcopy(default_params)
54 | params[client_idx]['x_train'] = x_train
55 | params[client_idx]['y_train'] = y_train
56 | params[client_idx]['train_loader'] = None
57 | if model_choice == 'fedavg':
58 | params[client_idx]['fed_avg'] = True
59 |
60 | num_outer_epochs = 20
61 | num_iters = temp_bs // bs
62 |
63 | if model_choice == 'chain':
64 | fl_model = ChainFL(
65 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K,
66 | 'covid_model': True, 'device': device})
67 | elif model_choice == 'tree':
68 | fl_model = TreeFL(
69 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K,
70 | 'covid_model': True, 'device': device})
71 | elif model_choice == 'ring':
72 | fl_model = RingFL(
73 | configs={'params': params, 'T': num_outer_epochs, 'K': 100, 'test_loader': test_loader, 'num_clients': K,
74 | 'covid_model': True, 'device': device})
75 | elif model_choice == 'fedavg':
76 | num_rounds = int(num_outer_epochs * num_iters)
77 | fl_model = FedAvg(configs={'params': params, 'T': num_rounds, 'test_loader': test_loader, 'num_clients': K,
78 | 'covid_model': True, 'device': device})
79 | else:
80 | fl_model = NewFedAvg(
81 | configs={'params': params, 'T': num_outer_epochs, 'test_loader': test_loader, 'num_clients': K,
82 | 'covid_model': True, 'device': device})
83 |
84 | fl_model.train()
85 |
86 | res = {}
87 | res['val_acc'] = copy.deepcopy(fl_model.logs['val_acc'])
88 | res['val_acc_iter'] = copy.deepcopy(fl_model.logs['val_acc_iter'])
89 |
90 |     with open(file_name, 'wb') as file_handle:
91 |         pickle.dump(res, file_handle)
92 |
93 |
94 |
95 |
96 | def main():
97 | starttime = time.time()
98 | parser = argparse.ArgumentParser(description='Test')
99 | parser.add_argument('--model_choice', default='ring', type=str)
100 | parser.add_argument('--sigma', default= 0.5, type=float)
101 | parser.add_argument('--K', default=10, type=int)
102 | parser.add_argument('--seed', default=0, type=int)
103 | args = parser.parse_args()
104 | run_exp(args.model_choice, args.sigma, args.K, args.seed)
105 | print('That took {} seconds'.format(time.time() - starttime))
106 |
107 | if __name__ == "__main__":
108 | main()
--------------------------------------------------------------------------------
/code/run_exp/run_incorrect_files.py:
--------------------------------------------------------------------------------
1 | import sys
2 | sys.path.append('/home/cutran/Documents/federated_learning/BFL/')
3 | from fltrain import *
4 | from utils import *
5 | from network import *
6 | import pickle
7 | import argparse, time
8 | file_path = '/home/cutran/Documents/federated_learning/res4/'
9 | data_path = '/home/cutran/Documents/federated_learning/data/'
10 |
11 | def run_exp(data, model_choice, sigma, K, seed):
12 | file_name = file_path + 'augmented_{}_bfl_{}_{}_private_sigma_{}_K_{}_C20_.pkl'.format(model_choice, data, sigma, K, seed)
13 |
14 | bs = 64
15 |
16 | if K == 10:
17 | temp_bs = 6000
18 | elif K == 100:
19 | temp_bs = 600
20 | else:
21 | temp_bs = 1200
22 |
23 |
24 | if data == 'MNIST':
25 | train_loader = torch.utils.data.DataLoader(
26 | datasets.MNIST(data_path, train=True, download=False,
27 | transform=transforms.Compose([
28 | transforms.ToTensor(),
29 | transforms.Normalize((0.1307,), (0.3081,))
30 | ])), batch_size = temp_bs, shuffle=True)
31 |
32 | test_loader = torch.utils.data.DataLoader(
33 | datasets.MNIST(data_path, train=False, download=False, transform=transforms.Compose([
34 | transforms.ToTensor(),
35 | transforms.Normalize((0.1307,), (0.3081,))
36 | ])), batch_size=10000, shuffle=True)
37 | else:
38 |
39 | train_loader = torch.utils.data.DataLoader(
40 | datasets.FashionMNIST(data_path, train=True, download=False,
41 | transform=transforms.Compose([
42 | transforms.ToTensor(),
43 | transforms.Normalize((0.1307,), (0.3081,))
44 | ])), batch_size=temp_bs, shuffle=True)
45 |
46 | test_loader = torch.utils.data.DataLoader(
47 | datasets.FashionMNIST(data_path, train=False, download=False, transform=transforms.Compose([
48 | transforms.ToTensor(),
49 | transforms.Normalize((0.1307,), (0.3081,))
50 | ])), batch_size=10000, shuffle=True)
51 |
52 | default_params = {'lr': 1.0, 'augmented':False,'bs': bs, 'gamma': 0.70, 'epochs': 1, 'fl_train':True,'num_clients':K,
53 | 'dp': True, 'delta': 1e-5, 'sigma': sigma, 'C': 20, 'device': 'cpu','fed_avg':False}
54 |
55 |
56 | params = {}
57 | for client_idx, (x_train, y_train) in enumerate(train_loader):
58 | params[client_idx] = copy.deepcopy(default_params)
59 | params[client_idx]['x_train'] = x_train
60 | params[client_idx]['y_train'] = y_train
61 | params[client_idx]['train_loader'] = None
62 | if model_choice == 'fedavg':
63 | params[client_idx]['fed_avg'] = True
64 |
65 | num_outer_epochs = 20
66 | num_iters = temp_bs // bs
67 |
68 | if model_choice == 'chain':
69 | fl_model = ChainFL(
70 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
71 | elif model_choice == 'tree':
72 | fl_model = TreeFL(
73 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
74 | elif model_choice == 'ring':
75 | fl_model = RingFL(
76 | configs={'params': params, 'T': num_outer_epochs, 'K': 100, 'test_loader': test_loader, 'num_clients': K})
77 | elif model_choice =='fedavg':
78 | num_rounds = int(num_outer_epochs * num_iters)
79 | fl_model = FedAvg(configs={'params': params, 'T': num_rounds, 'test_loader': test_loader, 'num_clients': K})
80 | else:
81 | fl_model = NewFedAvg(configs={'params': params, 'T': num_outer_epochs, 'test_loader': test_loader, 'num_clients': K})
82 |
83 |
84 | fl_model.train()
85 |
86 | res ={}
87 | res['val_acc'] = copy.deepcopy(fl_model.logs['val_acc'])
88 | res['val_acc_iter'] = copy.deepcopy(fl_model.logs['val_acc_iter'])
89 |
90 |
91 |     with open(file_name, 'wb') as file_handle:
92 |         pickle.dump(res, file_handle)
93 |
94 |
95 | def main():
96 | starttime = time.time()
97 | parser = argparse.ArgumentParser(description='Test')
98 | parser.add_argument('--model_choice', default='newfedavg', type=str)
99 | parser.add_argument('--data', default='MNIST', type=str)
100 | parser.add_argument('--sigma', default= 0.5, type=float)
101 | parser.add_argument('--K', default=10, type=int)
102 | parser.add_argument('--seed', default=0, type=int)
103 | args = parser.parse_args()
104 | run_exp( args.data, args.model_choice, args.sigma, args.K, args.seed)
105 | print('That took {} seconds'.format(time.time() - starttime))
106 |
107 | if __name__ == "__main__":
108 | main()
109 |
110 |
--------------------------------------------------------------------------------
/code/utils.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch
3 | import random
4 | from network import *
5 | import torchvision
6 | from keras.preprocessing.image import ImageDataGenerator
7 | from sklearn.metrics import confusion_matrix
8 |
9 | def get_covid_model():
10 | model_conv = torchvision.models.resnet18(pretrained=True)
11 | for param in model_conv.parameters():
12 | param.requires_grad = False
13 | num_ftrs = model_conv.fc.in_features
14 | model_conv.fc = nn.Linear(num_ftrs, 2)
15 |
16 | return model_conv
17 |
18 |
19 | def get_metrics(y_true, y_pred):
20 | cm = confusion_matrix(y_true.argmax(axis=1), y_pred)
21 | total = sum(sum(cm))
22 | if (len(cm[0]) > 1):
23 | acc = (cm[0, 0] + cm[1, 1]) / total
24 | sensitivity = cm[0, 0] / (cm[0, 0] + cm[0, 1])
25 | specificity = cm[1, 1] / (cm[1, 0] + cm[1, 1])
26 | else:
27 | acc = (cm[0, 0]) / total
28 | sensitivity = 1
29 | specificity = 1
30 | return acc, sensitivity, specificity
31 |
32 | def get_augmented_data(train_loader, device):
33 |
34 | datagen = ImageDataGenerator(
35 | rotation_range=10,
36 | zoom_range=0.10,
37 | width_shift_range=0.1,
38 | height_shift_range=0.1)
39 |
40 | train_dataset = []
41 |
42 | for batch_idx, (data, target) in (enumerate(train_loader)):
43 | train_dataset.append([data, target])
44 | #print(type(target))
45 | for _ in range(4):
46 | if device == 'cuda':
47 | data_aug_x, data_aug_y = datagen.flow(data.reshape((1, 28, 28, 1)).cpu().numpy(), target.cpu().numpy()).next()
48 | else:
49 | data_aug_x, data_aug_y = datagen.flow(data.reshape((1, 28, 28, 1)), target).next()
50 |
51 | train_dataset.append([data_aug_x.reshape((1, 1, 28, 28)), target])
52 |
53 | random.shuffle(train_dataset)
54 |
55 | x_train = torch.cat([torch.FloatTensor(x[0]) for x in train_dataset])
56 | y_train = torch.cat([x[1] for x in train_dataset])
57 |
58 |
59 | return x_train, y_train
60 |
61 |
62 |
63 |
64 | def eval(model, test_loader, device):
65 | """
66 |     Evaluate model on test_loader; returns top-1 accuracy in percent.
67 | """
68 | model.eval()
69 | test_loss = 0
70 | correct = 0
71 | if device =='cuda':
72 | model.to('cuda')
73 | with torch.no_grad():
74 | num_test_samples = 0
75 | for data, target in test_loader:
76 | if device == 'cuda':
77 | data, target = data.to('cuda'), target.to('cuda')
78 | output = model(data)
79 | test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
80 | pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
81 | correct += pred.eq(target.view_as(pred)).sum().item()
82 | num_test_samples += output.shape[0]
83 |
84 | test_loss /= num_test_samples
85 |
86 | return 100. * correct / num_test_samples
87 |
88 |
89 | def get_lr(optimizer):
90 | for param_group in optimizer.param_groups:
91 | return param_group['lr']
92 |
93 |
94 | def copy_model(model1, model2):
95 | """
96 | copy model1 to model2
97 | """
98 | params1 = model1.named_parameters()
99 | params2 = model2.named_parameters()
100 |
101 | dict_params2 = dict(params2)
102 |
103 | for name1, param1 in params1:
104 | if name1 in dict_params2:
105 | dict_params2[name1].data.copy_(param1.data)
106 |
107 |
108 |
109 | def clear_backprops(model: nn.Module) -> None:
110 | """Delete layer.backprops_list in every layer."""
111 | for layer in model.modules():
112 | if hasattr(layer, "backprops_list"):
113 | del layer.backprops_list
114 |
115 |
116 |
117 | def getDataSrc(train_loader):
118 | """
119 |     Assumes train_loader uses a mini-batch size of 1; returns a list of (data, target) pairs.
120 | :param train_loader:
121 | :return:
122 | """
123 | train_dataset = []
124 |
125 | for batch_idx, (data, target) in enumerate(train_loader):
126 | train_dataset.append([data, target])
127 |
128 | return train_dataset
129 |
130 |
131 | def getBaisedDataset(dataSrc, deviceInd, deviceBatchSize, biasPer =0.3):
132 |
133 | """
134 |     Build device deviceInd's shard of dataSrc, biased toward one random class by trimming the others;
135 |     deviceBatchSize = trimSize = len(train_dataset) // device_cnt.
136 | """
137 |
138 | train_segmented = [[],[],[],[],[],[],[],[],[],[]]
139 | deviceData = []
140 | biasClass = random.randint(0,9)
141 | totClass = 10
142 |
143 | for idx, (data, target) in enumerate(dataSrc[deviceInd*deviceBatchSize:(deviceInd+1)*deviceBatchSize]):
144 | train_segmented[target.tolist()[0]].append([data, target])
145 |
146 |
147 |
148 | for ind in range(len(train_segmented)):
149 | if (ind != biasClass):
150 | l = len(train_segmented[ind]) - ((biasPer/(totClass-1))*len(train_segmented[ind]))
151 | # print(ind, l, biasPer//(totClass-1))
152 | train_segmented[ind] = train_segmented[ind][:int(l)]
153 |
154 |
155 | for x in train_segmented:
156 | deviceData += x
157 |
158 | random.shuffle(deviceData)
159 |
160 | x_train = torch.cat([x[0] for x in deviceData])
161 | y_train = torch.cat([x[1] for x in deviceData])
162 |
163 | return x_train, y_train
164 |
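165 | 
166 | # Sketch of the biased-shard pipeline used by run_misc_private_exp.py
167 | # (assumes train_loader was built with batch_size=1):
168 | #   src = getDataSrc(train_loader)              # list of (x, y) singletons
169 | #   x, y = getBaisedDataset(src, deviceInd=0,
170 | #                           deviceBatchSize=len(src) // K, biasPer=0.3)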
--------------------------------------------------------------------------------
/code/run_exp/run_incorrect_files2.py:
--------------------------------------------------------------------------------
1 | import sys
2 | sys.path.append('/home/cutran/Documents/federated_learning/BFL/')
3 | from fltrain import *
4 | from utils import *
5 | from network import *
6 | import pickle
7 | import argparse, time
8 | file_path = '/home/cutran/Documents/federated_learning/res5/'
9 | data_path = '/home/cutran/Documents/federated_learning/data/'
10 |
11 | use_cuda = False
12 | device = torch.device("cuda" if use_cuda else "cpu")
13 |
14 | kwargs = {'num_workers': 1, 'pin_memory': True} if use_cuda else {}
15 |
16 |
17 | def run_exp(data, model_choice, sigma, K, seed):
18 | bs = 64
19 |
20 | if K == 10:
21 | temp_bs = 6000
22 | elif K == 100:
23 | temp_bs = 600
24 | else:
25 | temp_bs = 1200
26 |
27 |
28 | file_name = file_path + '{}_bfl_{}_{}_private_sigma_{}_K_{}_C20_.pkl'.format(model_choice, data, sigma, K, seed)
29 |
30 | default_params = {'lr': 1.0, 'augmented': False, 'bs': bs, 'gamma': 0.70, 'epochs': 1, 'fl_train': True,
31 | 'num_clients': K, 'dp': True, 'delta': 1e-5, 'sigma': sigma, 'C': 20, 'device': 'cpu', 'fed_avg': False}
32 |
33 | if data == 'biased_MNIST':
34 | train_loader = torch.utils.data.DataLoader(
35 | datasets.MNIST(data_path, train=True, download=False,
36 | transform=transforms.Compose([
37 | transforms.ToTensor(),
38 | transforms.Normalize((0.1307,), (0.3081,))
39 | ])), batch_size=1, shuffle=True, **kwargs)
40 |
41 | test_loader = torch.utils.data.DataLoader(
42 | datasets.MNIST(data_path, train=False, download=False, transform=transforms.Compose([
43 | transforms.ToTensor(),
44 | transforms.Normalize((0.1307,), (0.3081,))
45 | ])), batch_size=10000, shuffle=True, **kwargs)
46 | elif data == 'biased_FMNIST':
47 |
48 | train_loader = torch.utils.data.DataLoader(
49 | datasets.FashionMNIST(data_path, train=True, download=False,
50 | transform=transforms.Compose([
51 | transforms.ToTensor(),
52 | transforms.Normalize((0.1307,), (0.3081,))
53 | ])), batch_size=1, shuffle=True, **kwargs)
54 |
55 | test_loader = torch.utils.data.DataLoader(
56 | datasets.FashionMNIST(data_path, train=False, download=False, transform=transforms.Compose([
57 | transforms.ToTensor(),
58 | transforms.Normalize((0.1307,), (0.3081,))
59 | ])), batch_size=10000, shuffle=True, **kwargs)
60 | else:
61 | return
62 |
63 |
64 | if data =='biased_MNIST' or data =='biased_FMNIST':
65 | train_dataset = getDataSrc(train_loader)
66 | trimSize = len(train_dataset) // K
67 |
68 | params = {}
69 | if data == 'EMNIST':
70 | for client_idx, (x_train, y_train) in enumerate(train_loader):
71 |
72 | params[client_idx] = copy.deepcopy(default_params)
73 | params[client_idx]['x_train'] = x_train
74 | params[client_idx]['y_train'] = y_train
75 | params[client_idx]['train_loader'] = None
76 | else:
77 |
78 | for client_idx in range(K):
79 | params[client_idx] = copy.deepcopy(default_params)
80 | x_train, y_train = getBaisedDataset(train_dataset, client_idx, trimSize, biasPer =0.3)
81 |
82 | params[client_idx]['x_train'] = x_train
83 | params[client_idx]['y_train'] = y_train
84 | params[client_idx]['train_loader'] = None
85 | if model_choice == 'fedavg':
86 | params[client_idx]['fed_avg'] = True
87 |
88 | num_outer_epochs = 20
89 | num_iters = temp_bs // bs
90 |
91 | if model_choice == 'chain':
92 | fl_model = ChainFL(configs={'params': params, 'T': 20, 'B': 2, 'test_loader': test_loader, 'num_clients':K})
93 | elif model_choice == 'tree':
94 | fl_model = TreeFL(configs={'params': params, 'T': 20, 'B': 2, 'test_loader': test_loader,'num_clients':K})
95 | elif model_choice =='ring':
96 | fl_model = RingFL(configs={'params': params, 'T': 20, 'K': 100, 'test_loader': test_loader,'num_clients':K})
97 | elif model_choice == 'fedavg':
98 | num_rounds = int(num_outer_epochs * num_iters)
99 | fl_model = FedAvg(configs={'params': params, 'T': num_rounds, 'test_loader': test_loader, 'num_clients': K})
100 | else:
101 | fl_model = NewFedAvg(
102 | configs={'params': params, 'T': num_outer_epochs, 'test_loader': test_loader, 'num_clients': K})
103 |
104 | fl_model.train()
105 |
106 | res = {}
107 | res['val_acc'] = copy.deepcopy(fl_model.logs['val_acc'])
108 | res['val_acc_iter'] = copy.deepcopy(fl_model.logs['val_acc_iter'])
109 |
110 |     with open(file_name, 'wb') as file_handle:
111 |         pickle.dump(res, file_handle)
112 |
113 | def main():
114 | starttime = time.time()
115 | parser = argparse.ArgumentParser(description='Test')
116 | parser.add_argument('--model_choice', default='ring', type=str)
117 | parser.add_argument('--data', default='MNIST', type=str)
118 | parser.add_argument('--sigma', default= 0.5, type=float)
119 | parser.add_argument('--K', default=10, type=int)
120 | parser.add_argument('--seed', default=0, type=int)
121 | args = parser.parse_args()
122 | run_exp( args.data, args.model_choice, args.sigma, args.K, args.seed)
123 | print('That took {} seconds'.format(time.time() - starttime))
124 |
125 | if __name__ == "__main__":
126 | main()
127 |
--------------------------------------------------------------------------------
/code/run_misc_private_exp.py:
--------------------------------------------------------------------------------
1 | from fltrain import *
2 | import pickle
3 | import argparse, time
4 | file_path = '/home/cutran/Documents/federated_learning/res2/'
5 | data_path = '/home/cutran/Documents/federated_learning/data/'
6 |
7 |
8 | def run_exp(data, model_choice, sigma, K, seed):
9 | file_name = file_path + '{}_bfl_{}_{}_private_sigma_{}_K_{}_C20_.pkl'.format(model_choice, data, sigma, K, seed)
10 |
11 | if K == 10:
12 | temp_bs = 24000
13 | elif K == 100 :
14 | temp_bs = 2400
15 | else:
16 | temp_bs = 4800
17 |
18 | default_params = {'lr': 1.0, 'bs': 64, 'gamma': 0.70, 'epochs': 1, 'fl_train': True, 'num_clients': K,
19 | 'dp': True, 'delta': 1e-5, 'sigma': sigma, 'C': 20, 'device': 'cpu', 'fed_avg':False}
20 |
21 | if data == 'biased_MNIST':
22 | train_loader = torch.utils.data.DataLoader(
23 | datasets.MNIST(data_path, train=True, download=False,
24 | transform=transforms.Compose([
25 | transforms.ToTensor(),
26 | transforms.Normalize((0.1307,), (0.3081,))
27 | ])), batch_size=1, shuffle=True)
28 |
29 | test_loader = torch.utils.data.DataLoader(
30 | datasets.MNIST(data_path, train=False, download=False, transform=transforms.Compose([
31 | transforms.ToTensor(),
32 | transforms.Normalize((0.1307,), (0.3081,))
33 | ])), batch_size=10000, shuffle=True)
34 | elif data == 'biased_FMNIST':
35 |
36 | train_loader = torch.utils.data.DataLoader(
37 | datasets.FashionMNIST(data_path, train=True, download=False,
38 | transform=transforms.Compose([
39 | transforms.ToTensor(),
40 | transforms.Normalize((0.1307,), (0.3081,))
41 | ])), batch_size=1, shuffle=True)
42 |
43 | test_loader = torch.utils.data.DataLoader(
44 | datasets.FashionMNIST(data_path, train=False, download=False, transform=transforms.Compose([
45 | transforms.ToTensor(),
46 | transforms.Normalize((0.1307,), (0.3081,))
47 | ])), batch_size=10000, shuffle=True)
48 | else:
49 | train_loader = torch.utils.data.DataLoader(
50 | datasets.FashionMNIST(data_path, train=True, download=False,
51 | transform=transforms.Compose([
52 | transforms.ToTensor(),
53 | transforms.Normalize((0.1307,), (0.3081,))
54 | ])), batch_size=temp_bs, shuffle=True)
55 |
56 | test_loader = torch.utils.data.DataLoader(
57 | datasets.FashionMNIST(data_path, train=False, download=False, transform=transforms.Compose([
58 | transforms.ToTensor(),
59 | transforms.Normalize((0.1307,), (0.3081,))
60 | ])), batch_size=40000, shuffle=True)
61 |
62 |
63 | if data =='biased_MNIST' or data =='biased_FMNIST':
64 | train_dataset = getDataSrc(train_loader)
65 | trimSize = len(train_dataset) // K
66 |
67 | params = {}
68 | if data == 'EMNIST':
69 | for client_idx, (x_train, y_train) in enumerate(train_loader):
70 |
71 | params[client_idx] = copy.deepcopy(default_params)
72 | params[client_idx]['x_train'] = x_train
73 | params[client_idx]['y_train'] = y_train
74 | params[client_idx]['train_loader'] = None
75 | if model_choice =='fedavg':
76 | params[client_idx]['fed_avg'] = True
77 |
78 |
79 | else:
80 |
81 | for client_idx in range(K):
82 | params[client_idx] = copy.deepcopy(default_params)
83 | x_train, y_train = getBaisedDataset(train_dataset, client_idx, trimSize, biasPer =0.3)
84 | params[client_idx]['x_train'] = x_train
85 | params[client_idx]['y_train'] = y_train
86 | params[client_idx]['train_loader'] = None
87 | if model_choice == 'fedavg':
88 | params[client_idx]['fed_avg'] = True
89 |
90 | num_outer_epochs = 20
91 | num_iters = np.min([ len(params[client_idx]['x_train']) for client_idx in range(K)])//64
92 |
93 | if model_choice == 'chain':
94 | fl_model = ChainFL(
95 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
96 | elif model_choice == 'tree':
97 | fl_model = TreeFL(
98 | configs={'params': params, 'T': num_outer_epochs, 'B': 2, 'test_loader': test_loader, 'num_clients': K})
99 | elif model_choice == 'ring':
100 | fl_model = RingFL(
101 | configs={'params': params, 'T': num_outer_epochs, 'K': 100, 'test_loader': test_loader, 'num_clients': K})
102 | else:
103 | num_rounds = int(num_outer_epochs * num_iters)
104 | fl_model = FedAvg(configs={'params': params, 'T': num_rounds, 'test_loader': test_loader, 'num_clients': K})
105 |
106 |
107 | fl_model.train()
108 |
109 | logs = fl_model.logs['val_acc']
110 |
111 |     with open(file_name, 'wb') as file_handle:
112 |         pickle.dump(logs, file_handle)
113 |
114 |
115 | def main():
116 | starttime = time.time()
117 | parser = argparse.ArgumentParser(description='Test')
118 | parser.add_argument('--model_choice', default='ring', type=str)
119 | parser.add_argument('--data', default='MNIST', type=str)
120 | parser.add_argument('--sigma', default= 0.5, type=float)
121 | parser.add_argument('--K', default=10, type=int)
122 | parser.add_argument('--seed', default=0, type=int)
123 | args = parser.parse_args()
124 | run_exp( args.data, args.model_choice, args.sigma, args.K, args.seed)
125 | print('That took {} seconds'.format(time.time() - starttime))
126 |
127 | if __name__ == "__main__":
128 | main()
129 |
130 |
--------------------------------------------------------------------------------
/code/fltrain.py:
--------------------------------------------------------------------------------
1 | # Federated training: BaseFL plus the chain/tree/ring and FedAvg topologies.
2 | from agent import *
3 | import numpy as np
4 |
5 |
6 | class BaseFL(object):
7 | def __init__(self, configs=None):
8 | default_configs = {
9 | 'num_clients': 100,
10 | 'T': 20, # num outer epochs 30-40
11 | 'B': 2, # branch size of tree,
12 | 'params': {},
13 | 'device': 'cpu'
14 | }
15 | self.curr_model = Net()
16 |
17 |
18 | if configs is not None:
19 | default_configs.update(configs)
20 | 
21 |         for key, val in default_configs.items():
22 |             # set an attribute on this BaseFL object for each (possibly overridden) config key
23 |             setattr(self, key, val)
24 | 
25 | 
26 |
27 | self.clients = [Agent_CLF(self.params[i]) for i in range(self.num_clients)]
28 | self.logs = {'val_acc': [], 'val_acc_iter':[]}
29 |
30 | def shuffle_clients(self):
31 | return np.random.permutation(self.num_clients)
32 |
33 | def set_weights(self, ref_model):
34 | """
35 | Set model
36 | """
37 | self.curr_model = Net()
38 | copy_model(ref_model, self.curr_model)
39 |
40 | def get_weights(self):
41 | """
42 | get model weights
43 | """
44 | w_dict = {}
45 | for name, param in self.curr_model.named_parameters():
46 | w_dict[name] = copy.deepcopy(param)
47 | return w_dict
48 |
49 | def agg_model(self, model_list, start, end ):
50 | with torch.no_grad():
51 | global_params = {}
52 | for param in model_list[start]:
53 |                 param_data = model_list[start][param].data.clone()  # clone so the first model's weights are not mutated in place
54 | num_ = 1.0
55 | for model_state in model_list[start+1:end]:
56 | param_data += model_state[param].data
57 | num_ += 1.0
58 | param_data /= num_
59 | global_params[param] = param_data
60 |
61 | return global_params
62 |
63 |
64 |
65 |
66 | class ChainFL(BaseFL):
67 | # extend the base federated learning class for chain topology
68 | def __init__(self, configs=None):
69 | super().__init__(configs)
70 | self.B = 1 # branch factor = 1
71 |
72 | def train(self):
73 | curr_model = None
74 | for _ in range(self.T):
75 | shuffled_clts = super().shuffle_clients()
76 | for clt in shuffled_clts:
77 | if curr_model is not None:
78 | self.clients[clt].set_weights(curr_model)
79 | self.clients[clt].train()
80 | curr_model_dict = self.clients[clt].get_weights()
81 | curr_model = Net()
82 | curr_model.load_state_dict(curr_model_dict)
83 | curr_acc = eval(curr_model, self.test_loader, self.device)
84 | self.logs['val_acc_iter'].append(copy.deepcopy(curr_acc))
85 |
86 | curr_acc = eval(curr_model, self.test_loader, self.device)
87 | print(curr_acc)
88 | self.logs['val_acc'].append(curr_acc)
89 |
90 |
91 | class TreeFL(BaseFL):
92 | def __init__(self, configs=None):
93 | super().__init__(configs)
94 |
95 | self.h = [h for h in range(self.num_clients) if (self.B ** h - 1) / (self.B - 1) >= self.num_clients][
96 | 0] # height of tree
97 | self.index_leaf = (self.B ** (self.h - 1) - 1) / (self.B - 1) + 1
98 | self.num_leaves = float(self.num_clients - self.index_leaf + 1)
99 | self.index_level = [int( (self.B ** (i - 1) - 1) / (self.B - 1)) for i in range(1, self.h +1)]
100 |
101 | def train(self):
102 |
103 | for t in range(self.T):
104 | model_list = []
105 |
106 | shuffled_clts = super().shuffle_clients()
107 | print(shuffled_clts)
108 | for i, clt in enumerate(shuffled_clts):
109 |                 parent_index = int(np.floor(
110 |                     (i - 1) / self.B))  # parent of node i in a B-ary tree (heap indexing) is floor((i-1)/B)
111 | if parent_index >= 0:
112 | parent_model_dict = self.clients[shuffled_clts[parent_index]].get_weights()
113 | self.clients[clt].model = Net()
114 | self.clients[clt].model.load_state_dict(parent_model_dict)
115 | else:
116 | if t >= 1:
117 | self.clients[clt].model = copy.deepcopy(curr_model)
118 |
119 | self.clients[clt].train()
120 |
121 |
122 | model_list.append(dict(self.clients[clt].model.named_parameters()))
123 |
124 |
125 | for (start, end) in zip(self.index_level [:-1], self.index_level[1:]):
126 | global_params = self.agg_model(model_list, start, end )
127 | curr_model = Net()
128 | curr_model.load_state_dict(global_params)
129 | curr_acc = eval(curr_model, self.test_loader, self.device)
130 |
131 | self.logs['val_acc_iter'].append(curr_acc)
132 |
133 | self.curr_model = copy.deepcopy(curr_model)
134 | print(curr_acc)
135 | self.logs['val_acc'].append(curr_acc)
136 |
137 |
138 |
139 | class RingFL(BaseFL):
140 | def __init__(self, configs=None):
141 | super().__init__(configs)
142 |
143 | def train(self):
144 | for t in range(self.T):
145 | model_list = []
146 | shuffled_clts = super().shuffle_clients()
147 | for clt in shuffled_clts:
148 | if t >= 1:
149 | self.clients[clt].model = copy.deepcopy(curr_model)
150 | self.clients[clt].train()
151 | model_list.append(dict(self.clients[clt].model.named_parameters()))
152 |
153 | global_params = self.agg_model(model_list, 0, len(model_list))
154 | curr_model = Net()
155 | curr_model.load_state_dict(global_params)
156 | self.curr_model = copy.deepcopy(curr_model)
157 | curr_acc = eval(self.curr_model, self.test_loader, self.device)
158 | self.logs['val_acc'].append(curr_acc)
159 | self.logs['val_acc_iter'].append(curr_acc)
160 |
161 |
162 |
163 | class FedAvg(BaseFL):
164 | def __init__(self, configs=None):
165 | super().__init__(configs)
166 | self.R = self.num_clients // 2  # number of clients sampled per round
167 | for clt_idx in range(self.num_clients):
168 | self.clients[clt_idx].fed_avg = True
169 |
170 |
171 | def train(self):
172 | for t in range(self.T):
173 | model_list = []
174 | shuffled_clts = super().shuffle_clients()
175 | for clt in shuffled_clts[:self.R]:
176 | if t >= 1:
177 | self.clients[clt].model = copy.deepcopy(curr_model)
178 | self.clients[clt].train()
179 | model_list.append(dict(self.clients[clt].model.named_parameters()))
180 |
181 | global_params = self.agg_model(model_list, 0, len(model_list))
182 |
183 | curr_model = Net()
184 | curr_model.load_state_dict(global_params)
185 | self.curr_model = copy.deepcopy(curr_model)
186 | curr_acc = eval(self.curr_model, self.test_loader, self.device)
187 | print(curr_acc)
188 | self.logs['val_acc'].append(curr_acc)
189 | self.logs['val_acc_iter'].append(curr_acc)
190 |
191 |
192 | class NewFedAvg(BaseFL):
193 | def __init__(self, configs=None):
194 | super().__init__(configs)
195 | self.R = self.num_clients // 5  # smaller per-round sample than FedAvg
196 |
197 | for clt_idx in range(self.num_clients):
198 | self.clients[clt_idx].fed_avg = False
199 |
200 |
201 | def train(self):
202 | for t in range(self.T):
203 | model_list = []
204 | shuffled_clts = super().shuffle_clients()
205 | for clt in shuffled_clts[:self.R]:
206 | if t >= 1:
207 | self.clients[clt].model = copy.deepcopy(curr_model)
208 | self.clients[clt].train()
209 | model_list.append(dict(self.clients[clt].model.named_parameters()))
210 |
211 | global_params = self.agg_model(model_list, 0, len(model_list))
212 |
213 | curr_model = Net()
214 | curr_model.load_state_dict(global_params)
215 | self.curr_model = copy.deepcopy(curr_model)
216 | curr_acc = eval(self.curr_model, self.test_loader, self.device)
217 | print(curr_acc)
218 | self.logs['val_acc'].append(curr_acc)
219 | self.logs['val_acc_iter'].append(curr_acc)
220 |
221 |
--------------------------------------------------------------------------------
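The five classes above share one loop shape: shuffle clients, run local training, merge with agg_model, then evaluate and log. Below is a minimal driver sketch; the import path and the assumption that configs is the dict BaseFL.__init__ expects are illustrative, not part of the file above.

from fltrain import ChainFL, TreeFL, RingFL, FedAvg, NewFedAvg  # assumed import path

# Dispatch table mirroring the class names above; matching the string keys to
# the first topology column of the .in files below is an assumption.
TOPOLOGIES = {'chain': ChainFL, 'tree': TreeFL, 'ring': RingFL,
              'fedavg': FedAvg, 'newfedavg': NewFedAvg}

def run_topology(name, configs):
    fl = TOPOLOGIES[name](configs)  # BaseFL.__init__ builds the clients and logs
    fl.train()                      # runs all T rounds for the chosen topology
    return fl.logs                  # {'val_acc': [...], 'val_acc_iter': [...]}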
/code/run_exp/run_covid_privacy.in:
--------------------------------------------------------------------------------
1 | fedavg,0.5,5,0
2 | fedavg,0.5,5,1
3 | fedavg,0.5,5,2
4 | fedavg,0.5,5,3
5 | fedavg,0.5,5,4
6 | fedavg,0.5,5,5
7 | fedavg,0.5,5,6
8 | fedavg,0.5,5,7
9 | fedavg,0.5,5,8
10 | fedavg,0.5,5,9
11 | fedavg,0.5,10,0
12 | fedavg,0.5,10,1
13 | fedavg,0.5,10,2
14 | fedavg,0.5,10,3
15 | fedavg,0.5,10,4
16 | fedavg,0.5,10,5
17 | fedavg,0.5,10,6
18 | fedavg,0.5,10,7
19 | fedavg,0.5,10,8
20 | fedavg,0.5,10,9
21 | fedavg,0.5,20,0
22 | fedavg,0.5,20,1
23 | fedavg,0.5,20,2
24 | fedavg,0.5,20,3
25 | fedavg,0.5,20,4
26 | fedavg,0.5,20,5
27 | fedavg,0.5,20,6
28 | fedavg,0.5,20,7
29 | fedavg,0.5,20,8
30 | fedavg,0.5,20,9
31 | fedavg,1.0,5,0
32 | fedavg,1.0,5,1
33 | fedavg,1.0,5,2
34 | fedavg,1.0,5,3
35 | fedavg,1.0,5,4
36 | fedavg,1.0,5,5
37 | fedavg,1.0,5,6
38 | fedavg,1.0,5,7
39 | fedavg,1.0,5,8
40 | fedavg,1.0,5,9
41 | fedavg,1.0,10,0
42 | fedavg,1.0,10,1
43 | fedavg,1.0,10,2
44 | fedavg,1.0,10,3
45 | fedavg,1.0,10,4
46 | fedavg,1.0,10,5
47 | fedavg,1.0,10,6
48 | fedavg,1.0,10,7
49 | fedavg,1.0,10,8
50 | fedavg,1.0,10,9
51 | fedavg,1.0,20,0
52 | fedavg,1.0,20,1
53 | fedavg,1.0,20,2
54 | fedavg,1.0,20,3
55 | fedavg,1.0,20,4
56 | fedavg,1.0,20,5
57 | fedavg,1.0,20,6
58 | fedavg,1.0,20,7
59 | fedavg,1.0,20,8
60 | fedavg,1.0,20,9
61 | fedavg,2.0,5,0
62 | fedavg,2.0,5,1
63 | fedavg,2.0,5,2
64 | fedavg,2.0,5,3
65 | fedavg,2.0,5,4
66 | fedavg,2.0,5,5
67 | fedavg,2.0,5,6
68 | fedavg,2.0,5,7
69 | fedavg,2.0,5,8
70 | fedavg,2.0,5,9
71 | fedavg,2.0,10,0
72 | fedavg,2.0,10,1
73 | fedavg,2.0,10,2
74 | fedavg,2.0,10,3
75 | fedavg,2.0,10,4
76 | fedavg,2.0,10,5
77 | fedavg,2.0,10,6
78 | fedavg,2.0,10,7
79 | fedavg,2.0,10,8
80 | fedavg,2.0,10,9
81 | fedavg,2.0,20,0
82 | fedavg,2.0,20,1
83 | fedavg,2.0,20,2
84 | fedavg,2.0,20,3
85 | fedavg,2.0,20,4
86 | fedavg,2.0,20,5
87 | fedavg,2.0,20,6
88 | fedavg,2.0,20,7
89 | fedavg,2.0,20,8
90 | fedavg,2.0,20,9
91 | chain,0.5,5,0
92 | chain,0.5,5,1
93 | chain,0.5,5,2
94 | chain,0.5,5,3
95 | chain,0.5,5,4
96 | chain,0.5,5,5
97 | chain,0.5,5,6
98 | chain,0.5,5,7
99 | chain,0.5,5,8
100 | chain,0.5,5,9
101 | chain,0.5,10,0
102 | chain,0.5,10,1
103 | chain,0.5,10,2
104 | chain,0.5,10,3
105 | chain,0.5,10,4
106 | chain,0.5,10,5
107 | chain,0.5,10,6
108 | chain,0.5,10,7
109 | chain,0.5,10,8
110 | chain,0.5,10,9
111 | chain,0.5,20,0
112 | chain,0.5,20,1
113 | chain,0.5,20,2
114 | chain,0.5,20,3
115 | chain,0.5,20,4
116 | chain,0.5,20,5
117 | chain,0.5,20,6
118 | chain,0.5,20,7
119 | chain,0.5,20,8
120 | chain,0.5,20,9
121 | chain,1.0,5,0
122 | chain,1.0,5,1
123 | chain,1.0,5,2
124 | chain,1.0,5,3
125 | chain,1.0,5,4
126 | chain,1.0,5,5
127 | chain,1.0,5,6
128 | chain,1.0,5,7
129 | chain,1.0,5,8
130 | chain,1.0,5,9
131 | chain,1.0,10,0
132 | chain,1.0,10,1
133 | chain,1.0,10,2
134 | chain,1.0,10,3
135 | chain,1.0,10,4
136 | chain,1.0,10,5
137 | chain,1.0,10,6
138 | chain,1.0,10,7
139 | chain,1.0,10,8
140 | chain,1.0,10,9
141 | chain,1.0,20,0
142 | chain,1.0,20,1
143 | chain,1.0,20,2
144 | chain,1.0,20,3
145 | chain,1.0,20,4
146 | chain,1.0,20,5
147 | chain,1.0,20,6
148 | chain,1.0,20,7
149 | chain,1.0,20,8
150 | chain,1.0,20,9
151 | chain,2.0,5,0
152 | chain,2.0,5,1
153 | chain,2.0,5,2
154 | chain,2.0,5,3
155 | chain,2.0,5,4
156 | chain,2.0,5,5
157 | chain,2.0,5,6
158 | chain,2.0,5,7
159 | chain,2.0,5,8
160 | chain,2.0,5,9
161 | chain,2.0,10,0
162 | chain,2.0,10,1
163 | chain,2.0,10,2
164 | chain,2.0,10,3
165 | chain,2.0,10,4
166 | chain,2.0,10,5
167 | chain,2.0,10,6
168 | chain,2.0,10,7
169 | chain,2.0,10,8
170 | chain,2.0,10,9
171 | chain,2.0,20,0
172 | chain,2.0,20,1
173 | chain,2.0,20,2
174 | chain,2.0,20,3
175 | chain,2.0,20,4
176 | chain,2.0,20,5
177 | chain,2.0,20,6
178 | chain,2.0,20,7
179 | chain,2.0,20,8
180 | chain,2.0,20,9
181 | ring,0.5,5,0
182 | ring,0.5,5,1
183 | ring,0.5,5,2
184 | ring,0.5,5,3
185 | ring,0.5,5,4
186 | ring,0.5,5,5
187 | ring,0.5,5,6
188 | ring,0.5,5,7
189 | ring,0.5,5,8
190 | ring,0.5,5,9
191 | ring,0.5,10,0
192 | ring,0.5,10,1
193 | ring,0.5,10,2
194 | ring,0.5,10,3
195 | ring,0.5,10,4
196 | ring,0.5,10,5
197 | ring,0.5,10,6
198 | ring,0.5,10,7
199 | ring,0.5,10,8
200 | ring,0.5,10,9
201 | ring,0.5,20,0
202 | ring,0.5,20,1
203 | ring,0.5,20,2
204 | ring,0.5,20,3
205 | ring,0.5,20,4
206 | ring,0.5,20,5
207 | ring,0.5,20,6
208 | ring,0.5,20,7
209 | ring,0.5,20,8
210 | ring,0.5,20,9
211 | ring,1.0,5,0
212 | ring,1.0,5,1
213 | ring,1.0,5,2
214 | ring,1.0,5,3
215 | ring,1.0,5,4
216 | ring,1.0,5,5
217 | ring,1.0,5,6
218 | ring,1.0,5,7
219 | ring,1.0,5,8
220 | ring,1.0,5,9
221 | ring,1.0,10,0
222 | ring,1.0,10,1
223 | ring,1.0,10,2
224 | ring,1.0,10,3
225 | ring,1.0,10,4
226 | ring,1.0,10,5
227 | ring,1.0,10,6
228 | ring,1.0,10,7
229 | ring,1.0,10,8
230 | ring,1.0,10,9
231 | ring,1.0,20,0
232 | ring,1.0,20,1
233 | ring,1.0,20,2
234 | ring,1.0,20,3
235 | ring,1.0,20,4
236 | ring,1.0,20,5
237 | ring,1.0,20,6
238 | ring,1.0,20,7
239 | ring,1.0,20,8
240 | ring,1.0,20,9
241 | ring,2.0,5,0
242 | ring,2.0,5,1
243 | ring,2.0,5,2
244 | ring,2.0,5,3
245 | ring,2.0,5,4
246 | ring,2.0,5,5
247 | ring,2.0,5,6
248 | ring,2.0,5,7
249 | ring,2.0,5,8
250 | ring,2.0,5,9
251 | ring,2.0,10,0
252 | ring,2.0,10,1
253 | ring,2.0,10,2
254 | ring,2.0,10,3
255 | ring,2.0,10,4
256 | ring,2.0,10,5
257 | ring,2.0,10,6
258 | ring,2.0,10,7
259 | ring,2.0,10,8
260 | ring,2.0,10,9
261 | ring,2.0,20,0
262 | ring,2.0,20,1
263 | ring,2.0,20,2
264 | ring,2.0,20,3
265 | ring,2.0,20,4
266 | ring,2.0,20,5
267 | ring,2.0,20,6
268 | ring,2.0,20,7
269 | ring,2.0,20,8
270 | ring,2.0,20,9
271 | tree,0.5,5,0
272 | tree,0.5,5,1
273 | tree,0.5,5,2
274 | tree,0.5,5,3
275 | tree,0.5,5,4
276 | tree,0.5,5,5
277 | tree,0.5,5,6
278 | tree,0.5,5,7
279 | tree,0.5,5,8
280 | tree,0.5,5,9
281 | tree,0.5,10,0
282 | tree,0.5,10,1
283 | tree,0.5,10,2
284 | tree,0.5,10,3
285 | tree,0.5,10,4
286 | tree,0.5,10,5
287 | tree,0.5,10,6
288 | tree,0.5,10,7
289 | tree,0.5,10,8
290 | tree,0.5,10,9
291 | tree,0.5,20,0
292 | tree,0.5,20,1
293 | tree,0.5,20,2
294 | tree,0.5,20,3
295 | tree,0.5,20,4
296 | tree,0.5,20,5
297 | tree,0.5,20,6
298 | tree,0.5,20,7
299 | tree,0.5,20,8
300 | tree,0.5,20,9
301 | tree,1.0,5,0
302 | tree,1.0,5,1
303 | tree,1.0,5,2
304 | tree,1.0,5,3
305 | tree,1.0,5,4
306 | tree,1.0,5,5
307 | tree,1.0,5,6
308 | tree,1.0,5,7
309 | tree,1.0,5,8
310 | tree,1.0,5,9
311 | tree,1.0,10,0
312 | tree,1.0,10,1
313 | tree,1.0,10,2
314 | tree,1.0,10,3
315 | tree,1.0,10,4
316 | tree,1.0,10,5
317 | tree,1.0,10,6
318 | tree,1.0,10,7
319 | tree,1.0,10,8
320 | tree,1.0,10,9
321 | tree,1.0,20,0
322 | tree,1.0,20,1
323 | tree,1.0,20,2
324 | tree,1.0,20,3
325 | tree,1.0,20,4
326 | tree,1.0,20,5
327 | tree,1.0,20,6
328 | tree,1.0,20,7
329 | tree,1.0,20,8
330 | tree,1.0,20,9
331 | tree,2.0,5,0
332 | tree,2.0,5,1
333 | tree,2.0,5,2
334 | tree,2.0,5,3
335 | tree,2.0,5,4
336 | tree,2.0,5,5
337 | tree,2.0,5,6
338 | tree,2.0,5,7
339 | tree,2.0,5,8
340 | tree,2.0,5,9
341 | tree,2.0,10,0
342 | tree,2.0,10,1
343 | tree,2.0,10,2
344 | tree,2.0,10,3
345 | tree,2.0,10,4
346 | tree,2.0,10,5
347 | tree,2.0,10,6
348 | tree,2.0,10,7
349 | tree,2.0,10,8
350 | tree,2.0,10,9
351 | tree,2.0,20,0
352 | tree,2.0,20,1
353 | tree,2.0,20,2
354 | tree,2.0,20,3
355 | tree,2.0,20,4
356 | tree,2.0,20,5
357 | tree,2.0,20,6
358 | tree,2.0,20,7
359 | tree,2.0,20,8
360 | tree,2.0,20,9
361 | newfedavg,0.5,5,0
362 | newfedavg,0.5,5,1
363 | newfedavg,0.5,5,2
364 | newfedavg,0.5,5,3
365 | newfedavg,0.5,5,4
366 | newfedavg,0.5,5,5
367 | newfedavg,0.5,5,6
368 | newfedavg,0.5,5,7
369 | newfedavg,0.5,5,8
370 | newfedavg,0.5,5,9
371 | newfedavg,0.5,10,0
372 | newfedavg,0.5,10,1
373 | newfedavg,0.5,10,2
374 | newfedavg,0.5,10,3
375 | newfedavg,0.5,10,4
376 | newfedavg,0.5,10,5
377 | newfedavg,0.5,10,6
378 | newfedavg,0.5,10,7
379 | newfedavg,0.5,10,8
380 | newfedavg,0.5,10,9
381 | newfedavg,0.5,20,0
382 | newfedavg,0.5,20,1
383 | newfedavg,0.5,20,2
384 | newfedavg,0.5,20,3
385 | newfedavg,0.5,20,4
386 | newfedavg,0.5,20,5
387 | newfedavg,0.5,20,6
388 | newfedavg,0.5,20,7
389 | newfedavg,0.5,20,8
390 | newfedavg,0.5,20,9
391 | newfedavg,1.0,5,0
392 | newfedavg,1.0,5,1
393 | newfedavg,1.0,5,2
394 | newfedavg,1.0,5,3
395 | newfedavg,1.0,5,4
396 | newfedavg,1.0,5,5
397 | newfedavg,1.0,5,6
398 | newfedavg,1.0,5,7
399 | newfedavg,1.0,5,8
400 | newfedavg,1.0,5,9
401 | newfedavg,1.0,10,0
402 | newfedavg,1.0,10,1
403 | newfedavg,1.0,10,2
404 | newfedavg,1.0,10,3
405 | newfedavg,1.0,10,4
406 | newfedavg,1.0,10,5
407 | newfedavg,1.0,10,6
408 | newfedavg,1.0,10,7
409 | newfedavg,1.0,10,8
410 | newfedavg,1.0,10,9
411 | newfedavg,1.0,20,0
412 | newfedavg,1.0,20,1
413 | newfedavg,1.0,20,2
414 | newfedavg,1.0,20,3
415 | newfedavg,1.0,20,4
416 | newfedavg,1.0,20,5
417 | newfedavg,1.0,20,6
418 | newfedavg,1.0,20,7
419 | newfedavg,1.0,20,8
420 | newfedavg,1.0,20,9
421 | newfedavg,2.0,5,0
422 | newfedavg,2.0,5,1
423 | newfedavg,2.0,5,2
424 | newfedavg,2.0,5,3
425 | newfedavg,2.0,5,4
426 | newfedavg,2.0,5,5
427 | newfedavg,2.0,5,6
428 | newfedavg,2.0,5,7
429 | newfedavg,2.0,5,8
430 | newfedavg,2.0,5,9
431 | newfedavg,2.0,10,0
432 | newfedavg,2.0,10,1
433 | newfedavg,2.0,10,2
434 | newfedavg,2.0,10,3
435 | newfedavg,2.0,10,4
436 | newfedavg,2.0,10,5
437 | newfedavg,2.0,10,6
438 | newfedavg,2.0,10,7
439 | newfedavg,2.0,10,8
440 | newfedavg,2.0,10,9
441 | newfedavg,2.0,20,0
442 | newfedavg,2.0,20,1
443 | newfedavg,2.0,20,2
444 | newfedavg,2.0,20,3
445 | newfedavg,2.0,20,4
446 | newfedavg,2.0,20,5
447 | newfedavg,2.0,20,6
448 | newfedavg,2.0,20,7
449 | newfedavg,2.0,20,8
450 | newfedavg,2.0,20,9
451 |
--------------------------------------------------------------------------------
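Each row of run_covid_privacy.in above is one experiment: a topology tag followed by three numeric fields, read here as a privacy/noise level, a size-or-rounds parameter, and a repetition index. The field names in this sketch are guesses; run_covid_privacy.py defines the authoritative meaning of each column.

def parse_covid_row(line):
    # Field names are assumptions for illustration; only the types are
    # evident from the file itself (str, float, int, int).
    topology, noise, size, seed = line.strip().split(',')
    return topology, float(noise), int(size), int(seed)

with open('run_covid_privacy.in') as f:
    jobs = [parse_covid_row(l) for l in f if l.strip()]
assert jobs[0] == ('fedavg', 0.5, 5, 0)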
/code/run_privacy_exp.in:
--------------------------------------------------------------------------------
1 | MNIST,tree,0.5,10,0
2 | MNIST,tree,0.5,10,1
3 | MNIST,tree,0.5,10,2
4 | MNIST,tree,0.5,10,3
5 | MNIST,tree,0.5,10,4
6 | MNIST,tree,0.5,50,0
7 | MNIST,tree,0.5,50,1
8 | MNIST,tree,0.5,50,2
9 | MNIST,tree,0.5,50,3
10 | MNIST,tree,0.5,50,4
11 | MNIST,tree,0.5,100,0
12 | MNIST,tree,0.5,100,1
13 | MNIST,tree,0.5,100,2
14 | MNIST,tree,0.5,100,3
15 | MNIST,tree,0.5,100,4
16 | MNIST,tree,2.0,10,0
17 | MNIST,tree,2.0,10,1
18 | MNIST,tree,2.0,10,2
19 | MNIST,tree,2.0,10,3
20 | MNIST,tree,2.0,10,4
21 | MNIST,tree,2.0,50,0
22 | MNIST,tree,2.0,50,1
23 | MNIST,tree,2.0,50,2
24 | MNIST,tree,2.0,50,3
25 | MNIST,tree,2.0,50,4
26 | MNIST,tree,2.0,100,0
27 | MNIST,tree,2.0,100,1
28 | MNIST,tree,2.0,100,2
29 | MNIST,tree,2.0,100,3
30 | MNIST,tree,2.0,100,4
31 | MNIST,tree,3.0,10,0
32 | MNIST,tree,3.0,10,1
33 | MNIST,tree,3.0,10,2
34 | MNIST,tree,3.0,10,3
35 | MNIST,tree,3.0,10,4
36 | MNIST,tree,3.0,50,0
37 | MNIST,tree,3.0,50,1
38 | MNIST,tree,3.0,50,2
39 | MNIST,tree,3.0,50,3
40 | MNIST,tree,3.0,50,4
41 | MNIST,tree,3.0,100,0
42 | MNIST,tree,3.0,100,1
43 | MNIST,tree,3.0,100,2
44 | MNIST,tree,3.0,100,3
45 | MNIST,tree,3.0,100,4
46 | MNIST,ring,0.5,10,0
47 | MNIST,ring,0.5,10,1
48 | MNIST,ring,0.5,10,2
49 | MNIST,ring,0.5,10,3
50 | MNIST,ring,0.5,10,4
51 | MNIST,ring,0.5,50,0
52 | MNIST,ring,0.5,50,1
53 | MNIST,ring,0.5,50,2
54 | MNIST,ring,0.5,50,3
55 | MNIST,ring,0.5,50,4
56 | MNIST,ring,0.5,100,0
57 | MNIST,ring,0.5,100,1
58 | MNIST,ring,0.5,100,2
59 | MNIST,ring,0.5,100,3
60 | MNIST,ring,0.5,100,4
61 | MNIST,ring,2.0,10,0
62 | MNIST,ring,2.0,10,1
63 | MNIST,ring,2.0,10,2
64 | MNIST,ring,2.0,10,3
65 | MNIST,ring,2.0,10,4
66 | MNIST,ring,2.0,50,0
67 | MNIST,ring,2.0,50,1
68 | MNIST,ring,2.0,50,2
69 | MNIST,ring,2.0,50,3
70 | MNIST,ring,2.0,50,4
71 | MNIST,ring,2.0,100,0
72 | MNIST,ring,2.0,100,1
73 | MNIST,ring,2.0,100,2
74 | MNIST,ring,2.0,100,3
75 | MNIST,ring,2.0,100,4
76 | MNIST,ring,3.0,10,0
77 | MNIST,ring,3.0,10,1
78 | MNIST,ring,3.0,10,2
79 | MNIST,ring,3.0,10,3
80 | MNIST,ring,3.0,10,4
81 | MNIST,ring,3.0,50,0
82 | MNIST,ring,3.0,50,1
83 | MNIST,ring,3.0,50,2
84 | MNIST,ring,3.0,50,3
85 | MNIST,ring,3.0,50,4
86 | MNIST,ring,3.0,100,0
87 | MNIST,ring,3.0,100,1
88 | MNIST,ring,3.0,100,2
89 | MNIST,ring,3.0,100,3
90 | MNIST,ring,3.0,100,4
91 | MNIST,chain,0.5,10,0
92 | MNIST,chain,0.5,10,1
93 | MNIST,chain,0.5,10,2
94 | MNIST,chain,0.5,10,3
95 | MNIST,chain,0.5,10,4
96 | MNIST,chain,0.5,50,0
97 | MNIST,chain,0.5,50,1
98 | MNIST,chain,0.5,50,2
99 | MNIST,chain,0.5,50,3
100 | MNIST,chain,0.5,50,4
101 | MNIST,chain,0.5,100,0
102 | MNIST,chain,0.5,100,1
103 | MNIST,chain,0.5,100,2
104 | MNIST,chain,0.5,100,3
105 | MNIST,chain,0.5,100,4
106 | MNIST,chain,2.0,10,0
107 | MNIST,chain,2.0,10,1
108 | MNIST,chain,2.0,10,2
109 | MNIST,chain,2.0,10,3
110 | MNIST,chain,2.0,10,4
111 | MNIST,chain,2.0,50,0
112 | MNIST,chain,2.0,50,1
113 | MNIST,chain,2.0,50,2
114 | MNIST,chain,2.0,50,3
115 | MNIST,chain,2.0,50,4
116 | MNIST,chain,2.0,100,0
117 | MNIST,chain,2.0,100,1
118 | MNIST,chain,2.0,100,2
119 | MNIST,chain,2.0,100,3
120 | MNIST,chain,2.0,100,4
121 | MNIST,chain,3.0,10,0
122 | MNIST,chain,3.0,10,1
123 | MNIST,chain,3.0,10,2
124 | MNIST,chain,3.0,10,3
125 | MNIST,chain,3.0,10,4
126 | MNIST,chain,3.0,50,0
127 | MNIST,chain,3.0,50,1
128 | MNIST,chain,3.0,50,2
129 | MNIST,chain,3.0,50,3
130 | MNIST,chain,3.0,50,4
131 | MNIST,chain,3.0,100,0
132 | MNIST,chain,3.0,100,1
133 | MNIST,chain,3.0,100,2
134 | MNIST,chain,3.0,100,3
135 | MNIST,chain,3.0,100,4
136 | MNIST,fedavg,0.5,10,0
137 | MNIST,fedavg,0.5,10,1
138 | MNIST,fedavg,0.5,10,2
139 | MNIST,fedavg,0.5,10,3
140 | MNIST,fedavg,0.5,10,4
141 | MNIST,fedavg,0.5,50,0
142 | MNIST,fedavg,0.5,50,1
143 | MNIST,fedavg,0.5,50,2
144 | MNIST,fedavg,0.5,50,3
145 | MNIST,fedavg,0.5,50,4
146 | MNIST,fedavg,0.5,100,0
147 | MNIST,fedavg,0.5,100,1
148 | MNIST,fedavg,0.5,100,2
149 | MNIST,fedavg,0.5,100,3
150 | MNIST,fedavg,0.5,100,4
151 | MNIST,fedavg,2.0,10,0
152 | MNIST,fedavg,2.0,10,1
153 | MNIST,fedavg,2.0,10,2
154 | MNIST,fedavg,2.0,10,3
155 | MNIST,fedavg,2.0,10,4
156 | MNIST,fedavg,2.0,50,0
157 | MNIST,fedavg,2.0,50,1
158 | MNIST,fedavg,2.0,50,2
159 | MNIST,fedavg,2.0,50,3
160 | MNIST,fedavg,2.0,50,4
161 | MNIST,fedavg,2.0,100,0
162 | MNIST,fedavg,2.0,100,1
163 | MNIST,fedavg,2.0,100,2
164 | MNIST,fedavg,2.0,100,3
165 | MNIST,fedavg,2.0,100,4
166 | MNIST,fedavg,3.0,10,0
167 | MNIST,fedavg,3.0,10,1
168 | MNIST,fedavg,3.0,10,2
169 | MNIST,fedavg,3.0,10,3
170 | MNIST,fedavg,3.0,10,4
171 | MNIST,fedavg,3.0,50,0
172 | MNIST,fedavg,3.0,50,1
173 | MNIST,fedavg,3.0,50,2
174 | MNIST,fedavg,3.0,50,3
175 | MNIST,fedavg,3.0,50,4
176 | MNIST,fedavg,3.0,100,0
177 | MNIST,fedavg,3.0,100,1
178 | MNIST,fedavg,3.0,100,2
179 | MNIST,fedavg,3.0,100,3
180 | MNIST,fedavg,3.0,100,4
181 | FMNIST,tree,0.5,10,0
182 | FMNIST,tree,0.5,10,1
183 | FMNIST,tree,0.5,10,2
184 | FMNIST,tree,0.5,10,3
185 | FMNIST,tree,0.5,10,4
186 | FMNIST,tree,0.5,50,0
187 | FMNIST,tree,0.5,50,1
188 | FMNIST,tree,0.5,50,2
189 | FMNIST,tree,0.5,50,3
190 | FMNIST,tree,0.5,50,4
191 | FMNIST,tree,0.5,100,0
192 | FMNIST,tree,0.5,100,1
193 | FMNIST,tree,0.5,100,2
194 | FMNIST,tree,0.5,100,3
195 | FMNIST,tree,0.5,100,4
196 | FMNIST,tree,2.0,10,0
197 | FMNIST,tree,2.0,10,1
198 | FMNIST,tree,2.0,10,2
199 | FMNIST,tree,2.0,10,3
200 | FMNIST,tree,2.0,10,4
201 | FMNIST,tree,2.0,50,0
202 | FMNIST,tree,2.0,50,1
203 | FMNIST,tree,2.0,50,2
204 | FMNIST,tree,2.0,50,3
205 | FMNIST,tree,2.0,50,4
206 | FMNIST,tree,2.0,100,0
207 | FMNIST,tree,2.0,100,1
208 | FMNIST,tree,2.0,100,2
209 | FMNIST,tree,2.0,100,3
210 | FMNIST,tree,2.0,100,4
211 | FMNIST,tree,3.0,10,0
212 | FMNIST,tree,3.0,10,1
213 | FMNIST,tree,3.0,10,2
214 | FMNIST,tree,3.0,10,3
215 | FMNIST,tree,3.0,10,4
216 | FMNIST,tree,3.0,50,0
217 | FMNIST,tree,3.0,50,1
218 | FMNIST,tree,3.0,50,2
219 | FMNIST,tree,3.0,50,3
220 | FMNIST,tree,3.0,50,4
221 | FMNIST,tree,3.0,100,0
222 | FMNIST,tree,3.0,100,1
223 | FMNIST,tree,3.0,100,2
224 | FMNIST,tree,3.0,100,3
225 | FMNIST,tree,3.0,100,4
226 | FMNIST,ring,0.5,10,0
227 | FMNIST,ring,0.5,10,1
228 | FMNIST,ring,0.5,10,2
229 | FMNIST,ring,0.5,10,3
230 | FMNIST,ring,0.5,10,4
231 | FMNIST,ring,0.5,50,0
232 | FMNIST,ring,0.5,50,1
233 | FMNIST,ring,0.5,50,2
234 | FMNIST,ring,0.5,50,3
235 | FMNIST,ring,0.5,50,4
236 | FMNIST,ring,0.5,100,0
237 | FMNIST,ring,0.5,100,1
238 | FMNIST,ring,0.5,100,2
239 | FMNIST,ring,0.5,100,3
240 | FMNIST,ring,0.5,100,4
241 | FMNIST,ring,2.0,10,0
242 | FMNIST,ring,2.0,10,1
243 | FMNIST,ring,2.0,10,2
244 | FMNIST,ring,2.0,10,3
245 | FMNIST,ring,2.0,10,4
246 | FMNIST,ring,2.0,50,0
247 | FMNIST,ring,2.0,50,1
248 | FMNIST,ring,2.0,50,2
249 | FMNIST,ring,2.0,50,3
250 | FMNIST,ring,2.0,50,4
251 | FMNIST,ring,2.0,100,0
252 | FMNIST,ring,2.0,100,1
253 | FMNIST,ring,2.0,100,2
254 | FMNIST,ring,2.0,100,3
255 | FMNIST,ring,2.0,100,4
256 | FMNIST,ring,3.0,10,0
257 | FMNIST,ring,3.0,10,1
258 | FMNIST,ring,3.0,10,2
259 | FMNIST,ring,3.0,10,3
260 | FMNIST,ring,3.0,10,4
261 | FMNIST,ring,3.0,50,0
262 | FMNIST,ring,3.0,50,1
263 | FMNIST,ring,3.0,50,2
264 | FMNIST,ring,3.0,50,3
265 | FMNIST,ring,3.0,50,4
266 | FMNIST,ring,3.0,100,0
267 | FMNIST,ring,3.0,100,1
268 | FMNIST,ring,3.0,100,2
269 | FMNIST,ring,3.0,100,3
270 | FMNIST,ring,3.0,100,4
271 | FMNIST,chain,0.5,10,0
272 | FMNIST,chain,0.5,10,1
273 | FMNIST,chain,0.5,10,2
274 | FMNIST,chain,0.5,10,3
275 | FMNIST,chain,0.5,10,4
276 | FMNIST,chain,0.5,50,0
277 | FMNIST,chain,0.5,50,1
278 | FMNIST,chain,0.5,50,2
279 | FMNIST,chain,0.5,50,3
280 | FMNIST,chain,0.5,50,4
281 | FMNIST,chain,0.5,100,0
282 | FMNIST,chain,0.5,100,1
283 | FMNIST,chain,0.5,100,2
284 | FMNIST,chain,0.5,100,3
285 | FMNIST,chain,0.5,100,4
286 | FMNIST,chain,2.0,10,0
287 | FMNIST,chain,2.0,10,1
288 | FMNIST,chain,2.0,10,2
289 | FMNIST,chain,2.0,10,3
290 | FMNIST,chain,2.0,10,4
291 | FMNIST,chain,2.0,50,0
292 | FMNIST,chain,2.0,50,1
293 | FMNIST,chain,2.0,50,2
294 | FMNIST,chain,2.0,50,3
295 | FMNIST,chain,2.0,50,4
296 | FMNIST,chain,2.0,100,0
297 | FMNIST,chain,2.0,100,1
298 | FMNIST,chain,2.0,100,2
299 | FMNIST,chain,2.0,100,3
300 | FMNIST,chain,2.0,100,4
301 | FMNIST,chain,3.0,10,0
302 | FMNIST,chain,3.0,10,1
303 | FMNIST,chain,3.0,10,2
304 | FMNIST,chain,3.0,10,3
305 | FMNIST,chain,3.0,10,4
306 | FMNIST,chain,3.0,50,0
307 | FMNIST,chain,3.0,50,1
308 | FMNIST,chain,3.0,50,2
309 | FMNIST,chain,3.0,50,3
310 | FMNIST,chain,3.0,50,4
311 | FMNIST,chain,3.0,100,0
312 | FMNIST,chain,3.0,100,1
313 | FMNIST,chain,3.0,100,2
314 | FMNIST,chain,3.0,100,3
315 | FMNIST,chain,3.0,100,4
316 | FMNIST,fedavg,0.5,10,0
317 | FMNIST,fedavg,0.5,10,1
318 | FMNIST,fedavg,0.5,10,2
319 | FMNIST,fedavg,0.5,10,3
320 | FMNIST,fedavg,0.5,10,4
321 | FMNIST,fedavg,0.5,50,0
322 | FMNIST,fedavg,0.5,50,1
323 | FMNIST,fedavg,0.5,50,2
324 | FMNIST,fedavg,0.5,50,3
325 | FMNIST,fedavg,0.5,50,4
326 | FMNIST,fedavg,0.5,100,0
327 | FMNIST,fedavg,0.5,100,1
328 | FMNIST,fedavg,0.5,100,2
329 | FMNIST,fedavg,0.5,100,3
330 | FMNIST,fedavg,0.5,100,4
331 | FMNIST,fedavg,2.0,10,0
332 | FMNIST,fedavg,2.0,10,1
333 | FMNIST,fedavg,2.0,10,2
334 | FMNIST,fedavg,2.0,10,3
335 | FMNIST,fedavg,2.0,10,4
336 | FMNIST,fedavg,2.0,50,0
337 | FMNIST,fedavg,2.0,50,1
338 | FMNIST,fedavg,2.0,50,2
339 | FMNIST,fedavg,2.0,50,3
340 | FMNIST,fedavg,2.0,50,4
341 | FMNIST,fedavg,2.0,100,0
342 | FMNIST,fedavg,2.0,100,1
343 | FMNIST,fedavg,2.0,100,2
344 | FMNIST,fedavg,2.0,100,3
345 | FMNIST,fedavg,2.0,100,4
346 | FMNIST,fedavg,3.0,10,0
347 | FMNIST,fedavg,3.0,10,1
348 | FMNIST,fedavg,3.0,10,2
349 | FMNIST,fedavg,3.0,10,3
350 | FMNIST,fedavg,3.0,10,4
351 | FMNIST,fedavg,3.0,50,0
352 | FMNIST,fedavg,3.0,50,1
353 | FMNIST,fedavg,3.0,50,2
354 | FMNIST,fedavg,3.0,50,3
355 | FMNIST,fedavg,3.0,50,4
356 | FMNIST,fedavg,3.0,100,0
357 | FMNIST,fedavg,3.0,100,1
358 | FMNIST,fedavg,3.0,100,2
359 | FMNIST,fedavg,3.0,100,3
360 | FMNIST,fedavg,3.0,100,4
361 |
--------------------------------------------------------------------------------
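run_privacy_exp.in is a full Cartesian grid: {MNIST, FMNIST} x {tree, ring, chain, fedavg} x {0.5, 2.0, 3.0} x {10, 50, 100} x 5 seeds, i.e. 360 rows with the rightmost field varying fastest. A sketch of regenerating it with itertools.product follows; whether write_input_files.py builds it exactly this way is an assumption.

from itertools import product

datasets = ['MNIST', 'FMNIST']
topologies = ['tree', 'ring', 'chain', 'fedavg']
levels = ['0.5', '2.0', '3.0']
sizes = ['10', '50', '100']
seeds = [str(s) for s in range(5)]

# product iterates its last argument fastest, reproducing the row order above.
with open('run_privacy_exp.in', 'w') as f:
    for row in product(datasets, topologies, levels, sizes, seeds):
        f.write(','.join(row) + '\n')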
/code/run_misc_private_exp.in:
--------------------------------------------------------------------------------
1 | biased_MNIST,tree,0.5,10,0
2 | biased_MNIST,tree,0.5,10,1
3 | biased_MNIST,tree,0.5,10,2
4 | biased_MNIST,tree,0.5,10,3
5 | biased_MNIST,tree,0.5,10,4
6 | biased_MNIST,tree,0.5,50,0
7 | biased_MNIST,tree,0.5,50,1
8 | biased_MNIST,tree,0.5,50,2
9 | biased_MNIST,tree,0.5,50,3
10 | biased_MNIST,tree,0.5,50,4
11 | biased_MNIST,tree,0.5,100,0
12 | biased_MNIST,tree,0.5,100,1
13 | biased_MNIST,tree,0.5,100,2
14 | biased_MNIST,tree,0.5,100,3
15 | biased_MNIST,tree,0.5,100,4
16 | biased_MNIST,tree,2.0,10,0
17 | biased_MNIST,tree,2.0,10,1
18 | biased_MNIST,tree,2.0,10,2
19 | biased_MNIST,tree,2.0,10,3
20 | biased_MNIST,tree,2.0,10,4
21 | biased_MNIST,tree,2.0,50,0
22 | biased_MNIST,tree,2.0,50,1
23 | biased_MNIST,tree,2.0,50,2
24 | biased_MNIST,tree,2.0,50,3
25 | biased_MNIST,tree,2.0,50,4
26 | biased_MNIST,tree,2.0,100,0
27 | biased_MNIST,tree,2.0,100,1
28 | biased_MNIST,tree,2.0,100,2
29 | biased_MNIST,tree,2.0,100,3
30 | biased_MNIST,tree,2.0,100,4
31 | biased_MNIST,tree,3.0,10,0
32 | biased_MNIST,tree,3.0,10,1
33 | biased_MNIST,tree,3.0,10,2
34 | biased_MNIST,tree,3.0,10,3
35 | biased_MNIST,tree,3.0,10,4
36 | biased_MNIST,tree,3.0,50,0
37 | biased_MNIST,tree,3.0,50,1
38 | biased_MNIST,tree,3.0,50,2
39 | biased_MNIST,tree,3.0,50,3
40 | biased_MNIST,tree,3.0,50,4
41 | biased_MNIST,tree,3.0,100,0
42 | biased_MNIST,tree,3.0,100,1
43 | biased_MNIST,tree,3.0,100,2
44 | biased_MNIST,tree,3.0,100,3
45 | biased_MNIST,tree,3.0,100,4
46 | biased_MNIST,ring,0.5,10,0
47 | biased_MNIST,ring,0.5,10,1
48 | biased_MNIST,ring,0.5,10,2
49 | biased_MNIST,ring,0.5,10,3
50 | biased_MNIST,ring,0.5,10,4
51 | biased_MNIST,ring,0.5,50,0
52 | biased_MNIST,ring,0.5,50,1
53 | biased_MNIST,ring,0.5,50,2
54 | biased_MNIST,ring,0.5,50,3
55 | biased_MNIST,ring,0.5,50,4
56 | biased_MNIST,ring,0.5,100,0
57 | biased_MNIST,ring,0.5,100,1
58 | biased_MNIST,ring,0.5,100,2
59 | biased_MNIST,ring,0.5,100,3
60 | biased_MNIST,ring,0.5,100,4
61 | biased_MNIST,ring,2.0,10,0
62 | biased_MNIST,ring,2.0,10,1
63 | biased_MNIST,ring,2.0,10,2
64 | biased_MNIST,ring,2.0,10,3
65 | biased_MNIST,ring,2.0,10,4
66 | biased_MNIST,ring,2.0,50,0
67 | biased_MNIST,ring,2.0,50,1
68 | biased_MNIST,ring,2.0,50,2
69 | biased_MNIST,ring,2.0,50,3
70 | biased_MNIST,ring,2.0,50,4
71 | biased_MNIST,ring,2.0,100,0
72 | biased_MNIST,ring,2.0,100,1
73 | biased_MNIST,ring,2.0,100,2
74 | biased_MNIST,ring,2.0,100,3
75 | biased_MNIST,ring,2.0,100,4
76 | biased_MNIST,ring,3.0,10,0
77 | biased_MNIST,ring,3.0,10,1
78 | biased_MNIST,ring,3.0,10,2
79 | biased_MNIST,ring,3.0,10,3
80 | biased_MNIST,ring,3.0,10,4
81 | biased_MNIST,ring,3.0,50,0
82 | biased_MNIST,ring,3.0,50,1
83 | biased_MNIST,ring,3.0,50,2
84 | biased_MNIST,ring,3.0,50,3
85 | biased_MNIST,ring,3.0,50,4
86 | biased_MNIST,ring,3.0,100,0
87 | biased_MNIST,ring,3.0,100,1
88 | biased_MNIST,ring,3.0,100,2
89 | biased_MNIST,ring,3.0,100,3
90 | biased_MNIST,ring,3.0,100,4
91 | biased_MNIST,chain,0.5,10,0
92 | biased_MNIST,chain,0.5,10,1
93 | biased_MNIST,chain,0.5,10,2
94 | biased_MNIST,chain,0.5,10,3
95 | biased_MNIST,chain,0.5,10,4
96 | biased_MNIST,chain,0.5,50,0
97 | biased_MNIST,chain,0.5,50,1
98 | biased_MNIST,chain,0.5,50,2
99 | biased_MNIST,chain,0.5,50,3
100 | biased_MNIST,chain,0.5,50,4
101 | biased_MNIST,chain,0.5,100,0
102 | biased_MNIST,chain,0.5,100,1
103 | biased_MNIST,chain,0.5,100,2
104 | biased_MNIST,chain,0.5,100,3
105 | biased_MNIST,chain,0.5,100,4
106 | biased_MNIST,chain,2.0,10,0
107 | biased_MNIST,chain,2.0,10,1
108 | biased_MNIST,chain,2.0,10,2
109 | biased_MNIST,chain,2.0,10,3
110 | biased_MNIST,chain,2.0,10,4
111 | biased_MNIST,chain,2.0,50,0
112 | biased_MNIST,chain,2.0,50,1
113 | biased_MNIST,chain,2.0,50,2
114 | biased_MNIST,chain,2.0,50,3
115 | biased_MNIST,chain,2.0,50,4
116 | biased_MNIST,chain,2.0,100,0
117 | biased_MNIST,chain,2.0,100,1
118 | biased_MNIST,chain,2.0,100,2
119 | biased_MNIST,chain,2.0,100,3
120 | biased_MNIST,chain,2.0,100,4
121 | biased_MNIST,chain,3.0,10,0
122 | biased_MNIST,chain,3.0,10,1
123 | biased_MNIST,chain,3.0,10,2
124 | biased_MNIST,chain,3.0,10,3
125 | biased_MNIST,chain,3.0,10,4
126 | biased_MNIST,chain,3.0,50,0
127 | biased_MNIST,chain,3.0,50,1
128 | biased_MNIST,chain,3.0,50,2
129 | biased_MNIST,chain,3.0,50,3
130 | biased_MNIST,chain,3.0,50,4
131 | biased_MNIST,chain,3.0,100,0
132 | biased_MNIST,chain,3.0,100,1
133 | biased_MNIST,chain,3.0,100,2
134 | biased_MNIST,chain,3.0,100,3
135 | biased_MNIST,chain,3.0,100,4
136 | biased_MNIST,fedavg,0.5,10,0
137 | biased_MNIST,fedavg,0.5,10,1
138 | biased_MNIST,fedavg,0.5,10,2
139 | biased_MNIST,fedavg,0.5,10,3
140 | biased_MNIST,fedavg,0.5,10,4
141 | biased_MNIST,fedavg,0.5,50,0
142 | biased_MNIST,fedavg,0.5,50,1
143 | biased_MNIST,fedavg,0.5,50,2
144 | biased_MNIST,fedavg,0.5,50,3
145 | biased_MNIST,fedavg,0.5,50,4
146 | biased_MNIST,fedavg,0.5,100,0
147 | biased_MNIST,fedavg,0.5,100,1
148 | biased_MNIST,fedavg,0.5,100,2
149 | biased_MNIST,fedavg,0.5,100,3
150 | biased_MNIST,fedavg,0.5,100,4
151 | biased_MNIST,fedavg,2.0,10,0
152 | biased_MNIST,fedavg,2.0,10,1
153 | biased_MNIST,fedavg,2.0,10,2
154 | biased_MNIST,fedavg,2.0,10,3
155 | biased_MNIST,fedavg,2.0,10,4
156 | biased_MNIST,fedavg,2.0,50,0
157 | biased_MNIST,fedavg,2.0,50,1
158 | biased_MNIST,fedavg,2.0,50,2
159 | biased_MNIST,fedavg,2.0,50,3
160 | biased_MNIST,fedavg,2.0,50,4
161 | biased_MNIST,fedavg,2.0,100,0
162 | biased_MNIST,fedavg,2.0,100,1
163 | biased_MNIST,fedavg,2.0,100,2
164 | biased_MNIST,fedavg,2.0,100,3
165 | biased_MNIST,fedavg,2.0,100,4
166 | biased_MNIST,fedavg,3.0,10,0
167 | biased_MNIST,fedavg,3.0,10,1
168 | biased_MNIST,fedavg,3.0,10,2
169 | biased_MNIST,fedavg,3.0,10,3
170 | biased_MNIST,fedavg,3.0,10,4
171 | biased_MNIST,fedavg,3.0,50,0
172 | biased_MNIST,fedavg,3.0,50,1
173 | biased_MNIST,fedavg,3.0,50,2
174 | biased_MNIST,fedavg,3.0,50,3
175 | biased_MNIST,fedavg,3.0,50,4
176 | biased_MNIST,fedavg,3.0,100,0
177 | biased_MNIST,fedavg,3.0,100,1
178 | biased_MNIST,fedavg,3.0,100,2
179 | biased_MNIST,fedavg,3.0,100,3
180 | biased_MNIST,fedavg,3.0,100,4
181 | biased_FMNIST,tree,0.5,10,0
182 | biased_FMNIST,tree,0.5,10,1
183 | biased_FMNIST,tree,0.5,10,2
184 | biased_FMNIST,tree,0.5,10,3
185 | biased_FMNIST,tree,0.5,10,4
186 | biased_FMNIST,tree,0.5,50,0
187 | biased_FMNIST,tree,0.5,50,1
188 | biased_FMNIST,tree,0.5,50,2
189 | biased_FMNIST,tree,0.5,50,3
190 | biased_FMNIST,tree,0.5,50,4
191 | biased_FMNIST,tree,0.5,100,0
192 | biased_FMNIST,tree,0.5,100,1
193 | biased_FMNIST,tree,0.5,100,2
194 | biased_FMNIST,tree,0.5,100,3
195 | biased_FMNIST,tree,0.5,100,4
196 | biased_FMNIST,tree,2.0,10,0
197 | biased_FMNIST,tree,2.0,10,1
198 | biased_FMNIST,tree,2.0,10,2
199 | biased_FMNIST,tree,2.0,10,3
200 | biased_FMNIST,tree,2.0,10,4
201 | biased_FMNIST,tree,2.0,50,0
202 | biased_FMNIST,tree,2.0,50,1
203 | biased_FMNIST,tree,2.0,50,2
204 | biased_FMNIST,tree,2.0,50,3
205 | biased_FMNIST,tree,2.0,50,4
206 | biased_FMNIST,tree,2.0,100,0
207 | biased_FMNIST,tree,2.0,100,1
208 | biased_FMNIST,tree,2.0,100,2
209 | biased_FMNIST,tree,2.0,100,3
210 | biased_FMNIST,tree,2.0,100,4
211 | biased_FMNIST,tree,3.0,10,0
212 | biased_FMNIST,tree,3.0,10,1
213 | biased_FMNIST,tree,3.0,10,2
214 | biased_FMNIST,tree,3.0,10,3
215 | biased_FMNIST,tree,3.0,10,4
216 | biased_FMNIST,tree,3.0,50,0
217 | biased_FMNIST,tree,3.0,50,1
218 | biased_FMNIST,tree,3.0,50,2
219 | biased_FMNIST,tree,3.0,50,3
220 | biased_FMNIST,tree,3.0,50,4
221 | biased_FMNIST,tree,3.0,100,0
222 | biased_FMNIST,tree,3.0,100,1
223 | biased_FMNIST,tree,3.0,100,2
224 | biased_FMNIST,tree,3.0,100,3
225 | biased_FMNIST,tree,3.0,100,4
226 | biased_FMNIST,ring,0.5,10,0
227 | biased_FMNIST,ring,0.5,10,1
228 | biased_FMNIST,ring,0.5,10,2
229 | biased_FMNIST,ring,0.5,10,3
230 | biased_FMNIST,ring,0.5,10,4
231 | biased_FMNIST,ring,0.5,50,0
232 | biased_FMNIST,ring,0.5,50,1
233 | biased_FMNIST,ring,0.5,50,2
234 | biased_FMNIST,ring,0.5,50,3
235 | biased_FMNIST,ring,0.5,50,4
236 | biased_FMNIST,ring,0.5,100,0
237 | biased_FMNIST,ring,0.5,100,1
238 | biased_FMNIST,ring,0.5,100,2
239 | biased_FMNIST,ring,0.5,100,3
240 | biased_FMNIST,ring,0.5,100,4
241 | biased_FMNIST,ring,2.0,10,0
242 | biased_FMNIST,ring,2.0,10,1
243 | biased_FMNIST,ring,2.0,10,2
244 | biased_FMNIST,ring,2.0,10,3
245 | biased_FMNIST,ring,2.0,10,4
246 | biased_FMNIST,ring,2.0,50,0
247 | biased_FMNIST,ring,2.0,50,1
248 | biased_FMNIST,ring,2.0,50,2
249 | biased_FMNIST,ring,2.0,50,3
250 | biased_FMNIST,ring,2.0,50,4
251 | biased_FMNIST,ring,2.0,100,0
252 | biased_FMNIST,ring,2.0,100,1
253 | biased_FMNIST,ring,2.0,100,2
254 | biased_FMNIST,ring,2.0,100,3
255 | biased_FMNIST,ring,2.0,100,4
256 | biased_FMNIST,ring,3.0,10,0
257 | biased_FMNIST,ring,3.0,10,1
258 | biased_FMNIST,ring,3.0,10,2
259 | biased_FMNIST,ring,3.0,10,3
260 | biased_FMNIST,ring,3.0,10,4
261 | biased_FMNIST,ring,3.0,50,0
262 | biased_FMNIST,ring,3.0,50,1
263 | biased_FMNIST,ring,3.0,50,2
264 | biased_FMNIST,ring,3.0,50,3
265 | biased_FMNIST,ring,3.0,50,4
266 | biased_FMNIST,ring,3.0,100,0
267 | biased_FMNIST,ring,3.0,100,1
268 | biased_FMNIST,ring,3.0,100,2
269 | biased_FMNIST,ring,3.0,100,3
270 | biased_FMNIST,ring,3.0,100,4
271 | biased_FMNIST,chain,0.5,10,0
272 | biased_FMNIST,chain,0.5,10,1
273 | biased_FMNIST,chain,0.5,10,2
274 | biased_FMNIST,chain,0.5,10,3
275 | biased_FMNIST,chain,0.5,10,4
276 | biased_FMNIST,chain,0.5,50,0
277 | biased_FMNIST,chain,0.5,50,1
278 | biased_FMNIST,chain,0.5,50,2
279 | biased_FMNIST,chain,0.5,50,3
280 | biased_FMNIST,chain,0.5,50,4
281 | biased_FMNIST,chain,0.5,100,0
282 | biased_FMNIST,chain,0.5,100,1
283 | biased_FMNIST,chain,0.5,100,2
284 | biased_FMNIST,chain,0.5,100,3
285 | biased_FMNIST,chain,0.5,100,4
286 | biased_FMNIST,chain,2.0,10,0
287 | biased_FMNIST,chain,2.0,10,1
288 | biased_FMNIST,chain,2.0,10,2
289 | biased_FMNIST,chain,2.0,10,3
290 | biased_FMNIST,chain,2.0,10,4
291 | biased_FMNIST,chain,2.0,50,0
292 | biased_FMNIST,chain,2.0,50,1
293 | biased_FMNIST,chain,2.0,50,2
294 | biased_FMNIST,chain,2.0,50,3
295 | biased_FMNIST,chain,2.0,50,4
296 | biased_FMNIST,chain,2.0,100,0
297 | biased_FMNIST,chain,2.0,100,1
298 | biased_FMNIST,chain,2.0,100,2
299 | biased_FMNIST,chain,2.0,100,3
300 | biased_FMNIST,chain,2.0,100,4
301 | biased_FMNIST,chain,3.0,10,0
302 | biased_FMNIST,chain,3.0,10,1
303 | biased_FMNIST,chain,3.0,10,2
304 | biased_FMNIST,chain,3.0,10,3
305 | biased_FMNIST,chain,3.0,10,4
306 | biased_FMNIST,chain,3.0,50,0
307 | biased_FMNIST,chain,3.0,50,1
308 | biased_FMNIST,chain,3.0,50,2
309 | biased_FMNIST,chain,3.0,50,3
310 | biased_FMNIST,chain,3.0,50,4
311 | biased_FMNIST,chain,3.0,100,0
312 | biased_FMNIST,chain,3.0,100,1
313 | biased_FMNIST,chain,3.0,100,2
314 | biased_FMNIST,chain,3.0,100,3
315 | biased_FMNIST,chain,3.0,100,4
316 | biased_FMNIST,fedavg,0.5,10,0
317 | biased_FMNIST,fedavg,0.5,10,1
318 | biased_FMNIST,fedavg,0.5,10,2
319 | biased_FMNIST,fedavg,0.5,10,3
320 | biased_FMNIST,fedavg,0.5,10,4
321 | biased_FMNIST,fedavg,0.5,50,0
322 | biased_FMNIST,fedavg,0.5,50,1
323 | biased_FMNIST,fedavg,0.5,50,2
324 | biased_FMNIST,fedavg,0.5,50,3
325 | biased_FMNIST,fedavg,0.5,50,4
326 | biased_FMNIST,fedavg,0.5,100,0
327 | biased_FMNIST,fedavg,0.5,100,1
328 | biased_FMNIST,fedavg,0.5,100,2
329 | biased_FMNIST,fedavg,0.5,100,3
330 | biased_FMNIST,fedavg,0.5,100,4
331 | biased_FMNIST,fedavg,2.0,10,0
332 | biased_FMNIST,fedavg,2.0,10,1
333 | biased_FMNIST,fedavg,2.0,10,2
334 | biased_FMNIST,fedavg,2.0,10,3
335 | biased_FMNIST,fedavg,2.0,10,4
336 | biased_FMNIST,fedavg,2.0,50,0
337 | biased_FMNIST,fedavg,2.0,50,1
338 | biased_FMNIST,fedavg,2.0,50,2
339 | biased_FMNIST,fedavg,2.0,50,3
340 | biased_FMNIST,fedavg,2.0,50,4
341 | biased_FMNIST,fedavg,2.0,100,0
342 | biased_FMNIST,fedavg,2.0,100,1
343 | biased_FMNIST,fedavg,2.0,100,2
344 | biased_FMNIST,fedavg,2.0,100,3
345 | biased_FMNIST,fedavg,2.0,100,4
346 | biased_FMNIST,fedavg,3.0,10,0
347 | biased_FMNIST,fedavg,3.0,10,1
348 | biased_FMNIST,fedavg,3.0,10,2
349 | biased_FMNIST,fedavg,3.0,10,3
350 | biased_FMNIST,fedavg,3.0,10,4
351 | biased_FMNIST,fedavg,3.0,50,0
352 | biased_FMNIST,fedavg,3.0,50,1
353 | biased_FMNIST,fedavg,3.0,50,2
354 | biased_FMNIST,fedavg,3.0,50,3
355 | biased_FMNIST,fedavg,3.0,50,4
356 | biased_FMNIST,fedavg,3.0,100,0
357 | biased_FMNIST,fedavg,3.0,100,1
358 | biased_FMNIST,fedavg,3.0,100,2
359 | biased_FMNIST,fedavg,3.0,100,3
360 | biased_FMNIST,fedavg,3.0,100,4
361 | EMNIST,tree,0.5,10,0
362 | EMNIST,tree,0.5,10,1
363 | EMNIST,tree,0.5,10,2
364 | EMNIST,tree,0.5,10,3
365 | EMNIST,tree,0.5,10,4
366 | EMNIST,tree,0.5,50,0
367 | EMNIST,tree,0.5,50,1
368 | EMNIST,tree,0.5,50,2
369 | EMNIST,tree,0.5,50,3
370 | EMNIST,tree,0.5,50,4
371 | EMNIST,tree,0.5,100,0
372 | EMNIST,tree,0.5,100,1
373 | EMNIST,tree,0.5,100,2
374 | EMNIST,tree,0.5,100,3
375 | EMNIST,tree,0.5,100,4
376 | EMNIST,tree,2.0,10,0
377 | EMNIST,tree,2.0,10,1
378 | EMNIST,tree,2.0,10,2
379 | EMNIST,tree,2.0,10,3
380 | EMNIST,tree,2.0,10,4
381 | EMNIST,tree,2.0,50,0
382 | EMNIST,tree,2.0,50,1
383 | EMNIST,tree,2.0,50,2
384 | EMNIST,tree,2.0,50,3
385 | EMNIST,tree,2.0,50,4
386 | EMNIST,tree,2.0,100,0
387 | EMNIST,tree,2.0,100,1
388 | EMNIST,tree,2.0,100,2
389 | EMNIST,tree,2.0,100,3
390 | EMNIST,tree,2.0,100,4
391 | EMNIST,tree,3.0,10,0
392 | EMNIST,tree,3.0,10,1
393 | EMNIST,tree,3.0,10,2
394 | EMNIST,tree,3.0,10,3
395 | EMNIST,tree,3.0,10,4
396 | EMNIST,tree,3.0,50,0
397 | EMNIST,tree,3.0,50,1
398 | EMNIST,tree,3.0,50,2
399 | EMNIST,tree,3.0,50,3
400 | EMNIST,tree,3.0,50,4
401 | EMNIST,tree,3.0,100,0
402 | EMNIST,tree,3.0,100,1
403 | EMNIST,tree,3.0,100,2
404 | EMNIST,tree,3.0,100,3
405 | EMNIST,tree,3.0,100,4
406 | EMNIST,ring,0.5,10,0
407 | EMNIST,ring,0.5,10,1
408 | EMNIST,ring,0.5,10,2
409 | EMNIST,ring,0.5,10,3
410 | EMNIST,ring,0.5,10,4
411 | EMNIST,ring,0.5,50,0
412 | EMNIST,ring,0.5,50,1
413 | EMNIST,ring,0.5,50,2
414 | EMNIST,ring,0.5,50,3
415 | EMNIST,ring,0.5,50,4
416 | EMNIST,ring,0.5,100,0
417 | EMNIST,ring,0.5,100,1
418 | EMNIST,ring,0.5,100,2
419 | EMNIST,ring,0.5,100,3
420 | EMNIST,ring,0.5,100,4
421 | EMNIST,ring,2.0,10,0
422 | EMNIST,ring,2.0,10,1
423 | EMNIST,ring,2.0,10,2
424 | EMNIST,ring,2.0,10,3
425 | EMNIST,ring,2.0,10,4
426 | EMNIST,ring,2.0,50,0
427 | EMNIST,ring,2.0,50,1
428 | EMNIST,ring,2.0,50,2
429 | EMNIST,ring,2.0,50,3
430 | EMNIST,ring,2.0,50,4
431 | EMNIST,ring,2.0,100,0
432 | EMNIST,ring,2.0,100,1
433 | EMNIST,ring,2.0,100,2
434 | EMNIST,ring,2.0,100,3
435 | EMNIST,ring,2.0,100,4
436 | EMNIST,ring,3.0,10,0
437 | EMNIST,ring,3.0,10,1
438 | EMNIST,ring,3.0,10,2
439 | EMNIST,ring,3.0,10,3
440 | EMNIST,ring,3.0,10,4
441 | EMNIST,ring,3.0,50,0
442 | EMNIST,ring,3.0,50,1
443 | EMNIST,ring,3.0,50,2
444 | EMNIST,ring,3.0,50,3
445 | EMNIST,ring,3.0,50,4
446 | EMNIST,ring,3.0,100,0
447 | EMNIST,ring,3.0,100,1
448 | EMNIST,ring,3.0,100,2
449 | EMNIST,ring,3.0,100,3
450 | EMNIST,ring,3.0,100,4
451 | EMNIST,chain,0.5,10,0
452 | EMNIST,chain,0.5,10,1
453 | EMNIST,chain,0.5,10,2
454 | EMNIST,chain,0.5,10,3
455 | EMNIST,chain,0.5,10,4
456 | EMNIST,chain,0.5,50,0
457 | EMNIST,chain,0.5,50,1
458 | EMNIST,chain,0.5,50,2
459 | EMNIST,chain,0.5,50,3
460 | EMNIST,chain,0.5,50,4
461 | EMNIST,chain,0.5,100,0
462 | EMNIST,chain,0.5,100,1
463 | EMNIST,chain,0.5,100,2
464 | EMNIST,chain,0.5,100,3
465 | EMNIST,chain,0.5,100,4
466 | EMNIST,chain,2.0,10,0
467 | EMNIST,chain,2.0,10,1
468 | EMNIST,chain,2.0,10,2
469 | EMNIST,chain,2.0,10,3
470 | EMNIST,chain,2.0,10,4
471 | EMNIST,chain,2.0,50,0
472 | EMNIST,chain,2.0,50,1
473 | EMNIST,chain,2.0,50,2
474 | EMNIST,chain,2.0,50,3
475 | EMNIST,chain,2.0,50,4
476 | EMNIST,chain,2.0,100,0
477 | EMNIST,chain,2.0,100,1
478 | EMNIST,chain,2.0,100,2
479 | EMNIST,chain,2.0,100,3
480 | EMNIST,chain,2.0,100,4
481 | EMNIST,chain,3.0,10,0
482 | EMNIST,chain,3.0,10,1
483 | EMNIST,chain,3.0,10,2
484 | EMNIST,chain,3.0,10,3
485 | EMNIST,chain,3.0,10,4
486 | EMNIST,chain,3.0,50,0
487 | EMNIST,chain,3.0,50,1
488 | EMNIST,chain,3.0,50,2
489 | EMNIST,chain,3.0,50,3
490 | EMNIST,chain,3.0,50,4
491 | EMNIST,chain,3.0,100,0
492 | EMNIST,chain,3.0,100,1
493 | EMNIST,chain,3.0,100,2
494 | EMNIST,chain,3.0,100,3
495 | EMNIST,chain,3.0,100,4
496 | EMNIST,fedavg,0.5,10,0
497 | EMNIST,fedavg,0.5,10,1
498 | EMNIST,fedavg,0.5,10,2
499 | EMNIST,fedavg,0.5,10,3
500 | EMNIST,fedavg,0.5,10,4
501 | EMNIST,fedavg,0.5,50,0
502 | EMNIST,fedavg,0.5,50,1
503 | EMNIST,fedavg,0.5,50,2
504 | EMNIST,fedavg,0.5,50,3
505 | EMNIST,fedavg,0.5,50,4
506 | EMNIST,fedavg,0.5,100,0
507 | EMNIST,fedavg,0.5,100,1
508 | EMNIST,fedavg,0.5,100,2
509 | EMNIST,fedavg,0.5,100,3
510 | EMNIST,fedavg,0.5,100,4
511 | EMNIST,fedavg,2.0,10,0
512 | EMNIST,fedavg,2.0,10,1
513 | EMNIST,fedavg,2.0,10,2
514 | EMNIST,fedavg,2.0,10,3
515 | EMNIST,fedavg,2.0,10,4
516 | EMNIST,fedavg,2.0,50,0
517 | EMNIST,fedavg,2.0,50,1
518 | EMNIST,fedavg,2.0,50,2
519 | EMNIST,fedavg,2.0,50,3
520 | EMNIST,fedavg,2.0,50,4
521 | EMNIST,fedavg,2.0,100,0
522 | EMNIST,fedavg,2.0,100,1
523 | EMNIST,fedavg,2.0,100,2
524 | EMNIST,fedavg,2.0,100,3
525 | EMNIST,fedavg,2.0,100,4
526 | EMNIST,fedavg,3.0,10,0
527 | EMNIST,fedavg,3.0,10,1
528 | EMNIST,fedavg,3.0,10,2
529 | EMNIST,fedavg,3.0,10,3
530 | EMNIST,fedavg,3.0,10,4
531 | EMNIST,fedavg,3.0,50,0
532 | EMNIST,fedavg,3.0,50,1
533 | EMNIST,fedavg,3.0,50,2
534 | EMNIST,fedavg,3.0,50,3
535 | EMNIST,fedavg,3.0,50,4
536 | EMNIST,fedavg,3.0,100,0
537 | EMNIST,fedavg,3.0,100,1
538 | EMNIST,fedavg,3.0,100,2
539 | EMNIST,fedavg,3.0,100,3
540 | EMNIST,fedavg,3.0,100,4
541 |
--------------------------------------------------------------------------------
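run_misc_private_exp.in above follows the same five-column layout with the biased_MNIST, biased_FMNIST, and EMNIST variants (540 rows). These .in files pair with the .submit files in the same directories in the usual scheduler pattern: one row per queued job, looked up by the job's index. The argv convention below is an assumption; the run_*.py entry points define the real interface.

import sys

def job_args(in_file, index):
    # Return the comma-separated fields of row `index` (0-based).
    with open(in_file) as f:
        rows = [l.strip() for l in f if l.strip()]
    return rows[index].split(',')

if __name__ == '__main__':
    dataset, topology, level, size, seed = job_args(
        'run_misc_private_exp.in', int(sys.argv[1]))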
/code/run_non_private.in:
--------------------------------------------------------------------------------
1 | MNIST,tree,0,16,10,0
2 | MNIST,tree,0,16,10,1
3 | MNIST,tree,0,16,10,2
4 | MNIST,tree,0,16,10,3
5 | MNIST,tree,0,16,10,4
6 | MNIST,tree,0,16,50,0
7 | MNIST,tree,0,16,50,1
8 | MNIST,tree,0,16,50,2
9 | MNIST,tree,0,16,50,3
10 | MNIST,tree,0,16,50,4
11 | MNIST,tree,0,16,100,0
12 | MNIST,tree,0,16,100,1
13 | MNIST,tree,0,16,100,2
14 | MNIST,tree,0,16,100,3
15 | MNIST,tree,0,16,100,4
16 | MNIST,tree,0,32,10,0
17 | MNIST,tree,0,32,10,1
18 | MNIST,tree,0,32,10,2
19 | MNIST,tree,0,32,10,3
20 | MNIST,tree,0,32,10,4
21 | MNIST,tree,0,32,50,0
22 | MNIST,tree,0,32,50,1
23 | MNIST,tree,0,32,50,2
24 | MNIST,tree,0,32,50,3
25 | MNIST,tree,0,32,50,4
26 | MNIST,tree,0,32,100,0
27 | MNIST,tree,0,32,100,1
28 | MNIST,tree,0,32,100,2
29 | MNIST,tree,0,32,100,3
30 | MNIST,tree,0,32,100,4
31 | MNIST,tree,0,64,10,0
32 | MNIST,tree,0,64,10,1
33 | MNIST,tree,0,64,10,2
34 | MNIST,tree,0,64,10,3
35 | MNIST,tree,0,64,10,4
36 | MNIST,tree,0,64,50,0
37 | MNIST,tree,0,64,50,1
38 | MNIST,tree,0,64,50,2
39 | MNIST,tree,0,64,50,3
40 | MNIST,tree,0,64,50,4
41 | MNIST,tree,0,64,100,0
42 | MNIST,tree,0,64,100,1
43 | MNIST,tree,0,64,100,2
44 | MNIST,tree,0,64,100,3
45 | MNIST,tree,0,64,100,4
46 | MNIST,tree,1,16,10,0
47 | MNIST,tree,1,16,10,1
48 | MNIST,tree,1,16,10,2
49 | MNIST,tree,1,16,10,3
50 | MNIST,tree,1,16,10,4
51 | MNIST,tree,1,16,50,0
52 | MNIST,tree,1,16,50,1
53 | MNIST,tree,1,16,50,2
54 | MNIST,tree,1,16,50,3
55 | MNIST,tree,1,16,50,4
56 | MNIST,tree,1,16,100,0
57 | MNIST,tree,1,16,100,1
58 | MNIST,tree,1,16,100,2
59 | MNIST,tree,1,16,100,3
60 | MNIST,tree,1,16,100,4
61 | MNIST,tree,1,32,10,0
62 | MNIST,tree,1,32,10,1
63 | MNIST,tree,1,32,10,2
64 | MNIST,tree,1,32,10,3
65 | MNIST,tree,1,32,10,4
66 | MNIST,tree,1,32,50,0
67 | MNIST,tree,1,32,50,1
68 | MNIST,tree,1,32,50,2
69 | MNIST,tree,1,32,50,3
70 | MNIST,tree,1,32,50,4
71 | MNIST,tree,1,32,100,0
72 | MNIST,tree,1,32,100,1
73 | MNIST,tree,1,32,100,2
74 | MNIST,tree,1,32,100,3
75 | MNIST,tree,1,32,100,4
76 | MNIST,tree,1,64,10,0
77 | MNIST,tree,1,64,10,1
78 | MNIST,tree,1,64,10,2
79 | MNIST,tree,1,64,10,3
80 | MNIST,tree,1,64,10,4
81 | MNIST,tree,1,64,50,0
82 | MNIST,tree,1,64,50,1
83 | MNIST,tree,1,64,50,2
84 | MNIST,tree,1,64,50,3
85 | MNIST,tree,1,64,50,4
86 | MNIST,tree,1,64,100,0
87 | MNIST,tree,1,64,100,1
88 | MNIST,tree,1,64,100,2
89 | MNIST,tree,1,64,100,3
90 | MNIST,tree,1,64,100,4
91 | MNIST,ring,0,16,10,0
92 | MNIST,ring,0,16,10,1
93 | MNIST,ring,0,16,10,2
94 | MNIST,ring,0,16,10,3
95 | MNIST,ring,0,16,10,4
96 | MNIST,ring,0,16,50,0
97 | MNIST,ring,0,16,50,1
98 | MNIST,ring,0,16,50,2
99 | MNIST,ring,0,16,50,3
100 | MNIST,ring,0,16,50,4
101 | MNIST,ring,0,16,100,0
102 | MNIST,ring,0,16,100,1
103 | MNIST,ring,0,16,100,2
104 | MNIST,ring,0,16,100,3
105 | MNIST,ring,0,16,100,4
106 | MNIST,ring,0,32,10,0
107 | MNIST,ring,0,32,10,1
108 | MNIST,ring,0,32,10,2
109 | MNIST,ring,0,32,10,3
110 | MNIST,ring,0,32,10,4
111 | MNIST,ring,0,32,50,0
112 | MNIST,ring,0,32,50,1
113 | MNIST,ring,0,32,50,2
114 | MNIST,ring,0,32,50,3
115 | MNIST,ring,0,32,50,4
116 | MNIST,ring,0,32,100,0
117 | MNIST,ring,0,32,100,1
118 | MNIST,ring,0,32,100,2
119 | MNIST,ring,0,32,100,3
120 | MNIST,ring,0,32,100,4
121 | MNIST,ring,0,64,10,0
122 | MNIST,ring,0,64,10,1
123 | MNIST,ring,0,64,10,2
124 | MNIST,ring,0,64,10,3
125 | MNIST,ring,0,64,10,4
126 | MNIST,ring,0,64,50,0
127 | MNIST,ring,0,64,50,1
128 | MNIST,ring,0,64,50,2
129 | MNIST,ring,0,64,50,3
130 | MNIST,ring,0,64,50,4
131 | MNIST,ring,0,64,100,0
132 | MNIST,ring,0,64,100,1
133 | MNIST,ring,0,64,100,2
134 | MNIST,ring,0,64,100,3
135 | MNIST,ring,0,64,100,4
136 | MNIST,ring,1,16,10,0
137 | MNIST,ring,1,16,10,1
138 | MNIST,ring,1,16,10,2
139 | MNIST,ring,1,16,10,3
140 | MNIST,ring,1,16,10,4
141 | MNIST,ring,1,16,50,0
142 | MNIST,ring,1,16,50,1
143 | MNIST,ring,1,16,50,2
144 | MNIST,ring,1,16,50,3
145 | MNIST,ring,1,16,50,4
146 | MNIST,ring,1,16,100,0
147 | MNIST,ring,1,16,100,1
148 | MNIST,ring,1,16,100,2
149 | MNIST,ring,1,16,100,3
150 | MNIST,ring,1,16,100,4
151 | MNIST,ring,1,32,10,0
152 | MNIST,ring,1,32,10,1
153 | MNIST,ring,1,32,10,2
154 | MNIST,ring,1,32,10,3
155 | MNIST,ring,1,32,10,4
156 | MNIST,ring,1,32,50,0
157 | MNIST,ring,1,32,50,1
158 | MNIST,ring,1,32,50,2
159 | MNIST,ring,1,32,50,3
160 | MNIST,ring,1,32,50,4
161 | MNIST,ring,1,32,100,0
162 | MNIST,ring,1,32,100,1
163 | MNIST,ring,1,32,100,2
164 | MNIST,ring,1,32,100,3
165 | MNIST,ring,1,32,100,4
166 | MNIST,ring,1,64,10,0
167 | MNIST,ring,1,64,10,1
168 | MNIST,ring,1,64,10,2
169 | MNIST,ring,1,64,10,3
170 | MNIST,ring,1,64,10,4
171 | MNIST,ring,1,64,50,0
172 | MNIST,ring,1,64,50,1
173 | MNIST,ring,1,64,50,2
174 | MNIST,ring,1,64,50,3
175 | MNIST,ring,1,64,50,4
176 | MNIST,ring,1,64,100,0
177 | MNIST,ring,1,64,100,1
178 | MNIST,ring,1,64,100,2
179 | MNIST,ring,1,64,100,3
180 | MNIST,ring,1,64,100,4
181 | MNIST,chain,0,16,10,0
182 | MNIST,chain,0,16,10,1
183 | MNIST,chain,0,16,10,2
184 | MNIST,chain,0,16,10,3
185 | MNIST,chain,0,16,10,4
186 | MNIST,chain,0,16,50,0
187 | MNIST,chain,0,16,50,1
188 | MNIST,chain,0,16,50,2
189 | MNIST,chain,0,16,50,3
190 | MNIST,chain,0,16,50,4
191 | MNIST,chain,0,16,100,0
192 | MNIST,chain,0,16,100,1
193 | MNIST,chain,0,16,100,2
194 | MNIST,chain,0,16,100,3
195 | MNIST,chain,0,16,100,4
196 | MNIST,chain,0,32,10,0
197 | MNIST,chain,0,32,10,1
198 | MNIST,chain,0,32,10,2
199 | MNIST,chain,0,32,10,3
200 | MNIST,chain,0,32,10,4
201 | MNIST,chain,0,32,50,0
202 | MNIST,chain,0,32,50,1
203 | MNIST,chain,0,32,50,2
204 | MNIST,chain,0,32,50,3
205 | MNIST,chain,0,32,50,4
206 | MNIST,chain,0,32,100,0
207 | MNIST,chain,0,32,100,1
208 | MNIST,chain,0,32,100,2
209 | MNIST,chain,0,32,100,3
210 | MNIST,chain,0,32,100,4
211 | MNIST,chain,0,64,10,0
212 | MNIST,chain,0,64,10,1
213 | MNIST,chain,0,64,10,2
214 | MNIST,chain,0,64,10,3
215 | MNIST,chain,0,64,10,4
216 | MNIST,chain,0,64,50,0
217 | MNIST,chain,0,64,50,1
218 | MNIST,chain,0,64,50,2
219 | MNIST,chain,0,64,50,3
220 | MNIST,chain,0,64,50,4
221 | MNIST,chain,0,64,100,0
222 | MNIST,chain,0,64,100,1
223 | MNIST,chain,0,64,100,2
224 | MNIST,chain,0,64,100,3
225 | MNIST,chain,0,64,100,4
226 | MNIST,chain,1,16,10,0
227 | MNIST,chain,1,16,10,1
228 | MNIST,chain,1,16,10,2
229 | MNIST,chain,1,16,10,3
230 | MNIST,chain,1,16,10,4
231 | MNIST,chain,1,16,50,0
232 | MNIST,chain,1,16,50,1
233 | MNIST,chain,1,16,50,2
234 | MNIST,chain,1,16,50,3
235 | MNIST,chain,1,16,50,4
236 | MNIST,chain,1,16,100,0
237 | MNIST,chain,1,16,100,1
238 | MNIST,chain,1,16,100,2
239 | MNIST,chain,1,16,100,3
240 | MNIST,chain,1,16,100,4
241 | MNIST,chain,1,32,10,0
242 | MNIST,chain,1,32,10,1
243 | MNIST,chain,1,32,10,2
244 | MNIST,chain,1,32,10,3
245 | MNIST,chain,1,32,10,4
246 | MNIST,chain,1,32,50,0
247 | MNIST,chain,1,32,50,1
248 | MNIST,chain,1,32,50,2
249 | MNIST,chain,1,32,50,3
250 | MNIST,chain,1,32,50,4
251 | MNIST,chain,1,32,100,0
252 | MNIST,chain,1,32,100,1
253 | MNIST,chain,1,32,100,2
254 | MNIST,chain,1,32,100,3
255 | MNIST,chain,1,32,100,4
256 | MNIST,chain,1,64,10,0
257 | MNIST,chain,1,64,10,1
258 | MNIST,chain,1,64,10,2
259 | MNIST,chain,1,64,10,3
260 | MNIST,chain,1,64,10,4
261 | MNIST,chain,1,64,50,0
262 | MNIST,chain,1,64,50,1
263 | MNIST,chain,1,64,50,2
264 | MNIST,chain,1,64,50,3
265 | MNIST,chain,1,64,50,4
266 | MNIST,chain,1,64,100,0
267 | MNIST,chain,1,64,100,1
268 | MNIST,chain,1,64,100,2
269 | MNIST,chain,1,64,100,3
270 | MNIST,chain,1,64,100,4
271 | MNIST,fedavg,0,16,10,0
272 | MNIST,fedavg,0,16,10,1
273 | MNIST,fedavg,0,16,10,2
274 | MNIST,fedavg,0,16,10,3
275 | MNIST,fedavg,0,16,10,4
276 | MNIST,fedavg,0,16,50,0
277 | MNIST,fedavg,0,16,50,1
278 | MNIST,fedavg,0,16,50,2
279 | MNIST,fedavg,0,16,50,3
280 | MNIST,fedavg,0,16,50,4
281 | MNIST,fedavg,0,16,100,0
282 | MNIST,fedavg,0,16,100,1
283 | MNIST,fedavg,0,16,100,2
284 | MNIST,fedavg,0,16,100,3
285 | MNIST,fedavg,0,16,100,4
286 | MNIST,fedavg,0,32,10,0
287 | MNIST,fedavg,0,32,10,1
288 | MNIST,fedavg,0,32,10,2
289 | MNIST,fedavg,0,32,10,3
290 | MNIST,fedavg,0,32,10,4
291 | MNIST,fedavg,0,32,50,0
292 | MNIST,fedavg,0,32,50,1
293 | MNIST,fedavg,0,32,50,2
294 | MNIST,fedavg,0,32,50,3
295 | MNIST,fedavg,0,32,50,4
296 | MNIST,fedavg,0,32,100,0
297 | MNIST,fedavg,0,32,100,1
298 | MNIST,fedavg,0,32,100,2
299 | MNIST,fedavg,0,32,100,3
300 | MNIST,fedavg,0,32,100,4
301 | MNIST,fedavg,0,64,10,0
302 | MNIST,fedavg,0,64,10,1
303 | MNIST,fedavg,0,64,10,2
304 | MNIST,fedavg,0,64,10,3
305 | MNIST,fedavg,0,64,10,4
306 | MNIST,fedavg,0,64,50,0
307 | MNIST,fedavg,0,64,50,1
308 | MNIST,fedavg,0,64,50,2
309 | MNIST,fedavg,0,64,50,3
310 | MNIST,fedavg,0,64,50,4
311 | MNIST,fedavg,0,64,100,0
312 | MNIST,fedavg,0,64,100,1
313 | MNIST,fedavg,0,64,100,2
314 | MNIST,fedavg,0,64,100,3
315 | MNIST,fedavg,0,64,100,4
316 | MNIST,fedavg,1,16,10,0
317 | MNIST,fedavg,1,16,10,1
318 | MNIST,fedavg,1,16,10,2
319 | MNIST,fedavg,1,16,10,3
320 | MNIST,fedavg,1,16,10,4
321 | MNIST,fedavg,1,16,50,0
322 | MNIST,fedavg,1,16,50,1
323 | MNIST,fedavg,1,16,50,2
324 | MNIST,fedavg,1,16,50,3
325 | MNIST,fedavg,1,16,50,4
326 | MNIST,fedavg,1,16,100,0
327 | MNIST,fedavg,1,16,100,1
328 | MNIST,fedavg,1,16,100,2
329 | MNIST,fedavg,1,16,100,3
330 | MNIST,fedavg,1,16,100,4
331 | MNIST,fedavg,1,32,10,0
332 | MNIST,fedavg,1,32,10,1
333 | MNIST,fedavg,1,32,10,2
334 | MNIST,fedavg,1,32,10,3
335 | MNIST,fedavg,1,32,10,4
336 | MNIST,fedavg,1,32,50,0
337 | MNIST,fedavg,1,32,50,1
338 | MNIST,fedavg,1,32,50,2
339 | MNIST,fedavg,1,32,50,3
340 | MNIST,fedavg,1,32,50,4
341 | MNIST,fedavg,1,32,100,0
342 | MNIST,fedavg,1,32,100,1
343 | MNIST,fedavg,1,32,100,2
344 | MNIST,fedavg,1,32,100,3
345 | MNIST,fedavg,1,32,100,4
346 | MNIST,fedavg,1,64,10,0
347 | MNIST,fedavg,1,64,10,1
348 | MNIST,fedavg,1,64,10,2
349 | MNIST,fedavg,1,64,10,3
350 | MNIST,fedavg,1,64,10,4
351 | MNIST,fedavg,1,64,50,0
352 | MNIST,fedavg,1,64,50,1
353 | MNIST,fedavg,1,64,50,2
354 | MNIST,fedavg,1,64,50,3
355 | MNIST,fedavg,1,64,50,4
356 | MNIST,fedavg,1,64,100,0
357 | MNIST,fedavg,1,64,100,1
358 | MNIST,fedavg,1,64,100,2
359 | MNIST,fedavg,1,64,100,3
360 | MNIST,fedavg,1,64,100,4
361 | FMNIST,tree,0,16,10,0
362 | FMNIST,tree,0,16,10,1
363 | FMNIST,tree,0,16,10,2
364 | FMNIST,tree,0,16,10,3
365 | FMNIST,tree,0,16,10,4
366 | FMNIST,tree,0,16,50,0
367 | FMNIST,tree,0,16,50,1
368 | FMNIST,tree,0,16,50,2
369 | FMNIST,tree,0,16,50,3
370 | FMNIST,tree,0,16,50,4
371 | FMNIST,tree,0,16,100,0
372 | FMNIST,tree,0,16,100,1
373 | FMNIST,tree,0,16,100,2
374 | FMNIST,tree,0,16,100,3
375 | FMNIST,tree,0,16,100,4
376 | FMNIST,tree,0,32,10,0
377 | FMNIST,tree,0,32,10,1
378 | FMNIST,tree,0,32,10,2
379 | FMNIST,tree,0,32,10,3
380 | FMNIST,tree,0,32,10,4
381 | FMNIST,tree,0,32,50,0
382 | FMNIST,tree,0,32,50,1
383 | FMNIST,tree,0,32,50,2
384 | FMNIST,tree,0,32,50,3
385 | FMNIST,tree,0,32,50,4
386 | FMNIST,tree,0,32,100,0
387 | FMNIST,tree,0,32,100,1
388 | FMNIST,tree,0,32,100,2
389 | FMNIST,tree,0,32,100,3
390 | FMNIST,tree,0,32,100,4
391 | FMNIST,tree,0,64,10,0
392 | FMNIST,tree,0,64,10,1
393 | FMNIST,tree,0,64,10,2
394 | FMNIST,tree,0,64,10,3
395 | FMNIST,tree,0,64,10,4
396 | FMNIST,tree,0,64,50,0
397 | FMNIST,tree,0,64,50,1
398 | FMNIST,tree,0,64,50,2
399 | FMNIST,tree,0,64,50,3
400 | FMNIST,tree,0,64,50,4
401 | FMNIST,tree,0,64,100,0
402 | FMNIST,tree,0,64,100,1
403 | FMNIST,tree,0,64,100,2
404 | FMNIST,tree,0,64,100,3
405 | FMNIST,tree,0,64,100,4
406 | FMNIST,tree,1,16,10,0
407 | FMNIST,tree,1,16,10,1
408 | FMNIST,tree,1,16,10,2
409 | FMNIST,tree,1,16,10,3
410 | FMNIST,tree,1,16,10,4
411 | FMNIST,tree,1,16,50,0
412 | FMNIST,tree,1,16,50,1
413 | FMNIST,tree,1,16,50,2
414 | FMNIST,tree,1,16,50,3
415 | FMNIST,tree,1,16,50,4
416 | FMNIST,tree,1,16,100,0
417 | FMNIST,tree,1,16,100,1
418 | FMNIST,tree,1,16,100,2
419 | FMNIST,tree,1,16,100,3
420 | FMNIST,tree,1,16,100,4
421 | FMNIST,tree,1,32,10,0
422 | FMNIST,tree,1,32,10,1
423 | FMNIST,tree,1,32,10,2
424 | FMNIST,tree,1,32,10,3
425 | FMNIST,tree,1,32,10,4
426 | FMNIST,tree,1,32,50,0
427 | FMNIST,tree,1,32,50,1
428 | FMNIST,tree,1,32,50,2
429 | FMNIST,tree,1,32,50,3
430 | FMNIST,tree,1,32,50,4
431 | FMNIST,tree,1,32,100,0
432 | FMNIST,tree,1,32,100,1
433 | FMNIST,tree,1,32,100,2
434 | FMNIST,tree,1,32,100,3
435 | FMNIST,tree,1,32,100,4
436 | FMNIST,tree,1,64,10,0
437 | FMNIST,tree,1,64,10,1
438 | FMNIST,tree,1,64,10,2
439 | FMNIST,tree,1,64,10,3
440 | FMNIST,tree,1,64,10,4
441 | FMNIST,tree,1,64,50,0
442 | FMNIST,tree,1,64,50,1
443 | FMNIST,tree,1,64,50,2
444 | FMNIST,tree,1,64,50,3
445 | FMNIST,tree,1,64,50,4
446 | FMNIST,tree,1,64,100,0
447 | FMNIST,tree,1,64,100,1
448 | FMNIST,tree,1,64,100,2
449 | FMNIST,tree,1,64,100,3
450 | FMNIST,tree,1,64,100,4
451 | FMNIST,ring,0,16,10,0
452 | FMNIST,ring,0,16,10,1
453 | FMNIST,ring,0,16,10,2
454 | FMNIST,ring,0,16,10,3
455 | FMNIST,ring,0,16,10,4
456 | FMNIST,ring,0,16,50,0
457 | FMNIST,ring,0,16,50,1
458 | FMNIST,ring,0,16,50,2
459 | FMNIST,ring,0,16,50,3
460 | FMNIST,ring,0,16,50,4
461 | FMNIST,ring,0,16,100,0
462 | FMNIST,ring,0,16,100,1
463 | FMNIST,ring,0,16,100,2
464 | FMNIST,ring,0,16,100,3
465 | FMNIST,ring,0,16,100,4
466 | FMNIST,ring,0,32,10,0
467 | FMNIST,ring,0,32,10,1
468 | FMNIST,ring,0,32,10,2
469 | FMNIST,ring,0,32,10,3
470 | FMNIST,ring,0,32,10,4
471 | FMNIST,ring,0,32,50,0
472 | FMNIST,ring,0,32,50,1
473 | FMNIST,ring,0,32,50,2
474 | FMNIST,ring,0,32,50,3
475 | FMNIST,ring,0,32,50,4
476 | FMNIST,ring,0,32,100,0
477 | FMNIST,ring,0,32,100,1
478 | FMNIST,ring,0,32,100,2
479 | FMNIST,ring,0,32,100,3
480 | FMNIST,ring,0,32,100,4
481 | FMNIST,ring,0,64,10,0
482 | FMNIST,ring,0,64,10,1
483 | FMNIST,ring,0,64,10,2
484 | FMNIST,ring,0,64,10,3
485 | FMNIST,ring,0,64,10,4
486 | FMNIST,ring,0,64,50,0
487 | FMNIST,ring,0,64,50,1
488 | FMNIST,ring,0,64,50,2
489 | FMNIST,ring,0,64,50,3
490 | FMNIST,ring,0,64,50,4
491 | FMNIST,ring,0,64,100,0
492 | FMNIST,ring,0,64,100,1
493 | FMNIST,ring,0,64,100,2
494 | FMNIST,ring,0,64,100,3
495 | FMNIST,ring,0,64,100,4
496 | FMNIST,ring,1,16,10,0
497 | FMNIST,ring,1,16,10,1
498 | FMNIST,ring,1,16,10,2
499 | FMNIST,ring,1,16,10,3
500 | FMNIST,ring,1,16,10,4
501 | FMNIST,ring,1,16,50,0
502 | FMNIST,ring,1,16,50,1
503 | FMNIST,ring,1,16,50,2
504 | FMNIST,ring,1,16,50,3
505 | FMNIST,ring,1,16,50,4
506 | FMNIST,ring,1,16,100,0
507 | FMNIST,ring,1,16,100,1
508 | FMNIST,ring,1,16,100,2
509 | FMNIST,ring,1,16,100,3
510 | FMNIST,ring,1,16,100,4
511 | FMNIST,ring,1,32,10,0
512 | FMNIST,ring,1,32,10,1
513 | FMNIST,ring,1,32,10,2
514 | FMNIST,ring,1,32,10,3
515 | FMNIST,ring,1,32,10,4
516 | FMNIST,ring,1,32,50,0
517 | FMNIST,ring,1,32,50,1
518 | FMNIST,ring,1,32,50,2
519 | FMNIST,ring,1,32,50,3
520 | FMNIST,ring,1,32,50,4
521 | FMNIST,ring,1,32,100,0
522 | FMNIST,ring,1,32,100,1
523 | FMNIST,ring,1,32,100,2
524 | FMNIST,ring,1,32,100,3
525 | FMNIST,ring,1,32,100,4
526 | FMNIST,ring,1,64,10,0
527 | FMNIST,ring,1,64,10,1
528 | FMNIST,ring,1,64,10,2
529 | FMNIST,ring,1,64,10,3
530 | FMNIST,ring,1,64,10,4
531 | FMNIST,ring,1,64,50,0
532 | FMNIST,ring,1,64,50,1
533 | FMNIST,ring,1,64,50,2
534 | FMNIST,ring,1,64,50,3
535 | FMNIST,ring,1,64,50,4
536 | FMNIST,ring,1,64,100,0
537 | FMNIST,ring,1,64,100,1
538 | FMNIST,ring,1,64,100,2
539 | FMNIST,ring,1,64,100,3
540 | FMNIST,ring,1,64,100,4
541 | FMNIST,chain,0,16,10,0
542 | FMNIST,chain,0,16,10,1
543 | FMNIST,chain,0,16,10,2
544 | FMNIST,chain,0,16,10,3
545 | FMNIST,chain,0,16,10,4
546 | FMNIST,chain,0,16,50,0
547 | FMNIST,chain,0,16,50,1
548 | FMNIST,chain,0,16,50,2
549 | FMNIST,chain,0,16,50,3
550 | FMNIST,chain,0,16,50,4
551 | FMNIST,chain,0,16,100,0
552 | FMNIST,chain,0,16,100,1
553 | FMNIST,chain,0,16,100,2
554 | FMNIST,chain,0,16,100,3
555 | FMNIST,chain,0,16,100,4
556 | FMNIST,chain,0,32,10,0
557 | FMNIST,chain,0,32,10,1
558 | FMNIST,chain,0,32,10,2
559 | FMNIST,chain,0,32,10,3
560 | FMNIST,chain,0,32,10,4
561 | FMNIST,chain,0,32,50,0
562 | FMNIST,chain,0,32,50,1
563 | FMNIST,chain,0,32,50,2
564 | FMNIST,chain,0,32,50,3
565 | FMNIST,chain,0,32,50,4
566 | FMNIST,chain,0,32,100,0
567 | FMNIST,chain,0,32,100,1
568 | FMNIST,chain,0,32,100,2
569 | FMNIST,chain,0,32,100,3
570 | FMNIST,chain,0,32,100,4
571 | FMNIST,chain,0,64,10,0
572 | FMNIST,chain,0,64,10,1
573 | FMNIST,chain,0,64,10,2
574 | FMNIST,chain,0,64,10,3
575 | FMNIST,chain,0,64,10,4
576 | FMNIST,chain,0,64,50,0
577 | FMNIST,chain,0,64,50,1
578 | FMNIST,chain,0,64,50,2
579 | FMNIST,chain,0,64,50,3
580 | FMNIST,chain,0,64,50,4
581 | FMNIST,chain,0,64,100,0
582 | FMNIST,chain,0,64,100,1
583 | FMNIST,chain,0,64,100,2
584 | FMNIST,chain,0,64,100,3
585 | FMNIST,chain,0,64,100,4
586 | FMNIST,chain,1,16,10,0
587 | FMNIST,chain,1,16,10,1
588 | FMNIST,chain,1,16,10,2
589 | FMNIST,chain,1,16,10,3
590 | FMNIST,chain,1,16,10,4
591 | FMNIST,chain,1,16,50,0
592 | FMNIST,chain,1,16,50,1
593 | FMNIST,chain,1,16,50,2
594 | FMNIST,chain,1,16,50,3
595 | FMNIST,chain,1,16,50,4
596 | FMNIST,chain,1,16,100,0
597 | FMNIST,chain,1,16,100,1
598 | FMNIST,chain,1,16,100,2
599 | FMNIST,chain,1,16,100,3
600 | FMNIST,chain,1,16,100,4
601 | FMNIST,chain,1,32,10,0
602 | FMNIST,chain,1,32,10,1
603 | FMNIST,chain,1,32,10,2
604 | FMNIST,chain,1,32,10,3
605 | FMNIST,chain,1,32,10,4
606 | FMNIST,chain,1,32,50,0
607 | FMNIST,chain,1,32,50,1
608 | FMNIST,chain,1,32,50,2
609 | FMNIST,chain,1,32,50,3
610 | FMNIST,chain,1,32,50,4
611 | FMNIST,chain,1,32,100,0
612 | FMNIST,chain,1,32,100,1
613 | FMNIST,chain,1,32,100,2
614 | FMNIST,chain,1,32,100,3
615 | FMNIST,chain,1,32,100,4
616 | FMNIST,chain,1,64,10,0
617 | FMNIST,chain,1,64,10,1
618 | FMNIST,chain,1,64,10,2
619 | FMNIST,chain,1,64,10,3
620 | FMNIST,chain,1,64,10,4
621 | FMNIST,chain,1,64,50,0
622 | FMNIST,chain,1,64,50,1
623 | FMNIST,chain,1,64,50,2
624 | FMNIST,chain,1,64,50,3
625 | FMNIST,chain,1,64,50,4
626 | FMNIST,chain,1,64,100,0
627 | FMNIST,chain,1,64,100,1
628 | FMNIST,chain,1,64,100,2
629 | FMNIST,chain,1,64,100,3
630 | FMNIST,chain,1,64,100,4
631 | FMNIST,fedavg,0,16,10,0
632 | FMNIST,fedavg,0,16,10,1
633 | FMNIST,fedavg,0,16,10,2
634 | FMNIST,fedavg,0,16,10,3
635 | FMNIST,fedavg,0,16,10,4
636 | FMNIST,fedavg,0,16,50,0
637 | FMNIST,fedavg,0,16,50,1
638 | FMNIST,fedavg,0,16,50,2
639 | FMNIST,fedavg,0,16,50,3
640 | FMNIST,fedavg,0,16,50,4
641 | FMNIST,fedavg,0,16,100,0
642 | FMNIST,fedavg,0,16,100,1
643 | FMNIST,fedavg,0,16,100,2
644 | FMNIST,fedavg,0,16,100,3
645 | FMNIST,fedavg,0,16,100,4
646 | FMNIST,fedavg,0,32,10,0
647 | FMNIST,fedavg,0,32,10,1
648 | FMNIST,fedavg,0,32,10,2
649 | FMNIST,fedavg,0,32,10,3
650 | FMNIST,fedavg,0,32,10,4
651 | FMNIST,fedavg,0,32,50,0
652 | FMNIST,fedavg,0,32,50,1
653 | FMNIST,fedavg,0,32,50,2
654 | FMNIST,fedavg,0,32,50,3
655 | FMNIST,fedavg,0,32,50,4
656 | FMNIST,fedavg,0,32,100,0
657 | FMNIST,fedavg,0,32,100,1
658 | FMNIST,fedavg,0,32,100,2
659 | FMNIST,fedavg,0,32,100,3
660 | FMNIST,fedavg,0,32,100,4
661 | FMNIST,fedavg,0,64,10,0
662 | FMNIST,fedavg,0,64,10,1
663 | FMNIST,fedavg,0,64,10,2
664 | FMNIST,fedavg,0,64,10,3
665 | FMNIST,fedavg,0,64,10,4
666 | FMNIST,fedavg,0,64,50,0
667 | FMNIST,fedavg,0,64,50,1
668 | FMNIST,fedavg,0,64,50,2
669 | FMNIST,fedavg,0,64,50,3
670 | FMNIST,fedavg,0,64,50,4
671 | FMNIST,fedavg,0,64,100,0
672 | FMNIST,fedavg,0,64,100,1
673 | FMNIST,fedavg,0,64,100,2
674 | FMNIST,fedavg,0,64,100,3
675 | FMNIST,fedavg,0,64,100,4
676 | FMNIST,fedavg,1,16,10,0
677 | FMNIST,fedavg,1,16,10,1
678 | FMNIST,fedavg,1,16,10,2
679 | FMNIST,fedavg,1,16,10,3
680 | FMNIST,fedavg,1,16,10,4
681 | FMNIST,fedavg,1,16,50,0
682 | FMNIST,fedavg,1,16,50,1
683 | FMNIST,fedavg,1,16,50,2
684 | FMNIST,fedavg,1,16,50,3
685 | FMNIST,fedavg,1,16,50,4
686 | FMNIST,fedavg,1,16,100,0
687 | FMNIST,fedavg,1,16,100,1
688 | FMNIST,fedavg,1,16,100,2
689 | FMNIST,fedavg,1,16,100,3
690 | FMNIST,fedavg,1,16,100,4
691 | FMNIST,fedavg,1,32,10,0
692 | FMNIST,fedavg,1,32,10,1
693 | FMNIST,fedavg,1,32,10,2
694 | FMNIST,fedavg,1,32,10,3
695 | FMNIST,fedavg,1,32,10,4
696 | FMNIST,fedavg,1,32,50,0
697 | FMNIST,fedavg,1,32,50,1
698 | FMNIST,fedavg,1,32,50,2
699 | FMNIST,fedavg,1,32,50,3
700 | FMNIST,fedavg,1,32,50,4
701 | FMNIST,fedavg,1,32,100,0
702 | FMNIST,fedavg,1,32,100,1
703 | FMNIST,fedavg,1,32,100,2
704 | FMNIST,fedavg,1,32,100,3
705 | FMNIST,fedavg,1,32,100,4
706 | FMNIST,fedavg,1,64,10,0
707 | FMNIST,fedavg,1,64,10,1
708 | FMNIST,fedavg,1,64,10,2
709 | FMNIST,fedavg,1,64,10,3
710 | FMNIST,fedavg,1,64,10,4
711 | FMNIST,fedavg,1,64,50,0
712 | FMNIST,fedavg,1,64,50,1
713 | FMNIST,fedavg,1,64,50,2
714 | FMNIST,fedavg,1,64,50,3
715 | FMNIST,fedavg,1,64,50,4
716 | FMNIST,fedavg,1,64,100,0
717 | FMNIST,fedavg,1,64,100,1
718 | FMNIST,fedavg,1,64,100,2
719 | FMNIST,fedavg,1,64,100,3
720 | FMNIST,fedavg,1,64,100,4
721 |
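Each row above encodes one experiment configuration. A minimal parsing sketch, assuming the column order is dataset, topology, DP flag, batch size, a count such as rounds or clients, and random seed (inferred from the value ranges, not confirmed by the runner scripts; 'exp.in' is a hypothetical filename):

import csv

def parse_row(row):
    # Assumed column order: dataset, topology, dp, batch_size, count, seed
    dataset, topology, dp, bs, count, seed = row
    return {'dataset': dataset, 'topology': topology, 'dp': bool(int(dp)),
            'bs': int(bs), 'count': int(count), 'seed': int(seed)}

with open('exp.in') as f:  # hypothetical filename
    configs = [parse_row(r) for r in csv.reader(f) if r]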
--------------------------------------------------------------------------------
/code/run_exp/covid_utils.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from keras.preprocessing.image import ImageDataGenerator
3 |
4 | sys.path.insert(1, '/content/pytorch-dp/')
5 | from torchdp import PrivacyEngine
6 | from torchdp import PerSampleGradientClipper
7 | import torch
8 | import random
10 | import torch.nn as nn
11 | import torch.nn.functional as F
12 | import torch.optim as optim
13 | import torchvision
14 | from torchvision import datasets, transforms
15 | import csv
16 | from pprint import pprint
27 |
28 |
29 | class Net(nn.Module):
30 | def __init__(self):
31 | super(Net, self).__init__()
32 | self.conv1 = nn.Conv2d(1, 32, 3, 1)
33 | self.conv2 = nn.Conv2d(32, 64, 3, 1)
34 | self.dropout1 = nn.Dropout2d(0.25)
35 | self.dropout2 = nn.Dropout2d(0.5)
36 |         self.fc1 = nn.Linear(9216, 128)  # 9216 = 64 channels x 12 x 12 after two 3x3 convs and one 2x2 max-pool on 28x28 input
37 | self.fc2 = nn.Linear(128, 10)
38 |
39 | def forward(self, x):
40 | x = self.conv1(x)
41 | x = F.relu(x)
42 | x = self.conv2(x)
43 | x = F.relu(x)
44 | x = F.max_pool2d(x, 2)
45 | x = self.dropout1(x)
46 | x = torch.flatten(x, 1)
47 | x = self.fc1(x)
48 | x = F.relu(x)
49 | x = self.dropout2(x)
50 | x = self.fc2(x)
51 | output = F.log_softmax(x, dim=1)
52 | return output
53 |
54 |
55 | def get_lr(optimizer):
56 | for param_group in optimizer.param_groups:
57 | return param_group['lr']
58 |
59 |
60 | def copy_model(model1, model2):
61 | """
62 |     Copy the named parameters of model1 into model2, in place.
63 | """
64 | params1 = model1.named_parameters()
65 | params2 = model2.named_parameters()
66 |
67 | dict_params2 = dict(params2)
68 |
69 | for name1, param1 in params1:
70 | if name1 in dict_params2:
71 | dict_params2[name1].data.copy_(param1.data)
72 |
73 |
74 | def clear_backprops(model: nn.Module) -> None:
75 | """Delete layer.backprops_list in every layer."""
76 | for layer in model.modules():
77 | if hasattr(layer, "backprops_list"):
78 | del layer.backprops_list
79 |
80 |
81 | def get_covid_model():
82 | model_conv = torchvision.models.resnet18(pretrained=True)
83 | for param in model_conv.parameters():
84 | param.requires_grad = False
85 | num_ftrs = model_conv.fc.in_features
86 | model_conv.fc = nn.Linear(num_ftrs, 2)
87 |
88 | return model_conv
89 |
90 |
91 | from torch.optim.lr_scheduler import StepLR
92 | import torch.utils.data as data_utils
93 | from torch.utils.data import DataLoader
94 | import copy
95 | import numpy as np
97 |
98 |
99 | def get_augmented_data(train_loader, device):
100 | datagen = ImageDataGenerator(
101 | rotation_range=2,
102 | zoom_range=0.01,
103 | width_shift_range=0.01,
104 | height_shift_range=0.01)
105 |
106 | train_dataset = []
107 |
108 | for batch_idx, (data, target) in (enumerate(train_loader)):
109 | train_dataset.append([data, target])
111 | for _ in range(9):
112 | if device == 'cuda':
113 | data_aug_x, data_aug_y = datagen.flow(data.cpu().numpy(), target.cpu().numpy()).next()
114 | else:
115 | data_aug_x, data_aug_y = datagen.flow(data, target).next()
116 |
117 | train_dataset.append([data_aug_x.reshape((1, 3, 224, 224)), target])
118 |
119 | random.shuffle(train_dataset)
120 |
121 | x_train = torch.cat([torch.FloatTensor(x[0]) for x in train_dataset])
122 | y_train = torch.cat([x[1] for x in train_dataset])
123 |
124 | return x_train, y_train
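# Note: this function assumes train_loader yields batches of size 1 holding
# 3x224x224 images (the COVID/ResNet-18 path), since each augmented batch is
# reshaped to (1, 3, 224, 224); Keras' ImageDataGenerator serves only as a
# numpy-level augmenter before the arrays are converted back to torch tensors.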
125 |
126 |
127 | def eval(model, test_loader, device):
128 | """
129 |     Evaluate the model on test_loader; returns accuracy as a percentage.
130 | """
131 | model.eval()
132 | test_loss = 0
133 | correct = 0
134 | if device == 'cuda':
135 | model.to('cuda')
136 | with torch.no_grad():
137 | num_test_samples = 0
138 | for data, target in test_loader:
139 | if device == 'cuda':
140 | data, target = data.to('cuda'), target.to('cuda')
141 | output = model(data)
142 | test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
143 | pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
144 | correct += pred.eq(target.view_as(pred)).sum().item()
145 | num_test_samples += output.shape[0]
146 |
147 | test_loss /= num_test_samples
148 |
149 | return 100. * correct / num_test_samples
150 |
151 |
152 | class Agent_CLF(object):
153 | def __init__(self, params):
154 |
155 | for key, val in params.items():
156 | setattr(self, key, val)
157 | self.logs = {'train_loss': [], 'eps': [], 'val_acc': []}
158 | torch.manual_seed(0)
159 | if not self.covid_model:
160 | self.model = Net()
161 | else:
162 | self.model = get_covid_model()
163 | if self.train_loader is None:
164 |             if not self.augmented:
165 | self.train_loader = DataLoader(dataset=data_utils.TensorDataset(self.x_train, self.y_train),
166 | batch_size=self.bs,
167 | shuffle=True)
168 | else:
169 | self.train_loader = DataLoader(dataset=data_utils.TensorDataset(self.x_train, self.y_train),
170 | batch_size=1,
171 | shuffle=True)
172 |
173 |         if self.augmented:
174 | x_train, y_train = get_augmented_data(self.train_loader, self.device)
175 | self.train_loader = DataLoader(dataset=data_utils.TensorDataset(x_train, y_train), batch_size=self.bs,
176 | shuffle=True)
177 |
178 | self.num_train_samples = float(len(self.train_loader.dataset))
179 | self.num_run_epochs = 0
180 | self.random_idx = 0
181 |
182 |     def set_weights(self, ref_model):
183 |         if not self.covid_model:
184 |             self.model = Net()  # rebuild the local model that copy_model targets below
185 |         else:
186 |             self.model = get_covid_model()
187 |         copy_model(ref_model, self.model)
188 |
189 | def get_weights(self):
190 | """
191 | get model weights
192 | """
193 | w_dict = {}
194 | for name, param in self.model.named_parameters():
195 | w_dict[name] = copy.deepcopy(param)
196 | return w_dict
197 |
198 | def train(self):
199 | """
200 | train/update the curr model of the agent
201 | """
202 | optimizer = optim.Adadelta(self.model.parameters(), lr=self.lr)
203 | scheduler = StepLR(optimizer, step_size=1, gamma=self.gamma)
204 | loss_func = nn.CrossEntropyLoss()
205 |
206 | if self.dp:
207 | self.model.zero_grad()
208 | optimizer.zero_grad()
209 | clear_backprops(self.model)
210 |
211 | privacy_engine = PrivacyEngine(
212 | self.model,
213 | batch_size=self.bs,
214 | sample_size=self.num_train_samples,
215 | alphas=[1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64)),
216 | noise_multiplier=self.sigma,
217 | max_grad_norm=self.C)
218 | privacy_engine.attach(optimizer)
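            # With DP enabled, the engine clips each per-sample gradient to L2
            # norm C and adds Gaussian noise with std sigma * C on every
            # optimizer.step(); alphas are the Renyi orders the privacy
            # accountant uses to track the (epsilon, delta) budget.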
219 |
220 | if self.device == 'cuda':
221 | self.model.to('cuda')
222 | self.model.train()
223 | for _ in range(self.epochs):
224 | num_batches = len(self.train_loader)
225 | start, end = 0, num_batches
226 | if self.fed_avg:
227 | start, end = self.random_idx, self.random_idx + 1
228 | self.random_idx += 1
229 | if self.random_idx >= num_batches:
230 | self.random_idx = 0
231 |
232 | with torch.set_grad_enabled(True):
233 | for batch_idx, (data, target) in enumerate(self.train_loader):
234 | if start <= batch_idx < end:
235 | if self.device == 'cuda':
236 | data, target = data.to('cuda'), target.to('cuda')
237 | optimizer.zero_grad()
238 | output = self.model(data)
239 | loss = loss_func(output, target)
240 | loss.backward()
241 | optimizer.step()
242 | self.logs['train_loss'].append(copy.deepcopy(loss.item()))
243 |
244 | scheduler.step()
245 | self.lr = get_lr(optimizer)
246 | if self.fl_train is False:
247 | curr_acc = eval(self.model, self.test_loader, self.device)
248 | self.logs['val_acc'].append(copy.deepcopy(curr_acc))
249 |
289 |
290 | class BaseFL(object):
291 | def __init__(self, configs=None):
292 | default_configs = {
293 | 'num_clients': 100,
294 | 'T': 20, # num outer epochs 30-40
295 | 'B': 2, # branch size of tree,
296 | 'params': {},
297 | 'device': 'cpu'
298 | }
299 |
300 | if configs is not None:
301 | default_configs.update(configs)
302 |         for key, val in default_configs.items():
303 |             # set each config entry as an attribute on this BaseFL object
304 |             setattr(self, key, val)
308 |
309 | if not self.covid_model:
310 | self.curr_model = Net()
311 | else:
312 | self.curr_model = get_covid_model()
313 |
314 | self.clients = [Agent_CLF(self.params[i]) for i in range(self.num_clients)]
315 | self.logs = {'val_acc': [], 'val_acc_iter': []}
316 |
317 | def shuffle_clients(self):
318 | return np.random.permutation(self.num_clients)
319 |
320 | def set_weights(self, ref_model):
321 | """
322 | Set model
323 | """
324 | if not self.covid_model:
325 | self.curr_model = Net()
326 | else:
327 | self.curr_model = get_covid_model()
328 | copy_model(ref_model, self.curr_model)
329 |
330 | def get_weights(self):
331 | """
332 | get model weights
333 | """
334 | w_dict = {}
335 | for name, param in self.curr_model.named_parameters():
336 | w_dict[name] = copy.deepcopy(param)
337 | return w_dict
338 |
339 | def agg_model(self, model_list, start, end):
340 | with torch.no_grad():
341 | global_params = {}
342 | for param in model_list[start]:
343 |                 param_data = model_list[start][param].data.clone()  # clone so the in-place += below cannot corrupt the first model's weights
344 | num_ = 1.0
345 | for model_state in model_list[start + 1:end]:
346 | param_data += model_state[param].data
347 | num_ += 1.0
348 | param_data /= num_
349 | global_params[param] = param_data
350 |
351 | return global_params
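        # Illustration: for two state dicts whose tensors under a given name
        # are [1., 2.] and [3., 4.], agg_model(models, 0, 2) returns the
        # unweighted mean {name: [2., 3.]} over the slice models[start:end].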
352 |
353 |
354 | class RingFL(BaseFL):
355 | def __init__(self, configs=None):
356 | super().__init__(configs)
357 |
358 | def train(self):
359 | for t in range(self.T):
360 | model_list = []
361 | shuffled_clts = super().shuffle_clients()
362 | for clt in shuffled_clts:
363 | if t >= 1:
364 | self.clients[clt].model = copy.deepcopy(curr_model)
365 | self.clients[clt].train()
366 | model_list.append(dict(self.clients[clt].model.named_parameters()))
367 |
368 | global_params = self.agg_model(model_list, 0, len(model_list))
369 |
370 | if not self.covid_model:
371 | curr_model = Net()
372 | else:
373 | curr_model = get_covid_model()
374 |
375 | curr_model.load_state_dict(global_params, strict=False)
376 | self.curr_model = copy.deepcopy(curr_model)
377 | curr_acc = eval(self.curr_model, self.test_loader, self.device)
378 | print(curr_acc)
379 | self.logs['val_acc'].append(curr_acc)
380 | self.logs['val_acc_iter'].append(curr_acc)
381 |
382 |
383 | class ChainFL(BaseFL):
384 | # extend the base federated learning class for chain topology
385 | def __init__(self, configs=None):
386 | super().__init__(configs)
387 | self.B = 1 # branch factor = 1
388 |
389 | def train(self):
390 | curr_model = None
391 | for _ in range(self.T):
392 | shuffled_clts = super().shuffle_clients()
393 | for clt in shuffled_clts:
394 | if curr_model is not None:
395 | self.clients[clt].set_weights(curr_model)
396 | self.clients[clt].train()
397 | curr_model_dict = self.clients[clt].get_weights()
398 | if not self.covid_model:
399 | curr_model = Net()
400 | else:
401 | curr_model = get_covid_model()
402 |
403 | curr_model.load_state_dict(curr_model_dict, strict=False)
404 | curr_acc = eval(curr_model, self.test_loader, self.device)
405 | self.logs['val_acc_iter'].append(copy.deepcopy(curr_acc))
406 |
407 | curr_acc = eval(curr_model, self.test_loader, self.device)
408 | print(curr_acc)
409 | self.logs['val_acc'].append(curr_acc)
410 |
411 |
412 | class TreeFL(BaseFL):
413 | def __init__(self, configs=None):
414 | super().__init__(configs)
415 |
416 | self.h = [h for h in range(self.num_clients) if (self.B ** h - 1) / (self.B - 1) >= self.num_clients][
417 | 0] # height of tree
418 | self.index_leaf = (self.B ** (self.h - 1) - 1) / (self.B - 1) + 1
419 | self.num_leaves = float(self.num_clients - self.index_leaf + 1)
420 | self.index_level = [int((self.B ** (i - 1) - 1) / (self.B - 1)) for i in range(1, self.h + 1)]
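        # Worked example: with B = 2 and num_clients = 7, h = 3 because
        # (2**3 - 1) / (2 - 1) = 7 >= 7, and index_level = [0, 1, 3], so each
        # round aggregates the shuffled clients level by level over the
        # slices [0, 1) and [1, 3).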
421 |
422 | def train(self):
423 |
424 | for t in range(self.T):
425 | model_list = []
426 |
427 | shuffled_clts = super().shuffle_clients()
428 | print(shuffled_clts)
429 | for i, clt in enumerate(shuffled_clts):
430 |                 parent_index = int(np.floor(
431 |                     (i - 1) / self.B))  # parent of node i in array (level-order) layout is floor((i - 1) / B)
432 | if parent_index >= 0:
433 | parent_model_dict = self.clients[shuffled_clts[parent_index]].get_weights()
434 | if not self.covid_model:
435 | self.clients[clt].model = Net()
436 | else:
437 | self.clients[clt].model = get_covid_model()
438 |
439 | self.clients[clt].model.load_state_dict(parent_model_dict, strict=False)
440 | else:
441 | if t >= 1:
442 | self.clients[clt].model = copy.deepcopy(curr_model)
443 |
444 | self.clients[clt].train()
445 |
446 | model_list.append(dict(self.clients[clt].model.named_parameters()))
447 |
448 | for (start, end) in zip(self.index_level[:-1], self.index_level[1:]):
449 | global_params = self.agg_model(model_list, start, end)
450 | if not self.covid_model:
451 | curr_model = Net()
452 | else:
453 | curr_model = get_covid_model()
454 | curr_model.load_state_dict(global_params, strict=False)
455 | curr_acc = eval(curr_model, self.test_loader, self.device)
456 |
457 | self.logs['val_acc_iter'].append(curr_acc)
458 |
459 | self.curr_model = copy.deepcopy(curr_model)
460 | print(curr_acc)
461 | self.logs['val_acc'].append(curr_acc)
462 |
463 |
464 | class FedAvg(BaseFL):
465 | def __init__(self, configs=None):
466 | super().__init__(configs)
467 | self.R = self.num_clients // 2
468 | for clt_idx in range(self.num_clients):
469 | self.clients[clt_idx].fed_avg = True
470 |
471 | def train(self):
472 | for t in range(self.T):
473 | model_list = []
474 | shuffled_clts = super().shuffle_clients()
475 | for clt in shuffled_clts[:self.R]:
476 | if t >= 1:
477 | self.clients[clt].model = copy.deepcopy(curr_model)
478 | self.clients[clt].train()
479 | model_list.append(dict(self.clients[clt].model.named_parameters()))
480 |
481 | with torch.no_grad():
482 | global_params = {}
483 | for param in model_list[0]:
484 |                     param_data = model_list[0][param].data.clone()  # clone to avoid mutating client 0's weights in place
485 | for model_state in model_list[1:]:
486 | param_data += model_state[param].data
487 | param_data /= len(model_list)
488 | global_params[param] = param_data
489 |
490 | if not self.covid_model:
491 | curr_model = Net()
492 | else:
493 | curr_model = get_covid_model()
494 |
495 | curr_model.load_state_dict(global_params, strict=False)
496 | self.curr_model = copy.deepcopy(curr_model)
497 | curr_acc = eval(self.curr_model, self.test_loader, self.device)
498 | print(curr_acc)
499 | self.logs['val_acc'].append(curr_acc)
500 | self.logs['val_acc_iter'].append(curr_acc)
501 |
502 |
503 | class NewFedAvg(BaseFL):
504 | def __init__(self, configs=None):
505 | super().__init__(configs)
506 | self.R = self.num_clients // 5
507 |
508 | for clt_idx in range(self.num_clients):
509 | self.clients[clt_idx].fed_avg = False
510 |
511 | def train(self):
512 | for t in range(self.T):
513 | model_list = []
514 | shuffled_clts = super().shuffle_clients()
515 | for clt in shuffled_clts[:self.R]:
516 | if t >= 1:
517 | self.clients[clt].model = copy.deepcopy(curr_model)
518 | self.clients[clt].train()
519 | model_list.append(dict(self.clients[clt].model.named_parameters()))
520 |
521 | with torch.no_grad():
522 | global_params = {}
523 | for param in model_list[0]:
524 |                     param_data = model_list[0][param].data.clone()  # clone to avoid mutating client 0's weights in place
525 | for model_state in model_list[1:]:
526 | param_data += model_state[param].data
527 | param_data /= len(model_list)
528 | global_params[param] = param_data
529 |
530 | if not self.covid_model:
531 | curr_model = Net()
532 | else:
533 | curr_model = get_covid_model()
534 |
535 | curr_model.load_state_dict(global_params, strict=False)
536 | self.curr_model = copy.deepcopy(curr_model)
537 | curr_acc = eval(self.curr_model, self.test_loader, self.device)
538 | print(curr_acc)
539 | self.logs['val_acc'].append(curr_acc)
540 | self.logs['val_acc_iter'].append(curr_acc)
541 |
542 |
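# A minimal usage sketch. Hedged: Agent_CLF and BaseFL read their settings via
# setattr, so the exact key set below is an assumption inferred from the
# attribute accesses above, not a documented entry point.
if __name__ == '__main__':
    x = torch.randn(64, 1, 28, 28)                 # toy FMNIST-shaped inputs
    y = torch.randint(0, 10, (64,))
    test_loader = DataLoader(data_utils.TensorDataset(x, y), batch_size=32)

    params = [{'covid_model': False, 'augmented': False, 'dp': False,
               'fed_avg': False, 'fl_train': True, 'train_loader': None,
               'test_loader': test_loader, 'device': 'cpu',
               'x_train': x[i * 32:(i + 1) * 32],
               'y_train': y[i * 32:(i + 1) * 32],
               'bs': 16, 'lr': 1.0, 'gamma': 0.7, 'epochs': 1}
              for i in range(2)]

    fl = ChainFL({'num_clients': 2, 'T': 1, 'params': params, 'device': 'cpu',
                  'covid_model': False, 'test_loader': test_loader})
    fl.train()
    print(fl.logs['val_acc'])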
--------------------------------------------------------------------------------
/.idea/workspace.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------